diff --git a/.classpath b/.classpath index 5ad163ae880..918957e788e 100644 --- a/.classpath +++ b/.classpath @@ -22,7 +22,7 @@ - + @@ -31,13 +31,14 @@ - + + @@ -59,9 +60,11 @@ + + @@ -113,7 +116,7 @@ - + @@ -128,7 +131,7 @@ - + diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 12713a9820a..ef86d22c564 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,7 @@ To help the community judge your pull request (PR), please include the following - A context reference to a [Github Issue](https://github.com/eXist-db/exist/issues), a message in the [eXist-open mailinglist](http://exist-open.markmail.org), or a [specification](https://www.w3.org/TR/xquery-31/). - Tests. The [XQSuite - Annotation-based Test Framework for XQuery](http://exist-db.org/exist/apps/doc/xqsuite.xml) makes it very easy for you to create tests. These tests can be executed from the [eXide editor](http://exist-db.org/exist/apps/eXide/index.html) via XQuery > Run as Test. -Your PR will be tested using [Travis CI](https://travis-ci.org/eXist-db/exist) and [AppVeyor](https://ci.appveyor.com/project/AdamRetter/exist) against a number of operating systems and environments. The build status is visible in the PR. +Your PR will be tested using [Travis CI](https://travis-ci.com/eXist-db/exist) and [AppVeyor](https://ci.appveyor.com/project/AdamRetter/exist) against a number of operating systems and environments. The build status is visible in the PR. To detect errors in your PR before submitting it, please run eXist's full test suite on your own system via `build.sh test`. 
diff --git a/.gitignore b/.gitignore index 57d6e4210ae..b316f958d0c 100644 --- a/.gitignore +++ b/.gitignore @@ -46,7 +46,6 @@ nbproject/private/ start.jar test/external/ test/junit/ -test/src/org/exist/xquery/xqts/ test/temp/ tools/jetty/logs/ tools/jetty/tmp/ @@ -71,7 +70,6 @@ webapp/WEB-INF/dwr20.dtd webapp/WEB-INF/logs/*.log webapp/WEB-INF/logs/*.log.* webapp/WEB-INF/web.xml -webapp/xqts/config.xml tools/yajsw/logs/wrapper.log tools/yajsw/logs/wrapper.log.lck Project_Default.xml diff --git a/.idea/compiler.xml b/.idea/compiler.xml index 5e7e131601f..93ab7df3305 100644 --- a/.idea/compiler.xml +++ b/.idea/compiler.xml @@ -1,7 +1,7 @@ - diff --git a/.travis.yml b/.travis.yml index 823e524ad3f..37e0d0853b1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,10 +4,17 @@ matrix: include: - jdk: openjdk8 - jdk: oraclejdk8 - - jdk: oraclejdk9 + - jdk: openjdk9 + - jdk: openjdk10 + - jdk: openjdk11 +# - jdk: oraclejdk11 - os: osx osx_image: xcode9.2 env: JAVA_HOME=$(/usr/libexec/java_home) script: ./build.sh -Dexist.autodeploy=off -Dtest.haltonerror=true -Dtest.haltonfailure=true clean clean-all all test +cache: + directories: + - $HOME/.ivy2 + - $HOME/.m2 notifications: hipchat: ec8fcfa661addc56a361a8ef536320@integrations diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..fd558d01073 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,77 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at info@exist-db.org. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq + diff --git a/README.md b/README.md index eacc6cff7fd..e043f69f75c 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,9 @@ -## eXist Native XML Database +## eXist-db Native XML Database -[![Build Status](https://travis-ci.org/eXist-db/exist.png?branch=develop)](https://travis-ci.org/eXist-db/exist) +[![Build Status](https://travis-ci.com/eXist-db/exist.png?branch=develop)](https://travis-ci.com/eXist-db/exist) [![Build status](https://ci.appveyor.com/api/projects/status/27jtt2m7s4qvey11/branch/develop?svg=true)](https://ci.appveyor.com/project/AdamRetter/exist/branch/develop) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/c5d7a02842dd4a3c85b1b2ad421b0d13)](https://www.codacy.com/app/eXist-db/exist?utm_source=github.com&utm_medium=referral&utm_content=eXist-db/exist&utm_campaign=Badge_Grade) [![Java 8](https://img.shields.io/badge/java-8-blue.svg)](http://java.oracle.com) @@ -14,7 +14,11 @@ 
[![Download](https://api.bintray.com/packages/existdb/releases/exist/images/download.svg)](https://bintray.com/existdb/releases/exist/_latestVersion) [![HipChat](https://img.shields.io/badge/hipchat-eXist–db-blue.svg)](https://www.hipchat.com/gEBQ3SNfp) -eXist is a high-performance open source native XML database—a NoSQL document database and application platform built entirely around XML technologies. The main homepage for eXist can be found at [exist-db.org](http://www.exist-db.org "eXist Homepage"). This is the GitHub repository of eXist source code, and this page links to resources for downloading, building, and contributing to eXist, below. +[![Code of Conduct](https://img.shields.io/badge/code%20of%20conduct-contributor%20covenant-%235e0d73.svg)](http://contributor-covenant.org/version/1/4/) + +eXist-db is a high-performance open source native XML database—a NoSQL document database and application platform built entirely around XML technologies. The main homepage for eXist-db can be found at [exist-db.org](http://www.exist-db.org "eXist Homepage"). This is the GitHub repository of eXist source code, and this page links to resources for downloading, building, and contributing to eXist-db, below. + +The eXist-db community has adopted the [Contributor Covenant](https://www.contributor-covenant.org/) [Code of Conduct](https://www.contributor-covenant.org/version/1/4/code-of-conduct). 
## Resources diff --git a/appveyor.yml b/appveyor.yml index 92eb499ee54..d9aeda72710 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -8,9 +8,8 @@ environment: JAVA_HOME: C:\Program Files\Java\jdk9 - APPVEYOR_BUILD_WORKER_IMAGE: Ubuntu JAVA_HOME: /usr/lib/jvm/java-8-openjdk-amd64 -# Disabled Ubuntu OpenJDK due to AspectJ bug - https://bugs.eclipse.org/bugs/show_bug.cgi?id=534801 -# - APPVEYOR_BUILD_WORKER_IMAGE: Ubuntu -# JAVA_HOME: /usr/lib/jvm/java-9-openjdk-amd64 + - APPVEYOR_BUILD_WORKER_IMAGE: Ubuntu + JAVA_HOME: /usr/lib/jvm/java-9-openjdk-amd64 - APPVEYOR_BUILD_WORKER_IMAGE: Ubuntu JAVA_HOME: /usr/lib/jvm/java-10-openjdk-amd64 diff --git a/bin/functions.d/eXist-settings.sh b/bin/functions.d/eXist-settings.sh index 51c1857a7b2..e237bc33208 100755 --- a/bin/functions.d/eXist-settings.sh +++ b/bin/functions.d/eXist-settings.sh @@ -1,6 +1,8 @@ -# -*-Shell-script-*- -# Common eXist script functions and settings -# $Id:eXist-settings.sh 7231 2008-01-14 22:33:35Z wolfgang_m $ +#!/usr/bin/env bash + +## +# Common eXist-db script functions and settings +## get_exist_home() { case "$1" in diff --git a/bin/functions.d/getopt-settings.sh b/bin/functions.d/getopt-settings.sh index a672e68f987..f5095643991 100755 --- a/bin/functions.d/getopt-settings.sh +++ b/bin/functions.d/getopt-settings.sh @@ -1,6 +1,8 @@ -# -*-Shell-script-*- -# Common eXist script functions and settings for getopt -# $Id$ +#!/usr/bin/env bash + +## +# Common eXist-db script functions and settings for getopt +## CLIENT_OPTS="|-u|--user|-P|--password|-p|--parse|-C|--config|-r|--remove|-c|--collection|-f|--resource|-g|--get|-m|--mkcol|-R|--rmcol|-x|--xpath|-n|--howmany|-o|--option|-O|--output|-F|--file|-t|--threads|-X|--xupdate|-T|--trace|" @@ -26,100 +28,98 @@ declare -a JAVA_OPTS declare -a CLIENT_PROPS substring() { - [ "${1#*$2*}" = "$1" ] && return 1 - return 0 + [ "${1#*$2*}" = "$1" ] && return 1 + return 0 } is_integer() { - [ $1 -eq 1 ] 2> /dev/null; - if [ $? 
-eq 2 ]; then - echo "Port need to be an integer" > /dev/stderr; - exit 1 - fi - return 0 + [ $1 -eq 1 ] 2> /dev/null; + if [ $? -eq 2 ]; then + echo "Port need to be an integer" > /dev/stderr; + exit 1 + fi + return 0 } is_jmx_switch() { - if substring "${JMX_OPTS}" "|$1|"; then - JMX_ENABLED=1; - return 0; - elif substring "|$1|" "$JMX_SHORT_EQUAL"; then - JMX_ENABLED=1; - JMX_PORT="${1#${JMX_SHORT_EQUAL}}" && is_integer "${JMX_PORT}"; - return 0; - elif substring "|$1|" "${JMX_LONG_EQUAL}"; then - JMX_ENABLED=1; - JMX_PORT="${1#${JMX_LONG_EQUAL}}" && is_integer "${JMX_PORT}"; - return 0; - elif substring "|$1|" "${JMX_SHORT}"; then - JMX_ENABLED=1; - JMX_PORT="${1#${JMX_SHORT}}" && is_integer "${JMX_PORT}"; - return 0; - elif substring "|$1|" "${JMX_LONG}"; then - JMX_ENABLED=1; - JMX_PORT="${1#${JMX_LONG}}" && is_integer "${JMX_PORT}"; - return 0; - fi - return 1; + if substring "${JMX_OPTS}" "|$1|"; then + JMX_ENABLED=1; + return 0; + elif substring "|X$1|" "X$JMX_SHORT_EQUAL"; then + JMX_ENABLED=1; + JMX_PORT="${1#${JMX_SHORT_EQUAL}}" && is_integer "${JMX_PORT}"; + return 0; + elif substring "|X$1|" "X${JMX_LONG_EQUAL}"; then + JMX_ENABLED=1; + JMX_PORT="${1#${JMX_LONG_EQUAL}}" && is_integer "${JMX_PORT}"; + return 0; + elif substring "|X$1|" "X${JMX_SHORT}"; then + JMX_ENABLED=1; + JMX_PORT="${1#${JMX_SHORT}}" && is_integer "${JMX_PORT}"; + return 0; + elif substring "|X$1|" "X${JMX_LONG}"; then + JMX_ENABLED=1; + JMX_PORT="${1#${JMX_LONG}}" && is_integer "${JMX_PORT}"; + return 0; + fi + return 1; } check_quiet_switch() { - if substring "${QUIET_OPTS}" "|$1|"; then - QUIET_ENABLED=1; - fi + if substring "${QUIET_OPTS}" "|$1|"; then + QUIET_ENABLED=1; + fi } get_opts() { - local -a ALL_OPTS=( "$@" ) - local found_jmx_opt - local found_pidfile_opt - - for OPT in "${ALL_OPTS[@]}" ; do - if [ -n "$found_jmx_opt" ] ; then - unset found_jmx_opt - local found_jmx_opt - if ! 
substring "${OPT}" $"-" && is_integer "${OPT}"; then - JMX_PORT="$OPT" - continue - fi - elif [ -n "$found_pidfile_opt" ]; then - unset found_pidfile_opt - PIDFILE=$OPT - continue - fi - - if [ $OPT == "--forking" ]; then - FORKING=1 - continue - fi - - if is_jmx_switch "$OPT"; then - found_jmx_opt=1 - elif [ $OPT == "--pidfile" ]; then - found_pidfile_opt=1 - else - check_quiet_switch "$OPT"; - JAVA_OPTS[${NR_JAVA_OPTS}]="$OPT"; - let "NR_JAVA_OPTS += 1"; - fi - done - - if [ "${QUIET_ENABLED}" -eq 0 ]; then - echo "${JAVA_OPTS[@]}"; - fi + local -a ALL_OPTS=( "$@" ) + local found_jmx_opt + local found_pidfile_opt + + for OPT in "${ALL_OPTS[@]}" ; do + if [ -n "$found_jmx_opt" ] ; then + unset found_jmx_opt + local found_jmx_opt + if ! substring "${OPT}" $"-" && is_integer "${OPT}"; then + JMX_PORT="$OPT" + continue + fi + elif [ -n "$found_pidfile_opt" ]; then + unset found_pidfile_opt + PIDFILE=$OPT + continue + fi + + if [ $OPT == "--forking" ]; then + FORKING=1 + continue + fi + + if is_jmx_switch "$OPT"; then + found_jmx_opt=1 + elif [ $OPT == "--pidfile" ]; then + found_pidfile_opt=1 + else + check_quiet_switch "$OPT"; + JAVA_OPTS[${NR_JAVA_OPTS}]="$OPT"; + let "NR_JAVA_OPTS += 1"; + fi + done + + if [ "${QUIET_ENABLED}" -eq 0 ]; then + echo "${JAVA_OPTS[@]}"; + fi } get_client_props() { - shopt -s extglob - while IFS="=" read -r key value - do - case "${key}" in - \#* ) ;; - * ) - #echo "Read client properties key: ${key}, value: ${value}" - CLIENT_PROPS["${key}"]="${value}" - ;; - esac -done < ${EXIST_HOME}/client.properties - + shopt -s extglob + while IFS="=" read -r key value; do + case "${key}" in + \#* ) ;; + * ) + #echo "Read client properties key: ${key}, value: ${value}" + CLIENT_PROPS["${key}"]="${value}" + ;; + esac + done < ${EXIST_HOME}/client.properties } diff --git a/bin/functions.d/jmx-settings.sh b/bin/functions.d/jmx-settings.sh index 40d31c86d94..71e7b6f1845 100755 --- a/bin/functions.d/jmx-settings.sh +++ 
b/bin/functions.d/jmx-settings.sh @@ -1,6 +1,8 @@ -# -*-Shell-script-*- -# Common eXist script functions and settings for JMX -# $Id:jmx-settings.sh 7231 2008-01-14 22:33:35Z wolfgang_m $ +#!/usr/bin/env bash + +## +# Common eXist-db script functions and settings for JMX +## JMX_ENABLED=0 JMX_PORT=1099 diff --git a/build.properties b/build.properties index 8dcbe0dd9b0..ff0caacbfae 100644 --- a/build.properties +++ b/build.properties @@ -4,7 +4,7 @@ # # $Id$ project.name = eXist-db -project.version = 4.3.0-SNAPSHOT +project.version = 5.0.0-SNAPSHOT # db settings config.dataDir = webapp/WEB-INF/data @@ -27,6 +27,9 @@ keystore.password = secret keystore.file = key.store keystore.validity = 100000 +# The EnsureLockAspect enforces the locking contracts by annotation +enable.ensurelocking.aspect=false + autodeploy=dashboard,shared,eXide,monex,functx,usermanager autodeploy.repo=http://demo.exist-db.org/exist/apps/public-repo use.autodeploy.feature=true @@ -35,11 +38,6 @@ use.autodeploy.feature=true junit.reports = test junit.output = true junit.forked.VM.maxmemory = 512m -# Converted junit tests from external testsuites -# reuire more memory, e g -# XSLTS requires 512m -# XQTS requires more than 400m -junit.forked.VM.maxmemory.external = 512m proxy.nonproxyhosts = proxy.host = proxy.port = 0 @@ -80,9 +78,6 @@ antlr.traceTreeWalker = false # You might need to change PermSpace to atleast 84 MB eg -XX:MaxPermSize=84m # If you only want to point to your own izpack installation directory # add this in local.build.properties instead so you don't commit it by mistake. 
-# -# For generating the MacOSX bundle please download the "Java Application Bundler" from -# https://java.net/projects/appbundler (store in IzPack/lib) izpack.dir = /Applications/IzPack/ # If you wish to sign .app applications and .DMG packages for MacOS diff --git a/build/scripts/build-impl.xml b/build/scripts/build-impl.xml index 40102b9f8be..8ded8dedb4e 100644 --- a/build/scripts/build-impl.xml +++ b/build/scripts/build-impl.xml @@ -36,7 +36,6 @@ - @@ -100,7 +99,7 @@ - + @@ -168,9 +167,6 @@ - - @@ -217,7 +213,8 @@ - + + @@ -271,6 +268,7 @@ + @@ -296,6 +294,9 @@ + + + @@ -742,12 +743,6 @@ - - - - - - @@ -799,7 +794,6 @@ - @@ -809,11 +803,6 @@ - - - - - diff --git a/build/scripts/dist.xml b/build/scripts/dist.xml index 7654941e59f..2c7f61fb4ff 100644 --- a/build/scripts/dist.xml +++ b/build/scripts/dist.xml @@ -219,144 +219,119 @@ - + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + - + - - - - - - - - - + - - - - - - - - - + - - - - - + - - - - - - - - + + - - + + - - + + - - + + + + - + + - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + - - - - - - + - + + + - + + + - - + + @@ -368,27 +343,71 @@ + - + - - + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + - + - - - - + + + + + + + + diff --git a/build/scripts/jarsigner.xml b/build/scripts/jarsigner.xml index 2e586ac230c..0d87bc77da6 100644 --- a/build/scripts/jarsigner.xml +++ b/build/scripts/jarsigner.xml @@ -62,6 +62,9 @@ + + + @@ -134,6 +137,9 @@ + + + @@ -181,7 +187,7 @@ - + diff --git a/build/scripts/junit.xml b/build/scripts/junit.xml index 6b3587f3160..48f2aff8dd3 100644 --- a/build/scripts/junit.xml +++ b/build/scripts/junit.xml @@ -182,6 +182,7 @@ + @@ -204,7 +205,13 @@ - + + + + + + + @@ -289,6 +296,9 @@ + + + @@ -442,140 +452,4 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - JUnit tests generated. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - JUnit tests generated. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/build/scripts/macosx.xml b/build/scripts/macosx.xml index 91ea2afa526..0b71e955f27 100644 --- a/build/scripts/macosx.xml +++ b/build/scripts/macosx.xml @@ -13,11 +13,11 @@ - + - + @@ -171,7 +171,6 @@ - diff --git a/build/scripts/setup.xml b/build/scripts/setup.xml index ecb847fdc20..7690423d43c 100644 --- a/build/scripts/setup.xml +++ b/build/scripts/setup.xml @@ -46,6 +46,7 @@ Download xar files from the package website. --> + @@ -55,6 +56,10 @@ + + + + diff --git a/conf.xml.tmpl b/conf.xml.tmpl index d5329fb3b4d..b32af81063c 100644 --- a/conf.xml.tmpl +++ b/conf.xml.tmpl @@ -138,6 +138,7 @@ If you need stable, incremental ids, set the option doc-ids to "incremental". + - minDiskSpace: The amount of disk space (in megabytes) which should be available for the database to continue operations. If free disk space goes below @@ -145,6 +146,47 @@ switch to read-only mode in order to prevent potential data loss. Set the limit large enough to allow all pending operations to complete. The default is 1 gigabyte. + + - posix-chown-restricted: + As defined by POSIX.1 for _POSIX_CHOWN_RESTRICTED. + + When posix-chown-restricted="true" (the default) then: + 1. Only a superuser process can change the user ID of the file. + 2. A non-superuser process can change the group ID of the file + if the process owns the file (the effective user ID equals + the user ID of the file), and group equals either the + effective group ID of the process or one of the + process’s supplementary group IDs. + This means that when posix-chown-restricted="true", you can’t change + the user ID of your files. You can change the group ID of files that + you own, but only to groups that you belong to. 
+ + When posix-chown-restricted="false" you can change the user ID of + any file that you own, effectively "giving away the file" to + another user. Such a setting has negative security implications, + further details can be seen in the "Rationale" section for the + chown function in the POSIX.1-2017 (Issue 7, 2018 edition) standard. + See: http://pubs.opengroup.org/onlinepubs/9699919799/functions/chown.html#tag_16_59_07 + + - preserve-on-copy + When copying Collections and Documents within the database, the + default (`false`), is not to preserve their attributes + (modification time, mode, user-id, group-id, and ACL). + + NOTE: Not preserving attributes, is inline with both the GNU and + BSD `cp` commands, and therefore expected behaviour; The target + Collection or Document is created following the rules of the + target parent, and the effective user and their umask. + + Setting preserve-on-copy="true" changes the default behaviour + so that the target Collection or Document of a copy, has the same + attributes as the source. + + The preserve-on-copy setting can be overridden on a case-by-case + basis by setting the `preserve` flag to either `true` or `false` + when calling xmldb:copy(), or via any other API that supports copy. + Omitting the preserve flag when calling a copy operation, implies + the behaviour that is set in this configuration. ===================================================================== @@ -157,7 +199,7 @@ --> + doc-ids="default" minDiskSpace="128M" posix-chown-restricted="true" preserve-on-copy="false"> + + + + + + + + + + + + diff --git a/exist-versioning-release.md b/exist-versioning-release.md index 89d01fb922b..cbddad928f3 100644 --- a/exist-versioning-release.md +++ b/exist-versioning-release.md @@ -122,7 +122,11 @@ Once development on a new stable version is complete, the following steps will p project.version = 3.1.0 ``` - And commit the changes and push to `origin` (or `upstream` if you are on a fork). 
+ And commit the changes and push to `origin` (or `upstream` if you are on a fork): + ``` + $ git commit build.properties -m "[release] Set version for 3.1.0 release" + $ git push origin develop + ``` 4. Git tag **and sign** eXist-3.1.0 from the `HEAD` of `develop` branch and push the tag to `origin` (or `upstream` if you are on a fork): ``` @@ -139,7 +143,11 @@ Once development on a new stable version is complete, the following steps will p project.version = 3.2.0-SNAPSHOT ``` - And commit the changes and push to `origin` (or `upstream` if you are on a fork). + And commit the changes and push to `origin` (or `upstream` if you are on a fork): + ``` + $ git commit build.properties -m "[release] Set version to 3.2.0-SNAPSHOT" + $ git push origin develop + ``` **NOTE:** We increment to the next `MINOR` version, rather than to the next `PATCH` or `MAJOR` version, for two reasons. First, we assume the next version will likely contain features and not just bug patches, although this does not prevent us from doing a `3.1.1` (a `PATCH` release) release next, should we have only patches. By the same token, the future is uncertain and we recognise that it is easier to release features with non-breaking API changes and patches, although this still does not prevent us from doing a `4.0.0` release next, should we have breaking API changes. @@ -166,19 +174,21 @@ Once development on a new stable version is complete, the following steps will p 4. Perform the build of the tag: ``` $ git checkout eXist-3.1.0 - $ ./build.sh jnlp-unsign-all all jnlp-sign-exist jnlp-sign-core jnlp-sign-exist-extensions - $ ./build.sh installer app-signed dist-war + $ ./build.sh clean clean-all jnlp-unsign-all all jnlp-sign-exist jnlp-sign-core jnlp-sign-exist-extensions + $ ./build.sh installer app-signed dist-war dist-bz2 ``` #### Publishing the Product Release -1. 
Login to https://bintray.com/existdb/ and create a new "Version", then upload the files `$EXIST_HOME/installer/eXist-db-setup-3.2.0.jar`, `$EXIST_HOME/dist/eXist-db-3.2.0.dmg` and `$EXIST_HOME/dist/exist-3.1.0.war`. Once the files have uploaded, make sure to click "Publish" to publish them to the version. Once published, you need to go to the "Files" section of the version, and click "Actions"->"Show in downloads list" for each file. +1. Login to https://bintray.com/existdb/ and create a new "Version", then upload the files `$EXIST_HOME/installer/eXist-db-setup-3.2.0.jar`, `$EXIST_HOME/dist/eXist-db-3.2.0.dmg`, `$EXIST_HOME/dist/exist-3.1.0.war`, and `$EXIST_HOME/dist/exist-3.1.0.tar.bz2`. Once the files have uploaded, make sure to click "Publish" to publish them to the version. Once published, you need to go to the "Files" section of the version, and click "Actions"->"Show in downloads list" for each file. 2. Update and publish the latest Maven artifacts as described here: https://github.com/exist-db/mvn-repo 3. Ask [Evolved Binary](http://www.evolvedbinary.com) to build and upload new Docker Images for the latest release. -4. Edit the links for the downloads on the eXist website. +4. Update the Mac HomeBrew for eXist-db, see: [Releasing to Homebrew](https://github.com/eXist-db/exist/blob/develop/exist-versioning-release.md#releasing-to-homebrew). + +5. Edit the links for the downloads on the eXist website. 1. `$ git clone https://github.com/exist-db/website.git` @@ -218,7 +228,7 @@ Once development on a new stable version is complete, the following steps will p ``` - 3. Edit the file `expath-pkg.xml` and modify `version="3.1.0"` to reflect the new version. + 3. Edit the file `expath-pkg.xml` and bump the version i.e. `version="4"` to reflect the new version. 4. 
Commit your change and push: `$ git commit index.html expath-pkg.xml -m "Update for eXist-3.1.0 website" && git push origin master` @@ -228,17 +238,21 @@ Once development on a new stable version is complete, the following steps will p 7. Visit http://www.exist-db.org/exist/apps/dashboard/index.html, login and upload the new `build/homepage.xar` file via the Package Manager. -5. Login to the blog at http://exist-db.org/exist/apps/wiki/blogs/eXist/ and add a new news item which announces the release and holds the release notes. It should be named like http://exist-db.org/exist/apps/wiki/blogs/eXist/eXistdb310 +6. Login to the blog at http://exist-db.org/exist/apps/wiki/blogs/eXist/ and add a new news item which announces the release and holds the release notes. It should be named like http://exist-db.org/exist/apps/wiki/blogs/eXist/eXistdb310 + +7. Visit the GitHub releases page https://github.com/eXist-db/exist/releases and create a new release, enter the tag you previously created and link the release notes from the blog and the binaries from BinTray. + +8. Send an email to the `exist-open` mailing list announcing the release with a title similar to `[ANN] Release of eXist 3.1.0`, copy and paste the release notes from the blog into the email and reformat appropriately (see past emails). -6. Visit the GitHub releases page https://github.com/eXist-db/exist/releases and create a new release, enter the tag you previously created and link the release notes from the blog and the binaries from BinTray. +9. Tweet about it using the `existdb` twitter account. -7. Send an email to the `exist-open` mailing list announcing the release with a title similar to `[ANN] Release of eXist 3.1.0`, copy and paste the release notes from the blog into the email and reformat appropriately (see past emails). +10. Post it to the LinkedIn eXist-db group: https://www.linkedin.com/groups/35624 -8. Tweet about it using the `existdb` twitter account. +11. 
Submit a news item to XML.com - https://www.xml.com/news/submit-news-item/. -9. Submit a news item to XML.com - https://www.xml.com/news/submit-news-item/. +12. Update the Wikipedia page with the new version details - https://en.wikipedia.org/wiki/EXist. -10. Go to GitHub and move all issues and PRs which are still open for the release milestone to the next release milestone. Close the release milestone. +13. Go to GitHub and move all issues and PRs which are still open for the release milestone to the next release milestone. Close the release milestone. ### Preparing a Patch Release diff --git a/extensions/betterform/README.md b/extensions/betterform/README.md new file mode 100644 index 00000000000..712ef211814 --- /dev/null +++ b/extensions/betterform/README.md @@ -0,0 +1,29 @@ +# betterFORM eXist-db Integration + +Since eXist-db 5.0.0, the full betterFORM XForms package is no longer enabled by default, instead just the `ResourceServlet` is present (as this is needed by eXide and a bunch of other apps). + + +## Install Instructions + +By default only the betterForm ResourceServlet is installed. If you want the full betterForm XForms experience, then: + +1. Stop eXist-db (if it is running). + +2. Set the property `include.feature.betterform` in your `$EXIST_HOME/extensions/local.build.properties` file. See `$EXIST_HOME/extensions/build.properties` for details. + +3. `cd $EXIST_HOME/extensions/betterform`. + +4. Run the following: `../../build.sh install` (or you can use `..\..\build.bat install` if you are on Windows). + +5. Start eXist-db. + + +## Uninstall Instructions + +1. Stop eXist-db (if it is running). + +2. `cd $EXIST_HOME/extensions/betterform`. + +3. Run the following: `../../build.sh clean` (or you can use `..\..\build.bat clean` if you are on Windows). + +4. Start eXist-db. 
diff --git a/extensions/betterform/build.xml b/extensions/betterform/build.xml index 982468e9f1f..30ca6d804a4 100644 --- a/extensions/betterform/build.xml +++ b/extensions/betterform/build.xml @@ -1,53 +1,48 @@ - - + + + + + + + + + + - + + + + + + + + + + + + + + + - - - - - - XSLT Generating @{out} - - - - - - - - - - - - - - - + + + - - @@ -55,77 +50,104 @@ - - - Add betterFORM config parameters to eXist to ${exist.web.xml} - + - + - - Deploy betterFORM resources to eXist webapp (${exist.rootdir}/webapp/WEB-INF/) - + + - + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - Uninstalling betterFORM. - + - - + - - - - - - - - + + + + + + + + - - - - - - - - - - - - - - Creating uninstall folder. - + + + + + + + + + + + + + - + - - Copying betterFORM configuration files to WEB-INF dir. - - - - - - - - - diff --git a/extensions/betterform/main/etc/MergeWebXML.xsl b/extensions/betterform/main/etc/MergeWebXML.xsl index 683505aecad..943616a00d6 100644 --- a/extensions/betterform/main/etc/MergeWebXML.xsl +++ b/extensions/betterform/main/etc/MergeWebXML.xsl @@ -1,44 +1,114 @@ - + exclude-result-prefixes="webxml xs" + version="2.0"> - + - - + + + + + + + + + + - - - - - ====================== betterFORM configuration file ====================== - - - + + + + + betterFORM configuration file + + betterform.configfile WEB-INF/betterform-config.xml - - - - - ====================== betterFORM filter and servlets ====================== - - - ====================== betterFORM servlets ====================== - - + + + + + + + + + + betterFORM servlets + + + + + + + + + + + + + + betterFORM servlet mappings + + + + + + + + + + + + betterFORM filter + + + + + + + betterFORM filter mappings + + + + + + + + + + + + + ResourceServlet + de.betterform.agent.web.resources.ResourceServlet + + caching + true + + + + + + + ResourceServlet + /bfResources/* + + + + Flux org.directwebremoting.servlet.DwrServlet @@ -59,24 +129,13 @@ inspector 
de.betterform.agent.web.servlet.XFormsInspectorServlet - - ResourceServlet - de.betterform.agent.web.resources.ResourceServlet - - caching - true - - error de.betterform.agent.web.servlet.ErrorServlet - - - - ====================== betterFORM servlets mapping ====================== - - + + + Flux /Flux/* @@ -97,30 +156,20 @@ inspector /inspector/* - - ResourceServlet - /bfResources/* - error /error/* - - - - ====================== betterFORM filter ====================== - - + + + XFormsFilter de.betterform.agent.web.filter.XFormsFilter - - - - ====================== betterFORM filter mapping ====================== - - + + + XFormsFilter /apps/* @@ -129,21 +178,31 @@ XFormsFilter XFormsPostServlet - - + de.betterform.agent.web.servlet.BfServletContextListener - - - ====================== betterFORM filter and servlets ====================== - - + + + + + + + + + + + + + + + + diff --git a/extensions/build.properties b/extensions/build.properties index 6158f2589a8..e4c37acd577 100644 --- a/extensions/build.properties +++ b/extensions/build.properties @@ -15,7 +15,11 @@ ## Features # betterFORM XForms engine -include.feature.betterform = true +# Valid options are: +# 1. true betterForm will be compiled and deployed into eXist-db (will downgrade the version of Saxon to Saxon HE 9.6.0-7) +# 2. false No betterForm components will be compiled +# 3. 
resourceServlet betterForm will be compiled but only the betterForm ResourceServlet will be deployed +include.feature.betterform = resourceServlet # EXQuery RESTXQ include.feature.exquery.restxq = true diff --git a/extensions/debuggee/src/org/exist/debuggee/DebuggeeJointImpl.java b/extensions/debuggee/src/org/exist/debuggee/DebuggeeJointImpl.java index 3710d7957fe..0d68ac4a38e 100644 --- a/extensions/debuggee/src/org/exist/debuggee/DebuggeeJointImpl.java +++ b/extensions/debuggee/src/org/exist/debuggee/DebuggeeJointImpl.java @@ -62,8 +62,8 @@ public class DebuggeeJointImpl implements DebuggeeJoint, Status { private int stackDepth = 0; private CommandContinuation command = null; - private Stack commands = new Stack(); - + private Deque commands = new ArrayDeque(); + private int breakpointNo = 0; //> private Map> filesBreakpoints = @@ -77,9 +77,6 @@ public class DebuggeeJointImpl implements DebuggeeJoint, Status { private CompiledXQuery compiledXQuery; private boolean inProlog = false; - - public DebuggeeJointImpl() { - } protected void setCompiledScript(CompiledXQuery compiledXQuery) { this.compiledXQuery = compiledXQuery; @@ -246,7 +243,7 @@ public void expressionEnd(Expression expr) { } private synchronized void waitCommand() { - if (commands.size() != 0 && command.isStatus(BREAK)) { + if (!commands.isEmpty() && command.isStatus(BREAK)) { command = commands.pop(); ((AbstractCommandContinuation)command).setCallStackDepth(stackDepth); @@ -297,7 +294,7 @@ public synchronized void continuation(CommandContinuation command) { this.command = command; } else { - commands.add(command); + commands.push(command); } notifyAll(); diff --git a/extensions/debuggee/src/org/exist/debuggee/ScriptRunner.java b/extensions/debuggee/src/org/exist/debuggee/ScriptRunner.java index 5293327d396..c7bf7e62786 100644 --- a/extensions/debuggee/src/org/exist/debuggee/ScriptRunner.java +++ b/extensions/debuggee/src/org/exist/debuggee/ScriptRunner.java @@ -31,6 +31,8 @@ import 
org.exist.xquery.CompiledXQuery; import org.exist.xquery.XQuery; +import static org.exist.util.ThreadUtils.newInstanceThread; + /** * @author Dmitriy Shabanov * * @@ -48,7 +50,7 @@ public ScriptRunner(SessionImpl session, CompiledXQuery compiled) { this.session = session; expression = compiled; - thread = new Thread(this); + thread = newInstanceThread(BrokerPool.getInstance(), "scriptRunner", this); thread.setDaemon(true); thread.setName("Debug session "+compiled.getContext().hashCode()); } diff --git a/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Command.java b/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Command.java index 62a318976b3..c8e2d65f40c 100644 --- a/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Command.java +++ b/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Command.java @@ -194,7 +194,7 @@ public static Command parse(IoSession session, String message) { public static String getFileuri(org.exist.source.Source fileuri) { // System.out.println("getFileuri dbgp:"+fileuri.getType()+"://"+fileuri.getKey()); - if (fileuri.type().toLowerCase().equals("file")) { + if (fileuri.type().equalsIgnoreCase("file")) { try { return new java.io.File(fileuri.path()).toURI().toURL().toString(); } catch (MalformedURLException e) { diff --git a/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Source.java b/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Source.java index 88dc47fff97..51c04f7e229 100644 --- a/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Source.java +++ b/extensions/debuggee/src/org/exist/debuggee/dbgp/packets/Source.java @@ -27,6 +27,7 @@ import org.exist.debuggee.dbgp.Errors; import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; @@ -96,7 +97,6 @@ public void exec() { return; 
} - DocumentImpl resource = null; InputStream is = null; try { @@ -109,11 +109,11 @@ public void exec() { XmldbURI pathUri = XmldbURI.create( URLDecoder.decode( fileURI.substring(15) , "UTF-8" ) ); Database db = getJoint().getContext().getDatabase(); - try(final DBBroker broker = db.getBroker()) { - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + try(final DBBroker broker = db.getBroker(); + final LockedDocument resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK)) { - if (resource.getResourceType() == DocumentImpl.BINARY_FILE) { - is = broker.getBinaryResource((BinaryDocument) resource); + if (resource.getDocument().getResourceType() == DocumentImpl.BINARY_FILE) { + is = broker.getBinaryResource((BinaryDocument) resource.getDocument()); } else { //TODO: xml source??? return; @@ -150,10 +150,6 @@ public void exec() { } } } - - if(resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); - } } } diff --git a/extensions/debuggee/src/org/exist/debugger/DebuggerImpl.java b/extensions/debuggee/src/org/exist/debugger/DebuggerImpl.java index 727f30eb2e7..6597d9016f5 100644 --- a/extensions/debuggee/src/org/exist/debugger/DebuggerImpl.java +++ b/extensions/debuggee/src/org/exist/debugger/DebuggerImpl.java @@ -50,6 +50,8 @@ import org.w3c.dom.NodeList; import org.w3c.dom.Text; +import static org.exist.util.ThreadUtils.newGlobalThread; + /** * @author Dmitriy Shabanov * * @@ -117,7 +119,7 @@ public DebuggingSource init(String url) throws IOException, sources = new HashMap(); currentTransactionId = 1; - Thread session = new Thread(new HttpSession(this, url)); + Thread session = newGlobalThread("debuggerHttpSession", new HttpSession(this, url)); session.start(); // 30s timeout diff --git a/extensions/debuggee/src/org/exist/debugger/dbgp/CodecFactory.java b/extensions/debuggee/src/org/exist/debugger/dbgp/CodecFactory.java index c9b2a27a4ab..28f023432d9 100644 --- a/extensions/debugger/dbgp/CodecFactory.java +++ 
b/extensions/debuggee/src/org/exist/debugger/dbgp/CodecFactory.java @@ -37,9 +37,6 @@ public class CodecFactory implements ProtocolCodecFactory { private Map encoders = new HashMap(); private Map decoders = new HashMap(); - - public CodecFactory() { - } public ProtocolDecoder getDecoder(IoSession ioSession) throws Exception { synchronized (decoders) {//TODO: rewrite??? diff --git a/extensions/debuggee/test/org/exist/debugger/DebuggerTest.java b/extensions/debuggee/test/org/exist/debugger/DebuggerTest.java index 6af31dcb10d..1db125f4b4a 100644 --- a/extensions/debuggee/test/org/exist/debugger/DebuggerTest.java +++ b/extensions/debuggee/test/org/exist/debugger/DebuggerTest.java @@ -358,7 +358,7 @@ public void testResourceNotExistOrNotRunnable() throws IOException { // jetty.port.jetty debugger.init("http://127.0.0.1:" + System.getProperty("jetty.port") + "/exist/logo.jpg"); - assertTrue("This point should not be reached", false); + fail("This point should not be reached"); } catch (IOException e) { exception = e; @@ -372,7 +372,7 @@ public void testResourceNotExistOrNotRunnable() throws IOException { // jetty.port.jetty debugger.init("http://127.0.0.1:" + System.getProperty("jetty.port") + "/notExist/fibo.xql"); - assertTrue("This point should not be reached", false); + fail("This point should not be reached"); } catch (IOException e) { exception = e; diff --git a/extensions/exiftool/src/org/exist/exiftool/xquery/MetadataFunctions.java b/extensions/exiftool/src/org/exist/exiftool/xquery/MetadataFunctions.java index b8e63bee8fb..7215a38ca98 100644 --- a/extensions/exiftool/src/org/exist/exiftool/xquery/MetadataFunctions.java +++ b/extensions/exiftool/src/org/exist/exiftool/xquery/MetadataFunctions.java @@ -12,6 +12,7 @@ import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import 
org.exist.source.Source; import org.exist.source.SourceFactory; @@ -90,11 +91,10 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce } - private Sequence extractMetadataFromLocalResource(XmldbURI docUri) throws XPathException { - DocumentImpl doc = null; - try { - doc = context.getBroker().getXMLResource(docUri, LockMode.READ_LOCK); - if (doc instanceof BinaryDocument) { + private Sequence extractMetadataFromLocalResource(final XmldbURI docUri) throws XPathException { + try(final LockedDocument lockedDoc = context.getBroker().getXMLResource(docUri, LockMode.READ_LOCK)) { + + if (lockedDoc != null && lockedDoc.getDocument() instanceof BinaryDocument) { //resolve real filesystem path of binary file final Path binaryFile = ((NativeBroker) context.getBroker()).getCollectionBinaryFileFsPath(docUri); if (!Files.exists(binaryFile)) { @@ -106,10 +106,6 @@ private Sequence extractMetadataFromLocalResource(XmldbURI docUri) throws XPathE } } catch (PermissionDeniedException pde) { throw new XPathException("Could not access binary document: " + pde.getMessage(), pde); - } finally { - if (doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } } } @@ -159,6 +155,9 @@ private Sequence exifToolWebExtract(final URI uri) throws XPathException { try(final OutputStream stdOut = p.getOutputStream()) { final Source src = SourceFactory.getSource(context.getBroker(), null, uri.toString(), false); + if (src == null) { + throw new XPathException("Could not read source for the Exiftool: " + uri.toString()); + } try(final InputStream isSrc = src.getInputStream()) { //write the remote data to stdOut diff --git a/extensions/expath/lib/http-client-java-1.0-SNAPSHOT.jar b/extensions/expath/lib/http-client-java-1.0-SNAPSHOT.jar index be0d8afdb6f..aea03b9404e 100644 Binary files a/extensions/expath/lib/http-client-java-1.0-SNAPSHOT.jar and b/extensions/expath/lib/http-client-java-1.0-SNAPSHOT.jar differ diff --git 
a/extensions/expath/src/org/expath/exist/SendRequestFunction.java b/extensions/expath/src/org/expath/exist/SendRequestFunction.java index 3a8302b4109..129a06920d4 100644 --- a/extensions/expath/src/org/expath/exist/SendRequestFunction.java +++ b/extensions/expath/src/org/expath/exist/SendRequestFunction.java @@ -23,7 +23,6 @@ import java.net.URI; import java.net.URISyntaxException; - import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -170,7 +169,7 @@ private EXistResult sendOnce(final URI uri, final HttpRequest request, final Req HttpConnection conn = null; try { conn = new ApacheHttpConnection(uri); - final HttpResponse response = request.send(result, conn, parser.getSendAuth() ? parser.getCredentials() : null); + final HttpResponse response = request.send(result, conn, parser.getCredentials()); if(response.getStatus() == HttpStatus.SC_UNAUTHORIZED && parser.getCredentials() != null) { // requires authorization, try again with auth result = new EXistResult(getContext()); diff --git a/extensions/expath/src/org/expath/exist/ZipEntryFunctions.java b/extensions/expath/src/org/expath/exist/ZipEntryFunctions.java index 21eb96c6f11..418a241ccab 100644 --- a/extensions/expath/src/org/expath/exist/ZipEntryFunctions.java +++ b/extensions/expath/src/org/expath/exist/ZipEntryFunctions.java @@ -26,6 +26,7 @@ import java.io.Reader; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; +import javax.annotation.Nullable; import javax.xml.transform.stream.StreamSource; import org.apache.logging.log4j.LogManager; @@ -33,6 +34,7 @@ import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.QName; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; @@ -222,7 +224,7 @@ public interface ZipFileSource extends 
AutoCloseable { } protected static class ZipFileFromDb implements ZipFileSource { - private BinaryDocument binaryDoc = null; + private LockedDocument binaryDoc = null; private final XmldbURI uri; public ZipFileFromDb(final XmldbURI uri) { @@ -235,26 +237,30 @@ public ZipInputStream getStream(final DBBroker broker) throws IOException, Permi binaryDoc = getDoc(broker); } - return new ZipInputStream(broker.getBinaryResource(binaryDoc)); + return new ZipInputStream(broker.getBinaryResource((BinaryDocument)binaryDoc.getDocument())); } @Override public void close() { if (binaryDoc != null) { - binaryDoc.getUpdateLock().release(LockMode.READ_LOCK); + binaryDoc.close(); } } - private BinaryDocument getDoc(final DBBroker broker) throws PermissionDeniedException { - final DocumentImpl doc = broker.getXMLResource(uri, LockMode.READ_LOCK); - if (doc == null) { + /** + * @return only binary document otherwise null + */ + @Nullable + private LockedDocument getDoc(final DBBroker broker) throws PermissionDeniedException { + final LockedDocument lockedDoc = broker.getXMLResource(uri, LockMode.READ_LOCK); + if(lockedDoc == null) { return null; - } else if (doc.getResourceType() != DocumentImpl.BINARY_FILE) { - doc.getUpdateLock().release(LockMode.READ_LOCK); + } else if (lockedDoc.getDocument().getResourceType() != DocumentImpl.BINARY_FILE) { + lockedDoc.close(); return null; } - return (BinaryDocument) doc; + return lockedDoc; } } } \ No newline at end of file diff --git a/extensions/expath/src/org/expath/exist/ZipFileFunctions.java b/extensions/expath/src/org/expath/exist/ZipFileFunctions.java index a57206fb546..a294c0840e6 100644 --- a/extensions/expath/src/org/expath/exist/ZipFileFunctions.java +++ b/extensions/expath/src/org/expath/exist/ZipFileFunctions.java @@ -6,6 +6,7 @@ import org.exist.dom.QName; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.memtree.MemTreeBuilder; +import org.exist.dom.persistent.LockedDocument; import 
org.exist.security.PermissionDeniedException; import org.exist.util.io.FastByteArrayOutputStream; import org.exist.xmldb.XmldbURI; @@ -232,13 +233,12 @@ private Sequence createZip(Element zipFile) { // copied from public interface ZipFileSource { - public ZipInputStream getStream() throws IOException, PermissionDeniedException; - - public void close(); + ZipInputStream getStream() throws IOException, PermissionDeniedException; + void close(); } private class ZipFileFromDb implements ZipFileSource { - private BinaryDocument binaryDoc = null; + private LockedDocument binaryDoc = null; private final XmldbURI uri; public ZipFileFromDb(XmldbURI uri) { @@ -247,33 +247,32 @@ public ZipFileFromDb(XmldbURI uri) { @Override public ZipInputStream getStream() throws IOException, PermissionDeniedException { - if (binaryDoc == null) { binaryDoc = getBinaryDoc(); } - return new ZipInputStream(context.getBroker().getBinaryResource(binaryDoc)); + return new ZipInputStream(context.getBroker().getBinaryResource((BinaryDocument)binaryDoc.getDocument())); } @Override public void close() { if (binaryDoc != null) { - binaryDoc.getUpdateLock().release(LockMode.READ_LOCK); + binaryDoc.close(); } } - private BinaryDocument getBinaryDoc() throws PermissionDeniedException { - final DocumentImpl doc = context.getBroker().getXMLResource(uri, LockMode.READ_LOCK); - if (doc == null) { + private LockedDocument getBinaryDoc() throws PermissionDeniedException { + final LockedDocument lockedDocment = context.getBroker().getXMLResource(uri, LockMode.READ_LOCK); + if (lockedDocment == null) { return null; } - if(doc.getResourceType() != DocumentImpl.BINARY_FILE) { - doc.getUpdateLock().release(LockMode.READ_LOCK); + if(lockedDocment.getDocument().getResourceType() != DocumentImpl.BINARY_FILE) { + lockedDocment.close(); return null; } - return (BinaryDocument) doc; + return lockedDocment; } } diff --git a/extensions/expath/src/org/expath/tools/model/exist/EXistAttribute.java 
b/extensions/expath/src/org/expath/tools/model/exist/EXistAttribute.java index 8cad1450996..8d89d5c6c55 100644 --- a/extensions/expath/src/org/expath/tools/model/exist/EXistAttribute.java +++ b/extensions/expath/src/org/expath/tools/model/exist/EXistAttribute.java @@ -53,7 +53,7 @@ public String getValue() { @Override public boolean getBoolean() throws ToolsException { - return attribute.getValue().toLowerCase().equals("true"); + return attribute.getValue().equalsIgnoreCase("true"); } @Override diff --git a/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqServiceCompiledXQueryCacheImpl.java b/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqServiceCompiledXQueryCacheImpl.java index 78db3e75fa2..88824a42ab1 100644 --- a/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqServiceCompiledXQueryCacheImpl.java +++ b/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqServiceCompiledXQueryCacheImpl.java @@ -36,6 +36,7 @@ import org.exist.extensions.exquery.restxq.RestXqServiceCompiledXQueryCache; import org.exist.storage.DBBroker; import org.exist.xquery.CompiledXQuery; +import org.exist.xquery.XPathException; import org.exquery.restxq.RestXqService; import org.exquery.restxq.RestXqServiceException; import org.jctools.queues.atomic.MpmcAtomicArrayQueue; @@ -75,11 +76,14 @@ public CompiledXQuery getCompiledQuery(final DBBroker broker, final URI xqueryLo CompiledXQuery xquery = queue.poll(); if(xquery == null) { xquery = XQueryCompiler.compile(broker, xqueryLocation); + } else { + // prepare the context for re-use + try { + xquery.getContext().prepareForReuse(); + } catch (final XPathException e) { + throw new RestXqServiceException("Unable to prepare compiled XQuery for reuse", e); + } } - - //reset the state of the query - xquery.reset(); - xquery.getContext().getWatchDog().reset(); xquery.getContext().prepareForExecution(); 
return xquery; @@ -140,4 +144,4 @@ public U next() { } }; } -} \ No newline at end of file +} diff --git a/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqStartupTrigger.java b/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqStartupTrigger.java index 00114e66927..ff8ad333cd5 100644 --- a/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqStartupTrigger.java +++ b/extensions/exquery/restxq/src/main/java/org/exist/extensions/exquery/restxq/impl/RestXqStartupTrigger.java @@ -30,6 +30,7 @@ import java.util.Map; import org.exist.storage.DBBroker; import org.exist.storage.StartupTrigger; +import org.exist.storage.txn.Txn; /** * Loads the RESTXQ Registry from disk during database startup @@ -41,7 +42,7 @@ public class RestXqStartupTrigger implements StartupTrigger { @Override - public void execute(final DBBroker broker, final Map> params) { + public void execute(final DBBroker broker, final Txn transaction, final Map> params) { RestXqServiceRegistryManager.getRegistry(broker.getBrokerPool()); } -} \ No newline at end of file +} diff --git a/extensions/fluent/src/org/exist/fluent/Database.java b/extensions/fluent/src/org/exist/fluent/Database.java index 2f4954c43f3..b8b7dac274b 100644 --- a/extensions/fluent/src/org/exist/fluent/Database.java +++ b/extensions/fluent/src/org/exist/fluent/Database.java @@ -15,8 +15,9 @@ import org.exist.dom.persistent.TextImpl; import org.exist.security.*; import org.exist.storage.*; -import org.exist.storage.lock.Lock; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.sync.Sync; import org.exist.storage.txn.TransactionManager; import org.exist.util.*; @@ -29,6 +30,8 @@ import java.text.MessageFormat; import java.util.*; +import static org.exist.util.ThreadUtils.newInstanceThread; + /** *

The global entry point to an embedded instance of the eXist database. * The static methods on this class control the lifecycle of the database connection. It follows that @@ -70,6 +73,7 @@ public static void startup(Path configFile) { BrokerPool.configure(dbName, 1, 5, config); pool = BrokerPool.getInstance(dbName); txManager = pool.getTransactionManager(); + lockManager = pool.getLockManager(); configureRootCollection(configFile); defragmenter.start(); QueryService.statistics().reset(); @@ -104,20 +108,17 @@ static void configureRootCollection(Path configFile) { } // Now force reload and reindex so it'll pick up the new settings. - Transaction tx = db.requireTransactionWithBroker(); try { - pool.getConfigurationManager().addConfiguration(tx.tx, tx.broker, tx.broker.getCollection(XmldbURI.ROOT_COLLECTION_URI), configXml.toString()); - tx.commit(); - DBBroker broker = db.acquireBroker(); - try { - broker.reindexCollection(XmldbURI.ROOT_COLLECTION_URI); - } finally { - db.releaseBroker(broker); + try(final Transaction tx = db.requireTransactionWithBroker()) { + pool.getConfigurationManager().addConfiguration(tx.tx, tx.broker, tx.broker.getCollection(XmldbURI.ROOT_COLLECTION_URI), configXml.toString()); + tx.commit(); + } + try(final Transaction tx = db.requireTransactionWithBroker()) { + tx.broker.reindexCollection(tx.tx, XmldbURI.ROOT_COLLECTION_URI); + tx.commit(); } - } catch (final PermissionDeniedException | IOException | CollectionConfigurationException e) { + } catch (final PermissionDeniedException | IOException | LockException | CollectionConfigurationException e) { throw new DatabaseException(e); - } finally { - tx.abortIfIncomplete(); } } @@ -246,6 +247,7 @@ public static Database current() throws DatabaseException { try { pool = BrokerPool.getInstance(dbName); txManager = pool.getTransactionManager(); + lockManager = pool.getLockManager(); //configureRootCollection(configFile); //defragmenter.start(); //QueryService.statistics().reset(); @@ -283,6 +285,7 
@@ static String normalizePath(String path) { public static final String ROOT_PREFIX = XmldbURI.ROOT_COLLECTION; private static volatile BrokerPool pool; private static TransactionManager txManager; + private static LockManager lockManager; private static final ThreadLocal localTransaction = new ThreadLocal(); private static final WeakHashMap instrumentedBrokers = new WeakHashMap(); @@ -383,9 +386,10 @@ public boolean contains(String path) { if (broker.getCollection(XmldbURI.create(path)) != null) return true; String folderPath = path.substring(0, i); String name = path.substring(i+1); - Collection collection = broker.openCollection(XmldbURI.create(folderPath), LockMode.NO_LOCK); - if (collection == null) return false; - return collection.getDocument(broker, XmldbURI.create(name)) != null; + try(final Collection collection = broker.openCollection(XmldbURI.create(folderPath), LockMode.NO_LOCK)) { + if (collection == null) return false; + return collection.getDocument(broker, XmldbURI.create(name)) != null; + } } catch(PermissionDeniedException pde) { throw new DatabaseException(pde.getMessage(), pde); } finally { @@ -485,12 +489,18 @@ public QueryService query(final java.util.Collection context */ static Transaction requireTransaction() { Transaction t = localTransaction.get(); - return t == null ? new Transaction(txManager, null) : new Transaction(t, null); + return t == null ? new Transaction(txManager, lockManager, null) : new Transaction(txManager, t, lockManager, null); } Transaction requireTransactionWithBroker() { Transaction t = localTransaction.get(); - return t == null ? 
new Transaction(txManager, this) : new Transaction(t, this); + if (t == null) { + try (final DBBroker broker = acquireBroker()) { + return new Transaction(txManager, broker.getCurrentTransaction(), lockManager, this); + } + } else { + return new Transaction(txManager, t, lockManager, this); + } } void checkSame(Resource o) { @@ -553,7 +563,7 @@ private static class Defragmenter implements Runnable { public void start() { if (thread != null) return; - thread = new Thread(this, "Database defragmenter"); + thread = newInstanceThread(pool, "fluent.database-defragmenter", this); thread.setPriority(Thread.NORM_PRIORITY-3); thread.setDaemon(true); thread.start(); @@ -604,22 +614,19 @@ public void run() { it.remove(); } else { // Must hold write lock on doc before checking stale map to avoid race condition - if (doc.getUpdateLock().attempt(LockMode.WRITE_LOCK)) try { + try(final ManagedDocumentLock updateLock = pool.getLockManager().acquireDocumentWriteLock(doc.getURI())) { String docPath = normalizePath(doc.getURI().getCollectionPath()); if (!staleMap.containsKey(docPath)) { LOG.debug("defragmenting " + docPath); count++; - Transaction tx = Database.requireTransaction(); - try { + try(final Transaction tx = Database.requireTransaction()) { broker.defragXMLResource(tx.tx, doc); tx.commit(); it.remove(); - } finally { - tx.abortIfIncomplete(); } } - } finally { - doc.getUpdateLock().release(LockMode.WRITE_LOCK); + } catch(final LockException e) { + // not a problem, we only attempted the lock! 
} } } diff --git a/extensions/fluent/src/org/exist/fluent/Document.java b/extensions/fluent/src/org/exist/fluent/Document.java index 2d908d51dc2..1aead899e02 100644 --- a/extensions/fluent/src/org/exist/fluent/Document.java +++ b/extensions/fluent/src/org/exist/fluent/Document.java @@ -127,9 +127,9 @@ public void remove(Document.Listener listener) { */ public static class MetadataFacet extends NamedResource.MetadataFacet { private final DocumentMetadata docMetadata; - private MetadataFacet(Permission permissions, DocumentMetadata docMetadata, Database db) { - super(permissions, db); - this.docMetadata = docMetadata; + private MetadataFacet(final DocumentImpl doc, final Database db) { + super(doc, db); + this.docMetadata = doc.getMetadata(); } @Override public Date creationDate() {return new Date(docMetadata.getCreated());} @@ -199,7 +199,7 @@ public ListenersFacet listeners() { } @Override public MetadataFacet metadata() { - if (metadata == null) metadata = new MetadataFacet(doc.getPermissions(), doc.getMetadata(), db); + if (metadata == null) metadata = new MetadataFacet(doc, db); return metadata; } diff --git a/extensions/fluent/src/org/exist/fluent/Folder.java b/extensions/fluent/src/org/exist/fluent/Folder.java index de40bf32715..d49d6c2166a 100644 --- a/extensions/fluent/src/org/exist/fluent/Folder.java +++ b/extensions/fluent/src/org/exist/fluent/Folder.java @@ -15,7 +15,6 @@ import org.exist.collections.triggers.TriggerException; import org.exist.security.PermissionDeniedException; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock; import org.exist.storage.lock.Lock.LockMode; import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; @@ -312,6 +311,38 @@ public void remove(Document.Listener listener) { } } + public class DocumentsFacetIterator implements Iterator { + private Iterator delegate; + private Document last; + + public DocumentsFacetIterator() { + acquire(LockMode.READ_LOCK); + try { + delegate = 
handle.iterator(broker); + } catch(PermissionDeniedException | LockException e) { + throw new DatabaseException(e.getMessage(), e); + } finally { + release(); + } + } + + public void remove() { + staleMarker.check(); + if (last == null) throw new IllegalStateException("no document to remove"); + last.delete(); + last = null; + } + public boolean hasNext() { + staleMarker.check(); + return delegate.hasNext(); + } + public Document next() { + staleMarker.check(); + last = Document.newInstance(delegate.next(), Folder.this); + return last; + } + } + private DocumentsFacet() { super(Folder.this.namespaceBindings, Folder.this.db); } @@ -351,7 +382,7 @@ public XMLDocument completed(Node[] nodes) { try { name.setContext(handle); IndexInfo info = handle.validateXMLResource(tx.tx, broker, XmldbURI.create(name.get()), node); - changeLock(LockMode.NO_LOCK); + //changeLock(LockMode.NO_LOCK); handle.store(tx.tx, broker, info, node); commit(); } catch (EXistException e) { @@ -415,7 +446,7 @@ public XMLDocument load(Name name, Source.XML source) { source.applyOldName(name); name.setContext(handle); IndexInfo info = handle.validateXMLResource(tx.tx, broker, XmldbURI.create(name.get()), source.toInputSource()); - changeLock(LockMode.NO_LOCK); + //changeLock(LockMode.NO_LOCK); handle.store(tx.tx, broker, info, source.toInputSource()); commit(); } catch (EXistException e) { @@ -558,8 +589,8 @@ protected void prepareContext(DBBroker broker_) { try { docs = handle.allDocs(broker_, new DefaultDocumentSet(), false); baseUri = new AnyURIValue(handle.getURI()); - }catch (PermissionDeniedException pde) { - throw new DatabaseException(pde.getMessage(), pde); + } catch (final PermissionDeniedException | LockException e) { + throw new DatabaseException(e.getMessage(), e); } finally { release(); } @@ -573,38 +604,10 @@ protected void prepareContext(DBBroker broker_) { * * @return an iterator over the folder's immediate documents */ + @Override public Iterator iterator() { - return new Iterator() 
{ - private Iterator delegate; - private Document last; - { - acquire(LockMode.READ_LOCK); - try { - delegate = handle.iterator(broker); - } catch(PermissionDeniedException | LockException e) { - throw new DatabaseException(e.getMessage(), e); - } finally { - release(); - } - } - public void remove() { - staleMarker.check(); - if (last == null) throw new IllegalStateException("no document to remove"); - last.delete(); - last = null; - } - public boolean hasNext() { - staleMarker.check(); - return delegate.hasNext(); - } - public Document next() { - staleMarker.check(); - last = Document.newInstance(delegate.next(), Folder.this); - return last; - } - }; + return new DocumentsFacetIterator(); } - } /** @@ -691,13 +694,14 @@ public void remove(org.exist.fluent.Listener listener) { broker = db.acquireBroker(); Collection collection; if (createIfMissing) { - tx = Database.requireTransaction(); - try { + + try{ + tx = db.requireTransactionWithBroker(); collection = createInternal(path); tx.commit(); } finally { - tx.abortIfIncomplete(); - } + tx.close(); + } } else { try { collection = broker.getCollection(XmldbURI.create(path)); @@ -736,7 +740,7 @@ public ListenersFacet listeners() { } @Override public MetadataFacet metadata() { - if (metadata == null) metadata = new MetadataFacet(getQuickHandle().getPermissionsNoLock(), db) { + if (metadata == null) metadata = new MetadataFacet(getQuickHandle(), db) { @Override public Date creationDate() { return new Date(getQuickHandle().getCreationTime()); } @@ -795,7 +799,7 @@ private Collection createInternal(String targetPath) { void transact(LockMode _lockMode) { if (tx != null) throw new IllegalStateException("transaction already in progress"); - tx = Database.requireTransaction(); + tx = db.requireTransactionWithBroker(); acquire(_lockMode); } @@ -838,10 +842,16 @@ void acquire(LockMode _lockMode, DBBroker _broker) { } void release() { - if (broker == null || handle == null) throw new IllegalStateException("broker not 
acquired"); - if (tx != null) tx.abortIfIncomplete(); - if (lockMode != LockMode.NO_LOCK) handle.getLock().release(lockMode); - if (ownBroker) db.releaseBroker(broker); + if (broker == null || handle == null) { + throw new IllegalStateException("broker not acquired"); + } + if (tx != null) { + tx.close(); + } + handle.close(); + if (ownBroker) { + db.releaseBroker(broker); + } ownBroker = false; broker = null; handle = null; @@ -853,14 +863,23 @@ void changeLock(LockMode newLockMode) { if (lockMode == newLockMode) return; if (lockMode == LockMode.NO_LOCK) { try { - handle.getLock().acquire(newLockMode); + switch(newLockMode) { + case READ_LOCK: + broker.getBrokerPool().getLockManager().acquireCollectionReadLock(handle.getURI()); + break; + case WRITE_LOCK: + broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(handle.getURI()); + break; + case NO_LOCK: + break; + } lockMode = newLockMode; } catch (LockException e) { throw new DatabaseException(e); } } else { if (newLockMode != LockMode.NO_LOCK) throw new IllegalStateException("cannot change between read and write lock modes"); - handle.getLock().release(lockMode); + handle.close(); lockMode = newLockMode; } } @@ -1110,9 +1129,9 @@ private Sequence getDocsSequence(boolean recursive) { acquire(LockMode.READ_LOCK); try { docs = handle.allDocs(broker, new DefaultDocumentSet(), recursive); - } catch(PermissionDeniedException pde) { - throw new DatabaseException(pde.getMessage(), pde); - } finally { + } catch (final PermissionDeniedException | LockException e) { + throw new DatabaseException(e.getMessage(), e); + } finally { release(); } Sequence result = new ExtArrayNodeSet(docs.getDocumentCount(), 1); @@ -1144,8 +1163,8 @@ private Sequence getDocsSequence(boolean recursive) { try { docs = handle.allDocs(broker_, new DefaultDocumentSet(), true); baseUri = new AnyURIValue(handle.getURI()); - } catch(PermissionDeniedException pde) { - throw new DatabaseException(pde.getMessage(), pde); + } catch (final 
PermissionDeniedException | LockException e) { + throw new DatabaseException(e.getMessage(), e); } finally { release(); } @@ -1212,4 +1231,4 @@ DocumentImpl moveOrCopyDocument(DocumentImpl doc, Name name, boolean copy) { } -} \ No newline at end of file +} diff --git a/extensions/fluent/src/org/exist/fluent/Item.java b/extensions/fluent/src/org/exist/fluent/Item.java index 527362a98fc..4e7007373e4 100644 --- a/extensions/fluent/src/org/exist/fluent/Item.java +++ b/extensions/fluent/src/org/exist/fluent/Item.java @@ -52,7 +52,8 @@ public Node node() { Item that = (Item) o; if (this.item == that.item) return true; if (this.item instanceof AtomicValue && that.item instanceof AtomicValue) { - AtomicValue thisValue = (AtomicValue) this.item, thatValue = (AtomicValue) that.item; + AtomicValue thisValue = (AtomicValue) this.item; + AtomicValue thatValue = (AtomicValue) that.item; try { return thisValue.getType() == thatValue.getType() diff --git a/extensions/fluent/src/org/exist/fluent/ItemList.java b/extensions/fluent/src/org/exist/fluent/ItemList.java index df2c0498f0d..7360d84feb6 100644 --- a/extensions/fluent/src/org/exist/fluent/ItemList.java +++ b/extensions/fluent/src/org/exist/fluent/ItemList.java @@ -16,7 +16,12 @@ * @version $Revision: 1.17 $ ($Date: 2006/08/14 23:18:22 $) */ public class ItemList extends Resource implements Iterable { - + + private Sequence seq; + private List items, modifiableItems; + private ValuesFacet values; + private NodesFacet nodes; + /** * A facet that treats each item in the list as its effective string value. 
Atomic values * are converted to strings, while nodes are converted to the concatenation of all their @@ -194,12 +199,6 @@ public String toString() { } } - - private Sequence seq; - private List items, modifiableItems; - private ValuesFacet values; - private NodesFacet nodes; - private ItemList() { super(null, null); this.seq = Sequence.EMPTY_SEQUENCE; @@ -295,12 +294,9 @@ public Item get(int index) { * it doesn't make sense to try to delete. */ public void deleteAllNodes() { - Transaction tx = Database.requireTransaction(); - try { + try(final Transaction tx = db.requireTransactionWithBroker()) { for (Item item : items) if (item instanceof Node) ((Node) item).delete(); tx.commit(); - } finally { - tx.abortIfIncomplete(); } } diff --git a/extensions/fluent/src/org/exist/fluent/NamedResource.java b/extensions/fluent/src/org/exist/fluent/NamedResource.java index dc86eca4dc5..a7befc2edae 100644 --- a/extensions/fluent/src/org/exist/fluent/NamedResource.java +++ b/extensions/fluent/src/org/exist/fluent/NamedResource.java @@ -5,6 +5,7 @@ import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; +import org.exist.security.PermissionFactory; import org.exist.storage.DBBroker; /** @@ -25,12 +26,12 @@ public static abstract class MetadataFacet { Pattern.compile("(a|(u?g?o?){1,3})((=r?w?u?)|([-+](r?w?u?){1,3}))(,(a|(u?g?o?){1,3})((=r?w?u?)|([-+](r?w?u?){1,3})))*"); private static final Pattern SEGMENT_REGEX = Pattern.compile("([augo]+)([-+=])([rwu]*)"); - private Permission permissions; - private final Database db; + private org.exist.Resource resource; + private final Database db; - protected MetadataFacet(Permission permissions, Database db) { - this.permissions = permissions; - this.db = db; + protected MetadataFacet(final org.exist.Resource resource, final Database db) { + this.resource = resource; + this.db = db; } /** @@ -45,52 +46,40 @@ protected MetadataFacet(Permission permissions, Database db) { * * @return the owner of this 
resource */ - public String owner() {return permissions.getOwner().getName();} + public String owner() {return resource.getPermissions().getOwner().getName();} /** * Set the owner of this resource for purposes of permission management. * * @param owner the new owner of this resource */ - public void owner(String owner) { - DBBroker broker = null; - try { - broker = db.acquireBroker(); - permissions.setOwner(owner); - } catch(PermissionDeniedException pde) { - throw new DatabaseException(pde.getMessage(), pde); - } finally { - if(broker != null) { - db.releaseBroker(broker); - } - } - } + public void owner(final String owner) { + try (final DBBroker broker = db.acquireBroker()) { + PermissionFactory.chown(broker, resource.getPermissions(), Optional.ofNullable(owner), Optional.empty()); + } catch (final PermissionDeniedException pde) { + throw new DatabaseException(pde.getMessage(), pde); + } + } /** * Return the group who has privileged access to this resource for purposes of permission management. * * @return the owning group of this resource */ - public String group() {return permissions.getGroup().getName();} + public String group() {return resource.getPermissions().getGroup().getName();} /** * Set the group that will have privileged access to this resource for purposes of permission management. 
* * @param group the new owning group of this resource */ - public void group(String group) { - DBBroker broker = null; - try { - broker = db.acquireBroker(); - permissions.setGroup(group); - } catch(PermissionDeniedException pde) { - throw new DatabaseException(pde.getMessage(), pde); - } finally { - if(broker != null) { - db.releaseBroker(broker); - } - } - } + public void group(final String group) { + try (final DBBroker broker = db.acquireBroker()) { + PermissionFactory.chown(broker, resource.getPermissions(), Optional.empty(), Optional.ofNullable(group)); + } catch (final PermissionDeniedException pde) { + throw new DatabaseException(pde.getMessage(), pde); + } + } /** * Return whether the given subject has the given permission. The "who" character refers to @@ -129,10 +118,10 @@ public boolean hasPermission(final char who, final char what) { default: throw new IllegalArgumentException("illegal permission \"who\" code '" + who + "'"); } - return (permissions.getMode() & mask) == mask; + return (resource.getPermissions().getMode() & mask) == mask; } - private int convertPermissionBit(char what) { + private int convertPermissionBit(final char what) { switch(what) { case Permission.READ_CHAR: return Permission.READ; case Permission.WRITE_CHAR: return Permission.WRITE; @@ -141,7 +130,7 @@ private int convertPermissionBit(char what) { } } - private int convertPermissionBits(String what) { + private int convertPermissionBits(final String what) { int perms = 0; for (int i=0; i it = query().all( + for (final Iterator it = query().all( "for $prefix in in-scope-prefixes($_1) return ($prefix, namespace-uri-for-prefix($prefix, $_1))", this).values().iterator(); it.hasNext(); ) { - String prefix = it.next(), namespace = it.next(); - if (!NamespaceMap.isReservedPrefix(prefix)) namespaceMap.put(prefix, namespace); + final String prefix = it.next(); + final String namespace = it.next(); + if (!NamespaceMap.isReservedPrefix(prefix)) { + namespaceMap.put(prefix, namespace); + } 
} return namespaceMap; } @@ -141,11 +144,13 @@ public NamespaceMap inScopeNamespaces() { */ public int compareDocumentOrderTo(Node node) { if (this.item == node.item) return 0; - NodeValue nv1 = (NodeValue) this.item, nv2 = (NodeValue) node.item; + NodeValue nv1 = (NodeValue) this.item; + NodeValue nv2 = (NodeValue) node.item; if (nv1.getImplementationType() != nv2.getImplementationType()) throw new DatabaseException("can't compare different node types, since they can never be in the same document"); if (nv1.getImplementationType() == NodeValue.PERSISTENT_NODE) { - NodeProxy n1 = (NodeProxy) item, n2 = (NodeProxy) node.item; + NodeProxy n1 = (NodeProxy) item; + NodeProxy n2 = (NodeProxy) node.item; if (n1.getOwnerDocument().getDocId() != n2.getOwnerDocument().getDocId()) throw new DatabaseException("can't compare document order of nodes in disparate documents: this node is in " + document() + " and the argument node in " + node.document()); if (n1.getNodeId().equals(n2.getNodeId())) return 0; @@ -155,7 +160,8 @@ public int compareDocumentOrderTo(Node node) { throw new DatabaseException("unable to compare nodes", e); } } else if (nv1.getImplementationType() == NodeValue.IN_MEMORY_NODE) { - org.exist.dom.memtree.NodeImpl n1 = (org.exist.dom.memtree.NodeImpl) nv1, n2 = (org.exist.dom.memtree.NodeImpl) nv2; + org.exist.dom.memtree.NodeImpl n1 = (org.exist.dom.memtree.NodeImpl) nv1; + org.exist.dom.memtree.NodeImpl n2 = (org.exist.dom.memtree.NodeImpl) nv2; final org.exist.dom.memtree.DocumentImpl n1Doc = n1.getNodeType() == org.w3c.dom.Node.DOCUMENT_NODE ? (org.exist.dom.memtree.DocumentImpl)n1 : n1.getOwnerDocument(); final org.exist.dom.memtree.DocumentImpl n2Doc = n2.getNodeType() == org.w3c.dom.Node.DOCUMENT_NODE ? 
(org.exist.dom.memtree.DocumentImpl)n2 : n2.getOwnerDocument(); @@ -199,8 +205,7 @@ public ElementBuilder append() { final StoredNode node = (StoredNode) getDOMNode(); return new ElementBuilder(namespaceBindings, true, new ElementBuilder.CompletedCallback() { public Node completed(org.w3c.dom.Node[] nodes) { - Transaction tx = db.requireTransactionWithBroker(); - try { + try(final Transaction tx = db.requireTransactionWithBroker()) { final DocumentImpl ownerDoc = node.getOwnerDocument(); tx.lockWrite(ownerDoc); DocumentTrigger trigger = fireTriggerBefore(tx); @@ -217,8 +222,6 @@ public Node completed(org.w3c.dom.Node[] nodes) { throw new DatabaseException(e); } catch (TriggerException e) { throw new DatabaseException("append aborted by listener", e); - } finally { - tx.abortIfIncomplete(); } } }); @@ -250,8 +253,8 @@ public void delete() { } else if (parent == null) { throw new DatabaseException("cannot delete node with no parent"); } else { - Transaction tx = db.requireTransactionWithBroker(); - try { + + try(final Transaction tx = db.requireTransactionWithBroker()) { if (parent instanceof NodeHandle) { tx.lockWrite(((NodeHandle) parent).getOwnerDocument()); } @@ -263,8 +266,6 @@ public void delete() { throw new DatabaseException(e); } catch (TriggerException e) { throw new DatabaseException("delete aborted by listener", e); - } finally { - tx.abortIfIncomplete(); } } } @@ -307,8 +308,7 @@ public ElementBuilder replace() { return new ElementBuilder(namespaceBindings, false, new ElementBuilder.CompletedCallback() { public Object completed(org.w3c.dom.Node[] nodes) { assert nodes.length == 1; - Transaction tx = db.requireTransactionWithBroker(); - try { + try(final Transaction tx = db.requireTransactionWithBroker()) { DocumentImpl doc = (DocumentImpl) oldNode.getOwnerDocument(); tx.lockWrite(doc); DocumentTrigger trigger = fireTriggerBefore(tx); @@ -322,8 +322,6 @@ public Object completed(org.w3c.dom.Node[] nodes) { throw new DatabaseException(e); } catch 
(TriggerException e) { throw new DatabaseException("append aborted by listener", e); - } finally { - tx.abortIfIncomplete(); } } }); @@ -347,8 +345,7 @@ public AttributeBuilder update() { final ElementImpl elem = (ElementImpl) getDOMNode(); return new AttributeBuilder(elem, namespaceBindings, new AttributeBuilder.CompletedCallback() { public void completed(NodeList removeList, NodeList addList) { - Transaction tx = db.requireTransactionWithBroker(); - try { + try(final Transaction tx = db.requireTransactionWithBroker()) { DocumentImpl doc = elem.getOwnerDocument(); tx.lockWrite(doc); DocumentTrigger trigger = fireTriggerBefore(tx); @@ -357,8 +354,6 @@ public void completed(NodeList removeList, NodeList addList) { tx.commit(); } catch (TriggerException e) { throw new DatabaseException("append aborted by listener", e); - } finally { - tx.abortIfIncomplete(); } } }); @@ -377,7 +372,7 @@ private DocumentTrigger fireTriggerBefore(Transaction tx) throws TriggerExceptio DocumentImpl docimpl = ((NodeProxy) item).getOwnerDocument(); Collection col = docimpl.getCollection(); - DocumentTrigger trigger = new DocumentTriggers(tx.broker, null, col, col.getConfiguration(tx.broker)); + DocumentTrigger trigger = new DocumentTriggers(tx.broker, tx.tx, null, col, col.getConfiguration(tx.broker)); trigger.beforeUpdateDocument(tx.broker, tx.tx, docimpl); diff --git a/extensions/fluent/src/org/exist/fluent/QueryService.java b/extensions/fluent/src/org/exist/fluent/QueryService.java index ecf4a9d0e52..03ba191f379 100644 --- a/extensions/fluent/src/org/exist/fluent/QueryService.java +++ b/extensions/fluent/src/org/exist/fluent/QueryService.java @@ -1,5 +1,6 @@ package org.exist.fluent; +import org.exist.collections.ManagedLocks; import org.exist.dom.persistent.MutableDocumentSet; import org.exist.dom.persistent.DocumentSet; import org.exist.dom.persistent.DefaultDocumentSet; @@ -13,6 +14,7 @@ import org.exist.security.PermissionDeniedException; import org.exist.source.*; import 
org.exist.storage.*; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.LockException; import org.exist.xquery.*; import org.exist.xquery.functions.fn.*; @@ -313,8 +315,9 @@ ItemList executeQuery(String query, WrapperFactory wrapperFactory, Object[] para context = new XQueryContext(broker.getBrokerPool()); buildXQueryStaticContext(context, true); } else { - context = compiledQuery.getContext(); // static context already set + context = compiledQuery.getContext(); + context.prepareForReuse(); } buildXQueryDynamicContext(context, params, docsToLock, true); t2 = System.currentTimeMillis(); @@ -322,11 +325,9 @@ ItemList executeQuery(String query, WrapperFactory wrapperFactory, Object[] para compiledQuery = xquery.compile(broker, context, source); t3 = System.currentTimeMillis(); } - docsToLock.lock(broker, false); - try { + try(final ManagedLocks docLocks = docsToLock.lock(broker, false)) { return new ItemList(xquery.execute(broker, wrap(compiledQuery, wrapperFactory, context), base), namespaceBindings.extend(), db); } finally { - docsToLock.unlock(); t4 = System.currentTimeMillis(); } } finally { @@ -551,6 +552,7 @@ public QueryAnalysis analyze(String query, Object... 
params) { t3 = System.currentTimeMillis(); } else { context = (AnalysisXQueryContext) compiledQuery.getContext(); + context.prepareForReuse(); t2 = System.currentTimeMillis(); } return new QueryAnalysis( diff --git a/extensions/fluent/src/org/exist/fluent/Transaction.java b/extensions/fluent/src/org/exist/fluent/Transaction.java index 08e2d57cf27..799402d94b6 100644 --- a/extensions/fluent/src/org/exist/fluent/Transaction.java +++ b/extensions/fluent/src/org/exist/fluent/Transaction.java @@ -2,8 +2,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; import org.exist.storage.txn.*; import org.exist.util.LockException; @@ -16,24 +15,24 @@ * * @author Piotr Kaminski */ -class Transaction { +class Transaction implements AutoCloseable { private final TransactionManager txManager; + private final LockManager lockManager; final Txn tx; - final DBBroker broker; + DBBroker broker; private final Database db; - private boolean complete; /** * Begin a new transaction. * * @param txManager the manager to use */ - Transaction(TransactionManager txManager, Database db) { + Transaction(TransactionManager txManager, LockManager lockManager, Database db) { this.txManager = txManager; + this.lockManager = lockManager; this.tx = txManager.beginTransaction(); this.db = db; this.broker = db == null ? null : db.acquireBroker(); - complete = false; } /** @@ -41,46 +40,63 @@ class Transaction { * * @param tx the transaction to join */ - Transaction(Transaction tx, Database db) { - this.txManager = null; - this.tx = tx.tx; + Transaction(TransactionManager txManager, Transaction tx, LockManager lockManager, Database db) { + this.txManager = txManager; + this.lockManager = lockManager; + this.tx = tx == null ? txManager.beginTransaction() : tx.tx; + this.db = db; + this.broker = db == null ? 
null : db.acquireBroker(); + } + + Transaction(TransactionManager txManager, Txn tx, LockManager lockManager, Database db) { + this.txManager = txManager; + this.lockManager = lockManager; + this.tx = tx == null ? txManager.beginTransaction() : tx; this.db = db; this.broker = db == null ? null : db.acquireBroker(); - complete = true; } void commit() { - if (complete) return; + if (tx.getState() == Txn.State.COMMITTED) return; try { - if (tx != null && txManager != null) try { - txManager.commit(tx); - complete = true; - } catch (TransactionException e) { - throw new DatabaseException(e); - } + if (tx != null && txManager != null) { + try { + txManager.commit(tx); + } catch (TransactionException e) { + throw new DatabaseException(e); + } + } } finally { - if (broker != null) db.releaseBroker(broker); + if (broker != null) { + db.releaseBroker(broker); + broker = null; + } } } - - void abortIfIncomplete() { - if (complete) return; - if (tx != null && txManager != null) txManager.abort(tx); - if (broker != null) db.releaseBroker(broker); - complete = true; + + @Override + public void close() { + if (tx != null && txManager != null) { + txManager.close(tx); + } + + if (broker != null) { + db.releaseBroker(broker); + broker = null; + } } - - void lockWrite(DocumentImpl doc) { + + void lockWrite(final DocumentImpl doc) { try { - tx.acquireLock(doc.getUpdateLock(), LockMode.WRITE_LOCK); + tx.acquireDocumentLock(() -> lockManager.acquireDocumentWriteLock(doc.getURI())); } catch (LockException e) { throw new DatabaseException(e); } } - - void lockRead(DocumentImpl doc) { + + void lockRead(final DocumentImpl doc) { try { - tx.acquireLock(doc.getUpdateLock(), LockMode.READ_LOCK); + tx.acquireDocumentLock(() -> lockManager.acquireDocumentReadLock(doc.getURI())); } catch (LockException e) { throw new DatabaseException(e); } diff --git a/extensions/fluent/src/org/exist/fluent/WeakMultiValueHashMap.java b/extensions/fluent/src/org/exist/fluent/WeakMultiValueHashMap.java index 
31691b5f8a0..ad7c0816b2e 100644 --- a/extensions/fluent/src/org/exist/fluent/WeakMultiValueHashMap.java +++ b/extensions/fluent/src/org/exist/fluent/WeakMultiValueHashMap.java @@ -3,6 +3,8 @@ import java.lang.ref.WeakReference; import java.util.*; +import static org.exist.util.ThreadUtils.newGlobalThread; + class WeakMultiValueHashMap { /** @@ -10,15 +12,11 @@ class WeakMultiValueHashMap { */ private static final int SWEEP_COUNT = 100; - private final Map>> map = new HashMap>>(); + private final Map>> map = new HashMap<>(); private int putCounter; public synchronized void put(K key, V value) { - Collection> list = map.get(key); - if (list == null) { - list = new LinkedList>(); - map.put(key, list); - } + final Collection> list = map.computeIfAbsent(key, k -> new LinkedList<>()); list.add(new WeakReference(value)); putCounter = (putCounter + 1) % SWEEP_COUNT; if (putCounter == 0) SWEEPER.clean(this); @@ -42,44 +40,53 @@ public synchronized boolean containsKey(K key) { @SuppressWarnings("unchecked") public synchronized Iterable get(final K key) { final Collection> list = map.get(key); - if (list == null) return Database.EMPTY_ITERABLE; - - return new Iterable() { - public java.util.Iterator iterator() { - return new Iterator() { - private final Iterator> it = list.iterator(); - private V nextItem; {advance();} - private void advance() { - synchronized(WeakMultiValueHashMap.this) { - while(nextItem == null && it.hasNext()) { - nextItem = it.next().get(); - if (nextItem == null) it.remove(); - } - if (!it.hasNext() && list.isEmpty()) map.remove(key); - } - } - public boolean hasNext() { - advance(); - return nextItem != null; - } - public V next() { - advance(); - if (nextItem == null) throw new NoSuchElementException(); - V item = nextItem; - nextItem = null; - return item; - } - public void remove() { - throw new UnsupportedOperationException(); - } - }; + if (list == null) { + return Database.EMPTY_ITERABLE; + } + return () -> new WeakMultiValueHashMapIterator(key, 
list); + } + + private class WeakMultiValueHashMapIterator implements Iterator { + private final K key; + private final Collection> list; + private final Iterator> it; + private V nextItem; + + public WeakMultiValueHashMapIterator(final K key, final Collection> list) { + this.key = key; + this.list = list; + this.it = list.iterator(); + advance(); + } + + private void advance() { + synchronized(WeakMultiValueHashMap.this) { + while(nextItem == null && it.hasNext()) { + nextItem = it.next().get(); + if (nextItem == null) it.remove(); + } + if (!it.hasNext() && list.isEmpty()) map.remove(key); } - }; + } + public boolean hasNext() { + advance(); + return nextItem != null; + } + public V next() { + advance(); + if (nextItem == null) throw new NoSuchElementException(); + V item = nextItem; + nextItem = null; + return item; + } + public void remove() { + throw new UnsupportedOperationException(); + } } private static final Sweeper SWEEPER = new Sweeper(); static { - Thread thread = new Thread(SWEEPER, "WeakMultiValueHashMap sweeper"); + Thread thread = newGlobalThread("fluent.weakMultiValueHashMap.sweeper", SWEEPER); thread.setPriority(Thread.NORM_PRIORITY-3); thread.setDaemon(true); thread.start(); diff --git a/extensions/fluent/test/src/org/exist/fluent/DatabaseTestCase.java b/extensions/fluent/test/src/org/exist/fluent/DatabaseTestCase.java index aae952c35f1..666f9712ca2 100644 --- a/extensions/fluent/test/src/org/exist/fluent/DatabaseTestCase.java +++ b/extensions/fluent/test/src/org/exist/fluent/DatabaseTestCase.java @@ -58,8 +58,7 @@ public abstract class DatabaseTestCase { } private static void wipeDatabase() throws Exception { - Transaction tx = db.requireTransactionWithBroker(); - try { + try(final Transaction tx = db.requireTransactionWithBroker()) { Collection root = tx.broker.getCollection(XmldbURI.ROOT_COLLECTION_URI); for (Iterator it = root.collectionIterator(tx.broker); it.hasNext(); ) { XmldbURI childName = it.next(); @@ -76,8 +75,6 @@ private static 
void wipeDatabase() throws Exception { } } tx.commit(); - } finally { - tx.abortIfIncomplete(); } } diff --git a/extensions/fluent/test/src/org/exist/fluent/DocumentTest.java b/extensions/fluent/test/src/org/exist/fluent/DocumentTest.java index 5d158fb5096..325aad272cc 100644 --- a/extensions/fluent/test/src/org/exist/fluent/DocumentTest.java +++ b/extensions/fluent/test/src/org/exist/fluent/DocumentTest.java @@ -7,30 +7,31 @@ public class DocumentTest extends DatabaseTestCase { @Test public void nameAndPathFromLoad() { - Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("helloworld")); + final Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("helloworld")); assertEquals("foo", doc.name()); assertEquals("/top/foo", doc.path()); } @Test public void contentsAsStringFromLoad() { - Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("helloworld")); + final Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("helloworld")); assertEquals("helloworld", doc.contentsAsString()); } @Test public void lengthFromLoad1() { - Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("helloworld")); + final Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("helloworld")); assertEquals(10, doc.length()); } @Test public void lengthFromLoad2() { - Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("")); + final Document doc = db.createFolder("/top").documents().load(Name.create(db, "foo"), Source.blob("")); assertEquals(0, doc.length()); } @Test public void copy1() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Document original = c1.documents().load(Name.create(db, "original"), Source.blob("helloworld")); - Document copy = original.copy(c2, Name.keepCreate(db)); + final Folder c1 = 
db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Document original = c1.documents().load(Name.create(db, "original"), Source.blob("helloworld")); + final Document copy = original.copy(c2, Name.keepCreate(db)); assertEquals(1, c1.documents().size()); assertEquals(1, c2.documents().size()); assertEquals("helloworld", original.contentsAsString()); @@ -38,9 +39,10 @@ public class DocumentTest extends DatabaseTestCase { } @Test public void copy2() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Document original = c1.documents().load(Name.create(db, "original.xml"), Source.xml("")); - Document copy = original.copy(c2, Name.keepCreate(db)); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Document original = c1.documents().load(Name.create(db, "original.xml"), Source.xml("")); + final Document copy = original.copy(c2, Name.keepCreate(db)); assertEquals(1, c1.documents().size()); assertEquals(1, c2.documents().size()); assertEquals("", original.contentsAsString()); @@ -48,8 +50,9 @@ public class DocumentTest extends DatabaseTestCase { } @Test public void move1() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Document doc = c1.documents().load(Name.create(db, "original"), Source.blob("helloworld")); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Document doc = c1.documents().load(Name.create(db, "original"), Source.blob("helloworld")); doc.move(c2, Name.keepCreate(db)); assertEquals(0, c1.documents().size()); assertEquals(1, c2.documents().size()); @@ -58,8 +61,9 @@ public class DocumentTest extends DatabaseTestCase { } @Test public void move2() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Document doc = c1.documents().load(Name.create(db, "original.xml"), Source.xml("")); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Document doc 
= c1.documents().load(Name.create(db, "original.xml"), Source.xml("")); doc.move(c2, Name.keepCreate(db)); assertEquals(0, c1.documents().size()); assertEquals(1, c2.documents().size()); diff --git a/extensions/fluent/test/src/org/exist/fluent/FolderTest.java b/extensions/fluent/test/src/org/exist/fluent/FolderTest.java index 2fd4e8ab8f9..073e9499bec 100644 --- a/extensions/fluent/test/src/org/exist/fluent/FolderTest.java +++ b/extensions/fluent/test/src/org/exist/fluent/FolderTest.java @@ -144,33 +144,33 @@ public void getChild3() { @Test public void getChild5() { db.createFolder("/top/nested"); - Folder c1 = db.getFolder("/top"); + final Folder c1 = db.getFolder("/top"); c1.namespaceBindings().put("foo", "http://www.ideanest.com/"); - Folder c2 = c1.children().get("nested"); + final Folder c2 = c1.children().get("nested"); assertEquals("http://www.ideanest.com/", c2.namespaceBindings().get("foo")); } @Test public void namespace1() { - Folder c1 = db.getFolder("/"); + final Folder c1 = db.getFolder("/"); c1.namespaceBindings().put("foo", "http://www.ideanest.com/"); assertEquals("http://www.ideanest.com/", c1.namespaceBindings().get("foo")); } @Test public void namespace2() { - Folder c1 = db.getFolder("/"); + final Folder c1 = db.getFolder("/"); c1.namespaceBindings().put("", "http://www.ideanest.com/"); assertEquals("http://www.ideanest.com/", c1.namespaceBindings().get("")); } @Test public void namespace3() { - Folder c1 = db.getFolder("/"); + final Folder c1 = db.getFolder("/"); c1.namespaceBindings().put("foo", "http://www.ideanest.com/"); c1.namespaceBindings().remove("foo"); assertNull(c1.namespaceBindings().get("foo")); } @Test public void namespace4() { - Folder c1 = db.getFolder("/"); + final Folder c1 = db.getFolder("/"); c1.namespaceBindings().put("foo", "http://www.ideanest.com/"); c1.namespaceBindings().put("bar", "urn:blah"); c1.namespaceBindings().remove("foo"); @@ -179,7 +179,7 @@ public void getChild3() { } @Test public void namespace5() { - 
Folder c1 = db.getFolder("/"); + final Folder c1 = db.getFolder("/"); c1.namespaceBindings().put("foo", "http://www.ideanest.com/"); c1.namespaceBindings().put("bar", "urn:blah"); c1.namespaceBindings().clear(); @@ -188,13 +188,13 @@ public void getChild3() { } @Test public void buildDocument1() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.documents().build(Name.create(db, "doc1")).elem("test").end("test").commit(); assertEquals(1, c1.documents().size()); } @Test public void buildDocument2() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.documents().build(Name.create(db, "doc1")).elem("test1").end("test1").commit(); c1.documents().build(Name.overwrite(db, "doc1")).elem("test2").end("test2").commit(); assertEquals(1, c1.documents().size()); @@ -202,26 +202,26 @@ public void getChild3() { @Test(expected = DatabaseException.class) public void buildDocument3() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.documents().build(Name.create(db, "doc1")).elem("test1").end("test1").commit(); c1.documents().build(Name.create(db, "doc1")).elem("test2").end("test2").commit(); } @Test public void buildDocument4() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); assertEquals(1, c1.documents().size()); } @Test public void buildDocument5() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); assertEquals(2, c1.documents().size()); } @Test public void buildDocument6() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.documents().build(Name.create(db, "child/doc1")).elem("test").end("test").commit(); assertEquals(0, 
c1.documents().size()); assertEquals(1, db.getFolder("/top/child").documents().size()); @@ -232,7 +232,7 @@ public void buildDocument3() { } @Test public void size2() { - Folder c1 = db.createFolder("/top/nested"); + final Folder c1 = db.createFolder("/top/nested"); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.documents().build(Name.create(db, "doc1")).elem("test").end("test").commit(); @@ -241,21 +241,21 @@ public void buildDocument3() { } @Test public void childrenSize1() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); assertEquals(0, c1.children().size()); } @Test public void childrenSize2() { db.createFolder("/top/nested1"); db.createFolder("/top/nested2"); - Folder c1 = db.getFolder("/top"); + final Folder c1 = db.getFolder("/top"); assertEquals(2, c1.children().size()); } @Test public void childrenSize3() { db.createFolder("/top/nested1"); db.createFolder("/top/nested2").documents().build(Name.generate(db)).elem("test").end("test").commit(); - Folder c1 = db.getFolder("/top"); + final Folder c1 = db.getFolder("/top"); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); assertEquals(2, c1.children().size()); } @@ -263,21 +263,21 @@ public void buildDocument3() { @Test public void iterateChildren1() { db.createFolder("/top1"); db.createFolder("/top2"); - Collection children = new ArrayList(); - for (Folder child : db.getFolder("/").children()) { + final Collection children = new ArrayList<>(); + for (final Folder child : db.getFolder("/").children()) { children.add(child); } assertEquals(3, children.size()); } @Test public void clear1() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); c1.clear(); assertEquals(0, c1.documents().size()); } @Test public void clear2() { - Folder c1 = db.createFolder("/top"); + final Folder c1 = db.createFolder("/top"); 
c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.clear(); @@ -287,7 +287,7 @@ public void buildDocument3() { @Test public void clear3() { db.createFolder("/top/nested1"); db.createFolder("/top/nested2"); - Folder c1 = db.getFolder("/top"); + final Folder c1 = db.getFolder("/top"); c1.clear(); assertEquals(0, c1.children().size()); } @@ -296,7 +296,7 @@ public void buildDocument3() { db.createFolder("/top/nested1"); db.createFolder("/top/nested1/more"); db.createFolder("/top/nested2"); - Folder c1 = db.getFolder("/top"); + final Folder c1 = db.getFolder("/top"); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.documents().build(Name.generate(db)).elem("test").end("test").commit(); c1.clear(); @@ -305,7 +305,7 @@ public void buildDocument3() { } @Test public void delete1() { - Folder c1 = db.createFolder("/top/nested"); + final Folder c1 = db.createFolder("/top/nested"); db.getFolder("/top/nested"); c1.delete(); try { @@ -318,7 +318,7 @@ public void buildDocument3() { @Test(expected = DatabaseException.class) public void delete2() { db.createFolder("/top/nested/more"); - Folder c1 = db.getFolder("/top/nested"); + final Folder c1 = db.getFolder("/top/nested"); db.getFolder("/top/nested/more"); c1.delete(); db.getFolder("/top/nested/more"); @@ -346,61 +346,62 @@ public void delete2() { } @Test public void getDocument1() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); - Document d = c1.documents().get("original"); + final Document d = c1.documents().get("original"); assertNotNull(d); } @Test public void getDocument2() { - Folder c1 = db.createFolder("/c1"); - Folder c2 = db.createFolder("/c1/c2"); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c1/c2"); 
c2.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); - Document d = c1.documents().get("c2/original"); + final Document d = c1.documents().get("c2/original"); assertNotNull(d); } @Test public void containsDocument1() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); assertTrue(c1.documents().contains("original")); } @Test public void containsDocument2() { - Folder c1 = db.createFolder("/c1"); - Folder c2 = db.createFolder("/c1/c2"); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c1/c2"); c2.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); assertTrue(c1.documents().contains("c2/original")); } @Test public void query1() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); c1.query().single("/test"); } @Test public void query2() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.namespaceBindings().put("", "http://example.com"); c1.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); c1.query().single("/test"); } @Test public void queryGetFreshService() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.documents().build(Name.create(db, "original")).namespace("", "foo").elem("test").end("test").commit(); c1.query().namespace("", "foo").single("/test"); assertFalse(c1.query().exists("/test")); // namespace bindings not propagated from previous query } @Test public void queryBaseUri() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.documents().build(Name.create(db, "original")).elem("test").end("test").commit(); assertTrue(c1.query().single("doc-available('original')").booleanValue()); } @Test public 
void convertToSequence() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); c1.documents().build(Name.create(db, "one")).elem("test").end("test").commit(); c1.children().create("sub").documents().build(Name.create(db, "another")) .elem("test").end("test").commit(); assertEquals(0, c2.query().all("/test").size()); @@ -409,7 +410,8 @@ public void delete2() { } @Test public void convertDocumentsToSequence() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); c1.documents().build(Name.create(db, "one")).elem("test").end("test").commit(); c1.children().create("sub").documents().build(Name.create(db, "another")).elem("test").end("test").commit(); assertEquals(0, c2.query().all("/test").size()); @@ -417,33 +419,36 @@ public void delete2() { } @Test public void move1() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Folder f = c1.children().create("f"); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Folder f = c1.children().create("f"); f.move(c2, Name.keepCreate(db)); assertEquals("/c2/f", f.path()); assertEquals(c2, f.parent()); } @Test public void move2() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Folder f = c1.children().create("f"); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Folder f = c1.children().create("f"); f.move(c2, Name.create(db, "g")); assertEquals("/c2/g", f.path()); assertEquals(c2, f.parent()); } @Test public void move3() { - Folder c1 = db.createFolder("/c1"); - Folder f = c1.children().create("f"); + final Folder c1 = db.createFolder("/c1"); + final Folder f = c1.children().create("f"); f.move(f.parent(), Name.create(db, "g")); assertEquals("/c1/g", f.path()); assertEquals(c1, f.parent()); } 
@Test public void copy1() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - Folder f1 = c1.children().create("f"); - Folder f2 = f1.copy(c2, Name.keepCreate(db)); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final Folder f1 = c1.children().create("f"); + final Folder f2 = f1.copy(c2, Name.keepCreate(db)); assertEquals("/c1/f", f1.path()); assertEquals(c1, f1.parent()); assertEquals("/c2/f", f2.path()); @@ -451,9 +456,9 @@ public void delete2() { } @Test public void copy2() { - Folder c1 = db.createFolder("/c1"); - Folder f1 = c1.children().create("f1"); - Folder f2 = f1.copy(f1.parent(), Name.create(db, "f2")); + final Folder c1 = db.createFolder("/c1"); + final Folder f1 = c1.children().create("f1"); + final Folder f2 = f1.copy(f1.parent(), Name.create(db, "f2")); assertEquals("/c1/f1", f1.path()); assertEquals(c1, f1.parent()); assertEquals("/c1/f2", f2.path()); diff --git a/extensions/fluent/test/src/org/exist/fluent/ItemListTest.java b/extensions/fluent/test/src/org/exist/fluent/ItemListTest.java index fb12e60c53a..48530a59469 100644 --- a/extensions/fluent/test/src/org/exist/fluent/ItemListTest.java +++ b/extensions/fluent/test/src/org/exist/fluent/ItemListTest.java @@ -5,43 +5,49 @@ public class ItemListTest extends DatabaseTestCase { @Test public void equals1() { - ItemList list1 = db.query().all("(1, 2, 3)"), list2 = db.query().all("(1, 2, 3)"); - assertTrue(list1.equals(list2)); + final ItemList list1 = db.query().all("(1, 2, 3)"); + final ItemList list2 = db.query().all("(1, 2, 3)"); + assertEquals(list1, list2); assertEquals(list1.hashCode(), list2.hashCode()); } @Test public void equals2() { - ItemList list1 = db.query().all("(1, 2, 3)"), list2 = db.query().all("(1, 2, 4)"); - assertFalse(list1.equals(list2)); + final ItemList list1 = db.query().all("(1, 2, 3)"); + final ItemList list2 = db.query().all("(1, 2, 4)"); + assertNotEquals(list1, list2); // can't assert anything about their 
hashcodes } @Test public void equals3() { - ItemList list1 = db.query().all("(1, 2, 3)"), list2 = db.query().all("(1, 2)"); - assertFalse(list1.equals(list2)); + final ItemList list1 = db.query().all("(1, 2, 3)"); + final ItemList list2 = db.query().all("(1, 2)"); + assertNotEquals(list1, list2); // can't assert anything about their hashcodes } @Test public void equals4() { - ItemList list1 = db.query().all("(1, 2)"), list2 = db.query().all("(1, 2, 3)"); - assertFalse(list1.equals(list2)); + final ItemList list1 = db.query().all("(1, 2)"); + final ItemList list2 = db.query().all("(1, 2, 3)"); + assertNotEquals(list1, list2); // can't assert anything about their hashcodes } @Test public void nodesEquals1() { - ItemList.NodesFacet list1 = db.query().all("(1, 2, 3)").nodes(), list2 = db.query().all("(1, 2, 3)").nodes(); - assertTrue(list1.equals(list2)); + final ItemList.NodesFacet list1 = db.query().all("(1, 2, 3)").nodes(); + final ItemList.NodesFacet list2 = db.query().all("(1, 2, 3)").nodes(); + assertEquals(list1, list2); assertEquals(list1.hashCode(), list2.hashCode()); } @Test public void valuesEquals1() { - ItemList.ValuesFacet list1 = db.query().all("(1, 2, 3)").values(), list2 = db.query().all("(1, 2, 3)").values(); - assertTrue(list1.equals(list2)); + final ItemList.ValuesFacet list1 = db.query().all("(1, 2, 3)").values(); + final ItemList.ValuesFacet list2 = db.query().all("(1, 2, 3)").values(); + assertEquals(list1, list2); assertEquals(list1.hashCode(), list2.hashCode()); } @Test public void convertToSequence() { - XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db, "test")) + final XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db, "test")) .elem("a") .elem("b") .elem("c").end("c") @@ -52,22 +58,22 @@ public class ItemListTest extends DatabaseTestCase { .elem("c").end("c") .end("a").commit(); assertEquals(3, doc.query().all("//c").size()); - ItemList res = doc.query().all("//(b|d)"); - assertEquals(2, 
doc.query().all("$_1//c", new Object[] { res }).size()); + final ItemList res = doc.query().all("//(b|d)"); + assertEquals(2, doc.query().all("$_1//c", res).size()); } @Test(expected=DatabaseException.class) public void stale1() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( + final XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( "")); - ItemList list = doc.query().all("/foo/*"); + final ItemList list = doc.query().all("/foo/*"); doc.query().all("//bar1").deleteAllNodes(); doc.query().all("$_1", list); } @Test public void stale2() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( + final XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( "")); - ItemList list = doc.query().all("/foo/*"); + final ItemList list = doc.query().all("/foo/*"); doc.query().all("//bar1").deleteAllNodes(); list.removeDeletedNodes(); assertEquals(1, list.size()); @@ -75,14 +81,14 @@ public class ItemListTest extends DatabaseTestCase { } @Test public void deleteAllNodes1() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( + final XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( "")); doc.query().all("//bar").deleteAllNodes(); assertEquals("", doc.contentsAsString()); } @Test public void deleteAllNodes2() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( + final XMLDocument doc = db.createFolder("/top").documents().load(Name.generate(db), Source.xml( "")); doc.query().all("//bar").deleteAllNodes(); assertEquals(0, db.getFolder("/top").documents().size()); diff --git a/extensions/fluent/test/src/org/exist/fluent/ItemTest.java b/extensions/fluent/test/src/org/exist/fluent/ItemTest.java index 443cd18c2fb..4152c9761a5 100644 --- a/extensions/fluent/test/src/org/exist/fluent/ItemTest.java +++ 
b/extensions/fluent/test/src/org/exist/fluent/ItemTest.java @@ -7,37 +7,40 @@ public class ItemTest extends DatabaseTestCase { @Test public void equals1() { - Item item1 = db.query().single("3"), item2 = db.query().single("3"); + final Item item1 = db.query().single("3"); + final Item item2 = db.query().single("3"); assertTrue(item1.equals(item2)); assertEquals(item1.hashCode(), item2.hashCode()); } @Test public void equals2() { - Item item1 = db.query().single("2"), item2 = db.query().single("3"); + final Item item1 = db.query().single("2"); + final Item item2 = db.query().single("3"); assertFalse(item1.equals(item2)); // can't assert anything about their hashcodes } @Test public void equals3() { - Item item1 = db.query().single("2"), item2 = db.query().single("'foo'"); + final Item item1 = db.query().single("2"); + final Item item2 = db.query().single("'foo'"); assertFalse(item1.equals(item2)); // can't assert anything about their hashcodes } @Test public void equals4() { - XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db, "test")) + final XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db, "test")) .elem("root") .elem("text1").text("foo").end("text1") .elem("text2").text("foo").end("text2") .end("root").commit(); - Item item1 = doc.query().single("xs:string(//text1/text())"); - Item item2 = doc.query().single("xs:string(//text2/text())"); + final Item item1 = doc.query().single("xs:string(//text1/text())"); + final Item item2 = doc.query().single("xs:string(//text2/text())"); assertTrue(item1.equals(item2)); assertEquals(item1.hashCode(), item2.hashCode()); } @Test public void convertToSequence() { - XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db, "test")) + final XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db, "test")) .elem("a") .elem("b") .elem("c").end("c") @@ -48,19 +51,20 @@ public class ItemTest extends DatabaseTestCase { .elem("c").end("c") 
.end("a").commit(); assertEquals(3, doc.query().all("//c").size()); - Item res = doc.query().single("//b"); + final Item res = doc.query().single("//b"); assertEquals(1, doc.query().all("$_1//c", res).size()); } @Test public void toItemList() { - Item item = db.query().single("3"); - ItemList list = item.toItemList(); + final Item item = db.query().single("3"); + final ItemList list = item.toItemList(); assertEquals(1, list.size()); assertEquals(item, list.get(0)); } @Test public void comparableValue() { - Item item1 = db.query().single("3"), item2 = db.query().single("4"); + final Item item1 = db.query().single("3"); + final Item item2 = db.query().single("4"); assertTrue(item1.comparableValue().compareTo(item2.comparableValue()) < 0); } diff --git a/extensions/fluent/test/src/org/exist/fluent/NodeTest.java b/extensions/fluent/test/src/org/exist/fluent/NodeTest.java index 075f7898e3b..a9d2f90cd64 100644 --- a/extensions/fluent/test/src/org/exist/fluent/NodeTest.java +++ b/extensions/fluent/test/src/org/exist/fluent/NodeTest.java @@ -43,7 +43,8 @@ public void comparableValue() { public void equals1() { XMLDocument doc = db.createFolder("/test").documents().build(Name.create(db,"foo")) .elem("top").elem("child").end("child").end("top").commit(); - Object o1 = doc.query().single("//child"), o2 = doc.query().single("//child"); + final Object o1 = doc.query().single("//child"); + final Object o2 = doc.query().single("//child"); assertTrue(o1.equals(o2)); assertEquals(o1.hashCode(), o2.hashCode()); } @@ -52,7 +53,8 @@ public void equals1() { public void equals2() { XMLDocument doc = db.createFolder("/test").documents().build(Name.create(db,"foo")) .elem("top").elem("child").end("child").end("top").commit(); - Object o1 = doc.query().single("//child"), o2 = doc.query().single("//top"); + final Object o1 = doc.query().single("//child"); + final Object o2 = doc.query().single("//top"); assertFalse(o1.equals(o2)); // can't assert unequal hashCodes, they're allowed to be the 
same } @@ -64,17 +66,24 @@ public void equals3() { .elem("top").elem("child").end("child").end("top").commit(); XMLDocument doc2 = folder.documents().build(Name.create(db,"foo2")) .elem("top").elem("child").end("child").end("top").commit(); - Object o1 = doc1.query().single("//top"), o2 = doc2.query().single("//top"); + final Object o1 = doc1.query().single("//top"); + final Object o2 = doc2.query().single("//top"); assertFalse(o1.equals(o2)); } @Test public void compareDocumentOrderTo1() { - Node root = db.getFolder("/").documents().load(Name.generate(db), Source.xml( + final Node root = db.getFolder("/").documents().load(Name.generate(db), Source.xml( "")).root(); - Node a = root.query().single("//a").node(), aa = root.query().single("//aa").node(); - Node b = root.query().single("//b").node(), bb = root.query().single("//bb").node(); - Node c = root.query().single("//c").node(), cc = root.query().single("//cc").node(); + final Node a = root.query().single("//a").node(); + final Node aa = root.query().single("//aa").node(); + + final Node b = root.query().single("//b").node(); + final Node bb = root.query().single("//bb").node(); + + final Node c = root.query().single("//c").node(); + final Node cc = root.query().single("//cc").node(); + assertEquals(0, a.compareDocumentOrderTo(a)); assertEquals(0, a.compareDocumentOrderTo(root.query().single("//a").node())); assertThat(a.compareDocumentOrderTo(b), lessThan(0)); @@ -86,11 +95,18 @@ public void compareDocumentOrderTo1() { @Test public void compareDocumentOrderTo2() { - ItemList nodes = db.query().all("let $x := return ($x//a, $x//aa, $x//b, $x//bb, $x//c, $x//cc, $x)"); - Node root = nodes.get(6).node(); - Node a = nodes.get(0).node(), aa = nodes.get(1).node(); - Node b = nodes.get(2).node(), bb = nodes.get(3).node(); - Node c = nodes.get(4).node(), cc = nodes.get(5).node(); + final ItemList nodes = db.query().all("let $x := return ($x//a, $x//aa, $x//b, $x//bb, $x//c, $x//cc, $x)"); + final Node root = 
nodes.get(6).node(); + + final Node a = nodes.get(0).node(); + final Node aa = nodes.get(1).node(); + + final Node b = nodes.get(2).node(); + final Node bb = nodes.get(3).node(); + + final Node c = nodes.get(4).node(); + final Node cc = nodes.get(5).node(); + assertEquals(0, a.compareDocumentOrderTo(a)); assertThat(a.compareDocumentOrderTo(b), lessThan(0)); assertThat(c.compareDocumentOrderTo(b), greaterThan(0)); diff --git a/extensions/fluent/test/src/org/exist/fluent/XMLDocumentTest.java b/extensions/fluent/test/src/org/exist/fluent/XMLDocumentTest.java index 4d857234200..3bc9ada90d7 100644 --- a/extensions/fluent/test/src/org/exist/fluent/XMLDocumentTest.java +++ b/extensions/fluent/test/src/org/exist/fluent/XMLDocumentTest.java @@ -11,36 +11,37 @@ public class XMLDocumentTest extends DatabaseTestCase { @Test public void query1() { - Folder c1 = db.createFolder("/c1"); - XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); + final Folder c1 = db.createFolder("/c1"); + final XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); doc.query().single("/test"); } @Test public void query2() { - Folder c1 = db.createFolder("/c1"); + final Folder c1 = db.createFolder("/c1"); c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); - XMLDocument doc = c1.documents().get("original").xml(); + final XMLDocument doc = c1.documents().get("original").xml(); doc.query().single("/test"); } @Test public void query3() { - Folder c1 = db.createFolder("/c1"); - XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); + final Folder c1 = db.createFolder("/c1"); + final XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); assertEquals(1, doc.query().all("/test").size()); } @Test public void query4() { - Folder c1 = db.createFolder("/c1"); - XMLDocument doc = 
c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); + final Folder c1 = db.createFolder("/c1"); + final XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); c1.documents().build(Name.create(db,"another")).elem("test").end("test").commit(); doc.query().single("/test"); assertEquals(2, c1.query().all("/test").size()); } @Test public void copy1() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - XMLDocument original = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); - XMLDocument copy = original.copy(c2, Name.keepCreate(db)); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final XMLDocument original = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); + final XMLDocument copy = original.copy(c2, Name.keepCreate(db)); assertEquals(1, c1.documents().size()); c1.query().single("/test"); assertEquals(1, c2.documents().size()); @@ -49,8 +50,9 @@ public class XMLDocumentTest extends DatabaseTestCase { } @Test public void move1() { - Folder c1 = db.createFolder("/c1"), c2 = db.createFolder("/c2"); - XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); + final Folder c1 = db.createFolder("/c1"); + final Folder c2 = db.createFolder("/c2"); + final XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); doc.move(c2, Name.keepCreate(db)); assertEquals(0, c1.documents().size()); assertFalse(c1.query().exists("/test")); @@ -61,63 +63,63 @@ public class XMLDocumentTest extends DatabaseTestCase { } @Test public void delete1() { - Folder c1 = db.createFolder("/c1"); - XMLDocument doc = c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); + final Folder c1 = db.createFolder("/c1"); + final XMLDocument doc = 
c1.documents().build(Name.create(db,"original")).elem("test").end("test").commit(); doc.delete(); assertEquals(0, c1.documents().size()); } @Test public void delete2() { - Folder c1 = db.createFolder("/c1"); - XMLDocument doc1 = c1.documents().build(Name.create(db,"doc1")).elem("test").attr("xml:id", "a").end("test").commit(); - XMLDocument doc2 = c1.documents().build(Name.create(db,"doc2")).elem("test2").attr("xml:id", "b").end("test2").commit(); + final Folder c1 = db.createFolder("/c1"); + final XMLDocument doc1 = c1.documents().build(Name.create(db,"doc1")).elem("test").attr("xml:id", "a").end("test").commit(); + final XMLDocument doc2 = c1.documents().build(Name.create(db,"doc2")).elem("test2").attr("xml:id", "b").end("test2").commit(); doc1.delete(); doc2.delete(); assertEquals(0, c1.documents().size()); } @Test public void convertToSequence() { - Folder c = db.createFolder("/top"); + final Folder c = db.createFolder("/top"); c.documents().build(Name.create(db,"one")).elem("test").end("test").commit(); - XMLDocument doc = c.documents().build(Name.create(db,"two")).elem("test").end("test").commit(); + final XMLDocument doc = c.documents().build(Name.create(db,"two")).elem("test").end("test").commit(); assertEquals(2, c.query().all("/test").size()); assertEquals(1, c.query().all("$_1/test", new Object[] { doc }).size()); } @Test public void nameAndPathFromCreate() { - XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db,"foo")).elem("root").end("root").commit(); + final XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db,"foo")).elem("root").end("root").commit(); assertEquals("foo", doc.name()); assertEquals("/top/foo", doc.path()); } @Test public void nameAndPathFromLoad() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); + final XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); assertEquals("foo", doc.name()); 
assertEquals("/top/foo", doc.path()); } @Test public void contentsAsStringFromCreate() { - XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db,"foo")).elem("root").end("root").commit(); + final XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db,"foo")).elem("root").end("root").commit(); assertEquals("", doc.contentsAsString()); } @Test public void contentsAsStringFromLoad() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); + final XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); assertEquals("", doc.contentsAsString()); } @Test public void lengthFromCreate() { - XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db,"foo")).elem("root").end("root").commit(); + final XMLDocument doc = db.createFolder("/top").documents().build(Name.create(db,"foo")).elem("root").end("root").commit(); assertThat(doc.length(), Matchers.greaterThan(0L)); } @Test public void lengthFromLoad() { - XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); + final XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); assertThat(doc.length(), Matchers.greaterThan(0L)); } @Test public void writeToOutputStream() throws IOException { - XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); + final XMLDocument doc = db.createFolder("/top").documents().load(Name.create(db,"foo"), Source.xml("")); try (final FastByteArrayOutputStream out = new FastByteArrayOutputStream()) { doc.write(out); out.close(); diff --git a/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndex.java b/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndex.java index 6f74589e038..a659ad931aa 100644 --- a/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndex.java +++ 
b/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndex.java @@ -68,10 +68,6 @@ public class LuceneIndex extends AbstractIndex implements RawBackupSupport { protected SearcherManager searcherManager = null; protected ReaderManager readerManager = null; - public LuceneIndex() { - //Nothing special to do - } - public String getDirName() { return DIR_NAME; } diff --git a/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndexWorker.java b/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndexWorker.java index c51edbda9f0..f4a339f7027 100644 --- a/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndexWorker.java +++ b/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndexWorker.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.*; import org.apache.lucene.util.*; import org.exist.collections.Collection; +import org.exist.dom.persistent.*; import org.exist.indexing.*; import org.exist.indexing.StreamListener.ReindexMode; import org.exist.indexing.lucene.PlainTextHighlighter.Offset; @@ -43,16 +44,6 @@ import org.exist.dom.QName; import org.exist.dom.memtree.MemTreeBuilder; import org.exist.dom.memtree.NodeImpl; -import org.exist.dom.persistent.Match; -import org.exist.dom.persistent.ElementImpl; -import org.exist.dom.persistent.IStoredNode; -import org.exist.dom.persistent.NodeProxy; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.NewArrayNodeSet; -import org.exist.dom.persistent.DocumentSet; -import org.exist.dom.persistent.AbstractCharacterData; -import org.exist.dom.persistent.NodeSet; -import org.exist.dom.persistent.AttrImpl; import org.exist.numbering.NodeId; import org.exist.security.PermissionDeniedException; import org.exist.storage.*; @@ -117,7 +108,7 @@ public class LuceneIndexWorker implements OrderedValuesIndex, QNamedKeysIndex { private ReindexMode mode = ReindexMode.STORE; private LuceneConfig config; - private Stack contentStack = null; + private Deque 
contentStack = null; private Set nodesToRemove = null; private List nodesToWrite = null; private Document pendingDoc = null; @@ -133,6 +124,8 @@ public class LuceneIndexWorker implements OrderedValuesIndex, QNamedKeysIndex { private boolean isReindexing; + private final StreamListener listener = new LuceneStreamListener(); + public LuceneIndexWorker(LuceneIndex parent, DBBroker broker) { this.index = parent; this.broker = broker; @@ -288,8 +281,6 @@ public IStoredNode getReindexRoot(IStoredNode node, N return null; } - private StreamListener listener = new LuceneStreamListener(); - @Override public StreamListener getListener() { return listener; @@ -728,11 +719,10 @@ public void collect(int docNum) throws IOException { // document is in a collection if (isDocumentMatch(fDocUri, toBeMatchedURIs)) { - DocumentImpl storedDoc = null; - try { + try(final LockedDocument lockedStoredDoc = context.getBroker().getXMLResource(XmldbURI.createInternal(fDocUri), LockMode.READ_LOCK)) { // try to read document to check if user is allowed to access it - storedDoc = context.getBroker().getXMLResource(XmldbURI.createInternal(fDocUri), LockMode.READ_LOCK); - if (storedDoc == null) { + + if (lockedStoredDoc == null) { return; } @@ -764,10 +754,6 @@ public void collect(int docNum) throws IOException { attribs.clear(); } catch (PermissionDeniedException e) { // not allowed to read the document: ignore the match. - } finally { - if (storedDoc != null) { - storedDoc.getUpdateLock().release(LockMode.READ_LOCK); - } } } } @@ -1059,7 +1045,8 @@ public Occurrences[] scanIndex(XQueryContext context, DocumentSet docs, NodeSet List qnames = hints == null ? 
null : (List)hints.get(QNAMES_KEY); qnames = getDefinedIndexes(qnames); //Expects a StringValue - String start = null, end = null; + String start = null; + String end = null; long max = Long.MAX_VALUE; if (hints != null) { Object vstart = hints.get(START_VALUE); @@ -1190,15 +1177,18 @@ private void addPending(PendingDoc pending) { } private static class PendingDoc { - NodeId nodeId; - CharSequence text; - QName qname; - LuceneIndexConfig idxConf; - float boost; + private final NodeId nodeId; + private final QName qname; + private final NodePath path; + private final CharSequence text; + private final float boost; + private final LuceneIndexConfig idxConf; - private PendingDoc(NodeId nodeId, QName qname, NodePath path, CharSequence text, float boost, LuceneIndexConfig idxConf) { + private PendingDoc(final NodeId nodeId, final QName qname, final NodePath path, final CharSequence text, + final float boost, final LuceneIndexConfig idxConf) { this.nodeId = nodeId; this.qname = qname; + this.path = path; this.text = text; this.idxConf = idxConf; this.boost = boost; @@ -1206,11 +1196,11 @@ private PendingDoc(NodeId nodeId, QName qname, NodePath path, CharSequence text, } private static class PendingAttr { - AttrImpl attr; - LuceneIndexConfig conf; - NodePath path; + private final AttrImpl attr; + private final LuceneIndexConfig conf; + private final NodePath path; - public PendingAttr(AttrImpl attr, NodePath path, LuceneIndexConfig conf) { + public PendingAttr(final AttrImpl attr, final NodePath path, final LuceneIndexConfig conf) { this.attr = attr; this.conf = conf; this.path = path; @@ -1346,17 +1336,17 @@ public void startElement(Txn transaction, ElementImpl element, NodePath path) { currentElement = element; if (mode == ReindexMode.STORE && config != null) { - if (contentStack != null && !contentStack.isEmpty()) { - for (TextExtractor extractor : contentStack) { + if (contentStack != null) { + for (final TextExtractor extractor : contentStack) { 
extractor.startElement(element.getQName()); } } - Iterator configIter = config.getConfig(path); + Iterator configIter = config.getConfig(path); if (configIter != null) { if (contentStack == null) { - contentStack = new Stack<>(); - } + contentStack = new ArrayDeque<>(); + } while (configIter.hasNext()) { LuceneIndexConfig configuration = configIter.next(); if (configuration.match(path)) { @@ -1373,8 +1363,8 @@ public void startElement(Txn transaction, ElementImpl element, NodePath path) { @Override public void endElement(Txn transaction, ElementImpl element, NodePath path) { if (config != null) { - if (mode == ReindexMode.STORE && contentStack != null && !contentStack.isEmpty()) { - for (TextExtractor extractor : contentStack) { + if (mode == ReindexMode.STORE && contentStack != null) { + for (final TextExtractor extractor : contentStack) { extractor.endElement(element.getQName()); } } @@ -1460,8 +1450,8 @@ public void attribute(Txn transaction, AttrImpl attrib, NodePath path) { @Override public void characters(Txn transaction, AbstractCharacterData text, NodePath path) { - if (contentStack != null && !contentStack.isEmpty()) { - for (TextExtractor extractor : contentStack) { + if (contentStack != null) { + for (final TextExtractor extractor : contentStack) { extractor.beforeCharacters(); extractor.characters(text.getXMLString()); } diff --git a/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneMatchListener.java b/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneMatchListener.java index 0b65c6b84da..29aefebc9ac 100644 --- a/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneMatchListener.java +++ b/extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneMatchListener.java @@ -182,6 +182,7 @@ private void scanMatches(final NodeProxy p) { while (reader.hasNext()) { final int ev = reader.next(); switch (ev) { + case XMLStreamConstants.END_ELEMENT: if (--level < 0) { break; diff --git 
a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/GetField.java b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/GetField.java index f65fa9f2637..d5c1e1d6eb8 100644 --- a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/GetField.java +++ b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/GetField.java @@ -23,6 +23,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.indexing.lucene.LuceneIndex; import org.exist.indexing.lucene.LuceneIndexWorker; import org.exist.security.PermissionDeniedException; @@ -64,24 +65,19 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException { XmldbURI uri = XmldbURI.createInternal(args[0].getStringValue()); String field = args[1].getStringValue(); - - DocumentImpl doc = null; - try { - doc = context.getBroker().getXMLResource(uri, LockMode.READ_LOCK); - if (doc == null) { + + try(final LockedDocument lockedDoc = context.getBroker().getXMLResource(uri, LockMode.READ_LOCK)) { + if (lockedDoc == null) { return Sequence.EMPTY_SEQUENCE; } // Get the lucene worker - LuceneIndexWorker index = (LuceneIndexWorker) context.getBroker().getIndexController().getWorkerByIndexId(LuceneIndex.ID); - String content = index.getFieldContent(doc.getDocId(), field); + final LuceneIndexWorker index = (LuceneIndexWorker) context.getBroker().getIndexController().getWorkerByIndexId(LuceneIndex.ID); + final String content = index.getFieldContent(lockedDoc.getDocument().getDocId(), field); return content == null ? 
Sequence.EMPTY_SEQUENCE : new org.exist.xquery.value.StringValue(content); } catch (PermissionDeniedException e) { throw new XPathException(this, LuceneModule.EXXQDYFT0001, "Permission denied to read document " + args[0].getStringValue()); } catch (IOException e) { throw new XPathException(this, LuceneModule.EXXQDYFT0002, "IO error while reading document " + args[0].getStringValue()); - } finally { - if (doc != null) - doc.getUpdateLock().release(LockMode.READ_LOCK); } } diff --git a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Index.java b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Index.java index 59643ec38af..c5544e90ad0 100644 --- a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Index.java +++ b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Index.java @@ -25,6 +25,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.indexing.StreamListener.ReindexMode; import org.exist.indexing.lucene.LuceneIndex; import org.exist.indexing.lucene.LuceneIndexWorker; @@ -98,7 +99,7 @@ public Index(XQueryContext context, FunctionSignature signature) { @Override public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException { - DocumentImpl doc = null; + try { // Retrieve Lucene LuceneIndexWorker index = (LuceneIndexWorker) context.getBroker() @@ -109,29 +110,30 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce String path = args[0].itemAt(0).getStringValue(); // Retrieve document from database - doc = context.getBroker().getXMLResource(XmldbURI.xmldbUriFor(path), LockMode.READ_LOCK); - - // Verify the document actually exists - if (doc == null) { - throw new XPathException("Document " + path + " does not exist."); - } - - boolean flush = args.length == 2 || args[2].effectiveBooleanValue(); - - // Note: code order is important here, - index.setDocument(doc, 
ReindexMode.STORE); - index.setMode(ReindexMode.STORE); - - // Get 'solr' node from second parameter - NodeValue descriptor = (NodeValue) args[1].itemAt(0); - - // Pas document and index instructions to indexer - index.indexNonXML(descriptor); - - if (flush) { - // Make sure things are written - index.writeNonXML(); - } + try(final LockedDocument lockedDoc = context.getBroker().getXMLResource(XmldbURI.xmldbUriFor(path), LockMode.READ_LOCK)) { + // Verify the document actually exists + final DocumentImpl doc = lockedDoc == null ? null : lockedDoc.getDocument(); + if (doc == null) { + throw new XPathException(this, "Document " + path + " does not exist."); + } + + boolean flush = args.length == 2 || args[2].effectiveBooleanValue(); + + // Note: code order is important here, + index.setDocument(doc, ReindexMode.STORE); + index.setMode(ReindexMode.STORE); + + // Get 'solr' node from second parameter + NodeValue descriptor = (NodeValue) args[1].itemAt(0); + + // Pas document and index instructions to indexer + index.indexNonXML(descriptor); + + if (flush) { + // Make sure things are written + index.writeNonXML(); + } + } } else { // "close" index.writeNonXML(); @@ -139,12 +141,7 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce } catch (Exception ex) { // PermissionDeniedException logger.error(ex.getMessage(), ex); - throw new XPathException(ex); - - } finally { - if (doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } + throw new XPathException(this, ex); } // Return nothing [status would be nice] diff --git a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/InspectIndex.java b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/InspectIndex.java index 290b818f5b3..8bce2e7ccb3 100644 --- a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/InspectIndex.java +++ b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/InspectIndex.java @@ -24,6 +24,7 @@ import 
org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.indexing.lucene.LuceneIndex; import org.exist.indexing.lucene.LuceneIndexWorker; import org.exist.security.PermissionDeniedException; @@ -66,29 +67,23 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException { String path = args[0].itemAt(0).getStringValue(); - DocumentImpl doc = null; - try { + try(final LockedDocument lockedDoc = context.getBroker().getXMLResource(XmldbURI.xmldbUriFor(path), LockMode.READ_LOCK)) { // Retrieve document from database - doc = context.getBroker().getXMLResource(XmldbURI.xmldbUriFor(path), LockMode.READ_LOCK); // Verify the document actually exists - if (doc == null) { + if (lockedDoc == null) { throw new XPathException(this, "Document " + path + " does not exist."); } final LuceneIndexWorker index = (LuceneIndexWorker) context.getBroker().getIndexController().getWorkerByIndexId(LuceneIndex.ID); - return new BooleanValue(index.hasIndex(doc.getDocId())); + return new BooleanValue(index.hasIndex(lockedDoc.getDocument().getDocId())); } catch (PermissionDeniedException e) { throw new XPathException(this, LuceneModule.EXXQDYFT0001, e.getMessage()); } catch (URISyntaxException e) { throw new XPathException(this, LuceneModule.EXXQDYFT0003, e.getMessage()); } catch (IOException e) { throw new XPathException(this, LuceneModule.EXXQDYFT0002, e.getMessage()); - } finally { - if(doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } } } diff --git a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Query.java b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Query.java index 73853a3c634..f1b96d08b88 100644 --- a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Query.java +++ b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/Query.java @@ -35,6 +35,8 @@ import org.exist.dom.persistent.VirtualNodeSet; import 
org.exist.indexing.lucene.LuceneIndex; import org.exist.indexing.lucene.LuceneIndexWorker; +import org.exist.numbering.NodeId; +import org.exist.stax.ExtendedXMLStreamReader; import org.exist.storage.ElementValue; import org.exist.xquery.*; import org.exist.xquery.value.FunctionParameterSequenceType; @@ -316,13 +318,21 @@ protected Properties parseOptions(Sequence contextSequence, Item contextItem) th Sequence optSeq = getArgument(2).eval(contextSequence, contextItem); NodeValue optRoot = (NodeValue) optSeq.itemAt(0); try { - XMLStreamReader reader = context.getXMLStreamReader(optRoot); + final int thisLevel = optRoot.getNodeId().getTreeLevel(); + final XMLStreamReader reader = context.getXMLStreamReader(optRoot); + reader.next(); reader.next(); - reader.next(); while (reader.hasNext()) { int status = reader.next(); if (status == XMLStreamReader.START_ELEMENT) { options.put(reader.getLocalName(), reader.getElementText()); + } else if (status == XMLStreamReader.END_ELEMENT) { + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + if (otherLevel == thisLevel) { + // finished `optRoot` element... 
+ break; // exit-while + } } } return options; diff --git a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/RemoveIndex.java b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/RemoveIndex.java index b915a4dbae1..d88bd03fbf4 100644 --- a/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/RemoveIndex.java +++ b/extensions/indexes/lucene/src/org/exist/xquery/modules/lucene/RemoveIndex.java @@ -21,6 +21,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.indexing.StreamListener.ReindexMode; import org.exist.indexing.lucene.LuceneIndex; import org.exist.indexing.lucene.LuceneIndexWorker; @@ -60,16 +61,13 @@ public RemoveIndex(XQueryContext context) { @Override public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException { - DocumentImpl doc = null; - try { - // Get first parameter, this is the document - String path = args[0].itemAt(0).getStringValue(); - - // Retrieve document from database - doc = context.getBroker().getXMLResource(XmldbURI.xmldbUriFor(path), LockMode.READ_LOCK); + // Get first parameter, this is the document + final String path = args[0].itemAt(0).getStringValue(); + // Retrieve document from database + try(final LockedDocument lockedDoc = context.getBroker().getXMLResource(XmldbURI.xmldbUriFor(path), LockMode.READ_LOCK);) { // Verify the document actually exists - if (doc == null) { + if (lockedDoc == null) { throw new XPathException("Document " + path + " does not exist."); } @@ -78,17 +76,12 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) .getIndexController().getWorkerByIndexId(LuceneIndex.ID); // Note: code order is important here, - index.setDocument(doc, ReindexMode.REMOVE_BINARY); + index.setDocument(lockedDoc.getDocument(), ReindexMode.REMOVE_BINARY); index.flush(); } catch (Exception ex) { // PermissionDeniedException throw new XPathException(ex); - - } finally { - if 
(doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } } // Return nothing [status would be nice] diff --git a/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/LuceneIndexTest.java b/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/LuceneIndexTest.java index 04fa1437681..4a7a657e1e4 100644 --- a/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/LuceneIndexTest.java +++ b/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/LuceneIndexTest.java @@ -75,7 +75,7 @@ public class LuceneIndexTest { protected static String XUPDATE_END = ""; - + private static final String XML1 = "
" + " The title in big letters" + @@ -389,7 +389,7 @@ public void inlineAndIgnore() throws EXistException, CollectionConfigurationExce seq = xquery.execute(broker, "/article[ft:query(., 'note')]", null); assertNotNull(seq); assertEquals(0, seq.getItemCount()); - + seq = xquery.execute(broker, "/article[ft:query(., 'ignore')]", null); assertNotNull(seq); assertEquals(0, seq.getItemCount()); @@ -494,7 +494,7 @@ public void queryTranslation() throws EXistException, CollectionConfigurationExc final XQueryContext context = new XQueryContext(broker.getBrokerPool()); final CompiledXQuery compiled = xquery.compile(broker, context, "declare variable $q external; " + - "ft:query(//p, util:parse($q)/query)"); + "ft:query(//p, parse-xml($q)/query)"); context.declareVariable("q", "heiterkeit"); Sequence seq = xquery.execute(broker, compiled, null); @@ -733,11 +733,11 @@ public void removeCollection() throws EXistException, CollectionConfigurationExc root = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI); assertNotNull(root); broker.saveCollection(transaction, root); - + transact.commit(transaction); root = null; - + checkIndex(docs, broker, null, null, 0); } } @@ -746,9 +746,11 @@ public void removeCollection() throws EXistException, CollectionConfigurationExc public void reindex() throws EXistException, CollectionConfigurationException, PermissionDeniedException, SAXException, LockException, IOException, QName.IllegalQNameException { final DocumentSet docs = configureAndStore(COLLECTION_CONFIG1, XML1, "dropDocument.xml"); final BrokerPool pool = existEmbeddedServer.getBrokerPool(); - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { + final TransactionManager transact = pool.getTransactionManager(); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Txn transaction = transact.beginTransaction()) { - 
broker.reindexCollection(TestConstants.TEST_COLLECTION_URI); + broker.reindexCollection(transaction, TestConstants.TEST_COLLECTION_URI); checkIndex(docs, broker, new QName[] { new QName("head") }, "title", 1); final Occurrences[] o = checkIndex(docs, broker, new QName[]{new QName("p")}, "with", 1); @@ -759,6 +761,8 @@ public void reindex() throws EXistException, CollectionConfigurationException, P final QName attrQN = new QName("rend", XMLConstants.NULL_NS_URI, ElementValue.ATTRIBUTE); checkIndex(docs, broker, new QName[] { attrQN }, null, 2); checkIndex(docs, broker, new QName[] { attrQN }, "center", 1); + + transaction.commit(); } } @@ -1180,7 +1184,7 @@ private DocumentSet configureAndStore(final String configuration, final String d docs.add(info.getDocument()); transact.commit(transaction); } - + return docs; } @@ -1213,7 +1217,7 @@ private DocumentSet configureAndStore(String configuration, Path directory) thro } transact.commit(transaction); } - + return docs; } @@ -1290,4 +1294,3 @@ public static void cleanupDb() { TestUtils.cleanupDB(); } } - diff --git a/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/SerializeAttrMatchesTest.java b/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/SerializeAttrMatchesTest.java index 812e6e62b1e..3adccf8b47a 100644 --- a/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/SerializeAttrMatchesTest.java +++ b/extensions/indexes/lucene/test/src/org/exist/indexing/lucene/SerializeAttrMatchesTest.java @@ -55,6 +55,9 @@ public class SerializeAttrMatchesTest { + @ClassRule + public static final ExistEmbeddedServer existEmbeddedServer = new ExistEmbeddedServer(true, false); + private static final String COLLECTION_CONFIG = "" + " " + @@ -118,9 +121,6 @@ private DocumentSet configureAndStore(final String configuration, final String d return docs; } - @ClassRule - public static final ExistEmbeddedServer existEmbeddedServer = new ExistEmbeddedServer(true, false); - @Before public void setup() throws 
EXistException, PermissionDeniedException, IOException, TriggerException { final BrokerPool pool = existEmbeddedServer.getBrokerPool(); diff --git a/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndex.java b/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndex.java index ce37a90b93c..96411483d18 100644 --- a/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndex.java +++ b/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndex.java @@ -49,10 +49,6 @@ public class NGramIndex extends AbstractIndex implements RawBackupSupport { protected BFile db; private int gramSize = 3; private Path dataFile = null; - - public NGramIndex() { - //Nothing to do - } @Override public void configure(BrokerPool pool, Path dataDir, Element config) throws DatabaseConfigurationException { diff --git a/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndexWorker.java b/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndexWorker.java index 203cf2ed867..9f638e56382 100644 --- a/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndexWorker.java +++ b/extensions/indexes/ngram/src/org/exist/indexing/ngram/NGramIndexWorker.java @@ -20,13 +20,8 @@ package org.exist.indexing.ngram; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Stack; -import java.util.TreeMap; +import java.util.*; +import java.util.concurrent.locks.ReentrantLock; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; @@ -71,8 +66,8 @@ import org.exist.storage.index.BFile; import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.Txn; import org.exist.util.*; 
import org.exist.util.serializer.AttrList; @@ -85,7 +80,6 @@ import static java.nio.charset.StandardCharsets.UTF_8; /** - * * Each index entry maps a key (collectionId, ngram) to a list of occurrences, which has the * following structure: * @@ -102,21 +96,26 @@ public class NGramIndexWorker implements OrderedValuesIndex, QNamedKeysIndex { @SuppressWarnings("unused") private static final byte IDX_GENERIC = 1; - private ReindexMode mode = ReindexMode.STORE; + private final DBBroker broker; + private final LockManager lockManager; private final org.exist.indexing.ngram.NGramIndex index; + + private ReindexMode mode = ReindexMode.STORE; private char[] buf = new char[1024]; - private int currentChar = 0; private DocumentImpl currentDoc = null; - private final DBBroker broker; + private Map config; + private Deque contentStack = new ArrayDeque<>(); + @SuppressWarnings("unused") - private IndexController controller; - private final Map ngrams = new TreeMap(); + private IndexController controller; + private final Map ngrams = new TreeMap<>(); private final VariableByteOutputStream os = new VariableByteOutputStream(7); private NGramMatchListener matchListener = null; - public NGramIndexWorker(DBBroker broker, org.exist.indexing.ngram.NGramIndex index) { + public NGramIndexWorker(final DBBroker broker, final org.exist.indexing.ngram.NGramIndex index) { this.broker = broker; + this.lockManager = broker.getBrokerPool().getLockManager(); this.index = index; Arrays.fill(buf, ' '); } @@ -125,7 +124,7 @@ public NGramIndexWorker(DBBroker broker, org.exist.indexing.ngram.NGramIndex ind public String getIndexId() { return org.exist.indexing.ngram.NGramIndex.ID; } - + @Override public String getIndexName() { return index.getIndexName(); @@ -140,22 +139,23 @@ public int getN() { } @Override - public Object configure(IndexController controller, NodeList configNodes, Map namespaces) throws DatabaseConfigurationException { + public Object configure(final IndexController controller, final 
NodeList configNodes, final Map namespaces) throws DatabaseConfigurationException { this.controller = controller; // We use a map to store the QNames to be indexed - Map map = new TreeMap(); - Node node; - for(int i = 0; i < configNodes.getLength(); i++) { - node = configNodes.item(i); - if(node.getNodeType() == Node.ELEMENT_NODE && + final Map map = new TreeMap<>(); + for (int i = 0; i < configNodes.getLength(); i++) { + final Node node = configNodes.item(i); + if (node.getNodeType() == Node.ELEMENT_NODE && INDEX_ELEMENT.equals(node.getLocalName())) { - String qname = ((Element)node).getAttribute(QNAME_ATTR); - if (qname == null || qname.length() == 0) + final String qname = ((Element) node).getAttribute(QNAME_ATTR); + if (qname == null || qname.length() == 0) { throw new DatabaseConfigurationException("Configuration error: element " + node.getNodeName() + - " must have an attribute " + QNAME_ATTR); - if (LOG.isTraceEnabled()) + " must have an attribute " + QNAME_ATTR); + } + if (LOG.isTraceEnabled()) { LOG.trace("NGram index defined on " + qname); - NGramIndexConfig config = new NGramIndexConfig(namespaces, qname); + } + final NGramIndexConfig config = new NGramIndexConfig(namespaces, qname); map.put(config.getQName(), config); } } @@ -165,39 +165,42 @@ public Object configure(IndexController controller, NodeList configNodes, Map entry : ngrams.entrySet()) { - QNameTerm key = entry.getKey(); - OccurrenceList occurences = entry.getValue(); + } + + for (final Map.Entry entry : ngrams.entrySet()) { + final QNameTerm key = entry.getKey(); + final OccurrenceList occurences = entry.getValue(); occurences.sort(); os.clear(); os.writeInt(currentDoc.getDocId()); os.writeByte(key.qname.getNameType()); os.writeInt(occurences.getTermCount()); //Mark position - int lenOffset = os.position(); + final int lenOffset = os.position(); //Dummy value : actual one will be written below os.writeFixedInt(0); + NodeId previous = null; for (int m = 0; m < occurences.getSize(); ) { try { 
previous = occurences.getNode(m).write(previous, os); - } catch (IOException e) { + } catch (final IOException e) { LOG.error("IOException while writing nGram index: " + e.getMessage(), e); } - int freq = occurences.getOccurrences(m); + final int freq = occurences.getOccurrences(m); os.writeInt(freq); for (int n = 0; n < freq; n++) { os.writeInt(occurences.getOffset(m + n)); @@ -207,56 +210,54 @@ private void saveIndex() { //Write (variable) length of node IDs + frequency + offsets os.writeFixedInt(lenOffset, os.position() - lenOffset - 4); - ByteArray data = os.data(); - if (data.size() == 0) + final ByteArray data = os.data(); + if (data.size() == 0) { continue; - Lock lock = index.db.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + } - NGramQNameKey value = new NGramQNameKey(currentDoc.getCollection().getId(), key.qname, + try (final ManagedLock dbLock = lockManager.acquireBtreeWriteLock(index.db.getLockName())) { + final NGramQNameKey value = new NGramQNameKey(currentDoc.getCollection().getId(), key.qname, index.getBrokerPool().getSymbols(), key.term); index.db.append(value, data); - } catch (LockException e) { + } catch (final LockException e) { LOG.warn("Failed to acquire lock for file " + FileUtils.fileName(index.db.getFile()), e); - } catch (IOException e) { + } catch (final IOException e) { LOG.warn("IO error for file " + FileUtils.fileName(index.db.getFile()), e); - } catch (ReadOnlyException e) { + } catch (final ReadOnlyException e) { LOG.warn("Read-only error for file " + FileUtils.fileName(index.db.getFile()), e); } finally { - lock.release(LockMode.WRITE_LOCK); os.clear(); } } ngrams.clear(); } - private void dropIndex(ReindexMode mode) { - if (ngrams.size() == 0) + private void dropIndex(final ReindexMode mode) { + if (ngrams.isEmpty()) { return; - for (Map.Entry entry : ngrams.entrySet()) { - QNameTerm key = entry.getKey(); - OccurrenceList occurencesList = entry.getValue(); + } + + for (final Map.Entry entry : ngrams.entrySet()) { + final 
QNameTerm key = entry.getKey(); + final OccurrenceList occurencesList = entry.getValue(); occurencesList.sort(); os.clear(); - Lock lock = index.db.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - - NGramQNameKey value = new NGramQNameKey(currentDoc.getCollection().getId(), key.qname, + try (final ManagedLock dbLock = lockManager.acquireBtreeWriteLock(index.db.getLockName())) { + final NGramQNameKey value = new NGramQNameKey(currentDoc.getCollection().getId(), key.qname, index.getBrokerPool().getSymbols(), key.term); boolean changed = false; os.clear(); - VariableByteInput is = index.db.getAsStream(value); - if (is == null) + final VariableByteInput is = index.db.getAsStream(value); + if (is == null) { continue; + } while (is.available() > 0) { - int storedDocId = is.readInt(); - byte nameType = is.readByte(); - int occurrences = is.readInt(); + final int storedDocId = is.readInt(); + final byte nameType = is.readByte(); + final int occurrences = is.readInt(); //Read (variable) length of node IDs + frequency + offsets - int length = is.readFixedInt(); + final int length = is.readFixedInt(); if (storedDocId != currentDoc.getDocId()) { // data are related to another document: // copy them to any existing data @@ -274,12 +275,12 @@ private void dropIndex(ReindexMode mode) { // removing nodes: need to filter out the node ids to be removed // feed the new list with the GIDs + final OccurrenceList newOccurrences = new OccurrenceList(); NodeId previous = null; - OccurrenceList newOccurrences = new OccurrenceList(); for (int m = 0; m < occurrences; m++) { - NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is); + final NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is); previous = nodeId; - int freq = is.readInt(); + final int freq = is.readInt(); // add the node to the new list if it is not // in the list of removed nodes if (!occurencesList.contains(nodeId)) { @@ -291,20 +292,20 @@ private void 
dropIndex(ReindexMode mode) { } } // append the data from the new list - if(newOccurrences.getSize() > 0) { + if (newOccurrences.getSize() > 0) { //Don't forget this one newOccurrences.sort(); os.writeInt(currentDoc.getDocId()); os.writeByte(nameType); os.writeInt(newOccurrences.getTermCount()); //Mark position - int lenOffset = os.position(); + final int lenOffset = os.position(); //Dummy value : actual one will be written below os.writeFixedInt(0); previous = null; for (int m = 0; m < newOccurrences.getSize(); ) { previous = newOccurrences.getNode(m).write(previous, os); - int freq = newOccurrences.getOccurrences(m); + final int freq = newOccurrences.getOccurrences(m); os.writeInt(freq); for (int n = 0; n < freq; n++) { os.writeInt(newOccurrences.getOffset(m + n)); @@ -325,17 +326,16 @@ private void dropIndex(ReindexMode mode) { index.db.remove(value); } else { if (index.db.put(value, os.data()) == BFile.UNKNOWN_ADDRESS) { - LOG.error("Could not put index data for token '" + key.term + "' in '" + + LOG.error("Could not put index data for token '" + key.term + "' in '" + FileUtils.fileName(index.db.getFile()) + "'"); } } } - } catch (LockException e) { + } catch (final LockException e) { LOG.warn("Failed to acquire lock for file " + FileUtils.fileName(index.db.getFile()), e); - } catch (IOException e) { + } catch (final IOException e) { LOG.warn("IO error for file " + FileUtils.fileName(index.db.getFile()), e); } finally { - lock.release(LockMode.WRITE_LOCK); os.clear(); } } @@ -343,50 +343,44 @@ private void dropIndex(ReindexMode mode) { } @Override - public void removeCollection(Collection collection, DBBroker broker, boolean reindex) { - if (LOG.isDebugEnabled()) + public void removeCollection(final Collection collection, final DBBroker broker, final boolean reindex) { + if (LOG.isDebugEnabled()) { LOG.debug("Dropping NGram index for collection " + collection.getURI()); - final Lock lock = index.db.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + } + 
try (final ManagedLock dbLock = lockManager.acquireBtreeWriteLock(index.db.getLockName())) { // remove generic index - Value value = new NGramQNameKey(collection.getId()); + final Value value = new NGramQNameKey(collection.getId()); index.db.removeAll(null, new IndexQuery(IndexQuery.TRUNC_RIGHT, value)); - } catch (LockException e) { + } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(index.db.getFile()) + "'", e); - } catch (BTreeException e) { + } catch (final BTreeException | IOException e) { LOG.error(e.getMessage(), e); - } catch (IOException e) { - LOG.error(e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } - public NodeSet search(int contextId, DocumentSet docs, List qnames, String query, String ngram, XQueryContext context, NodeSet contextSet, int axis) - throws XPathException { - if (qnames == null || qnames.isEmpty()) - qnames = getDefinedIndexes(context.getBroker(), docs); + public NodeSet search(final int contextId, final DocumentSet docs, final List qnames, final String query, + final String ngram, final XQueryContext context, final NodeSet contextSet, final int axis) + throws XPathException { + final List searchQnames; + if (qnames == null || qnames.isEmpty()) { + searchQnames = getDefinedIndexes(context.getBroker(), docs); + } else { + searchQnames = qnames; + } + final NodeSet result = new ExtArrayNodeSet(docs.getDocumentCount(), 250); - for (Iterator iter = docs.getCollectionIterator(); iter.hasNext();) { + for (final Iterator iter = docs.getCollectionIterator(); iter.hasNext(); ) { final int collectionId = iter.next().getId(); - for (int i = 0; i < qnames.size(); i++) { - QName qname = qnames.get(i); - NGramQNameKey key = new NGramQNameKey(collectionId, qname, index.getBrokerPool().getSymbols(), query); - final Lock lock = index.db.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); - SearchCallback cb = new SearchCallback(contextId, query, ngram, docs, contextSet, context, 
result, axis == NodeSet.ANCESTOR); - int op = query.codePointCount(0, query.length()) < getN() ? IndexQuery.TRUNC_RIGHT : IndexQuery.EQ; + for (final QName qname : searchQnames) { + final NGramQNameKey key = new NGramQNameKey(collectionId, qname, index.getBrokerPool().getSymbols(), query); + try (final ManagedLock dbLock = lockManager.acquireBtreeReadLock(index.db.getLockName())) { + final SearchCallback cb = new SearchCallback(contextId, query, ngram, docs, contextSet, context, result, axis == NodeSet.ANCESTOR); + final int op = query.codePointCount(0, query.length()) < getN() ? IndexQuery.TRUNC_RIGHT : IndexQuery.EQ; index.db.query(new IndexQuery(op, key), cb); - } catch (LockException e) { + } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(index.db.getFile()) + "'", e); - } catch (IOException e) { - LOG.error(e.getMessage() + " in '" + FileUtils.fileName(index.db.getFile()) + "'", e); - } catch (BTreeException e) { + } catch (final IOException | BTreeException e) { LOG.error(e.getMessage() + " in '" + FileUtils.fileName(index.db.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } } @@ -400,81 +394,77 @@ public NodeSet search(int contextId, DocumentSet docs, List qnames, Strin * Check index configurations for all collection in the given DocumentSet and return * a list of QNames, which have indexes defined on them. 
* - * @param broker - * @param docs - * + * @param broker the database broker + * @param docs documents */ - private List getDefinedIndexes(DBBroker broker, DocumentSet docs) { - List indexes = new ArrayList(20); - for (Iterator i = docs.getCollectionIterator(); i.hasNext(); ) { - Collection collection = i.next(); - IndexSpec idxConf = collection.getIndexConfiguration(broker); + private List getDefinedIndexes(final DBBroker broker, final DocumentSet docs) { + final List indexes = new ArrayList<>(20); + for (final Iterator i = docs.getCollectionIterator(); i.hasNext(); ) { + final Collection collection = i.next(); + final IndexSpec idxConf = collection.getIndexConfiguration(broker); if (idxConf != null) { - Map config = (Map) idxConf.getCustomIndexSpec(NGramIndex.ID); + final Map config = (Map) idxConf.getCustomIndexSpec(NGramIndex.ID); if (config != null) { - for (Object name : config.keySet()) { - QName qn = (QName) name; - indexes.add(qn); + for (final Object name : config.keySet()) { + indexes.add((QName) name); } } } } return indexes; } - + @Override - public boolean checkIndex(DBBroker broker) { - return true; + public boolean checkIndex(final DBBroker broker) { + return true; } @Override - public Occurrences[] scanIndex(XQueryContext context, DocumentSet docs, NodeSet contextSet, Map hints) { - List qnames = hints == null ? null : (List)hints.get(QNAMES_KEY); + public Occurrences[] scanIndex(final XQueryContext context, final DocumentSet docs, final NodeSet contextSet, final Map hints) { + List qnames = hints == null ? null : (List) hints.get(QNAMES_KEY); + //Expects a StringValue - Object start = hints == null ? null : hints.get(START_VALUE); + final Object start = hints == null ? null : hints.get(START_VALUE); + //Expects a StringValue - Object end = hints == null ? null : hints.get(END_VALUE); - if (qnames == null || qnames.isEmpty()) + final Object end = hints == null ? 
null : hints.get(END_VALUE); + + if (qnames == null || qnames.isEmpty()) { qnames = getDefinedIndexes(context.getBroker(), docs); + } + //TODO : use the IndexWorker.VALUE_COUNT hint, if present, to limit the number of returned entries - final Lock lock = index.db.getLock(); final IndexScanCallback cb = new IndexScanCallback(docs, contextSet); - for (int q = 0; q < qnames.size(); q++) { - for (Iterator i = docs.getCollectionIterator(); i.hasNext();) { + for (final QName qname : qnames) { + for (final Iterator i = docs.getCollectionIterator(); i.hasNext(); ) { final int collectionId = i.next().getId(); final IndexQuery query; if (start == null) { - Value startRef = new NGramQNameKey(collectionId); + final Value startRef = new NGramQNameKey(collectionId); query = new IndexQuery(IndexQuery.TRUNC_RIGHT, startRef); } else if (end == null) { - Value startRef = new NGramQNameKey(collectionId, qnames.get(q), - index.getBrokerPool().getSymbols(), start.toString().toLowerCase()); + final Value startRef = new NGramQNameKey(collectionId, qname, + index.getBrokerPool().getSymbols(), start.toString().toLowerCase()); query = new IndexQuery(IndexQuery.TRUNC_RIGHT, startRef); } else { - Value startRef = new NGramQNameKey(collectionId, qnames.get(q), - index.getBrokerPool().getSymbols(), start.toString().toLowerCase()); - Value endRef = new NGramQNameKey(collectionId, qnames.get(q), - index.getBrokerPool().getSymbols(), end.toString().toLowerCase()); + final Value startRef = new NGramQNameKey(collectionId, qname, + index.getBrokerPool().getSymbols(), start.toString().toLowerCase()); + final Value endRef = new NGramQNameKey(collectionId, qname, + index.getBrokerPool().getSymbols(), end.toString().toLowerCase()); query = new IndexQuery(IndexQuery.BW, startRef, endRef); } - try { - lock.acquire(LockMode.READ_LOCK); + try (final ManagedLock dbLock = lockManager.acquireBtreeReadLock(index.db.getLockName())) { index.db.query(query, cb); - } catch (LockException e) { + } catch (final 
LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(index.db.getFile()) + "'", e); - } catch (IOException e) { - LOG.error(e.getMessage(), e); - } catch (BTreeException e) { + } catch (final IOException | BTreeException e) { LOG.error(e.getMessage(), e); - } catch (TerminatedException e) { + } catch (final TerminatedException e) { LOG.warn(e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } } - Occurrences[] result = new Occurrences[cb.map.size()]; - return cb.map.values().toArray(result); + return cb.map.values().toArray(new Occurrences[cb.map.size()]); } //This listener is always the same whatever the document and the mode @@ -487,43 +477,50 @@ public StreamListener getListener() { } @Override - public MatchListener getMatchListener(DBBroker broker, NodeProxy proxy) { + public MatchListener getMatchListener(final DBBroker broker, final NodeProxy proxy) { return getMatchListener(broker, proxy, null); } - public MatchListener getMatchListener(DBBroker broker, NodeProxy proxy, NGramMatchCallback callback) { + public MatchListener getMatchListener(final DBBroker broker, final NodeProxy proxy, final NGramMatchCallback callback) { boolean needToFilter = false; Match nextMatch = proxy.getMatches(); while (nextMatch != null) { - if (nextMatch.getIndexId() == org.exist.indexing.ngram.NGramIndex.ID) { + if (nextMatch.getIndexId().equals(org.exist.indexing.ngram.NGramIndex.ID)) { needToFilter = true; break; } nextMatch = nextMatch.getNextMatch(); } - if (!needToFilter) + if (!needToFilter) { return null; - if (matchListener == null) + } + if (matchListener == null) { matchListener = new NGramMatchListener(broker, proxy); - else + } else { matchListener.reset(broker, proxy); + } matchListener.setMatchCallback(callback); return matchListener; } @Override - public IStoredNode getReindexRoot(IStoredNode node, NodePath path, boolean insert, boolean includeSelf) { - if (node.getNodeType() == Node.ATTRIBUTE_NODE) + public 
IStoredNode getReindexRoot(final IStoredNode node, final NodePath path, + final boolean insert, final boolean includeSelf) { + if (node.getNodeType() == Node.ATTRIBUTE_NODE) { return null; - IndexSpec indexConf = node.getOwnerDocument().getCollection().getIndexConfiguration(broker); + } + + final IndexSpec indexConf = node.getOwnerDocument().getCollection().getIndexConfiguration(broker); if (indexConf != null) { - Map config = (Map) indexConf.getCustomIndexSpec(NGramIndex.ID); - if (config == null) + final Map config = (Map) indexConf.getCustomIndexSpec(NGramIndex.ID); + if (config == null) { return null; + } + boolean reindexRequired = false; - int len = node.getNodeType() == Node.ELEMENT_NODE && !includeSelf ? path.length() - 1 : path.length(); + final int len = node.getNodeType() == Node.ELEMENT_NODE && !includeSelf ? path.length() - 1 : path.length(); for (int i = 0; i < len; i++) { - QName qn = path.getComponent(i); + final QName qn = path.getComponent(i); if (config.get(qn) != null) { reindexRequired = true; break; @@ -533,10 +530,12 @@ public IStoredNode getReindexRoot(IStoredNode node, N IStoredNode topMost = null; IStoredNode currentNode = node; while (currentNode != null) { - if (config.get(currentNode.getQName()) != null) - topMost = currentNode; - if (currentNode.getOwnerDocument().getCollection().isTempCollection() && currentNode.getNodeId().getTreeLevel() == 2) + if (config.get(currentNode.getQName()) != null) { + topMost = currentNode; + } + if (currentNode.getOwnerDocument().getCollection().isTempCollection() && currentNode.getNodeId().getTreeLevel() == 2) { break; + } //currentNode = (StoredNode) currentNode.getParentNode(); currentNode = currentNode.getParentStoredNode(); } @@ -551,24 +550,24 @@ public IStoredNode getReindexRoot(IStoredNode node, N * by counting the codepoints, not the characters. The resulting strings may * thus be longer than the ngram size. 
* - * @param text - * + * @param text the text to tokenize */ - public String[] tokenize(String text) { - int len = text.codePointCount(0, text.length()); - int gramSize = index.getN(); - String[] ngrams = new String[len]; + public String[] tokenize(final String text) { + final int len = text.codePointCount(0, text.length()); + final int gramSize = index.getN(); + final String[] ngrams = new String[len]; int next = 0; int pos = 0; - StringBuilder bld = new StringBuilder(gramSize); + final StringBuilder bld = new StringBuilder(gramSize); for (int i = 0; i < len; i++) { bld.setLength(0); int offset = pos; for (int count = 0; count < gramSize && offset < text.length(); count++) { - int codepoint = Character.toLowerCase(text.codePointAt(offset)); + final int codepoint = Character.toLowerCase(text.codePointAt(offset)); offset += Character.charCount(codepoint); - if (count == 0) + if (count == 0) { pos = offset; // advance pos to next character + } bld.appendCodePoint(codepoint); } ngrams[next++] = bld.toString(); @@ -576,7 +575,7 @@ public String[] tokenize(String text) { return ngrams; } - private void indexText(NodeId nodeId, QName qname, String text) { + private void indexText(final NodeId nodeId, final QName qname, final String text) { final String[] ngram = tokenize(text); final int len = text.length(); for (int i = 0, j = 0, cp; i < len; i += Character.charCount(cp), j++) { @@ -593,73 +592,61 @@ private void indexText(NodeId nodeId, QName qname, String text) { } } - private void checkBuffer() { - if (currentChar + index.getN() > buf.length) { - buf = new char[1024]; - Arrays.fill(buf, ' '); - currentChar = 0; - } - } - - private Map config; - private Stack contentStack = null; - @Override - public void setDocument(DocumentImpl document) { - setDocument(document, ReindexMode.UNKNOWN); + public void setDocument(final DocumentImpl document) { + setDocument(document, ReindexMode.UNKNOWN); } @Override - public void setMode(ReindexMode newMode) { + public void 
setMode(final ReindexMode newMode) { // wolf: unnecessary call to setDocument? // setDocument(currentDoc, newMode); mode = newMode; } - + @Override public DocumentImpl getDocument() { - return currentDoc; + return currentDoc; } - + @Override public ReindexMode getMode() { - return mode; - } - + return mode; + } + @Override - public void setDocument(DocumentImpl document, ReindexMode newMode) { - currentDoc = document; + public void setDocument(final DocumentImpl document, final ReindexMode newMode) { + currentDoc = document; //config = null; contentStack = null; - IndexSpec indexConf = document.getCollection().getIndexConfiguration(broker); - if (indexConf != null) + final IndexSpec indexConf = document.getCollection().getIndexConfiguration(broker); + if (indexConf != null) { config = (Map) indexConf.getCustomIndexSpec(org.exist.indexing.ngram.NGramIndex.ID); + } mode = newMode; } @Override - public QueryRewriter getQueryRewriter(XQueryContext context) { + public QueryRewriter getQueryRewriter(final XQueryContext context) { return null; } private class NGramStreamListener extends AbstractStreamListener { - public NGramStreamListener() { - //Nothing to do - } - @Override - public void startElement(Txn transaction, ElementImpl element, NodePath path) { + public void startElement(final Txn transaction, final ElementImpl element, final NodePath path) { if (config != null && config.get(element.getQName()) != null) { - if (contentStack == null) contentStack = new Stack(); - XMLString contentBuf = new XMLString(); + if (contentStack == null) { + contentStack = new ArrayDeque<>(); + } + final XMLString contentBuf = new XMLString(); contentStack.push(contentBuf); } super.startElement(transaction, element, path); } @Override - public void attribute(Txn transaction, AttrImpl attrib, NodePath path) { + public void attribute(final Txn transaction, final AttrImpl attrib, final NodePath path) { if (config != null && config.get(attrib.getQName()) != null) { 
indexText(attrib.getNodeId(), attrib.getQName(), attrib.getValue()); } @@ -667,19 +654,19 @@ public void attribute(Txn transaction, AttrImpl attrib, NodePath path) { } @Override - public void endElement(Txn transaction, ElementImpl element, NodePath path) { + public void endElement(final Txn transaction, final ElementImpl element, final NodePath path) { if (config != null && config.get(element.getQName()) != null) { - XMLString content = contentStack.pop(); + final XMLString content = contentStack.pop(); indexText(element.getNodeId(), element.getQName(), content.toString()); } super.endElement(transaction, element, path); } @Override - public void characters(Txn transaction, AbstractCharacterData text, NodePath path) { + public void characters(final Txn transaction, final AbstractCharacterData text, final NodePath path) { if (contentStack != null && !contentStack.isEmpty()) { - for (XMLString next : contentStack) { - next.append(text.getXMLString()); + for (final Iterator it = contentStack.descendingIterator(); it.hasNext(); ) { + it.next().append(text.getXMLString()); } } super.characters(transaction, text, path); @@ -687,27 +674,26 @@ public void characters(Txn transaction, AbstractCharacterData text, NodePath pat @Override public IndexWorker getWorker() { - return NGramIndexWorker.this; + return NGramIndexWorker.this; } } - private class NGramMatchListener extends AbstractMatchListener { - + private static class NGramMatchListener extends AbstractMatchListener { private Match match; - private Stack offsetStack = null; + private Deque offsetStack = null; private NGramMatchCallback callback = null; @SuppressWarnings("unused") - private NodeProxy root; + private NodeProxy root; - public NGramMatchListener(DBBroker broker, NodeProxy proxy) { + private NGramMatchListener(final DBBroker broker, final NodeProxy proxy) { reset(broker, proxy); } - protected void setMatchCallback(NGramMatchCallback cb) { + void setMatchCallback(final NGramMatchCallback cb) { this.callback 
= cb; } - protected void reset(DBBroker broker, NodeProxy proxy) { + protected void reset(final DBBroker broker, final NodeProxy proxy) { this.root = proxy; this.match = proxy.getMatches(); setNextInChain(null); @@ -721,47 +707,59 @@ protected void reset(DBBroker broker, NodeProxy proxy) { Match nextMatch = this.match; while (nextMatch != null) { if (proxy.getNodeId().isDescendantOf(nextMatch.getNodeId())) { - if (ancestors == null) + if (ancestors == null) { ancestors = new ExtArrayNodeSet(); + } ancestors.add(new NodeProxy(proxy.getOwnerDocument(), nextMatch.getNodeId())); } nextMatch = nextMatch.getNextMatch(); } if (ancestors != null && !ancestors.isEmpty()) { - for (NodeProxy p : ancestors) { + for (final NodeProxy p : ancestors) { + + final int thisLevel = p.getNodeId().getTreeLevel(); + int startOffset = 0; try { - XMLStreamReader reader = broker.getXMLStreamReader(p, false); + final XMLStreamReader reader = broker.getXMLStreamReader(p, false); while (reader.hasNext()) { - int ev = reader.next(); - NodeId nodeId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); - if (nodeId.equals(proxy.getNodeId())) + final int ev = reader.next(); + + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + if (otherId.equals(proxy.getNodeId())) { break; - if (ev == XMLStreamConstants.CHARACTERS) + } + final int otherLevel = otherId.getTreeLevel(); + + if (ev == XMLStreamConstants.CHARACTERS) { startOffset += reader.getText().length(); + } else if (ev == XMLStreamConstants.END_ELEMENT && otherLevel == thisLevel) { + // finished element... 
+ break; // exit-while + } } - } catch (IOException e) { - LOG.warn("Problem found while serializing XML: " + e.getMessage(), e); - } catch (XMLStreamException e) { + } catch (final IOException | XMLStreamException e) { LOG.warn("Problem found while serializing XML: " + e.getMessage(), e); } - if (offsetStack == null) - offsetStack = new Stack(); + if (offsetStack == null) { + offsetStack = new ArrayDeque<>(); + } offsetStack.push(new NodeOffset(p.getNodeId(), startOffset)); } } } @Override - public void startElement(QName qname, AttrList attribs) throws SAXException { + public void startElement(final QName qname, final AttrList attribs) throws SAXException { Match nextMatch = match; // check if there are any matches in the current element // if yes, push a NodeOffset object to the stack to track // the node contents while (nextMatch != null) { if (nextMatch.getNodeId().equals(getCurrentNode().getNodeId())) { - if (offsetStack == null) - offsetStack = new Stack(); + if (offsetStack == null) { + offsetStack = new ArrayDeque<>(); + } offsetStack.push(new NodeOffset(nextMatch.getNodeId())); break; } @@ -771,7 +769,7 @@ public void startElement(QName qname, AttrList attribs) throws SAXException { } @Override - public void endElement(QName qname) throws SAXException { + public void endElement(final QName qname) throws SAXException { Match nextMatch = match; // check if we need to pop the stack while (nextMatch != null) { @@ -785,26 +783,26 @@ public void endElement(QName qname) throws SAXException { } @Override - public void characters(CharSequence seq) throws SAXException { + public void characters(final CharSequence seq) throws SAXException { List offsets = null; // a list of offsets to process if (offsetStack != null) { // walk through the stack to find matches which start in // the current string of text - for (int i = 0; i < offsetStack.size(); i++) { - NodeOffset no = offsetStack.get(i); - int end = no.offset + seq.length(); + for (final Iterator it = 
offsetStack.descendingIterator(); it.hasNext(); ) { + final NodeOffset no = it.next(); + final int end = no.offset + seq.length(); // scan all matches Match next = match; while (next != null) { - if (next.getIndexId() == NGramIndex.ID && next.getNodeId().equals(no.nodeId)) { - int freq = next.getFrequency(); + if (next.getIndexId().equals(NGramIndex.ID) && next.getNodeId().equals(no.nodeId)) { + final int freq = next.getFrequency(); for (int j = 0; j < freq; j++) { - Match.Offset offset = next.getOffset(j); + final Match.Offset offset = next.getOffset(j); if (offset.getOffset() < end && - offset.getOffset() + offset.getLength() > no.offset) { + offset.getOffset() + offset.getLength() > no.offset) { // add it to the list to be processed if (offsets == null) { - offsets = new ArrayList(4); + offsets = new ArrayList<>(4); } // adjust the offset and add it to the list int start = offset.getOffset() - no.offset; @@ -813,8 +811,9 @@ public void characters(CharSequence seq) throws SAXException { len = len - Math.abs(start); start = 0; } - if (start + len > seq.length()) + if (start + len > seq.length()) { len = seq.length() - start; + } offsets.add(new Match.Offset(start, len)); } } @@ -828,9 +827,9 @@ public void characters(CharSequence seq) throws SAXException { // now print out the text, marking all matches with a match element if (offsets != null) { FastQSort.sort(offsets, 0, offsets.size() - 1); - String s = seq.toString(); + final String s = seq.toString(); int pos = 0; - for (Match.Offset offset : offsets) { + for (final Match.Offset offset : offsets) { if (offset.getOffset() > pos) { super.characters(s.substring(pos, pos + (offset.getOffset() - pos))); } @@ -842,7 +841,7 @@ public void characters(CharSequence seq) throws SAXException { try { callback.match(nextListener, s.substring(offset.getOffset(), offset.getOffset() + offset.getLength()), new NodeProxy(getCurrentNode())); - } catch (XPathException e) { + } catch (final XPathException e) { throw new 
SAXException("An error occurred while calling match callback: " + e.getMessage(), e); } } @@ -857,47 +856,46 @@ public void characters(CharSequence seq) throws SAXException { } private static class NodeOffset { - NodeId nodeId; - int offset = 0; + private final NodeId nodeId; + private int offset; - public NodeOffset(NodeId nodeId) { - this.nodeId = nodeId; + private NodeOffset(final NodeId nodeId) { + this(nodeId, 0); } - public NodeOffset(NodeId nodeId, int offset) { + private NodeOffset(final NodeId nodeId, final int offset) { this.nodeId = nodeId; this.offset = offset; } } - - private static class QNameTerm implements Comparable { - QName qname; - String term; + private static class QNameTerm implements Comparable { + private final QName qname; + private final String term; - public QNameTerm(QName qname, String term) { + private QNameTerm(final QName qname, final String term) { this.qname = qname; this.term = term; } @Override - public int compareTo(QNameTerm other) { - int cmp = qname.compareTo(other.qname); - if (cmp == 0) + public int compareTo(final QNameTerm other) { + final int cmp = qname.compareTo(other.qname); + if (cmp == 0) { return term.compareTo(other.term); + } return cmp; } } private static class NGramQNameKey extends Value { - private static final int COLLECTION_ID_OFFSET = 1; private static final int NAMETYPE_OFFSET = COLLECTION_ID_OFFSET + Collection.LENGTH_COLLECTION_ID; // 5 private static final int NAMESPACE_OFFSET = NAMETYPE_OFFSET + ElementValue.LENGTH_TYPE; // 6 private static final int LOCALNAME_OFFSET = NAMESPACE_OFFSET + SymbolTable.LENGTH_NS_URI; // 8 private static final int NGRAM_OFFSET = LOCALNAME_OFFSET + SymbolTable.LENGTH_LOCAL_NAME; // 10 - public NGramQNameKey(int collectionId) { + NGramQNameKey(final int collectionId) { len = Collection.LENGTH_COLLECTION_ID + 1; data = new byte[len]; data[0] = IDX_QNAME; @@ -918,7 +916,7 @@ public NGramQNameKey(int collectionId, QName qname, SymbolTable symbols) { } */ - public 
NGramQNameKey(int collectionId, QName qname, SymbolTable symbols, String ngram) { + NGramQNameKey(final int collectionId, final QName qname, final SymbolTable symbols, final String ngram) { len = UTF8.encoded(ngram) + NGRAM_OFFSET; data = new byte[len]; data[0] = IDX_QNAME; @@ -933,8 +931,8 @@ public NGramQNameKey(int collectionId, QName qname, SymbolTable symbols, String } private final class SearchCallback implements BTreeCallback { - private final int contextId; + @SuppressWarnings("unused") private final String query; private final String ngram; private final DocumentSet docs; @@ -943,8 +941,9 @@ private final class SearchCallback implements BTreeCallback { private final NodeSet resultSet; private final boolean returnAncestor; - public SearchCallback(int contextId, String query, String ngram, DocumentSet docs, NodeSet contextSet, - XQueryContext context, NodeSet result, boolean returnAncestor) { + SearchCallback(final int contextId, final String query, final String ngram, final DocumentSet docs, + final NodeSet contextSet, final XQueryContext context, final NodeSet result, + final boolean returnAncestor) { this.contextId = contextId; this.query = query; this.ngram = ngram; @@ -956,45 +955,49 @@ public SearchCallback(int contextId, String query, String ngram, DocumentSet doc } @Override - public boolean indexInfo(Value key, long pointer) throws TerminatedException { - String ngram = new String(key.getData(), NGramQNameKey.NGRAM_OFFSET, key.getLength() - NGramQNameKey.NGRAM_OFFSET, UTF_8); + public boolean indexInfo(final Value key, final long pointer) throws TerminatedException { + final String ngram = new String(key.getData(), NGramQNameKey.NGRAM_OFFSET, key.getLength() - NGramQNameKey.NGRAM_OFFSET, UTF_8); - VariableByteInput is; try { - is = index.db.getAsStream(pointer); + final VariableByteInput is = index.db.getAsStream(pointer); //Does the token already has data in the index ? 
- if (is == null) + if (is == null) { return true; + } + while (is.available() > 0) { - int storedDocId = is.readInt(); - byte nameType = is.readByte(); - int occurrences = is.readInt(); + final int storedDocId = is.readInt(); + final byte nameType = is.readByte(); + final int occurrences = is.readInt(); //Read (variable) length of node IDs + frequency + offsets - int length = is.readFixedInt(); - DocumentImpl storedDocument = docs.getDoc(storedDocId); + final int length = is.readFixedInt(); + final DocumentImpl storedDocument = docs.getDoc(storedDocId); + //Exit if the document is not concerned if (storedDocument == null) { is.skipBytes(length); continue; } + NodeId previous = null; for (int m = 0; m < occurrences; m++) { - NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is); + final NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is); previous = nodeId; - int freq = is.readInt(); - NodeProxy nodeProxy = new NodeProxy(storedDocument, nodeId, nameTypeToNodeType(nameType)); + final int freq = is.readInt(); + final NodeProxy nodeProxy = new NodeProxy(storedDocument, nodeId, nameTypeToNodeType(nameType)); // if a context set is specified, we can directly check if the // matching node is a descendant of one of the nodes // in the context set. 
if (contextSet != null) { - int sizeHint = contextSet.getSizeHint(storedDocument); + final int sizeHint = contextSet.getSizeHint(storedDocument); if (returnAncestor) { - NodeProxy parentNode = contextSet.parentWithChild(nodeProxy, false, true, NodeProxy.UNKNOWN_NODE_LEVEL); + final NodeProxy parentNode = contextSet.parentWithChild(nodeProxy, false, true, NodeProxy.UNKNOWN_NODE_LEVEL); if (parentNode != null) { readMatches(ngram, is, nodeId, freq, parentNode); resultSet.add(parentNode, sizeHint); - } else + } else { is.skip(freq); + } } else { readMatches(ngram, is, nodeId, freq, nodeProxy); resultSet.add(nodeProxy, sizeHint); @@ -1008,14 +1011,14 @@ public boolean indexInfo(Value key, long pointer) throws TerminatedException { } } return false; - } catch (IOException e) { + } catch (final IOException e) { LOG.error(e.getMessage(), e); return true; } } private short nameTypeToNodeType(final byte nameType) { - switch(nameType) { + switch (nameType) { case ElementValue.ELEMENT: return Node.ELEMENT_NODE; @@ -1028,15 +1031,18 @@ private short nameTypeToNodeType(final byte nameType) { } } - private void readMatches(String current, VariableByteInput is, NodeId nodeId, int freq, NodeProxy parentNode) throws IOException { + private void readMatches(final String current, final VariableByteInput is, final NodeId nodeId, final int freq, + final NodeProxy parentNode) throws IOException { int diff = 0; - if (current.length() > ngram.length()) + if (current.length() > ngram.length()) { diff = current.lastIndexOf(ngram); - Match match = new NGramMatch(contextId, nodeId, ngram, freq); + } + final Match match = new NGramMatch(contextId, nodeId, ngram, freq); for (int n = 0; n < freq; n++) { int offset = is.readInt(); - if (diff > 0) + if (diff > 0) { offset += diff; + } match.addOffset(offset, ngram.length()); } parentNode.addMatch(match); @@ -1044,58 +1050,53 @@ private void readMatches(String current, VariableByteInput is, NodeId nodeId, in } private final class IndexScanCallback 
implements BTreeCallback { - private final DocumentSet docs; private NodeSet contextSet; - private final Map map = new TreeMap(); - - //IndexScanCallback(DocumentSet docs) { - //this.docs = docs; - //} + private final Map map = new TreeMap<>(); - IndexScanCallback(DocumentSet docs, NodeSet contextSet) { + IndexScanCallback(final DocumentSet docs, final NodeSet contextSet) { this.docs = docs; this.contextSet = contextSet; } - /* (non-Javadoc) - * @see org.dbxml.core.filer.BTreeCallback#indexInfo(org.dbxml.core.data.Value, long) - */ @Override - public boolean indexInfo(Value key, long pointer) throws TerminatedException { - String term = new String(key.getData(), NGramQNameKey.NGRAM_OFFSET, key.getLength() - NGramQNameKey.NGRAM_OFFSET, UTF_8); + public boolean indexInfo(final Value key, final long pointer) { + final String term = new String(key.getData(), NGramQNameKey.NGRAM_OFFSET, key.getLength() - NGramQNameKey.NGRAM_OFFSET, UTF_8); - VariableByteInput is; + final VariableByteInput is; try { is = index.db.getAsStream(pointer); } catch (IOException e) { LOG.error(e.getMessage(), e); return true; } + try { while (is.available() > 0) { boolean docAdded = false; - int storedDocId = is.readInt(); - byte nameType = is.readByte(); - int occurrences = is.readInt(); + final int storedDocId = is.readInt(); + @SuppressWarnings("unused") final byte nameType = is.readByte(); + final int occurrences = is.readInt(); //Read (variable) length of node IDs + frequency + offsets - int length = is.readFixedInt(); - DocumentImpl storedDocument = docs.getDoc(storedDocId); + final int length = is.readFixedInt(); + final DocumentImpl storedDocument = docs.getDoc(storedDocId); + //Exit if the document is not concerned if (storedDocument == null) { is.skipBytes(length); continue; } + NodeId previous = null; for (int m = 0; m < occurrences; m++) { - NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is); + final NodeId nodeId = 
index.getBrokerPool().getNodeFactory().createFromStream(previous, is); previous = nodeId; - int freq = is.readInt(); + final int freq = is.readInt(); is.skip(freq); boolean include = true; //TODO : revisit if (contextSet != null) { - NodeProxy parentNode = contextSet.parentWithChild(storedDocument, nodeId, false, true); + final NodeProxy parentNode = contextSet.parentWithChild(storedDocument, nodeId, false, true); include = (parentNode != null); } if (include) { @@ -1112,11 +1113,10 @@ public boolean indexInfo(Value key, long pointer) throws TerminatedException { } } } - } catch(IOException e) { + } catch (final IOException e) { LOG.error(e.getMessage() + " in '" + FileUtils.fileName(index.db.getFile()) + "'", e); } return true; } } - } diff --git a/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/AddMatch.java b/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/AddMatch.java index e6c5c0cd228..5ed2cfa64e9 100644 --- a/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/AddMatch.java +++ b/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/AddMatch.java @@ -64,26 +64,37 @@ public AddMatch(XQueryContext context) { } @Override - public Sequence eval(Sequence[] args, Sequence contextSequence) + public Sequence eval(final Sequence[] args, final Sequence contextSequence) throws XPathException { - if (args[0].isEmpty()) + if (args[0].isEmpty()) { return args[0]; + } - NodeValue nv = (NodeValue) args[0].itemAt(0); - if (!nv.isPersistentSet()) + final NodeValue nv = (NodeValue) args[0].itemAt(0); + if (!nv.isPersistentSet()) { return nv; - NodeProxy node = (NodeProxy) nv; + } + final NodeProxy node = (NodeProxy) nv; + final int thisLevel = node.getNodeId().getTreeLevel(); String matchStr = null; NodeId nodeId = null; try { for (final XMLStreamReader reader = context.getBroker().getXMLStreamReader(node, true); reader.hasNext(); ) { - int status = reader.next(); + final int status = reader.next(); if (status == 
XMLStreamConstants.CHARACTERS) { matchStr = reader.getText(); nodeId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); break; } + + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + + if (status == XMLStreamConstants.END_ELEMENT && otherLevel == thisLevel) { + // finished the level... + break; // exit-for + } } } catch (IOException e) { throw new XPathException(this, ErrorCodes.FOER0000, "Exception caught while reading document"); diff --git a/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/NGramSearch.java b/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/NGramSearch.java index b2f60cf7ac7..ba5c0562f2f 100644 --- a/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/NGramSearch.java +++ b/extensions/indexes/ngram/src/org/exist/xquery/modules/ngram/NGramSearch.java @@ -403,7 +403,7 @@ private static List tokenizeQuery(final String query) throws XPathExcept token.append(query.substring(i, i + 2)); i++; } else { - throw new XPathException("err:FTDY0020: query string is terminated by an unescaped backslash"); + throw new XPathException(ErrorCodes.FTDY0020, "Query string is terminated by an unescaped backslash"); } } else { if (currentChar == '.') { diff --git a/extensions/indexes/ngram/test/src/org/exist/indexing/ngram/CustomIndexTest.java b/extensions/indexes/ngram/test/src/org/exist/indexing/ngram/CustomIndexTest.java index 4a1e3f43341..4b3cfd4b87c 100644 --- a/extensions/indexes/ngram/test/src/org/exist/indexing/ngram/CustomIndexTest.java +++ b/extensions/indexes/ngram/test/src/org/exist/indexing/ngram/CustomIndexTest.java @@ -450,14 +450,14 @@ public void xupdateRename() throws EXistException, LockException, XPathException } @Test - public void reindex() throws PermissionDeniedException, XPathException, URISyntaxException, EXistException, IOException { + public void reindex() throws PermissionDeniedException, 
XPathException, URISyntaxException, EXistException, IOException, LockException { final BrokerPool pool = existEmbeddedServer.getBrokerPool(); final TransactionManager transact = pool.getTransactionManager(); try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); final Txn transaction = transact.beginTransaction()) { //Doh ! This reindexes *all* the collections for *every* index - broker.reindexCollection(XmldbURI.xmldbUriFor("/db")); + broker.reindexCollection(transaction, XmldbURI.xmldbUriFor("/db")); checkIndex(broker, docs, "cha", 1); checkIndex(broker, docs, "le8", 1); @@ -496,10 +496,10 @@ public void dropIndex() throws EXistException, PermissionDeniedException, XPathE checkIndex(broker, docs, "cha", 1); checkIndex(broker, docs, "le8", 1); - Collection root = broker.openCollection(TestConstants.TEST_COLLECTION_URI, LockMode.WRITE_LOCK); - assertNotNull(root); - - root.removeXMLResource(transaction, broker, XmldbURI.create("test_string.xml")); + try(final Collection root = broker.openCollection(TestConstants.TEST_COLLECTION_URI, LockMode.WRITE_LOCK)) { + assertNotNull(root); + root.removeXMLResource(transaction, broker, XmldbURI.create("test_string.xml")); + } checkIndex(broker, docs, "cha", 0); diff --git a/extensions/indexes/range/src/org/exist/indexing/range/RangeIndexWorker.java b/extensions/indexes/range/src/org/exist/indexing/range/RangeIndexWorker.java index f0eda21d6dc..a8a1acb4c70 100644 --- a/extensions/indexes/range/src/org/exist/indexing/range/RangeIndexWorker.java +++ b/extensions/indexes/range/src/org/exist/indexing/range/RangeIndexWorker.java @@ -101,7 +101,7 @@ public class RangeIndexWorker implements OrderedValuesIndex, QNamedKeysIndex { private Set nodesToRemove = null; private RangeIndexConfig config = null; private RangeIndexListener listener = new RangeIndexListener(); - private Stack contentStack = null; + private Deque contentStack = null; private int cachedNodesSize = 0; private int 
maxCachedNodesSize = 4096 * 1024; @@ -797,14 +797,16 @@ private class RangeIndexListener extends AbstractStreamListener { @Override public void startElement(Txn transaction, ElementImpl element, NodePath path) { if (mode == ReindexMode.STORE && config != null) { - if (contentStack != null && !contentStack.isEmpty()) { - for (TextCollector extractor : contentStack) { + if (contentStack != null) { + for (final TextCollector extractor : contentStack) { extractor.startElement(element.getQName(), path); } } Iterator configIter = config.getConfig(path); if (configIter != null) { - if (contentStack == null) contentStack = new Stack(); + if (contentStack == null) { + contentStack = new ArrayDeque<>(); + } while (configIter.hasNext()) { RangeIndexConfigElement configuration = configIter.next(); if (configuration.match(path)) { @@ -821,8 +823,8 @@ public void startElement(Txn transaction, ElementImpl element, NodePath path) { @Override public void attribute(Txn transaction, AttrImpl attrib, NodePath path) { path.addComponent(attrib.getQName()); - if (contentStack != null && !contentStack.isEmpty()) { - for (TextCollector collector : contentStack) { + if (contentStack != null) { + for (final TextCollector collector : contentStack) { collector.attribute(attrib, path); } } @@ -849,8 +851,8 @@ public void attribute(Txn transaction, AttrImpl attrib, NodePath path) { @Override public void endElement(Txn transaction, ElementImpl element, NodePath path) { if (config != null) { - if (mode == ReindexMode.STORE && contentStack != null && !contentStack.isEmpty()) { - for (TextCollector extractor : contentStack) { + if (mode == ReindexMode.STORE && contentStack != null) { + for (final TextCollector extractor : contentStack) { extractor.endElement(element.getQName(), path); } } @@ -863,7 +865,7 @@ public void endElement(Txn transaction, ElementImpl element, NodePath path) { RangeIndexConfigElement configuration = configIter.next(); boolean match = configuration.match(path); if (match) { - 
TextCollector collector = contentStack.pop(); + final TextCollector collector = contentStack.pop(); match = collector instanceof ComplexTextCollector ? match && ((ComplexTextCollector)collector).getConfig().matchConditions(element) : match; @@ -878,8 +880,8 @@ public void endElement(Txn transaction, ElementImpl element, NodePath path) { @Override public void characters(Txn transaction, AbstractCharacterData text, NodePath path) { - if (contentStack != null && !contentStack.isEmpty()) { - for (TextCollector collector : contentStack) { + if (contentStack != null) { + for (final TextCollector collector : contentStack) { collector.characters(text, path); } } diff --git a/extensions/indexes/range/test/src/xquery/field-type.xql b/extensions/indexes/range/test/src/xquery/field-type.xql index a70f85d9b70..0f7b03db3ec 100644 --- a/extensions/indexes/range/test/src/xquery/field-type.xql +++ b/extensions/indexes/range/test/src/xquery/field-type.xql @@ -42,17 +42,17 @@ function rt:setup() { xmldb:create-collection("/db/system/config/db", $rt:INDEXED_COLLECTION_NAME), xmldb:store("/db/system/config/db/" || $rt:INDEXED_COLLECTION_NAME, "collection.xconf", $rt:COLLECTION_CONFIG), xmldb:create-collection("/db", $rt:INDEXED_COLLECTION_NAME), - xmldb:store($rt:INDEXED_COLLECTION_NAME, "test.xml", $rt:DATA), + xmldb:store($rt:INDEXED_COLLECTION, "test.xml", $rt:DATA), xmldb:create-collection("/db", $rt:NON_INDEXED_COLLECTION_NAME), - xmldb:store($rt:NON_INDEXED_COLLECTION_NAME, "test.xml", $rt:DATA) + xmldb:store($rt:NON_INDEXED_COLLECTION, "test.xml", $rt:DATA) }; declare %test:tearDown function rt:cleanup() { - xmldb:remove($rt:INDEXED_COLLECTION_NAME), + xmldb:remove($rt:INDEXED_COLLECTION), xmldb:remove("/db/system/config/db/" || $rt:INDEXED_COLLECTION_NAME), - xmldb:remove($rt:NON_INDEXED_COLLECTION_NAME) + xmldb:remove($rt:NON_INDEXED_COLLECTION) }; declare function rt:get-note($div as element(div)) as element(note) { @@ -65,7 +65,7 @@ declare function rt:get-note($div as 
element(div)) as element(note) { declare %test:assertXPath('/@type = "summary"') function rt:test-indexed-collection() { - rt:get-note(collection($rt:INDEXED_COLLECTION_NAME)//div) + rt:get-note(collection($rt:INDEXED_COLLECTION)//div) }; (:~ @@ -74,5 +74,5 @@ function rt:test-indexed-collection() { declare %test:assertXPath('/@type = "summary"') function rt:test-non-indexed-collection() { - rt:get-note(collection($rt:NON_INDEXED_COLLECTION_NAME)//div) + rt:get-note(collection($rt:NON_INDEXED_COLLECTION)//div) }; \ No newline at end of file diff --git a/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndex.java b/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndex.java index 556735f21bd..a8794a9e75e 100644 --- a/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndex.java +++ b/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndex.java @@ -9,8 +9,8 @@ import org.exist.storage.DBBroker; import org.exist.storage.btree.DBException; import org.exist.storage.index.BTreeStore; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.util.DatabaseConfigurationException; import org.exist.util.FileUtils; import org.exist.util.LockException; @@ -18,6 +18,7 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.file.Path; +import java.util.concurrent.locks.ReentrantLock; /** * SortIndex helps to improve the performance of 'order by' expressions in XQuery. 
@@ -38,9 +39,6 @@ public class SortIndex extends AbstractIndex implements RawBackupSupport { protected static final Logger LOG = LogManager.getLogger(SortIndex.class); protected BTreeStore btree; - public SortIndex() { - } - @Override public void open() throws DatabaseConfigurationException { final Path file = getDataDir().resolve(FILE_NAME); @@ -64,9 +62,8 @@ public void close() throws DBException { public void sync() throws DBException { if (btree == null) return; - final Lock lock = btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + final LockManager lockManager = pool.getLockManager(); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(btree.getLockName())) { btree.flush(); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(btree.getFile()) + "'", e); @@ -74,8 +71,6 @@ public void sync() throws DBException { } catch (final DBException e) { LOG.error(e.getMessage(), e); //TODO : throw an exception ? -pb - } finally { - lock.release(LockMode.WRITE_LOCK); } } diff --git a/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndexWorker.java b/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndexWorker.java index 302249c4ee3..0d0ec4c2ab3 100644 --- a/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndexWorker.java +++ b/extensions/indexes/sort/src/org/exist/indexing/sort/SortIndexWorker.java @@ -14,8 +14,8 @@ import org.exist.storage.btree.BTreeException; import org.exist.storage.btree.IndexQuery; import org.exist.storage.btree.Value; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.util.*; import org.exist.xquery.QueryRewriter; import org.exist.xquery.TerminatedException; @@ -26,15 +26,18 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.concurrent.locks.ReentrantLock; public class 
SortIndexWorker implements IndexWorker { private ReindexMode mode = ReindexMode.STORE; private DocumentImpl document = null; private SortIndex index; + private final LockManager lockManager; public SortIndexWorker(final SortIndex index) { this.index = index; + this.lockManager = index.getBrokerPool().getLockManager(); } public void setDocument(final DocumentImpl doc, final ReindexMode mode) { @@ -76,9 +79,7 @@ public void flush() { public void createIndex(final String name, final List items) throws EXistException, LockException { // get an id for the new index final short id = getOrRegisterId(name); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { long idx = 0; for (final SortItem item : items) { final byte[] key = computeKey(id, item.getNode()); @@ -86,8 +87,6 @@ public void createIndex(final String name, final List items) throws EX } } catch (final LockException | IOException | BTreeException e) { throw new EXistException("Exception caught while creating sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } @@ -107,15 +106,11 @@ public boolean hasIndex(final String name) throws EXistException, LockException */ public long getIndex(final String name, final NodeProxy proxy) throws EXistException, LockException { final short id = getId(name); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeReadLock(index.btree.getLockName())) { final byte[] key = computeKey(id, proxy); return index.btree.findValue(new Value(key)); } catch (final LockException | IOException | BTreeException e) { throw new EXistException("Exception caught while reading sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } @@ -128,9 +123,7 @@ public long getIndex(final String name, final NodeProxy 
proxy) throws EXistExcep */ public void remove(final String name) throws EXistException, LockException { final short id = getId(name); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { final byte[] fromKey = computeKey(id); final byte[] toKey = computeKey((short) (id + 1)); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); @@ -139,8 +132,6 @@ public void remove(final String name) throws EXistException, LockException { removeId(name); } catch (final BTreeException | TerminatedException | IOException e) { throw new EXistException("Exception caught while deleting sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } @@ -150,17 +141,13 @@ public void remove(final String name, final DocumentImpl doc) throws EXistExcept } private void remove(final DocumentImpl doc, final short id) throws LockException, EXistException { - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { final byte[] fromKey = computeKey(id, doc.getDocId()); final byte[] toKey = computeKey(id, doc.getDocId() + 1); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); index.btree.remove(query, null); } catch (final BTreeException | TerminatedException | IOException e) { throw new EXistException("Exception caught while deleting sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } @@ -170,9 +157,7 @@ public void remove(final DocumentImpl doc) { final byte[] fromKey = new byte[]{1}; final byte[] endKey = new byte[]{2}; - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = 
lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(endKey)); final FindIdCallback callback = new FindIdCallback(true); index.btree.query(query, callback); @@ -183,8 +168,6 @@ public void remove(final DocumentImpl doc) { } catch (final BTreeException | EXistException | LockException | TerminatedException | IOException e) { SortIndex.LOG.debug("Exception caught while reading sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } @@ -202,17 +185,13 @@ private short getOrRegisterId(final String name) throws EXistException, LockExce final byte[] fromKey = {1}; final byte[] endKey = {2}; final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(endKey)); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { final FindIdCallback callback = new FindIdCallback(false); index.btree.query(query, callback); id = (short) (callback.max + 1); registerId(id, name); } catch (final IOException | TerminatedException | BTreeException e) { throw new EXistException("Exception caught while reading sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } return id; @@ -222,14 +201,10 @@ private void registerId(final short id, final String name) throws EXistException final byte[] key = new byte[1 + UTF8.encoded(name)]; key[0] = 1; UTF8.encode(name, key, 1); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { index.btree.addValue(new Value(key), id); } catch (final LockException | IOException | BTreeException e) { throw new EXistException("Exception caught while reading sort index: " + e.getMessage(), e); - } finally { - 
lock.release(LockMode.READ_LOCK); } } @@ -237,14 +212,10 @@ private void removeId(final String name) throws EXistException { final byte[] key = new byte[1 + UTF8.encoded(name)]; key[0] = 1; UTF8.encode(name, key, 1); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { index.btree.removeValue(new Value(key)); } catch (final LockException | IOException | BTreeException e) { throw new EXistException("Exception caught while reading sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } @@ -252,14 +223,10 @@ private short getId(final String name) throws EXistException, LockException { final byte[] key = new byte[1 + UTF8.encoded(name)]; key[0] = 1; UTF8.encode(name, key, 1); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = lockManager.acquireBtreeReadLock(index.btree.getLockName())) { return (short) index.btree.findValue(new Value(key)); } catch (final BTreeException | IOException e) { throw new EXistException("Exception caught while reading sort index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } diff --git a/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndex.java b/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndex.java index fa1cc764715..51443d0cca3 100644 --- a/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndex.java +++ b/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndex.java @@ -82,10 +82,6 @@ public interface SpatialOperator { protected int max_docs_in_context_to_refine_query = 10; - public AbstractGMLJDBCIndex() { - //Nothing to do here - } - @Override public void configure(BrokerPool pool, Path dataDir, Element config) throws DatabaseConfigurationException { super.configure(pool, 
dataDir, config); diff --git a/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndexWorker.java b/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndexWorker.java index ffa1da252a3..6247a663d78 100644 --- a/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndexWorker.java +++ b/extensions/indexes/spatial/src/org/exist/indexing/spatial/AbstractGMLJDBCIndexWorker.java @@ -86,9 +86,10 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.Iterator; import java.util.Map; -import java.util.Stack; import java.util.TreeMap; public abstract class AbstractGMLJDBCIndexWorker implements IndexWorker { @@ -703,8 +704,8 @@ public Geometry transformGeometry(Geometry geometry, String sourceCRS, String ta private class GMLStreamListener extends AbstractStreamListener { - Stack srsNamesStack = new Stack(); - ElementImpl deferredElement; + private final Deque srsNamesStack = new ArrayDeque(); + private ElementImpl deferredElement; @Override public IndexWorker getWorker() { diff --git a/extensions/indexes/spatial/src/org/exist/indexing/spatial/GMLHSQLIndex.java b/extensions/indexes/spatial/src/org/exist/indexing/spatial/GMLHSQLIndex.java index f6fad448b79..142074fe73f 100644 --- a/extensions/indexes/spatial/src/org/exist/indexing/spatial/GMLHSQLIndex.java +++ b/extensions/indexes/spatial/src/org/exist/indexing/spatial/GMLHSQLIndex.java @@ -58,10 +58,6 @@ public class GMLHSQLIndex extends AbstractGMLJDBCIndex implements RawBackupSuppo private DBBroker connectionOwner = null; private long connectionTimeout = 100000L; - public GMLHSQLIndex() { - //Nothing to do ;-) - } - @Override public void configure(BrokerPool pool, Path dataDir, Element config) throws DatabaseConfigurationException { super.configure(pool, dataDir, config); diff --git 
a/extensions/metadata/interface/src/main/java/org/exist/storage/md/CollectionEvents.java b/extensions/metadata/interface/src/main/java/org/exist/storage/md/CollectionEvents.java index a7b1b5c54b4..8140108ee3e 100644 --- a/extensions/metadata/interface/src/main/java/org/exist/storage/md/CollectionEvents.java +++ b/extensions/metadata/interface/src/main/java/org/exist/storage/md/CollectionEvents.java @@ -41,7 +41,7 @@ public class CollectionEvents implements CollectionTrigger { @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { + public void configure(DBBroker broker, Txn txn, Collection parent, Map> parameters) throws TriggerException { } @Override @@ -104,18 +104,15 @@ private void deleteCollectionRecursive(DBBroker broker, Collection collection) t final XmldbURI uri = collection.getURI(); for(Iterator i = collection.collectionIterator(broker); i.hasNext(); ) { - final XmldbURI childName = i.next(); - //TODO : resolve URIs !!! name.resolve(childName) - final Collection child = broker.openCollection(uri.append(childName), LockMode.NO_LOCK); - if(child == null) { + final XmldbURI childName = i.next(); + //TODO : resolve URIs !!! 
name.resolve(childName) + try (final Collection child = broker.openCollection(uri.append(childName), LockMode.NO_LOCK)) { + if (child == null) { // LOG.warn("Child collection " + childName + " not found"); - } else { - try { - deleteCollectionRecursive(broker, child); - } finally { - child.release(LockMode.NO_LOCK); - } - } + } else { + deleteCollectionRecursive(broker, child); + } + } } } diff --git a/extensions/metadata/interface/src/main/java/org/exist/storage/md/DocumentEvents.java b/extensions/metadata/interface/src/main/java/org/exist/storage/md/DocumentEvents.java index 61d57d6dc18..94f524634ec 100644 --- a/extensions/metadata/interface/src/main/java/org/exist/storage/md/DocumentEvents.java +++ b/extensions/metadata/interface/src/main/java/org/exist/storage/md/DocumentEvents.java @@ -106,6 +106,6 @@ public void afterUpdateDocumentMetadata(DBBroker broker, Txn txn, DocumentImpl d } @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { + public void configure(DBBroker broker, Txn txn, Collection parent, Map> parameters) throws TriggerException { } } diff --git a/extensions/metadata/interface/src/main/java/org/exist/storage/md/MDStorageManager.java b/extensions/metadata/interface/src/main/java/org/exist/storage/md/MDStorageManager.java index dd0f9ca5a87..ac4693e1e5b 100644 --- a/extensions/metadata/interface/src/main/java/org/exist/storage/md/MDStorageManager.java +++ b/extensions/metadata/interface/src/main/java/org/exist/storage/md/MDStorageManager.java @@ -38,6 +38,7 @@ import org.exist.security.PermissionDeniedException; import org.exist.storage.DBBroker; import org.exist.storage.MetaStorage; +import org.exist.storage.txn.Txn; import org.exist.storage.md.xquery.MetadataModule; import org.exist.util.serializer.SAXSerializer; import org.exist.xquery.XQueryContext; @@ -52,7 +53,7 @@ * */ public class MDStorageManager implements Plug, BackupHandler, RestoreHandler { - + protected final static Logger LOG 
= LogManager.getLogger(MDStorageManager.class); public final static String PREFIX = "md"; @@ -63,7 +64,7 @@ public class MDStorageManager implements Plug, BackupHandler, RestoreHandler { public final static String KEY = "key"; public final static String VALUE = "value"; public final static String VALUE_IS_DOCUMENT = "value-is-document"; - + public final static String PREFIX_UUID = PREFIX+":"+UUID; public final static String PREFIX_KEY = PREFIX+":"+KEY; public final static String PREFIX_META = PREFIX+":"+META; @@ -71,15 +72,18 @@ public class MDStorageManager implements Plug, BackupHandler, RestoreHandler { public final static String PREFIX_VALUE_IS_DOCUMENT = PREFIX+":"+VALUE_IS_DOCUMENT; protected static MDStorageManager inst = null; - + + private Metas collectionMetas = null; + private Metas currentMetas = null; + MetaData md; - + public MDStorageManager(PluginsManager manager) throws PermissionDeniedException { try { @SuppressWarnings("unchecked") - Class backend = + Class backend = (Class) Class.forName("org.exist.storage.md.MetaDataImpl"); - + Constructor ctor = backend.getConstructor(Database.class); md = ctor.newInstance(manager.getDatabase()); } catch (Exception e) { @@ -88,20 +92,20 @@ public MDStorageManager(PluginsManager manager) throws PermissionDeniedException } inst = this; - + Database db = manager.getDatabase(); - + inject(db, md); db.registerDocumentTrigger(DocumentEvents.class); db.registerCollectionTrigger(CollectionEvents.class); - + Map> map = (Map>) db.getConfiguration().getProperty(XQueryContext.PROPERTY_BUILT_IN_MODULES); map.put( - NAMESPACE_URI, + NAMESPACE_URI, MetadataModule.class); } - + private void inject(Database db, MetaStorage md) { try { Field field = db.getClass().getDeclaredField("metaStorage"); @@ -116,12 +120,12 @@ private void inject(Database db, MetaStorage md) { } catch (IllegalAccessException e) { e.printStackTrace(); } - + } - + @Override - public void start(DBBroker broker) throws EXistException { + public void 
start(DBBroker broker, final Txn transaction) throws EXistException { } @Override @@ -146,16 +150,16 @@ private void backup(Metas ms, SAXSerializer serializer) throws SAXException { AttributesImpl attr = new AttributesImpl(); attr.addAttribute(NAMESPACE_URI, UUID, PREFIX_UUID, "CDATA", m.getUUID()); attr.addAttribute(NAMESPACE_URI, KEY, PREFIX_KEY, "CDATA", m.getKey()); - + Object value = m.getValue(); if (value instanceof DocumentImpl) { DocumentImpl doc = (DocumentImpl) value; - + attr.addAttribute(NAMESPACE_URI, VALUE, PREFIX_VALUE, "CDATA", doc.getURI().toString()); attr.addAttribute(NAMESPACE_URI, VALUE_IS_DOCUMENT, PREFIX_VALUE_IS_DOCUMENT, "CDATA", "true"); - + } else { - + attr.addAttribute(NAMESPACE_URI, VALUE, PREFIX_VALUE, "CDATA", value.toString()); } @@ -175,7 +179,7 @@ public void backup(Collection collection, AttributesImpl attrs) { LOG.warn("No metadata found to backup for collection: " + collection.getURI()); return; } - + // System.out.println("backup collection "+colection.getURI()); backup(ms, attrs); } @@ -229,23 +233,31 @@ public void backup(Document document, SAXSerializer serializer) throws SAXExcept } //restore methods - private Metas collectionMetas = null; - private Metas currentMetas = null; @Override - public void setDocumentLocator(Locator locator) {} + public void setDocumentLocator(Locator locator) { + //no-op + } @Override - public void startDocument() throws SAXException {} + public void startDocument() throws SAXException { + //no-op + } @Override - public void endDocument() throws SAXException {} + public void endDocument() throws SAXException { + //no-op + } @Override - public void startPrefixMapping(String prefix, String uri) throws SAXException {} + public void startPrefixMapping(String prefix, String uri) throws SAXException { + //no-op + } @Override - public void endPrefixMapping(String prefix) throws SAXException {} + public void endPrefixMapping(String prefix) throws SAXException { + //no-op + } @Override public void 
startElement(String uri, String localName, String qName, Attributes atts) throws SAXException { @@ -253,7 +265,7 @@ public void startElement(String uri, String localName, String qName, Attributes String uuid = atts.getValue(NAMESPACE_URI, UUID); String key = atts.getValue(NAMESPACE_URI, KEY); String value = atts.getValue(NAMESPACE_URI, VALUE); - + if (currentMetas == null) { md._addMeta(collectionMetas, uuid, key, value); } else { @@ -272,28 +284,36 @@ public void endElement(String uri, String localName, String qName) throws SAXExc } @Override - public void characters(char[] ch, int start, int length) throws SAXException {} + public void characters(char[] ch, int start, int length) throws SAXException { + //no-op + } @Override - public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {} + public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException { + //no-op + } @Override - public void processingInstruction(String target, String data) throws SAXException {} + public void processingInstruction(String target, String data) throws SAXException { + //no-op + } @Override - public void skippedEntity(String name) throws SAXException {} + public void skippedEntity(String name) throws SAXException { + //no-op + } @Override public void startCollectionRestore(Collection collection, Attributes atts) { if (collection == null) return; - + // System.out.println("startCollectionRestore "+colection.getURI()); String uuid = atts.getValue(NAMESPACE_URI, UUID); if (uuid != null) collectionMetas = md.replaceMetas(collection.getURI(), uuid); else - collectionMetas = md.addMetas(collection); + collectionMetas = md.addMetas(collection); } @Override @@ -305,13 +325,13 @@ public void endCollectionRestore(Collection collection) { public void startDocumentRestore(Document document, Attributes atts) { if (document == null) return; - + // System.out.println("startDocument "+document.getURI()); String uuid = atts.getValue(NAMESPACE_URI, 
UUID); if (uuid != null) currentMetas = md.replaceMetas(document instanceof DocumentImpl ? ((DocumentImpl)document).getURI() : null, uuid); else - currentMetas = md.addMetas(document); + currentMetas = md.addMetas(document); } @Override diff --git a/extensions/metadata/interface/src/test/java/org/exist/storage/md/DocumentAsValueTest.java b/extensions/metadata/interface/src/test/java/org/exist/storage/md/DocumentAsValueTest.java index 6840e6eab33..eea73d63d25 100644 --- a/extensions/metadata/interface/src/test/java/org/exist/storage/md/DocumentAsValueTest.java +++ b/extensions/metadata/interface/src/test/java/org/exist/storage/md/DocumentAsValueTest.java @@ -84,6 +84,12 @@ public class DocumentAsValueTest { private static DocumentImpl doc1 = null; private static DocumentImpl doc2 = null; + private static Properties contentsOutputProps = new Properties(); + static { + contentsOutputProps.setProperty(OutputKeys.INDENT, "yes"); + contentsOutputProps.setProperty(EXistOutputKeys.OUTPUT_DOCTYPE, "yes"); + } + @Test public void test_00() throws Exception { startDB(); @@ -106,11 +112,6 @@ public void test_00() throws Exception { } } - public Properties contentsOutputProps = new Properties(); - { - contentsOutputProps.setProperty( OutputKeys.INDENT, "yes" ); - contentsOutputProps.setProperty( EXistOutputKeys.OUTPUT_DOCTYPE, "yes" ); - } private String serializer(DBBroker broker, DocumentImpl document) throws SAXException { Serializer serializer = broker.getSerializer(); serializer.setUser(broker.getCurrentSubject()); diff --git a/extensions/metadata/interface/src/test/java/org/exist/storage/md/MatchDocumentsTest.java b/extensions/metadata/interface/src/test/java/org/exist/storage/md/MatchDocumentsTest.java index 1512a4f2dd6..8cb03cfd239 100644 --- a/extensions/metadata/interface/src/test/java/org/exist/storage/md/MatchDocumentsTest.java +++ b/extensions/metadata/interface/src/test/java/org/exist/storage/md/MatchDocumentsTest.java @@ -21,15 +21,19 @@ import static 
org.junit.Assert.*; +import java.io.IOException; import java.nio.file.Path; import java.util.List; import java.util.Optional; import org.exist.EXistException; import org.exist.collections.Collection; +import org.exist.collections.CollectionConfigurationException; import org.exist.collections.CollectionConfigurationManager; import org.exist.collections.IndexInfo; +import org.exist.collections.triggers.TriggerException; import org.exist.dom.persistent.DocumentImpl; +import org.exist.security.PermissionDeniedException; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; @@ -39,11 +43,13 @@ import org.exist.util.Configuration; import org.exist.util.ConfigurationHelper; import org.exist.util.DatabaseConfigurationException; +import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; +import org.xml.sax.SAXException; /** * @author Dmitriy Shabanov @@ -96,9 +102,8 @@ public void deleteCollection() throws Exception { final Metas doc3Metadata = md.getMetas(doc3uri); assertNotNull(doc3Metadata); - Collection col1 = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { final DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -136,10 +141,6 @@ public void deleteCollection() throws Exception { matching = md.matchDocuments(KEY1, VALUE2); assertEquals(0, matching.size()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -154,11 +155,9 @@ public void moveCollection() throws Exception { final Metas doc3metadata = md.getMetas(doc3uri); assertNotNull(doc3metadata); - 
Collection col1 = null; - Collection parentCol = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); - parentCol = broker.openCollection(col2uri.removeLastSegment(), LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + final Collection parentCol = broker.openCollection(col2uri.removeLastSegment(), LockMode.WRITE_LOCK)) { final DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -166,7 +165,7 @@ public void moveCollection() throws Exception { doc1Metadata.put(KEY1, doc2); doc1Metadata.put(KEY2, VALUE1); - //add metas for binaty document + //add metas for binary document doc3metadata.put(KEY1, VALUE2); doc3metadata.put(KEY2, doc2); @@ -185,9 +184,6 @@ public void moveCollection() throws Exception { try(final Txn txn = txnManager.beginTransaction()) { broker.moveCollection(txn, col1, parentCol, col2uri.lastSegment()); txnManager.commit(txn); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } matching = md.matchDocuments(KEY2, VALUE1); @@ -195,13 +191,6 @@ public void moveCollection() throws Exception { assertEquals(1, matching.size()); assertEquals(doc4uri, matching.get(0).getURI()); - } finally { - if(parentCol != null) { - parentCol.release(LockMode.WRITE_LOCK); - } - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -213,9 +202,8 @@ public void renameXMLResource() throws Exception { final Metas doc1Metadata = md.getMetas(doc1uri); assertNotNull(doc1Metadata); - Collection col1 = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + 
final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { final DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -246,10 +234,6 @@ public void renameXMLResource() throws Exception { assertEquals(1, matching.size()); assertEquals(doc2uri, matching.get(0).getURI()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -261,9 +245,8 @@ public void moveXMLResource() throws Exception { final Metas doc1Metadata = md.getMetas(doc1uri); assertNotNull(doc1Metadata); - Collection col1 = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -278,10 +261,10 @@ public void moveXMLResource() throws Exception { final TransactionManager txnManager = pool.getTransactionManager(); try(final Txn txn = txnManager.beginTransaction()) { - Collection col2 = broker.getOrCreateCollection(txn, col2uri); + final Collection col2 = broker.getOrCreateCollection(txn, col2uri); broker.saveCollection(txn, col2); - DocumentImpl doc1 = col1.getDocument(broker, doc1uri.lastSegment()); + final DocumentImpl doc1 = col1.getDocument(broker, doc1uri.lastSegment()); broker.moveResource(txn, doc1, col2, doc4uri.lastSegment()); txnManager.commit(txn); @@ -295,10 +278,6 @@ public void moveXMLResource() throws Exception { assertEquals(1, matching.size()); assertEquals(doc4uri, matching.get(0).getURI()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -319,9 +298,8 @@ public void deleteXMLResource() throws Exception { assertNotNull(doc1Metadata); //add some test key-values to metadata of doc1 - Collection col1 = null; - try(final DBBroker 
broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { final DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -350,10 +328,6 @@ public void deleteXMLResource() throws Exception { matching = md.matchDocuments(KEY2, VALUE1); assertEquals(0, matching.size()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -365,9 +339,8 @@ public void renameBinaryResource() throws Exception { final Metas doc3Metadata = md.getMetas(doc3uri); assertNotNull(doc3Metadata); - Collection col1 = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); //add first key-value @@ -398,10 +371,6 @@ public void renameBinaryResource() throws Exception { assertEquals(1, matching.size()); assertEquals(doc6uri, matching.get(0).getURI()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -413,9 +382,8 @@ public void moveBinaryResource() throws Exception { final Metas doc3Metadata = md.getMetas(doc3uri); assertNotNull(doc3Metadata); - Collection col1 = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { 
DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -447,10 +415,6 @@ public void moveBinaryResource() throws Exception { assertEquals(1, matching.size()); assertEquals(doc5uri, matching.get(0).getURI()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @@ -462,10 +426,8 @@ public void deleteBinaryResource() throws Exception { final Metas doc3Metadata = md.getMetas(doc3uri); assertNotNull(doc3Metadata); - Collection col1 = null; - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) { - - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK)) { DocumentImpl doc2 = col1.getDocument(broker, doc2uri.lastSegment()); @@ -493,15 +455,11 @@ public void deleteBinaryResource() throws Exception { matching = md.matchDocuments(KEY2, VALUE1); assertEquals(0, matching.size()); - } finally { - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } @Before - public void startDB() throws DatabaseConfigurationException, EXistException { + public void startDB() throws DatabaseConfigurationException, EXistException, PermissionDeniedException, IOException, SAXException, CollectionConfigurationException, LockException { final Path confFile = ConfigurationHelper.lookup("conf.xml"); Configuration config = new Configuration(confFile.toAbsolutePath().toString()); @@ -512,8 +470,8 @@ public void startDB() throws DatabaseConfigurationException, EXistException { final TransactionManager txnManager = pool.getTransactionManager(); try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); - final Txn txn = txnManager.beginTransaction()) { - final Collection root = broker.getOrCreateCollection(txn, col1uri); + final Txn txn = txnManager.beginTransaction(); + final Collection 
root = broker.getOrCreateCollection(txn, col1uri)) { assertNotNull(root); broker.saveCollection(txn, root); @@ -523,19 +481,18 @@ public void startDB() throws DatabaseConfigurationException, EXistException { //store test data IndexInfo info = root.validateXMLResource(txn, broker, doc1uri.lastSegment(), XML1); root.store(txn, broker, info, XML1); + info = root.validateXMLResource(txn, broker, doc2uri.lastSegment(), XML2); root.store(txn, broker, info, XML2); + root.addBinaryResource(txn, broker, doc3uri.lastSegment(), BINARY.getBytes(), null); txnManager.commit(txn); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); } } @After - public void cleanup() { + public void cleanup() throws TriggerException, PermissionDeniedException, EXistException, IOException { clean(); shutdown(); } @@ -546,34 +503,23 @@ private void shutdown() { pool = null; } - private void clean() { + private void clean() throws EXistException, PermissionDeniedException, IOException, TriggerException { final TransactionManager txnManager = pool.getTransactionManager(); - Collection col1 = null; - Collection col2 = null; try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject())); - final Txn txn = txnManager.beginTransaction()) { + final Txn txn = txnManager.beginTransaction(); + final Collection col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); + final Collection col2 = broker.openCollection(col2uri, LockMode.WRITE_LOCK)) { - col1 = broker.openCollection(col1uri, LockMode.WRITE_LOCK); if(col1 != null) { broker.removeCollection(txn, col1); } - col2 = broker.openCollection(col2uri, LockMode.WRITE_LOCK); + if(col2 != null) { broker.removeCollection(txn, col2); } txnManager.commit(txn); - } catch (Exception e) { - e.printStackTrace(); - fail(e.getMessage()); - } finally { - if(col2 != null) { - col2.release(LockMode.WRITE_LOCK); - } - if(col1 != null) { - col1.release(LockMode.WRITE_LOCK); - } } } } diff --git 
a/extensions/metadata/sleepycat/src/main/java/org/exist/storage/md/MetaDataImpl.java b/extensions/metadata/sleepycat/src/main/java/org/exist/storage/md/MetaDataImpl.java index 99b77989e95..5d10613d226 100644 --- a/extensions/metadata/sleepycat/src/main/java/org/exist/storage/md/MetaDataImpl.java +++ b/extensions/metadata/sleepycat/src/main/java/org/exist/storage/md/MetaDataImpl.java @@ -41,6 +41,8 @@ import com.sleepycat.persist.*; import org.w3c.dom.Document; +import javax.annotation.Nullable; + /** * @author Dmitriy Shabanov * @@ -497,8 +499,6 @@ public void moveMetas(XmldbURI oldUri, XmldbURI newUri) { // delMetas(ms); // // MetasImpl newMs = new MetasImpl((MetasImpl)ms, uri); - - return; } // public void updateMetas(XmldbURI oldD, DocumentImpl newD) { diff --git a/extensions/modules/src/org/exist/xquery/modules/compression/AbstractCompressFunction.java b/extensions/modules/src/org/exist/xquery/modules/compression/AbstractCompressFunction.java index ddf5be22f1c..4844814e2df 100644 --- a/extensions/modules/src/org/exist/xquery/modules/compression/AbstractCompressFunction.java +++ b/extensions/modules/src/org/exist/xquery/modules/compression/AbstractCompressFunction.java @@ -24,12 +24,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.collections.Collection; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DefaultDocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.MutableDocumentSet; +import org.exist.dom.persistent.*; import org.exist.security.PermissionDeniedException; +import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.serializers.Serializer; import org.exist.util.Base64Decoder; import org.exist.util.FileUtils; @@ -165,46 +165,43 @@ private void compressFromUri(OutputStream os, URI uri, 
boolean useHierarchy, Str } else { - // try for a doc - DocumentImpl doc = null; - try - { - XmldbURI xmldburi = XmldbURI.create(uri); - doc = context.getBroker().getXMLResource(xmldburi, LockMode.READ_LOCK); - - if(doc == null) - { - // no doc, try for a collection - Collection col = context.getBroker().getCollection(xmldburi); - - if(col != null) - { - // got a collection - compressCollection(os, col, useHierarchy, stripOffset); - } - else - { - // no doc or collection + final XmldbURI xmldburi = XmldbURI.create(uri); + + // try for a collection + try(final Collection collection = context.getBroker().openCollection(xmldburi, LockMode.READ_LOCK)) { + if(collection != null) { + compressCollection(os, collection, useHierarchy, stripOffset); + return; + } + } catch(final PermissionDeniedException | LockException | SAXException | IOException pde) { + throw new XPathException(this, pde.getMessage()); + } + + + // otherwise, try for a doc + try(final Collection collection = context.getBroker().openCollection(xmldburi.removeLastSegment(), LockMode.READ_LOCK)) { + if(collection == null) { + throw new XPathException(this, "Invalid URI: " + uri.toString()); + } + + try(final LockedDocument doc = collection.getDocumentWithLock(context.getBroker(), xmldburi.lastSegment(), LockMode.READ_LOCK)) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + if(doc == null) { throw new XPathException(this, "Invalid URI: " + uri.toString()); } + + compressResource(os, doc.getDocument(), useHierarchy, stripOffset, method, resourceName); + return; } - else - { - // got a doc - compressResource(os, doc, useHierarchy, stripOffset, method, resourceName); - } - } - catch(PermissionDeniedException | LockException | SAXException | IOException pde) - { + } catch(final PermissionDeniedException | LockException | SAXException | IOException pde) { throw new XPathException(this, pde.getMessage()); - } finally - { - if(doc != null) - 
doc.getUpdateLock().release(LockMode.READ_LOCK); } } - } catch (IOException e) { + } catch (final IOException e) { throw new XPathException(this, e.getMessage()); } @@ -272,116 +269,94 @@ private void compressFile(final OutputStream os, final Path file, boolean useHie * Whether to use a folder hierarchy in the archive file that * reflects the collection hierarchy */ - private void compressElement(OutputStream os, Element element, boolean useHierarchy, String stripOffset) throws XPathException - { + private void compressElement(final OutputStream os, final Element element, final boolean useHierarchy, + final String stripOffset) throws XPathException { - final String ns = element.getNamespaceURI(); - if(!(element.getNodeName().equals("entry") || (ns != null && ns.length() > 0))) { - throw new XPathException(this, "Item must be type of xs:anyURI or element entry."); - } + final String ns = element.getNamespaceURI(); + if(!(element.getNodeName().equals("entry") || (ns != null && ns.length() > 0))) { + throw new XPathException(this, "Item must be type of xs:anyURI or element entry."); + } - if(element.getChildNodes().getLength() > 1) - throw new XPathException(this, "Entry content is not valid XML fragment."); + if(element.getChildNodes().getLength() > 1) { + throw new XPathException(this, "Entry content is not valid XML fragment."); + } - String name = element.getAttribute("name"); + String name = element.getAttribute("name"); // if(name == null) // throw new XPathException(this, "Entry must have name attribute."); - String type = element.getAttribute("type"); - - if("uri".equals(type)) - { - compressFromUri(os, URI.create(element.getFirstChild().getNodeValue()), useHierarchy, stripOffset, element.getAttribute("method"), name); - return; - } + final String type = element.getAttribute("type"); - if(useHierarchy) - { - name = removeLeadingOffset(name, stripOffset); - } - else - { - name = name.substring(name.lastIndexOf("/") + 1); - } + if("uri".equals(type)) { + 
compressFromUri(os, URI.create(element.getFirstChild().getNodeValue()), useHierarchy, stripOffset, element.getAttribute("method"), name); + return; + } - if("collection".equals(type)) - name += "/"; - - Object entry = null; + if (useHierarchy) { + name = removeLeadingOffset(name, stripOffset); + } else { + name = name.substring(name.lastIndexOf("/") + 1); + } - try - { - - entry = newEntry(name); + if("collection".equals(type)) { + name += "/"; + } - if(!"collection".equals(type)) - { - byte[] value; - CRC32 chksum = new CRC32(); - Node content = element.getFirstChild(); + Object entry = null; + try { + entry = newEntry(name); + if(!"collection".equals(type)) { + byte[] value; + final CRC32 chksum = new CRC32(); + final Node content = element.getFirstChild(); - if(content == null) - { - value = new byte[0]; - } - else - { - if(content.getNodeType() == Node.TEXT_NODE) - { - String text = content.getNodeValue(); - Base64Decoder dec = new Base64Decoder(); - if("binary".equals(type)) - { - //base64 binary - dec.translate(text); - value = dec.getByteArray(); - } - else - { - //text - value = text.getBytes(); - } - } - else - { - //xml - Serializer serializer = context.getBroker().getSerializer(); - serializer.setUser(context.getUser()); - serializer.setProperty("omit-xml-declaration", "no"); - getDynamicSerializerOptions(serializer); - value = serializer.serialize((NodeValue) content).getBytes(); + if(content == null) { + value = new byte[0]; + } else { + if(content.getNodeType() == Node.TEXT_NODE) { + String text = content.getNodeValue(); + Base64Decoder dec = new Base64Decoder(); + if("binary".equals(type)) { + //base64 binary + dec.translate(text); + value = dec.getByteArray(); + } else { + //text + value = text.getBytes(); } + } else { + //xml + Serializer serializer = context.getBroker().getSerializer(); + serializer.setUser(context.getUser()); + serializer.setProperty("omit-xml-declaration", "no"); + getDynamicSerializerOptions(serializer); + value = 
serializer.serialize((NodeValue) content).getBytes(); } + } - if (entry instanceof ZipEntry && - "store".equals(element.getAttribute("method"))) { - ((ZipEntry) entry).setMethod(ZipOutputStream.STORED); - chksum.update(value); - ((ZipEntry) entry).setCrc(chksum.getValue()); - ((ZipEntry) entry).setSize(value.length); - } - putEntry(os, entry); - - os.write(value); + if (entry instanceof ZipEntry && + "store".equals(element.getAttribute("method"))) { + ((ZipEntry) entry).setMethod(ZipOutputStream.STORED); + chksum.update(value); + ((ZipEntry) entry).setCrc(chksum.getValue()); + ((ZipEntry) entry).setSize(value.length); } + putEntry(os, entry); + + os.write(value); } - catch(IOException | SAXException ioe) - { - throw new XPathException(this, ioe.getMessage(), ioe); - } - finally - { - if(entry != null) - try - { - closeEntry(os); - } - catch(IOException ioe) - { - throw new XPathException(this, ioe.getMessage(), ioe); - } + } catch(final IOException | SAXException ioe) { + throw new XPathException(this, ioe.getMessage(), ioe); + } finally { + if(entry != null) { + try { + closeEntry(os); + } catch (final IOException ioe) { + throw new XPathException(this, ioe.getMessage(), ioe); + } } + } } private void getDynamicSerializerOptions(Serializer serializer) throws SAXException { @@ -469,22 +444,21 @@ private void compressResource(OutputStream os, DocumentImpl doc, boolean useHier */ private void compressCollection(OutputStream os, Collection col, boolean useHierarchy, String stripOffset) throws IOException, SAXException, LockException, PermissionDeniedException { // iterate over child documents - MutableDocumentSet childDocs = new DefaultDocumentSet(); - col.getDocuments(context.getBroker(), childDocs); - for (Iterator itChildDocs = childDocs.getDocumentIterator(); itChildDocs.hasNext();) { + final DBBroker broker = context.getBroker(); + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + final MutableDocumentSet childDocs = new 
DefaultDocumentSet(); + col.getDocuments(broker, childDocs); + for (final Iterator itChildDocs = childDocs.getDocumentIterator(); itChildDocs.hasNext();) { DocumentImpl childDoc = itChildDocs.next(); - childDoc.getUpdateLock().acquire(LockMode.READ_LOCK); - try { + try(final ManagedDocumentLock updateLock = lockManager.acquireDocumentReadLock(childDoc.getURI())) { compressResource(os, childDoc, useHierarchy, stripOffset, "", null); - } finally { - childDoc.getUpdateLock().release(LockMode.READ_LOCK); } } // iterate over child collections - for (Iterator itChildCols = col.collectionIterator(context.getBroker()); itChildCols.hasNext();) { + for (final Iterator itChildCols = col.collectionIterator(broker); itChildCols.hasNext();) { // get the child collection XmldbURI childColURI = itChildCols.next(); - Collection childCol = context.getBroker().getCollection(col.getURI().append(childColURI)); + Collection childCol = broker.getCollection(col.getURI().append(childColURI)); // recurse compressCollection(os, childCol, useHierarchy, stripOffset); } diff --git a/extensions/modules/src/org/exist/xquery/modules/compression/CompressionModule.java b/extensions/modules/src/org/exist/xquery/modules/compression/CompressionModule.java index 3246a728506..5287467e7a9 100644 --- a/extensions/modules/src/org/exist/xquery/modules/compression/CompressionModule.java +++ b/extensions/modules/src/org/exist/xquery/modules/compression/CompressionModule.java @@ -59,6 +59,14 @@ public class CompressionModule extends AbstractInternalModule { functionDefs(UnGZipFunction.class, UnGZipFunction.signatures[0] ), + functionDefs(DeflateFunction.class, + DeflateFunction.signatures[0], + DeflateFunction.signatures[1] + ), + functionDefs(InflateFunction.class, + InflateFunction.signatures[0], + InflateFunction.signatures[1] + ), functionDefs(TarFunction.class, TarFunction.signatures[0], TarFunction.signatures[1], diff --git a/extensions/modules/src/org/exist/xquery/modules/compression/DeflateFunction.java 
b/extensions/modules/src/org/exist/xquery/modules/compression/DeflateFunction.java new file mode 100644 index 00000000000..84b336bdfbf --- /dev/null +++ b/extensions/modules/src/org/exist/xquery/modules/compression/DeflateFunction.java @@ -0,0 +1,110 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + * $Id$ + */ +package org.exist.xquery.modules.compression; + +import java.io.IOException; +import java.util.zip.Deflater; +import java.util.zip.DeflaterOutputStream; + +import org.exist.dom.QName; +import org.exist.util.io.FastByteArrayOutputStream; +import org.exist.xquery.BasicFunction; +import org.exist.xquery.Cardinality; +import org.exist.xquery.FunctionSignature; +import org.exist.xquery.XPathException; +import org.exist.xquery.XQueryContext; +import org.exist.xquery.value.Base64BinaryValueType; +import org.exist.xquery.value.BinaryValue; +import org.exist.xquery.value.BinaryValueFromInputStream; +import org.exist.xquery.value.FunctionParameterSequenceType; +import org.exist.xquery.value.BooleanValue; +import org.exist.xquery.value.Sequence; +import org.exist.xquery.value.SequenceType; +import org.exist.xquery.value.Type; + + +/** + * Deflate compression + * + 
* @author Olaf Schreck + * @version 1.0 + */ +public class DeflateFunction extends BasicFunction +{ + private final static QName DEFLATE_FUNCTION_NAME = new QName("deflate", CompressionModule.NAMESPACE_URI, CompressionModule.PREFIX); + + public final static FunctionSignature signatures[] = { + new FunctionSignature( + DEFLATE_FUNCTION_NAME, + "Deflate data (RFC 1950)", + new SequenceType[] { + new FunctionParameterSequenceType("data", Type.BASE64_BINARY, Cardinality.EXACTLY_ONE, "The data to Deflate") + }, + new SequenceType( + Type.BASE64_BINARY, Cardinality.ZERO_OR_ONE) + ), + new FunctionSignature( + DEFLATE_FUNCTION_NAME, + "Deflate data (RFC 1951)", + new SequenceType[] { + new FunctionParameterSequenceType("data", Type.BASE64_BINARY, Cardinality.EXACTLY_ONE, "The data to Deflate"), + new FunctionParameterSequenceType("raw", Type.BOOLEAN, Cardinality.EXACTLY_ONE, "If true, create raw deflate data that is not wrapped inside zlib header and checksum.") + }, + new SequenceType( + Type.BASE64_BINARY, Cardinality.ZERO_OR_ONE) + ) + }; + + + public DeflateFunction(XQueryContext context, FunctionSignature signature) + { + super(context, signature); + } + + @Override + public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException + { + // is there some data to Deflate? 
+ if(args[0].isEmpty()) + return Sequence.EMPTY_SEQUENCE; + + BinaryValue bin = (BinaryValue) args[0].itemAt(0); + + boolean rawflag = false; + if(args.length > 1 && !args[1].isEmpty()) + rawflag = args[1].itemAt(0).convertTo(Type.BOOLEAN).effectiveBooleanValue(); + + Deflater defl = new Deflater(java.util.zip.Deflater.DEFAULT_COMPRESSION, rawflag); + + // deflate the data + try(final FastByteArrayOutputStream baos = new FastByteArrayOutputStream(); + DeflaterOutputStream dos = new DeflaterOutputStream(baos, defl)) { + bin.streamBinaryTo(dos); + dos.flush(); + dos.finish(); + + return BinaryValueFromInputStream.getInstance(context, new Base64BinaryValueType(), baos.toFastByteInputStream()); + } catch(IOException ioe) { + throw new XPathException(this, ioe.getMessage(), ioe); + } + } +} diff --git a/extensions/modules/src/org/exist/xquery/modules/compression/EntryFunctions.java b/extensions/modules/src/org/exist/xquery/modules/compression/EntryFunctions.java index de364295255..66c43f49298 100644 --- a/extensions/modules/src/org/exist/xquery/modules/compression/EntryFunctions.java +++ b/extensions/modules/src/org/exist/xquery/modules/compression/EntryFunctions.java @@ -276,21 +276,18 @@ protected void eval(final String path, final DataType dataType, final Optional + * @version 1.0 + */ +public class InflateFunction extends BasicFunction +{ + + public final static FunctionSignature signatures[] = { + new FunctionSignature( + new QName("inflate", CompressionModule.NAMESPACE_URI, CompressionModule.PREFIX), + "Inflate data (RFC 1950)", + new SequenceType[] { + new FunctionParameterSequenceType("inflate-data", Type.BASE64_BINARY, Cardinality.EXACTLY_ONE, "The inflate data to uncompress.") + }, + new SequenceType(Type.BASE64_BINARY, Cardinality.ZERO_OR_ONE) + ), + new FunctionSignature( + new QName("inflate", CompressionModule.NAMESPACE_URI, CompressionModule.PREFIX), + "Inflate data (RFC 1951)", + new SequenceType[] { + new FunctionParameterSequenceType("inflate-data", 
Type.BASE64_BINARY, Cardinality.EXACTLY_ONE, "The inflate data to uncompress."), + new FunctionParameterSequenceType("raw", Type.BOOLEAN, Cardinality.EXACTLY_ONE, "If true, expect raw deflate data that is not wrapped inside zlib header and checksum.") + }, + new SequenceType(Type.BASE64_BINARY, Cardinality.ZERO_OR_ONE) + ) + }; + + public InflateFunction(XQueryContext context, FunctionSignature signature) + { + super(context, signature); + } + + @Override + public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException + { + // is there some data to inflate? + if(args[0].isEmpty()) + return Sequence.EMPTY_SEQUENCE; + + final BinaryValue bin = (BinaryValue) args[0].itemAt(0); + + boolean rawflag = false; + if(args.length > 1 && !args[1].isEmpty()) + rawflag = args[1].itemAt(0).convertTo(Type.BOOLEAN).effectiveBooleanValue(); + + Inflater infl = new Inflater(rawflag); + + // uncompress the data + try(final InflaterInputStream iis = new InflaterInputStream(bin.getInputStream(), infl); + final FastByteArrayOutputStream baos = new FastByteArrayOutputStream()) { + int read = -1; + final byte[] b = new byte[4096]; + while ((read = iis.read(b)) != -1) { + baos.write(b, 0, read); + } + + return BinaryValueFromInputStream.getInstance(context, new Base64BinaryValueType(), baos.toFastByteInputStream()); + } catch(final IOException ioe) { + throw new XPathException(this, ioe.getMessage(), ioe); + } + } +} diff --git a/extensions/modules/src/org/exist/xquery/modules/compression/example-deflate.xql b/extensions/modules/src/org/exist/xquery/modules/compression/example-deflate.xql new file mode 100644 index 00000000000..a9aef7eb09f --- /dev/null +++ b/extensions/modules/src/org/exist/xquery/modules/compression/example-deflate.xql @@ -0,0 +1,42 @@ +xquery version "3.1"; + +declare namespace compression = "http://exist-db.org/xquery/compression"; +declare namespace util = "http://exist-db.org/xquery/util"; + +(:~ +: +: Simple example showing how to use 
compression:deflate() / inflate() +: +: @author Olaf Schreck +:) + +let $testinput := "Hello World!" + +(: RFC1950 deflate [compressed data wrapped in zlib header/footer] :) +let $ex_defl := compression:deflate(util:string-to-binary($testinput)) +let $ex_infl := compression:inflate($ex_defl) +let $output := util:base64-decode($ex_infl) +let $result := if ($output = $testinput) then "OK" else "FAIL" + +(: RFC1951 deflate [raw compression without zlib header/footer] :) +(: for raw deflate/inflate, set 2nd arg to true() :) +let $ex_rawdefl := compression:deflate(util:string-to-binary($testinput), true()) +let $ex_rawinfl := compression:inflate($ex_rawdefl, true()) +let $rawoutput := util:base64-decode($ex_rawinfl) +let $rawresult := if ($rawoutput = $testinput) then "OK" else "FAIL" + +return + + + {$ex_defl} + {$ex_infl} + {$output} + {$result} + + + {$ex_rawdefl} + {$ex_rawinfl} + {$rawoutput} + {$rawresult} + + diff --git a/extensions/modules/src/org/exist/xquery/modules/counter/CounterModule.java b/extensions/modules/src/org/exist/xquery/modules/counter/CounterModule.java index aa7f1029228..30a750b5cbc 100644 --- a/extensions/modules/src/org/exist/xquery/modules/counter/CounterModule.java +++ b/extensions/modules/src/org/exist/xquery/modules/counter/CounterModule.java @@ -25,6 +25,7 @@ import org.exist.xquery.AbstractInternalModule; import org.exist.xquery.FunctionDef; import org.exist.xquery.XPathException; +import org.exist.xquery.XQueryContext; import java.util.Arrays; import java.util.List; @@ -65,8 +66,12 @@ public class CounterModule extends AbstractInternalModule { public CounterModule(Map> parameters) throws XPathException { super(functions, parameters, true); + } + + @Override + public void prepare(final XQueryContext context) throws XPathException { declareVariable(EXCEPTION_QNAME, null); - declareVariable(EXCEPTION_MESSAGE_QNAME, null); + declareVariable(EXCEPTION_MESSAGE_QNAME, null); } /* (non-Javadoc) @@ -93,6 +98,5 @@ public String 
getDefaultPrefix() { public String getReleaseVersion() { return RELEASED_IN_VERSION; } - } diff --git a/extensions/modules/src/org/exist/xquery/modules/exi/EXIUtils.java b/extensions/modules/src/org/exist/xquery/modules/exi/EXIUtils.java index 9c7962ea67d..dd359e0499a 100644 --- a/extensions/modules/src/org/exist/xquery/modules/exi/EXIUtils.java +++ b/extensions/modules/src/org/exist/xquery/modules/exi/EXIUtils.java @@ -50,7 +50,7 @@ protected static InputStream getInputStream(Item item, XQueryContext context) th Serializer serializer = context.getBroker().newSerializer(); NodeValue node = (NodeValue) item; - return new NodeInputStream(serializer, node); + return new NodeInputStream(context.getBroker().getBrokerPool(), serializer, node); default: LOG.error("Wrong item type " + Type.getTypeName(item.getType())); throw new XPathException("wrong item type " + Type.getTypeName(item.getType())); diff --git a/extensions/modules/src/org/exist/xquery/modules/expathrepo/Deploy.java b/extensions/modules/src/org/exist/xquery/modules/expathrepo/Deploy.java index 82d1bdad404..05bc3bb5e8c 100644 --- a/extensions/modules/src/org/exist/xquery/modules/expathrepo/Deploy.java +++ b/extensions/modules/src/org/exist/xquery/modules/expathrepo/Deploy.java @@ -34,6 +34,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; import org.exist.dom.memtree.MemTreeBuilder; +import org.exist.dom.persistent.LockedDocument; import org.exist.repo.Deployment; import org.exist.repo.PackageLoader; import org.exist.security.PermissionDeniedException; @@ -144,14 +145,14 @@ public Sequence eval(final Sequence[] args, final Sequence contextSequence) final String pkgName = args[0].getStringValue(); try { - Deployment deployment = new Deployment(context.getBroker()); + Deployment deployment = new Deployment(); final Optional target; if (isCalledAs("deploy")) { String userTarget = null; if (getArgumentCount() == 2) { userTarget = args[1].getStringValue(); } - target = 
deployment.deploy(pkgName, context.getRepository(), userTarget); + target = deployment.deploy(context.getBroker(), context.getBroker().getCurrentTransaction(), pkgName, context.getRepository(), userTarget); } else if (isCalledAs("install-and-deploy")) { String version = null; final String repoURI; @@ -169,7 +170,7 @@ public Sequence eval(final Sequence[] args, final Sequence contextSequence) } target = installAndDeployFromDb(pkgName, repoURI); } else { - target = deployment.undeploy(pkgName, context.getRepository()); + target = deployment.undeploy(context.getBroker(), context.getBroker().getCurrentTransaction(), pkgName, context.getRepository()); } target.orElseThrow(() -> new XPathException("expath repository is not available.")); return statusReport(target); @@ -183,10 +184,10 @@ public Sequence eval(final Sequence[] args, final Sequence contextSequence) private Optional installAndDeploy(final String pkgName, final String version, final String repoURI) throws XPathException { try { final RepoPackageLoader loader = new RepoPackageLoader(repoURI); - final Deployment deployment = new Deployment(context.getBroker()); + final Deployment deployment = new Deployment(); final Path xar = loader.load(pkgName, new PackageLoader.Version(version, false)); if (xar != null) { - return deployment.installAndDeploy(xar, loader); + return deployment.installAndDeploy(context.getBroker(), context.getBroker().getCurrentTransaction(), xar, loader); } return Optional.empty(); } catch (final MalformedURLException e) { @@ -199,25 +200,26 @@ private Optional installAndDeploy(final String pkgName, final String ver private Optional installAndDeployFromDb(final String path, final String repoURI) throws XPathException { final XmldbURI docPath = XmldbURI.createInternal(path); - DocumentImpl doc = null; - try { - doc = context.getBroker().getXMLResource(docPath, LockMode.READ_LOCK); - if (doc.getResourceType() != DocumentImpl.BINARY_FILE) + try(final LockedDocument lockedDoc = 
context.getBroker().getXMLResource(docPath, LockMode.READ_LOCK)) { + if(lockedDoc == null) { + throw new XPathException(this, EXPathErrorCode.EXPDY001, path + " no such .xar", new StringValue(path)); + } + + final DocumentImpl doc = lockedDoc.getDocument(); + if (doc.getResourceType() != DocumentImpl.BINARY_FILE) { throw new XPathException(this, EXPathErrorCode.EXPDY001, path + " is not a valid .xar", new StringValue(path)); + } final Path file = ((NativeBroker)context.getBroker()).getCollectionBinaryFileFsPath(doc.getURI()); RepoPackageLoader loader = null; if (repoURI != null) { loader = new RepoPackageLoader(repoURI); } - final Deployment deployment = new Deployment(context.getBroker()); - return deployment.installAndDeploy(file, loader); + final Deployment deployment = new Deployment(); + return deployment.installAndDeploy(context.getBroker(), context.getBroker().getCurrentTransaction(), file, loader); } catch (PackageException | IOException | PermissionDeniedException e) { LOG.error(e.getMessage(), e); throw new XPathException(this, EXPathErrorCode.EXPDY007, "Package installation failed: " + e.getMessage(), new StringValue(e.getMessage())); - } finally { - if (doc != null) - doc.getUpdateLock().release(LockMode.READ_LOCK); } } diff --git a/extensions/modules/src/org/exist/xquery/modules/expathrepo/InstallFunction.java b/extensions/modules/src/org/exist/xquery/modules/expathrepo/InstallFunction.java index 403218f536a..6de12a7dc97 100644 --- a/extensions/modules/src/org/exist/xquery/modules/expathrepo/InstallFunction.java +++ b/extensions/modules/src/org/exist/xquery/modules/expathrepo/InstallFunction.java @@ -26,9 +26,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.repo.ExistPkgInfo; import org.exist.repo.ExistRepository; import 
org.exist.security.PermissionDeniedException; @@ -99,18 +99,14 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) pkg = parent_repo.installPackage(uri, force, interact); repo.get().reportAction(ExistRepository.Action.INSTALL, pkg.getName()); } else { - // .xar is stored as a binary resource - BinaryDocument doc = null; - try { - doc = _getDocument(pkgOrPath); - Path file = ((NativeBroker)context.getBroker()).getCollectionBinaryFileFsPath(doc.getURI()); - LOG.debug("Installing file: " + file.toAbsolutePath().toString()); - pkg = parent_repo.installPackage(file, force, interact); - repo.get().reportAction(ExistRepository.Action.INSTALL, pkg.getName()); - } finally { - if (doc != null) - doc.getUpdateLock().release(LockMode.READ_LOCK); - } + // .xar is stored as a binary resource + try(final LockedDocument lockedDoc = getBinaryDoc(pkgOrPath);) { + final DocumentImpl doc = lockedDoc.getDocument(); + Path file = ((NativeBroker)context.getBroker()).getCollectionBinaryFileFsPath(doc.getURI()); + LOG.debug("Installing file: " + file.toAbsolutePath().toString()); + pkg = parent_repo.installPackage(file, force, interact); + repo.get().reportAction(ExistRepository.Action.INSTALL, pkg.getName()); + } } ExistPkgInfo info = (ExistPkgInfo) pkg.getInfo("exist"); if (info != null && !info.getJars().isEmpty()) @@ -149,23 +145,23 @@ private URI _getURI(String s) throws XPathException } } - private BinaryDocument _getDocument(String path) throws XPathException { + private LockedDocument getBinaryDoc(final String path) throws XPathException { try { - XmldbURI uri = XmldbURI.createInternal(path); - DocumentImpl doc = context.getBroker().getXMLResource(uri, LockMode.READ_LOCK); - if (doc == null) { + final XmldbURI uri = XmldbURI.createInternal(path); + final LockedDocument lockedDoc = context.getBroker().getXMLResource(uri, LockMode.READ_LOCK); + if (lockedDoc == null) { throw new XPathException(this, EXPathErrorCode.EXPDY001, path + " is not .xar resource", new 
StringValue(path) ); - } else if (doc.getResourceType() != DocumentImpl.BINARY_FILE) { - doc.getUpdateLock().release(LockMode.READ_LOCK); + } else if (lockedDoc.getDocument().getResourceType() != DocumentImpl.BINARY_FILE) { + lockedDoc.close(); throw new XPathException(this, EXPathErrorCode.EXPDY001, path + " is not a valid .xar, it's not a binary resource", new StringValue(path) ); } - return (BinaryDocument) doc; + return lockedDoc; } catch (PermissionDeniedException e) { throw new XPathException(this, EXPathErrorCode.EXPDY003, e.getMessage(), new StringValue(path), e); } diff --git a/extensions/modules/src/org/exist/xquery/modules/file/SerializeToFile.java b/extensions/modules/src/org/exist/xquery/modules/file/SerializeToFile.java index 8918f9e48be..3b2ed240dc7 100644 --- a/extensions/modules/src/org/exist/xquery/modules/file/SerializeToFile.java +++ b/extensions/modules/src/org/exist/xquery/modules/file/SerializeToFile.java @@ -35,6 +35,7 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; +import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; @@ -177,8 +178,10 @@ private Properties parseXMLSerializationOptions(final Sequence sSerializeParams) //parse serialization options final Properties outputProperties = new Properties(); + // defaults outputProperties.setProperty( OutputKeys.INDENT, "yes" ); outputProperties.setProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" ); + outputProperties.setProperty( OutputKeys.ENCODING, "UTF-8" ); if (sSerializeParams.hasOne() && Type.subTypeOf(sSerializeParams.getItemType(), Type.NODE)) { SerializerUtils.getSerializationOptions(this, (NodeValue) sSerializeParams.itemAt(0), outputProperties); @@ -205,7 +208,7 @@ private void serializeXML(final SequenceIterator siNode, final Properties output try (final OutputStream os = Files.newOutputStream(file, ops); - final Writer writer = new OutputStreamWriter(os)) { + final 
Writer writer = new OutputStreamWriter(os, Charset.forName(outputProperties.getProperty(OutputKeys.ENCODING)))) { serializer.setProperties(outputProperties); diff --git a/extensions/modules/src/org/exist/xquery/modules/file/Sync.java b/extensions/modules/src/org/exist/xquery/modules/file/Sync.java index bacefbc6a27..6944fc01907 100644 --- a/extensions/modules/src/org/exist/xquery/modules/file/Sync.java +++ b/extensions/modules/src/org/exist/xquery/modules/file/Sync.java @@ -138,9 +138,7 @@ private void saveCollection(final XmldbURI collectionPath, Path targetDir, final } List subcollections = null; - Collection collection = null; - try { - collection = context.getBroker().openCollection(collectionPath, LockMode.READ_LOCK); + try(final Collection collection = context.getBroker().openCollection(collectionPath, LockMode.READ_LOCK)) { if (collection == null) { reportError(output, "Collection not found: " + collectionPath); return; @@ -160,9 +158,6 @@ private void saveCollection(final XmldbURI collectionPath, Path targetDir, final for (final Iterator i = collection.collectionIterator(context.getBroker()); i.hasNext(); ) { subcollections.add(i.next()); } - } finally { - if (collection != null) - collection.getLock().release(LockMode.READ_LOCK); } for (final XmldbURI childURI : subcollections) { diff --git a/extensions/modules/src/org/exist/xquery/modules/file/UnicodeReader.java b/extensions/modules/src/org/exist/xquery/modules/file/UnicodeReader.java index f47c502e0bd..181ec1cdf9b 100644 --- a/extensions/modules/src/org/exist/xquery/modules/file/UnicodeReader.java +++ b/extensions/modules/src/org/exist/xquery/modules/file/UnicodeReader.java @@ -124,7 +124,8 @@ protected void init() throws IOException String encoding; byte bom[] = new byte[BOM_SIZE]; - int n, unread; + int n; + int unread; n = internalIn.read( bom, 0, bom.length ); if( (bom[0] == (byte)0x00) && (bom[1] == (byte)0x00) && (bom[2] == (byte)0xFE) && (bom[3] == (byte)0xFF) ) { diff --git 
a/extensions/modules/src/org/exist/xquery/modules/image/CropFunction.java b/extensions/modules/src/org/exist/xquery/modules/image/CropFunction.java index aa91ff38bc7..271098dbd65 100644 --- a/extensions/modules/src/org/exist/xquery/modules/image/CropFunction.java +++ b/extensions/modules/src/org/exist/xquery/modules/image/CropFunction.java @@ -117,16 +117,26 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce int y1 = 0; int x2 = MAXHEIGHT; int y2 = MAXWIDTH; - + int width = 0; + int height = 0; if(!args[1].isEmpty()) { x1 = ((IntegerValue) args[1].itemAt(0)).getInt(); if(args[1].hasMany()) { y1 = ((IntegerValue) args[1].itemAt(1)).getInt(); x2 = ((IntegerValue) args[1].itemAt(2)).getInt(); y2 = ((IntegerValue) args[1].itemAt(3)).getInt(); + width = x2 - x1; + height = y2 - y1; } } - + if(width < 1 ) { + logger.error("cropping error: x2 value must be greater than x1"); + return Sequence.EMPTY_SEQUENCE; + } + if(height < 1) { + logger.error("cropping error: y2 must be greater than y1"); + return Sequence.EMPTY_SEQUENCE; + } //get the mime-type String mimeType = args[2].itemAt(0).getStringValue(); String formatName = mimeType.substring(mimeType.indexOf("/")+1); @@ -146,14 +156,14 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce } //crop the image - Image cropImage = Toolkit.getDefaultToolkit().createImage(new FilteredImageSource(image.getSource(), new CropImageFilter(x1, y1, x2, y2))); + Image cropImage = Toolkit.getDefaultToolkit().createImage(new FilteredImageSource(image.getSource(), new CropImageFilter(x1, y1, width, height))); if(cropImage instanceof BufferedImage) { // just in case cropImage is allready an BufferedImage bImage = (BufferedImage)cropImage; } else { - bImage = new BufferedImage(cropImage.getHeight(null), - cropImage.getWidth(null),BufferedImage.TYPE_INT_RGB); + bImage = new BufferedImage(cropImage.getWidth(null), + cropImage.getHeight(null),BufferedImage.TYPE_INT_RGB); Graphics2D g 
= bImage.createGraphics(); // Paint the image onto the buffered image g.drawImage(cropImage, 0, 0, null); g.dispose(); @@ -170,4 +180,4 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce throw new XPathException(this, e.getMessage()); } } -} \ No newline at end of file +} diff --git a/extensions/modules/src/org/exist/xquery/modules/memcached/MemcachedClientFunction.java b/extensions/modules/src/org/exist/xquery/modules/memcached/MemcachedClientFunction.java index 9688db39d4a..4089e3f46e2 100644 --- a/extensions/modules/src/org/exist/xquery/modules/memcached/MemcachedClientFunction.java +++ b/extensions/modules/src/org/exist/xquery/modules/memcached/MemcachedClientFunction.java @@ -85,7 +85,7 @@ public Sequence eval( Sequence[] args, Sequence contextSequence ) throws XPathEx ialist.add(ia); } while (i.hasNext()); - boolean isbinary = args[1].isEmpty() ? false : new Boolean(args[1].itemAt(0).getStringValue()); + final boolean isbinary = args[1].isEmpty() ? 
false : Boolean.valueOf(args[1].itemAt(0).getStringValue()); MemcachedClient client; try { diff --git a/extensions/modules/src/org/exist/xquery/modules/process/Execute.java b/extensions/modules/src/org/exist/xquery/modules/process/Execute.java index b02db460222..a41caf855a3 100644 --- a/extensions/modules/src/org/exist/xquery/modules/process/Execute.java +++ b/extensions/modules/src/org/exist/xquery/modules/process/Execute.java @@ -3,6 +3,8 @@ import org.exist.dom.QName; import org.exist.dom.memtree.ElementImpl; import org.exist.dom.memtree.MemTreeBuilder; +import org.exist.numbering.NodeId; +import org.exist.stax.ExtendedXMLStreamReader; import org.exist.util.FileUtils; import org.exist.xquery.*; import org.exist.xquery.value.*; @@ -58,7 +60,9 @@ public Sequence eval(final Sequence[] args, final Sequence contextSequence) thro Map environment = null; if (!args[1].isEmpty()) { try { - XMLStreamReader reader = context.getXMLStreamReader((NodeValue) args[1].itemAt(0)); + final NodeValue options = (NodeValue) args[1].itemAt(0); + final int thisLevel = options.getNodeId().getTreeLevel(); + final XMLStreamReader reader = context.getXMLStreamReader(options); reader.next(); while (reader.hasNext()) { int status = reader.next(); @@ -78,6 +82,13 @@ public Sequence eval(final Sequence[] args, final Sequence contextSequence) thro if (key != null && value != null) environment.put(key, value); } + } else if (status == XMLStreamReader.END_ELEMENT) { + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + if (otherLevel == thisLevel) { + // finished `optRoot` element... 
+ break; // exit-while + } } } } catch (XMLStreamException | IOException e) { diff --git a/extensions/modules/src/org/exist/xquery/modules/scheduler/ScheduleFunctions.java b/extensions/modules/src/org/exist/xquery/modules/scheduler/ScheduleFunctions.java index 6ce77f81d3a..f1e7fd37591 100644 --- a/extensions/modules/src/org/exist/xquery/modules/scheduler/ScheduleFunctions.java +++ b/extensions/modules/src/org/exist/xquery/modules/scheduler/ScheduleFunctions.java @@ -117,7 +117,8 @@ public class ScheduleFunctions extends BasicFunction new QName( SCHEDULE_XQUERY_CRON_JOB, SchedulerModule.NAMESPACE_URI, SchedulerModule.PREFIX ), "Schedules the named XQuery resource (e.g. /db/foo.xql) according to the Cron expression. " + "XQuery job's will be launched under the guest account initially, although the running XQuery may switch permissions through calls to xmldb:login(). " + - "The job will be registered using the job name.", + "The job will be registered using the job name. " + + "Jobs submitted via this function are transitory and will be lost on a server restart. To ensure the persistence of scheduled tasks add them to the conf.xml file.", new SequenceType[] { new FunctionParameterSequenceType( "xquery-resource", Type.STRING, Cardinality.EXACTLY_ONE, "The path to the XQuery resource" ), @@ -134,7 +135,8 @@ public class ScheduleFunctions extends BasicFunction "The job will be registered using the job name. The final argument can be used to specify " + "parameters for the job, which will be passed to the query as external variables. Parameters are specified " + "in an XML fragment with the following structure: " + - "", + " " + + "Jobs submitted via this function are transitory and will be lost on a server restart. 
To ensure the persistence of scheduled tasks add them to the conf.xml file.", new SequenceType[] { new FunctionParameterSequenceType( "xquery-resource", Type.STRING, Cardinality.EXACTLY_ONE, "The path to the XQuery resource" ), @@ -152,7 +154,8 @@ public class ScheduleFunctions extends BasicFunction "The job will be registered using the job name. The job parameters argument can be used to specify " + "parameters for the job, which will be passed to the query as external variables. Parameters are specified " + "in an XML fragment with the following structure: " + - "", + " " + + "Jobs submitted via this function are transitory and will be lost on a server restart. To ensure the persistence of scheduled tasks add them to the conf.xml file.", new SequenceType[] { new FunctionParameterSequenceType( "xquery-resource", Type.STRING, Cardinality.EXACTLY_ONE, "The path to the XQuery resource" ), @@ -172,7 +175,8 @@ public class ScheduleFunctions extends BasicFunction "parameters for the job, which will be passed to the query as external variables. Parameters are specified " + "in an XML fragment with the following structure: " + "" + - ", Given the delay passed and the repeat value.", + ", Given the delay passed and the repeat value. " + + "Jobs submitted via this function are transitory and will be lost on a server restart. To ensure the persistence of scheduled tasks add them to the conf.xml file.", new SequenceType[] { new FunctionParameterSequenceType( "xquery-resource", Type.STRING, Cardinality.EXACTLY_ONE, "The path to the XQuery resource" ), @@ -193,7 +197,8 @@ public class ScheduleFunctions extends BasicFunction "parameters for the job, which will be passed to the query as external variables. Parameters are specified " + "in an XML fragment with the following structure: " + "" + - ", Given the delay passed and the repeat value.", + ", Given the delay passed and the repeat value. 
" + + "Jobs submitted via this function are transitory and will be lost on a server restart. To ensure the persistence of scheduled tasks add them to the conf.xml file.", new SequenceType[] { new FunctionParameterSequenceType( "xquery-resource", Type.STRING, Cardinality.EXACTLY_ONE, "The path to the XQuery resource" ), diff --git a/extensions/modules/src/org/exist/xquery/modules/simpleql/SimpleQLLexer.java b/extensions/modules/src/org/exist/xquery/modules/simpleql/SimpleQLLexer.java index 4792d2b29b4..a39376c7aec 100644 --- a/extensions/modules/src/org/exist/xquery/modules/simpleql/SimpleQLLexer.java +++ b/extensions/modules/src/org/exist/xquery/modules/simpleql/SimpleQLLexer.java @@ -42,12 +42,12 @@ public SimpleQLLexer(LexerSharedInputState state) { caseSensitiveLiterals = true; setCaseSensitive(true); literals = new Hashtable(); - literals.put(new ANTLRHashString("NOT", this), Integer.valueOf(11)); - literals.put(new ANTLRHashString("OR", this), Integer.valueOf(9)); - literals.put(new ANTLRHashString("NICHT", this), Integer.valueOf(12)); - literals.put(new ANTLRHashString("UND", this), Integer.valueOf(8)); - literals.put(new ANTLRHashString("ODER", this), Integer.valueOf(10)); - literals.put(new ANTLRHashString("AND", this), Integer.valueOf(7)); + literals.put(new ANTLRHashString("NOT", this), new Integer(11)); + literals.put(new ANTLRHashString("OR", this), new Integer(9)); + literals.put(new ANTLRHashString("NICHT", this), new Integer(12)); + literals.put(new ANTLRHashString("UND", this), new Integer(8)); + literals.put(new ANTLRHashString("ODER", this), new Integer(10)); + literals.put(new ANTLRHashString("AND", this), new Integer(7)); } public Token nextToken() throws TokenStreamException { diff --git a/extensions/modules/src/org/exist/xquery/modules/sql/ExecuteFunction.java b/extensions/modules/src/org/exist/xquery/modules/sql/ExecuteFunction.java index d9ed4d0c782..99edb4293f9 100644 --- 
a/extensions/modules/src/org/exist/xquery/modules/sql/ExecuteFunction.java +++ b/extensions/modules/src/org/exist/xquery/modules/sql/ExecuteFunction.java @@ -35,6 +35,7 @@ import org.exist.dom.memtree.MemTreeBuilder; import org.exist.xquery.BasicFunction; import org.exist.xquery.Cardinality; +import org.exist.xquery.ErrorCodes; import org.exist.xquery.FunctionSignature; import org.exist.xquery.XPathException; import org.exist.xquery.XQueryContext; @@ -377,7 +378,8 @@ public ExecuteFunction( XQueryContext context, FunctionSignature signature ) for( int i = 0; i < paramElements.getLength(); i++ ) { Element param = ( (Element)paramElements.item( i ) ); - String value = param.getFirstChild().getNodeValue(); + final Node valueNode = param.getFirstChild(); + String value = valueNode != null ? valueNode.getNodeValue() : null; String type = param.getAttributeNS( SQLModule.NAMESPACE_URI, TYPE_ATTRIBUTE_NAME ); builder.startElement( new QName( PARAM_ELEMENT_NAME, SQLModule.NAMESPACE_URI, SQLModule.PREFIX ), null ); @@ -439,6 +441,8 @@ private void setParametersOnPreparedStatement( Statement stmt, Element parameter for (int i = 0; i < paramElements.getLength(); i++) { Element param = ((Element) paramElements.item(i)); Node child = param.getFirstChild(); + String value; + int sqlType; // Prevent NPE if (child != null) { @@ -446,18 +450,26 @@ private void setParametersOnPreparedStatement( Statement stmt, Element parameter child = ((ReferenceNode) child).getReference().getNode(); } - final String value = child.getNodeValue(); - final String type = param.getAttributeNS(SQLModule.NAMESPACE_URI, TYPE_ATTRIBUTE_NAME); - final int sqlType = SQLUtils.sqlTypeFromString(type); - - if (sqlType == Types.TIMESTAMP) { - final DateTimeValue dv = new DateTimeValue(value); - final Timestamp timestampValue = new Timestamp(dv.getDate().getTime()); - ((PreparedStatement) stmt).setTimestamp(i + 1, timestampValue); - - } else { - ((PreparedStatement) stmt).setObject(i + 1, value, sqlType); - } + 
value = child.getNodeValue(); + } else { + // TODO for VARCHAR, either null or "" could be appropriate (an empty sql:param element is ambiguous) + value = null; + } + + final String type = param.getAttributeNS(SQLModule.NAMESPACE_URI, TYPE_ATTRIBUTE_NAME); + if (type != null) { + sqlType = SQLUtils.sqlTypeFromString(type); + } else { + throw new XPathException(ErrorCodes.ERROR, " must contain attribute sql:type"); + } + + if (sqlType == Types.TIMESTAMP) { + final DateTimeValue dv = new DateTimeValue(value); + final Timestamp timestampValue = new Timestamp(dv.getDate().getTime()); + ((PreparedStatement) stmt).setTimestamp(i + 1, timestampValue); + + } else { + ((PreparedStatement) stmt).setObject(i + 1, value, sqlType); } } diff --git a/extensions/scheduler/src/main/java/org/exist/scheduler/SchedulerManager.java b/extensions/scheduler/src/main/java/org/exist/scheduler/SchedulerManager.java index 538b3c32e27..83d4f730a3a 100644 --- a/extensions/scheduler/src/main/java/org/exist/scheduler/SchedulerManager.java +++ b/extensions/scheduler/src/main/java/org/exist/scheduler/SchedulerManager.java @@ -21,6 +21,7 @@ */ package org.exist.scheduler; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.logging.log4j.LogManager; @@ -28,14 +29,17 @@ import org.exist.Database; import org.exist.EXistException; import org.exist.collections.Collection; +import org.exist.collections.triggers.TriggerException; import org.exist.config.*; import org.exist.config.annotation.*; import org.exist.plugin.Plug; import org.exist.plugin.PluginsManager; +import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; import org.exist.storage.DBBroker; import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; +import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; /** @@ -69,39 +73,32 @@ public SchedulerManager(final PluginsManager pm) { } @Override - public void start(final DBBroker 
broker) throws EXistException { + public void start(final DBBroker broker, final Txn transaction) throws EXistException { final Collection systemCollection; try { systemCollection = broker.getCollection(XmldbURI.SYSTEM); if(systemCollection == null) { - throw new EXistException("/db/system collecton does not exist!"); + throw new EXistException("/db/system collection does not exist!"); } } catch(final PermissionDeniedException e) { throw new EXistException(e); } - - final TransactionManager transaction = broker.getDatabase().getTransactionManager(); - Txn txn = null; try { collection = broker.getCollection(COLLECTION_URI); if (collection == null) { - txn = transaction.beginTransaction(); - collection = broker.getOrCreateCollection(txn, COLLECTION_URI); + collection = broker.getOrCreateCollection(transaction, COLLECTION_URI); if (collection == null){ return; } //if db corrupted it can lead to unrunnable issue //throw new ConfigurationException("Collection '/db/system/scheduler' can't be created."); - collection.setPermissions(0770); - broker.saveCollection(txn, collection); - - transaction.commit(txn); - } - } catch (final Exception e) { - transaction.abort(txn); + collection.setPermissions(broker, Permission.DEFAULT_SYSTEM_COLLECTION_PERM); + broker.saveCollection(transaction, collection); + } + } catch(final TriggerException | PermissionDeniedException | IOException | LockException e) { e.printStackTrace(); LOG.debug("loading configuration failed: " + e.getMessage(), e); } @@ -140,4 +137,4 @@ public Scheduler getScheduler() { public Database getDatabase() { return db; } -} \ No newline at end of file +} diff --git a/extensions/security/ldap/README.txt b/extensions/security/ldap/README.txt index 7c04f098785..818b2486805 100644 --- a/extensions/security/ldap/README.txt +++ b/extensions/security/ldap/README.txt @@ -13,15 +13,15 @@ To enable LDAP authentication you need to make sure that the file /db/system/sec some-ldap-password 
(&(objectClass=user)(sAMAccountName=${account-name})) - .... - ... + ... + .. .... .... (&(objectClass=group)(sAMAccountName=${group-name})) - .... - ... + ... + .. .... .... diff --git a/extensions/security/ldap/src/org/exist/security/realm/TransformationContext.java b/extensions/security/ldap/src/org/exist/security/realm/TransformationContext.java index a948ea8b48a..93860a2eec8 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/TransformationContext.java +++ b/extensions/security/ldap/src/org/exist/security/realm/TransformationContext.java @@ -3,10 +3,8 @@ import java.util.List; /** - * * @author aretter */ public interface TransformationContext { - - public List getAdditionalGroups(); + List getAdditionalGroups(); } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPPrincipalRestrictionList.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPPrincipalRestrictionList.java index c479e02a635..e54fa3b6fc4 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPPrincipalRestrictionList.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPPrincipalRestrictionList.java @@ -2,6 +2,7 @@ import java.util.ArrayList; import java.util.List; + import org.exist.config.Configurable; import org.exist.config.Configuration; import org.exist.config.Configurator; @@ -9,21 +10,20 @@ import org.exist.config.annotation.ConfigurationFieldAsElement; /** - * * @author aretter */ @ConfigurationClass("") public abstract class AbstractLDAPPrincipalRestrictionList implements Configurable { - + @ConfigurationFieldAsElement("principal") - private List principals = new ArrayList(); + private List principals = new ArrayList<>(); protected Configuration configuration; - public AbstractLDAPPrincipalRestrictionList(Configuration config) { + public AbstractLDAPPrincipalRestrictionList(final Configuration config) { this.configuration = Configurator.configure(this, 
config); } - + @Override public Configuration getConfiguration() { return configuration; @@ -33,11 +33,11 @@ public Configuration getConfiguration() { public boolean isConfigured() { return (configuration != null); } - + public List getPrincipals() { return principals; } - + public void addPrincipal(String principal) { this.principals.add(principal); } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPSearchPrincipal.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPSearchPrincipal.java index c1b097a4573..3dc8a1f7fd9 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPSearchPrincipal.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/AbstractLDAPSearchPrincipal.java @@ -4,6 +4,7 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; + import org.exist.config.Configurable; import org.exist.config.Configuration; import org.exist.config.Configurator; @@ -13,7 +14,6 @@ import org.exist.security.AXSchemaType; /** - * * @author aretter */ @ConfigurationClass("") @@ -23,20 +23,20 @@ public abstract class AbstractLDAPSearchPrincipal implements Configurable { protected String searchFilterPrefix = null; @ConfigurationFieldAsElement("search-attribute") - protected Map searchAttributes = new HashMap(); + protected Map searchAttributes = new HashMap<>(); @ConfigurationFieldAsElement("metadata-search-attribute") - protected Map metadataSearchAttributes = new HashMap(); - + protected Map metadataSearchAttributes = new HashMap<>(); + @ConfigurationFieldAsElement("whitelist") protected LDAPPrincipalWhiteList whiteList = null; - + @ConfigurationFieldAsElement("blacklist") protected LDAPPrincipalBlackList blackList = null; protected Configuration configuration; - public AbstractLDAPSearchPrincipal(Configuration config) { + public AbstractLDAPSearchPrincipal(final Configuration config) { this.configuration = Configurator.configure(this, config); } @@ 
-44,18 +44,18 @@ public String getSearchFilterPrefix() { return searchFilterPrefix; } - public String getSearchAttribute(LDAPSearchAttributeKey ldapSearchAttributeKey) { + public String getSearchAttribute(final LDAPSearchAttributeKey ldapSearchAttributeKey) { return searchAttributes.get(ldapSearchAttributeKey.getKey()); } - public String getMetadataSearchAttribute(AXSchemaType axSchemaType) { + public String getMetadataSearchAttribute(final AXSchemaType axSchemaType) { return metadataSearchAttributes.get(axSchemaType.getNamespace()); } public Set getMetadataSearchAttributeKeys() { - Set metadataSearchAttributeKeys = new HashSet(); - for(String key : metadataSearchAttributes.keySet()) { + final Set metadataSearchAttributeKeys = new HashSet<>(); + for (final String key : metadataSearchAttributes.keySet()) { metadataSearchAttributeKeys.add(AXSchemaType.valueOfNamespace(key)); } return metadataSearchAttributeKeys; @@ -78,7 +78,7 @@ public LDAPPrincipalBlackList getBlackList() { public LDAPPrincipalWhiteList getWhiteList() { return whiteList; } - + public enum LDAPSearchAttributeKey { NAME("name"), DN("dn"), @@ -89,17 +89,18 @@ public enum LDAPSearchAttributeKey { OBJECT_SID("objectSid"); private final String key; - LDAPSearchAttributeKey(String key) { + + LDAPSearchAttributeKey(final String key) { this.key = key; } - public String getKey(){ + public String getKey() { return key; } - public static LDAPSearchAttributeKey valueOfKey(String key) { - for(LDAPSearchAttributeKey ldapSearchAttributeKey : LDAPSearchAttributeKey.values()) { - if(ldapSearchAttributeKey.getKey().equals(key)){ + public static LDAPSearchAttributeKey valueOfKey(final String key) { + for (final LDAPSearchAttributeKey ldapSearchAttributeKey : LDAPSearchAttributeKey.values()) { + if (ldapSearchAttributeKey.getKey().equals(key)) { return ldapSearchAttributeKey; } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPAccountImpl.java 
b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPAccountImpl.java index bd8e72c883d..631881c7aa8 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPAccountImpl.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPAccountImpl.java @@ -11,39 +11,38 @@ import org.exist.storage.DBBroker; /** - * * @author aretter */ @ConfigurationClass("account") public class LDAPAccountImpl extends AccountImpl { - public LDAPAccountImpl(AbstractRealm realm, Configuration configuration) throws ConfigurationException { + public LDAPAccountImpl(final AbstractRealm realm, final Configuration configuration) throws ConfigurationException { super(realm, configuration); } - public LDAPAccountImpl(DBBroker broker, AbstractRealm realm, AccountImpl from_user) throws ConfigurationException { + public LDAPAccountImpl(final DBBroker broker, final AbstractRealm realm, final AccountImpl from_user) throws ConfigurationException { super(broker, realm, from_user); } - public LDAPAccountImpl(DBBroker broker, AbstractRealm realm, int id, Account from_user) throws ConfigurationException, PermissionDeniedException { + public LDAPAccountImpl(final DBBroker broker, final AbstractRealm realm, final int id, final Account from_user) throws ConfigurationException, PermissionDeniedException { super(broker, realm, id, from_user); } - public LDAPAccountImpl(DBBroker broker, AbstractRealm realm, String name) throws ConfigurationException { + public LDAPAccountImpl(final DBBroker broker, final AbstractRealm realm, final String name) throws ConfigurationException { super(broker, realm, name); } - public LDAPAccountImpl(DBBroker broker, AbstractRealm realm, int id, String name, String password) throws ConfigurationException { + public LDAPAccountImpl(final DBBroker broker, final AbstractRealm realm, final int id, final String name, final String password) throws ConfigurationException { super(broker, realm, id, name, password); } - 
LDAPAccountImpl(AbstractRealm realm, Configuration config, boolean removed) throws ConfigurationException { + LDAPAccountImpl(final AbstractRealm realm, final Configuration config, final boolean removed) throws ConfigurationException { super(realm, config, removed); } @Override - public Group addGroup(Group group) throws PermissionDeniedException { - if(group instanceof LDAPGroupImpl) { + public Group addGroup(final Group group) throws PermissionDeniedException { + if (group instanceof LDAPGroupImpl) { //TODO //we dont support writes to LDAP yet! return null; @@ -54,13 +53,13 @@ public Group addGroup(Group group) throws PermissionDeniedException { } @Override - public Group addGroup(String name) throws PermissionDeniedException { + public Group addGroup(final String name) throws PermissionDeniedException { Group group = getRealm().getGroup(name); //allow LDAP users to have groups from other realms - if(group == null) { - //if the group is not present in this realm, look externally - group = getRealm().getExternalGroup(name); + if (group == null) { + //if the group is not present in this realm, look externally + group = getRealm().getExternalGroup(name); } return addGroup(group); } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPGroupImpl.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPGroupImpl.java index fe016569d0a..f4042816637 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPGroupImpl.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPGroupImpl.java @@ -8,26 +8,25 @@ import org.exist.storage.DBBroker; /** - * * @author aretter */ @ConfigurationClass("group") public class LDAPGroupImpl extends AbstractGroup { - public LDAPGroupImpl(AbstractRealm realm, Configuration configuration) throws ConfigurationException { + public LDAPGroupImpl(final AbstractRealm realm, final Configuration configuration) throws ConfigurationException { super(realm, configuration); } - 
public LDAPGroupImpl(DBBroker broker, AbstractRealm realm, int id, String name) throws ConfigurationException { + public LDAPGroupImpl(final DBBroker broker, final AbstractRealm realm, final int id, final String name) throws ConfigurationException { super(broker, realm, id, name, null); } - LDAPGroupImpl(AbstractRealm realm, Configuration config, boolean removed) throws ConfigurationException { + LDAPGroupImpl(final AbstractRealm realm, final Configuration config, final boolean removed) throws ConfigurationException { this(realm, config); this.removed = removed; } - LDAPGroupImpl(DBBroker broker, AbstractRealm realm, String name) throws ConfigurationException { + LDAPGroupImpl(final DBBroker broker, final AbstractRealm realm, final String name) throws ConfigurationException { super(broker, realm, name); } } \ No newline at end of file diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalBlackList.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalBlackList.java index a76b5d40ea2..0032b9c4791 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalBlackList.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalBlackList.java @@ -6,18 +6,17 @@ import org.exist.config.annotation.ConfigurationClass; /** - * * @author aretter */ @ConfigurationClass("blacklist") public class LDAPPrincipalBlackList extends AbstractLDAPPrincipalRestrictionList implements Configurable { - - public LDAPPrincipalBlackList(Configuration config) { + + public LDAPPrincipalBlackList(final Configuration config) { super(config); //it require, because class's fields initializing after super constructor - if(this.configuration != null) { + if (this.configuration != null) { this.configuration = Configurator.configure(this, this.configuration); } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalWhiteList.java 
b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalWhiteList.java index 5ee71e84fbf..a203305ea9f 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalWhiteList.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPPrincipalWhiteList.java @@ -6,18 +6,17 @@ import org.exist.config.annotation.ConfigurationClass; /** - * * @author aretter */ @ConfigurationClass("whitelist") public class LDAPPrincipalWhiteList extends AbstractLDAPPrincipalRestrictionList implements Configurable { - - public LDAPPrincipalWhiteList(Configuration config) { + + public LDAPPrincipalWhiteList(final Configuration config) { super(config); //it require, because class's fields initializing after super constructor - if(this.configuration != null) { + if (this.configuration != null) { this.configuration = Configurator.configure(this, this.configuration); } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPRealm.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPRealm.java index 71b13e4fc50..ad74dced8bf 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPRealm.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPRealm.java @@ -2,21 +2,21 @@ * eXist Open Source Native XML Database * Copyright (C) 2010 The eXist Project * http://exist-db.org - * + * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. 
- * + * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - * + * * $Id$ */ package org.exist.security.realm.ldap; @@ -24,11 +24,10 @@ import java.lang.reflect.Field; import java.util.AbstractMap.SimpleEntry; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Optional; import java.util.Set; +import javax.annotation.Nullable; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.directory.Attribute; @@ -37,6 +36,9 @@ import javax.naming.directory.SearchControls; import javax.naming.directory.SearchResult; import javax.naming.ldap.LdapContext; + +import com.evolvedbinary.j8fu.tuple.Tuple2; +import com.evolvedbinary.j8fu.function.BiFunction3E; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.EXistException; @@ -57,6 +59,9 @@ import org.exist.security.internal.aider.UserAider; import org.exist.security.realm.ldap.AbstractLDAPSearchPrincipal.LDAPSearchAttributeKey; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; + +import static com.evolvedbinary.j8fu.tuple.Tuple.Tuple; /** * @author Dmitriy Shabanov @@ -65,34 +70,30 @@ @ConfigurationClass("realm") //TODO: id = LDAP public class LDAPRealm extends AbstractRealm { - private final static Logger LOG = LogManager.getLogger(LDAPRealm.class); + private static final Logger LOG = LogManager.getLogger(LDAPRealm.class); @ConfigurationFieldAsAttribute("id") public static String ID = "LDAP"; @ConfigurationFieldAsAttribute("version") - public final static String version = "1.0"; - + public static final String version = "1.0"; + @ConfigurationFieldAsAttribute("principals-are-case-insensitive") private boolean principalsAreCaseInsensitive; @ConfigurationFieldAsElement("context") protected LdapContextFactory 
ldapContextFactory; - public LDAPRealm(SecurityManagerImpl sm, Configuration config) { + public LDAPRealm(final SecurityManagerImpl sm, final Configuration config) { super(sm, config); } protected LdapContextFactory ensureContextFactory() { - if(this.ldapContextFactory == null) { - - if(LOG.isDebugEnabled()) { + if (this.ldapContextFactory == null) { + if (LOG.isDebugEnabled()) { LOG.debug("No LdapContextFactory specified - creating a default instance."); } - - LdapContextFactory factory = new LdapContextFactory(configuration); - - this.ldapContextFactory = factory; + this.ldapContextFactory = new LdapContextFactory(configuration); } return this.ldapContextFactory; } @@ -103,47 +104,49 @@ public String getId() { } @Override - public void start(DBBroker broker) throws EXistException { - super.start(broker); + public void start(final DBBroker broker, final Txn transaction) throws EXistException { + super.start(broker, transaction); } private String ensureCase(final String username) { - if(username == null){ + if (username == null) { return null; } - - if (principalsAreCaseInsensitive) { + + if (principalsAreCaseInsensitive) { return username.toLowerCase(); } return username; } - + @Override public Subject authenticate(final String username, final Object credentials) throws AuthenticationException { - + final String name = ensureCase(username); - + // Binds using the username and password provided by the user. 
LdapContext ctx = null; try { - ctx = ensureContextFactory().getLdapContext(name, String.valueOf(credentials)); + ctx = getContextWithCredentials(Optional.of(Tuple(name, String.valueOf(credentials)))); final AbstractAccount account = (AbstractAccount) getAccount(ctx, name); if (account == null) { if (LOG.isDebugEnabled()) { - LOG.debug("Account '"+name+"' can not be found."); + LOG.debug("Account '" + name + "' can not be found."); } - throw new AuthenticationException( - AuthenticationException.ACCOUNT_NOT_FOUND, - "Account '"+name+"' can not be found."); + throw new AuthenticationException( + AuthenticationException.ACCOUNT_NOT_FOUND, + "Account '" + name + "' can not be found."); } return new AuthenticatedLdapSubjectAccreditedImpl(account, ctx, String.valueOf(credentials)); - } catch(final NamingException e) { - LOG.debug(e.getMessage(), e); - if(e instanceof javax.naming.AuthenticationException) { + } catch (final NamingException e) { + if (LOG.isDebugEnabled()) { + LOG.debug(e.getMessage(), e); + } + if (e instanceof javax.naming.AuthenticationException) { throw new AuthenticationException(AuthenticationException.ACCOUNT_NOT_FOUND, e.getMessage()); } else { throw new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, e.getMessage()); @@ -153,197 +156,184 @@ public Subject authenticate(final String username, final Object credentials) thr LdapUtils.closeContext(ctx); } } - + private List getGroupMembershipForLdapUser(final LdapContext ctx, final DBBroker broker, final SearchResult ldapUser) throws NamingException { - final List memberOf_groups = new ArrayList<>(); - + final LDAPSearchContext search = ensureContextFactory().getSearch(); - final String userDistinguishedName = (String)ldapUser.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.DN)).get(); + final String userDistinguishedName = (String) ldapUser.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.DN)).get(); 
final List memberOf_groupNames = findGroupnamesForUserDistinguishedName(ctx, userDistinguishedName); - for(final String memberOf_groupName : memberOf_groupNames) { + for (final String memberOf_groupName : memberOf_groupNames) { memberOf_groups.add(getGroup(ctx, broker, memberOf_groupName)); } - + //TODO expand to a general method that rewrites the useraider based on the realTransformation - if(ensureContextFactory().getTransformationContext() != null){ + if (ensureContextFactory().getTransformationContext() != null) { final List additionalGroupNames = ensureContextFactory().getTransformationContext().getAdditionalGroups(); - if(additionalGroupNames != null) { - for(final String additionalGroupName : additionalGroupNames) { + if (additionalGroupNames != null) { + for (final String additionalGroupName : additionalGroupNames) { final Group additionalGroup = getSecurityManager().getGroup(additionalGroupName); - if(additionalGroup != null) { + if (additionalGroup != null) { memberOf_groups.add(additionalGroup); } } } } - + return memberOf_groups; } - + private List> getMetadataForLdapUser(final SearchResult ldapUser) throws NamingException { - - final List> metadata = new ArrayList>(); - + final List> metadata = new ArrayList<>(); final LDAPSearchAccount searchAccount = ensureContextFactory().getSearch().getSearchAccount(); - final Attributes userAttributes = ldapUser.getAttributes(); - + //store any requested metadata - for(final AXSchemaType axSchemaType : searchAccount.getMetadataSearchAttributeKeys()) { + for (final AXSchemaType axSchemaType : searchAccount.getMetadataSearchAttributeKeys()) { final String searchAttribute = searchAccount.getMetadataSearchAttribute(axSchemaType); - if(userAttributes != null) { + if (userAttributes != null) { final Attribute userAttribute = userAttributes.get(searchAttribute); - if(userAttribute != null) { + if (userAttribute != null) { final String attributeValue = userAttribute.get().toString(); - metadata.add(new 
SimpleEntry(axSchemaType, attributeValue)); + metadata.add(new SimpleEntry<>(axSchemaType, attributeValue)); } } } - + return metadata; } - - public Account refreshAccountFromLdap(final Account account) throws PermissionDeniedException, AuthenticationException{ - + + public Account refreshAccountFromLdap(final Account account) throws PermissionDeniedException, AuthenticationException { final int UPDATE_NONE = 0; final int UPDATE_GROUP = 1; final int UPDATE_METADATA = 2; - + final Subject invokingUser = getSecurityManager().getCurrentSubject(); - - if(!invokingUser.hasDbaRole() && invokingUser.getId() != account.getId()) { + + if (!invokingUser.hasDbaRole() && invokingUser.getId() != account.getId()) { throw new PermissionDeniedException("You do not have permission to modify the account"); } - + + LdapContext ctx = null; try { - final LdapContext ctx = getContext(invokingUser); + ctx = getContext(invokingUser); final SearchResult ldapUser = findAccountByAccountName(ctx, account.getName()); - if(ldapUser == null) { + if (ldapUser == null) { throw new AuthenticationException(AuthenticationException.ACCOUNT_NOT_FOUND, "Could not find the account in the LDAP"); } - - return executeAsSystemUser(ctx, new Unit(){ - @Override - public Account execute(LdapContext ctx, DBBroker broker) throws EXistException, PermissionDeniedException, NamingException { - + + return executeAsSystemUser(ctx, (ctx2, broker) -> { + int update = UPDATE_NONE; - + //1) get the ldap group membership - final List memberOf_groups = getGroupMembershipForLdapUser(ctx, broker, ldapUser); - + final List memberOf_groups = getGroupMembershipForLdapUser(ctx2, broker, ldapUser); + //2) get the ldap primary group - final String primaryGroup = findGroupBySID(ctx, getPrimaryGroupSID(ldapUser)); - + final String primaryGroup = findGroupBySID(ctx2, getPrimaryGroupSID(ldapUser)); + //append the ldap primaryGroup to the head of the ldap group list, and compare //to the account group list - memberOf_groups.add(0, 
getGroup(ctx, broker, primaryGroup)); + memberOf_groups.add(0, getGroup(ctx2, broker, primaryGroup)); final String accountGroups[] = account.getGroups(); - - if(!accountGroups[0].equals(ensureCase(primaryGroup))) { + + if (!accountGroups[0].equals(ensureCase(primaryGroup))) { update |= UPDATE_GROUP; } else { - if(accountGroups.length != memberOf_groups.size()) { + if (accountGroups.length != memberOf_groups.size()) { update |= UPDATE_GROUP; } else { - for(int i = 0; i < accountGroups.length; i++) { - + for (final String accountGroup : accountGroups) { boolean found = false; - for(Group memberOf_group : memberOf_groups) { - if(accountGroups[i].equals(ensureCase(memberOf_group.getName()))) { + for (final Group memberOf_group : memberOf_groups) { + if (accountGroup.equals(ensureCase(memberOf_group.getName()))) { found = true; break; } } - if(!found) { + if (!found) { update |= UPDATE_GROUP; break; } } } } - + //3) check metadata final List> ldapMetadatas = getMetadataForLdapUser(ldapUser); final Set accountMetadataKeys = account.getMetadataKeys(); - if(accountMetadataKeys.size() != ldapMetadatas.size()) { + if (accountMetadataKeys.size() != ldapMetadatas.size()) { update |= UPDATE_METADATA; } else { - for(SchemaType accountMetadataKey : accountMetadataKeys) { + for (SchemaType accountMetadataKey : accountMetadataKeys) { final String accountMetadataValue = account.getMetadataValue(accountMetadataKey); boolean found = false; - for(SimpleEntry ldapMetadata : ldapMetadatas) { - if(accountMetadataKey.equals(ldapMetadata.getKey()) && accountMetadataValue.equals(ldapMetadata.getValue())) { + for (SimpleEntry ldapMetadata : ldapMetadatas) { + if (accountMetadataKey.equals(ldapMetadata.getKey()) && accountMetadataValue.equals(ldapMetadata.getValue())) { found = true; break; } } - if(!found) { + if (!found) { update |= UPDATE_METADATA; break; } } } - + //update the groups? 
- if((update & UPDATE_GROUP) == UPDATE_GROUP) { + if ((update & UPDATE_GROUP) == UPDATE_GROUP) { try { - Field fld = account.getClass().getSuperclass().getDeclaredField("groups"); + final Field fld = account.getClass().getSuperclass().getDeclaredField("groups"); fld.setAccessible(true); fld.set(account, memberOf_groups); - } catch(NoSuchFieldException nsfe) { + } catch (final NoSuchFieldException | IllegalAccessException nsfe) { throw new EXistException(nsfe.getMessage(), nsfe); - } catch(IllegalAccessException iae) { - throw new EXistException(iae.getMessage(), iae); } } - + //update the metdata? - if((update & UPDATE_METADATA) == UPDATE_METADATA) { + if ((update & UPDATE_METADATA) == UPDATE_METADATA) { account.clearMetadata(); - for(SimpleEntry ldapMetadata : ldapMetadatas) { + for (final SimpleEntry ldapMetadata : ldapMetadatas) { account.setMetadataValue(ldapMetadata.getKey(), ldapMetadata.getValue()); } } - - if(update != UPDATE_NONE) { - boolean updated = getSecurityManager().updateAccount(account); - if(!updated) { + + if (update != UPDATE_NONE) { + final boolean updated = getSecurityManager().updateAccount(account); + if (!updated) { LOG.error("Could not update account"); } } return account; - } }); - } catch(final NamingException ne) { + } catch (final NamingException | EXistException ne) { throw new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage(), ne); - } catch(final EXistException ee) { - throw new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ee.getMessage(), ee); + } finally { + LdapUtils.closeContext(ctx); } - } - + private Account createAccountInDatabase(final LdapContext ctx, final String username, final SearchResult ldapUser, final String primaryGroupName) throws AuthenticationException { - final LDAPSearchAccount searchAccount = ensureContextFactory().getSearch().getSearchAccount(); + //final LDAPSearchAccount searchAccount = ensureContextFactory().getSearch().getSearchAccount(); try { - 
return executeAsSystemUser(ctx, new Unit(){ - @Override - public Account execute(final LdapContext ctx, final DBBroker broker) throws EXistException, PermissionDeniedException, NamingException { - - if(LOG.isDebugEnabled()) { - LOG.debug("Saving account '"+username+"'."); - } - + return executeAsSystemUser(ctx, (ctx2, broker) -> { + + if (LOG.isDebugEnabled()) { + LOG.debug("Saving account '" + username + "'."); + } + //get (or create) the primary group if it doesnt exist final Group primaryGroup = getGroup(ctx, broker, primaryGroupName); @@ -361,12 +351,12 @@ public Account execute(final LdapContext ctx, final DBBroker broker) throws EXis final UserAider userAider = new UserAider(ID, username, primaryGroup); //add the member groups - for(final Group memberOf_group : getGroupMembershipForLdapUser(ctx, broker, ldapUser)) { + for (final Group memberOf_group : getGroupMembershipForLdapUser(ctx, broker, ldapUser)) { userAider.addGroup(memberOf_group); } //store any requested metadata - for(final SimpleEntry metadata : getMetadataForLdapUser(ldapUser)) { + for (final SimpleEntry metadata : getMetadataForLdapUser(ldapUser)) { userAider.setMetadataValue(metadata.getKey(), metadata.getValue()); } @@ -397,99 +387,121 @@ public Account execute(final LdapContext ctx, final DBBroker broker) throws EXis }*/ return account; - } }); - } catch(final Exception e) { - LOG.debug(e); + } catch (final Exception e) { + if (LOG.isDebugEnabled()) { + LOG.debug(e); + } throw new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, e.getMessage(), e); } } - private interface Unit { - public R execute(LdapContext ctx, DBBroker broker) throws EXistException, PermissionDeniedException, NamingException; - } - - private R executeAsSystemUser(final LdapContext ctx, final Unit unit) throws EXistException, PermissionDeniedException, NamingException { - try(final DBBroker broker = getDatabase().get(Optional.of(getSecurityManager().getSystemSubject()))) { + private interface LDAPFunction 
extends BiFunction3E {} + + private R executeAsSystemUser(final LdapContext ctx, final LDAPFunction ldapFunction) throws EXistException, PermissionDeniedException, NamingException { + try (final DBBroker broker = getDatabase().get(Optional.of(getSecurityManager().getSystemSubject()))) { //perform as SYSTEM user - return unit.execute(ctx, broker); + return ldapFunction.apply(ctx, broker); } } - + private Group createGroupInDatabase(final DBBroker broker, final String groupname) throws AuthenticationException { try { //return sm.addGroup(instantiateGroup(this, groupname)); return getSecurityManager().addGroup(broker, new GroupAider(ID, groupname)); - } catch(Exception e) { + } catch (Exception e) { throw new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, e.getMessage(), e); } } - private LdapContext getContext(final Subject invokingUser) throws NamingException { + private LdapContext getContext(@Nullable final Subject invokingUser) throws NamingException { + return getContext(Optional.ofNullable(invokingUser)); + } + + private LdapContext getContext(final Optional invokingUser) throws NamingException { + final Optional> credentials = invokingUser + .filter(subject -> subject instanceof AuthenticatedLdapSubjectAccreditedImpl) + .map(subject -> (AuthenticatedLdapSubjectAccreditedImpl) subject) + .map(subject -> Tuple(subject.getUsername(), subject.getAuthenticatedCredentials())); + + return getContextWithCredentials(credentials); + } + + /** + * Gets an LDAP Context for the provided user details, + * or if none are provided the default configured + * credentials are used. 
+ * + * @param optCredentials Explicit credentials + * @return An LDAP Context + */ + private LdapContext getContextWithCredentials(final Optional> optCredentials) throws NamingException { final LdapContextFactory ctxFactory = ensureContextFactory(); - final LdapContext ctx; - if(invokingUser != null && invokingUser instanceof AuthenticatedLdapSubjectAccreditedImpl) { - //use the provided credentials for the lookup - ctx = ctxFactory.getLdapContext(invokingUser.getUsername(), ((AuthenticatedLdapSubjectAccreditedImpl) invokingUser).getAuthenticatedCredentials(), null); - } else { - //use the default credentials for lookup - LDAPSearchContext searchCtx = ctxFactory.getSearch(); - ctx = ctxFactory.getLdapContext(searchCtx.getDefaultUsername(), searchCtx.getDefaultPassword(), null); - } - return ctx; + final Tuple2 credentials = optCredentials.orElseGet(() -> defaultCredentials(ctxFactory)); + return ctxFactory.getLdapContext(credentials._1, credentials._2, null); + } + + private Tuple2 defaultCredentials(final LdapContextFactory ctxFactory) { + final LDAPSearchContext searchCtx = ctxFactory.getSearch(); + return Tuple(searchCtx.getDefaultUsername(), searchCtx.getDefaultPassword()); } @Override public final synchronized Account getAccount(String name) { name = ensureCase(name); - + //first attempt to get the cached account final Account acct = super.getAccount(name); - if(acct != null) { + if (acct != null) { return acct; } else { LdapContext ctx = null; try { ctx = getContext(getSecurityManager().getDatabase().getActiveBroker().getCurrentSubject()); return getAccount(ctx, name); - } catch(final NamingException ne) { - LOG.debug(ne.getMessage(), ne); - LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); + } catch (final NamingException ne) { + if (LOG.isDebugEnabled()) { + LOG.debug(ne.getMessage(), ne); + } + LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); return null; } 
finally { - if(ctx != null){ + if (ctx != null) { LdapUtils.closeContext(ctx); } } } } - public final synchronized Account getAccount(final LdapContext ctx, String name) { - + private synchronized Account getAccount(final LdapContext ctx, String name) { name = ensureCase(name); - + if (LOG.isDebugEnabled()) { - LOG.debug("Get request for account '"+name+"'."); + LOG.debug("Get request for account '" + name + "'."); } - + //first attempt to get the cached account final Account acct = super.getAccount(name); - if(acct != null) { - LOG.debug("Cached used."); + if (acct != null) { + if (LOG.isDebugEnabled()) { + LOG.debug("Cached used."); + } //XXX: synchronize with LDAP return acct; } else { //if the account is not cached, we should try and find it in LDAP and cache it if it exists - try{ + try { //do the lookup final SearchResult ldapUser = findAccountByAccountName(ctx, name); - LOG.debug("LDAP search return '"+ldapUser+"'."); + if (LOG.isDebugEnabled()) { + LOG.debug("LDAP search return '" + ldapUser + "'."); + } - if(ldapUser == null) { + if (ldapUser == null) { return null; } else { //found a user from ldap so cache them and return @@ -505,162 +517,155 @@ public final synchronized Account getAccount(final LdapContext ctx, String name) } return createAccountInDatabase(ctx, name, ldapUser, ensureCase(primaryGroup)); //registerAccount(acct); //TODO do we need this - } catch(final AuthenticationException ae) { + } catch (final AuthenticationException ae) { LOG.error(ae.getMessage(), ae); return null; } } - } catch(final NamingException ne) { - LOG.debug(ne.getMessage(), ne); - //LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); - return null; - } finally { - if(ctx != null){ - LdapUtils.closeContext(ctx); + } catch (final NamingException ne) { + if (LOG.isDebugEnabled()) { + LOG.debug(ne.getMessage(), ne); } + //LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); + return 
null; } } } - + /** * The binary data is in form: * byte[0] - revision level * byte[1] - count of sub-authorities * byte[2-7] - 48 bit authority (big-endian) * and then count x 32 bit sub authorities (little-endian) - * + *

* The String value is: S-Revision-Authority-SubAuthority[n]... - * + *

* http://forums.oracle.com/forums/thread.jspa?threadID=1155740&tstart=0 */ private static String decodeSID(final byte[] sid) { - + final StringBuilder strSid = new StringBuilder("S-"); // get version final int revision = sid[0]; strSid.append(Integer.toString(revision)); - + //next byte is the count of sub-authorities final int countSubAuths = sid[1] & 0xFF; - + //get the authority long authority = 0; //String rid = ""; - for(int i = 2; i <= 7; i++) { - authority |= ((long)sid[i]) << (8 * (5 - (i - 2))); + for (int i = 2; i <= 7; i++) { + authority |= ((long) sid[i]) << (8 * (5 - (i - 2))); } strSid.append("-"); strSid.append(Long.toHexString(authority)); - + //iterate all the sub-auths int offset = 8; int size = 4; //4 bytes for each sub auth - for(int j = 0; j < countSubAuths; j++) { + for (int j = 0; j < countSubAuths; j++) { long subAuthority = 0; - for(int k = 0; k < size; k++) { - subAuthority |= (long)(sid[offset + k] & 0xFF) << (8 * k); + for (int k = 0; k < size; k++) { + subAuthority |= (long) (sid[offset + k] & 0xFF) << (8 * k); } - + strSid.append("-"); strSid.append(subAuthority); - + offset += size; } - - return strSid.toString(); + + return strSid.toString(); } - + private String getPrimaryGroupSID(final SearchResult ldapUser) throws NamingException { final LDAPSearchContext search = ensureContextFactory().getSearch(); - + final Object objSID = ldapUser.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.OBJECT_SID)).get(); final String strObjectSid; if (objSID instanceof String) { strObjectSid = objSID.toString(); } else { - strObjectSid = decodeSID((byte[])objSID); + strObjectSid = decodeSID((byte[]) objSID); } - - final String strPrimaryGroupID = (String)ldapUser.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.PRIMARY_GROUP_ID)).get(); - + + final String strPrimaryGroupID = (String) 
ldapUser.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.PRIMARY_GROUP_ID)).get(); + return strObjectSid.substring(0, strObjectSid.lastIndexOf('-') + 1) + strPrimaryGroupID; } - + public final synchronized Group getGroup(final Subject invokingUser, final DBBroker broker, String name) { name = ensureCase(name); - + final Group grp = getGroup(name); - if(grp != null) { + if (grp != null) { return grp; } else { //if the group is not cached, we should try and find it in LDAP and cache it if it exists LdapContext ctx = null; try { ctx = getContext(invokingUser); - + return getGroup(ctx, broker, name); - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); return null; } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } } } - public final synchronized Group getGroup(final LdapContext ctx, final DBBroker broker, final String name) { - - if(name == null){ + private synchronized Group getGroup(final LdapContext ctx, final DBBroker broker, final String name) { + if (name == null) { return null; } - + final String gName = ensureCase(name); - + final Group grp = getGroup(gName); - if(grp != null) { + if (grp != null) { return grp; } else { //if the group is not cached, we should try and find it in LDAP and cache it if it exists try { //do the lookup final SearchResult ldapGroup = findGroupByGroupName(ctx, removeDomainPostfix(gName)); - if(ldapGroup == null) { + if (ldapGroup == null) { return null; } else { //found a group from ldap so cache them and return try { return createGroupInDatabase(broker, gName); //registerGroup(grp); //TODO do we need to do this? 
- } catch(final AuthenticationException ae) { + } catch (final AuthenticationException ae) { LOG.error(ae.getMessage(), ae); return null; } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); return null; - } finally { - if(ctx != null) { - LdapUtils.closeContext(ctx); - } } } } - + private String addDomainPostfix(final String principalName) { String name = principalName; - if(name.indexOf("@") == -1){ + if (!name.contains("@")) { name += '@' + ensureContextFactory().getDomain(); } return name; } - + private String removeDomainPostfix(final String principalName) { String name = principalName; - if(name.indexOf('@') > -1 && name.endsWith(ensureContextFactory().getDomain())) { + if (name.contains("@") && name.endsWith(ensureContextFactory().getDomain())) { name = name.substring(0, name.indexOf('@')); } return name; @@ -670,57 +675,71 @@ private boolean checkAccountRestrictionList(final String accountname) { final LDAPSearchContext search = ensureContextFactory().getSearch(); return checkPrincipalRestrictionList(accountname, search.getSearchAccount()); } - + private boolean checkGroupRestrictionList(final String groupname) { final LDAPSearchContext search = ensureContextFactory().getSearch(); return checkPrincipalRestrictionList(groupname, search.getSearchGroup()); } - + private boolean checkPrincipalRestrictionList(final String principalName, final AbstractLDAPSearchPrincipal searchPrinciple) { - + String name = ensureCase(principalName); - - if(name.indexOf('@') > -1) { - name = name.substring(0, name.indexOf('@')); + + if (name.indexOf('@') > -1) { + name = name.substring(0, name.indexOf('@')); } - + List blackList = null; - if(searchPrinciple.getBlackList() != null) { + if (searchPrinciple.getBlackList() != null) { blackList = searchPrinciple.getBlackList().getPrincipals(); } - + List whiteList = null; - if(searchPrinciple.getWhiteList() != 
null) { + if (searchPrinciple.getWhiteList() != null) { whiteList = searchPrinciple.getWhiteList().getPrincipals(); } - - if(blackList != null) { - for(String blackEntry : blackList) { - if(ensureCase(blackEntry).equals(name)) { + + if (blackList != null) { + for (String blackEntry : blackList) { + if (ensureCase(blackEntry).equals(name)) { return false; } } } - - if(whiteList != null && whiteList.size() > 0) { - for(String whiteEntry : whiteList) { - if(ensureCase(whiteEntry).equals(name)) { + + if (whiteList != null && whiteList.size() > 0) { + for (String whiteEntry : whiteList) { + if (ensureCase(whiteEntry).equals(name)) { return true; } } return false; } - + return true; } - + + /** + * Escapes '\', '(', and ')' characters. + * + * @param searchAttribute The search attribute string. + * + * @return the escaped search attribute. + */ + private String escapeSearchAttribute(final String searchAttribute) { + return searchAttribute + .replace("\\", "\\5c") + .replace("(", "\\28") + .replace(")", "\\29"); + } + private SearchResult findAccountByAccountName(final DirContext ctx, final String accountName) throws NamingException { - if(!checkAccountRestrictionList(accountName)) { + if (!checkAccountRestrictionList(accountName)) { return null; } - - final String userName = removeDomainPostfix(accountName); + + final String userName = escapeSearchAttribute(removeDomainPostfix(accountName)); final LDAPSearchContext search = ensureContextFactory().getSearch(); final SearchAttribute sa = new SearchAttribute(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME), userName); @@ -732,52 +751,52 @@ private SearchResult findAccountByAccountName(final DirContext ctx, final String final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); SearchResult searchResult = null; - if(results.hasMoreElements()) { - searchResult = (SearchResult) results.nextElement(); + if (results.hasMoreElements()) { + searchResult = 
results.nextElement(); //make sure there is not another item available, there should be only 1 match - if(results.hasMoreElements()) { + if (results.hasMoreElements()) { LOG.error("Matched multiple users for the accountName: " + accountName); } } - + return searchResult; } private String findGroupBySID(final DirContext ctx, final String sid) throws NamingException { - + final LDAPSearchContext search = ensureContextFactory().getSearch(); final SearchAttribute sa = new SearchAttribute(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.OBJECT_SID), sid); final String searchFilter = buildSearchFilter(search.getSearchGroup().getSearchFilterPrefix(), sa); final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - + final NamingEnumeration results = ctx.search(search.getAbsoluteBase(), searchFilter, searchControls); - if(results.hasMoreElements()) { - SearchResult searchResult = (SearchResult) results.nextElement(); + if (results.hasMoreElements()) { + SearchResult searchResult = results.nextElement(); //make sure there is not another item available, there should be only 1 match - if(results.hasMoreElements()) { + if (results.hasMoreElements()) { LOG.error("Matched multiple groups for the group with SID: " + sid); return null; } else { - return addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get()); + return addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get()); } } LOG.error("Matched no group with SID: " + sid); return null; } - + private SearchResult findGroupByGroupName(final DirContext ctx, final String groupName) throws NamingException { - if(!checkGroupRestrictionList(groupName)) { + if (!checkGroupRestrictionList(groupName)) { return null; } - + final LDAPSearchContext search = ensureContextFactory().getSearch(); - final 
SearchAttribute sa = new SearchAttribute(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME), groupName); + final SearchAttribute sa = new SearchAttribute(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME), escapeSearchAttribute(groupName)); final String searchFilter = buildSearchFilter(search.getSearchGroup().getSearchFilterPrefix(), sa); final SearchControls searchControls = new SearchControls(); @@ -785,11 +804,11 @@ private SearchResult findGroupByGroupName(final DirContext ctx, final String gro final NamingEnumeration results = ctx.search(search.getAbsoluteBase(), searchFilter, searchControls); - if(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); + if (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); //make sure there is not another item available, there should be only 1 match - if(results.hasMoreElements()) { + if (results.hasMoreElements()) { LOG.error("Matched multiple groups for the groupName: " + groupName); return null; } else { @@ -817,9 +836,9 @@ public boolean updateAccount(final Account account) throws PermissionDeniedExcep } @Override - public boolean deleteAccount(final Account account) throws PermissionDeniedException, EXistException { + public boolean deleteAccount(final Account account) { // TODO we dont support writting to LDAP - //XXX: delete local cache? + //XXX: delete local cache? return false; } @@ -829,16 +848,16 @@ public boolean updateGroup(final Group group) throws PermissionDeniedException, } @Override - public boolean deleteGroup(final Group group) throws PermissionDeniedException, EXistException { - //XXX: delete local cache? + public boolean deleteGroup(final Group group) { + //XXX: delete local cache? 
return false; } private class SearchAttribute { private final String name; private final String value; - - public SearchAttribute(final String name, final String value) { + + SearchAttribute(final String name, final String value) { this.name = name; this.value = value; } @@ -851,14 +870,14 @@ public String getValue() { return value; } } - + private String buildSearchFilter(final String searchPrefix, final SearchAttribute sa) { final StringBuilder builder = new StringBuilder(); builder.append("("); builder.append(buildSearchCriteria(searchPrefix)); - if(sa.getName() != null && sa.getValue() != null) { + if (sa.getName() != null && sa.getValue() != null) { builder.append("("); builder.append(sa.getName()); builder.append("="); @@ -868,41 +887,41 @@ private String buildSearchFilter(final String searchPrefix, final SearchAttribut builder.append(")"); return builder.toString(); } - + private String buildSearchFilterUnion(final String searchPrefix, final List searchAttributes) { final StringBuilder builder = new StringBuilder(); builder.append("("); builder.append(buildSearchCriteria(searchPrefix)); - if(!searchAttributes.isEmpty()) { + if (!searchAttributes.isEmpty()) { builder.append("(|"); - - for(SearchAttribute sa : searchAttributes) { + + for (final SearchAttribute sa : searchAttributes) { builder.append("("); builder.append(sa.getName()); builder.append("="); builder.append(sa.getValue()); builder.append(")"); } - + builder.append(")"); } - + builder.append(")"); return builder.toString(); } - private String buildSearchCriteria(String searchPrefix) { + private String buildSearchCriteria(final String searchPrefix) { return "&(" + searchPrefix + ")"; } @Override public List findUsernamesWhereNameStarts(String startsWith) { - - startsWith = ensureCase(startsWith); - - final List usernames = new ArrayList(); + + startsWith = escapeSearchAttribute(ensureCase(startsWith)); + + final List usernames = new ArrayList<>(); LdapContext ctx = null; try { @@ -914,66 +933,66 @@ 
public List findUsernamesWhereNameStarts(String startsWith) { final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String username = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkAccountRestrictionList(username)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String username = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkAccountRestrictionList(username)) { usernames.add(username); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } return usernames; } - + @Override public List findUsernamesWhereNamePartStarts(final String startsWith) { - - final String sWith = ensureCase(startsWith); - - final List usernames = new ArrayList(); + + final String sWith = escapeSearchAttribute(ensureCase(startsWith)); + + final List usernames = new ArrayList<>(); LdapContext ctx = null; try { ctx = getContext(getSecurityManager().getCurrentSubject()); final LDAPSearchContext search = ensureContextFactory().getSearch(); - + final SearchAttribute 
firstNameSa = new SearchAttribute(search.getSearchAccount().getMetadataSearchAttribute(AXSchemaType.FIRSTNAME), sWith + "*"); final SearchAttribute lastNameSa = new SearchAttribute(search.getSearchAccount().getMetadataSearchAttribute(AXSchemaType.LASTNAME), sWith + "*"); - final List sas = new ArrayList(); + final List sas = new ArrayList<>(); sas.add(firstNameSa); sas.add(lastNameSa); - + final String searchFilter = buildSearchFilterUnion(search.getSearchAccount().getSearchFilterPrefix(), sas); final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String username = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkAccountRestrictionList(username)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String username = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkAccountRestrictionList(username)) { usernames.add(username); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } @@ -983,10 +1002,10 @@ public List 
findUsernamesWhereNamePartStarts(final String startsWith) { @Override public List findUsernamesWhereUsernameStarts(final String startsWith) { - - final String sWith = ensureCase(startsWith); - - final List usernames = new ArrayList(); + + final String sWith = escapeSearchAttribute(ensureCase(startsWith)); + + final List usernames = new ArrayList<>(); LdapContext ctx = null; try { @@ -998,70 +1017,65 @@ public List findUsernamesWhereUsernameStarts(final String startsWith) { final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String username = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - - if(checkAccountRestrictionList(username)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String username = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + + if (checkAccountRestrictionList(username)) { usernames.add(username); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } return usernames; } - - - private List findGroupnamesForUserDistinguishedName(final 
LdapContext ctx, final String userDistinguishedName) { - final List groupnames = new ArrayList(); + + private List findGroupnamesForUserDistinguishedName(final LdapContext ctx, final String userDistinguishedName) { + final List groupnames = new ArrayList<>(); try { final LDAPSearchContext search = ensureContextFactory().getSearch(); - final SearchAttribute sa = new SearchAttribute(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.MEMBER), userDistinguishedName); + final SearchAttribute sa = new SearchAttribute(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.MEMBER), escapeSearchAttribute(userDistinguishedName)); final String searchFilter = buildSearchFilter(search.getSearchGroup().getSearchFilterPrefix(), sa); final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getAbsoluteBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String groupname = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkGroupRestrictionList(groupname)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String groupname = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkGroupRestrictionList(groupname)) { groupnames.add(groupname); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { 
LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); - } finally { - if(ctx != null) { - LdapUtils.closeContext(ctx); - } } return groupnames; } - + @Override public List findGroupnamesWhereGroupnameStarts(final String startsWith) { - final String sWith = ensureCase(startsWith); - - final List groupnames = new ArrayList(); + final String sWith = escapeSearchAttribute(ensureCase(startsWith)); + + final List groupnames = new ArrayList<>(); LdapContext ctx = null; try { @@ -1073,34 +1087,34 @@ public List findGroupnamesWhereGroupnameStarts(final String startsWith) final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String groupname = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkGroupRestrictionList(groupname)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String groupname = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkGroupRestrictionList(groupname)) { groupnames.add(groupname); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != 
null) { LdapUtils.closeContext(ctx); } } return groupnames; } - + @Override public List findGroupnamesWhereGroupnameContains(final String fragment) { - final String part = ensureCase(fragment); - - final List groupnames = new ArrayList(); + final String part = escapeSearchAttribute(ensureCase(fragment)); + + final List groupnames = new ArrayList<>(); LdapContext ctx = null; try { @@ -1112,22 +1126,22 @@ public List findGroupnamesWhereGroupnameContains(final String fragment) final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String groupname = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkGroupRestrictionList(groupname)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String groupname = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkGroupRestrictionList(groupname)) { groupnames.add(groupname); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(ne); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } @@ -1137,7 +1151,7 @@ public List findGroupnamesWhereGroupnameContains(final String fragment) @Override public List findAllGroupNames() { - final List groupnames = new ArrayList(); 
+ final List groupnames = new ArrayList<>(); LdapContext ctx = null; try { @@ -1149,31 +1163,31 @@ public List findAllGroupNames() { final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String groupname = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkGroupRestrictionList(groupname)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String groupname = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkGroupRestrictionList(groupname)) { groupnames.add(groupname); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } return groupnames; } - + @Override public List findAllUserNames() { - final List usernames = new ArrayList(); + final List usernames = new ArrayList<>(); LdapContext ctx = null; try { @@ -1185,21 +1199,21 @@ public List findAllUserNames() { final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { 
search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - final SearchResult searchResult = (SearchResult) results.nextElement(); - final String accountname = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkAccountRestrictionList(accountname)) { + while (results.hasMoreElements()) { + final SearchResult searchResult = results.nextElement(); + final String accountname = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkAccountRestrictionList(accountname)) { usernames.add(accountname); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } @@ -1210,11 +1224,11 @@ public List findAllUserNames() { @Override public List findAllGroupMembers(final String groupName) { - final String name = ensureCase(groupName); - - final List groupMembers = new ArrayList(); - - if(!checkGroupRestrictionList(name)) { + final String name = escapeSearchAttribute(ensureCase(groupName)); + + final List groupMembers = new ArrayList<>(); + + if (!checkGroupRestrictionList(name)) { return groupMembers; } @@ -1225,29 +1239,29 @@ public List findAllGroupMembers(final String groupName) { //find the dn of the group SearchResult searchResult = findGroupByGroupName(ctx, removeDomainPostfix(name)); final LDAPSearchContext search = ensureContextFactory().getSearch(); - final String dnGroup = 
(String)searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.DN)).get(); + final String dnGroup = (String) searchResult.getAttributes().get(search.getSearchGroup().getSearchAttribute(LDAPSearchAttributeKey.DN)).get(); //find all accounts that are a member of the group - final SearchAttribute sa = new SearchAttribute(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.MEMBER_OF), dnGroup); + final SearchAttribute sa = new SearchAttribute(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.MEMBER_OF), escapeSearchAttribute(dnGroup)); final String searchFilter = buildSearchFilter(search.getSearchAccount().getSearchFilterPrefix(), sa); final SearchControls searchControls = new SearchControls(); searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE); - searchControls.setReturningAttributes(new String[] { search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME) }); + searchControls.setReturningAttributes(new String[]{search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)}); final NamingEnumeration results = ctx.search(search.getBase(), searchFilter, searchControls); - while(results.hasMoreElements()) { - searchResult = (SearchResult) results.nextElement(); - final String member = ensureCase(addDomainPostfix((String)searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); - if(checkAccountRestrictionList(member)) { + while (results.hasMoreElements()) { + searchResult = results.nextElement(); + final String member = ensureCase(addDomainPostfix((String) searchResult.getAttributes().get(search.getSearchAccount().getSearchAttribute(LDAPSearchAttributeKey.NAME)).get())); + if (checkAccountRestrictionList(member)) { groupMembers.add(member); } } - } catch(final NamingException ne) { + } catch (final NamingException ne) { LOG.error(new AuthenticationException(AuthenticationException.UNNOWN_EXCEPTION, 
ne.getMessage())); } finally { - if(ctx != null) { + if (ctx != null) { LdapUtils.closeContext(ctx); } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchAccount.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchAccount.java index 66c8f845542..ff0ea381c39 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchAccount.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchAccount.java @@ -6,17 +6,16 @@ import org.exist.config.annotation.ConfigurationClass; /** - * * @author aretter */ @ConfigurationClass("account") public class LDAPSearchAccount extends AbstractLDAPSearchPrincipal implements Configurable { - - public LDAPSearchAccount(Configuration config) { + + public LDAPSearchAccount(final Configuration config) { super(config); //it require, because class's fields initializing after super constructor - if(this.configuration != null) { + if (this.configuration != null) { this.configuration = Configurator.configure(this, this.configuration); } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchContext.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchContext.java index 894a8f45583..6848832ff11 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchContext.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchContext.java @@ -9,7 +9,6 @@ import org.exist.config.annotation.ConfigurationFieldAsElement; /** - * * @author aretter */ @ConfigurationClass("search") @@ -32,22 +31,24 @@ public class LDAPSearchContext implements Configurable { private final Configuration configuration; - public LDAPSearchContext(Configuration config) { + public LDAPSearchContext(final Configuration config) { this.configuration = Configurator.configure(this, config); } public String getBase() { return base; } - + public String getAbsoluteBase() throws NamingException { - 
if(getBase() != null) { + if (getBase() != null) { int index; - if ((index = getBase().indexOf("dc=")) >= 0) + if ((index = getBase().indexOf("dc=")) >= 0) { return getBase().substring(index); - - if ((index = getBase().indexOf("DC=")) >= 0) + } + + if ((index = getBase().indexOf("DC=")) >= 0) { return getBase().substring(index); + } } else { throw new NamingException("no 'base' defined"); } @@ -79,4 +80,4 @@ public boolean isConfigured() { public Configuration getConfiguration() { return configuration; } -} \ No newline at end of file +} diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchGroup.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchGroup.java index 70bc2fa9c34..66a6ec90299 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchGroup.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPSearchGroup.java @@ -12,7 +12,7 @@ @ConfigurationClass("group") public class LDAPSearchGroup extends AbstractLDAPSearchPrincipal implements Configurable { - public LDAPSearchGroup(Configuration config) { + public LDAPSearchGroup(final Configuration config) { super(config); //it require, because class's fields initializing after super constructor diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPTransformationContext.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPTransformationContext.java index 58efe14ff21..d42133e0245 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPTransformationContext.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LDAPTransformationContext.java @@ -2,6 +2,7 @@ import java.util.ArrayList; import java.util.List; + import org.exist.config.Configurable; import org.exist.config.Configuration; import org.exist.config.Configurator; @@ -10,7 +11,6 @@ import org.exist.config.annotation.ConfigurationFieldAsElement; /** - * * @author aretter */ 
@ConfigurationClass("transformation") @@ -22,13 +22,13 @@ public class LDAPTransformationContext implements TransformationContext, Configu private final Configuration configuration; - public LDAPTransformationContext(Configuration config) { + public LDAPTransformationContext(final Configuration config) { this.configuration = Configurator.configure(this, config); } @Override public List getAdditionalGroups() { - List additionalGroups = new ArrayList(); + final List additionalGroups = new ArrayList<>(); additionalGroups.add(addGroup); return additionalGroups; } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapContextFactory.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapContextFactory.java index b3342608c8a..996e13f3ddf 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapContextFactory.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapContextFactory.java @@ -2,21 +2,21 @@ * eXist Open Source Native XML Database * Copyright (C) 2010 The eXist Project * http://exist-db.org - * + * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. - * + * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 
- * + * * $Id$ */ package org.exist.security.realm.ldap; @@ -29,6 +29,7 @@ import javax.naming.NamingException; import javax.naming.ldap.InitialLdapContext; import javax.naming.ldap.LdapContext; + import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -40,14 +41,13 @@ /** * @author Dmitriy Shabanov - * */ @ConfigurationClass("context") public class LdapContextFactory implements Configurable { - protected final static Logger LOG = LogManager.getLogger(LdapContextFactory.class); + private static final Logger LOG = LogManager.getLogger(LdapContextFactory.class); - protected static final String SUN_CONNECTION_POOLING_PROPERTY = "com.sun.jndi.ldap.connect.pool"; + private static final String SUN_CONNECTION_POOLING_PROPERTY = "com.sun.jndi.ldap.connect.pool"; @ConfigurationFieldAsElement("authentication") protected String authentication = "simple"; @@ -89,48 +89,48 @@ public LdapContextFactory(final Configuration config) { } public LdapContext getSystemLdapContext() throws NamingException { - return getLdapContext(systemUsername, systemPassword); + return getLdapContext(systemUsername, systemPassword); } - + public LdapContext getLdapContext(final String username, final String password) throws NamingException { return getLdapContext(username, password, null); } - - public LdapContext getLdapContext(String username, final String password, final MapadditionalEnv) throws NamingException { - + + public LdapContext getLdapContext(String username, final String password, final Map additionalEnv) throws NamingException { + if (url == null) { throw new IllegalStateException("An LDAP URL must be specified of the form ldap://:"); } - if(StringUtils.isBlank(password)) { + if (StringUtils.isBlank(password)) { throw new IllegalStateException("Password for LDAP authentication may not be empty."); } if (username != null && principalPattern != null) { - username = principalPatternFormat.format(new String[] { 
username }); + username = principalPatternFormat.format(new String[]{username}); } final Hashtable env = new Hashtable(); env.put(Context.SECURITY_AUTHENTICATION, authentication); - if(ssl) { + if (ssl) { env.put(Context.SECURITY_PROTOCOL, "ssl"); } - + if (username != null) { env.put(Context.SECURITY_PRINCIPAL, username); } - + if (password != null) { env.put(Context.SECURITY_CREDENTIALS, password); } - + env.put(Context.INITIAL_CONTEXT_FACTORY, contextFactoryClassName); env.put(Context.PROVIDER_URL, url); //Absolutely nessecary for working with Active Directory env.put("java.naming.ldap.attributes.binary", "objectSid"); - + // the following is helpful in debugging errors //env.put("com.sun.jndi.ldap.trace.ber", System.err); @@ -140,7 +140,7 @@ public LdapContext getLdapContext(String username, final String password, final env.put(SUN_CONNECTION_POOLING_PROPERTY, "true"); } - if(additionalEnv != null) { + if (additionalEnv != null) { env.putAll(additionalEnv); } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapUtils.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapUtils.java index 2f18d2aad57..83fe53efc81 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapUtils.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/LdapUtils.java @@ -2,21 +2,21 @@ * eXist Open Source Native XML Database * Copyright (C) 2010 The eXist Project * http://exist-db.org - * + * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. 
- * + * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - * + * * $Id$ */ package org.exist.security.realm.ldap; @@ -29,21 +29,19 @@ /** * @author Dmitriy Shabanov - * */ -public class LdapUtils { +class LdapUtils { + private static final Logger LOG = LogManager.getLogger(LdapUtils.class); - protected final static Logger LOG = LogManager.getLogger(LdapUtils.class); - - public static void closeContext(LdapContext ctx) { - try { - if (ctx != null) { - ctx.close(); - } - } catch (NamingException e) { - if (LOG.isDebugEnabled()) { - LOG.error("Exception while closing LDAP context. ", e); - } - } - } + static void closeContext(final LdapContext ctx) { + try { + if (ctx != null) { + ctx.close(); + } + } catch (final NamingException e) { + if (LOG.isDebugEnabled()) { + LOG.error("Exception while closing LDAP context. ", e); + } + } + } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/AccountFunctions.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/AccountFunctions.java index 89fc2527725..b5f289887dc 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/AccountFunctions.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/AccountFunctions.java @@ -23,6 +23,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; + import org.exist.dom.QName; import org.exist.security.Account; import org.exist.security.AuthenticationException; @@ -40,67 +41,60 @@ import org.exist.security.realm.ldap.LDAPRealm; /** - * * @author Adam Retter */ public class AccountFunctions extends BasicFunction { - public final static FunctionSignature signatures[] = { - new FunctionSignature( - new QName("update-account", LDAPModule.NAMESPACE_URI, LDAPModule.PREFIX), - "Refreshed the cached LDAP account details from the LDAP 
directory", - new SequenceType[] { - new SequenceType(Type.STRING, Cardinality.EXACTLY_ONE) - }, - new SequenceType(Type.EMPTY, Cardinality.ZERO) - ) + public static final FunctionSignature signatures[] = { + new FunctionSignature( + new QName("update-account", LDAPModule.NAMESPACE_URI, LDAPModule.PREFIX), + "Refreshed the cached LDAP account details from the LDAP directory", + new SequenceType[]{ + new SequenceType(Type.STRING, Cardinality.EXACTLY_ONE) + }, + new SequenceType(Type.EMPTY, Cardinality.ZERO) + ) }; - - public AccountFunctions(XQueryContext context, FunctionSignature signature) { + + public AccountFunctions(final XQueryContext context, final FunctionSignature signature) { super(context, signature); } - + @Override - public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException { + public Sequence eval(final Sequence[] args, final Sequence contextSequence) throws XPathException { final SecurityManager sm = context.getBroker().getBrokerPool().getSecurityManager(); final LDAPRealm ldapRealm = getLdapRealm(sm); final String accountName = args[0].itemAt(0).getStringValue(); - + final Account ldapAccount = sm.getAccount(accountName); - if(ldapAccount == null) + if (ldapAccount == null) throw new XPathException("The Account '" + accountName + "' does not exist!"); - + try { ldapRealm.refreshAccountFromLdap(ldapAccount); - } catch(PermissionDeniedException pde) { + } catch (final PermissionDeniedException | AuthenticationException pde) { throw new XPathException(this, pde); - } catch(AuthenticationException ae) { - throw new XPathException(this, ae); } - + return Sequence.EMPTY_SEQUENCE; } - - private LDAPRealm getLdapRealm(SecurityManager sm) throws XPathException { + + private LDAPRealm getLdapRealm(final SecurityManager sm) throws XPathException { try { - Method mFindRealm = sm.getClass().getDeclaredMethod("findRealmForRealmId", String.class); + final Method mFindRealm = sm.getClass().getDeclaredMethod("findRealmForRealmId", 
String.class); mFindRealm.setAccessible(true); - Realm realm = (Realm)mFindRealm.invoke(sm, LDAPRealm.ID); - if(realm == null) { + final Realm realm = (Realm) mFindRealm.invoke(sm, LDAPRealm.ID); + if (realm == null) { throw new XPathException("The LDAP Realm is not in use!"); } - return (LDAPRealm)realm; - - } catch (NoSuchMethodException ex) { + return (LDAPRealm) realm; + + } catch (final NoSuchMethodException ex) { throw new XPathException(this, "The LDAP Realm is not in use!", ex); - } catch (SecurityException se) { + } catch (final SecurityException | IllegalArgumentException | IllegalAccessException se) { throw new XPathException(this, "Permission to access the LDAP Realm is denied: " + se.getMessage(), se); - } catch (IllegalArgumentException iae) { - throw new XPathException(this, "Permission to access the LDAP Realm is denied: " + iae.getMessage(), iae); - } catch (IllegalAccessException iae) { - throw new XPathException(this, "Permission to access the LDAP Realm is denied: " + iae.getMessage(), iae); - } catch (InvocationTargetException ite) { + } catch (final InvocationTargetException ite) { throw new XPathException(this, "An error occured whilst accessing the LDAP Realm: " + ite.getMessage(), ite); } } diff --git a/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/LDAPModule.java b/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/LDAPModule.java index 68f807fc905..f59b96ffd94 100644 --- a/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/LDAPModule.java +++ b/extensions/security/ldap/src/org/exist/security/realm/ldap/xquery/LDAPModule.java @@ -1,17 +1,3 @@ -package org.exist.security.realm.ldap.xquery; - -import java.util.List; -import java.util.Map; -import org.exist.xquery.AbstractInternalModule; -import org.exist.xquery.FunctionDef; - -/** - * - * http://exist-db.org/xquery/versioning - * - * @author Adam Retter - */ - /* * eXist Open Source Native XML Database * Copyright (C) 2001-11 The eXist-db 
Project @@ -30,23 +16,32 @@ * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - * $Id$ + */ +package org.exist.security.realm.ldap.xquery; + +import java.util.List; +import java.util.Map; + +import org.exist.xquery.AbstractInternalModule; +import org.exist.xquery.FunctionDef; + +/** + * @author Adam Retter */ public class LDAPModule extends AbstractInternalModule { - public final static String NAMESPACE_URI = "http://exist-db.org/security/ldap/xquery"; - public final static String PREFIX = "ldap"; - public final static String RELEASED_IN_VERSION = "eXist-1.5"; - + public static final String NAMESPACE_URI = "http://exist-db.org/security/ldap/xquery"; + public static final String PREFIX = "ldap"; + public static final String RELEASED_IN_VERSION = "eXist-1.5"; + public static final FunctionDef[] functions = { - new FunctionDef(AccountFunctions.signatures[0], AccountFunctions.class) + new FunctionDef(AccountFunctions.signatures[0], AccountFunctions.class) }; - public LDAPModule(Map> parameters) { + public LDAPModule(final Map> parameters) { super(functions, parameters); } - + @Override public String getNamespaceURI() { return NAMESPACE_URI; diff --git a/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceFacebook.java b/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceFacebook.java index 87024d8959a..e0778847026 100644 --- a/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceFacebook.java +++ b/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceFacebook.java @@ -74,8 +74,7 @@ public static void saveAccessToken(HttpServletRequest request, OAuthService serv if (semicolonPos > 0) { String _charset = contentType.substring(semicolonPos + 1).trim(); if (_charset.startsWith("charset")) { - //charset = - _charset.substring(_charset.indexOf('=') + 
1); + //charset = _charset.substring(_charset.indexOf('=') + 1); } contentType = contentType.substring(0, semicolonPos); } diff --git a/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceGoogle.java b/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceGoogle.java index 3a43837d6e5..19eca4d1c79 100644 --- a/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceGoogle.java +++ b/extensions/security/oauth/src/org/exist/security/realm/oauth/ServiceGoogle.java @@ -74,8 +74,7 @@ public static void saveAccessToken(HttpServletRequest request, OAuthService serv if (semicolonPos > 0) { String _charset = contentType.substring(semicolonPos + 1).trim(); if (_charset.startsWith("charset")) { - //charset = - _charset.substring(_charset.indexOf('=') + 1); + //charset = _charset.substring(_charset.indexOf('=') + 1); } contentType = contentType.substring(0, semicolonPos); } diff --git a/extensions/security/openid/src/org/exist/security/realm/openid/OpenIDUtility.java b/extensions/security/openid/src/org/exist/security/realm/openid/OpenIDUtility.java index 8f4c5ca0fc0..34b0893d4ae 100644 --- a/extensions/security/openid/src/org/exist/security/realm/openid/OpenIDUtility.java +++ b/extensions/security/openid/src/org/exist/security/realm/openid/OpenIDUtility.java @@ -25,6 +25,7 @@ import org.apache.logging.log4j.Logger; import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.SchemaType; import org.exist.security.Subject; import org.exist.source.Source; @@ -86,14 +87,10 @@ public static boolean registerUser(Subject principal) { } xqueryResourcePath = xqueryResourcePath.trim(); LOG.info("org.exist.security.openid.verify_logging_script = \"" + xqueryResourcePath + "\""); - - BrokerPool pool = null; try { - DocumentImpl resource = null; - Source source = null; - pool = BrokerPool.getInstance(); + final BrokerPool pool = 
BrokerPool.getInstance(); try(final DBBroker broker = pool.get(Optional.of(principal))) { if (broker == null) { @@ -104,36 +101,33 @@ public static boolean registerUser(Subject principal) { XmldbURI pathUri = XmldbURI.create(xqueryResourcePath); - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + try(final LockedDocument lockedResource = broker.getXMLResource(pathUri, LockMode.READ_LOCK)) { + if (lockedResource == null) { + LOG.info("Resource " + xqueryResourcePath + " does not exist."); + LOG.info("pathURI " + pathUri); + return true; + } + - if (resource != null) { LOG.info("Resource " + xqueryResourcePath + " exists."); - source = new DBSource(broker, (BinaryDocument) resource, true); - } else { - LOG.info("Resource " + xqueryResourcePath + " does not exist."); - LOG.info("pathURI " + pathUri); - return true; - } + final Source source = new DBSource(broker, (BinaryDocument) lockedResource.getDocument(), true); - XQuery xquery = pool.getXQueryService(); + XQuery xquery = pool.getXQueryService(); - if (xquery == null) { - LOG.error("broker unable to retrieve XQueryService"); - return false; - } + if (xquery == null) { + LOG.error("broker unable to retrieve XQueryService"); + return false; + } - XQueryContext context = new XQueryContext(broker.getBrokerPool()); + XQueryContext context = new XQueryContext(broker.getBrokerPool()); - CompiledXQuery compiled = xquery.compile(broker, context, source); + CompiledXQuery compiled = xquery.compile(broker, context, source); - Properties outputProperties = new Properties(); + Properties outputProperties = new Properties(); - Sequence result = xquery.execute(broker, compiled, null, outputProperties); - LOG.info("XQuery execution results: " + result.toString()); - } finally { - if(resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); + Sequence result = xquery.execute(broker, compiled, null, outputProperties); + LOG.info("XQuery execution results: " + result.toString()); } } } catch 
(Exception e) { @@ -142,4 +136,4 @@ public static boolean registerUser(Subject principal) { } return true; } -} \ No newline at end of file +} diff --git a/extensions/webdav/src/org/exist/webdav/ExistCollection.java b/extensions/webdav/src/org/exist/webdav/ExistCollection.java index bbbfc116cef..7f29d266dd3 100644 --- a/extensions/webdav/src/org/exist/webdav/ExistCollection.java +++ b/extensions/webdav/src/org/exist/webdav/ExistCollection.java @@ -91,34 +91,26 @@ public void initMetadata() { return; } - try (final DBBroker broker = brokerPool.get(Optional.of(subject))) { - // Get access to collection - Collection collection = null; - try { - collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK); - - if (collection == null) { - LOG.error(String.format("Collection for %s cannot be opened for metadata", xmldbUri)); - return; - } + try (final DBBroker broker = brokerPool.get(Optional.of(subject)); + final Collection collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK)) { - // Retrieve some meta data - permissions = collection.getPermissionsNoLock(); - readAllowed = permissions.validate(subject, Permission.READ); - writeAllowed = permissions.validate(subject, Permission.WRITE); - executeAllowed = permissions.validate(subject, Permission.EXECUTE); - - creationTime = collection.getCreationTime(); - lastModified = creationTime; // Collection does not have more information. 
- - ownerUser = permissions.getOwner().getUsername(); - ownerGroup = permissions.getGroup().getName(); - } finally { - // Clean up collection - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } + if (collection == null) { + LOG.error(String.format("Collection for %s cannot be opened for metadata", xmldbUri)); + return; } + + // Retrieve some meta data + permissions = collection.getPermissionsNoLock(); + readAllowed = permissions.validate(subject, Permission.READ); + writeAllowed = permissions.validate(subject, Permission.WRITE); + executeAllowed = permissions.validate(subject, Permission.EXECUTE); + + creationTime = collection.getCreationTime(); + lastModified = creationTime; // Collection does not have more information. + + ownerUser = permissions.getOwner().getUsername(); + ownerGroup = permissions.getGroup().getName(); + } catch (final PermissionDeniedException | EXistException pde) { LOG.error(pde); } @@ -133,21 +125,13 @@ public void initMetadata() { public List getCollectionURIs() { final List collectionURIs = new ArrayList<>(); - try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject))) { - // Try to read as specified subject - Collection collection = null; - try { - collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK); - // Get all collections - final Iterator collections = collection.collectionIteratorNoLock(broker); // QQ: use collectionIterator ? - while (collections.hasNext()) { - collectionURIs.add(xmldbUri.append(collections.next())); + try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); + final Collection collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK)) { + // Get all collections + final Iterator collections = collection.collectionIteratorNoLock(broker); // QQ: use collectionIterator ? 
+ while (collections.hasNext()) { + collectionURIs.add(xmldbUri.append(collections.next())); - } - } finally { - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } } } catch (final EXistException | PermissionDeniedException e) { LOG.error(e); @@ -164,22 +148,13 @@ public List getCollectionURIs() { public List getDocumentURIs() { final List documentURIs = new ArrayList<>(); - try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject))) { - Collection collection = null; - try { - // Try to read as specified subject - collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK); + try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); + final Collection collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK)) { - // Get all documents - final Iterator documents = collection.iteratorNoLock(broker); // QQ: use 'iterator' - while (documents.hasNext()) { - documentURIs.add(documents.next().getURI()); - } - } finally { - // Clean up resources - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } + // Get all documents + final Iterator documents = collection.iteratorNoLock(broker); // QQ: use 'iterator' + while (documents.hasNext()) { + documentURIs.add(documents.next().getURI()); } } catch (final PermissionDeniedException | EXistException e) { LOG.error(e); @@ -199,15 +174,13 @@ void delete() { LOG.debug(String.format("Deleting '%s'", xmldbUri)); } - Collection collection = null; - final TransactionManager txnManager = brokerPool.getTransactionManager(); try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { + final Txn txn = txnManager.beginTransaction(); + final Collection collection = broker.openCollection(xmldbUri, LockMode.WRITE_LOCK)) { // Open collection if possible, else abort - collection = broker.openCollection(xmldbUri, LockMode.WRITE_LOCK); if (collection == null) { txnManager.abort(txn); return; @@ 
-226,12 +199,7 @@ void delete() { LOG.error(e); } finally { - // TODO: check if can be done earlier - if (collection != null) { - collection.release(LockMode.WRITE_LOCK); - } - - if (LOG.isDebugEnabled()) { + if(LOG.isDebugEnabled()) { LOG.debug("Finished delete"); } } @@ -244,16 +212,16 @@ public XmldbURI createCollection(String name) throws PermissionDeniedException, } XmldbURI newCollection = xmldbUri.append(name); - Collection collection = null; final TransactionManager txnManager = brokerPool.getTransactionManager(); try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { + final Txn txn = txnManager.beginTransaction(); + final Collection collection = broker.openCollection(newCollection, LockMode.WRITE_LOCK)) { // Check if collection exists. not likely to happen since availability is // checked by ResourceFactory - collection = broker.openCollection(newCollection, LockMode.WRITE_LOCK); + if (collection != null) { final String msg = "Collection already exists"; @@ -265,15 +233,16 @@ public XmldbURI createCollection(String name) throws PermissionDeniedException, } // Create collection - Collection created = broker.getOrCreateCollection(txn, newCollection); - broker.saveCollection(txn, created); - broker.flush(); + try (final Collection created = broker.getOrCreateCollection(txn, newCollection)) { + broker.saveCollection(txn, created); + broker.flush(); - // Commit change - txnManager.commit(txn); + // Commit change + txnManager.commit(txn); - if (LOG.isDebugEnabled()) { - LOG.debug("Collection created sucessfully"); + if (LOG.isDebugEnabled()) { + LOG.debug("Collection created sucessfully"); + } } } catch (EXistException | PermissionDeniedException e) { LOG.error(e); @@ -284,13 +253,7 @@ public XmldbURI createCollection(String name) throws PermissionDeniedException, throw new EXistException(e); } finally { - - // TODO: check if can be done earlier - if (collection != null) { - 
collection.release(LockMode.WRITE_LOCK); - } - - if (LOG.isDebugEnabled()) { + if(LOG.isDebugEnabled()) { LOG.debug("Finished creation"); } } @@ -312,9 +275,6 @@ public XmldbURI createFile(String newName, InputStream is, Long length, String c mime = MimeType.BINARY_TYPE; } - // References to the database - Collection collection = null; - // To support LockNullResource, a 0-byte XML document can be received. Since 0-byte // XML documents are not supported a small file will be created. @@ -329,11 +289,11 @@ public XmldbURI createFile(String newName, InputStream is, Long length, String c final TransactionManager txnManager = brokerPool.getTransactionManager(); try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { + final Txn txn = txnManager.beginTransaction(); + final Collection collection = broker.openCollection(xmldbUri, LockMode.WRITE_LOCK)) { // Check if collection exists. not likely to happen since availability is checked // by ResourceFactory - collection = broker.openCollection(xmldbUri, LockMode.WRITE_LOCK); if (collection == null) { LOG.debug(String.format("Collection %s does not exist", xmldbUri)); txnManager.abort(txn); @@ -385,11 +345,6 @@ public XmldbURI createFile(String newName, InputStream is, Long length, String c throw e; } finally { - // TODO: check if can be done earlier - if (collection != null) { - collection.release(LockMode.WRITE_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished creation"); } @@ -415,48 +370,46 @@ void resourceCopyMove(XmldbURI destCollectionUri, String newName, Mode mode) thr throw new EXistException(ex.getMessage()); } - Collection srcCollection = null; // This class contains already the URI of the resource that shall be moved/copied - final XmldbURI srcCollectionUri = xmldbUri; + XmldbURI srcCollectionUri = xmldbUri; // use WRITE_LOCK if moving or if src and dest collection are the same final LockMode srcCollectionLockMode = mode == Mode.MOVE 
|| destCollectionUri.equals(srcCollectionUri) ? LockMode.WRITE_LOCK : LockMode.READ_LOCK; - Collection destCollection = null; final TransactionManager txnManager = brokerPool.getTransactionManager(); try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { + final Txn txn = txnManager.beginTransaction(); + final Collection srcCollection = broker.openCollection(srcCollectionUri, srcCollectionLockMode)) { // Open collection if possible, else abort - srcCollection = broker.openCollection(srcCollectionUri, srcCollectionLockMode); if (srcCollection == null) { txnManager.abort(txn); return; // TODO throw } - // Open collection if possible, else abort - destCollection = broker.openCollection(destCollectionUri, LockMode.WRITE_LOCK); - if (destCollection == null) { - LOG.debug(String.format("Destination collection %s does not exist.", xmldbUri)); - txnManager.abort(txn); - return; // TODO throw? - } + try(final Collection destCollection = broker.openCollection(destCollectionUri, LockMode.WRITE_LOCK)) { + if (destCollection == null) { + LOG.debug(String.format("Destination collection %s does not exist.", xmldbUri)); + txnManager.abort(txn); + return; // TODO throw? 
+ } - // Perform actial move/copy - if (mode == Mode.COPY) { - broker.copyCollection(txn, srcCollection, destCollection, newNameUri); + // Perform actial move/copy + if (mode == Mode.COPY) { + broker.copyCollection(txn, srcCollection, destCollection, newNameUri); - } else { - broker.moveCollection(txn, srcCollection, destCollection, newNameUri); - } + } else { + broker.moveCollection(txn, srcCollection, destCollection, newNameUri); + } - // Commit change - txnManager.commit(txn); + // Commit change + txnManager.commit(txn); - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("Collection %sd successfully", mode)); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Collection %sd successfully", mode)); + } } } catch (LockException e) { @@ -469,15 +422,6 @@ void resourceCopyMove(XmldbURI destCollectionUri, String newName, Mode mode) thr LOG.error(e); throw new EXistException(e.getMessage()); } finally { - - if (destCollection != null) { - destCollection.release(LockMode.WRITE_LOCK); - } - - if (srcCollection != null) { - srcCollection.release(srcCollectionLockMode); - } - if (LOG.isDebugEnabled()) { LOG.debug(String.format("Finished %s", mode)); } diff --git a/extensions/webdav/src/org/exist/webdav/ExistDocument.java b/extensions/webdav/src/org/exist/webdav/ExistDocument.java index a487993c7cc..9b28284f71a 100644 --- a/extensions/webdav/src/org/exist/webdav/ExistDocument.java +++ b/extensions/webdav/src/org/exist/webdav/ExistDocument.java @@ -27,6 +27,7 @@ import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.persistent.LockToken; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Account; import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; @@ -98,11 +99,10 @@ public void initMetadata() { } try (final DBBroker broker = brokerPool.get(Optional.of(subject))) { - DocumentImpl document = null; - try { - // If it is not a collection, check 
if it is a document - document = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK); + // If it is not a collection, check if it is a document + try (final LockedDocument lockedDocument = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK)) { + final DocumentImpl document = lockedDocument.getDocument(); if (document.getResourceType() == DocumentImpl.XML_FILE) { isXmlDocument = true; } @@ -124,11 +124,6 @@ public void initMetadata() { // Get (estimated) file size contentLength = document.getContentLength(); - } finally { - // Cleanup resources - if (document != null) { - document.getUpdateLock().release(LockMode.READ_LOCK); - } } } catch (final EXistException | PermissionDeniedException e) { LOG.error(e); @@ -162,11 +157,10 @@ public void stream(OutputStream os) throws IOException, PermissionDeniedExceptio try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject))) { - DocumentImpl document = null; - try { - // If it is not a collection, check if it is a document - document = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK); + // If it is not a collection, check if it is a document + try(final LockedDocument lockedDocument = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK)) { + final DocumentImpl document = lockedDocument.getDocument(); if (document.getResourceType() == DocumentImpl.XML_FILE) { // Stream XML document @@ -183,10 +177,6 @@ public void stream(OutputStream os) throws IOException, PermissionDeniedExceptio broker.readBinaryResource((BinaryDocument) document, os); os.flush(); } - } finally { - if (document != null) { - document.getUpdateLock().release(LockMode.READ_LOCK); - } } } catch (EXistException e) { LOG.error(e); @@ -240,20 +230,17 @@ void delete() { LOG.debug(String.format("Deleting %s", xmldbUri)); } - Collection collection = null; - DocumentImpl resource = null; + // Need to split path into collection and document name + final XmldbURI collName = xmldbUri.removeLastSegment(); + final XmldbURI docName = 
xmldbUri.lastSegment(); final TransactionManager txnManager = brokerPool.getTransactionManager(); try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { - - // Need to split path into collection and document name - XmldbURI collName = xmldbUri.removeLastSegment(); - XmldbURI docName = xmldbUri.lastSegment(); + final Txn txn = txnManager.beginTransaction(); + final Collection collection = broker.openCollection(collName, LockMode.WRITE_LOCK)) { // Open collection if possible, else abort - collection = broker.openCollection(collName, LockMode.WRITE_LOCK); if (collection == null) { LOG.debug("Collection does not exist"); txnManager.abort(txn); @@ -261,25 +248,29 @@ void delete() { } // Open document if possible, else abort - resource = collection.getDocument(broker, docName); - if (resource == null) { - LOG.debug(String.format("No resource found for path: %s", xmldbUri)); - txnManager.abort(txn); - return; - } + try(final LockedDocument lockedResource = collection.getDocumentWithLock(broker, docName, LockMode.READ_LOCK)) { + if (lockedResource == null) { + LOG.debug(String.format("No resource found for path: %s", xmldbUri)); + txnManager.abort(txn); + return; + } - if (resource.getResourceType() == DocumentImpl.BINARY_FILE) { - collection.removeBinaryResource(txn, broker, resource.getFileURI()); + final DocumentImpl resource = lockedResource.getDocument(); + if (resource.getResourceType() == DocumentImpl.BINARY_FILE) { + collection.removeBinaryResource(txn, broker, resource.getFileURI()); + } else { + collection.removeXMLResource(txn, broker, resource.getFileURI()); + } - } else { - collection.removeXMLResource(txn, broker, resource.getFileURI()); - } + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); - // Commit change - txnManager.commit(txn); + // Commit change + txnManager.commit(txn); - if (LOG.isDebugEnabled()) { - LOG.debug("Document 
deleted sucessfully"); + if (LOG.isDebugEnabled()) { + LOG.debug("Document deleted sucessfully"); + } } } catch (final LockException e) { @@ -287,12 +278,6 @@ void delete() { } catch (final EXistException | IOException | TriggerException | PermissionDeniedException e) { LOG.error(e); } finally { - - // TODO: check if can be done earlier - if (collection != null) { - collection.release(LockMode.WRITE_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished delete"); } @@ -308,12 +293,10 @@ public LockToken getCurrentLock() { LOG.debug("Get current lock " + xmldbUri); } - DocumentImpl document = null; - - try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject))) { + try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); + final LockedDocument lockedDocument = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK)) { - // If it is not a collection, check if it is a document - document = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK); + final DocumentImpl document = lockedDocument.getDocument(); if (document == null) { LOG.debug("No resource found for path: " + xmldbUri); @@ -353,11 +336,6 @@ public LockToken getCurrentLock() { return null; } finally { - - if (document != null) { - document.getUpdateLock().release(LockMode.READ_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished probe lock"); } @@ -374,13 +352,11 @@ public LockToken lock(LockToken inputToken) throws PermissionDeniedException, LOG.debug("create lock " + xmldbUri); } - DocumentImpl document = null; - - try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject))) { - - // Try to get document (add catch?) 
- document = broker.getXMLResource(xmldbUri, LockMode.WRITE_LOCK); + // Try to get document + try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); + final LockedDocument lockedDocument = broker.getXMLResource(xmldbUri, LockMode.WRITE_LOCK);) { + final DocumentImpl document = lockedDocument.getDocument(); if (document == null) { if (LOG.isDebugEnabled()) { @@ -449,12 +425,6 @@ public LockToken lock(LockToken inputToken) throws PermissionDeniedException, LOG.error(e); throw new EXistException(e); } finally { - - // TODO: check if can be done earlier - if (document != null) { - document.getUpdateLock().release(LockMode.WRITE_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished create lock"); } @@ -470,17 +440,14 @@ void unlock() throws PermissionDeniedException, DocumentNotLockedException, EXis LOG.debug("unlock " + xmldbUri); } - DocumentImpl document = null; - final TransactionManager txnManager = brokerPool.getTransactionManager(); + // Try to get document try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { - - - // Try to get document (add catch?) 
- document = broker.getXMLResource(xmldbUri, LockMode.WRITE_LOCK); + final Txn txn = txnManager.beginTransaction(); + final LockedDocument lockedDocument = broker.getXMLResource(xmldbUri, LockMode.WRITE_LOCK)) { + final DocumentImpl document = lockedDocument.getDocument(); if (document == null) { final String msg = String.format("No resource found for path: %s", xmldbUri); LOG.debug(msg); @@ -518,10 +485,6 @@ void unlock() throws PermissionDeniedException, DocumentNotLockedException, EXis LOG.error(e); throw new EXistException(e); } finally { - if (document != null) { - document.getUpdateLock().release(LockMode.WRITE_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished create lock"); } @@ -545,26 +508,22 @@ void resourceCopyMove(XmldbURI destCollectionUri, String newName, Mode mode) thr throw new EXistException(ex.getMessage()); } - Collection srcCollection = null; // use WRITE_LOCK if moving or if src and dest collection are the same final LockMode srcCollectionLockMode = mode == Mode.MOVE || destCollectionUri.equals(xmldbUri.removeLastSegment()) ? 
LockMode.WRITE_LOCK : LockMode.READ_LOCK; DocumentImpl srcDocument = null; - Collection destCollection = null; - + // Need to split path into collection and document name + final XmldbURI srcCollectionUri = xmldbUri.removeLastSegment(); + final XmldbURI srdDocumentUri = xmldbUri.lastSegment(); final TransactionManager txnManager = brokerPool.getTransactionManager(); try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); - final Txn txn = txnManager.beginTransaction()) { - - // Need to split path into collection and document name - XmldbURI srcCollectionUri = xmldbUri.removeLastSegment(); - XmldbURI srdDocumentUri = xmldbUri.lastSegment(); + final Txn txn = txnManager.beginTransaction(); + final Collection srcCollection = broker.openCollection(srcCollectionUri, srcCollectionLockMode)) { // Open collection if possible, else abort - srcCollection = broker.openCollection(srcCollectionUri, srcCollectionLockMode); if (srcCollection == null) { txnManager.abort(txn); return; // TODO throw @@ -579,30 +538,30 @@ void resourceCopyMove(XmldbURI destCollectionUri, String newName, Mode mode) thr } // Open collection if possible, else abort - destCollection = broker.openCollection(destCollectionUri, LockMode.WRITE_LOCK); - if (destCollection == null) { - LOG.debug(String.format("Destination collection %s does not exist.", xmldbUri)); - txnManager.abort(txn); - return; - } + try (final Collection destCollection = broker.openCollection(destCollectionUri, LockMode.WRITE_LOCK)) { + if (destCollection == null) { + LOG.debug(String.format("Destination collection %s does not exist.", xmldbUri)); + txnManager.abort(txn); + return; + } - // Perform actial move/copy - if (mode == Mode.COPY) { - broker.copyResource(txn, srcDocument, destCollection, newNameUri); + // Perform actial move/copy + if (mode == Mode.COPY) { + broker.copyResource(txn, srcDocument, destCollection, newNameUri); - } else { - broker.moveResource(txn, srcDocument, destCollection, newNameUri); - } + } 
else { + broker.moveResource(txn, srcDocument, destCollection, newNameUri); + } - // Commit change - txnManager.commit(txn); + // Commit change + txnManager.commit(txn); - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("Document %sd successfully", mode)); + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Document %sd successfully", mode)); + } } - } catch (LockException e) { LOG.error("Resource is locked.", e); throw new EXistException(e.getMessage()); @@ -616,16 +575,6 @@ void resourceCopyMove(XmldbURI destCollectionUri, String newName, Mode mode) thr throw new EXistException(e.getMessage()); } finally { - - // TODO: check if can be done earlier - if (destCollection != null) { - destCollection.release(LockMode.WRITE_LOCK); - } - - if (srcCollection != null) { - srcCollection.release(srcCollectionLockMode); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished " + mode); } @@ -639,8 +588,6 @@ public LockToken refreshLock(String token) throws PermissionDeniedException, LOG.debug(String.format("refresh lock %s lock=%s", xmldbUri, token)); } - DocumentImpl document = null; - if (token == null) { if (LOG.isDebugEnabled()) { LOG.debug("token is null"); @@ -648,11 +595,11 @@ public LockToken refreshLock(String token) throws PermissionDeniedException, throw new EXistException("token is null"); } - try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject))) { - - // Try to get document (add catch?) 
- document = broker.getXMLResource(xmldbUri, LockMode.WRITE_LOCK); + // Try to get document + try (final DBBroker broker = brokerPool.get(Optional.ofNullable(subject)); + final LockedDocument lockedDocument = broker.getXMLResource(xmldbUri, LockMode.WRITE_LOCK)) { + final DocumentImpl document = lockedDocument.getDocument(); if (document == null) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("No resource found for path: %s", xmldbUri)); @@ -709,12 +656,6 @@ public LockToken refreshLock(String token) throws PermissionDeniedException, LOG.error(e); throw e; } finally { - - // TODO: check if can be done earlier - if (document != null) { - document.getUpdateLock().release(LockMode.WRITE_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("Finished create lock"); } diff --git a/extensions/webdav/src/org/exist/webdav/ExistResourceFactory.java b/extensions/webdav/src/org/exist/webdav/ExistResourceFactory.java index f8dfa2f9bee..b46d3f51221 100644 --- a/extensions/webdav/src/org/exist/webdav/ExistResourceFactory.java +++ b/extensions/webdav/src/org/exist/webdav/ExistResourceFactory.java @@ -29,6 +29,7 @@ import org.exist.EXistException; import org.exist.collections.Collection; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; @@ -163,8 +164,6 @@ public Resource getResource(String host, String path) { */ private ResourceType getResourceType(BrokerPool brokerPool, XmldbURI xmldbUri) { - Collection collection = null; - DocumentImpl document = null; ResourceType type = ResourceType.NOT_EXISTING; // MacOsX finder specific files @@ -183,50 +182,31 @@ private ResourceType getResourceType(BrokerPool brokerPool, XmldbURI xmldbUri) { // Try to read as system user. Note that the actual user is not know // yet. In MiltonResource the actual authentication and authorization // is performed. 
- try (final DBBroker broker = brokerPool.get(Optional.of(brokerPool.getSecurityManager().getSystemSubject()))) { + try (final DBBroker broker = brokerPool.get(Optional.of(brokerPool.getSecurityManager().getSystemSubject())); + final Collection collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK)) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Path: %s", xmldbUri.toString())); } // First check if resource is a collection - collection = broker.openCollection(xmldbUri, LockMode.READ_LOCK); if (collection != null) { type = ResourceType.COLLECTION; - collection.release(LockMode.READ_LOCK); - collection = null; - } else { // If it is not a collection, check if it is a document - document = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK); - - if (document != null) { - // Document is found - type = ResourceType.DOCUMENT; - document.getUpdateLock().release(LockMode.READ_LOCK); - document = null; - - } else { - // No document and no collection. - type = ResourceType.NOT_EXISTING; + try (final LockedDocument lockedDoc = broker.getXMLResource(xmldbUri, LockMode.READ_LOCK)) { + if (lockedDoc != null) { + // Document is found + type = ResourceType.DOCUMENT; + } else { + // No document and no collection. 
+ type = ResourceType.NOT_EXISTING; + } } } - - - } catch (Exception ex) { + } catch (final Exception ex) { LOG.error(String.format("Error determining nature of resource %s", xmldbUri.toString()), ex); type = ResourceType.NOT_EXISTING; - } finally { - - // Clean-up, just in case - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } - - // Clean-up, just in case - if (document != null) { - document.getUpdateLock().release(LockMode.READ_LOCK); - } } if (LOG.isDebugEnabled()) { diff --git a/extensions/xprocxq/main/src/xquery/util.xqm b/extensions/xprocxq/main/src/xquery/util.xqm index ea06c026b22..894e73d2c48 100755 --- a/extensions/xprocxq/main/src/xquery/util.xqm +++ b/extensions/xprocxq/main/src/xquery/util.xqm @@ -1,10 +1,10 @@ xquery version "1.0" encoding "UTF-8"; module namespace u = "http://xproc.net/xproc/util"; -(: ------------------------------------------------------------------------------------- - +(: ------------------------------------------------------------------------------------- + util.xqm - contains most of the XQuery processor specific functions, including all helper functions. 
- + ---------------------------------------------------------------------------------------- :) @@ -96,7 +96,7 @@ declare function u:get-step($stepname as xs:string,$declarestep) { $const:std-steps/p:declare-step[@type=$stepname], $const:opt-steps/p:declare-step[@type=$stepname], $const:ext-steps/p:declare-step[@type=$stepname], - $const:comp-steps//xproc:element[@type=$stepname], + $const:comp-steps//xproc:element[@type=$stepname], $declarestep/@type }; @@ -151,7 +151,7 @@ else (: -------------------------------------------------------------------------- :) declare function u:assert($booleanexp as item(), $why as xs:string) { -if(not($booleanexp) and boolean($u:NDEBUG)) then +if(not($booleanexp) and boolean($u:NDEBUG)) then u:dynamicError('err:XC0020',$why) else () @@ -160,7 +160,7 @@ else (: -------------------------------------------------------------------------- :) declare function u:assert($booleanexp as item(), $why as xs:string,$error) { -if(not($booleanexp) and boolean($u:NDEBUG)) then +if(not($booleanexp) and boolean($u:NDEBUG)) then error(QName('http://www.w3.org/ns/xproc-error',$error),concat("XProc Assert Error: ",$why)) else () @@ -169,7 +169,7 @@ else (: -------------------------------------------------------------------------- :) declare function u:boolean($test as xs:string) { -if(contains($test,'false') ) then +if(contains($test,'false') ) then false() else true() @@ -433,7 +433,7 @@ declare function u:xquery($query as xs:string){ u:dynamicError('err:XD0001','query is empty and/or XProc step is not supported') else $query - let $result := util:eval($qry) + let $result := util:eval($qry) return $result }; @@ -472,8 +472,8 @@ else return util:eval-inline($xml,$query) (: - if ( $result instance of element() or $result instance of document-node()) then - + if ( $result instance of element() or $result instance of document-node()) then + u:dynamicError('err:XD0016',$xpathstring) :) }; @@ -540,11 +540,11 @@ if ($strict eq '1') then let $e1 := (for 
$child in $primary/* return $child) - + let $e2 := (for $child in $secondary/* return $child) - + return every $i in 1 to max((count($e1),count($e2))) @@ -556,11 +556,11 @@ else let $e1 := (for $child in $primary/* return u:treewalker($child)) - + let $e2 := (for $child in $secondary/* return u:treewalker($child)) - + return every $i in 1 to max((count($e1),count($e2))) @@ -576,11 +576,11 @@ element {node-name($element)} for $child in $element/node() return - if ($child instance of element()) then + if ($child instance of element()) then u:treewalker($child) - else + else normalize-space($child) - + } }; @@ -612,9 +612,9 @@ declare function u:copy-filter-elements($element as element(), $element-name as declare function u:rename-inline-element($element as element(),$match,$newelement) as element() { element {if(string(node-name($element)) = string($match)) then node-name($newelement) else node-name($element)} {$element/@*, - if(string(node-name($element)) = $match) then + if(string(node-name($element)) = $match) then ($newelement/@*) - else + else (), for $child in $element/node() return @@ -628,9 +628,9 @@ declare function u:delete-matching-elements($element as element(),$select) as el element {node-name($element)} {$element/@*[not(. intersect $select)], for $child in $element/node()[not(. 
intersect $select)] - return + return if ($child instance of element()) - then + then u:delete-matching-elements($child,$select) else $child @@ -643,7 +643,7 @@ declare function u:replace-matching-elements($element as element(),$select,$repl element {node-name($element)} {$element/@*, for $child in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then @@ -658,7 +658,7 @@ declare function u:replace-matching-elements($element as element(),$select,$repl else u:replace-matching-elements($child,$select,$replace) else - $child + $child } }; @@ -666,7 +666,7 @@ declare function u:insert-matching-elements($element as element(),$select,$repla element {node-name($element)} {$element/@*, for $child in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then @@ -674,12 +674,12 @@ declare function u:insert-matching-elements($element as element(),$select,$repla ($replace,u:insert-matching-elements($child,$select,$replace,$position)) else if($position eq 'after' or $position eq 'last-child') then (u:insert-matching-elements($child,$select,$replace,$position),$replace) - else + else u:insert-matching-elements($child,$select,$replace,$position) else u:insert-matching-elements($child,$select,$replace,$position) else - $child + $child } }; @@ -689,16 +689,16 @@ declare function u:rename-matching-elements($element as element(),$select,$new-n element {node-name($element)} {$element/@*, for $child in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then element {$new-name}{$child/* - } + } else u:rename-matching-elements($child,$select,$new-name) else - $child + $child } }; @@ -706,17 +706,17 @@ declare function u:wrap-matching-elements($element as element(),$select,$wrapper element {node-name($element)} {$element/@*, for $child in $element/node() - return + return if ($child instance of element()) then if ($child intersect 
$select) then element {$wrapper}{ u:wrap-matching-elements($child,$select,$wrapper) - } + } else u:wrap-matching-elements($child,$select,$wrapper) else - $child + $child } }; @@ -724,7 +724,7 @@ declare function u:unwrap-matching-elements($element as element(),$select) as el element {node-name($element)} {$element/@*, for $child in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then @@ -732,7 +732,7 @@ declare function u:unwrap-matching-elements($element as element(),$select) as el else u:unwrap-matching-elements($child,$select) else - $child + $child } }; @@ -740,18 +740,18 @@ declare function u:label-matching-elements($element as element(),$select,$attrib element {node-name($element)} {$element/@*, for $child at $pos in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then - element {node-name($child)}{ + element {node-name($child)}{ attribute {$attribute} {$label,"_",$pos}, u:label-matching-elements($child,$select,$attribute,$label,$replace) - } + } else u:label-matching-elements($child,$select,$attribute,$label,$replace) else - $child + $child } }; @@ -759,18 +759,18 @@ declare function u:label-matching-elements($element as element(),$select,$attrib element {node-name($element)} {$element/@*, for $child at $pos in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then - element {node-name($child)}{ + element {node-name($child)}{ attribute {$attribute} {$label,"_",$pos}, u:label-matching-elements($child,$select,$attribute,$label,$replace) - } + } else u:label-matching-elements($child,$select,$attribute,$label,$replace) else - $child + $child } }; @@ -778,7 +778,7 @@ declare function u:add-attribute-matching-elements($element as element(),$select element {node-name($element)} {$element/@*, for $child at $pos in $element/node() - return + return if ($child instance of element()) then @@ -788,17 +788,17 
@@ declare function u:add-attribute-matching-elements($element as element(),$select attribute {$attribute} {$label}, if ($child/node() instance of text()) then $child/text() - else if ($child/node() instance of element()) then + else if ($child/node() instance of element()) then u:add-attribute-matching-elements($child,$select,$attribute,$label) else $child/* - } + } else u:add-attribute-matching-elements($child,$select,$attribute,$label) - + else if ($child/node() instance of text()) then $child/text() - else + else $child } }; @@ -807,18 +807,18 @@ declare function u:add-attributes-matching-elements($element as element(),$selec element {node-name($element)} {$element/@*, for $child at $pos in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then element {node-name($child)}{ $attributes, u:add-attributes-matching-elements($child,$select,$attributes) - } + } else u:add-attributes-matching-elements($child,$select,$attributes) else - $child + $child } }; @@ -827,7 +827,7 @@ declare function u:string-replace-matching-elements($element as element(),$selec element {node-name($element)} {$element/@*, for $child at $pos in $element/node() - return + return if ($child instance of element()) then if ($child intersect $select) then (util:log('info','attribute logged'),string($replace)) @@ -888,7 +888,7 @@ declare function u:declare-used-namespaces ( $root as node()? 
) as xs:anyURI* { let $namespaces := (distinct-values($root/descendant-or-self::*/(.|@*)/namespace-uri(.)) ) return for $namespace at $pos in $namespaces -return +return let $ns := concat('ns',$pos) return util:declare-namespace($ns,$namespace) @@ -900,7 +900,7 @@ let $prefix := (distinct-values($root/descendant-or-self::*/(.|@*)/substring-b let $namespaces := (distinct-values($root/descendant-or-self::*/(.|@*)/namespace-uri(.)) ) return for $namespace at $pos in $namespaces - return + return if ($namespace eq 'http://www.w3.org/XML/1998/namespace') then () else if ($namespace eq 'http://www.w3.org/ns/xproc-step') then @@ -917,15 +917,15 @@ let $prefix := (distinct-values($root/descendant-or-self::*/(.|@*)/substring-b let $namespaces := (distinct-values($root/descendant-or-self::*/(.|@*)/namespace-uri(.)) ) return for $namespace at $pos in $namespaces - return - + return + let $ns := $prefix[$pos - 1] return if ($namespace eq '') then () else if ($namespace eq 'http://www.w3.org/XML/1998/namespace') then () - else if ($ns) then + else if ($ns) then concat('declare namespace ',$ns,'="',$namespace,'";') else concat('declare default element namespace "',$namespace,'";') @@ -948,12 +948,13 @@ declare function u:serialize($xml,$options){ (: -------------------------------------------------------------------------- :) +(: TODO schedule for deprecation ? 
use fn:parse-xml instead :) declare function u:parse-string($string) as item()*{ util:parse($string) }; (: -------------------------------------------------------------------------- :) -declare function u:map($func, $seqA as item()*, $seqB as item()*) +declare function u:map($func, $seqA as item()*, $seqB as item()*) as item()* { if(count($seqA) != count($seqB)) then () else @@ -964,7 +965,7 @@ as item()* { }; (: -------------------------------------------------------------------------- :) -declare function u:filter($func, $seq as item()*) +declare function u:filter($func, $seq as item()*) as item()* { for $i in $seq return @@ -984,7 +985,7 @@ declare function u:printstep ($step,$meta,$value) { (: -------------------------------------------------------------------------- :) declare function u:strip-namespace($e as element()) as element() { - + element {QName((),local-name($e))} { for $child in $e/(@*,node()) return @@ -1034,5 +1035,3 @@ declare function u:step-fold( $pipeline, $result[last()], ($outputs,$result)) }; - - diff --git a/extensions/xqdoc/src/org/exist/xqdoc/xquery/Scan.java b/extensions/xqdoc/src/org/exist/xqdoc/xquery/Scan.java index 20965a5f574..2ff6b24053c 100644 --- a/extensions/xqdoc/src/org/exist/xqdoc/xquery/Scan.java +++ b/extensions/xqdoc/src/org/exist/xqdoc/xquery/Scan.java @@ -8,6 +8,7 @@ import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import org.exist.source.*; import org.exist.storage.lock.Lock.LockMode; @@ -90,36 +91,38 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce } else { String uri = args[0].getStringValue(); if (uri.startsWith(XmldbURI.XMLDB_URI_PREFIX)) { - Collection collection = null; - DocumentImpl doc = null; try { XmldbURI resourceURI = XmldbURI.xmldbUriFor(uri); - collection = 
context.getBroker().openCollection(resourceURI.removeLastSegment(), LockMode.READ_LOCK); - if (collection == null) { - LOG.warn("collection not found: " + resourceURI.getCollectionPath()); - return Sequence.EMPTY_SEQUENCE; - } - doc = collection.getDocumentWithLock(context.getBroker(), resourceURI.lastSegment(), LockMode.READ_LOCK); - if (doc == null) - return Sequence.EMPTY_SEQUENCE; - if (doc.getResourceType() != DocumentImpl.BINARY_FILE || - !doc.getMetadata().getMimeType().equals("application/xquery")) { - throw new XPathException(this, "XQuery resource: " + uri + " is not an XQuery or " + - "declares a wrong mime-type"); + try (final Collection collection = context.getBroker().openCollection(resourceURI.removeLastSegment(), LockMode.READ_LOCK)) { + if (collection == null) { + LOG.warn("collection not found: " + resourceURI.getCollectionPath()); + return Sequence.EMPTY_SEQUENCE; + } + + try(final LockedDocument lockedDoc = collection.getDocumentWithLock(context.getBroker(), resourceURI.lastSegment(), LockMode.READ_LOCK)) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + final DocumentImpl doc = lockedDoc == null ? 
null : lockedDoc.getDocument(); + if (doc == null) { + return Sequence.EMPTY_SEQUENCE; + } + if (doc.getResourceType() != DocumentImpl.BINARY_FILE || + !doc.getMetadata().getMimeType().equals("application/xquery")) { + throw new XPathException(this, "XQuery resource: " + uri + " is not an XQuery or " + + "declares a wrong mime-type"); + } + source = new DBSource(context.getBroker(), (BinaryDocument) doc, false); + name = doc.getFileURI().toString(); + } + } catch (LockException e) { + throw new XPathException(this, "internal lock error: " + e.getMessage()); + } catch (PermissionDeniedException pde) { + throw new XPathException(this, pde.getMessage(), pde); } - source = new DBSource(context.getBroker(), (BinaryDocument) doc, false); - name = doc.getFileURI().toString(); } catch (URISyntaxException e) { throw new XPathException(this, "invalid module uri: " + uri + ": " + e.getMessage(), e); - } catch (LockException e) { - throw new XPathException(this, "internal lock error: " + e.getMessage()); - } catch(PermissionDeniedException pde) { - throw new XPathException(this, pde.getMessage(), pde); - } finally { - if (doc != null) - doc.getUpdateLock().release(LockMode.READ_LOCK); - if(collection != null) - collection.release(LockMode.READ_LOCK); } } else { // first check if the URI points to a registered module @@ -128,6 +131,9 @@ public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathExce uri = location; try { source = SourceFactory.getSource(context.getBroker(), context.getModuleLoadPath(), uri, false); + if (source == null) { + throw new XPathException(this, "failed to read module " + uri); + } name = extractName(uri); } catch (IOException e) { throw new XPathException(this, "failed to read module " + uri, e); diff --git a/installer/install.xml.tmpl b/installer/install.xml.tmpl index 9a5729d461b..d84535bdc93 100644 --- a/installer/install.xml.tmpl +++ b/installer/install.xml.tmpl @@ -3,7 +3,7 @@ @name@ @version@ http://exist-db.org - 1.6 + 1.8 no 
@@ -47,7 +47,7 @@ - + @@ -206,7 +206,6 @@ - diff --git a/installer/scripts/eXist-settings.sh b/installer/scripts/eXist-settings.sh index f975c3cf708..a770e6079ea 100755 --- a/installer/scripts/eXist-settings.sh +++ b/installer/scripts/eXist-settings.sh @@ -1,6 +1,8 @@ -# -*-Shell-script-*- +#!/usr/bin/env bash + +## # Common eXist script functions and settings -# $Id:eXist-settings.sh 7231 2008-01-14 22:33:35Z wolfgang_m $ +## get_exist_home() { case "$1" in diff --git a/installer/scripts/setup.bat b/installer/scripts/setup.bat index bea7fd66a7c..accfbee57be 100644 --- a/installer/scripts/setup.bat +++ b/installer/scripts/setup.bat @@ -1,28 +1,26 @@ @echo off - -rem $Id$ +::will be set by the installer +set JAVA_HOME="$JAVA_HOME" +set EXIST_HOME="$INSTALL_PATH" ::remove any quotes from JAVA_HOME and EXIST_HOME env var, will be re-added below for /f "delims=" %%G IN (%JAVA_HOME%) DO SET JAVA_HOME=%%G for /f "delims=" %%G IN (%EXIST_HOME%) DO SET EXIST_HOME=%%G -rem will be set by the installer -set EXIST_HOME=$INSTALL_PATH -rem will be set by the installer -set JAVA_HOME=$JAVA_HOME - :gotJavaHome set JAVA_CMD="%JAVA_HOME%\bin\java" set JAVA_OPTS="-Xms64m -Xmx768m" -rem make sure there's the jetty tmp directory +::make sure there's the jetty tmp directory mkdir "%EXIST_HOME%\tools\jetty\tmp" -rem echo "JAVA_HOME: %JAVA_HOME%" -rem echo "EXIST_HOME: %EXIST_HOME%" -echo %JAVA_OPTS% +echo "JAVA_HOME: [%JAVA_HOME%]" +echo "EXIST_HOME: [%EXIST_HOME%]" +echo "EXIST_OPTS: [%JAVA_OPTS%]" +echo: +echo: %JAVA_CMD% "%JAVA_OPTS%" -Dexist.home="%EXIST_HOME%" -Duse.autodeploy.feature=false -jar "%EXIST_HOME%\start.jar" org.exist.installer.Setup %1 %2 %3 %4 %5 %6 %7 %8 %9 diff --git a/installer/vm.properties b/installer/vm.properties index ad06eda014a..cf6b5ed778e 100644 --- a/installer/vm.properties +++ b/installer/vm.properties @@ -10,6 +10,6 @@ memory.min=64 vmoptions=-Dfile.encoding=UTF-8 # Mac specific properties -vmoptions.mac=-Xdock:name="eXist-db" -Xdock:icon="icon.png" 
-Dapple.laf.useScreenMenuBar="true" +vmoptions.mac=-Xdock:name=eXist-db -Xdock:icon=icon.png -Dapple.laf.useScreenMenuBar=true vmoptions.win=-Djava.security.manager -Djava.security.policy=tools/yajsw/conf/yajsw.policy \ No newline at end of file diff --git a/lib/core/caffeine-2.4.0.jar b/lib/core/caffeine-2.4.0.jar new file mode 100644 index 00000000000..80b85190ea7 Binary files /dev/null and b/lib/core/caffeine-2.4.0.jar differ diff --git a/lib/core/deuce-annotations-1.0-SNAPSHOT.jar b/lib/core/deuce-annotations-1.0-SNAPSHOT.jar new file mode 100644 index 00000000000..669d487f29a Binary files /dev/null and b/lib/core/deuce-annotations-1.0-SNAPSHOT.jar differ diff --git a/lib/core/j8fu-1.16.jar b/lib/core/j8fu-1.16.jar deleted file mode 100644 index 38e1835f823..00000000000 Binary files a/lib/core/j8fu-1.16.jar and /dev/null differ diff --git a/lib/core/j8fu-1.21.jar b/lib/core/j8fu-1.21.jar new file mode 100644 index 00000000000..f8194e50c40 Binary files /dev/null and b/lib/core/j8fu-1.21.jar differ diff --git a/lib/core/jsr305-3.0.1.jar b/lib/core/jsr305-3.0.1.jar new file mode 100644 index 00000000000..021df892b9e Binary files /dev/null and b/lib/core/jsr305-3.0.1.jar differ diff --git a/lib/core/multilock-1.0-SNAPSHOT.jar b/lib/core/multilock-1.0-SNAPSHOT.jar new file mode 100644 index 00000000000..7e8195dbe4d Binary files /dev/null and b/lib/core/multilock-1.0-SNAPSHOT.jar differ diff --git a/lib/core/stax2-api-3.1.4.jar b/lib/core/stax2-api-3.1.4.jar new file mode 100644 index 00000000000..dded0369289 Binary files /dev/null and b/lib/core/stax2-api-3.1.4.jar differ diff --git a/lib/core/woodstox-core-5.0.3.jar b/lib/core/woodstox-core-5.0.3.jar new file mode 100644 index 00000000000..1c268641c88 Binary files /dev/null and b/lib/core/woodstox-core-5.0.3.jar differ diff --git a/lib/endorsed/Saxon-HE-9.6.0-7.jar b/lib/endorsed/Saxon-HE-9.6.0-7.jar deleted file mode 100644 index 37224925681..00000000000 Binary files a/lib/endorsed/Saxon-HE-9.6.0-7.jar and 
/dev/null differ diff --git a/lib/endorsed/Saxon-HE-9.8.0-12.jar b/lib/endorsed/Saxon-HE-9.8.0-12.jar new file mode 100644 index 00000000000..039185ac4ea Binary files /dev/null and b/lib/endorsed/Saxon-HE-9.8.0-12.jar differ diff --git a/lib/endorsed/cupv10k-runtime-LICENSE.txt b/lib/endorsed/cupv10k-runtime-LICENSE.txt new file mode 100644 index 00000000000..4ea7a709bfd --- /dev/null +++ b/lib/endorsed/cupv10k-runtime-LICENSE.txt @@ -0,0 +1,19 @@ +CUP Parser Generator Copyright Notice, License, and Disclaimer + +Copyright 1996-1999 by Scott Hudson, Frank Flannery, C. Scott Ananian + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, provided +that the above copyright notice appear in all copies and that both +the copyright notice and this permission notice and warranty disclaimer +appear in supporting documentation, and that the names of the authors +or their employers not be used in advertising or publicity pertaining +to distribution of the software without specific, written prior permission. + +The authors and their employers disclaim all warranties with regard to +this software, including all implied warranties of merchantability +and fitness. In no event shall the authors or their employers be liable +for any special, indirect or consequential damages or any damages +whatsoever resulting from loss of use, data or profits, whether in an action +of contract, negligence or other tortious action, arising out of or +in connection with the use or performance of this software. 
diff --git a/lib/endorsed/cupv10k-runtime.jar b/lib/endorsed/cupv10k-runtime.jar new file mode 100644 index 00000000000..97d11a1a7b0 Binary files /dev/null and b/lib/endorsed/cupv10k-runtime.jar differ diff --git a/lib/endorsed/org.eclipse.wst.xml.xpath2.processor-1.2.0.jar b/lib/endorsed/org.eclipse.wst.xml.xpath2.processor-1.2.0.jar new file mode 100644 index 00000000000..ecccaea9946 Binary files /dev/null and b/lib/endorsed/org.eclipse.wst.xml.xpath2.processor-1.2.0.jar differ diff --git a/lib/endorsed/xercesImpl-2.11.0.jar b/lib/endorsed/xercesImpl-2.11.0.jar deleted file mode 100644 index 0aaa990f3ec..00000000000 Binary files a/lib/endorsed/xercesImpl-2.11.0.jar and /dev/null differ diff --git a/lib/endorsed/xercesImpl-2.12.0.jar b/lib/endorsed/xercesImpl-2.12.0.jar new file mode 100644 index 00000000000..b032fb422d2 Binary files /dev/null and b/lib/endorsed/xercesImpl-2.12.0.jar differ diff --git a/lib/optional/jaxb-api-2.3.0.jar b/lib/optional/jaxb-api-2.3.0.jar new file mode 100644 index 00000000000..0817c083ad9 Binary files /dev/null and b/lib/optional/jaxb-api-2.3.0.jar differ diff --git a/log4j2.xml b/log4j2.xml index 227627de40a..cbcfa4a7a5d 100644 --- a/log4j2.xml +++ b/log4j2.xml @@ -22,6 +22,22 @@ + + + + + + + + + + + + + + + + @@ -129,7 +145,19 @@ - + + + + + + + + + + + + + diff --git a/nbproject/project.properties b/nbproject/project.properties index 178ed34ffbf..383cde04046 100644 --- a/nbproject/project.properties +++ b/nbproject/project.properties @@ -11,10 +11,11 @@ file.reference.commons-io-2.6.jar=lib/core/commons-io-2.6.jar file.reference.commons-lang3-3.7.jar=lib/optional/commons-lang3-3.7.jar file.reference.commons-net-3.6.jar=lib/optional/commons-net-3.6.jar file.reference.httpcore-4.4.8.jar=lib/optional/httpcore-4.4.8.jar -file.reference.j8fu-1.16.jar=lib/core/j8fu-1.16.jar +file.reference.j8fu-1.21.jar=lib/core/j8fu-1.21.jar file.reference.icu4j-59_1.jar=lib/core/icu4j-59_1.jar 
file.reference.icu4j-localespi-59_1.jar=lib/core/icu4j-localespi-59_1.jar file.reference.caffeine-2.6.2.jar=lib/core/caffeine-2.6.2.jar +file.reference.jaxb-api-2.3.0.jar=lib/optional/jaxb-api-2.3.0.jar file.reference.jctools-core-2.1.2.jar=lib/core/jctools-core-2.1.2.jar file.reference.jsr305-3.0.2.jar=lib/core/jsr305-3.0.2.jar file.reference.jargo-0.4.14.jar=lib/core/jargo-0.4.14.jar @@ -35,7 +36,7 @@ file.reference.nekohtml-1.9.22.jar=lib/user/nekohtml-1.9.22.jar file.reference.objenesis-2.2.jar=lib/test/objenesis-2.2.jar file.reference.pkg-java-fork.jar=lib/core/pkg-java-fork.jar file.reference.quartz-2.3.0.jar=lib/core/quartz-2.3.0.jar -file.reference.Saxon-HE-9.6.0-7.jar=lib/endorsed/Saxon-HE-9.6.0-7.jar +file.reference.Saxon-HE-9.8.0-12.jar=lib/endorsed/Saxon-HE-9.8.0-12.jar file.reference.slf4j-api-1.7.25.jar=lib/core/slf4j-api-1.7.25.jar file.reference.tagsoup-1.2.1.jar=extensions/expath/lib/tagsoup-1.2.1.jar file.reference.tools-java-1.0-SNAPSHOT.jar=extensions/expath/lib/tools-java-1.0-SNAPSHOT.jar @@ -169,7 +170,7 @@ file.reference.jdom-1.1.jar=extensions/webdav/lib/jdom-1.1.jar test.src5.dir=extensions/indexes/ngram/test/src jnlp.signing.keystore= build.dir=nbproject/build -file.reference.aspectjtools-1.9.1.jar=tools/aspectj/lib/aspectjtools-1.9.1.jar +file.reference.aspectjtools-1.9.2.jar=tools/aspectj/lib/aspectjtools-1.9.2.jar application.title=eXist-db jmx.rmi.use.port=true file.reference.jetty-io-9.4.10.v20180503.jar=tools/jetty/lib/jetty-io-9.4.10.v20180503.jar @@ -186,7 +187,7 @@ eXist-1.33.dir=${file.reference.fluent-src} jmx.jconsole.period=4 src.src3.dir=extensions/indexes/spatial/src file.reference.exquery-xquery-1.0-SNAPSHOT.jar=extensions/exquery/lib/exquery-xquery-1.0-SNAPSHOT.jar -file.reference.xercesImpl-2.11.0.jar=lib/endorsed/xercesImpl-2.11.0.jar +file.reference.xercesImpl-2.12.0.jar=lib/endorsed/xercesImpl-2.12.0.jar file.reference.isorelax-20041111.jar=lib/optional/isorelax-20041111.jar 
file.reference.fluent-src=extensions/fluent/src javadoc.use=true @@ -194,12 +195,12 @@ file.reference.ws-commons-util-1.0.2.jar=lib/core/ws-commons-util-1.0.2.jar file.reference.xmlunit-core-2.6.0.jar=lib/test/xmlunit-core-2.6.0.jar file.reference.xmlunit-legacy-2.6.0.jar=lib/test/xmlunit-legacy-2.6.0.jar file.reference.xmlunit-matchers-2.6.0.jar=lib/test/xmlunit-matchers-2.6.0.jar -file.reference.aspectjweaver-1.9.1.jar=tools/aspectj/lib/aspectjweaver-1.9.1.jar +file.reference.aspectjweaver-1.9.2.jar=tools/aspectj/lib/aspectjweaver-1.9.2.jar file.reference.commons-logging-1.2.jar=lib/core/commons-logging-1.2.jar file.reference.xqjapi-1.0-fr.jar=lib/optional/xqjapi-1.0-fr.jar test.eXist-1.32.dir=${file.reference.test-src-1} file.reference.log4j-jul-2.11.0.jar=lib/core/log4j-jul-2.11.0.jar -file.reference.aspectjrt-1.9.1.jar=tools/aspectj/lib/aspectjrt-1.9.1.jar +file.reference.aspectjrt-1.9.2.jar=tools/aspectj/lib/aspectjrt-1.9.2.jar excludes=**/spatial/*,**/modules/svn/*,**/modules/xmpp/**, **/xmlcalabash/**, **/xslfo/**,**/cssparser/**,**/oracle/**,**/memcached/**,**/cqlparser/**,**/contentextraction/** jnlp.descriptor=application file.reference.exquery-annotations-common-api-1.0-SNAPSHOT.jar=extensions/exquery/lib/exquery-annotations-common-api-1.0-SNAPSHOT.jar @@ -268,9 +269,9 @@ javac.classpath=\ ${file.reference.xml-apis-1.4.01.jar}:\ ${file.reference.xml-resolver-1.2.jar}:\ ${file.reference.serializer-2.7.2.jar}:\ - ${file.reference.xercesImpl-2.11.0.jar}:\ + ${file.reference.xercesImpl-2.12.0.jar}:\ ${file.reference.xmlunit-core-2.4.0.jar}:\ - ${file.reference.Saxon-HE-9.6.0-7.jar}:\ + ${file.reference.Saxon-HE-9.8.0-12.jar}:\ ${file.reference.xalan-2.7.2.jar}:\ ${file.reference.antlr-2.7.7.jar}:\ ${file.reference.clj-ds-0.0.4.jar}:\ @@ -330,9 +331,9 @@ javac.classpath=\ ${file.reference.hamcrest-core-1.3.jar}:\ ${file.reference.ant-1.10.2.jar}:\ ${file.reference.xmlunit-1.6.jar}:\ - ${file.reference.aspectjtools-1.9.1.jar}:\ - 
${file.reference.aspectjrt-1.9.1.jar}:\ - ${file.reference.aspectjweaver-1.9.1.jar}:\ + ${file.reference.aspectjtools-1.9.2.jar}:\ + ${file.reference.aspectjrt-1.9.2.jar}:\ + ${file.reference.aspectjweaver-1.9.2.jar}:\ ${file.reference.existdb-favicon.jar}:\ ${file.reference.javax.annotation-api-1.2.jar}:\ ${file.reference.jetty-annotations-9.4.10.v20180503.jar}:\ @@ -367,10 +368,11 @@ javac.classpath=\ ${file.reference.tools-java-1.0-SNAPSHOT.jar}:\ ${file.reference.tagsoup-1.2.1.jar}:\ ${file.reference.exquery-annotations-common-1.0-SNAPSHOT.jar}:\ - ${file.reference.j8fu-1.16.jar}:\ + ${file.reference.j8fu-1.21.jar}:\ ${file.reference.icu4j-59_1.jar}:\ ${file.reference.icu4j-localespi-59_1.jar}:\ ${file.reference.caffeine-2.6.2.jar}:\ + ${file.reference.jaxb-api-2.3.0.jar}:\ ${file.reference.jctools-core-2.1.2.jar}:\ ${file.reference.jsr305-3.0.2.jar}:\ ${file.reference.exquery-annotations-common-api-1.0-SNAPSHOT.jar}:\ diff --git a/samples/http/put.py b/samples/http/put.py index ff680a05670..ac08b8f1d07 100644 --- a/samples/http/put.py +++ b/samples/http/put.py @@ -1,4 +1,5 @@ #!/usr/bin/python +from __future__ import print_function import httplib import sys @@ -7,31 +8,34 @@ collection = sys.argv[1] file = sys.argv[2] -f = open(file, 'r') -print "reading file %s ..." % file -xml = f.read() -f.close() +with open(file, 'r') as f: + print("reading file {} ...".format(file)) + xml = f.read() + f.close() -p = rfind(file, '/') -if p > -1: - doc = file[p+1:] -else: - doc = file -print doc -print "storing document to collection %s ..." 
% collection -con = httplib.HTTP('localhost:8080') -con.putrequest('PUT', '/exist/rest/%s/%s' % (collection, doc)) -con.putheader('Content-Type', 'application/xml') -clen = len(xml) -con.putheader('Content-Length', `clen`) -con.endheaders() -con.send(xml) + p = rfind(file, '/') + if p > -1: + doc = file[p+1:] + else: + doc = file + + print(doc) + print("storing document to collection {} ...".format(collection)) + + con = httplib.HTTP('localhost:8080') + con.putrequest('PUT', '/exist/rest/{col}/{doc}'.format(col=collection, doc=doc)) + con.putheader('Content-Type', 'application/xml') + + clen = len(xml) + + con.putheader('Content-Length', 'clen') + con.endheaders() + con.send(xml) -errcode, errmsg, headers = con.getreply() + errcode, errmsg, headers = con.getreply() -if errcode != 200: - f = con.getfile() - print 'An error occurred: %s' % errmsg - f.close() -else: - print "Ok." + if errcode != 200: + f = con.getfile() + print('An error occurred: {}'.format(errmsg)) + else: + print("Ok.") diff --git a/samples/src/org/exist/examples/triggers/ExampleTrigger.java b/samples/src/org/exist/examples/triggers/ExampleTrigger.java index 10e145835d1..0b18d79ab92 100644 --- a/samples/src/org/exist/examples/triggers/ExampleTrigger.java +++ b/samples/src/org/exist/examples/triggers/ExampleTrigger.java @@ -66,9 +66,9 @@ public class ExampleTrigger extends FilteringTrigger implements DocumentTrigger * @see org.exist.collections.Trigger#configure(org.exist.storage.DBBroker, org.exist.collections.Collection, java.util.Map) */ @Override - public void configure(DBBroker broker, Collection parent, Map parameters) + public void configure(DBBroker broker, Txn transaction, Collection parent, Map parameters) throws TriggerException { - super.configure(broker, parent, parameters); + super.configure(broker, transaction, parent, parameters); // the name of the contents file can be set through parameters XmldbURI contentsFile = null; String contentsName = (String)parameters.get("contents"); diff 
--git a/src/org/exist/Database.java b/src/org/exist/Database.java index 30232eacbde..a788d014590 100644 --- a/src/org/exist/Database.java +++ b/src/org/exist/Database.java @@ -58,6 +58,8 @@ public interface Database { public String getId(); + ThreadGroup getThreadGroup(); + /** * * @return SecurityManager diff --git a/src/org/exist/LifeCycle.java b/src/org/exist/LifeCycle.java index f9f01354a09..ebb22677fb1 100644 --- a/src/org/exist/LifeCycle.java +++ b/src/org/exist/LifeCycle.java @@ -22,6 +22,7 @@ package org.exist; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; /** * @author Dmitriy Shabanov @@ -29,7 +30,7 @@ */ public interface LifeCycle { - public void start(DBBroker broker) throws EXistException; + public void start(DBBroker broker, final Txn transaction) throws EXistException; public void sync(DBBroker broker) throws EXistException; diff --git a/src/org/exist/ant/XMLDBExistTask.java b/src/org/exist/ant/XMLDBExistTask.java index e1f6159b7b7..ceb9ac58398 100644 --- a/src/org/exist/ant/XMLDBExistTask.java +++ b/src/org/exist/ant/XMLDBExistTask.java @@ -32,7 +32,7 @@ /** - * an ant task to check for the existance of a collection or resource to be used as a ant condition. + * an ant task to check for the existence of a collection or resource to be used as a ant condition. 
* * @author peter.klotz@blue-elephant-systems.com */ diff --git a/src/org/exist/ant/XMLDBExtractTask.java b/src/org/exist/ant/XMLDBExtractTask.java index 1b06c2dfab0..52152257e37 100644 --- a/src/org/exist/ant/XMLDBExtractTask.java +++ b/src/org/exist/ant/XMLDBExtractTask.java @@ -191,13 +191,13 @@ private void extractSubCollections(final Collection base, final String path) thr subdir = childCol; } - if (!dir.exists() && (createdirectories == true)) { + if (!dir.exists() && createdirectories) { dir.mkdirs(); } extractResources(col, subdir); - if (subcollections == true) { + if (subcollections) { extractSubCollections(col, subdir); } } @@ -230,14 +230,14 @@ private void writeResource(final Resource res, final File dest) throws XMLDBExce * @throws XMLDBException DOCUMENT ME! */ private void writeXMLResource(final XMLResource res, final File dest) throws IOException, XMLDBException { - if (createdirectories == true) { + if (createdirectories) { final File parentDir = new File(dest.getParent()); if (!parentDir.exists()) { parentDir.mkdirs(); } } - if (dest != null || overwrite == true) { + if (dest != null || overwrite) { final Properties outputProperties = new Properties(); outputProperties.setProperty(OutputKeys.INDENT, "yes"); final SAXSerializer serializer = (SAXSerializer) SerializerPool.getInstance().borrowObject(SAXSerializer.class); diff --git a/src/org/exist/ant/XMLDBListTask.java b/src/org/exist/ant/XMLDBListTask.java index 9d0c8d1bb55..7a7d6c5f9b1 100644 --- a/src/org/exist/ant/XMLDBListTask.java +++ b/src/org/exist/ant/XMLDBListTask.java @@ -50,7 +50,7 @@ public void execute() throws BuildException throw( new BuildException( "You have to specify an XMLDB collection URI" ) ); } - if( ( hasCollections == false ) && ( hasResources == false ) ) { + if(!hasCollections && !hasResources) { throw( new BuildException( "You have at least one of collections or resources or both" ) ); } diff --git a/src/org/exist/backup/Backup.java b/src/org/exist/backup/Backup.java 
index a721fc716e5..d0d0161a38d 100644 --- a/src/org/exist/backup/Backup.java +++ b/src/org/exist/backup/Backup.java @@ -23,6 +23,7 @@ import org.exist.util.FileUtils; import com.evolvedbinary.j8fu.function.FunctionE; +import org.exist.util.NamedThreadGroupFactory; import org.exist.util.SystemExitCodes; import org.xml.sax.SAXException; import org.xml.sax.helpers.AttributesImpl; @@ -61,6 +62,7 @@ import java.util.Properties; import java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.Nullable; import javax.swing.*; import javax.xml.transform.OutputKeys; @@ -74,46 +76,43 @@ public class Backup private static final int currVersion = 1; + private static final AtomicInteger backupThreadId = new AtomicInteger(); + private static final NamedThreadGroupFactory backupThreadGroupFactory = new NamedThreadGroupFactory("java-backup-tool"); + private final ThreadGroup backupThreadGroup = backupThreadGroupFactory.newThreadGroup(null); + private Path target; private XmldbURI rootCollection; private String user; private String pass; - public Properties defaultOutputProperties = new Properties(); - - public Properties contentsOutputProps = new Properties(); + private final Properties defaultOutputProperties = new Properties(); + private final Properties contentsOutputProps = new Properties(); - { - defaultOutputProperties.setProperty( OutputKeys.INDENT, "no" ); - defaultOutputProperties.setProperty( OutputKeys.ENCODING, "UTF-8" ); - defaultOutputProperties.setProperty( OutputKeys.OMIT_XML_DECLARATION, "no" ); - defaultOutputProperties.setProperty( EXistOutputKeys.EXPAND_XINCLUDES, "no" ); - defaultOutputProperties.setProperty( EXistOutputKeys.PROCESS_XSL_PI, "no" ); + public Backup(final String user, final String pass, final Path target) { + this(user, pass, target, XmldbURI.LOCAL_DB_URI); } - { - contentsOutputProps.setProperty( OutputKeys.INDENT, "yes" ); + public Backup(final String user, final String pass, final Path target, final XmldbURI rootCollection ) { + 
this(user, pass, target, rootCollection, null); } - public Backup( String user, String pass, final Path target, XmldbURI rootCollection ) - { + public Backup(final String user, final String pass, final Path target, final XmldbURI rootCollection, + @Nullable final Properties properties ) { this.user = user; this.pass = pass; this.target = target; this.rootCollection = rootCollection; - } + defaultOutputProperties.setProperty(OutputKeys.INDENT, "no"); + defaultOutputProperties.setProperty(OutputKeys.ENCODING, "UTF-8"); + defaultOutputProperties.setProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); + defaultOutputProperties.setProperty(EXistOutputKeys.EXPAND_XINCLUDES, "no"); + defaultOutputProperties.setProperty(EXistOutputKeys.PROCESS_XSL_PI, "no"); - public Backup( String user, String pass, final Path target ) - { - this(user, pass, target, XmldbURI.LOCAL_DB_URI); - } - - - public Backup( String user, String pass, final Path target, XmldbURI rootCollection, Properties property ) - { - this( user, pass, target, rootCollection ); - this.defaultOutputProperties.setProperty( OutputKeys.INDENT, property.getProperty( "indent", "no" ) ); + if (properties != null) { + this.defaultOutputProperties.setProperty(OutputKeys.INDENT, properties.getProperty("indent", "no")); + } + this.contentsOutputProps.setProperty(OutputKeys.INDENT, "yes"); } public static String encode( String enco ) @@ -199,13 +198,17 @@ public void backup( boolean guiMode, JFrame parent ) throws XMLDBException, IOEx final BackupDialog dialog = new BackupDialog( parent, false ); dialog.setSize( new Dimension( 350, 150 ) ); dialog.setVisible( true ); - final BackupThread thread = new BackupThread( current, dialog, this ); - thread.start(); + final BackupRunnable backupRunnable = new BackupRunnable(current, dialog, this); + final Thread backupThread = newBackupThread("backup-" + backupThreadId.getAndIncrement(), backupRunnable); + backupThread.start(); + + + //super("exist-backupThread-" + 
backupThreadId.getAndIncrement()); if( parent == null ) { // if backup runs as a single dialog, wait for it (or app will terminate) - while( thread.isAlive() ) { + while( backupThread.isAlive() ) { synchronized( this ) { @@ -492,15 +495,24 @@ public static void writeACLPermission(SAXSerializer serializer, ACLPermission ac serializer.endElement(Namespaces.EXIST_NS, "acl", "acl"); } - private static class BackupThread extends Thread { + /** + * Create a new thread for this backup instance. + * + * @param threadName the name of the thread + * @param runnable the function to execute on the thread + * + * @return the thread + */ + private Thread newBackupThread(final String threadName, final Runnable runnable) { + return new Thread(backupThreadGroup, runnable, backupThreadGroup.getName() + "." + threadName); + } + + private static class BackupRunnable implements Runnable { private final Collection collection; private final BackupDialog dialog; private final Backup backup; - private static final AtomicInteger backupThreadId = new AtomicInteger(); - - public BackupThread(final Collection collection, final BackupDialog dialog, final Backup backup) { - super("exist-backupThread-" + backupThreadId.getAndIncrement()); + public BackupRunnable(final Collection collection, final BackupDialog dialog, final Backup backup) { this.collection = collection; this.dialog = dialog; this.backup = backup; diff --git a/src/org/exist/backup/ConsistencyCheck.java b/src/org/exist/backup/ConsistencyCheck.java index fac85812c4e..71a1f323410 100644 --- a/src/org/exist/backup/ConsistencyCheck.java +++ b/src/org/exist/backup/ConsistencyCheck.java @@ -1,26 +1,25 @@ /* - * eXist Open Source Native XML Database - * Copyright (C) 2001-07 The eXist Project - * http://exist-db.org + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser 
General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - * $Id$ + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ package org.exist.backup; +import org.exist.dom.persistent.*; import org.exist.security.Account; import org.exist.security.Group; import org.exist.security.Permission; @@ -28,10 +27,6 @@ import org.w3c.dom.Node; import org.exist.collections.Collection; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.ElementImpl; -import org.exist.dom.persistent.IStoredNode; import org.exist.management.Agent; import org.exist.management.AgentFactory; import org.exist.numbering.NodeId; @@ -45,7 +40,6 @@ import org.exist.storage.dom.DOMTransaction; import org.exist.storage.index.CollectionStore; import org.exist.storage.io.VariableByteInput; -import org.exist.storage.lock.Lock.LockMode; import org.exist.xmldb.XmldbURI; import org.exist.xquery.TerminatedException; @@ -53,31 +47,31 @@ import java.util.*; +import javax.annotation.Nullable; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; + import org.exist.security.PermissionDeniedException; -public class ConsistencyCheck -{ - private Stack elementStack = new Stack(); - private int documentCount = -1; +public class ConsistencyCheck { + private final DBBroker broker; + private final int defaultIndexDepth; + private final boolean directAccess; + private final boolean checkDocs; - private DBBroker broker; - private int defaultIndexDepth; - private boolean directAccess = false; - private boolean checkDocs = false; + private final Deque elementStack = new ArrayDeque<>(); + private int documentCount = -1; /** - * @param broker the db broker to use + * @param broker the db broker to use * @param directAccess set to true to bypass the collections.dbx index and perform a low-level scan instead - * @param checkDocs set to true to perform additional checks on every document (slow) + * @param checkDocs set to true to perform additional checks on every document (slow) */ - public ConsistencyCheck( DBBroker broker, boolean 
directAccess, boolean checkDocs) - { - this.broker = broker; - this.defaultIndexDepth = ( (NativeBroker)broker ).getDefaultIndexDepth(); - this.directAccess = directAccess; + public ConsistencyCheck(final DBBroker broker, final boolean directAccess, final boolean checkDocs) { + this.broker = broker; + this.defaultIndexDepth = ((NativeBroker) broker).getDefaultIndexDepth(); + this.directAccess = directAccess; this.checkDocs = checkDocs; } @@ -85,190 +79,191 @@ public ConsistencyCheck( DBBroker broker, boolean directAccess, boolean checkDoc * Combines {@link #checkCollectionTree(org.exist.backup.ConsistencyCheck.ProgressCallback)} and {@link * #checkDocuments(org.exist.backup.ConsistencyCheck.ProgressCallback)}. * - * @param callback the callback object to report to - * - * @return a list of {@link ErrorReport} objects or an empty list if no errors were found - * - * @throws TerminatedException DOCUMENT ME! + * @param callback the callback object to report to + * @return a list of {@link ErrorReport} objects or an empty list if no errors were found + * @throws TerminatedException DOCUMENT ME! */ - public List checkAll( ProgressCallback callback ) throws TerminatedException, PermissionDeniedException - { - final List errors = checkCollectionTree( callback ); - checkDocuments( callback, errors ); - return( errors ); + public List checkAll(final ProgressCallback callback) + throws TerminatedException, PermissionDeniedException { + final List errors = checkCollectionTree(callback); + checkDocuments(callback, errors); + return errors; } - /** * Run some tests on the collection hierarchy, starting at the root collection /db. * - * @param callback callback object - * - * @return a list of {@link ErrorReport} instances describing the errors found - * - * @throws TerminatedException DOCUMENT ME! + * @param callback callback object + * @return a list of {@link ErrorReport} instances describing the errors found + * @throws TerminatedException DOCUMENT ME! 
*/ - public List checkCollectionTree( ProgressCallback callback ) throws TerminatedException, PermissionDeniedException - { + public List checkCollectionTree(final ProgressCallback callback) + throws TerminatedException, PermissionDeniedException { AccountImpl.getSecurityProperties().enableCheckPasswords(false); - try { - final List errors = new ArrayList(); - final Collection root = broker.getCollection( XmldbURI.ROOT_COLLECTION_URI ); - checkCollection( root, errors, callback ); - return( errors ); - } - finally { + final List errors = new ArrayList<>(); + final Collection root = broker.getCollection(XmldbURI.ROOT_COLLECTION_URI); + checkCollection(root, errors, callback); + return errors; + } finally { AccountImpl.getSecurityProperties().enableCheckPasswords(true); } } - - private void checkCollection( Collection collection, List errors, ProgressCallback callback ) throws TerminatedException - { + private void checkCollection(final Collection collection, final List errors, + final ProgressCallback callback) throws TerminatedException { final XmldbURI uri = collection.getURI(); - if (callback != null) - {callback.startCollection( uri.toString() );} + if (callback != null) { + callback.startCollection(uri.toString()); + } checkPermissions(collection, errors); try { - for(final Iterator i = collection.collectionIteratorNoLock(broker); i.hasNext(); ) { + for (final Iterator i = collection.collectionIteratorNoLock(broker); i.hasNext(); ) { final XmldbURI childUri = i.next(); try { - final Collection child = broker.getCollection( uri.append( childUri ) ); - - if( child == null ) { - final ErrorReport.CollectionError error = new org.exist.backup.ErrorReport.CollectionError( org.exist.backup.ErrorReport.CHILD_COLLECTION, "Child collection not found: " + childUri + ", parent is " + uri ); - error.setCollectionId( collection.getId() ); - error.setCollectionURI( childUri ); - errors.add( error ); - if (callback != null) - {callback.error( error );} + final Collection child 
= broker.getCollection(uri.append(childUri)); + + if (child == null) { + final ErrorReport.CollectionError error = new org.exist.backup.ErrorReport.CollectionError( + org.exist.backup.ErrorReport.CHILD_COLLECTION, + "Child collection not found: " + childUri + ", parent is " + uri); + error.setCollectionId(collection.getId()); + error.setCollectionURI(childUri); + errors.add(error); + if (callback != null) { + callback.error(error); + } continue; } - if (child.getId() != collection.getId()) - {checkCollection( child, errors, callback );} - } - catch( final Exception e ) { - final ErrorReport.CollectionError error = new ErrorReport.CollectionError( org.exist.backup.ErrorReport.CHILD_COLLECTION, "Error while loading child collection: " + childUri + ", parent is " + uri ); - error.setCollectionId( collection.getId() ); - error.setCollectionURI( childUri ); - errors.add( error ); - if (callback != null) - {callback.error( error );} + if (child.getId() != collection.getId()) { + checkCollection(child, errors, callback); + } + } catch (final Exception e) { + final ErrorReport.CollectionError error = new ErrorReport.CollectionError( + org.exist.backup.ErrorReport.CHILD_COLLECTION, + "Error while loading child collection: " + childUri + ", parent is " + uri); + error.setCollectionId(collection.getId()); + error.setCollectionURI(childUri); + errors.add(error); + if (callback != null) { + callback.error(error); + } } } - } catch(final PermissionDeniedException pde) { - final ErrorReport.CollectionError error = new ErrorReport.CollectionError( org.exist.backup.ErrorReport.CHILD_COLLECTION, "Error while loading collection: " + collection.getURI() + ", parent is " + uri ); - error.setCollectionId(collection.getId() ); + } catch (final PermissionDeniedException pde) { + final ErrorReport.CollectionError error = new ErrorReport.CollectionError( + org.exist.backup.ErrorReport.CHILD_COLLECTION, + "Error while loading collection: " + collection.getURI() + ", parent is " + uri); + 
error.setCollectionId(collection.getId()); error.setCollectionURI(collection.getURI()); errors.add(error); - if(callback != null) { + if (callback != null) { callback.error(error); } } } - - - public int getDocumentCount() throws TerminatedException - { - if( documentCount == -1 ) { + public int getDocumentCount() throws TerminatedException { + if (documentCount == -1) { AccountImpl.getSecurityProperties().enableCheckPasswords(false); - try { - final DocumentCallback cb = new DocumentCallback( null, null, false ); - broker.getResourcesFailsafe( cb, directAccess ); + final DocumentCallback cb = new DocumentCallback(null, null, false); + broker.getResourcesFailsafe(cb, directAccess); documentCount = cb.docCount; - } - finally { + } finally { AccountImpl.getSecurityProperties().enableCheckPasswords(true); } } - return( documentCount ); + return documentCount; } - /** - * Run some tests on all documents stored in the database. The method checks if a document is readable and if its DOM representation is - * consistent. - * - * @param progress progress callback + * Run some tests on all documents stored in the database. + * The method checks if a document is readable and if its DOM representation is consistent. * - * @return a list of {@link ErrorReport} instances describing the errors found - * - * @throws TerminatedException DOCUMENT ME! + * @param progress progress callback + * @return a list of {@link ErrorReport} instances describing the errors found + * @throws TerminatedException DOCUMENT ME! */ - public List checkDocuments( ProgressCallback progress ) throws TerminatedException - { - final List errors = new ArrayList(); - checkDocuments( progress, errors ); - return( errors ); + public List checkDocuments(final ProgressCallback progress) throws TerminatedException { + final List errors = new ArrayList<>(); + checkDocuments(progress, errors); + return errors; } - /** - * Run some tests on all documents stored in the database. 
The method checks if a document is readable and if its DOM representation is - * consistent. + * Run some tests on all documents stored in the database. + * The method checks if a document is readable and if its DOM representation is consistent. * - * @param progress progress callback - * @param errorList error reports will be added to this list, using instances of class {@link ErrorReport}. - * - * @throws TerminatedException DOCUMENT ME! + * @param progress progress callback + * @param errorList error reports will be added to this list, using instances of class {@link ErrorReport}. + * @throws TerminatedException DOCUMENT ME! */ - public void checkDocuments( ProgressCallback progress, List errorList ) throws TerminatedException - { + public void checkDocuments(final ProgressCallback progress, final List errorList) + throws TerminatedException { AccountImpl.getSecurityProperties().enableCheckPasswords(false); try { - final DocumentCallback cb = new DocumentCallback( errorList, progress, true ); - broker.getResourcesFailsafe( cb, directAccess ); + final DocumentCallback cb = new DocumentCallback(errorList, progress, true); + broker.getResourcesFailsafe(cb, directAccess); cb.checkDocs(); - } - finally { + } finally { AccountImpl.getSecurityProperties().enableCheckPasswords(true); } } - public void checkPermissions(Collection collection, List errorList) { + public void checkPermissions(final Collection collection, final List errorList) { try { - Permission perms = collection.getPermissions(); - Account owner = perms.getOwner(); + final Permission perms = collection.getPermissions(); + final Account owner = perms.getOwner(); if (owner == null) { - final ErrorReport.CollectionError error = new ErrorReport.CollectionError( ErrorReport.ACCESS_FAILED, "Owner account not found for collection: " + collection.getURI()); - error.setCollectionId( collection.getId() ); - error.setCollectionURI( collection.getURI() ); + final ErrorReport.CollectionError error = new 
ErrorReport.CollectionError( + ErrorReport.ACCESS_FAILED, + "Owner account not found for collection: " + collection.getURI()); + error.setCollectionId(collection.getId()); + error.setCollectionURI(collection.getURI()); errorList.add(error); } - Group group = perms.getGroup(); + final Group group = perms.getGroup(); if (group == null) { - final ErrorReport.CollectionError error = new ErrorReport.CollectionError( ErrorReport.ACCESS_FAILED, "Owner group not found for collection: " + collection.getURI()); - error.setCollectionId( collection.getId() ); - error.setCollectionURI( collection.getURI() ); + final ErrorReport.CollectionError error = new ErrorReport.CollectionError( + ErrorReport.ACCESS_FAILED, + "Owner group not found for collection: " + collection.getURI()); + error.setCollectionId(collection.getId()); + error.setCollectionURI(collection.getURI()); errorList.add(error); } - } catch(Exception e) { - final ErrorReport.CollectionError error = new ErrorReport.CollectionError( ErrorReport.ACCESS_FAILED, "Exception caught while : " + collection.getURI()); - error.setCollectionId( collection.getId() ); - error.setCollectionURI( collection.getURI() ); + } catch (final Exception e) { + final ErrorReport.CollectionError error = new ErrorReport.CollectionError( + ErrorReport.ACCESS_FAILED, + "Exception caught while : " + collection.getURI()); + error.setCollectionId(collection.getId()); + error.setCollectionURI(collection.getURI()); errorList.add(error); } } public ErrorReport checkPermissions(final DocumentImpl doc) { try { - Permission perms = doc.getPermissions(); - Account owner = perms.getOwner(); + final Permission perms = doc.getPermissions(); + final Account owner = perms.getOwner(); if (owner == null) { - return new ErrorReport.ResourceError(ErrorReport.RESOURCE_ACCESS_FAILED, "Owner account not found for document " + doc.getFileURI()); + return new ErrorReport.ResourceError( + ErrorReport.RESOURCE_ACCESS_FAILED, + "Owner account not found for document " + 
doc.getFileURI()); } - Group group = perms.getGroup(); + final Group group = perms.getGroup(); if (group == null) { - return new ErrorReport.ResourceError(ErrorReport.RESOURCE_ACCESS_FAILED, "Owner group not found for document " + doc.getFileURI()); + return new ErrorReport.ResourceError( + ErrorReport.RESOURCE_ACCESS_FAILED, + "Owner group not found for document " + doc.getFileURI()); } - } catch(Exception e) { - return new ErrorReport.ResourceError(ErrorReport.RESOURCE_ACCESS_FAILED, "Exception caught while checking permissions on document " + doc.getFileURI(), e); + } catch (final Exception e) { + return new ErrorReport.ResourceError( + ErrorReport.RESOURCE_ACCESS_FAILED, + "Exception caught while checking permissions on document " + doc.getFileURI(), e); } return null; } @@ -279,29 +274,25 @@ public ErrorReport checkPermissions(final DocumentImpl doc) { * but much faster. * * @param doc the document object to check - * @return + * @return the error report */ public ErrorReport checkDocument(final DocumentImpl doc) { - final DOMFile domDb = ( (NativeBroker)broker ).getDOMFile(); - return (ErrorReport)new DOMTransaction( this, domDb, LockMode.WRITE_LOCK, doc ) { + final DOMFile domDb = ((NativeBroker) broker).getDOMFile(); + return (ErrorReport) new DOMTransaction(this, domDb, () -> broker.getBrokerPool().getLockManager().acquireBtreeWriteLock(domDb.getLockName()), doc) { public Object start() { - EmbeddedXMLStreamReader reader = null; try { - final ElementImpl root = (ElementImpl)doc.getDocumentElement(); + final ElementImpl root = (ElementImpl) doc.getDocumentElement(); if (root == null) { - return new ErrorReport.ResourceError(ErrorReport.RESOURCE_ACCESS_FAILED, "Failed to access document data"); + return new ErrorReport.ResourceError( + ErrorReport.RESOURCE_ACCESS_FAILED, + "Failed to access document data"); } - } catch( final Exception e ) { + } catch (final Exception e) { e.printStackTrace(); - return( new ErrorReport.ResourceError( 
org.exist.backup.ErrorReport.RESOURCE_ACCESS_FAILED, e.getMessage(), e ) ); - } finally { - if (reader != null) { - try { - reader.close(); - } catch (XMLStreamException e) { - e.printStackTrace(); - } - } + return new ErrorReport.ResourceError( + org.exist.backup.ErrorReport.RESOURCE_ACCESS_FAILED, + e.getMessage(), + e); } return null; } @@ -312,235 +303,254 @@ public Object start() { * Check the persistent DOM of a document. The method traverses the entire node tree and checks it for consistency, including node relationships, * child and attribute counts etc. * - * @param doc the document to check - * - * @return null if the document is consistent, an error report otherwise. + * @param doc the document to check + * @return null if the document is consistent, an error report otherwise. */ - public ErrorReport checkXMLTree( final DocumentImpl doc ) - { - final DOMFile domDb = ( (NativeBroker)broker ).getDOMFile(); - return( (ErrorReport)new DOMTransaction( this, domDb, LockMode.WRITE_LOCK, doc ) { - public Object start() { - EmbeddedXMLStreamReader reader = null; - try { - final ElementImpl root = (ElementImpl)doc.getDocumentElement(); - reader = (EmbeddedXMLStreamReader)broker.getXMLStreamReader( root, true ); - NodeId nodeId; - boolean attribsAllowed = false; - int expectedAttribs = 0; - int attributeCount = 0; - - while( reader.hasNext() ) { - final int status = reader.next(); - - nodeId = (NodeId)reader.getProperty( EmbeddedXMLStreamReader.PROPERTY_NODE_ID ); - ElementNode parent = null; - - if( ( status != XMLStreamReader.END_ELEMENT ) && !elementStack.isEmpty() ) { - parent = elementStack.peek(); - parent.childCount++; - - // test parent-child relation - if( !nodeId.isChildOf( parent.elem.getNodeId() ) ) { - return( new ErrorReport.ResourceError( ErrorReport.NODE_HIERARCHY, "Node " + nodeId + " is not a child of " + parent.elem.getNodeId() ) ); - } + public ErrorReport checkXMLTree(final DocumentImpl doc) { + final DOMFile domDb = ((NativeBroker) 
broker).getDOMFile(); + return (ErrorReport) new DOMTransaction(this, domDb, () -> broker.getBrokerPool().getLockManager().acquireBtreeWriteLock(domDb.getLockName()), doc) { + public Object start() { + EmbeddedXMLStreamReader reader = null; + try { + final Node root = doc.getFirstChild(); + reader = (EmbeddedXMLStreamReader) broker.getXMLStreamReader((NodeHandle)root, true); - // test sibling relation - if( ( parent.prevSibling != null ) && !( nodeId.isSiblingOf( parent.prevSibling ) && ( nodeId.compareTo( parent.prevSibling ) > 0 ) ) ) { - return( new ErrorReport.ResourceError( ErrorReport.INCORRECT_NODE_ID, "Node " + nodeId + " is not a sibling of " + parent.prevSibling ) ); - } - parent.prevSibling = nodeId; - } + boolean attribsAllowed = false; + int expectedAttribs = 0; + int attributeCount = 0; - switch( status ) { + while (reader.hasNext()) { + final int status = reader.next(); - case XMLStreamReader.ATTRIBUTE: { - attributeCount++; - break; - } + final NodeId nodeId = (NodeId) reader.getProperty(EmbeddedXMLStreamReader.PROPERTY_NODE_ID); - case XMLStreamReader.END_ELEMENT: { - if( elementStack.isEmpty() ) { - return( new org.exist.backup.ErrorReport.ResourceError( ErrorReport.NODE_HIERARCHY, "Error in node hierarchy: received END_ELEMENT event " + "but stack was empty!" 
) ); - } - final ElementNode lastElem = elementStack.pop(); - if( lastElem.childCount != lastElem.elem.getChildCount() ) { - return( new ErrorReport.ResourceError( org.exist.backup.ErrorReport.NODE_HIERARCHY, "Element reports incorrect child count: expected " + lastElem.elem.getChildCount() + " but found " + lastElem.childCount ) ); - } - break; - } - - case XMLStreamReader.START_ELEMENT: { - if( nodeId.getTreeLevel() <= defaultIndexDepth ) { + if ((status != XMLStreamReader.END_ELEMENT) && !elementStack.isEmpty()) { + final ElementNode parent = elementStack.peek(); + parent.childCount++; - // check dom.dbx btree, which maps the node - // id to the node's storage address - // look up the node id and check if the - // returned storage address is correct - final NativeBroker.NodeRef nodeRef = new NativeBroker.NodeRef( doc.getDocId(), nodeId ); + // test parent-child relation + if (!nodeId.isChildOf(parent.elem.getNodeId())) { + return new ErrorReport.ResourceError( + ErrorReport.NODE_HIERARCHY, + "Node " + nodeId + " is not a child of " + parent.elem.getNodeId()); + } - try { - final long p = domDb.findValue( nodeRef ); + // test sibling relation + if ((parent.prevSibling != null) && !(nodeId.isSiblingOf(parent.prevSibling) && (nodeId.compareTo(parent.prevSibling) > 0))) { + return new ErrorReport.ResourceError( + ErrorReport.INCORRECT_NODE_ID, + "Node " + nodeId + " is not a sibling of " + parent.prevSibling); + } + parent.prevSibling = nodeId; + } - if( p != reader.getCurrentPosition() ) { - final Value v = domDb.get( p ); + switch (status) { - if( v == null ) { - return( new ErrorReport.IndexError( ErrorReport.DOM_INDEX, "Failed to access node " + nodeId + " through dom.dbx index. Wrong storage address. 
Expected: " + p + "; got: " + reader.getCurrentPosition() + " - ", doc.getDocId() ) ); - } - } - } - catch( final Exception e ) { - e.printStackTrace(); - return( new ErrorReport.IndexError( ErrorReport.DOM_INDEX, "Failed to access node " + nodeId + " through dom.dbx index.", e, doc.getDocId() ) ); - } - } - - final IStoredNode node = reader.getNode(); - if( node.getNodeType() != Node.ELEMENT_NODE ) { - return( new org.exist.backup.ErrorReport.ResourceError( ErrorReport.INCORRECT_NODE_TYPE, "Expected an element node, received node of type " + node.getNodeType() ) ); - } - elementStack.push( new ElementNode( (ElementImpl)node ) ); - attribsAllowed = true; - attributeCount = 0; - expectedAttribs = reader.getAttributeCount(); - break; - } + case XMLStreamReader.ATTRIBUTE: { + attributeCount++; + break; + } - default: { - if( attribsAllowed ) { + case XMLStreamReader.END_ELEMENT: { + if (elementStack.isEmpty()) { + return new org.exist.backup.ErrorReport.ResourceError( + ErrorReport.NODE_HIERARCHY, + "Error in node hierarchy: received END_ELEMENT event " + + "but stack was empty!"); + } + final ElementNode lastElem = elementStack.pop(); + if (lastElem.childCount != lastElem.elem.getChildCount()) { + return new ErrorReport.ResourceError( + org.exist.backup.ErrorReport.NODE_HIERARCHY, + "Element reports incorrect child count: expected " + + lastElem.elem.getChildCount() + + " but found " + lastElem.childCount); + } + break; + } - if( attributeCount != expectedAttribs ) { - return( new org.exist.backup.ErrorReport.ResourceError( ErrorReport.INCORRECT_NODE_TYPE, "Wrong number of attributes. 
Expected: " + expectedAttribs + "; found: " + attributeCount ) ); + case XMLStreamReader.START_ELEMENT: { + if (nodeId.getTreeLevel() <= defaultIndexDepth) { + + // check dom.dbx btree, which maps the node + // id to the node's storage address + // look up the node id and check if the + // returned storage address is correct + final NativeBroker.NodeRef nodeRef = + new NativeBroker.NodeRef(doc.getDocId(), nodeId); + + try { + final long p = domDb.findValue(nodeRef); + + if (p != reader.getCurrentPosition()) { + final Value v = domDb.get(p); + + if (v == null) { + return new ErrorReport.IndexError( + ErrorReport.DOM_INDEX, + "Failed to access node " + nodeId + + " through dom.dbx index. Wrong storage address. Expected: " + + p + "; got: " + reader.getCurrentPosition() + + " - ", + doc.getDocId()); } } - attribsAllowed = false; - break; + } catch (final Exception e) { + e.printStackTrace(); + return new ErrorReport.IndexError( + ErrorReport.DOM_INDEX, + "Failed to access node " + + nodeId + " through dom.dbx index.", + e, + doc.getDocId()); } } + + final IStoredNode node = reader.getNode(); + if (node.getNodeType() != Node.ELEMENT_NODE) { + return new org.exist.backup.ErrorReport.ResourceError( + ErrorReport.INCORRECT_NODE_TYPE, + "Expected an element node, received node of type " + + node.getNodeType()); + } + elementStack.push(new ElementNode((ElementImpl) node)); + attribsAllowed = true; + attributeCount = 0; + expectedAttribs = reader.getAttributeCount(); + break; } - if( !elementStack.isEmpty() ) { - return( new org.exist.backup.ErrorReport.ResourceError( ErrorReport.NODE_HIERARCHY, "Error in node hierarchy: reached end of tree but " + "stack was not empty!" ) ); + default: { + if (attribsAllowed) { + if (attributeCount != expectedAttribs) { + return new org.exist.backup.ErrorReport.ResourceError( + ErrorReport.INCORRECT_NODE_TYPE, + "Wrong number of attributes. 
Expected: " + + expectedAttribs + "; found: " + attributeCount); + } + } + attribsAllowed = false; + break; } - return( null ); } - catch( final IOException e ) { - e.printStackTrace(); - return( new org.exist.backup.ErrorReport.ResourceError( ErrorReport.RESOURCE_ACCESS_FAILED, e.getMessage(), e ) ); - } - catch( final XMLStreamException e ) { + } + + if (!elementStack.isEmpty()) { + return new org.exist.backup.ErrorReport.ResourceError( + ErrorReport.NODE_HIERARCHY, + "Error in node hierarchy: reached end of tree but " + + "stack was not empty!"); + } + return null; + } catch (final IOException | XMLStreamException e) { + e.printStackTrace(); + return new org.exist.backup.ErrorReport.ResourceError( + ErrorReport.RESOURCE_ACCESS_FAILED, + e.getMessage(), + e); + } finally { + elementStack.clear(); + if (reader != null) { + try { + reader.close(); + } catch (final XMLStreamException e) { e.printStackTrace(); - return( new ErrorReport.ResourceError( org.exist.backup.ErrorReport.RESOURCE_ACCESS_FAILED, e.getMessage(), e ) ); - } - finally { - elementStack.clear(); - if (reader != null) { - try { - reader.close(); - } catch (XMLStreamException e) { - e.printStackTrace(); - } - } } } - }.run() ); + } + } + }.run(); } - public interface ProgressCallback - { - void startDocument( String name, int current, int count ) throws TerminatedException; - + public interface ProgressCallback { + void startDocument(final String name, final int current, final int count) throws TerminatedException; - void startCollection( String path ) throws TerminatedException; + void startCollection(final String path) throws TerminatedException; - - void error( org.exist.backup.ErrorReport error ); + void error(final org.exist.backup.ErrorReport error); } - private static class ElementNode - { - ElementImpl elem; - int childCount = 0; - NodeId prevSibling = null; + private static class ElementNode { + private final ElementImpl elem; + private int childCount = 0; + private NodeId prevSibling = 
null; - ElementNode( ElementImpl element ) - { + private ElementNode(final ElementImpl element) { this.elem = element; } } - - private class DocumentCallback implements BTreeCallback - { - private List errors; - private ProgressCallback progress; - private int docCount = 0; - private boolean checkDocs; - private int lastPercentage = -1; - private Agent jmxAgent = AgentFactory.getInstance(); - private ArrayList docs = new ArrayList<>(8192); - - private DocumentCallback( List errors, ProgressCallback progress, boolean checkDocs ) - { - this.errors = errors; - this.progress = progress; + private class DocumentCallback implements BTreeCallback { + @Nullable + private final List errors; + @Nullable + private final ProgressCallback progress; + private final boolean checkDocs; + + private int docCount = 0; + private int lastPercentage = -1; + private final Agent jmxAgent = AgentFactory.getInstance(); + private final List docs = new ArrayList<>(100); + + private DocumentCallback(@Nullable final List errors, @Nullable final ProgressCallback progress, boolean checkDocs) { + this.errors = errors; + this.progress = progress; this.checkDocs = checkDocs; } - public boolean indexInfo( Value key, long pointer ) throws TerminatedException - { - final CollectionStore store = (CollectionStore)( (NativeBroker)broker ).getStorage( NativeBroker.COLLECTIONS_DBX_ID ); - final int docId = CollectionStore.DocumentKey.getDocumentId( key ); + @Override + public boolean indexInfo(final Value key, final long pointer) throws TerminatedException { + final CollectionStore store = (CollectionStore) ((NativeBroker) broker).getStorage(NativeBroker.COLLECTIONS_DBX_ID); + final int docId = CollectionStore.DocumentKey.getDocumentId(key); try { - final byte type = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE]; - final VariableByteInput istream = store.getAsStream( pointer ); - DocumentImpl doc = null; - - if( type == DocumentImpl.BINARY_FILE ) { - doc = new 
BinaryDocument( broker.getBrokerPool() ); + final byte type = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE]; + final VariableByteInput istream = store.getAsStream(pointer); + final DocumentImpl doc; + if (type == DocumentImpl.BINARY_FILE) { + doc = new BinaryDocument(broker.getBrokerPool()); } else { - doc = new DocumentImpl( broker.getBrokerPool() ); + doc = new DocumentImpl(broker.getBrokerPool()); } - doc.read( istream ); + doc.read(istream); docCount++; - if( checkDocs ) { + if (checkDocs) { - if( progress != null ) { - progress.startDocument( doc.getFileURI().toString(), docCount, getDocumentCount() ); + if (progress != null) { + progress.startDocument(doc.getFileURI().toString(), docCount, getDocumentCount()); } - int percentage = 100 * ( docCount + 1 ) / ( getDocumentCount() + 1 ); + int percentage = 100 * (docCount + 1) / (getDocumentCount() + 1); - if( ( jmxAgent != null ) && ( percentage != lastPercentage ) ) { + if ((jmxAgent != null) && (percentage != lastPercentage)) { lastPercentage = percentage; - jmxAgent.updateStatus( broker.getBrokerPool(), percentage ); + jmxAgent.updateStatus(broker.getBrokerPool(), percentage); } - if( ( type == DocumentImpl.XML_FILE ) && !directAccess ) { + if ((type == DocumentImpl.XML_FILE) && !directAccess) { // add to the list of pending documents. 
They will be checked later docs.add(doc); } } - } - catch( final TerminatedException e ) { - throw( e ); - } - catch( final Exception e ) { + } catch (final TerminatedException e) { + throw e; + } catch (final Exception e) { e.printStackTrace(); - final org.exist.backup.ErrorReport.ResourceError error = new org.exist.backup.ErrorReport.ResourceError( org.exist.backup.ErrorReport.RESOURCE_ACCESS_FAILED, e.getMessage(), e ); - error.setDocumentId( docId ); - - if( errors != null ) { - errors.add( error ); + final org.exist.backup.ErrorReport.ResourceError error = new org.exist.backup.ErrorReport.ResourceError( + org.exist.backup.ErrorReport.RESOURCE_ACCESS_FAILED, + e.getMessage(), + e); + error.setDocumentId(docId); + + if (errors != null) { + errors.add(error); } - if( progress != null ) { - progress.error( error ); + if (progress != null) { + progress.error(error); } } - return( true ); + return true; } /** @@ -548,19 +558,16 @@ public boolean indexInfo( Value key, long pointer ) throws TerminatedException * check each of them. 
*/ public void checkDocs() { - DocumentImpl documents[] = new DocumentImpl[docs.size()]; + final DocumentImpl documents[] = new DocumentImpl[docs.size()]; docs.toArray(documents); - Arrays.sort(documents, new Comparator() { - @Override - public int compare(DocumentImpl d1, DocumentImpl d2) { - final long a1 = StorageAddress.pageFromPointer(d1.getFirstChildAddress()); - final long a2 = StorageAddress.pageFromPointer(d2.getFirstChildAddress()); - return Long.compare(a1, a2); - } + Arrays.sort(documents, (d1, d2) -> { + final long a1 = StorageAddress.pageFromPointer(d1.getFirstChildAddress()); + final long a2 = StorageAddress.pageFromPointer(d2.getFirstChildAddress()); + return Long.compare(a1, a2); }); - for (DocumentImpl doc : documents) { - ErrorReport report; - report = checkPermissions(doc); + + for (final DocumentImpl doc : documents) { + ErrorReport report = checkPermissions(doc); if (report == null) { if (ConsistencyCheck.this.checkDocs) { report = checkXMLTree(doc); @@ -568,16 +575,16 @@ public int compare(DocumentImpl d1, DocumentImpl d2) { report = checkDocument(doc); } } - if( report != null ) { - if(report instanceof ErrorReport.ResourceError) { - ( (ErrorReport.ResourceError)report ).setDocumentId( doc.getDocId() ); + if (report != null) { + if (report instanceof ErrorReport.ResourceError) { + ((ErrorReport.ResourceError) report).setDocumentId(doc.getDocId()); } - if(errors != null) { + if (errors != null) { errors.add(report); } - if(progress != null) { + if (progress != null) { progress.error(report); } } diff --git a/src/org/exist/backup/ExportGUI.java b/src/org/exist/backup/ExportGUI.java index 1dce7b8387c..4dfa860aea9 100644 --- a/src/org/exist/backup/ExportGUI.java +++ b/src/org/exist/backup/ExportGUI.java @@ -47,6 +47,7 @@ import org.exist.security.PermissionDeniedException; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.exist.util.ThreadUtils.newInstanceThread; /** @@ -337,7 +338,7 @@ private void 
startBtncheck(java.awt.event.ActionEvent evt) { // GEN-FIRST:event_ closeLog(); } }; - new Thread(checkRun).start(); + newInstanceThread(pool, "export-gui.check-run", checkRun).start(); } // GEN-LAST:event_startBtncheck @@ -358,7 +359,7 @@ private void exportBtnActionPerformed(java.awt.event.ActionEvent evt) { // GEN-F closeLog(); } }; - new Thread(th).start(); + newInstanceThread(pool, "export-gui.export", th).start(); } // GEN-LAST:event_exportBtnActionPerformed diff --git a/src/org/exist/backup/Main.java b/src/org/exist/backup/Main.java index f1cc928ce0e..df277b40a98 100644 --- a/src/org/exist/backup/Main.java +++ b/src/org/exist/backup/Main.java @@ -29,6 +29,7 @@ import java.util.Optional; import java.util.concurrent.ExecutorService; +import org.exist.util.NamedThreadFactory; import org.exist.util.SystemExitCodes; import org.xml.sax.SAXException; @@ -361,7 +362,7 @@ public Void call() throws Exception { } }; - final ExecutorService executor = Executors.newSingleThreadExecutor(); + final ExecutorService executor = Executors.newSingleThreadExecutor(new NamedThreadFactory(null, null, "backup.restore-with-gui")); final Future future = executor.submit(callable); while (!future.isDone() && !future.isCancelled()) { diff --git a/src/org/exist/backup/Restore.java b/src/org/exist/backup/Restore.java index 8f5669e9fde..702ca5b9197 100644 --- a/src/org/exist/backup/Restore.java +++ b/src/org/exist/backup/Restore.java @@ -21,14 +21,14 @@ */ package org.exist.backup; -import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.Properties; -import java.util.Stack; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; @@ -65,7 +65,7 @@ public void restore(RestoreListener listener, String username, String 
password, } //get the backup descriptors, can be more than one if it was an incremental backup - final Stack descriptors = getBackupDescriptors(f); + final Deque descriptors = getBackupDescriptors(f); final SAXParserFactory saxFactory = SAXParserFactory.newInstance(); saxFactory.setNamespaceAware(true); @@ -92,9 +92,9 @@ public void restore(RestoreListener listener, String username, String password, } } - private Stack getBackupDescriptors(Path contents) throws XMLDBException, IOException { + private Deque getBackupDescriptors(Path contents) throws XMLDBException, IOException { - final Stack descriptors = new Stack<>(); + final Deque descriptors = new ArrayDeque<>(); do { diff --git a/src/org/exist/backup/SystemExport.java b/src/org/exist/backup/SystemExport.java index c4d313996b6..25e7c4ffbca 100644 --- a/src/org/exist/backup/SystemExport.java +++ b/src/org/exist/backup/SystemExport.java @@ -34,10 +34,12 @@ import org.exist.collections.Collection; import org.exist.management.Agent; import org.exist.management.AgentFactory; +import org.exist.numbering.NodeId; import org.exist.security.ACLPermission; import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; import org.exist.security.internal.AccountImpl; +import org.exist.stax.ExtendedXMLStreamReader; import org.exist.storage.DBBroker; import org.exist.storage.DataBackup; import org.exist.storage.NativeBroker; @@ -117,44 +119,41 @@ public class SystemExport { private int collectionCount = -1; - public Properties defaultOutputProperties = new Properties(); - - public Properties contentsOutputProps = new Properties(); + private final Properties defaultOutputProperties = new Properties(); + private final Properties contentsOutputProps = new Properties(); private DBBroker broker; private StatusCallback callback = null; private boolean directAccess = false; private ProcessMonitor.Monitor monitor = null; private BackupHandler bh = null; + private ChainOfReceiversFactory chainFactory; + + 
public SystemExport(final DBBroker broker, final StatusCallback callback, final ProcessMonitor.Monitor monitor, + final boolean direct, final ChainOfReceiversFactory chainFactory) { + this.broker = broker; + this.callback = callback; + this.monitor = monitor; + this.directAccess = direct; + this.chainFactory = chainFactory; - { defaultOutputProperties.setProperty(OutputKeys.INDENT, "no"); defaultOutputProperties.setProperty(OutputKeys.ENCODING, "UTF-8"); defaultOutputProperties.setProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); defaultOutputProperties.setProperty(EXistOutputKeys.EXPAND_XINCLUDES, "no"); defaultOutputProperties.setProperty(EXistOutputKeys.PROCESS_XSL_PI, "no"); - } - { contentsOutputProps.setProperty(OutputKeys.INDENT, "yes"); - } - - private ChainOfReceiversFactory chainFactory; - - public SystemExport(DBBroker broker, StatusCallback callback, ProcessMonitor.Monitor monitor, boolean direct, ChainOfReceiversFactory chainFactory) { - this.broker = broker; - this.callback = callback; - this.monitor = monitor; - this.directAccess = direct; - this.chainFactory = chainFactory; bh = broker.getDatabase().getPluginsManager().getBackupHandler(LOG); } - public SystemExport(DBBroker broker, StatusCallback callback, ProcessMonitor.Monitor monitor, boolean direct) { + @SuppressWarnings("unchecked") + public SystemExport(final DBBroker broker, final StatusCallback callback, final ProcessMonitor.Monitor monitor, + final boolean direct) { this(broker, callback, monitor, direct, null); - List list = (List) broker.getConfiguration().getProperty(CONFIG_FILTERS); + final List list = (List) broker.getConfiguration().getProperty(CONFIG_FILTERS); if (list != null) { chainFactory = new ChainOfReceiversFactory(list); } @@ -612,7 +611,6 @@ private void exportDocument(BackupHandler bh, BackupWriter output, Date date, Ba */ private void writeXML(DocumentImpl doc, Receiver receiver) { try { - XMLStreamReader reader; char[] ch; int nsdecls; final NamespaceSupport nsSupport = 
new NamespaceSupport(); @@ -625,7 +623,11 @@ private void writeXML(DocumentImpl doc, Receiver receiver) { for (int i = 0; i < children.getLength(); i++) { final StoredNode child = (StoredNode) children.item(i); - reader = broker.getXMLStreamReader(child, false); + + final int thisLevel = child.getNodeId().getTreeLevel(); + final int childLevel = child.getNodeType() == Node.ELEMENT_NODE ? thisLevel + 1 : thisLevel; + + final XMLStreamReader reader = broker.getXMLStreamReader(child, false); while (reader.hasNext()) { final int status = reader.next(); @@ -633,11 +635,10 @@ private void writeXML(DocumentImpl doc, Receiver receiver) { switch (status) { case XMLStreamReader.START_DOCUMENT: - case XMLStreamReader.END_DOCUMENT: { + case XMLStreamReader.END_DOCUMENT: break; - } - case XMLStreamReader.START_ELEMENT: { + case XMLStreamReader.START_ELEMENT: nsdecls = reader.getNamespaceCount(); for (int ni = 0; ni < nsdecls; ni++) { receiver.startPrefixMapping(reader.getNamespacePrefix(ni), reader.getNamespaceURI(ni)); @@ -650,41 +651,43 @@ private void writeXML(DocumentImpl doc, Receiver receiver) { } receiver.startElement(new QName(reader.getLocalName(), reader.getNamespaceURI(), reader.getPrefix()), attribs); break; - } - case XMLStreamReader.END_ELEMENT: { + case XMLStreamReader.END_ELEMENT: receiver.endElement(new QName(reader.getLocalName(), reader.getNamespaceURI(), reader.getPrefix())); nsdecls = reader.getNamespaceCount(); for (int ni = 0; ni < nsdecls; ni++) { receiver.endPrefixMapping(reader.getNamespacePrefix(ni)); } + + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + if (childLevel != thisLevel && otherLevel == thisLevel) { + // finished `this` element... 
+ break; // exit-while + } + break; - } - case XMLStreamReader.CHARACTERS: { + case XMLStreamReader.CHARACTERS: receiver.characters(reader.getText()); break; - } - case XMLStreamReader.CDATA: { + case XMLStreamReader.CDATA: ch = reader.getTextCharacters(); receiver.cdataSection(ch, 0, ch.length); break; - } - case XMLStreamReader.COMMENT: { + case XMLStreamReader.COMMENT: ch = reader.getTextCharacters(); receiver.comment(ch, 0, ch.length); break; - } - case XMLStreamReader.PROCESSING_INSTRUCTION: { + case XMLStreamReader.PROCESSING_INSTRUCTION: receiver.processingInstruction(reader.getPITarget(), reader.getPIData()); break; - } } - if ((child.getNodeType() == Node.COMMENT_NODE) || (child.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE)) { + if (child.getNodeType() == Node.COMMENT_NODE || child.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) { break; } } diff --git a/src/org/exist/backup/SystemImport.java b/src/org/exist/backup/SystemImport.java index 47606600acc..1169c43dda5 100644 --- a/src/org/exist/backup/SystemImport.java +++ b/src/org/exist/backup/SystemImport.java @@ -21,13 +21,13 @@ */ package org.exist.backup; -import java.io.FileNotFoundException; import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.Properties; -import java.util.Stack; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; @@ -73,7 +73,7 @@ public void restore(RestoreListener listener, String username, Object credential setAdminCredentials(broker, newCredentials); //get the backup descriptors, can be more than one if it was an incremental backup - final Stack descriptors = getBackupDescriptors(f); + final Deque descriptors = getBackupDescriptors(f); final SAXParserFactory saxFactory = SAXParserFactory.newInstance(); saxFactory.setNamespaceAware(true); @@ -100,9 +100,9 @@ 
public void restore(RestoreListener listener, String username, Object credential } } - private Stack getBackupDescriptors(Path contents) throws XMLDBException, IOException { + private Deque getBackupDescriptors(Path contents) throws XMLDBException, IOException { - final Stack descriptors = new Stack<>(); + final Deque descriptors = new ArrayDeque<>(); do { final BackupDescriptor bd = getBackupDescriptor(contents); diff --git a/src/org/exist/backup/ZipArchiveBackupDescriptor.java b/src/org/exist/backup/ZipArchiveBackupDescriptor.java index 4bc217ba8bc..42d2365d133 100644 --- a/src/org/exist/backup/ZipArchiveBackupDescriptor.java +++ b/src/org/exist/backup/ZipArchiveBackupDescriptor.java @@ -86,7 +86,8 @@ public ZipArchiveBackupDescriptor(Path fileArchive) throws IOException { throw new FileNotFoundException("Archive " + fileArchive.toAbsolutePath().toString() + " is not a valid eXist backup archive"); } - if ((!base.startsWith("db/")) || (!Paths.get(base).normalize().startsWith(Paths.get("db/")))) { + final Path fakeDbRoot = Paths.get("/db"); + if (!fakeDbRoot.resolve(Paths.get(base)).normalize().startsWith(fakeDbRoot)) { throw new IOException("Detected archive exit attack! 
zipFile=" + fileArchive.toAbsolutePath().normalize().toString()); } } diff --git a/src/org/exist/backup/restore/CollectionDeferredPermission.java b/src/org/exist/backup/restore/CollectionDeferredPermission.java index 8e138b85a02..0ade54e059a 100644 --- a/src/org/exist/backup/restore/CollectionDeferredPermission.java +++ b/src/org/exist/backup/restore/CollectionDeferredPermission.java @@ -45,7 +45,7 @@ public CollectionDeferredPermission(RestoreListener listener, Collection collect public void apply() { try { - UserManagementService service; + final UserManagementService service; if(getTarget().getName().equals(XmldbURI.ROOT_COLLECTION)) { service = (UserManagementService)getTarget().getService("UserManagementService", "1.0"); } else { @@ -56,7 +56,11 @@ public void apply() { service.setPermissions(getTarget(), getOwner(), getGroup(), getMode(), getAces()); //persist } catch (final XMLDBException xe) { String name = "unknown"; - try { name = getTarget().getName(); } catch(final XMLDBException x) { LOG.error(x.getMessage(), x); } + try { + name = getTarget().getName(); + } catch(final XMLDBException x) { + LOG.error(x.getMessage(), x); + } final String msg = "ERROR: Failed to set permissions on Collection '" + name + "'."; LOG.error(msg, xe); getListener().warn(msg); diff --git a/src/org/exist/backup/restore/ResourceDeferredPermission.java b/src/org/exist/backup/restore/ResourceDeferredPermission.java index 2f66a4965a3..0fd7d2aa9f3 100644 --- a/src/org/exist/backup/restore/ResourceDeferredPermission.java +++ b/src/org/exist/backup/restore/ResourceDeferredPermission.java @@ -46,11 +46,14 @@ public ResourceDeferredPermission(RestoreListener listener, Resource resource, S public void apply() { try { final UserManagementService service = (UserManagementService)getTarget().getParentCollection().getService("UserManagementService", "1.0"); - final Permission permissions = service.getPermissions(getTarget()); service.setPermissions(getTarget(), getOwner(), getGroup(), 
getMode(), getAces()); //persist } catch(final XMLDBException xe) { String name = "unknown"; - try { name = getTarget().getId(); } catch(final XMLDBException x) { LOG.error(x.getMessage(), x); } + try { + name = getTarget().getParentCollection().getName() + "/" + getTarget().getId(); + } catch(final XMLDBException x) { + LOG.error(x.getMessage(), x); + } final String msg = "ERROR: Failed to set permissions on Document '" + name + "'."; LOG.error(msg, xe); getListener().warn(msg); diff --git a/src/org/exist/backup/restore/RestoreHandler.java b/src/org/exist/backup/restore/RestoreHandler.java index 672d250e0dd..72d5f4d3666 100644 --- a/src/org/exist/backup/restore/RestoreHandler.java +++ b/src/org/exist/backup/restore/RestoreHandler.java @@ -23,28 +23,29 @@ import java.io.IOException; import java.net.URISyntaxException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Date; -import java.util.Observable; -import java.util.Stack; +import java.util.*; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.exist.EXistException; import org.exist.Namespaces; import org.exist.backup.BackupDescriptor; import org.exist.backup.restore.listener.RestoreListener; import org.exist.dom.persistent.DocumentTypeImpl; import org.exist.security.ACLPermission.ACE_ACCESS_TYPE; import org.exist.security.ACLPermission.ACE_TARGET; +import org.exist.security.AuthenticationException; import org.exist.security.SecurityManager; +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; import org.exist.util.EXistInputSource; import org.exist.xmldb.EXistCollection; import org.exist.xmldb.EXistCollectionManagementService; import org.exist.xmldb.EXistResource; import org.exist.xmldb.XmldbURI; +import org.exist.xmldb.txn.bridge.InTxnLocalCollection; import org.exist.xquery.XPathException; 
import org.exist.xquery.util.URIUtils; import org.exist.xquery.value.DateTimeValue; @@ -55,6 +56,7 @@ import org.xml.sax.helpers.DefaultHandler; import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Collection; +import org.xmldb.api.base.ErrorCodes; import org.xmldb.api.base.Resource; import org.xmldb.api.base.XMLDBException; import org.xmldb.api.modules.CollectionManagementService; @@ -85,7 +87,7 @@ public class RestoreHandler extends DefaultHandler { //handler state private int version = 0; private EXistCollection currentCollection; - private Stack deferredPermissions = new Stack(); + private Deque deferredPermissions = new ArrayDeque(); public RestoreHandler(final RestoreListener listener, final String dbBaseUri, final String dbUsername, final String dbPassword, final BackupDescriptor descriptor) { @@ -356,14 +358,10 @@ private DeferredPermission restoreResourceEntry(final Attributes atts) throws SA ((EXistResource)res).setMimeType(mimetype); } - if(is.getByteStreamLength() > 0) { + if(is.getByteStreamLength() > 0 || "BinaryResource".equals(type)) { res.setContent(is); } else { - if("BinaryResource".equals(type)) { - res.setContent(""); - } else { - res = null; - } + res = null; } // Restoring name @@ -510,11 +508,42 @@ private EXistCollection mkcol(final XmldbURI collPath, final Date created) throw for(final XmldbURI segment : segments) { p = p.append(segment); final XmldbURI xmldbURI = dbUri.resolveCollectionPath(p); - EXistCollection c = (EXistCollection)DatabaseManager.getCollection(xmldbURI.toString(), dbUsername, dbPassword); + EXistCollection c = null; + + final boolean localConnection = Optional.ofNullable(xmldbURI.getApiName()) + .map(XmldbURI.API_LOCAL::equals) + .orElse(false); + + if (localConnection) { + //short-cut to an XMLDB Collection that can be used with the current transaction + try { + final BrokerPool pool = BrokerPool.getInstance(); + final SecurityManager securityManager = pool.getSecurityManager(); + final Subject subject = 
securityManager.authenticate(dbUsername, dbPassword); + try { + c = new InTxnLocalCollection(subject, pool, null, xmldbURI); + } catch(final XMLDBException e) { + if(e.errorCode == ErrorCodes.NO_SUCH_COLLECTION) { + c = null; //no such collection, will be created below + } else { + throw e; + } + } + } catch(final AuthenticationException e) { + throw new XMLDBException(ErrorCodes.PERMISSION_DENIED, e.getMessage(), e); + } catch(final EXistException e) { + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); + } + } else { + c = (EXistCollection)DatabaseManager.getCollection(xmldbURI.toString(), dbUsername, dbPassword); + } + if(c == null) { current.setTriggersEnabled(false); + final EXistCollectionManagementService mgtService = (EXistCollectionManagementService)current.getService("CollectionManagementService", "1.0"); c = (EXistCollection)mgtService.createCollection(segment, created); + current.setTriggersEnabled(true); } current = c; diff --git a/src/org/exist/backup/restore/SystemImportHandler.java b/src/org/exist/backup/restore/SystemImportHandler.java index 56e08ffc68b..167baa6a6d9 100644 --- a/src/org/exist/backup/restore/SystemImportHandler.java +++ b/src/org/exist/backup/restore/SystemImportHandler.java @@ -23,6 +23,8 @@ import java.io.IOException; +import org.exist.security.PermissionFactory; +import org.exist.storage.lock.*; import org.w3c.dom.DocumentType; import org.xml.sax.Attributes; import org.xml.sax.SAXException; @@ -45,9 +47,7 @@ import org.exist.xquery.value.DateTimeValue; import java.net.URISyntaxException; -import java.util.Calendar; -import java.util.Date; -import java.util.Stack; +import java.util.*; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; @@ -59,9 +59,7 @@ import org.exist.backup.restore.listener.RestoreListener; import org.exist.security.ACLPermission.ACE_ACCESS_TYPE; import org.exist.security.ACLPermission.ACE_TARGET; -import org.exist.security.internal.aider.ACEAider; 
import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock.LockMode; import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import org.xml.sax.XMLReader; @@ -94,7 +92,7 @@ public class SystemImportHandler extends DefaultHandler { //handler state private int version = 0; private Collection currentCollection; - private Stack deferredPermissions = new Stack(); + private Deque deferredPermissions = new ArrayDeque<>(); public SystemImportHandler(DBBroker broker, RestoreListener listener, String dbBaseUri, BackupDescriptor descriptor) { this.broker = broker; @@ -540,29 +538,17 @@ public CollectionDeferredPermission(RestoreListener listener, Collection collect @Override public void apply() { - try { - getTarget().getLock().acquire(LockMode.WRITE_LOCK); - - final TransactionManager txnManager = broker.getDatabase().getTransactionManager(); - try(final Txn txn = txnManager.beginTransaction()) { - final Permission permission = getTarget().getPermissions(); - permission.setOwner(getOwner()); - permission.setGroup(getGroup()); - permission.setMode(getMode()); - if(permission instanceof ACLPermission) { - final ACLPermission aclPermission = (ACLPermission)permission; - aclPermission.clear(); - for(final ACEAider ace : getAces()) { - aclPermission.addACE(ace.getAccessType(), ace.getTarget(), ace.getWho(), ace.getMode()); - } - } - broker.saveCollection(txn, getTarget()); - - txnManager.commit(txn); - } finally { - getTarget().release(LockMode.WRITE_LOCK); - } - + final TransactionManager txnManager = broker.getDatabase().getTransactionManager(); + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + + try(final ManagedCollectionLock targetLock = lockManager.acquireCollectionWriteLock(getTarget().getURI()); + final Txn txn = txnManager.beginTransaction()) { + final Permission permission = getTarget().getPermissions(); + PermissionFactory.chown(broker, permission, Optional.ofNullable(getOwner()), 
Optional.ofNullable(getGroup())); + PermissionFactory.chmod(broker, permission, Optional.of(getMode()), Optional.ofNullable(permission instanceof ACLPermission ? getAces() : null)); + broker.saveCollection(txn, getTarget()); + + txnManager.commit(txn); } catch (final Exception xe) { final String msg = "ERROR: Failed to set permissions on Collection '" + getTarget().getURI() + "'."; LOG.error(msg, xe); @@ -579,31 +565,15 @@ public ResourceDeferredPermission(RestoreListener listener, DocumentImpl resourc @Override public void apply() { - try { - getTarget().getUpdateLock().acquire(LockMode.WRITE_LOCK); - - final TransactionManager txnManager = broker.getDatabase().getTransactionManager(); - - try(final Txn txn = txnManager.beginTransaction()) { - - final Permission permission = getTarget().getPermissions(); - permission.setOwner(getOwner()); - permission.setGroup(getGroup()); - permission.setMode(getMode()); - if(permission instanceof ACLPermission) { - final ACLPermission aclPermission = (ACLPermission)permission; - aclPermission.clear(); - for(final ACEAider ace : getAces()) { - aclPermission.addACE(ace.getAccessType(), ace.getTarget(), ace.getWho(), ace.getMode()); - } - } - broker.storeXMLResource(txn, getTarget()); - txnManager.commit(txn); - - } finally { - getTarget().getUpdateLock().release(LockMode.WRITE_LOCK); - } - + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + final TransactionManager txnManager = broker.getDatabase().getTransactionManager(); + try(final ManagedDocumentLock targetLock = lockManager.acquireDocumentWriteLock(getTarget().getURI()); + final Txn txn = txnManager.beginTransaction()) { + final Permission permission = getTarget().getPermissions(); + PermissionFactory.chown(broker, permission, Optional.ofNullable(getOwner()), Optional.ofNullable(getGroup())); + PermissionFactory.chmod(broker, permission, Optional.of(getMode()), Optional.ofNullable(permission instanceof ACLPermission ? 
getAces() : null)); + broker.storeXMLResource(txn, getTarget()); + txnManager.commit(txn); } catch (final Exception xe) { final String msg = "ERROR: Failed to set permissions on Document '" + getTarget().getURI() + "'."; LOG.error(msg, xe); diff --git a/src/org/exist/backup/xquery/RetrieveBackup.java b/src/org/exist/backup/xquery/RetrieveBackup.java index d4fb47fc6ee..9d160e29092 100644 --- a/src/org/exist/backup/xquery/RetrieveBackup.java +++ b/src/org/exist/backup/xquery/RetrieveBackup.java @@ -29,12 +29,9 @@ import org.exist.xquery.BasicFunction; import org.exist.xquery.Cardinality; import org.exist.xquery.FunctionSignature; -import org.exist.xquery.Variable; import org.exist.xquery.XPathException; import org.exist.xquery.XQueryContext; -import org.exist.xquery.functions.response.ResponseModule; import org.exist.xquery.value.FunctionParameterSequenceType; -import org.exist.xquery.value.JavaObjectValue; import org.exist.xquery.value.Sequence; import org.exist.xquery.value.SequenceType; import org.exist.xquery.value.Type; @@ -44,6 +41,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Optional; import java.util.Properties; @@ -95,24 +93,17 @@ public Sequence eval( Sequence[] args, Sequence contextSequence ) throws XPathEx } // directly stream the backup contents to the HTTP response - final ResponseModule myModule = (ResponseModule)context.getModule( ResponseModule.NAMESPACE_URI ); + final Optional maybeResponse = Optional.ofNullable(context.getHttpContext()) + .map(XQueryContext.HttpContext::getResponse); - // response object is read from global variable $response - final Variable respVar = myModule.resolveVariable( ResponseModule.RESPONSE_VAR ); - - if( respVar == null ) { + if(!maybeResponse.isPresent()) { throw( new XPathException( this, "No response object found in the current XQuery context." 
) ); } - if( respVar.getValue().getItemType() != Type.JAVA_OBJECT ) { - throw( new XPathException( this, "Variable $response is not bound to an Java object." ) ); - } - final JavaObjectValue respValue = (JavaObjectValue)respVar.getValue().itemAt( 0 ); - - if( !"org.exist.http.servlets.HttpResponseWrapper".equals( respValue.getObject().getClass().getName() ) ) { - throw( new XPathException( this, signature.toString() + " can only be used within the EXistServlet or XQueryServlet" ) ); + final ResponseWrapper response = maybeResponse.get(); + if( !"org.exist.http.servlets.HttpResponseWrapper".equals(response.getClass().getName())) { + throw new XPathException(this, signature.toString() + " can only be used within the EXistServlet or XQueryServlet"); } - final ResponseWrapper response = (ResponseWrapper)respValue.getObject(); response.setContentType("application/zip"); response.setHeader("Content-Length", String.valueOf(FileUtils.sizeQuietly(backupFile))); diff --git a/src/org/exist/client/BackupContentsFilter.java b/src/org/exist/client/BackupContentsFilter.java index d9e8d2a6f71..3105fddfaca 100644 --- a/src/org/exist/client/BackupContentsFilter.java +++ b/src/org/exist/client/BackupContentsFilter.java @@ -27,15 +27,9 @@ public class BackupContentsFilter extends FileFilter { @Override public boolean accept(final File f) { - if (f.getName().toLowerCase().equals(Messages.getString("ClientFrame.167"))) { - return true; - } - - if(f.isDirectory()) { - return true; - } - - return false; + return f.getName().equalsIgnoreCase(Messages.getString("ClientFrame.167")) + || f.isDirectory(); + } @Override diff --git a/src/org/exist/client/ClientFrame.java b/src/org/exist/client/ClientFrame.java index 4730b5b98b4..76ea470b46f 100644 --- a/src/org/exist/client/ClientFrame.java +++ b/src/org/exist/client/ClientFrame.java @@ -73,9 +73,7 @@ import java.text.SimpleDateFormat; import java.util.*; import java.util.List; -import java.util.concurrent.Callable; -import 
java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; +import java.util.concurrent.*; import java.util.prefs.Preferences; import java.util.stream.Collectors; @@ -97,8 +95,7 @@ public class ClientFrame extends JFrame implements WindowFocusListener, KeyListe private final static SimpleAttributeSet promptAttrs = new SimpleAttributeSet(); private final static SimpleAttributeSet defaultAttrs = new SimpleAttributeSet(); - - { + static { StyleConstants.setForeground(promptAttrs, Color.blue); StyleConstants.setBold(promptAttrs, true); StyleConstants.setForeground(defaultAttrs, Color.black); @@ -113,7 +110,8 @@ public class ClientFrame extends JFrame implements WindowFocusListener, KeyListe private final ResourceTableModel resources = new ResourceTableModel(); private JTextPane shell; private JPopupMenu shellPopup; - private final ProcessThread process = new ProcessThread(); + private final ProcessRunnable processRunnable; + private final Thread processThread; private Preferences preferences; private XmldbURI path = null; @@ -128,6 +126,8 @@ public ClientFrame(final InteractiveClient client, final XmldbURI path, final Pr this.path = path; this.properties = properties; this.client = client; + this.processRunnable = new ProcessRunnable(); + this.processThread = client.newClientThread("process", processRunnable); this.setIconImage(InteractiveClient.getExistIcon(getClass()).getImage()); @@ -140,7 +140,8 @@ public void windowClosing(final WindowEvent ev) { }); pack(); - process.start(); + processThread.start(); + shell.requestFocus(); preferences = Preferences.userNodeForPackage(ClientFrame.class); @@ -433,7 +434,7 @@ private JMenuBar createMenuBar() { item = new JMenuItem(Messages.getString("ClientFrame.66"), KeyEvent.VK_S); //$NON-NLS-1$ item.addActionListener(e -> { display(Messages.getString("ClientFrame.67")); //$NON-NLS-1$ - process.setAction("shutdown"); //$NON-NLS-1$ + processRunnable.setAction("shutdown"); //$NON-NLS-1$ }); connectMenu.add(item); 
@@ -662,7 +663,7 @@ private void newBlankDocument(final ActionEvent e) { private void goUpAction(final ActionEvent ev) { display(Messages.getString("ClientFrame.94")); //$NON-NLS-1$ - process.setAction("cd .."); //$NON-NLS-1$ + processRunnable.setAction("cd .."); //$NON-NLS-1$ } private void newCollectionAction(final ActionEvent ev) { @@ -670,7 +671,7 @@ private void newCollectionAction(final ActionEvent ev) { if (newCol != null) { final String command = "mkcol \"" + newCol + '"'; //$NON-NLS-1$ display(command + "\n"); //$NON-NLS-1$ - process.setAction(command); + processRunnable.setAction(command); } } @@ -740,7 +741,7 @@ private void removeAction(final ActionEvent ev) { showErrorMessage(e.getMessage(), e); } }; - new Thread(removeTask).start(); + client.newClientThread("remove", removeTask).start(); } } @@ -785,7 +786,7 @@ private void moveAction(final ActionEvent ev) { } setStatus(Messages.getString("ClientFrame.118")); //$NON-NLS-1$ }; - new Thread(moveTask).start(); + client.newClientThread("move", moveTask).start(); } private void renameAction(final ActionEvent ev) { @@ -827,7 +828,7 @@ private void renameAction(final ActionEvent ev) { } setStatus(Messages.getString("ClientFrame.127")); //$NON-NLS-1$ }; - new Thread(renameTask).start(); + client.newClientThread("rename", renameTask).start(); } private void copyAction(final ActionEvent ev) { @@ -882,7 +883,7 @@ private void copyAction(final ActionEvent ev) { } setStatus(Messages.getString("ClientFrame.135")); //$NON-NLS-1$ }; - new Thread(moveTask).start(); + client.newClientThread("move", moveTask).start(); } private ArrayList getCollections(final Collection root, final ArrayList collectionsList) throws XMLDBException { @@ -951,7 +952,7 @@ private void reindexAction(final ActionEvent ev) { } ClientFrame.this.setCursor(Cursor.getDefaultCursor()); }; - new Thread(reindexThread).start(); + client.newClientThread("reindex", reindexThread).start(); } } @@ -972,18 +973,16 @@ private void uploadAction(final 
ActionEvent ev) { private void uploadFiles(final List files) { if (files != null && !files.isEmpty()) { - new Thread() { - @Override - public void run() { - final UploadDialog upload = new UploadDialog(); - try { - client.parse(files, upload); - client.getResources(); - } catch (final XMLDBException e) { - showErrorMessage(Messages.getString("ClientFrame.147") + e.getMessage(), e); - } + final Runnable uploadTask = () -> { + final UploadDialog upload = new UploadDialog(); + try { + client.parse(files, upload); + client.getResources(); + } catch (final XMLDBException e) { + showErrorMessage(Messages.getString("ClientFrame.147") + e.getMessage(), e); } - }.start(); + }; + client.newClientThread("upload", uploadTask).start(); } } @@ -1089,8 +1088,7 @@ private void restoreAction(final ActionEvent ev) { private void doRestore(final GuiRestoreListener listener, final String username, final String password, final String dbaPassword, final Path f, final String uri) { - final Callable callable = () -> { - + final Runnable restoreTask = () -> { final Restore restore = new Restore(); try { @@ -1123,12 +1121,9 @@ private void doRestore(final GuiRestoreListener listener, final String username, showErrorMessage(Messages.getString("ClientFrame.181") + listener.warningsAndErrorsAsString(), null); } } - - return null; }; - final ExecutorService executor = Executors.newSingleThreadExecutor(); - executor.submit(callable); + client.newClientThread("restore", restoreTask).start(); } public static void repairRepository(Collection collection) throws XMLDBException { @@ -1313,7 +1308,7 @@ private void enter() { commandStart = end; doc.insertString(commandStart++, "\n", defaultAttrs); //$NON-NLS-1$ if (command != null) { - process.setAction(command); + processRunnable.setAction(command); client.console.getHistory().addToHistory(command); } } catch (final BadLocationException e) { @@ -1353,17 +1348,10 @@ private void historyForward() { private void close() { setVisible(false); dispose(); - 
process.terminate(); + processThread.interrupt(); System.exit(SystemExitCodes.OK_EXIT_CODE); } - private void actionFinished() { - if (!process.getStatus()) { - close(); - } - displayPrompt(); - } - private void AboutAction() { JOptionPane.showMessageDialog(this, client.getNotice()); } @@ -1379,7 +1367,7 @@ public void mouseClicked(final MouseEvent e) { // cd into collection final String command = "cd " + '"' + resource.getName() + '"'; //$NON-NLS-1$ display(command + "\n"); //$NON-NLS-1$ - process.setAction(command); + processRunnable.setAction(command); } else { // open a document for editing ClientFrame.this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); @@ -1424,67 +1412,33 @@ public int compare(final ResourceDescriptor desc1, final ResourceDescriptor desc } } - class ProcessThread extends Thread { + private class ProcessRunnable implements Runnable { + private final TransferQueue queue = new LinkedTransferQueue<>(); - private String action = null; - private boolean terminate = false; - private boolean status = false; - - public ProcessThread() { - super("exist-client-processThread"); + public void setAction(final String action) { + queue.add(action); } - synchronized public void setAction(final String action) { - while (this.action != null) { + @Override + public void run() { + while (true) { + final String action; try { - wait(); + action = queue.take(); } catch (final InterruptedException e) { - //TODO report error? 
+ Thread.currentThread().interrupt(); + return; } - } - this.action = action; - notify(); - } - synchronized public void terminate() { - terminate = true; - notify(); - } - - synchronized public boolean getStatus() { - return status; - } + final boolean status = client.process(action); + displayPrompt(); - public boolean isReady() { - return action == null; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Runnable#run() - */ - @Override - public void run() { - while (!terminate) { - while (action == null) { - try { - synchronized (this) { - wait(); - } - } catch (final InterruptedException e) { - //TODO report error? - } - } - status = client.process(action); - synchronized (this) { - action = null; - actionFinished(); - notify(); + if (!status) { + close(); + break; } } } - } static class ResourceTableModel extends AbstractTableModel { diff --git a/src/org/exist/client/ConnectionDialog.java b/src/org/exist/client/ConnectionDialog.java index e691ae34cd1..2fed269c8b9 100644 --- a/src/org/exist/client/ConnectionDialog.java +++ b/src/org/exist/client/ConnectionDialog.java @@ -27,6 +27,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.prefs.BackingStoreException; import java.util.prefs.InvalidPreferencesFormatException; @@ -43,6 +44,8 @@ */ public class ConnectionDialog extends javax.swing.JDialog implements DialogWithResponse { + private static final String PROVIDED_PASSWORD_PLACEHOLDER = "__PROVIDED__PASSWORD__"; + private ComboBoxModel connectionTypeModel = null; private DefaultListModel favouritesModel = null; private final DefaultConnectionSettings defaultConnectionSettings; @@ -244,6 +247,10 @@ protected final void paintTabBorder(final java.awt.Graphics g, final int tabPlac cmbConnectionType.addActionListener(this::cmbConnectionTypeActionPerformed); txtUsername.setText(getDefaultConnectionSettings().getUsername()); + if 
(getDefaultConnectionSettings().getPassword() != null + && !getDefaultConnectionSettings().getPassword().isEmpty()) { + txtPassword.setText(PROVIDED_PASSWORD_PLACEHOLDER); + } tpConnectionType.setTabPlacement(javax.swing.JTabbedPane.RIGHT); @@ -433,11 +440,12 @@ public void mouseClicked(java.awt.event.MouseEvent evt) { private void btnConnectActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnConnectActionPerformed + final String password = Arrays.equals(txtPassword.getPassword(), PROVIDED_PASSWORD_PLACEHOLDER.toCharArray()) ? getDefaultConnectionSettings().getPassword() : new String(txtPassword.getPassword()); final Connection connection; if (cmbConnectionType.getSelectedItem() == ConnectionType.Remote) { - connection = new Connection(txtUsername.getText(), new String(txtPassword.getPassword()), txtServerUri.getText(), chkSsl.isSelected()); + connection = new Connection(txtUsername.getText(), password, txtServerUri.getText(), chkSsl.isSelected()); } else { - connection = new Connection(txtUsername.getText(), new String(txtPassword.getPassword()), txtConfiguration.getText()); + connection = new Connection(txtUsername.getText(), password, txtConfiguration.getText()); } for (final DialogCompleteWithResponse callback : getDialogCompleteWithResponseCallbacks()) { diff --git a/src/org/exist/client/DocumentView.java b/src/org/exist/client/DocumentView.java index 096f911fc39..c0481545dd7 100644 --- a/src/org/exist/client/DocumentView.java +++ b/src/org/exist/client/DocumentView.java @@ -347,67 +347,65 @@ public void actionPerformed(ActionEvent e) { } private void save() { - new Thread() { - public void run() { + final Runnable saveTask = () -> { + try { + statusMessage.setText(Messages.getString("DocumentView.36") + URIUtils.urlDecodeUtf8(resource.getId())); //$NON-NLS-1$ + if (collection instanceof Observable) { + ((Observable) collection) + .addObserver(new ProgressObserver()); + } + progress.setIndeterminate(true); + progress.setVisible(true); + 
resource.setContent(text.getText()); + collection.storeResource(resource); + if (collection instanceof Observable) { + ((Observable) collection).deleteObservers(); + } + } catch (final XMLDBException e) { + ClientFrame.showErrorMessage(Messages.getString("DocumentView.37") //$NON-NLS-1$ + + e.getMessage(), e); + } finally { + progress.setVisible(false); + } + }; + client.newClientThread("save", saveTask).start(); + } + + private void saveAs() { + final Runnable saveAsTask = () -> { + + //Get the name to save the resource as + final String nameres = JOptionPane.showInputDialog(null, Messages.getString("DocumentView.38")); //$NON-NLS-1$ + if (nameres != null) { try { - statusMessage.setText(Messages.getString("DocumentView.36") + URIUtils.urlDecodeUtf8(resource.getId())); //$NON-NLS-1$ + //Change status message and display a progress dialog + statusMessage.setText(Messages.getString("DocumentView.39") + nameres); //$NON-NLS-1$ if (collection instanceof Observable) { - ((Observable) collection) - .addObserver(new ProgressObserver()); + ((Observable) collection).addObserver(new ProgressObserver()); } progress.setIndeterminate(true); progress.setVisible(true); - resource.setContent(text.getText()); - collection.storeResource(resource); + + //Create a new resource as named, set the content, store the resource + XMLResource result = null; + result = (XMLResource) collection.createResource(URIUtils.encodeXmldbUriFor(nameres).toString(), XMLResource.RESOURCE_TYPE); + result.setContent(text.getText()); + collection.storeResource(result); + client.reloadCollection(); //reload the client collection if (collection instanceof Observable) { ((Observable) collection).deleteObservers(); } } catch (final XMLDBException e) { - ClientFrame.showErrorMessage(Messages.getString("DocumentView.37") //$NON-NLS-1$ - + e.getMessage(), e); + ClientFrame.showErrorMessage(Messages.getString("DocumentView.40") + e.getMessage(), e); //$NON-NLS-1$ + } catch (final URISyntaxException e) { + 
ClientFrame.showErrorMessage(Messages.getString("DocumentView.41") + e.getMessage(), e); //$NON-NLS-1$ } finally { + //hide the progress dialog progress.setVisible(false); } } - }.start(); - } - - private void saveAs() { - new Thread() { - public void run() { - - //Get the name to save the resource as - final String nameres = JOptionPane.showInputDialog(null, Messages.getString("DocumentView.38")); //$NON-NLS-1$ - if (nameres != null) { - try { - //Change status message and display a progress dialog - statusMessage.setText(Messages.getString("DocumentView.39") + nameres); //$NON-NLS-1$ - if (collection instanceof Observable) { - ((Observable) collection).addObserver(new ProgressObserver()); - } - progress.setIndeterminate(true); - progress.setVisible(true); - - //Create a new resource as named, set the content, store the resource - XMLResource result = null; - result = (XMLResource) collection.createResource(URIUtils.encodeXmldbUriFor(nameres).toString(), XMLResource.RESOURCE_TYPE); - result.setContent(text.getText()); - collection.storeResource(result); - client.reloadCollection(); //reload the client collection - if (collection instanceof Observable) { - ((Observable) collection).deleteObservers(); - } - } catch (final XMLDBException e) { - ClientFrame.showErrorMessage(Messages.getString("DocumentView.40") + e.getMessage(), e); //$NON-NLS-1$ - } catch (final URISyntaxException e) { - ClientFrame.showErrorMessage(Messages.getString("DocumentView.41") + e.getMessage(), e); //$NON-NLS-1$ - } finally { - //hide the progress dialog - progress.setVisible(false); - } - } - } - }.start(); + }; + client.newClientThread("save-as", saveAsTask).start(); } private void export() throws XMLDBException { diff --git a/src/org/exist/client/HighlightedTableCellRenderer.java b/src/org/exist/client/HighlightedTableCellRenderer.java index a2483c52d73..9023c24f425 100644 --- a/src/org/exist/client/HighlightedTableCellRenderer.java +++ 
b/src/org/exist/client/HighlightedTableCellRenderer.java @@ -59,7 +59,8 @@ public Component getTableCellRendererComponent(final JTable table, Object value, ((JLabel)renderer).setOpaque(true); } - final Color foreground, background; + final Color foreground; + final Color background; final T resources = (T)table.getModel(); if (isSelected) { foreground = highForeground; diff --git a/src/org/exist/client/InteractiveClient.java b/src/org/exist/client/InteractiveClient.java index 1b1c9a949eb..6934eafe7a6 100644 --- a/src/org/exist/client/InteractiveClient.java +++ b/src/org/exist/client/InteractiveClient.java @@ -122,10 +122,11 @@ public class InteractiveClient { protected static final String NO_EMBED_MODE_DEFAULT = "FALSE"; protected static final String USER_DEFAULT = SecurityManager.DBA_USER; - // Set - protected final static Properties defaultProps = new Properties(); + protected static final String driver = "org.exist.xmldb.DatabaseImpl"; - { + // Set + private final static Properties defaultProps = new Properties(); + static { defaultProps.setProperty(DRIVER, driver); defaultProps.setProperty(URI, URI_DEFAULT); defaultProps.setProperty(USER, USER_DEFAULT); @@ -140,7 +141,6 @@ public class InteractiveClient { protected static final int colSizes[] = new int[]{10, 10, 10, -1}; - protected static final String driver = "org.exist.xmldb.DatabaseImpl"; protected static String configuration = null; protected final TreeSet completitions = new TreeSet<>(); @@ -188,6 +188,9 @@ public class InteractiveClient { protected XmldbURI path = XmldbURI.ROOT_COLLECTION_URI; private Optional lazyTraceWriter = Optional.empty(); + private static final NamedThreadGroupFactory clientThreadGroupFactory = new NamedThreadGroupFactory("java-admin-client"); + private final ThreadGroup clientThreadGroup = clientThreadGroupFactory.newThreadGroup(null); + /** * Display help on commands */ @@ -251,6 +254,18 @@ public static void main(final String[] args) { } } + /** + * Create a new thread for this 
client instance. + * + * @param threadName the name of the thread + * @param runnable the function to execute on the thread + * + * @return the thread + */ + Thread newClientThread(final String threadName, final Runnable runnable) { + return new Thread(clientThreadGroup, runnable, clientThreadGroup.getName() + "." + threadName); + } + /** * Register XML:DB driver and retrieve root collection. * @@ -564,7 +579,8 @@ protected boolean process(final String line) { messageln("cp requires two arguments."); return true; } - final XmldbURI src, dest; + final XmldbURI src; + final XmldbURI dest; try { src = XmldbURI.xmldbUriFor(args[1]); dest = XmldbURI.xmldbUriFor(args[2]); @@ -2155,17 +2171,18 @@ public boolean run(final String args[]) throws Exception { } // prompt for password if needed - if (interactive && options.startGUI) { - - final boolean haveLoginData = getGuiLoginData(properties); - if (!haveLoginData) { - return false; - } + if (!hasLoginDetails(options)) { + if (interactive && options.startGUI) { + final boolean haveLoginData = getGuiLoginData(properties); + if (!haveLoginData) { + return false; + } - } else if (options.username.isPresent() && !options.password.isPresent()) { - try { - properties.setProperty(PASSWORD, console.readLine("password: ", Character.valueOf('*'))); - } catch (final Exception e) { + } else if (options.username.isPresent() && !options.password.isPresent()) { + try { + properties.setProperty(PASSWORD, console.readLine("password: ", Character.valueOf('*'))); + } catch (final Exception e) { + } } } @@ -2304,6 +2321,12 @@ public boolean run(final String args[]) throws Exception { return true; } + private boolean hasLoginDetails(final CommandlineOptions options) { + return options.username.isPresent() + && options.password.isPresent() + && (options.embedded || options.options.containsKey("uri")); + } + public static String getExceptionMessage(Throwable e) { Throwable cause; while ((cause = e.getCause()) != null) { diff --git 
a/src/org/exist/client/QueryDialog.java b/src/org/exist/client/QueryDialog.java index 9e01a406f03..c864849727d 100644 --- a/src/org/exist/client/QueryDialog.java +++ b/src/org/exist/client/QueryDialog.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Properties; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import javax.swing.BorderFactory; import javax.swing.Box; import javax.swing.DefaultComboBoxModel; @@ -97,7 +98,7 @@ public class QueryDialog extends JFrame { private JProgressBar progress; private JButton submitButton; private JButton killButton; - private QueryThread q = null; + private QueryRunnable queryRunnable = null; private Resource resource = null; private QueryDialog(final InteractiveClient client, final Collection collection, final Properties properties, boolean loadedFromDb) { @@ -212,7 +213,7 @@ private void setupComponents(boolean loadedFromDb) { if (collection instanceof LocalCollection) { killButton.setEnabled(true); } - q = doQuery(); + queryRunnable = doQuery(); }); toolbar.addSeparator(); @@ -222,11 +223,11 @@ private void setupComponents(boolean loadedFromDb) { toolbar.add(killButton); killButton.setEnabled(false); killButton.addActionListener(e -> { - if (q != null) { - q.killQuery(); + if (queryRunnable != null) { + queryRunnable.killQuery(); killButton.setEnabled(false); - q = null; + queryRunnable = null; } }); @@ -455,15 +456,19 @@ private void save(String stringToSave, String fileCategory) { } } - private QueryThread doQuery() { + private static final AtomicInteger queryThreadId = new AtomicInteger(); + + private QueryRunnable doQuery() { final String xpath = query.getText(); if (xpath.length() == 0) { return null; } resultDisplay.setText(""); - final QueryThread q = new QueryThread(xpath); - q.start(); - return q; + + final QueryRunnable queryRunnable = new QueryRunnable(xpath); + final Thread queryThread = client.newClientThread("query-" + 
queryThreadId.getAndIncrement(), queryRunnable); + queryThread.start(); + return queryRunnable; } @@ -510,26 +515,23 @@ private void compileQuery() { } } - private static AtomicInteger queryThreadId = new AtomicInteger(); - - private class QueryThread extends Thread { + private class QueryRunnable implements Runnable { private final String xpath; - private XQueryContext context; + private final AtomicReference runningContext = new AtomicReference<>(); - public QueryThread(final String query) { - super("exist-queryThread-" + queryThreadId.getAndIncrement()); + public QueryRunnable(final String query) { this.xpath = query; - this.context = null; } public boolean killQuery() { - if (context != null) { - final XQueryWatchDog xwd = context.getWatchDog(); + final XQueryContext contextRef = runningContext.get(); + if (contextRef != null) { + final XQueryWatchDog xwd = contextRef.getWatchDog(); final boolean retval = !xwd.isTerminating(); if (retval) { xwd.kill(0); } - context = null; + runningContext.compareAndSet(contextRef, null); return retval; } @@ -546,6 +548,7 @@ public void run() { long tResult = 0; long tCompiled = 0; ResourceSet result = null; + XQueryContext context = null; try { final EXistXQueryService service = (EXistXQueryService) collection.getService("XQueryService", "1.0"); service.setProperty(OutputKeys.INDENT, properties.getProperty(OutputKeys.INDENT, "yes")); @@ -560,6 +563,7 @@ public void run() { // Check could also be collection instanceof LocalCollection if (compiled instanceof CompiledXQuery) { context = ((CompiledXQuery) compiled).getContext(); + runningContext.set(context); } tCompiled = t1 - t0; @@ -569,8 +573,8 @@ public void run() { exprDisplay.setText(writer.toString()); result = service.execute(compiled); - context = null; tResult = System.currentTimeMillis() - t1; + runningContext.set(null); // jmfg: Is this still needed? 
I don't think so writer = new StringWriter(); @@ -606,7 +610,7 @@ public void run() { resultDisplay.setCaretPosition(0); statusMessage.setText(Messages.getString("QueryDialog.Found") + " " + result.getSize() + " " + Messages.getString("QueryDialog.items") + "." + " " + Messages.getString("QueryDialog.Compilation") + ": " + tCompiled + "ms, " + Messages.getString("QueryDialog.Execution") + ": " + tResult + "ms"); - } catch (final Throwable e) { + } catch (final XMLDBException e) { statusMessage.setText(Messages.getString("QueryDialog.Error") + ": " + InteractiveClient.getExceptionMessage(e) + ". " + Messages.getString("QueryDialog.Compilation") + ": " + tCompiled + "ms, " + Messages.getString("QueryDialog.Execution") + ": " + tResult + "ms"); progress.setVisible(false); @@ -625,7 +629,7 @@ public void run() { } catch (final XMLDBException e) { } } - if (client.queryHistory.isEmpty() || !((String) client.queryHistory.getLast()).equals(xpath)) { + if (client.queryHistory.isEmpty() || !client.queryHistory.getLast().equals(xpath)) { client.addToHistory(xpath); client.writeQueryHistory(); addQuery(xpath); diff --git a/src/org/exist/client/ZipFilter.java b/src/org/exist/client/ZipFilter.java index 915aa5612b6..9228a02d003 100644 --- a/src/org/exist/client/ZipFilter.java +++ b/src/org/exist/client/ZipFilter.java @@ -27,15 +27,9 @@ public class ZipFilter extends FileFilter { @Override public boolean accept(final File f) { - if (f.getName().toLowerCase().endsWith(".zip")) { - return true; - } - - if(f.isDirectory()) { - return true; - } - - return false; + return f.getName().toLowerCase().endsWith(".zip") + || f.isDirectory(); + } @Override diff --git a/src/org/exist/collections/Collection.java b/src/org/exist/collections/Collection.java index e219abecc48..e18f84808a0 100644 --- a/src/org/exist/collections/Collection.java +++ b/src/org/exist/collections/Collection.java @@ -4,19 +4,14 @@ import org.exist.Resource; import org.exist.collections.triggers.TriggerException; import 
org.exist.dom.QName; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.DocumentSet; -import org.exist.dom.persistent.MutableDocumentSet; +import org.exist.dom.persistent.*; import org.exist.security.*; import org.exist.security.SecurityManager; import org.exist.storage.*; -import org.exist.storage.cache.Cacheable; import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; -import org.exist.storage.lock.Lock; +import org.exist.storage.lock.*; import org.exist.storage.lock.Lock.LockMode; -import org.exist.storage.lock.LockedDocumentMap; import org.exist.storage.txn.Txn; import org.exist.util.LockException; import org.exist.util.SyntaxException; @@ -33,6 +28,10 @@ import java.util.Iterator; import java.util.List; import java.util.Observable; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import static org.exist.storage.lock.Lock.LockMode.READ_LOCK; +import static org.exist.storage.lock.Lock.LockMode.WRITE_LOCK; /** * Represents a Collection in the database. A collection maintains a list of @@ -40,9 +39,9 @@ * * Collections are shared between {@link org.exist.storage.DBBroker} instances. The caller * is responsible to lock/unlock the collection. Call {@link DBBroker#openCollection(XmldbURI, LockMode)} - * to get a collection with a read or write lock and {@link #release(LockMode)} to release the lock. + * to get a collection with a read or write lock and {@link #close()} to release the lock. */ -public interface Collection extends Resource, Comparable, Cacheable { +public interface Collection extends Resource, Comparable, AutoCloseable { /** * The length in bytes of the Collection ID @@ -54,26 +53,6 @@ public interface Collection extends Resource, Comparable, Cacheable */ int UNKNOWN_COLLECTION_ID = -1; - /** - * Get's the lock for this Collection - *

- * Note - this does not actually acquire the lock - * for that you must subsequently call {@link Lock#acquire(LockMode)} - * - * @return The lock for the Collection - */ - Lock getLock(); - - /** - * Closes the Collection, i.e. releases the lock held by - * the current thread. - *

- * This is a shortcut for {@code getLock().release(LockMode)} - * - * @param mode The mode of the Lock to release - */ - void release(LockMode mode); - /** * Get the internal id. * @@ -86,14 +65,14 @@ public interface Collection extends Resource, Comparable, Cacheable * * @param id The id of the Collection */ - void setId(int id); + @EnsureContainerLocked(mode=WRITE_LOCK) void setId(int id); /** * Set the internal storage address of the Collection data * * @param address The internal storage address */ - void setAddress(long address); + @EnsureContainerLocked(mode=WRITE_LOCK) void setAddress(long address); /** * Gets the internal storage address of the Collection data @@ -138,30 +117,12 @@ public interface Collection extends Resource, Comparable, Cacheable Permission getPermissionsNoLock(); /** - * Get the mode of the Collection permissions + * Set the mode of the Collection. * + * @param broker The database broker. * @param mode The unix like mode of the Collection permissions */ - void setPermissions(int mode) throws LockException, PermissionDeniedException; - - /** - * Set the mode of the Collection permissions - * - * @param mode The unix like mode of the Collection permissions - */ - @Deprecated - void setPermissions(String mode) throws SyntaxException, LockException, PermissionDeniedException; - - /** - * Set permissions for the collection. 
- * - * @param permissions the permissions to set on the Collection - * - * @deprecated This function is considered a security problem - * and should be removed, move code to copyOf or Constructor - */ - @Deprecated - void setPermissions(Permission permissions) throws LockException; + void setPermissions(DBBroker broker, int mode) throws LockException, PermissionDeniedException; /** * Gets the creation timestamp of this Collection @@ -178,7 +139,7 @@ public interface Collection extends Resource, Comparable, Cacheable * * @param timestamp the creation timestamp in milliseconds */ - void setCreationTime(long timestamp); + @EnsureContainerLocked(mode=WRITE_LOCK) void setCreationTime(long timestamp); /** * Get the Collection Configuration of this Collection @@ -246,6 +207,16 @@ public interface Collection extends Resource, Comparable, Cacheable */ int getMemorySize(); + /** + * Returns the estimated amount of memory used by this collection + * and its documents. This information is required by the + * {@link org.exist.storage.CollectionCacheManager} to be able + * to resize the caches. + * + * @return estimated amount of memory in bytes + */ + int getMemorySizeNoLock(); + /** * Get the parent Collection. 
* @@ -329,9 +300,8 @@ public interface Collection extends Resource, Comparable, Cacheable * * @param broker The database broker * @param child The child Collection to add to this Collection - * @param isNew Whether the Child Collection is a newly created Collection */ - void addCollection(DBBroker broker, Collection child, boolean isNew) + void addCollection(DBBroker broker, @EnsureLocked(mode=WRITE_LOCK) Collection child) throws PermissionDeniedException, LockException; /** @@ -342,7 +312,7 @@ void addCollection(DBBroker broker, Collection child, boolean isNew) * @return A list of entries in this Collection */ List getEntries(DBBroker broker) - throws PermissionDeniedException, LockException; + throws PermissionDeniedException, LockException, IOException; /** * Get the entry for a child Collection @@ -351,7 +321,8 @@ List getEntries(DBBroker broker) * @param name The name of the child Collection * @return The child Collection entry */ - CollectionEntry getChildCollectionEntry(DBBroker broker, String name) throws PermissionDeniedException; + CollectionEntry getChildCollectionEntry(DBBroker broker, String name) + throws PermissionDeniedException, LockException, IOException; /** * Get the entry for a resource @@ -361,7 +332,7 @@ List getEntries(DBBroker broker) * @return The resource entry */ CollectionEntry getResourceEntry(DBBroker broker, String name) - throws PermissionDeniedException, LockException; + throws PermissionDeniedException, LockException, IOException; /** * Update the specified child Collection @@ -369,7 +340,7 @@ CollectionEntry getResourceEntry(DBBroker broker, String name) * @param broker The database broker * @param child The child Collection to update */ - void update(DBBroker broker, Collection child) throws PermissionDeniedException, LockException; + void update(DBBroker broker, @EnsureLocked(mode=WRITE_LOCK) Collection child) throws PermissionDeniedException, LockException; /** * Add a document to the collection @@ -388,7 +359,7 @@ void 
addDocument(Txn transaction, DBBroker broker, DocumentImpl doc) * @param broker The database broker * @param doc The document to unlink from the Collection */ - void unlinkDocument(DBBroker broker, DocumentImpl doc) throws PermissionDeniedException, LockException; + void unlinkDocument(DBBroker broker, @EnsureLocked(mode=WRITE_LOCK) DocumentImpl doc) throws PermissionDeniedException, LockException; /** * Return an iterator over all child Collections @@ -457,7 +428,7 @@ void addDocument(Txn transaction, DBBroker broker, DocumentImpl doc) * @return The mutable document set provided in {@param docs} */ MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive) - throws PermissionDeniedException; + throws PermissionDeniedException, LockException; /** * Gets all of the documents from the Collection @@ -469,7 +440,7 @@ MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean rec * @return The mutable document set provided in {@param docs} */ MutableDocumentSet allDocs(DBBroker broker, MutableDocumentSet docs, boolean recursive, - LockedDocumentMap lockMap) throws PermissionDeniedException; + LockedDocumentMap lockMap) throws PermissionDeniedException, LockException; /** * Gets all of the documents from the Collection @@ -528,7 +499,7 @@ DocumentSet getDocuments(DBBroker broker, MutableDocumentSet docs, LockedDocumen * @param name The name of the document (without collection path) * @return the document or null if it doesn't exist */ - DocumentImpl getDocument(DBBroker broker, XmldbURI name) throws PermissionDeniedException; + @Nullable @EnsureUnlocked DocumentImpl getDocument(DBBroker broker, XmldbURI name) throws PermissionDeniedException; /** * Retrieve a child resource after putting a read lock on it. 
@@ -537,10 +508,11 @@ DocumentSet getDocuments(DBBroker broker, MutableDocumentSet docs, LockedDocumen * @param broker The database broker * @param name The name of the document (without collection path) * @return The locked document or null if it doesn't exist - * @deprecated Use getDocumentWithLock(DBBroker broker, XmldbURI uri, int lockMode) + * + * @deprecated Use {@link #getDocumentWithLock(DBBroker, XmldbURI, LockMode)} */ @Deprecated - DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI name) + @Nullable LockedDocument getDocumentWithLock(DBBroker broker, XmldbURI name) throws LockException, PermissionDeniedException; /** @@ -552,7 +524,7 @@ DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI name) * @param lockMode The mode of the lock to acquire * @return The locked document or null if it doesn't exist */ - DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI name, LockMode lockMode) + @Nullable LockedDocument getDocumentWithLock(DBBroker broker, XmldbURI name, LockMode lockMode) throws LockException, PermissionDeniedException; /** @@ -566,24 +538,7 @@ DocumentImpl getDocumentWithLock(DBBroker broker, XmldbURI name, LockMode lockMo * @deprecated Use {@link #getDocument(DBBroker, XmldbURI)} instead */ @Deprecated - DocumentImpl getDocumentNoLock(DBBroker broker, String rawPath) throws PermissionDeniedException; - - /** - * Release any locks held on the document - * - * @param doc The document to release locks on - * @deprecated Use {@link #releaseDocument(DocumentImpl, LockMode)} instead - */ - @Deprecated - void releaseDocument(DocumentImpl doc); - - /** - * Release any locks held on the document - * - * @param doc The document to release locks on - * @param mode The lock mode to release - */ - void releaseDocument(DocumentImpl doc, LockMode mode); + @Nullable DocumentImpl getDocumentNoLock(DBBroker broker, String rawPath) throws PermissionDeniedException; /** * Remove the specified child Collection @@ -846,6 +801,32 @@ BinaryDocument 
addBinaryResource(Txn transaction, DBBroker broker, BinaryDocumen String mimeType, long size, Date created, Date modified) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException; + /** + * Store a binary document into the Collection (streaming) + * + * Locks the collection while the resource is being saved. Triggers will be called after the collection + * has been unlocked while keeping a lock on the resource to prevent modification. + * + * Callers should not lock the collection before calling this method as this may lead to deadlocks. + * + * @param transaction The database transaction + * @param broker The database broker + * @param blob the binary resource to store the data into + * @param is The content for the document + * @param mimeType The Internet Media Type of the document + * @param size The size in bytes of the document + * @param created The created timestamp of the document + * @param modified The modified timestamp of the document + * @param preserve In the case of a copy, cause the copy process to preserve the following attributes of each + * source in the copy: modification time, file mode, user ID, and group ID, as allowed by + * permissions. Access Control Lists (ACLs) will also be preserved. 
+ * + * @return The stored Binary Document object + */ + BinaryDocument addBinaryResource(Txn transaction, DBBroker broker, BinaryDocument blob, InputStream is, + String mimeType, long size, Date created, Date modified, DBBroker.PreserveType preserve) + throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException; + /** * Gets an Observable object for this Collection * @@ -858,7 +839,9 @@ BinaryDocument addBinaryResource(Txn transaction, DBBroker broker, BinaryDocumen * * @param outputStream The output stream to write the collection contents to */ - void serialize(final VariableByteOutputStream outputStream) throws IOException, LockException; + @EnsureContainerLocked(mode=READ_LOCK) void serialize(final VariableByteOutputStream outputStream) throws IOException, LockException; + + @Override void close(); //TODO(AR) consider a better separation between Broker and Collection, possibly introduce a CollectionManager object interface InternalAccess { @@ -878,7 +861,7 @@ protected CollectionEntry(final XmldbURI uri, final Permission permissions) { this.permissions = permissions; } - public abstract void readMetadata(DBBroker broker); + public abstract void readMetadata(DBBroker broker) throws IOException, LockException; public abstract void read(VariableByteInput is) throws IOException; @@ -909,7 +892,7 @@ public SubCollectionEntry(final SecurityManager sm, final XmldbURI uri) { } @Override - public void readMetadata(final DBBroker broker) { + public void readMetadata(final DBBroker broker) throws IOException, LockException { broker.readCollectionEntry(this); } @@ -941,7 +924,7 @@ public void readMetadata(final DBBroker broker) { } @Override - public void read(final VariableByteInput is) throws IOException { + public void read(final VariableByteInput is) { } } } diff --git a/src/org/exist/collections/CollectionCache.java b/src/org/exist/collections/CollectionCache.java index 3a6ff955e60..cd087882f86 100644 --- 
a/src/org/exist/collections/CollectionCache.java +++ b/src/org/exist/collections/CollectionCache.java @@ -1,6 +1,6 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2001-2016 The eXist Project + * Copyright (C) 2001-2017 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or @@ -19,170 +19,347 @@ */ package org.exist.collections; -import java.util.Iterator; +import java.beans.ConstructorProperties; +import java.util.Optional; +import java.util.function.Function; -import net.jcip.annotations.NotThreadSafe; +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.Weigher; +import com.github.benmanes.caffeine.cache.stats.CacheStats; +import com.github.benmanes.caffeine.cache.stats.ConcurrentStatsCounter; +import com.github.benmanes.caffeine.cache.stats.StatsCounter; +import net.jcip.annotations.ThreadSafe; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.exist.storage.BrokerPool; -import org.exist.storage.BrokerPoolService; -import org.exist.storage.CacheManager; -import org.exist.storage.cache.LRUCache; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; -import org.exist.util.hashtable.Object2LongHashMap; -import org.exist.util.hashtable.SequencedLongHashMap; +import org.exist.storage.*; +import org.exist.util.Configuration; import org.exist.xmldb.XmldbURI; +import javax.annotation.Nullable; + /** - * Global cache for {@link org.exist.collections.Collection} objects. The - * cache is owned by {@link org.exist.storage.index.CollectionStore}. It is not - * synchronized. Thus a lock should be obtained on the collection store before - * accessing the cache. - * - * @author wolf + * Global cache for {@link org.exist.collections.Collection} objects. 
+ * + * The CollectionCache safely permits concurrent access + * however appropriate Collection locks should be held + * on the actual collections when manipulating the + * CollectionCache + * + * @author Adam Retter */ -@NotThreadSafe -public class CollectionCache extends LRUCache implements BrokerPoolService { +@ThreadSafe +public class CollectionCache implements BrokerPoolService { private final static Logger LOG = LogManager.getLogger(CollectionCache.class); - private final BrokerPool pool; - private Object2LongHashMap names; + public static final int DEFAULT_CACHE_SIZE_BYTES = 64 * 1024 * 1024; // 64 MB + public static final String CACHE_SIZE_ATTRIBUTE = "collectionCache"; + public static final String PROPERTY_CACHE_SIZE_BYTES = "db-connection.collection-cache-mem"; - public CollectionCache(final BrokerPool pool, final int blockBuffers, final double growthThreshold) { - super("collection cache", blockBuffers, 2.0, growthThreshold, CacheManager.DATA_CACHE); - this.pool = pool; - this.names = new Object2LongHashMap<>(blockBuffers); - } + private int maxCacheSize = -1; + private Cache cache; + private StatsCounter statsCounter = new ConcurrentStatsCounter(); @Override - public void add(final Collection collection) { - add(collection, 1); + public void configure(final Configuration configuration) throws BrokerPoolServiceException { + this.maxCacheSize = Optional.of(configuration.getInteger(PROPERTY_CACHE_SIZE_BYTES)) + .filter(size -> size > 0) + .orElse(DEFAULT_CACHE_SIZE_BYTES); + + if(LOG.isDebugEnabled()){ + LOG.debug("CollectionsCache will use {} bytes max.", this.maxCacheSize); + } } @Override - public void add(final Collection collection, final int initialRefCount) { - // don't cache the collection during initialization: SecurityManager is not yet online - if(!pool.isOperational()) { - return; - } + public void prepare(final BrokerPool brokerPool) throws BrokerPoolServiceException { + final Weigher collectionWeigher = (uri, collection) -> 
collection.getMemorySizeNoLock(); + this.statsCounter = new ConcurrentStatsCounter(); + this.cache = Caffeine.newBuilder() + .maximumWeight(maxCacheSize) + .weigher(collectionWeigher) + .recordStats(() -> statsCounter) + .build(); + } - super.add(collection, initialRefCount); - final String name = collection.getURI().getRawCollectionPath(); - names.put(name, collection.getKey()); + /** + * Returns the maximum size of the cache in bytes + * + * @return maximum size of the cache in bytes + */ + public int getMaxCacheSize() { + return maxCacheSize; } - @Override - public Collection get(final Collection collection) { - return get(collection.getKey()); + /** + * Get a Snapshot of the Cache Statistics + * + * @return The cache statistics + */ + public Statistics getStatistics() { + final CacheStats cacheStats = statsCounter.snapshot(); + return new Statistics( + cacheStats.hitCount(), + cacheStats.missCount(), + cacheStats.loadSuccessCount(), + cacheStats.loadFailureCount(), + cacheStats.totalLoadTime(), + cacheStats.evictionCount(), + cacheStats.evictionWeight() + ); } - public Collection get(final XmldbURI name) { - final long key = names.get(name.getRawCollectionPath()); - if (key < 0) { - return null; - } - return get(key); + /** + * Returns the Collection from the cache or creates the entry if it is not present + * + * @param collectionUri The URI of the Collection + * @param creator A function that creates (or supplies) the Collection for the URI + * + * @return The collection indicated by the URI + */ + public Collection getOrCreate(final XmldbURI collectionUri, final Function creator) { + //NOTE: We must not store LockedCollections in the CollectionCache! 
So we call LockedCollection#unwrapLocked + return cache.get(key(collectionUri), uri -> LockedCollection.unwrapLocked(creator.apply(XmldbURI.create(uri)))); } - // TODO(AR) we have a mix of concerns here, we should not involve collection locking in the operation of the cache or invalidating the collectionConfiguration /** - * Overwritten to lock collections before they are removed. + * Returns the Collection from the cache or null if the Collection + * is not in the cache + * + * @param collectionUri The URI of the Collection + * @return The collection indicated by the URI or null otherwise */ - @Override - protected void removeOne(final Collection item) { - boolean removed = false; - SequencedLongHashMap.Entry next = map.getFirstEntry(); - int tries = 0; - do { - final Collection cached = next.getValue(); - if(cached.getKey() != item.getKey()) { - final Lock lock = cached.getLock(); - if (lock.attempt(LockMode.READ_LOCK)) { - try { - if (cached.allowUnload()) { - if(pool.getConfigurationManager() != null) { // might be null during db initialization - pool.getConfigurationManager().invalidate(cached.getURI(), null); - } - names.remove(cached.getURI().getRawCollectionPath()); - cached.sync(true); - map.remove(cached.getKey()); - removed = true; - } - } finally { - lock.release(LockMode.READ_LOCK); - } - } - } - if (!removed) { - next = next.getNext(); - if (next == null && tries < 2) { - next = map.getFirstEntry(); - tries++; - } else { - LOG.info("Unable to remove entry"); - removed = true; - } - } - } while(!removed); - cacheManager.requestMem(this); + @Nullable public Collection getIfPresent(final XmldbURI collectionUri) { + return cache.getIfPresent(key(collectionUri)); } - @Override - public void remove(final Collection item) { - super.remove(item); - names.remove(item.getURI().getRawCollectionPath()); + /** + * Put's the Collection into the cache + * + * If an existing Collection object for the same URI exists + * in the Cache it will be overwritten + * + * 
@param collection + */ + public void put(final Collection collection) { + //NOTE: We must not store LockedCollections in the CollectionCache! So we call LockedCollection#unwrapLocked + cache.put(key(collection.getURI()), LockedCollection.unwrapLocked(collection)); + } - // might be null during db initialization - if(pool.getConfigurationManager() != null) { - pool.getConfigurationManager().invalidate(item.getURI(), null); - } + /** + * Removes an entry from the cache + * + * @param collectionUri The URI of the Collection to remove from the Cache + */ + public void invalidate(final XmldbURI collectionUri) { + cache.invalidate(key(collectionUri)); + } /** - * Compute and return the in-memory size of all collections - * currently contained in this cache. + * Removes all entries from the Cache + */ + public void invalidateAll() { + cache.invalidateAll(); + } + + /** + * Calculates the key for the Cache * - * @see org.exist.storage.CollectionCacheManager - * @return in-memory size in bytes. + * @param collectionUri The URI of the Collection + * @return the key for the Collection in the Cache */ - public int getRealSize() { - int size = 0; - for (final Iterator i = names.valueIterator(); i.hasNext(); ) { - final Collection collection = get(i.next()); - if (collection != null) { - size += collection.getMemorySize(); - } - } - return size; + private String key(final XmldbURI collectionUri) { + return collectionUri.getRawCollectionPath(); } - @Override - public void resize(final int newSize) { - if (newSize < max) { - shrink(newSize); - } else { - if(LOG.isDebugEnabled()) { - LOG.debug("Growing collection cache to " + newSize); - } - final SequencedLongHashMap newMap = new SequencedLongHashMap<>(newSize * 2); - final Object2LongHashMap newNames = new Object2LongHashMap<>(newSize); - for(SequencedLongHashMap.Entry next = map.getFirstEntry(); next != null; next = next.getNext()) { - final Collection cacheable = next.getValue(); - newMap.put(cacheable.getKey(), cacheable); - 
newNames.put(cacheable.getURI().getRawCollectionPath(), cacheable.getKey()); - } - max = newSize; - map = newMap; - names = newNames; - accounting.reset(); - accounting.setTotalSize(max); + /** + * Basically an eXist abstraction + * for {@link CacheStats} + * Apache License Version 2.0 + */ + public static class Statistics { + private final long hitCount; + private final long missCount; + private final long loadSuccessCount; + private final long loadFailureCount; + private final long totalLoadTime; + private final long evictionCount; + private final long evictionWeight; + + /** + * @param hitCount the number of cache hits + * @param missCount the number of cache misses + * @param loadSuccessCount the number of successful cache loads + * @param loadFailureCount the number of failed cache loads + * @param totalLoadTime the total load time (success and failure) + * @param evictionCount the number of entries evicted from the cache + * @param evictionWeight the sum of weights of entries evicted from the cache + */ + @ConstructorProperties({"hitCount", "missCount", "loadSuccessCount", "loadFailureCount", "totalLoadTime", "evictionCount", "evictionWeight"}) + public Statistics(final long hitCount, final long missCount, final long loadSuccessCount, final long loadFailureCount, final long totalLoadTime, final long evictionCount, final long evictionWeight) { + this.hitCount = hitCount; + this.missCount = missCount; + this.loadSuccessCount = loadSuccessCount; + this.loadFailureCount = loadFailureCount; + this.totalLoadTime = totalLoadTime; + this.evictionCount = evictionCount; + this.evictionWeight = evictionWeight; } - } - @Override - protected void shrink(final int newSize) { - super.shrink(newSize); - names = new Object2LongHashMap<>(newSize); + /** + * Returns the number of times {@link Cache} lookup methods have returned a cached value. 
+ * + * @return the number of times {@link Cache} lookup methods have returned a cached value + */ + public long getHitCount() { + return hitCount; + } + + /** + * Returns the number of times {@link Cache} lookup methods have returned either a cached or + * uncached value. This is defined as {@code hitCount + missCount}. + * + * @return the {@code hitCount + missCount} + */ + public long getRequestCount() { + return hitCount + missCount; + } + + /** + * Returns the ratio of cache requests which were hits. This is defined as + * {@code hitCount / requestCount}, or {@code 1.0} when {@code requestCount == 0}. Note that + * {@code hitRate + missRate =~ 1.0}. + * + * @return the ratio of cache requests which were hits + */ + public double getHitRate() { + final long requestCount = getRequestCount(); + return requestCount == 0 ? 1.0 : (double) hitCount / requestCount; + } + + /** + * Returns the number of times {@link Cache} lookup methods have returned an uncached (newly + * loaded) value, or null. Multiple concurrent calls to {@link Cache} lookup methods on an absent + * value can result in multiple misses, all returning the results of a single cache load + * operation. + * + * @return the number of times {@link Cache} lookup methods have returned an uncached (newly + * loaded) value, or null + */ + public long getMissCount() { + return missCount; + } + + /** + * Returns the ratio of cache requests which were misses. This is defined as + * {@code missCount / requestCount}, or {@code 0.0} when {@code requestCount == 0}. + * Note that {@code hitRate + missRate =~ 1.0}. Cache misses include all requests which + * weren't cache hits, including requests which resulted in either successful or failed loading + * attempts, and requests which waited for other threads to finish loading. It is thus the case + * that {@code missCount >= loadSuccessCount + loadFailureCount}. Multiple + * concurrent misses for the same key will result in a single load operation. 
+ * + * @return the ratio of cache requests which were misses + */ + public double getMissRate() { + final long requestCount = getRequestCount(); + return requestCount == 0 ? 0.0 : (double) missCount / requestCount; + } + + /** + * Returns the total number of times that {@link Cache} lookup methods attempted to load new + * values. This includes both successful load operations, as well as those that threw exceptions. + * This is defined as {@code loadSuccessCount + loadFailureCount}. + * + * @return the {@code loadSuccessCount + loadFailureCount} + */ + public long getLoadCount() { + return loadSuccessCount + loadFailureCount; + } + + /** + * Returns the number of times {@link Cache} lookup methods have successfully loaded a new value. + * This is always incremented in conjunction with {@link #missCount}, though {@code missCount} + * is also incremented when an exception is encountered during cache loading (see + * {@link #loadFailureCount}). Multiple concurrent misses for the same key will result in a + * single load operation. + * + * @return the number of times {@link Cache} lookup methods have successfully loaded a new value + */ + public long getLoadSuccessCount() { + return loadSuccessCount; + } + + /** + * Returns the number of times {@link Cache} lookup methods failed to load a new value, either + * because no value was found or an exception was thrown while loading. This is always incremented + * in conjunction with {@code missCount}, though {@code missCount} is also incremented when cache + * loading completes successfully (see {@link #loadSuccessCount}). Multiple concurrent misses for + * the same key will result in a single load operation. + * + * @return the number of times {@link Cache} lookup methods failed to load a new value + */ + public long getLoadFailureCount() { + return loadFailureCount; + } + + /** + * Returns the ratio of cache loading attempts which threw exceptions. 
This is defined as + * {@code loadFailureCount / (loadSuccessCount + loadFailureCount)}, or {@code 0.0} when + * {@code loadSuccessCount + loadFailureCount == 0}. + * + * @return the ratio of cache loading attempts which threw exceptions + */ + public double getLoadFailureRate() { + final long totalLoadCount = loadSuccessCount + loadFailureCount; + return totalLoadCount == 0 + ? 0.0 + : (double) loadFailureCount / totalLoadCount; + } + + /** + * Returns the total number of nanoseconds the cache has spent loading new values. This can be + * used to calculate the miss penalty. This value is increased every time {@code loadSuccessCount} + * or {@code loadFailureCount} is incremented. + * + * @return the total number of nanoseconds the cache has spent loading new values + */ + public long getTotalLoadTime() { + return totalLoadTime; + } + + /** + * Returns the average time spent loading new values. This is defined as + * {@code totalLoadTime / (loadSuccessCount + loadFailureCount)}. + * + * @return the average time spent loading new values + */ + public double getAverageLoadPenalty() { + final long totalLoadCount = loadSuccessCount + loadFailureCount; + return totalLoadCount == 0 + ? 0.0 + : (double) totalLoadTime / totalLoadCount; + } + + /** + * Returns the number of times an entry has been evicted. This count does not include manual + * {@linkplain Cache#invalidate invalidations}. + * + * @return the number of times an entry has been evicted + */ + + public long getEvictionCount() { + return evictionCount; + } + + /** + * Returns the sum of weights of evicted entries. This total does not include manual + * {@linkplain Cache#invalidate invalidations}. 
+ * + * @return the sum of weights of evicted entries + */ + public long getEvictionWeight() { + return evictionWeight; + } } } diff --git a/src/org/exist/collections/CollectionConfigurationManager.java b/src/org/exist/collections/CollectionConfigurationManager.java index 180db27e6fe..23f58225985 100644 --- a/src/org/exist/collections/CollectionConfigurationManager.java +++ b/src/org/exist/collections/CollectionConfigurationManager.java @@ -22,12 +22,13 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.EXistException; +import org.exist.collections.triggers.TriggerException; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.memtree.SAXAdapter; import org.exist.security.PermissionDeniedException; import org.exist.storage.*; import org.exist.storage.lock.Lock.LockMode; -import org.exist.storage.lock.Locked; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import org.exist.util.LockException; @@ -40,10 +41,12 @@ import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; +import java.io.IOException; import java.io.StringReader; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.Callable; +import java.util.concurrent.locks.ReadWriteLock; /** * Manages index configurations. 
Index configurations are stored in a collection @@ -72,7 +75,7 @@ public class CollectionConfigurationManager implements BrokerPoolService { private final Map configurations = new HashMap<>(); - private final Locked latch = new Locked(); + private final ReadWriteLock lock = new java.util.concurrent.locks.ReentrantReadWriteLock(); private final CollectionConfiguration defaultConfig; @@ -81,10 +84,10 @@ public CollectionConfigurationManager(final BrokerPool brokerPool) { } @Override - public void startSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + public void startSystem(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { try { - checkCreateCollection(systemBroker, CONFIG_COLLECTION_URI); - checkCreateCollection(systemBroker, ROOT_COLLECTION_CONFIG_URI); + checkCreateCollection(systemBroker, transaction, CONFIG_COLLECTION_URI); + checkCreateCollection(systemBroker, transaction, ROOT_COLLECTION_CONFIG_URI); loadAllConfigurations(systemBroker); defaultConfig.setIndexConfiguration(systemBroker.getIndexConfiguration()); } catch(final EXistException | CollectionConfigurationException | PermissionDeniedException | LockException e) { @@ -173,27 +176,20 @@ public void testConfiguration(DBBroker broker, String config) throws CollectionC } public List getCustomIndexSpecs(final String customIndexId) { - - return latch.read(new Callable>() { - - @Override - public List call() throws Exception { - - List configs = new ArrayList(10); - - for (CollectionConfiguration config: configurations.values()) { - IndexSpec spec = config.getIndexConfiguration(); - if (spec != null) { - Object customConfig = spec.getCustomIndexSpec(customIndexId); - if (customConfig != null) { - configs.add(customConfig); - } + try(final ManagedLock readLock = ManagedLock.acquire(lock, LockMode.READ_LOCK)) { + final List configs = new ArrayList<>(10); + for (final CollectionConfiguration config: configurations.values()) { + final IndexSpec spec = 
config.getIndexConfiguration(); + if (spec != null) { + final Object customConfig = spec.getCustomIndexSpec(customIndexId); + if (customConfig != null) { + configs.add(customConfig); } } - - return configs; } - }); + + return configs; + } } /** @@ -214,9 +210,9 @@ protected CollectionConfiguration getConfiguration(final Collection collection) * way to the end of the path, checking each collection on the way. I * modified it to start at the collection path and work its way back to * the root, stopping at the first config file it finds. This should be - * more efficient, and fit more appropriately will the XmldbURI api + * more efficient, and fit more appropriately with the XmldbURI api */ - return latch.read(() -> { + try(final ManagedLock readLock = ManagedLock.acquire(lock, LockMode.READ_LOCK)) { while(!path.equals(COLLECTION_CONFIG_PATH)) { final CollectionConfiguration conf = configurations.get(path); if (conf != null) { @@ -227,7 +223,7 @@ protected CollectionConfiguration getConfiguration(final Collection collection) // use default configuration return defaultConfig; - }); + } } protected void loadAllConfigurations(DBBroker broker) throws CollectionConfigurationException, PermissionDeniedException, LockException { @@ -272,16 +268,10 @@ protected void loadConfiguration(DBBroker broker, final Collection configCollect + e.getMessage(); LOG.error(message); } - - latch.write(new Callable() { - @Override - public Void call() throws Exception { - - configurations.put(new CollectionURI(configCollection.getURI().getRawCollectionPath()), conf); - return null; - } - }); + try(final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + configurations.put(new CollectionURI(configCollection.getURI().getRawCollectionPath()), conf); + } // Allow just one configuration document per collection // TODO : do not break if a system property allows several ones -pb @@ -294,34 +284,25 @@ public Void call() throws Exception { public CollectionConfiguration 
getOrCreateCollectionConfiguration(final DBBroker broker, Collection collection) { final CollectionURI path = new CollectionURI(COLLECTION_CONFIG_PATH); path.append(collection.getURI().getRawCollectionPath()); - - CollectionConfiguration conf = latch.read(new Callable() { - @Override - public CollectionConfiguration call() { - return configurations.get(path); - } - }); - if (conf != null) { - return conf; + try(final ManagedLock readLock = ManagedLock.acquire(lock, LockMode.READ_LOCK)) { + final CollectionConfiguration conf = configurations.get(path); + if(conf != null) { + return conf; + } } - - return latch.write(new Callable() { - @Override - public CollectionConfiguration call() { - - CollectionConfiguration conf = configurations.get(path); - - if (conf != null) { - return conf; - } - - conf = new CollectionConfiguration(broker.getBrokerPool()); - configurations.put(path, conf); + try(final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + CollectionConfiguration conf = configurations.get(path); + if (conf != null) { return conf; } - }); + + conf = new CollectionConfiguration(broker.getBrokerPool()); + configurations.put(path, conf); + + return conf; + } } /** @@ -336,34 +317,29 @@ public void invalidateAll(final XmldbURI collectionPath) { if (!collectionPath.startsWith(CONFIG_COLLECTION_URI)) { return; } - - latch.write(new Callable() { - @Override - public Void call() { - if (LOG.isDebugEnabled()) { - LOG.debug("Invalidating collection " + collectionPath + " and subcollections"); - } + try(final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { - CollectionURI uri = new CollectionURI(collectionPath.getRawCollectionPath()); + if (LOG.isDebugEnabled()) { + LOG.debug("Invalidating collection " + collectionPath + " and subcollections"); + } - configurations.remove(uri); + CollectionURI uri = new CollectionURI(collectionPath.getRawCollectionPath()); - String str = uri.toString(); + configurations.remove(uri); - 
Iterator> it = configurations.entrySet().iterator(); + String str = uri.toString(); - while (it.hasNext()) { - Entry entry = it.next(); + Iterator> it = configurations.entrySet().iterator(); - if (entry.getKey().toString().startsWith(str)) { - it.remove(); - } - } + while (it.hasNext()) { + Entry entry = it.next(); - return null; + if (entry.getKey().toString().startsWith(str)) { + it.remove(); + } } - }); + } } /** @@ -378,18 +354,13 @@ public void invalidate(final XmldbURI collectionPath, final BrokerPool pool) { return; } - latch.write(new Callable() { - @Override - public Void call() { - - if (LOG.isDebugEnabled()) { - LOG.debug("Invalidating collection " + collectionPath); - } - - configurations.remove(new CollectionURI(collectionPath.getRawCollectionPath())); - return null; + try(final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + if (LOG.isDebugEnabled()) { + LOG.debug("Invalidating collection " + collectionPath); } - }); + + configurations.remove(new CollectionURI(collectionPath.getRawCollectionPath())); + } } /** @@ -400,17 +371,15 @@ public Void call() { * @param uri * @throws EXistException */ - private void checkCreateCollection(DBBroker broker, XmldbURI uri) throws EXistException { - final TransactionManager transact = broker.getDatabase().getTransactionManager(); - try(final Txn txn = transact.beginTransaction()) { + private void checkCreateCollection(final DBBroker broker, final Txn txn, final XmldbURI uri) throws EXistException { + try { Collection collection = broker.getCollection(uri); if (collection == null) { collection = broker.getOrCreateCollection(txn, uri); SanityCheck.THROW_ASSERT(collection != null); broker.saveCollection(txn, collection); } - transact.commit(txn); - } catch (final Exception e) { + } catch(final TriggerException | PermissionDeniedException | IOException e) { throw new EXistException("Failed to initialize '" + uri + "' : " + e.getMessage()); } } @@ -435,9 +404,8 @@ public void 
checkRootCollectionConfig(DBBroker broker) throws EXistException, Pe final TransactionManager transact = broker.getDatabase().getTransactionManager(); try(final Txn txn = transact.beginTransaction()) { - Collection collection = null; - try { - collection = broker.openCollection(XmldbURI.ROOT_COLLECTION_URI, LockMode.READ_LOCK); + + try(final Collection collection = broker.openCollection(XmldbURI.ROOT_COLLECTION_URI, LockMode.READ_LOCK)) { if (collection == null) { transact.abort(txn); throw new EXistException("collection " + XmldbURI.ROOT_COLLECTION_URI + " not found!"); @@ -451,15 +419,14 @@ public void checkRootCollectionConfig(DBBroker broker) throws EXistException, Pe return; } } - } finally { - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } + + // Configure the root collection + addConfiguration(txn, broker, collection, configuration); + LOG.info("Configured '" + collection.getURI() + "'"); } - // Configure the root collection - addConfiguration(txn, broker, collection, configuration); + transact.commit(txn); - LOG.info("Configured '" + collection.getURI() + "'"); + } catch (final CollectionConfigurationException e) { throw new EXistException(e.getMessage()); } diff --git a/src/org/exist/collections/IndexInfo.java b/src/org/exist/collections/IndexInfo.java index 752968002ae..a9230b1e0cc 100644 --- a/src/org/exist/collections/IndexInfo.java +++ b/src/org/exist/collections/IndexInfo.java @@ -25,6 +25,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.security.Permission; import org.exist.storage.DBBroker; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.txn.Txn; import org.exist.util.serializer.DOMStreamer; import org.exist.xmldb.XmldbURI; @@ -38,22 +39,25 @@ /** * Internal class used to track required fields between calls to * {@link org.exist.collections.Collection#validateXMLResource(Txn, DBBroker, XmldbURI, InputSource)} and - * {@link org.exist.collections.Collection#store(Txn, DBBroker, 
IndexInfo, InputSource, boolean)}. + * {@link org.exist.collections.Collection#store(Txn, DBBroker, IndexInfo, InputSource)}. * * @author wolf */ public class IndexInfo { - private Indexer indexer; + private final Indexer indexer; + private final CollectionConfiguration collectionConfig; + private final ManagedDocumentLock documentLock; + private DOMStreamer streamer; private DocumentTriggers docTriggers; private boolean creating = false; private Permission oldDocPermissions = null; - private CollectionConfiguration collectionConfig; - IndexInfo(Indexer indexer, CollectionConfiguration collectionConfig) { + IndexInfo(final Indexer indexer, final CollectionConfiguration collectionConfig, final ManagedDocumentLock documentLock) { this.indexer = indexer; this.collectionConfig = collectionConfig; + this.documentLock = documentLock; } public Indexer getIndexer() { @@ -61,7 +65,7 @@ public Indexer getIndexer() { } //XXX: make protected - public void setTriggers(DocumentTriggers triggersVisitor) { + public void setTriggers(final DocumentTriggers triggersVisitor) { this.docTriggers = triggersVisitor; } @@ -70,7 +74,7 @@ public DocumentTriggers getTriggers() { return docTriggers; } - public void setCreating(boolean creating) { + public void setCreating(final boolean creating) { this.creating = creating; } @@ -86,7 +90,7 @@ public Permission getOldDocPermissions() { return oldDocPermissions; } - void setReader(XMLReader reader, EntityResolver entityResolver) throws SAXException { + void setReader(final XMLReader reader, final EntityResolver entityResolver) throws SAXException { if(entityResolver != null) { reader.setEntityResolver(entityResolver); } @@ -97,7 +101,7 @@ void setReader(XMLReader reader, EntityResolver entityResolver) throws SAXExcept reader.setErrorHandler(indexer); } - void setDOMStreamer(DOMStreamer streamer) { + void setDOMStreamer(final DOMStreamer streamer) { this.streamer = streamer; if (docTriggers == null) { streamer.setContentHandler(indexer); @@ 
-119,4 +123,8 @@ public DocumentImpl getDocument() { public CollectionConfiguration getCollectionConfig() { return collectionConfig; } + + public ManagedDocumentLock getDocumentLock() { + return documentLock; + } } diff --git a/src/org/exist/collections/LockedCollection.java b/src/org/exist/collections/LockedCollection.java new file mode 100644 index 00000000000..550507e9b11 --- /dev/null +++ b/src/org/exist/collections/LockedCollection.java @@ -0,0 +1,467 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.collections; + +import org.exist.EXistException; +import org.exist.collections.triggers.TriggerException; +import org.exist.dom.QName; +import org.exist.dom.persistent.*; +import org.exist.security.Permission; +import org.exist.security.PermissionDeniedException; +import org.exist.security.Subject; +import org.exist.storage.*; +import org.exist.storage.io.VariableByteOutputStream; +import org.exist.storage.lock.Lock; +import org.exist.storage.lock.LockedDocumentMap; +import org.exist.storage.lock.ManagedCollectionLock; +import org.exist.storage.txn.Txn; +import org.exist.util.LockException; +import org.exist.xmldb.XmldbURI; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; +import org.xml.sax.XMLReader; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Observable; + +/** + * Just a Delegate to a {@link Collection} which allows us to also hold a lock + * lease which is released when {@link Collection#close()} is called. This + * allows us to use ARM (Automatic Resource Management) e.g. 
try-with-resources + * with eXist Collection objects + * + * @author Adam Retter + */ +public class LockedCollection implements Collection { + private final ManagedCollectionLock managedCollectionLock; + private final Collection collection; + + public LockedCollection(final ManagedCollectionLock managedCollectionLock, final Collection collection) { + this.managedCollectionLock = managedCollectionLock; + this.collection = collection; + } + + //TODO(AR) if we decide that LockedCollection shouldn't implement Collection (but instead become a Tuple2) then drop this method + /** + * Given a Collection implementation returns the non-lock wrapped + * implementation. + * + * @param collection a Collection object. + * @return the actual non-lock wrapped Collection object. + */ + public static Collection unwrapLocked(final Collection collection) { + //TODO(AR) do we want to stay with LockedCollection implements Collection design {@link LockedCollection#getCollection()} + if(collection instanceof LockedCollection) { + // unwrap the locked collection + return ((LockedCollection)collection).collection; + } else { + return collection; + } + } + + /** + * Unlocks and Closes the Collection + */ + @Override + public void close() { + collection.close(); + managedCollectionLock.close(); + } + + @Override + public int getId() { + return collection.getId(); + } + + @Override + public void setId(final int id) { + collection.setId(id); + } + + @Override + public void setAddress(final long address) { + collection.setAddress(address); + } + + @Override + public long getAddress() { + return collection.getAddress(); + } + + @Override + public XmldbURI getURI() { + return collection.getURI(); + } + + @Override + public void setPath(final XmldbURI path) { + collection.setPath(path); + } + + @Override + public CollectionMetadata getMetadata() { + return collection.getMetadata(); + } + + @Override + public Permission getPermissions() { + return collection.getPermissions(); + } + + @Override + 
public Permission getPermissionsNoLock() { + return collection.getPermissionsNoLock(); + } + + @Override + public void setPermissions(final DBBroker broker, final int mode) throws LockException, PermissionDeniedException { + collection.setPermissions(broker, mode); + } + + @Override + @Deprecated + public long getCreationTime() { + return collection.getCreationTime(); + } + + @Override + public void setCreationTime(final long timestamp) { + collection.setCreationTime(timestamp); + } + + @Override + public CollectionConfiguration getConfiguration(final DBBroker broker) { + return collection.getConfiguration(broker); + } + + @Override + public IndexSpec getIndexConfiguration(final DBBroker broker) { + return collection.getIndexConfiguration(broker); + } + + @Override + public GeneralRangeIndexSpec getIndexByPathConfiguration(final DBBroker broker, final NodePath nodePath) { + return collection.getIndexByPathConfiguration(broker, nodePath); + } + + @Override + public QNameRangeIndexSpec getIndexByQNameConfiguration(final DBBroker broker, final QName nodeName) { + return collection.getIndexByQNameConfiguration(broker, nodeName); + } + + @Override + public boolean isTempCollection() { + return collection.isTempCollection(); + } + + @Override + public boolean isTriggersEnabled() { + return collection.isTriggersEnabled(); + } + + @Override + public void setTriggersEnabled(final boolean enabled) { + collection.setTriggersEnabled(enabled); + } + + @Override + public int getMemorySize() { + return collection.getMemorySize(); + } + + @Override + public int getMemorySizeNoLock() { + return collection.getMemorySizeNoLock(); + } + + @Override + public XmldbURI getParentURI() { + return collection.getParentURI(); + } + + @Override + public void setReader(final XMLReader reader) { + collection.setReader(reader); + } + + @Override + public boolean isEmpty(final DBBroker broker) throws PermissionDeniedException { + return collection.isEmpty(broker); + } + + @Override + public int 
getDocumentCount(final DBBroker broker) throws PermissionDeniedException { + return collection.getDocumentCount(broker); + } + + @Override + @Deprecated + public int getDocumentCountNoLock(final DBBroker broker) throws PermissionDeniedException { + return collection.getDocumentCountNoLock(broker); + } + + @Override + public int getChildCollectionCount(final DBBroker broker) throws PermissionDeniedException { + return collection.getChildCollectionCount(broker); + } + + @Override + public boolean hasDocument(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException { + return collection.hasDocument(broker, name); + } + + @Override + public boolean hasChildCollection(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, LockException { + return collection.hasChildCollection(broker, name); + } + + @Override + @Deprecated + public boolean hasChildCollectionNoLock(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException { + return collection.hasChildCollectionNoLock(broker, name); + } + + @Override + public void addCollection(final DBBroker broker, final Collection child) throws PermissionDeniedException, LockException { + collection.addCollection(broker, child); + } + + @Override + public List getEntries(final DBBroker broker) throws PermissionDeniedException, LockException, IOException { + return collection.getEntries(broker); + } + + @Override + public CollectionEntry getChildCollectionEntry(final DBBroker broker, final String name) throws PermissionDeniedException, LockException, IOException { + return collection.getChildCollectionEntry(broker, name); + } + + @Override + public CollectionEntry getResourceEntry(final DBBroker broker, final String name) throws PermissionDeniedException, LockException, IOException { + return collection.getResourceEntry(broker, name); + } + + @Override + public void update(final DBBroker broker, final Collection child) throws PermissionDeniedException, LockException { + 
collection.update(broker, child); + } + + @Override + public void addDocument(final Txn transaction, final DBBroker broker, final DocumentImpl doc) throws PermissionDeniedException, LockException { + collection.addDocument(transaction, broker, doc); + } + + @Override + public void unlinkDocument(final DBBroker broker, final DocumentImpl doc) throws PermissionDeniedException, LockException { + collection.unlinkDocument(broker, doc); + } + + @Override + public Iterator collectionIterator(final DBBroker broker) throws PermissionDeniedException, LockException { + return collection.collectionIterator(broker); + } + + @Override + @Deprecated + public Iterator collectionIteratorNoLock(final DBBroker broker) throws PermissionDeniedException { + return collection.collectionIteratorNoLock(broker); + } + + @Override + public Iterator iterator(final DBBroker broker) throws PermissionDeniedException, LockException { + return collection.iterator(broker); + } + + @Override + @Deprecated + public Iterator iteratorNoLock(final DBBroker broker) throws PermissionDeniedException { + return collection.iteratorNoLock(broker); + } + + @Override + public List getDescendants(final DBBroker broker, final Subject user) throws PermissionDeniedException { + return collection.getDescendants(broker, user); + } + + @Override + public MutableDocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, final boolean recursive) throws PermissionDeniedException, LockException { + return collection.allDocs(broker, docs, recursive); + } + + @Override + public MutableDocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, final boolean recursive, final LockedDocumentMap lockMap) throws PermissionDeniedException, LockException { + return collection.allDocs(broker, docs, recursive, lockMap); + } + + @Override + public DocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, final boolean recursive, final LockedDocumentMap lockMap, final Lock.LockMode 
lockType) throws LockException, PermissionDeniedException { + return collection.allDocs(broker, docs, recursive, lockMap, lockType); + } + + @Override + public DocumentSet getDocuments(final DBBroker broker, final MutableDocumentSet docs) throws PermissionDeniedException, LockException { + return collection.getDocuments(broker, docs); + } + + @Override + @Deprecated + public DocumentSet getDocumentsNoLock(final DBBroker broker, final MutableDocumentSet docs) { + return collection.getDocumentsNoLock(broker, docs); + } + + @Override + public DocumentSet getDocuments(final DBBroker broker, final MutableDocumentSet docs, final LockedDocumentMap lockMap, final Lock.LockMode lockType) throws LockException, PermissionDeniedException { + return collection.getDocuments(broker, docs, lockMap, lockType); + } + + @Override + public DocumentImpl getDocument(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException { + return collection.getDocument(broker, name); + } + + @Override + @Deprecated + public LockedDocument getDocumentWithLock(final DBBroker broker, final XmldbURI name) throws LockException, PermissionDeniedException { + return collection.getDocumentWithLock(broker, name); + } + + @Override + public LockedDocument getDocumentWithLock(final DBBroker broker, final XmldbURI name, final Lock.LockMode lockMode) throws LockException, PermissionDeniedException { + return collection.getDocumentWithLock(broker, name, lockMode); + } + + @Override + @Deprecated + public DocumentImpl getDocumentNoLock(final DBBroker broker, final String rawPath) throws PermissionDeniedException { + return collection.getDocumentNoLock(broker, rawPath); + } + + @Override + public void removeCollection(final DBBroker broker, final XmldbURI name) throws LockException, PermissionDeniedException { + collection.removeCollection(broker, name); + } + + @Override + public void removeResource(final Txn transaction, final DBBroker broker, final DocumentImpl doc) throws 
PermissionDeniedException, LockException, IOException, TriggerException { + collection.removeResource(transaction, broker, doc); + } + + @Override + public void removeXMLResource(final Txn transaction, final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, TriggerException, LockException, IOException { + collection.removeXMLResource(transaction, broker, name); + } + + @Override + public void removeBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, LockException, TriggerException { + collection.removeBinaryResource(transaction, broker, name); + } + + @Override + public void removeBinaryResource(final Txn transaction, final DBBroker broker, final DocumentImpl doc) throws PermissionDeniedException, LockException, TriggerException { + collection.removeBinaryResource(transaction, broker, doc); + } + + @Override + public IndexInfo validateXMLResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final InputSource source) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException { + return collection.validateXMLResource(transaction, broker, name, source); + } + + @Override + public IndexInfo validateXMLResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final String data) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException { + return collection.validateXMLResource(transaction, broker, name, data); + } + + @Override + public IndexInfo validateXMLResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final Node node) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException { + return collection.validateXMLResource(transaction, broker, name, node); + } + + @Override + public void store(final Txn transaction, final DBBroker broker, final IndexInfo info, final 
InputSource source) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException { + collection.store(transaction, broker, info, source); + } + + @Override + public void store(final Txn transaction, final DBBroker broker, final IndexInfo info, final String data) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException { + collection.store(transaction, broker, info, data); + } + + @Override + public void store(final Txn transaction, final DBBroker broker, final IndexInfo info, final Node node) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException { + collection.store(transaction, broker, info, node); + } + + @Override + public BinaryDocument validateBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, LockException, TriggerException, IOException { + return collection.validateBinaryResource(transaction, broker, name); + } + + @Override + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final InputStream is, final String mimeType, final long size, final Date created, final Date modified) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return collection.addBinaryResource(transaction, broker, name, is, mimeType, size, created, modified); + } + + @Override + @Deprecated + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final byte[] data, final String mimeType) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return collection.addBinaryResource(transaction, broker, name, data, mimeType); + } + + @Override + @Deprecated + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final byte[] data, final String mimeType, final Date 
created, final Date modified) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return collection.addBinaryResource(transaction, broker, name, data, mimeType, created, modified); + } + + @Override + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name, final InputStream is, final String mimeType, final long size) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return collection.addBinaryResource(transaction, broker, name, is, mimeType, size); + } + + @Override + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final BinaryDocument blob, final InputStream is, final String mimeType, final long size, final Date created, final Date modified) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return collection.addBinaryResource(transaction, broker, blob, is, mimeType, size, created, modified); + } + + @Override + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final BinaryDocument blob, final InputStream is, final String mimeType, final long size, final Date created, final Date modified, final DBBroker.PreserveType preserve) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return collection.addBinaryResource(transaction, broker, blob, is, mimeType, size, created, modified, preserve); + } + + @Override + public Observable getObservable() { + return collection.getObservable(); + } + + @Override + public void serialize(final VariableByteOutputStream outputStream) throws IOException, LockException { + collection.serialize(outputStream); + } + + @Override + public int compareTo(final Collection o) { + return collection.compareTo(o); + } +} diff --git a/src/org/exist/collections/ManagedLocks.java b/src/org/exist/collections/ManagedLocks.java new file mode 
100644 index 00000000000..b72e16ee61a --- /dev/null +++ b/src/org/exist/collections/ManagedLocks.java @@ -0,0 +1,117 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.collections; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.exist.storage.lock.ManagedLock; + +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +/** + * Simple container for a List of ManagedLocks + * which allows ARM (Automatic Resource Management) + * via {@link AutoCloseable} + * + * Locks will be released in the reverse order to which they + * are provided + */ +public class ManagedLocks implements Iterable, AutoCloseable { + + private final static Logger LOG = LogManager.getLogger(ManagedLocks.class); + + private final List managedLocks; + + /** + * @param managedLocks A list of ManagedLocks which should + * be in the same order that they were acquired + */ + public ManagedLocks(final java.util.List managedLocks) { + this.managedLocks = managedLocks; + } + + /** + * @param managedLocks An array / var-args of ManagedLocks + * which should be in the same order that they were acquired + */ 
+ public ManagedLocks(final T... managedLocks) { + this.managedLocks = Arrays.asList(managedLocks); + } + + @Override + public Iterator iterator() { + return new ManagedLockIterator(); + } + + private class ManagedLockIterator implements Iterator { + private final Iterator iterator = managedLocks.iterator(); + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public T next() { + return iterator.next(); + } + } + + @Override + public void close() { + closeAll(managedLocks); + } + + /** + * Closes all the locks in the provided list. + * + * Locks will be closed in reverse (acquisition) order. + * + * If a {@link RuntimeException} occurs when closing + * any lock. The first exception will be recorded and + * lock closing will continue. After all locks are closed + * the first encountered exception is rethrown. + * + * @param The type of the ManagedLocks + * @param managedLocks A list of locks, the list should be ordered in lock acquisition order. + */ + public static void closeAll(final List managedLocks) { + RuntimeException firstException = null; + + for(int i = managedLocks.size() - 1; i >= 0; i--) { + final T managedLock = managedLocks.get(i); + try { + managedLock.close(); + } catch (final RuntimeException e) { + LOG.error(e); + if(firstException == null) { + firstException = e; + } + } + } + + if(firstException != null) { + throw firstException; + } + } +} diff --git a/src/org/exist/collections/MutableCollection.java b/src/org/exist/collections/MutableCollection.java index 35488930141..510ab219dac 100644 --- a/src/org/exist/collections/MutableCollection.java +++ b/src/org/exist/collections/MutableCollection.java @@ -23,12 +23,8 @@ import net.jcip.annotations.GuardedBy; import net.jcip.annotations.NotThreadSafe; import org.exist.dom.QName; -import org.exist.dom.persistent.DocumentMetadata; -import org.exist.dom.persistent.DocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import 
org.exist.dom.persistent.MutableDocumentSet; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DefaultDocumentSet; +import org.exist.dom.persistent.*; + import java.io.*; import java.util.*; import java.util.function.Consumer; @@ -53,6 +49,7 @@ import org.exist.storage.io.VariableByteOutputStream; import org.exist.storage.lock.*; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.Lock.LockType; import org.exist.storage.sync.Sync; import org.exist.storage.txn.Txn; import org.exist.util.Configuration; @@ -61,7 +58,6 @@ import org.exist.util.SyntaxException; import org.exist.util.XMLReaderObjectFactory; import org.exist.util.XMLReaderObjectFactory.VALIDATION_SETTING; -import org.exist.util.hashtable.ObjectHashSet; import org.exist.util.io.FastByteArrayInputStream; import org.exist.util.serializer.DOMStreamer; import org.exist.xmldb.XmldbURI; @@ -71,6 +67,10 @@ import org.xml.sax.SAXException; import org.xml.sax.XMLReader; +import javax.annotation.Nullable; + +import static org.exist.storage.lock.Lock.LockMode.*; + /** * An implementation of {@link Collection} that allows * mutations to be made to the Collection object @@ -80,6 +80,8 @@ @NotThreadSafe public class MutableCollection implements Collection { + //TODO(AR) ultimately remove all locking internally from this class and externalise it to the callers, all methods are then internally lock free, and then finally remove `NonLocking` methods + private static final Logger LOG = LogManager.getLogger(Collection.class); private static final int SHALLOW_SIZE = 550; private static final int DOCUMENT_SIZE = 450; @@ -87,22 +89,37 @@ public class MutableCollection implements Collection { private int collectionId = UNKNOWN_COLLECTION_ID; private XmldbURI path; - private final Lock lock; - @GuardedBy("lock") private final Map documents = new TreeMap<>(); - @GuardedBy("lock") private ObjectHashSet subCollections = new ObjectHashSet<>(19); + private final LockManager 
lockManager; + + /* + * LinkedHashSet is used to ensure a consistent iteration order of child Documents. + * The `insertion-order` of a LinkedHashSet means we effectively order by Document creation + * time, i.e. oldest first. + * This ordering ensures that adding new Documents does not affect the existing order of Documents, + * in this manner locks acquired when iterating are always acquired and released in the same order + * which gives us deadlock avoidance for Document iteration. + */ + @GuardedBy("LockManager") private final LinkedHashMap documents = new LinkedHashMap<>(); + + /* + * LinkedHashSet is used to ensure a consistent iteration order of sub-Collections. + * The `insertion-order` of a LinkedHashSet means we effectively order by sub-Collection creation + * time, i.e. oldest first. + * This ordering ensures that adding new sub-Collections does not affect the existing order of sub-Collections, + * in this manner locks acquired when iterating are always acquired and released in the same order + * which gives us deadlock avoidance for sub-Collection iteration. 
+ */ + @GuardedBy("LockManager") private LinkedHashSet subCollections = new LinkedHashSet<>(); + private long address = BFile.UNKNOWN_ADDRESS; // Storage address of the collection in the BFile private long created = 0; private boolean triggersEnabled = true; private XMLReader userReader; - private boolean isTempCollection; + private volatile boolean isTempCollection; private Permission permissions; private final CollectionMetadata collectionMetadata; private final ObservaleMutableCollection observable = new ObservaleMutableCollection(); - // fields required by the collections cache - private int refCount; - private int timestamp; - /** * Constructs a Collection Object (not yet persisted) * @@ -110,11 +127,22 @@ public class MutableCollection implements Collection { * @param path The path of the Collection */ public MutableCollection(final DBBroker broker, final XmldbURI path) { - //The permissions assigned to this collection - permissions = PermissionFactory.getDefaultCollectionPermission(broker.getBrokerPool().getSecurityManager()); + this(broker, path, null, -1); + } + /** + * Constructs a Collection Object (not yet persisted) + * + * @param broker The database broker + * @param path The path of the Collection + * @param permissions The permissions of the collection, or null for the default + * @param created The created time of the collection, or -1 for now + */ + public MutableCollection(final DBBroker broker, @EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) final XmldbURI path, @Nullable final Permission permissions, final long created) { setPath(path); - lock = new ReentrantReadWriteLock(path); + this.permissions = permissions != null ? permissions : PermissionFactory.getDefaultCollectionPermission(broker.getBrokerPool().getSecurityManager()); + this.created = created > 0 ? 
created : System.currentTimeMillis(); + this.lockManager = broker.getBrokerPool().getLockManager(); this.collectionMetadata = new CollectionMetadata(this); } @@ -129,8 +157,9 @@ public MutableCollection(final DBBroker broker, final XmldbURI path) { * * @return The Collection Object */ - public static MutableCollection load(final DBBroker broker, final XmldbURI path, final VariableByteInput inputStream) - throws PermissionDeniedException, IOException, LockException { + public static MutableCollection load(final DBBroker broker, + @EnsureLocked(mode=LockMode.WRITE_LOCK, type=LockType.COLLECTION) final XmldbURI path, + final VariableByteInput inputStream) throws PermissionDeniedException, IOException, LockException { final MutableCollection collection = new MutableCollection(broker, path); collection.deserialize(broker, inputStream); return collection; @@ -145,97 +174,111 @@ public boolean isTriggersEnabled() { public final void setPath(XmldbURI path) { path = path.toCollectionPathURI(); //TODO : see if the URI resolves against DBBroker.TEMP_COLLECTION - isTempCollection = path.getRawCollectionPath().equals(XmldbURI.TEMP_COLLECTION); - this.path=path; + this.isTempCollection = path.getRawCollectionPath().equals(XmldbURI.TEMP_COLLECTION); + this.path = path; } @Override - public Lock getLock() { - return lock; - } - - @Override - public void addCollection(final DBBroker broker, final Collection child, final boolean isNew) + public void addCollection(final DBBroker broker, final Collection child) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission to write to Collection denied for " + this.getURI()); - } - - final XmldbURI childName = child.getURI().lastSegment(); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), 
Permission.WRITE)) { + throw new PermissionDeniedException("Permission to write to Collection denied for " + this.getURI()); + } - getLock().acquire(LockMode.WRITE_LOCK); - try { + final XmldbURI childName = child.getURI().lastSegment(); if (!subCollections.contains(childName)) { subCollections.add(childName); } - } finally { - getLock().release(LockMode.WRITE_LOCK); } + } - if(isNew) { - child.setCreationTime(System.currentTimeMillis()); - } + private static Iterator stableIterator(final LinkedHashSet set) { + return new LinkedHashSet<>(set).iterator(); + } + + private static Iterator stableDocumentIterator(final LinkedHashMap documents) { + return new ArrayList<>(documents.values()).iterator(); + } + + private static Iterator stableDocumentNameIterator(final LinkedHashMap documents) { + return new ArrayList<>(documents.keySet()).iterator(); } @Override - public List getEntries(final DBBroker broker) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } + public List getEntries(final DBBroker broker) throws PermissionDeniedException, LockException, IOException { final List list = new ArrayList<>(); final Iterator subCollectionIterator; - getLock().acquire(LockMode.READ_LOCK); - try { - subCollectionIterator = subCollections.stableIterator(); - } finally { - getLock().release(LockMode.READ_LOCK); + final Iterator documentIterator; + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); + } + + subCollectionIterator = stableIterator(subCollections); + documentIterator = stableDocumentIterator(documents); } + while(subCollectionIterator.hasNext()) { final XmldbURI subCollectionURI = 
subCollectionIterator.next(); - final CollectionEntry entry = new SubCollectionEntry(broker.getBrokerPool().getSecurityManager(), - subCollectionURI); - entry.readMetadata(broker); - list.add(entry); + try(final ManagedCollectionLock subCollectionLock = lockManager.acquireCollectionReadLock(subCollectionURI)) { + final CollectionEntry entry = new SubCollectionEntry(broker.getBrokerPool().getSecurityManager(), + subCollectionURI); + entry.readMetadata(broker); + list.add(entry); + } } - for(final DocumentImpl document : copyOfDocs()) { - final CollectionEntry entry = new DocumentEntry(document); - entry.readMetadata(broker); - list.add(entry); + while(documentIterator.hasNext()) { + final DocumentImpl document = documentIterator.next(); + try(final ManagedDocumentLock documentLock = lockManager.acquireDocumentReadLock(document.getURI())) { + final DocumentEntry entry = new DocumentEntry(document); + entry.readMetadata(broker); + list.add(entry); + } } return list; } @Override public CollectionEntry getChildCollectionEntry(final DBBroker broker, final String name) - throws PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } + throws PermissionDeniedException, LockException, IOException { final XmldbURI subCollectionURI = getURI().append(name); - final CollectionEntry entry = new SubCollectionEntry(broker.getBrokerPool().getSecurityManager(), - subCollectionURI); - entry.readMetadata(broker); + final CollectionEntry entry; + try(final ManagedCollectionLock subCollectionLock = lockManager.acquireCollectionReadLock(subCollectionURI)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); + } + + entry = new SubCollectionEntry(broker.getBrokerPool().getSecurityManager(), + subCollectionURI); + 
entry.readMetadata(broker); + } return entry; } @Override public CollectionEntry getResourceEntry(final DBBroker broker, final String name) - throws PermissionDeniedException, LockException { + throws PermissionDeniedException, LockException, IOException { if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { throw new PermissionDeniedException("Permission denied to read collection: " + path); } final CollectionEntry entry; - getLock().acquire(LockMode.READ_LOCK); - try { - entry = new DocumentEntry(documents.get(name)); - } finally { - getLock().release(LockMode.READ_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + final DocumentImpl doc = documents.get(name); + + try(final ManagedDocumentLock docLock = lockManager.acquireDocumentReadLock(doc.getURI())) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + + entry = new DocumentEntry(doc); + entry.readMetadata(broker); + } } - entry.readMetadata(broker); return entry; } @@ -244,20 +287,12 @@ public boolean isTempCollection() { return isTempCollection; } - @Override - public void release(final LockMode mode) { - getLock().release(mode); - } - @Override public void update(final DBBroker broker, final Collection child) throws PermissionDeniedException, LockException { final XmldbURI childName = child.getURI().lastSegment(); - getLock().acquire(LockMode.WRITE_LOCK); - try { + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { subCollections.remove(childName); subCollections.add(childName); - } finally { - getLock().release(LockMode.WRITE_LOCK); } } @@ -273,64 +308,69 @@ public void addDocument(final Txn transaction, final DBBroker broker, final Docu */ private void addDocument(final Txn transaction, final DBBroker broker, final DocumentImpl doc, final DocumentImpl oldDoc) throws PermissionDeniedException, LockException { - 
if(oldDoc == null) { - - /* create */ - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission to write to Collection denied for " + this.getURI()); - } - } else { - - /* update-replace */ - if(!oldDoc.getPermissions().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission to write to overwrite document: " + oldDoc.getURI()); - } - } - - if (doc.getDocId() == DocumentImpl.UNKNOWN_DOCUMENT_ID) { - try { - doc.setDocId(broker.getNextResourceId(transaction, this)); - } catch(final EXistException e) { - LOG.error("Collection error " + e.getMessage(), e); - // TODO : re-raise the exception ? -pb - return; + + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + + if (oldDoc == null) { + + /* create */ + if (!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Permission to write to Collection denied for " + this.getURI()); + } + } else { + /* update-replace */ + try (final ManagedDocumentLock oldDocLock = lockManager.acquireDocumentReadLock(oldDoc.getURI())) { + if (!oldDoc.getPermissions().validate(broker.getCurrentSubject(), Permission.WRITE)) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + + throw new PermissionDeniedException("Permission to write to overwrite document: " + oldDoc.getURI()); + } + } } - } - getLock().acquire(LockMode.WRITE_LOCK); - try { - documents.put(doc.getFileURI().getRawCollectionPath(), doc); - } finally { - getLock().release(LockMode.WRITE_LOCK); + try (final ManagedDocumentLock docLock = lockManager.acquireDocumentWriteLock(doc.getURI())) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + + if (doc.getDocId() == DocumentImpl.UNKNOWN_DOCUMENT_ID) { + try { + 
doc.setDocId(broker.getNextResourceId(transaction)); + } catch (final EXistException e) { + LOG.error("Collection error " + e.getMessage(), e); + + // TODO : re-raise the exception ? -pb + return; + } + } + + documents.put(doc.getFileURI().getRawCollectionPath(), doc); + } } } @Override public void unlinkDocument(final DBBroker broker, final DocumentImpl doc) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to remove document from collection: " + path); - } + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Permission denied to remove document from collection: " + path); + } - getLock().acquire(LockMode.WRITE_LOCK); - try { documents.remove(doc.getFileURI().getRawCollectionPath()); - } finally { - getLock().release(LockMode.WRITE_LOCK); } } @Override public Iterator collectionIterator(final DBBroker broker) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission to list sub-collections denied on " + this.getURI()); - } + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission to list sub-collections denied on " + this.getURI()); + } - getLock().acquire(LockMode.READ_LOCK); - try { - return subCollections.stableIterator(); - } finally { - getLock().release(LockMode.READ_LOCK); + return stableIterator(subCollections); } } @@ -339,25 +379,20 @@ public Iterator collectionIteratorNoLock(final DBBroker broker) throws 
if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { throw new PermissionDeniedException("Permission to list sub-collections denied on " + this.getURI()); } - return subCollections.stableIterator(); + return stableIterator(subCollections); } @Override public List getDescendants(final DBBroker broker, final Subject user) throws PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission to list sub-collections denied on " + this.getURI()); - } - - final ArrayList collectionList; + final ArrayList collectionList = new ArrayList<>(); final Iterator i; - try { - getLock().acquire(LockMode.READ_LOCK); - try { - collectionList = new ArrayList<>(subCollections.size()); - i = subCollections.stableIterator(); - } finally { - getLock().release(LockMode.READ_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission to list sub-collections denied on " + this.getURI()); } + + collectionList.ensureCapacity(subCollections.size()); + i = stableIterator(subCollections); } catch(final LockException e) { LOG.error(e.getMessage(), e); return Collections.emptyList(); @@ -367,7 +402,7 @@ public List getDescendants(final DBBroker broker, final Subject user final XmldbURI childName = i.next(); //TODO : resolve URI ! 
final Collection child = broker.getCollection(path.append(childName)); - if(getPermissionsNoLock().validate(user, Permission.READ)) { + if(getPermissions().validate(user, Permission.READ)) { collectionList.add(child); if(child.getChildCollectionCount(broker) > 0) { //Recursive call @@ -381,37 +416,30 @@ public List getDescendants(final DBBroker broker, final Subject user @Override public MutableDocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, final boolean recursive) - throws PermissionDeniedException { + throws PermissionDeniedException, LockException { return allDocs(broker, docs, recursive, null); } @Override public MutableDocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, final boolean recursive, - final LockedDocumentMap lockMap) throws PermissionDeniedException { - List subColls = null; - if(getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - try { - getLock().acquire(LockMode.READ_LOCK); - try { - //Add all docs in this collection to the returned set - getDocuments(broker, docs); - //Get a list of sub-collection URIs. We will process them - //after unlocking this collection. otherwise we may deadlock ourselves - subColls = subCollections.keys(); - } finally { - getLock().release(LockMode.READ_LOCK); - } - } catch(final LockException e) { - LOG.error(e.getMessage(), e); + final LockedDocumentMap lockMap) throws PermissionDeniedException, LockException { + XmldbURI[] subColls = null; + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if (getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + //Add all docs in this collection to the returned set + getDocuments(broker, docs); + //Get a list of sub-collection URIs. We will process them + //after unlocking this collection. 
otherwise we may deadlock ourselves + subColls = subCollections.stream() + .map(path::appendInternal) + .toArray(XmldbURI[]::new); } } if(recursive && subColls != null) { // process the child collections - for(final XmldbURI childName : subColls) { - //TODO : resolve URI ! - try { - final Collection child = broker.openCollection(path.appendInternal(childName), LockMode.NO_LOCK); + for(final XmldbURI subCol : subColls) { + try(final Collection child = broker.openCollection(subCol, NO_LOCK)) { // NOTE: the recursive call below to child.addDocs will take a lock //A collection may have been removed in the meantime, so check first if(child != null) { child.allDocs(broker, docs, recursive, lockMap); @@ -427,35 +455,26 @@ public MutableDocumentSet allDocs(final DBBroker broker, final MutableDocumentSe @Override public DocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, final boolean recursive, - final LockedDocumentMap lockMap, LockMode lockType) throws LockException, PermissionDeniedException { - + final LockedDocumentMap lockMap, final LockMode lockType) throws LockException, PermissionDeniedException { XmldbURI uris[] = null; - if(getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - getLock().acquire(LockMode.READ_LOCK); - try { + + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if (getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { //Add all documents in this collection to the returned set getDocuments(broker, docs, lockMap, lockType); //Get a list of sub-collection URIs. We will process them //after unlocking this collection. 
//otherwise we may deadlock ourselves - final List subColls = subCollections.keys(); - if (subColls != null) { - uris = new XmldbURI[subColls.size()]; - for(int i = 0; i < subColls.size(); i++) { - uris[i] = path.appendInternal(subColls.get(i)); - } - } - } finally { - getLock().release(LockMode.READ_LOCK); + uris = subCollections.stream() + .map(path::appendInternal) + .toArray(XmldbURI[]::new); } } if(recursive && uris != null) { //Process the child collections - for (XmldbURI uri : uris) { - //TODO : resolve URI ! - try { - final Collection child = broker.openCollection(uri, LockMode.NO_LOCK); + for (final XmldbURI uri : uris) { + try(final Collection child = broker.openCollection(uri, NO_LOCK)) { // NOTE: the recursive call below to child.addDocs will take a lock // a collection may have been removed in the meantime, so check first if (child != null) { child.allDocs(broker, docs, recursive, lockMap, lockType); @@ -473,124 +492,92 @@ public DocumentSet allDocs(final DBBroker broker, final MutableDocumentSet docs, public DocumentSet getDocuments(final DBBroker broker, final MutableDocumentSet docs) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } - - try { - getLock().acquire(LockMode.READ_LOCK); + final Iterator documentIterator; + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); + } + documentIterator = stableDocumentIterator(documents); docs.addCollection(this); - addDocumentsToSet(broker, docs); - } finally { - getLock().release(LockMode.READ_LOCK); } + addDocumentsToSet(broker, documentIterator, docs); return docs; } @Override public DocumentSet 
getDocumentsNoLock(final DBBroker broker, final MutableDocumentSet docs) { + final Iterator documentIterator = stableDocumentIterator(documents); docs.addCollection(this); - addDocumentsToSet(broker, docs); + addDocumentsToSet(broker, documentIterator, docs); return docs; } @Override public DocumentSet getDocuments(final DBBroker broker, final MutableDocumentSet docs, - final LockedDocumentMap lockMap, LockMode lockType) throws LockException, PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } - - try { - getLock().acquire(LockMode.READ_LOCK); + final LockedDocumentMap lockMap, final LockMode lockType) throws LockException, PermissionDeniedException { + final Iterator documentIterator; + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); + } + documentIterator = stableDocumentIterator(documents); docs.addCollection(this); - addDocumentsToSet(broker, docs, lockMap, lockType); - } finally { - getLock().release(LockMode.READ_LOCK); } - return docs; - } + addDocumentsToSet(broker, documentIterator, docs, lockMap, lockType); - /** - * Gets a stable list of the document objects - * from {@link #documents} - * - * @return A stable list of the document objects - */ - private List copyOfDocs() throws LockException { - getLock().acquire(LockMode.READ_LOCK); - try { - return new ArrayList<>(documents.values()); - } finally { - getLock().release(LockMode.READ_LOCK); - } - } - - /** - * Gets a stable set of the the document object - * names from {@link #documents} - * - * @return A stable set of the document names - */ - private Set copyOfDocNames() throws LockException { - getLock().acquire(LockMode.READ_LOCK); 
- try { - return new TreeSet<>(documents.keySet()); - } finally { - getLock().release(LockMode.READ_LOCK); - } + return docs; } - private void addDocumentsToSet(final DBBroker broker, final MutableDocumentSet docs, final LockedDocumentMap lockMap, LockMode lockType) throws LockException { - for(final DocumentImpl doc : copyOfDocs()) { - if(doc.getPermissions().validate(broker.getCurrentSubject(), Permission.WRITE)) { - doc.getUpdateLock().acquire(lockType); + private void addDocumentsToSet(final DBBroker broker, final Iterator documentIterator, final MutableDocumentSet docs, final LockedDocumentMap lockMap, final LockMode lockType) throws LockException { + final int requiredPermission; + if(lockType == LockMode.READ_LOCK) { + requiredPermission = Permission.READ; + } else { + requiredPermission = Permission.WRITE; + } + + while(documentIterator.hasNext()) { + final DocumentImpl doc = documentIterator.next(); + if(doc.getPermissions().validate(broker.getCurrentSubject(), requiredPermission)) { + final ManagedDocumentLock documentLock; + switch(lockType) { + case WRITE_LOCK: + documentLock = lockManager.acquireDocumentWriteLock(doc.getURI()); + break; + + case READ_LOCK: + documentLock = lockManager.acquireDocumentReadLock(doc.getURI()); + break; + + case NO_LOCK: + default: + documentLock = ManagedDocumentLock.notLocked(doc.getURI()); + break; + } docs.add(doc); - lockMap.add(doc); + lockMap.add(new LockedDocument(documentLock, doc)); } } } - private void addDocumentsToSet(final DBBroker broker, final MutableDocumentSet docs) { - try { - for (final DocumentImpl doc : copyOfDocs()) { - if (doc.getPermissions().validate(broker.getCurrentSubject(), Permission.READ)) { + private void addDocumentsToSet(final DBBroker broker, final Iterator documentIterator, final MutableDocumentSet docs) { + while (documentIterator.hasNext()) { + final DocumentImpl doc = documentIterator.next(); + try(final ManagedDocumentLock lockedDoc = 
lockManager.acquireDocumentReadLock(doc.getURI())) { + if(doc.getPermissions().validate(broker.getCurrentSubject(), Permission.READ)) { docs.add(doc); } + } catch (final LockException e) { + LOG.error(e.getMessage(), e); } - } catch(final LockException e) { - LOG.error(e); - } - } - - @Override - public boolean allowUnload() { - if (getURI().startsWith(CollectionConfigurationManager.ROOT_COLLECTION_CONFIG_URI)) { - return false; - } - - try { - getLock().acquire(LockMode.READ_LOCK); - try { - for (final DocumentImpl doc : documents.values()) { - if (doc.isLockedForWrite()) { - return false; - } - } - return true; - } finally { - getLock().release(LockMode.READ_LOCK); - } - } catch(final LockException e) { - LOG.error(e); - return false; } } @Override - public int compareTo(final Collection other) { + @EnsureContainerLocked(mode=READ_LOCK) + public int compareTo(@EnsureLocked(mode=READ_LOCK) final Collection other) { Objects.requireNonNull(other); if(collectionId == other.getId()) { @@ -603,7 +590,7 @@ public int compareTo(final Collection other) { } @Override - public boolean equals(final Object obj) { + @EnsureContainerLocked(mode=READ_LOCK) public boolean equals(@Nullable @EnsureLocked(mode=READ_LOCK) final Object obj) { if(obj == null || !(obj instanceof Collection)) { return false; } @@ -613,13 +600,8 @@ public boolean equals(final Object obj) { @Override public int getMemorySize() { - try { - getLock().acquire(LockMode.READ_LOCK); - try { - return SHALLOW_SIZE + documents.size() * DOCUMENT_SIZE; - } finally { - getLock().release(LockMode.READ_LOCK); - } + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + return SHALLOW_SIZE + (documents.size() * DOCUMENT_SIZE); } catch(final LockException e) { LOG.error(e); return -1; @@ -627,18 +609,18 @@ public int getMemorySize() { } @Override - public int getChildCollectionCount(final DBBroker broker) throws PermissionDeniedException { - 
if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } + public int getMemorySizeNoLock() { + return SHALLOW_SIZE + (documents.size() * DOCUMENT_SIZE); + } - try { - getLock().acquire(LockMode.READ_LOCK); - try { - return subCollections.size(); - } finally { - getLock().release(LockMode.READ_LOCK); + @Override + public int getChildCollectionCount(final DBBroker broker) throws PermissionDeniedException { + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); } + + return subCollections.size(); } catch(final LockException e) { LOG.error(e.getMessage(), e); return 0; @@ -647,17 +629,12 @@ public int getChildCollectionCount(final DBBroker broker) throws PermissionDenie @Override public boolean isEmpty(final DBBroker broker) throws PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } - - try { - getLock().acquire(LockMode.READ_LOCK); - try { - return documents.isEmpty() && subCollections.isEmpty(); - } finally { - getLock().release(LockMode.READ_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); } + + return documents.isEmpty() && subCollections.isEmpty(); } catch(final LockException e) { LOG.error(e.getMessage(), e); return false; @@ -666,21 +643,25 @@ public boolean isEmpty(final DBBroker broker) throws PermissionDeniedException { @Override public DocumentImpl 
getDocument(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException { - try { - getLock().acquire(LockMode.READ_LOCK); - try { + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + + try(final ManagedDocumentLock docLock = lockManager.acquireDocumentReadLock(getURI().append(name))) { final DocumentImpl doc = documents.get(name.getRawCollectionPath()); + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + if (doc != null) { if (!doc.getPermissions().validate(broker.getCurrentSubject(), Permission.READ)) { throw new PermissionDeniedException("Permission denied to read document: " + name.toString()); } } else { - LOG.debug("Document " + name + " not found!"); + if(LOG.isDebugEnabled()) { + LOG.debug("Document " + name + " not found!"); + } } return doc; - } finally { - getLock().release(LockMode.READ_LOCK); } } catch(final LockException e) { LOG.error(e.getMessage(), e); @@ -689,24 +670,51 @@ public DocumentImpl getDocument(final DBBroker broker, final XmldbURI name) thro } @Override - public DocumentImpl getDocumentWithLock(final DBBroker broker, final XmldbURI name) throws LockException, PermissionDeniedException { - return getDocumentWithLock(broker, name, LockMode.READ_LOCK); + public LockedDocument getDocumentWithLock(final DBBroker broker, final XmldbURI name) throws LockException, PermissionDeniedException { + return getDocumentWithLock(broker, name, READ_LOCK); } @Override - public DocumentImpl getDocumentWithLock(final DBBroker broker, final XmldbURI name, final LockMode lockMode) throws LockException, PermissionDeniedException { - getLock().acquire(LockMode.READ_LOCK); - try { + public LockedDocument getDocumentWithLock(final DBBroker broker, final XmldbURI name, final LockMode lockMode) throws LockException, PermissionDeniedException { + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { 
+ + // lock the document + final ManagedDocumentLock documentLock; + final Runnable unlockFn; // we unlock on error, or if there is no Collection + switch (lockMode) { + case WRITE_LOCK: + documentLock = lockManager.acquireDocumentWriteLock(getURI().append(name)); + unlockFn = documentLock::close; + break; + + case READ_LOCK: + documentLock = lockManager.acquireDocumentReadLock(getURI().append(name)); + unlockFn = documentLock::close; + break; + + case NO_LOCK: + default: + documentLock = ManagedDocumentLock.notLocked(getURI().append(name)); + unlockFn = () -> {}; + } + + final DocumentImpl doc = documents.get(name.getRawCollectionPath()); - if(doc != null) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + + if(doc == null) { + unlockFn.run(); + return null; + } else { if(!doc.getPermissions().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read document: " + name.toString()); + unlockFn.run(); + throw new PermissionDeniedException("Permission denied to read + document: " + name.toString()); } - doc.getUpdateLock().acquire(lockMode); + + return new LockedDocument(documentLock, doc); } - return doc; - } finally { - getLock().release(LockMode.READ_LOCK); } } @@ -721,33 +729,14 @@ public DocumentImpl getDocumentNoLock(final DBBroker broker, final String rawPat return doc; } - @Override - public void releaseDocument(final DocumentImpl doc) { - if(doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } - } - - @Override - public void releaseDocument(final DocumentImpl doc, final LockMode mode) { - if(doc != null) { - doc.getUpdateLock().release(mode); - } - } - @Override public int getDocumentCount(final DBBroker broker) throws PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + 
path); - } - - try { - getLock().acquire(LockMode.READ_LOCK); - try { - return documents.size(); - } finally { - getLock().release(LockMode.READ_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); } + + return documents.size(); } catch(final LockException e) { LOG.warn(e.getMessage(), e); return -1; @@ -769,7 +758,7 @@ public int getId() { @Override public XmldbURI getURI() { - return path; + return path; //TODO(AR) we should have a READ_LOCK on here! but we can't as we need the URI to get the READ_LOCK... urgh! } /** @@ -788,14 +777,11 @@ public XmldbURI getParentURI() { @Override final public Permission getPermissions() { - try { - getLock().acquire(LockMode.READ_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { return permissions; } catch(final LockException e) { LOG.error(e.getMessage(), e); return permissions; - } finally { - getLock().release(LockMode.READ_LOCK); } } @@ -811,35 +797,27 @@ public CollectionMetadata getMetadata() { @Override public boolean hasDocument(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } - - try { - getLock().acquire(LockMode.READ_LOCK); - try { - return documents.containsKey(name.getRawCollectionPath()); - } finally { - getLock().release(LockMode.READ_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); } + + return 
documents.containsKey(name.getRawCollectionPath()); } catch(final LockException e) { LOG.warn(e.getMessage(), e); - //TODO : ouch ! Should we return at any price ? Xithout even logging ? -pb + //TODO : ouch ! Should we return at any price ? Without even logging ? -pb return documents.containsKey(name.getRawCollectionPath()); } } @Override public boolean hasChildCollection(final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); + } - getLock().acquire(LockMode.READ_LOCK); - try { return subCollections.contains(name); - } finally { - getLock().release(LockMode.READ_LOCK); } } @@ -854,10 +832,6 @@ public boolean hasChildCollectionNoLock(final DBBroker broker, final XmldbURI na @Override public Iterator iterator(final DBBroker broker) throws PermissionDeniedException, LockException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } - return getDocuments(broker, new DefaultDocumentSet()).getDocumentIterator(); } @@ -884,13 +858,12 @@ public void serialize(final VariableByteOutputStream outputStream) throws IOExce final int size; final Iterator i; - getLock().acquire(LockMode.READ_LOCK); - try { + //TODO(AR) should we READ_LOCK the Collection to stop it being modified concurrently? 
see NativeBroker#saveCollection line 1801 - already has WRITE_LOCK ;-) +// try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { size = subCollections.size(); - i = subCollections.stableIterator(); - } finally { - getLock().release(LockMode.READ_LOCK); - } +// i = subCollections.stableIterator(); + i = subCollections.iterator(); +// } outputStream.writeInt(size); while(i.hasNext()) { @@ -900,7 +873,12 @@ public void serialize(final VariableByteOutputStream outputStream) throws IOExce permissions.write(outputStream); outputStream.writeLong(created); } - + + @Override + public void close() { + //no-op + } + /** * Read collection contents from the stream * @@ -915,11 +893,12 @@ private void deserialize(final DBBroker broker, final VariableByteInput istream) if (collectionId < 0) { throw new IOException("Internal error reading collection: invalid collection id"); } + final int collLen = istream.readInt(); - getLock().acquire(LockMode.WRITE_LOCK); - try { - subCollections = new ObjectHashSet<>(collLen == 0 ? 19 : collLen); //TODO(AR) why is this number 19? + //TODO(AR) should we WRITE_LOCK the Collection to stop it being loaded from disk concurrently? see NativeBroker#openCollection line 1030 - already has READ_LOCK ;-) +// try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path, false)) { + subCollections = new LinkedHashSet<>(collLen == 0 ? 
16 : collLen); for (int i = 0; i < collLen; i++) { subCollections.add(XmldbURI.create(istream.readUTF())); } @@ -952,23 +931,18 @@ public int getId() { return col.getId(); } }); - } finally { - getLock().release(LockMode.WRITE_LOCK); - } +// } } @Override public void removeCollection(final DBBroker broker, final XmldbURI name) throws LockException, PermissionDeniedException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to read collection: " + path); - } + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Permission denied to read collection: " + path); + } - getLock().acquire(LockMode.WRITE_LOCK); - try { subCollections.remove(name); - } finally { - getLock().release(LockMode.WRITE_LOCK); } } @@ -987,130 +961,127 @@ public void removeResource(final Txn transaction, final DBBroker broker, final D } @Override - public void removeXMLResource(final Txn transaction, final DBBroker broker, final XmldbURI name) +public void removeXMLResource(final Txn transaction, final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, TriggerException, LockException, IOException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to write collection: " + path); - } - - DocumentImpl doc = null; - final BrokerPool db = broker.getBrokerPool(); db.getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_XML, name); - getLock().acquire(LockMode.WRITE_LOCK); - - try { - doc = documents.get(name.getRawCollectionPath()); - - if (doc == null) { - return; //TODO should throw an exception!!! 
Otherwise we dont know if the document was removed + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Permission denied to write collection: " + path); } - - doc.getUpdateLock().acquire(LockMode.WRITE_LOCK); - - boolean useTriggers = isTriggersEnabled(); - if (CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(name)) { - // we remove a collection.xconf configuration file: tell the configuration manager to - // reload the configuration. - useTriggers = false; - final CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager(); - if (confMgr != null) { - confMgr.invalidate(getURI(), broker.getBrokerPool()); + + final XmldbURI docUri = XmldbURI.create(name.getRawCollectionPath()); + try(final ManagedDocumentLock docUpdateLock = lockManager.acquireDocumentWriteLock(docUri)) { + + final DocumentImpl doc = documents.get(docUri); + + if (doc == null) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + + return; //TODO should throw an exception!!! Otherwise we don't know if the document was removed } + + try { + boolean useTriggers = isTriggersEnabled(); + if (CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(name)) { + // we remove a collection.xconf configuration file: tell the configuration manager to + // reload the configuration. + useTriggers = false; + final CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager(); + if (confMgr != null) { + confMgr.invalidate(getURI(), broker.getBrokerPool()); + } + } + + final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, null, this, useTriggers ?
getConfiguration(broker) : null); + + trigger.beforeDeleteDocument(broker, transaction, doc); + + broker.removeXMLResource(transaction, doc); + documents.remove(name.getRawCollectionPath()); + + trigger.afterDeleteDocument(broker, transaction, getURI().append(name)); + + broker.getBrokerPool().getNotificationService().notifyUpdate(doc, UpdateListener.REMOVE); + + } finally { + broker.getBrokerPool().getProcessMonitor().endJob(); + } + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); } - - DocumentTriggers trigger = new DocumentTriggers(broker, null, this, useTriggers ? getConfiguration(broker) : null); - - trigger.beforeDeleteDocument(broker, transaction, doc); - - broker.removeXMLResource(transaction, doc); - documents.remove(name.getRawCollectionPath()); - - trigger.afterDeleteDocument(broker, transaction, getURI().append(name)); - - broker.getBrokerPool().getNotificationService().notifyUpdate(doc, UpdateListener.REMOVE); - } finally { - broker.getBrokerPool().getProcessMonitor().endJob(); - if(doc != null) { - doc.getUpdateLock().release(LockMode.WRITE_LOCK); - } - getLock().release(LockMode.WRITE_LOCK); } } @Override public void removeBinaryResource(final Txn transaction, final DBBroker broker, final XmldbURI name) throws PermissionDeniedException, LockException, TriggerException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to write collection: " + path); - } - - try { - getLock().acquire(LockMode.READ_LOCK); - final DocumentImpl doc = getDocument(broker, name); - - if(doc.isLockedForWrite()) { - throw new PermissionDeniedException("Document " + doc.getFileURI() + " is locked for write"); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new 
PermissionDeniedException("Permission denied to write collection: " + path); + } + + try(final ManagedDocumentLock docLock = lockManager.acquireDocumentWriteLock(path.append(name))) { + final DocumentImpl doc = getDocument(broker, name); + removeBinaryResource(transaction, broker, doc); + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); } - - removeBinaryResource(transaction, broker, doc); - } finally { - getLock().release(LockMode.READ_LOCK); } } @Override public void removeBinaryResource(final Txn transaction, final DBBroker broker, final DocumentImpl doc) throws PermissionDeniedException, LockException, TriggerException { - if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to write collection: " + path); - } - + if(doc == null) { return; //TODO should throw an exception!!! Otherwise we dont know if the document was removed } broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_BINARY, doc.getFileURI()); - getLock().acquire(LockMode.WRITE_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + if(!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Permission denied to write collection: " + path); + } - try { - - if(doc.getResourceType() != DocumentImpl.BINARY_FILE) { + if (doc.getResourceType() != DocumentImpl.BINARY_FILE) { throw new PermissionDeniedException("document " + doc.getFileURI() + " is not a binary object"); } - - if(doc.isLockedForWrite()) { - throw new PermissionDeniedException("Document " + doc.getFileURI() + " is locked for write"); - } - - doc.getUpdateLock().acquire(LockMode.WRITE_LOCK); - - DocumentTriggers trigger = new DocumentTriggers(broker, null, this, isTriggersEnabled() ? 
getConfiguration(broker) : null); - trigger.beforeDeleteDocument(broker, transaction, doc); + try(final ManagedDocumentLock docUpdateLock = lockManager.acquireDocumentWriteLock(doc.getURI())) { + try { + final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, null, this, isTriggersEnabled() ? getConfiguration(broker) : null); + + trigger.beforeDeleteDocument(broker, transaction, doc); + + final IndexController indexController = broker.getIndexController(); + final StreamListener listener = indexController.getStreamListener(doc, StreamListener.ReindexMode.REMOVE_BINARY); + try { + indexController.startIndexDocument(transaction, listener); + + try { + broker.removeBinaryResource(transaction, (BinaryDocument) doc); + } catch (final IOException ex) { + throw new PermissionDeniedException("Cannot delete file: " + doc.getURI().toString() + ": " + ex.getMessage(), ex); + } + documents.remove(doc.getFileURI().getRawCollectionPath()); + } finally { + indexController.endIndexDocument(transaction, listener); + } - final IndexController indexController = broker.getIndexController(); - final StreamListener listener = indexController.getStreamListener(doc, StreamListener.ReindexMode.REMOVE_BINARY); - try { - indexController.startIndexDocument(transaction, listener); + trigger.afterDeleteDocument(broker, transaction, doc.getURI()); - try { - broker.removeBinaryResource(transaction, (BinaryDocument) doc); - } catch (final IOException ex) { - throw new PermissionDeniedException("Cannot delete file: " + doc.getURI().toString() + ": " + ex.getMessage(), ex); + } finally { + broker.getBrokerPool().getProcessMonitor().endJob(); } - documents.remove(doc.getFileURI().getRawCollectionPath()); - } finally { - indexController.endIndexDocument(transaction, listener); - } - trigger.afterDeleteDocument(broker, transaction, doc.getURI()); - - } finally { - broker.getBrokerPool().getProcessMonitor().endJob(); - doc.getUpdateLock().release(LockMode.WRITE_LOCK); - 
getLock().release(LockMode.WRITE_LOCK); + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); + } } } @@ -1236,7 +1207,7 @@ private void storeXMLInternal(final Txn transaction, final DBBroker broker, fina } //Sanity check - if(!document.getUpdateLock().isLockedForWrite()) { + if(!lockManager.isDocumentLockedForWrite(document.getURI())) { LOG.warn("document is not locked for write !"); } @@ -1249,7 +1220,7 @@ private void storeXMLInternal(final Txn transaction, final DBBroker broker, fina LOG.debug("document stored."); } finally { //This lock has been acquired in validateXMLResourceInternal() - document.getUpdateLock().release(LockMode.WRITE_LOCK); + info.getDocumentLock().close(); broker.getBrokerPool().getProcessMonitor().endJob(); } broker.deleteObservers(); @@ -1340,7 +1311,7 @@ private InputSource closeShieldInputSource(final InputSource source) { return protectedInputSource; } - + private static class CloseShieldReader extends Reader { private final Reader reader; public CloseShieldReader(final Reader reader) { @@ -1391,105 +1362,96 @@ private IndexInfo validateXMLResourceInternal(final Txn transaction, final DBBro if (db.isReadOnly()) { throw new IOException("Database is read-only"); } - + + ManagedDocumentLock documentWriteLock = null; DocumentImpl oldDoc = null; - boolean oldDocLocked = false; db.getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, name); - getLock().acquire(LockMode.WRITE_LOCK); try { - DocumentImpl document = new DocumentImpl((BrokerPool) db, this, name); - oldDoc = documents.get(name.getRawCollectionPath()); - checkPermissionsForAddDocument(broker, oldDoc); - checkCollectionConflict(name); - manageDocumentInformation(oldDoc, document); - final Indexer indexer = new Indexer(broker, transaction); - - final IndexInfo info = new IndexInfo(indexer, config); - info.setCreating(oldDoc == null); - info.setOldDocPermissions(oldDoc != null ? 
oldDoc.getPermissions() : null); - indexer.setDocument(document, config); - addObserversToIndexer(broker, indexer); - indexer.setValidating(true); - - final DocumentTriggers trigger = new DocumentTriggers(broker, indexer, this, isTriggersEnabled() ? config : null); - trigger.setValidating(true); - - info.setTriggers(trigger); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { - if(oldDoc == null) { - trigger.beforeCreateDocument(broker, transaction, getURI().append(name)); - } else { - trigger.beforeUpdateDocument(broker, transaction, oldDoc); - } + // acquire the WRITE_LOCK on the Document, this lock is released in storeXMLInternal via IndexInfo + documentWriteLock = lockManager.acquireDocumentWriteLock(getURI().append(name)); - if (LOG.isDebugEnabled()) { - LOG.debug("Scanning document " + getURI().append(name)); - } - - validator.accept(info); - // new document is valid: remove old document - if (oldDoc != null) { - if (LOG.isDebugEnabled()) { - LOG.debug("removing old document " + oldDoc.getFileURI()); - } - updateModificationTime(document); - oldDoc.getUpdateLock().acquire(LockMode.WRITE_LOCK); - oldDocLocked = true; + DocumentImpl document = new DocumentImpl((BrokerPool) db, this, name); + oldDoc = documents.get(name.getRawCollectionPath()); + checkPermissionsForAddDocument(broker, oldDoc); + checkCollectionConflict(name); + manageDocumentInformation(oldDoc, document); + final Indexer indexer = new Indexer(broker, transaction); - /** - * Matching {@link StreamListener#endReplaceDocument(Txn)} call is in - * {@link #storeXMLInternal(Txn, DBBroker, IndexInfo, Consumer2E)} - */ - final StreamListener listener = broker.getIndexController().getStreamListener(document, StreamListener.ReindexMode.REPLACE_DOCUMENT); - listener.startReplaceDocument(transaction); - - if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) { - //TODO : use a more elaborated method ? No triggers... 
- broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc); - documents.remove(oldDoc.getFileURI().getRawCollectionPath()); - //This lock is released in storeXMLInternal() - //TODO : check that we go until there to ensure the lock is released -// if (transaction != null) -// transaction.acquireLock(document.getUpdateLock(), LockMode.WRITE_LOCK); -// else - document.getUpdateLock().acquire(LockMode.WRITE_LOCK); - - document.setDocId(broker.getNextResourceId(transaction, this)); - addDocument(transaction, broker, document); + final IndexInfo info = new IndexInfo(indexer, config, documentWriteLock); + info.setCreating(oldDoc == null); + info.setOldDocPermissions(oldDoc != null ? oldDoc.getPermissions() : null); + indexer.setDocument(document, config); + addObserversToIndexer(broker, indexer); + indexer.setValidating(true); + + final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, indexer, this, isTriggersEnabled() ? config : null); + trigger.setValidating(true); + + info.setTriggers(trigger); + + if (oldDoc == null) { + trigger.beforeCreateDocument(broker, transaction, getURI().append(name)); } else { - //TODO : use a more elaborated method ? No triggers... 
- broker.removeXMLResource(transaction, oldDoc, false); - oldDoc.copyOf(document, true); - indexer.setDocumentObject(oldDoc); - //old has become new at this point - document = oldDoc; - oldDocLocked = false; + trigger.beforeUpdateDocument(broker, transaction, oldDoc); } + if (LOG.isDebugEnabled()) { - LOG.debug("removed old document " + oldDoc.getFileURI()); + LOG.debug("Scanning document " + getURI().append(name)); } - } else { - //This lock is released in storeXMLInternal() - //TODO : check that we go until there to ensure the lock is released -// if (transaction != null) -// transaction.acquireLock(document.getUpdateLock(), LockMode.WRITE_LOCK); -// else - document.getUpdateLock().acquire(LockMode.WRITE_LOCK); - - document.setDocId(broker.getNextResourceId(transaction, this)); - addDocument(transaction, broker, document); - } - - trigger.setValidating(false); - return info; - } finally { - if (oldDoc != null && oldDocLocked) { - oldDoc.getUpdateLock().release(LockMode.WRITE_LOCK); + validator.accept(info); + // new document is valid: remove old document + if (oldDoc != null) { + if (LOG.isDebugEnabled()) { + LOG.debug("removing old document " + oldDoc.getFileURI()); + } + updateModificationTime(document); + + /** + * Matching {@link StreamListener#endReplaceDocument(Txn)} call is in + * {@link #storeXMLInternal(Txn, DBBroker, IndexInfo, Consumer2E)} + */ + final StreamListener listener = broker.getIndexController().getStreamListener(document, StreamListener.ReindexMode.REPLACE_DOCUMENT); + listener.startReplaceDocument(transaction); + + if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) { + //TODO : use a more elaborated method ? No triggers... + broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc); + documents.remove(oldDoc.getFileURI().getRawCollectionPath()); + + document.setDocId(broker.getNextResourceId(transaction)); + addDocument(transaction, broker, document); + } else { + //TODO : use a more elaborated method ? No triggers... 
+ broker.removeXMLResource(transaction, oldDoc, false); + oldDoc.copyOf(broker, document, oldDoc); + indexer.setDocumentObject(oldDoc); + //old has become new at this point + document = oldDoc; + } + + if (LOG.isDebugEnabled()) { + LOG.debug("removed old document " + oldDoc.getFileURI()); + } + } else { + document.setDocId(broker.getNextResourceId(transaction)); + addDocument(transaction, broker, document); + } + + trigger.setValidating(false); + + return info; } - getLock().release(LockMode.WRITE_LOCK); - + } catch(final EXistException | PermissionDeniedException | SAXException | LockException | IOException e) { + // if there is an exception and we hold the document WRITE_LOCK we must release it + if(documentWriteLock != null) { + documentWriteLock.close(); + } + throw e; + } finally { db.getProcessMonitor().endJob(); } } @@ -1645,21 +1607,33 @@ public BinaryDocument validateBinaryResource(final Txn transaction, final DBBrok @Override public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final BinaryDocument blob, final InputStream is, final String mimeType, final long size, final Date created, final Date modified) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { + return addBinaryResource(transaction, broker, blob, is, mimeType, size, created, modified, DBBroker.PreserveType.DEFAULT); + } + + @Override + public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker broker, final BinaryDocument blob, final InputStream is, final String mimeType, final long size, final Date created, final Date modified, final DBBroker.PreserveType preserve) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException { final Database db = broker.getBrokerPool(); if (db.isReadOnly()) { throw new IOException("Database is read-only"); } + final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, null, this, isTriggersEnabled() ? 
getConfiguration(broker) : null); final XmldbURI docUri = blob.getFileURI(); - //TODO : move later, i.e. after the collection lock is acquired ? - final DocumentImpl oldDoc = getDocument(broker, docUri); - final DocumentTriggers trigger = new DocumentTriggers(broker, null, this, isTriggersEnabled() ? getConfiguration(broker) : null); - getLock().acquire(LockMode.WRITE_LOCK); - try { + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path); + final ManagedDocumentLock docLock = lockManager.acquireDocumentWriteLock(blob.getURI())) { + //TODO : move later, i.e. after the collection lock is acquired ? + final DocumentImpl oldDoc = getDocument(broker, docUri); + db.getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_BINARY, docUri); checkPermissionsForAddDocument(broker, oldDoc); checkCollectionConflict(docUri); - manageDocumentInformation(oldDoc, blob); + //manageDocumentInformation(oldDoc, blob); + if (!broker.preserveOnCopy(preserve)) { + blob.copyOf(broker, blob, oldDoc); + } + if (blob.getMetadata() == null) { + blob.setMetadata(new DocumentMetadata()); + } final DocumentMetadata metadata = blob.getMetadata(); metadata.setMimeType(mimeType == null ? 
MimeType.BINARY_TYPE.getName() : mimeType); if (created != null) { @@ -1669,7 +1643,7 @@ public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker br metadata.setLastModified(modified.getTime()); } blob.setContentLength(size); - + if (oldDoc == null) { trigger.beforeCreateDocument(broker, transaction, blob.getURI()); } else { @@ -1678,7 +1652,9 @@ public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker br if (oldDoc != null) { LOG.debug("removing old document " + oldDoc.getFileURI()); - updateModificationTime(blob); + if (!broker.preserveOnCopy(preserve)) { + updateModificationTime(blob); + } broker.removeResource(transaction, oldDoc); } @@ -1694,55 +1670,31 @@ public BinaryDocument addBinaryResource(final Txn transaction, final DBBroker br indexController.endIndexDocument(transaction, listener); } - blob.getUpdateLock().acquire(LockMode.READ_LOCK); - } finally { - broker.getBrokerPool().getProcessMonitor().endJob(); - getLock().release(LockMode.WRITE_LOCK); - } - try { + if (oldDoc == null) { trigger.afterCreateDocument(broker, transaction, blob); } else { trigger.afterUpdateDocument(broker, transaction, blob); } - } finally { - blob.getUpdateLock().release(LockMode.READ_LOCK); - } - return blob; - } - @Override - public void setId(final int id) { - this.collectionId = id; - } + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collectionLock.close(); - @Override - public void setPermissions(final int mode) throws LockException, PermissionDeniedException { - try { - getLock().acquire(LockMode.WRITE_LOCK); - permissions.setMode(mode); + return blob; } finally { - getLock().release(LockMode.WRITE_LOCK); + broker.getBrokerPool().getProcessMonitor().endJob(); } } @Override - public void setPermissions(final String mode) throws SyntaxException, LockException, PermissionDeniedException { - try { - getLock().acquire(LockMode.WRITE_LOCK); - permissions.setMode(mode); - } finally { - 
getLock().release(LockMode.WRITE_LOCK); - } + public void setId(final int id) { + this.collectionId = id; } @Override - public void setPermissions(final Permission permissions) throws LockException { - try { - getLock().acquire(LockMode.WRITE_LOCK); - this.permissions = permissions; - } finally { - getLock().release(LockMode.WRITE_LOCK); + public void setPermissions(final DBBroker broker, final int mode) throws LockException, PermissionDeniedException { + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { + PermissionFactory.chmod(broker, this, Optional.of(mode), Optional.empty()); } } @@ -1778,15 +1730,10 @@ public long getCreationTime() { @Override public void setTriggersEnabled(final boolean enabled) { - try { - getLock().acquire(LockMode.WRITE_LOCK); + try(final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) { this.triggersEnabled = enabled; } catch(final LockException e) { - LOG.warn(e.getMessage(), e); - //Ouch ! -pb - this.triggersEnabled = enabled; - } finally { - getLock().release(LockMode.WRITE_LOCK); + LOG.error(e.getMessage(), e); } } @@ -1881,51 +1828,6 @@ public Observable getObservable() { return observable; } - @Override - public long getKey() { - return collectionId; - } - - @Override - public int getReferenceCount() { - return refCount; - } - - @Override - public int incReferenceCount() { - return ++refCount; - } - - @Override - public int decReferenceCount() { - return refCount > 0 ? 
--refCount : 0; - } - - @Override - public void setReferenceCount(final int count) { - refCount = count; - } - - @Override - public void setTimestamp(final int timestamp) { - this.timestamp = timestamp; - } - - @Override - public int getTimestamp() { - return timestamp; - } - - @Override - public boolean sync(final boolean syncJournal) { - return false; - } - - @Override - public boolean isDirty() { - return false; - } - @Override public String toString() { final StringBuilder buf = new StringBuilder(); @@ -1933,9 +1835,14 @@ public String toString() { buf.append("["); try { - for (final Iterator i = copyOfDocNames().iterator(); i.hasNext(); ) { - buf.append(i.next()); - if (i.hasNext()) { + final Iterator documentNameIterator; + try (final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) { + documentNameIterator = documents.keySet().iterator(); + } + + while (documentNameIterator.hasNext()) { + buf.append(documentNameIterator.next()); + if (documentNameIterator.hasNext()) { buf.append(", "); } } diff --git a/src/org/exist/collections/triggers/AbstractTriggerProxy.java b/src/org/exist/collections/triggers/AbstractTriggerProxy.java index 3de85c8a222..57a0302ded6 100644 --- a/src/org/exist/collections/triggers/AbstractTriggerProxy.java +++ b/src/org/exist/collections/triggers/AbstractTriggerProxy.java @@ -24,6 +24,7 @@ import org.exist.collections.Collection; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; /** * @@ -58,10 +59,12 @@ protected Map> getParameters() { } @Override - public T newInstance(final DBBroker broker, final Collection collection) throws TriggerException { + public T newInstance(final DBBroker broker, final Txn transaction, final Collection collection) throws TriggerException { try { final T trigger = getClazz().newInstance(); - trigger.configure(broker, collection, getParameters()); + + trigger.configure(broker, transaction, collection, getParameters()); + return trigger; } catch (final 
InstantiationException | IllegalAccessException ie) { throw new TriggerException("Unable to instantiate Trigger '" + getClazz().getName() + "': " + ie.getMessage(), ie); diff --git a/src/org/exist/collections/triggers/AbstractTriggersVisitor.java b/src/org/exist/collections/triggers/AbstractTriggersVisitor.java index 2b9d16f684f..4323bf3b1ed 100644 --- a/src/org/exist/collections/triggers/AbstractTriggersVisitor.java +++ b/src/org/exist/collections/triggers/AbstractTriggersVisitor.java @@ -24,6 +24,7 @@ import org.exist.collections.Collection; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; /** * @@ -38,7 +39,7 @@ public AbstractTriggersVisitor(List triggers) { } @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { + public void configure(DBBroker broker, Txn Transaction, Collection parent, Map> parameters) throws TriggerException { } public List getTriggers() throws TriggerException { diff --git a/src/org/exist/collections/triggers/CSVExtractingTrigger.java b/src/org/exist/collections/triggers/CSVExtractingTrigger.java index 649838f4dfb..9b0a2f32cd9 100644 --- a/src/org/exist/collections/triggers/CSVExtractingTrigger.java +++ b/src/org/exist/collections/triggers/CSVExtractingTrigger.java @@ -21,13 +21,7 @@ */ package org.exist.collections.triggers; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Stack; +import java.util.*; import javax.xml.XMLConstants; import javax.xml.namespace.QName; import org.exist.collections.Collection; @@ -91,7 +85,7 @@ public class CSVExtractingTrigger extends FilteringTrigger { private String separator; //key is the xpath to extract for, and value is the extractions to make from the value at that path - private Map extractions = new HashMap(); + private Map extractions = new HashMap<>(); //the current node path of the SAX 
stream private NodePath currentNodePath = new NodePath(); @@ -100,9 +94,10 @@ public class CSVExtractingTrigger extends FilteringTrigger { private StringBuilder charactersBuf = new StringBuilder(); //buffer for character data, which will then be parsed to extract csv values + @SuppressWarnings("unchecked") @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { - super.configure(broker, parent, parameters); + public void configure(final DBBroker broker, final Txn transaction, final Collection parent, final Map> parameters) throws TriggerException { + super.configure(broker, transaction, parent, parameters); //get the separator final List separators = (List)parameters.get("separator"); @@ -122,9 +117,9 @@ public void configure(DBBroker broker, Collection parent, Map> p //split out the path and preficate (if present) from the xpath String pathExpr; String attrPredicate = null; - if(xpath.indexOf("[") > -1) { + if(xpath.contains("[")) { pathExpr = xpath.substring(0, xpath.indexOf("[")); - if(xpath.indexOf("[@") > -1) { + if(xpath.contains("[@")) { attrPredicate = xpath.substring(xpath.indexOf("[@")+2, xpath.indexOf("]")); } } else { @@ -154,10 +149,10 @@ public void configure(DBBroker broker, Collection parent, Map> p } @Override - public void startElement(String namespaceURI, String localName, String qname, Attributes attributes) throws SAXException { + public void startElement(final String namespaceURI, final String localName, final String qname, final Attributes attributes) throws SAXException { //skips nested elements or already extracted nodes (i.e. 
during update events) //TODO needs through testing during update phase - if(capture == true) { + if(capture) { capture = false; charactersBuf.delete(0, charactersBuf.length()); } @@ -186,7 +181,7 @@ public void startElement(String namespaceURI, String localName, String qname, At } @Override - public void characters(char[] ch, int start, int length) throws SAXException { + public void characters(final char[] ch, final int start, final int length) throws SAXException { if(capture){ charactersBuf.append(ch, start, length); } else { @@ -195,7 +190,7 @@ public void characters(char[] ch, int start, int length) throws SAXException { } @Override - public void endElement(String namespaceURI, String localName, String qname) throws SAXException { + public void endElement(final String namespaceURI, final String localName, final String qname) throws SAXException { if(capture) { extractCSVValuesToElements(); @@ -245,9 +240,9 @@ private String getEscapedSeparatorForRegExp() { } private static class NodePath { - private Stack pathSegments = new Stack(); + private final Deque pathSegments = new ArrayDeque<>(); - public void add(String namespaceUri, String localName) { + public void add(final String namespaceUri, final String localName) { pathSegments.push(new QName(namespaceUri, localName)); } @@ -263,11 +258,13 @@ public int length() { public String toLocalPath() { final StringBuilder localPath = new StringBuilder(); localPath.append("/"); - for(int i = 0; i < pathSegments.size(); i++) { - localPath.append(pathSegments.get(i).getLocalPart()); + int i = 0; + for (final Iterator it = pathSegments.descendingIterator(); it.hasNext(); ) { + localPath.append(it.next()); if(i + 1 < pathSegments.size()) { localPath.append("/"); } + i++; } return localPath.toString(); @@ -277,7 +274,7 @@ public String toLocalPath() { /*** configuration data classes ***/ private static class Extraction { - private List extractEntries = new ArrayList(); + private final List extractEntries = new 
ArrayList<>(); private String matchAttrName; private String matchAttrValue; @@ -286,7 +283,7 @@ public List getExtractEntries() { return extractEntries; } - public void setMatchAttribute(String attrName, String attrValue) { + public void setMatchAttribute(final String attrName, final String attrValue) { this.matchAttrName = attrName.trim(); this.matchAttrValue = attrValue.replaceAll("\"", "").trim(); } @@ -295,7 +292,7 @@ public boolean mustMatchAttribute() { return(this.matchAttrName != null && this.matchAttrValue != null); } - public boolean matchesAttribute(String attrName, String attrValue) { + public boolean matchesAttribute(final String attrName, final String attrValue) { //if there is no matching then return true if(!mustMatchAttribute()) { @@ -311,7 +308,7 @@ private static class ExtractEntry implements Comparable { private final int index; private final String elementName; - public ExtractEntry(int index, String elementName) { + public ExtractEntry(final int index, final String elementName) { this.index = index; this.elementName = elementName; } @@ -325,7 +322,7 @@ public String getElementName() { } @Override - public int compareTo(ExtractEntry other) { + public int compareTo(final ExtractEntry other) { if(other == null) { return -1; } else { @@ -342,107 +339,119 @@ public int getLength() { } @Override - public String getURI(int index) { + public String getURI(final int index) { return null; } @Override - public String getLocalName(int index) { + public String getLocalName(final int index) { return null; } @Override - public String getQName(int index) { + public String getQName(final int index) { return null; } @Override - public String getType(int index) { + public String getType(final int index) { return null; } @Override - public String getValue(int index) { + public String getValue(final int index) { return null; } @Override - public int getIndex(String uri, String localName) { + public int getIndex(final String uri, final String localName) { return 
-1; } @Override - public int getIndex(String qName) { + public int getIndex(final String qName) { return -1; } @Override - public String getType(String uri, String localName) { + public String getType(final String uri, final String localName) { return null; } @Override - public String getType(String qName) { + public String getType(final String qName) { return null; } @Override - public String getValue(String uri, String localName) { + public String getValue(final String uri, final String localName) { return null; } @Override - public String getValue(String qName) { + public String getValue(final String qName) { return null; } } @Override - public void beforeCreateDocument(DBBroker broker, Txn txn, XmldbURI uri) throws TriggerException { + public void beforeCreateDocument(final DBBroker broker, final Txn txn, final XmldbURI uri) { + //no-op } @Override - public void afterCreateDocument(DBBroker broker, Txn txn, DocumentImpl document) throws TriggerException { + public void afterCreateDocument(final DBBroker broker, final Txn txn, final DocumentImpl document) { + //no-op } @Override - public void beforeUpdateDocument(DBBroker broker, Txn txn, DocumentImpl document) throws TriggerException { + public void beforeUpdateDocument(final DBBroker broker, final Txn txn, final DocumentImpl document) { + //no-op } @Override - public void afterUpdateDocument(DBBroker broker, Txn txn, DocumentImpl document) throws TriggerException { + public void afterUpdateDocument(final DBBroker broker, final Txn txn, final DocumentImpl document) { + //no-op } @Override - public void beforeCopyDocument(DBBroker broker, Txn txn, DocumentImpl document, XmldbURI newUri) throws TriggerException { + public void beforeCopyDocument(final DBBroker broker, final Txn txn, final DocumentImpl document, final XmldbURI newUri) { + //no-op } @Override - public void afterCopyDocument(DBBroker broker, Txn txn, DocumentImpl document, XmldbURI newUri) throws TriggerException { + public void 
afterCopyDocument(final DBBroker broker, final Txn txn, final DocumentImpl document, final XmldbURI newUri) { + //no-op } @Override - public void beforeMoveDocument(DBBroker broker, Txn txn, DocumentImpl document, XmldbURI newUri) throws TriggerException { + public void beforeMoveDocument(final DBBroker broker, final Txn txn, final DocumentImpl document, final XmldbURI newUri) { + //no-op } @Override - public void afterMoveDocument(DBBroker broker, Txn txn, DocumentImpl document, XmldbURI newUri) throws TriggerException { + public void afterMoveDocument(final DBBroker broker, final Txn txn, final DocumentImpl document, final XmldbURI newUri) { + //no-op } @Override - public void beforeDeleteDocument(DBBroker broker, Txn txn, DocumentImpl document) throws TriggerException { + public void beforeDeleteDocument(final DBBroker broker, final Txn txn, final DocumentImpl document) { + //no-op } @Override - public void afterDeleteDocument(DBBroker broker, Txn txn, XmldbURI uri) throws TriggerException { + public void afterDeleteDocument(final DBBroker broker, final Txn txn, final XmldbURI uri) { + //no-op } @Override - public void beforeUpdateDocumentMetadata(DBBroker broker, Txn txn, DocumentImpl document) throws TriggerException { + public void beforeUpdateDocumentMetadata(final DBBroker broker, final Txn txn, final DocumentImpl document) { + //no-op } @Override - public void afterUpdateDocumentMetadata(DBBroker broker, Txn txn, DocumentImpl document) throws TriggerException { + public void afterUpdateDocumentMetadata(final DBBroker broker, final Txn txn, final DocumentImpl document) { + //no-op } -} \ No newline at end of file +} diff --git a/src/org/exist/collections/triggers/CollectionTriggers.java b/src/org/exist/collections/triggers/CollectionTriggers.java index 12dedb6ad15..d7ad6a2d7ab 100644 --- a/src/org/exist/collections/triggers/CollectionTriggers.java +++ b/src/org/exist/collections/triggers/CollectionTriggers.java @@ -37,15 +37,15 @@ public class 
CollectionTriggers implements CollectionTrigger { private final List triggers; - public CollectionTriggers(DBBroker broker) throws TriggerException { - this(broker, null, null); + public CollectionTriggers(DBBroker broker, Txn transaction) throws TriggerException { + this(broker, transaction, null, null); } - public CollectionTriggers(DBBroker broker, Collection collection) throws TriggerException { - this(broker, collection, collection.getConfiguration(broker)); + public CollectionTriggers(DBBroker broker, Txn transaction, Collection collection) throws TriggerException { + this(broker, transaction, collection, collection.getConfiguration(broker)); } - public CollectionTriggers(DBBroker broker, Collection collection, CollectionConfiguration config) throws TriggerException { + public CollectionTriggers(DBBroker broker, Txn transaction, Collection collection, CollectionConfiguration config) throws TriggerException { List> colTriggers = null; if (config != null) { @@ -58,7 +58,7 @@ public CollectionTriggers(DBBroker broker, Collection collection, CollectionConf for (TriggerProxy colTrigger : masterTriggers) { - CollectionTrigger instance = colTrigger.newInstance(broker, collection); + CollectionTrigger instance = colTrigger.newInstance(broker, transaction, collection); register(instance); } @@ -66,7 +66,7 @@ public CollectionTriggers(DBBroker broker, Collection collection, CollectionConf if (colTriggers != null) { for (TriggerProxy colTrigger : colTriggers) { - CollectionTrigger instance = colTrigger.newInstance(broker, collection); + CollectionTrigger instance = colTrigger.newInstance(broker, transaction, collection); register(instance); } @@ -78,7 +78,7 @@ private void register(CollectionTrigger trigger) { } @Override - public void configure(DBBroker broker, Collection col, Map> parameters) throws TriggerException { + public void configure(DBBroker broker, Txn transaction, Collection col, Map> parameters) throws TriggerException { } @Override diff --git 
a/src/org/exist/collections/triggers/DeferrableFilteringTrigger.java b/src/org/exist/collections/triggers/DeferrableFilteringTrigger.java index 4d1c17ab79b..13d67f0161c 100644 --- a/src/org/exist/collections/triggers/DeferrableFilteringTrigger.java +++ b/src/org/exist/collections/triggers/DeferrableFilteringTrigger.java @@ -64,7 +64,7 @@ public boolean isDeferring() { * @param defer Should we defer the processing of events? */ public void defer(final boolean defer) throws SAXException { - if(this.defer && defer == false) { + if(this.defer && !defer) { applyDeferredEvents(); } this.defer = defer; diff --git a/src/org/exist/collections/triggers/DocumentTriggers.java b/src/org/exist/collections/triggers/DocumentTriggers.java index cf8d7279e1a..d07cee4785c 100644 --- a/src/org/exist/collections/triggers/DocumentTriggers.java +++ b/src/org/exist/collections/triggers/DocumentTriggers.java @@ -54,15 +54,15 @@ public class DocumentTriggers implements DocumentTrigger, ContentHandler, Lexica private final List triggers; - public DocumentTriggers(DBBroker broker) throws TriggerException { - this(broker, null, null, null); + public DocumentTriggers(DBBroker broker, Txn transaction) throws TriggerException { + this(broker, transaction, null, null, null); } - public DocumentTriggers(DBBroker broker, Collection collection) throws TriggerException { - this(broker, null, collection, collection.isTriggersEnabled() ? collection.getConfiguration(broker) : null); + public DocumentTriggers(DBBroker broker, Txn transaction, Collection collection) throws TriggerException { + this(broker, transaction, null, collection, collection.isTriggersEnabled() ? 
collection.getConfiguration(broker) : null); } - public DocumentTriggers(DBBroker broker, Indexer indexer, Collection collection, CollectionConfiguration config) throws TriggerException { + public DocumentTriggers(DBBroker broker, Txn transaction, Indexer indexer, Collection collection, CollectionConfiguration config) throws TriggerException { List> docTriggers = null; if (config != null) { @@ -75,7 +75,7 @@ public DocumentTriggers(DBBroker broker, Indexer indexer, Collection collection, for (TriggerProxy docTrigger : masterTriggers) { - DocumentTrigger instance = docTrigger.newInstance(broker, collection); + DocumentTrigger instance = docTrigger.newInstance(broker, transaction, collection); register(instance); } @@ -83,7 +83,7 @@ public DocumentTriggers(DBBroker broker, Indexer indexer, Collection collection, if (docTriggers != null) { for (TriggerProxy docTrigger : docTriggers) { - DocumentTrigger instance = docTrigger.newInstance(broker, collection); + DocumentTrigger instance = docTrigger.newInstance(broker, transaction, collection); register(instance); } @@ -129,7 +129,7 @@ private void register(DocumentTrigger trigger) { } @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { + public void configure(DBBroker broker, Txn txn, Collection parent, Map> parameters) throws TriggerException { } @Override diff --git a/src/org/exist/collections/triggers/Dumper.java b/src/org/exist/collections/triggers/Dumper.java index 5f9912f4cac..b68c52d7031 100644 --- a/src/org/exist/collections/triggers/Dumper.java +++ b/src/org/exist/collections/triggers/Dumper.java @@ -44,8 +44,8 @@ public class Dumper extends FilteringTrigger implements DocumentTrigger { * @see org.exist.collections.FilteringTrigger#configure(java.util.Map) */ @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { - super.configure(broker, parent, parameters); + public void configure(DBBroker 
broker, Txn transaction, Collection parent, Map> parameters) throws TriggerException { + super.configure(broker, transaction, parent, parameters); System.out.println("parameters:"); for(final Entry> entry : parameters.entrySet()) { diff --git a/src/org/exist/collections/triggers/HistoryTrigger.java b/src/org/exist/collections/triggers/HistoryTrigger.java index 6806e35a6d4..54be051752e 100644 --- a/src/org/exist/collections/triggers/HistoryTrigger.java +++ b/src/org/exist/collections/triggers/HistoryTrigger.java @@ -74,10 +74,10 @@ public class HistoryTrigger extends FilteringTrigger implements DocumentTrigger private XmldbURI rootPath = DEFAULT_ROOT_PATH; @Override - public void configure(final DBBroker broker, final Collection parent, + public void configure(final DBBroker broker, final Txn transaction, final Collection parent, final Map> parameters) throws TriggerException { - super.configure(broker, parent, parameters); + super.configure(broker, transaction, parent, parameters); if(parameters.containsKey(PARAM_ROOT_NAME)) { try { diff --git a/src/org/exist/collections/triggers/SAXTrigger.java b/src/org/exist/collections/triggers/SAXTrigger.java index ef31be69666..f4729149d65 100644 --- a/src/org/exist/collections/triggers/SAXTrigger.java +++ b/src/org/exist/collections/triggers/SAXTrigger.java @@ -25,6 +25,7 @@ import org.exist.Indexer; import org.exist.collections.Collection; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.ErrorHandler; @@ -60,7 +61,7 @@ protected Collection getCollection() { * collection. 
*/ @Override - public void configure(DBBroker broker, Collection collection, Map> parameters) throws TriggerException { + public void configure(DBBroker broker, Txn Transaction, Collection collection, Map> parameters) throws TriggerException { this.collection = collection; } diff --git a/src/org/exist/collections/triggers/STXTemplatesCache.java b/src/org/exist/collections/triggers/STXTemplatesCache.java index bfb9d606c06..60eebd5de7f 100644 --- a/src/org/exist/collections/triggers/STXTemplatesCache.java +++ b/src/org/exist/collections/triggers/STXTemplatesCache.java @@ -35,6 +35,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.serializers.Serializer; import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; @@ -80,8 +81,7 @@ public static STXTemplatesCache getInstance() { * @return The compiled stylesheet */ public Templates getOrUpdateTemplate(final DBBroker broker, final DocumentImpl stylesheet) throws TransformerConfigurationException, SAXException, LockException { - try { - stylesheet.getUpdateLock().acquire(Lock.LockMode.READ_LOCK); + try(final ManagedDocumentLock documentLock = broker.getBrokerPool().getLockManager().acquireDocumentReadLock(stylesheet.getURI())) { final XmldbURI stylesheetUri = stylesheet.getURI(); final long lastModified = stylesheet.getMetadata().getLastModified(); @@ -104,8 +104,6 @@ public Templates getOrUpdateTemplate(final DBBroker broker, final DocumentImpl s } return cachedTemplate.templates; - } finally { - stylesheet.getUpdateLock().release(Lock.LockMode.READ_LOCK); } } diff --git a/src/org/exist/collections/triggers/STXTransformerTrigger.java b/src/org/exist/collections/triggers/STXTransformerTrigger.java index 1b8cecd4e12..f3f80d320ef 100644 --- a/src/org/exist/collections/triggers/STXTransformerTrigger.java +++ 
b/src/org/exist/collections/triggers/STXTransformerTrigger.java @@ -59,8 +59,8 @@ public class STXTransformerTrigger extends SAXTrigger implements DocumentTrigger private TransformerHandler handler = null; @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { - super.configure(broker, parent, parameters); + public void configure(DBBroker broker, Txn transaction, Collection parent, Map> parameters) throws TriggerException { + super.configure(broker, transaction, parent, parameters); final String stylesheet = (String)parameters.get("src").get(0); if(stylesheet == null) { throw new TriggerException("STXTransformerTrigger requires an attribute 'src'"); diff --git a/src/org/exist/collections/triggers/Trigger.java b/src/org/exist/collections/triggers/Trigger.java index a74cafa906a..a2128f16b43 100644 --- a/src/org/exist/collections/triggers/Trigger.java +++ b/src/org/exist/collections/triggers/Trigger.java @@ -27,6 +27,7 @@ import org.exist.collections.Collection; import org.exist.collections.CollectionConfigurationException; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; /** * Defines the base interface for collection triggers. Triggers are registered through the @@ -123,5 +124,5 @@ public interface Trigger { * @throws CollectionConfigurationException * if the trigger cannot be initialized. 
*/ - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException; + public void configure(DBBroker broker, Txn transaction, Collection parent, Map> parameters) throws TriggerException; } diff --git a/src/org/exist/collections/triggers/TriggerProxy.java b/src/org/exist/collections/triggers/TriggerProxy.java index 7c5908a675b..0b9dbf3b087 100644 --- a/src/org/exist/collections/triggers/TriggerProxy.java +++ b/src/org/exist/collections/triggers/TriggerProxy.java @@ -24,6 +24,7 @@ import org.exist.collections.Collection; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; /** * @@ -35,5 +36,5 @@ public interface TriggerProxy { void setParameters(Map> parameters); - T newInstance(DBBroker broker, Collection collection) throws TriggerException; + T newInstance(DBBroker broker, Txn transaction, Collection collection) throws TriggerException; } diff --git a/src/org/exist/collections/triggers/XQueryStartupTrigger.java b/src/org/exist/collections/triggers/XQueryStartupTrigger.java index f12fbe742e2..f9ef044a6a3 100644 --- a/src/org/exist/collections/triggers/XQueryStartupTrigger.java +++ b/src/org/exist/collections/triggers/XQueryStartupTrigger.java @@ -19,11 +19,8 @@ */ package org.exist.collections.triggers; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; + import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -31,6 +28,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; +import org.exist.security.PermissionFactory; import org.exist.security.SecurityManager; import org.exist.source.Source; import org.exist.source.SourceFactory; @@ -81,7 +79,7 @@ public class XQueryStartupTrigger implements StartupTrigger { private static final String REQUIRED_MIMETYPE = 
"application/xquery"; @Override - public void execute(DBBroker broker, Map> params) { + public void execute(DBBroker broker, final Txn transaction, Map> params) { LOG.info("Starting Startup Trigger for stored XQueries"); @@ -106,11 +104,8 @@ private List getScriptsInStartupCollection(DBBroker broker) { List paths = new ArrayList<>(); XmldbURI uri = XmldbURI.create(AUTOSTART_COLLECTION); - Collection collection = null; - - try { - collection = broker.openCollection(uri, LockMode.READ_LOCK); + try(final Collection collection = broker.openCollection(uri, LockMode.READ_LOCK)) { if (collection == null) { LOG.debug(String.format("Collection '%s' not found.", AUTOSTART_COLLECTION)); createAutostartCollection(broker); @@ -147,16 +142,10 @@ private List getScriptsInStartupCollection(DBBroker broker) { } - LOG.debug(String.format("Found %s xquery scripts in '%s'.", paths.size(), AUTOSTART_COLLECTION)); + LOG.debug(String.format("Found %s XQuery scripts in '%s'.", paths.size(), AUTOSTART_COLLECTION)); } catch (PermissionDeniedException ex) { LOG.error(ex.getMessage()); - - } finally { - // Clean up resources - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } } return paths; @@ -183,7 +172,7 @@ private boolean isPermissionsOK(Collection collection) { * Verify that the owner of the document is DBA, the document is owned by the DBA group and that the permissions are * set 0770, and the mimetype is set application/xquery. 
* - * @param collection The document + * @param document The document * @return TRUE if the conditions are met, else FALSE */ private boolean isPermissionsOK(DocumentImpl document) { @@ -259,7 +248,7 @@ private void executeQuery(DBBroker broker, String path) { Source source = SourceFactory.getSource(broker, null, path, false); if (source == null) { - LOG.info(String.format("No Xquery found at '%s'", path)); + LOG.info(String.format("No XQuery found at '%s'", path)); } else { // Setup xquery service @@ -273,7 +262,7 @@ private void executeQuery(DBBroker broker, String path) { // Compile query CompiledXQuery compiledQuery = service.compile(broker, context, source); - LOG.info(String.format("Starting Xquery at '%s'", path)); + LOG.info(String.format("Starting XQuery at '%s'", path)); // Finish preparation context.prepareForExecution(); @@ -282,13 +271,13 @@ private void executeQuery(DBBroker broker, String path) { Sequence result = service.execute(broker, compiledQuery, null); // Log results - LOG.info(String.format("Result xquery: '%s'", result.getStringValue())); + LOG.info(String.format("Result XQuery: '%s'", result.getStringValue())); } } catch (Throwable t) { // Dirty, catch it all - LOG.error(String.format("An error occured during preparation/execution of the xquery script %s: %s", path, t.getMessage()), t); + LOG.error(String.format("An error occurred during preparation/execution of the XQuery script %s: %s", path, t.getMessage()), t); } finally { if (context != null) { @@ -311,13 +300,12 @@ private void createAutostartCollection(DBBroker broker) { XmldbURI newCollection = XmldbURI.create(AUTOSTART_COLLECTION, true); // Create collection - Collection created = broker.getOrCreateCollection(txn, newCollection); + final Collection created = broker.getOrCreateCollection(txn, newCollection); + + // Set ownership and mode + PermissionFactory.chown(broker, created, Optional.of(SecurityManager.SYSTEM), Optional.of(SecurityManager.DBA_GROUP)); + 
PermissionFactory.chmod(broker, created, Optional.of(Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM), Optional.empty()); - // Set ownership - Permission perms = created.getPermissions(); - perms.setOwner(broker.getBrokerPool().getSecurityManager().getSystemSubject()); - perms.setGroup(broker.getBrokerPool().getSecurityManager().getDBAGroup()); - perms.setMode(Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); broker.saveCollection(txn, created); broker.flush(); diff --git a/src/org/exist/collections/triggers/XQueryTrigger.java b/src/org/exist/collections/triggers/XQueryTrigger.java index 64c50b4549c..03262055838 100644 --- a/src/org/exist/collections/triggers/XQueryTrigger.java +++ b/src/org/exist/collections/triggers/XQueryTrigger.java @@ -122,7 +122,6 @@ public class XQueryTrigger extends SAXTrigger implements DocumentTrigger, Collec public final static QName beforeDeleteDocument = new QName("before-delete-document", NAMESPACE); public final static QName afterDeleteDocument = new QName("after-delete-document", NAMESPACE); -// private SAXAdapter adapter; private Set events; private Collection collection = null; private String strQuery = null; @@ -134,17 +133,12 @@ public class XQueryTrigger extends SAXTrigger implements DocumentTrigger, Collec private XQuery service; public final static String PREPARE_EXCEPTION_MESSAGE = "Error during trigger prepare"; - - - public XQueryTrigger() - { -// adapter = new SAXAdapter(); - } /** * @link org.exist.collections.Trigger#configure(org.exist.storage.DBBroker, org.exist.collections.Collection, java.util.Map) */ - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException + @Override + public void configure(DBBroker broker, Txn transaction, Collection parent, Map> parameters) throws TriggerException { this.collection = parent; diff --git a/src/org/exist/config/ConfigurationDocumentTrigger.java b/src/org/exist/config/ConfigurationDocumentTrigger.java index 57e30b74273..39977e697ce 
100644 --- a/src/org/exist/config/ConfigurationDocumentTrigger.java +++ b/src/org/exist/config/ConfigurationDocumentTrigger.java @@ -21,6 +21,7 @@ import java.util.*; +import com.evolvedbinary.j8fu.tuple.Tuple2; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.EXistException; @@ -43,6 +44,8 @@ import org.xml.sax.SAXException; import org.xml.sax.helpers.AttributesImpl; +import static com.evolvedbinary.j8fu.tuple.Tuple.Tuple; + /** * Amongst other things, this trigger defers immediate updates to Principals * (Accounts or Groups) until it has enough information to determine @@ -145,7 +148,7 @@ public void afterCreateDocument(final DBBroker broker, final Txn txn, final Docu final XmldbURI uri = document.getCollection().getURI(); if (uri.startsWith(SecurityManager.SECURITY_COLLECTION_URI)) { try { - broker.getBrokerPool().getSecurityManager().processPramatter(broker, document); + broker.getBrokerPool().getSecurityManager().processParameter(broker, document); } catch (final ConfigurationException e) { LOG.error("Configuration can't be processed [" + document.getURI() + "]", e); //TODO : raise exception ? -pb @@ -170,7 +173,7 @@ public void beforeUpdateDocument(final DBBroker broker, final Txn txn, final Doc if (uri.startsWith(SecurityManager.SECURITY_COLLECTION_URI)) { try { broker.getBrokerPool().getSecurityManager() - .processPramatterBeforeSave(broker, document); + .processParameterBeforeSave(broker, document); } catch (final ConfigurationException e) { LOG.error("Configuration can't be processed [" + document.getURI() + "]", e); //TODO : raise exception ? 
-pb @@ -190,7 +193,7 @@ public void afterUpdateDocument(final DBBroker broker, final Txn txn, final Docu final XmldbURI uri = document.getCollection().getURI(); if (uri.startsWith(SecurityManager.SECURITY_COLLECTION_URI)) { try { - broker.getBrokerPool().getSecurityManager().processPramatter(broker, document); + broker.getBrokerPool().getSecurityManager().processParameter(broker, document); } catch (final ConfigurationException e) { LOG.error("Configuration can't be processed [" + document.getURI() + "]", e); //TODO : raise exception ? -pb @@ -243,9 +246,9 @@ public void beforeUpdateDocumentMetadata(final DBBroker broker, final Txn txn, f public void afterUpdateDocumentMetadata(DBBroker broker, Txn txn, DocumentImpl document) { } - @Override - public void configure(DBBroker broker, Collection parent, Map> parameters) throws TriggerException { - } + @Override + public void configure(final DBBroker broker, final Txn transaction, final Collection parent, final Map> parameters) throws TriggerException { + } @Override public void startElement(final String namespaceURI, final String localName, final String qname, final Attributes attributes) throws SAXException { @@ -420,28 +423,27 @@ private String findName() { * with Accounts or Groups */ private enum PrincipalType { - ACCOUNT("account", new HashMap() { - { - put(-1, RealmImpl.UNKNOWN_ACCOUNT_ID); - put(0, RealmImpl.SYSTEM_ACCOUNT_ID); - put(1, RealmImpl.ADMIN_ACCOUNT_ID); - put(2, RealmImpl.GUEST_ACCOUNT_ID); - } - }), - GROUP("group", new HashMap() { - { - put(-1, RealmImpl.UNKNOWN_GROUP_ID); - put(1, RealmImpl.DBA_GROUP_ID); - put(2, RealmImpl.GUEST_GROUP_ID); - } - }); + ACCOUNT("account", + Tuple(-1, RealmImpl.UNKNOWN_ACCOUNT_ID), + Tuple(0, RealmImpl.SYSTEM_ACCOUNT_ID), + Tuple(1, RealmImpl.ADMIN_ACCOUNT_ID), + Tuple(2, RealmImpl.GUEST_ACCOUNT_ID) + ), + GROUP("group", + Tuple(-1, RealmImpl.UNKNOWN_GROUP_ID), + Tuple(1, RealmImpl.DBA_GROUP_ID), + Tuple(2, RealmImpl.GUEST_GROUP_ID) + ); private final String 
elementName; private final Map idMigration; - PrincipalType(final String elementName, final Map idMigration) { + PrincipalType(final String elementName, final Tuple2... idMigrations) { this.elementName = elementName; - this.idMigration = idMigration; + this.idMigration = new HashMap<>(); + for (final Tuple2 idMigration : idMigrations) { + this.idMigration.put(idMigration._1, idMigration._2); + } } /** diff --git a/src/org/exist/config/Configurator.java b/src/org/exist/config/Configurator.java index a6fdb672e00..b4f522a1836 100644 --- a/src/org/exist/config/Configurator.java +++ b/src/org/exist/config/Configurator.java @@ -59,12 +59,13 @@ import org.exist.dom.memtree.SAXAdapter; import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; +import org.exist.security.PermissionFactory; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock.LockMode; import org.exist.storage.sync.Sync; import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; +import org.exist.util.LockException; import org.exist.util.MimeType; import com.evolvedbinary.j8fu.function.ConsumerE; import org.exist.util.io.FastByteArrayOutputStream; @@ -129,7 +130,7 @@ public static Method searchForGetMethod(final Class clazz, final String prope try { final String methodName = ("get" + property).toLowerCase(); for (final Method method : clazz.getMethods()) { - if (method.getName().toLowerCase().equals(methodName)) { + if (method.getName().equalsIgnoreCase(methodName)) { return method; } } @@ -151,7 +152,7 @@ public static Method searchForSetMethod(final Class clazz, final Field field) try { final String methodName = ("set" + field.getName()).toLowerCase(); for (final Method method : clazz.getMethods()) { - if (method.getName().toLowerCase().equals(methodName)) { + if (method.getName().equalsIgnoreCase(methodName)) { return method; } } @@ -173,7 +174,7 @@ public static Method searchForAddMethod(final 
Class clazz, final String prope try { final String methodName = ("add" + property).toLowerCase(); for (final Method method : clazz.getMethods()) { - if (method.getName().toLowerCase().equals(methodName) + if (method.getName().equalsIgnoreCase(methodName) && method.getParameterTypes().length == 1 && String.class.getName().equals(method.getParameterTypes()[0].getName())) { return method; @@ -189,7 +190,7 @@ public static Method searchForInsertMethod(final Class clazz, final String pr try { final String methodName = ("insert" + property).toLowerCase(); for (final Method method : clazz.getMethods()) { - if (method.getName().toLowerCase().equals(methodName) + if (method.getName().equalsIgnoreCase(methodName) && method.getParameterTypes().length == 2 && int.class.getName().equals(method.getParameterTypes()[0].getName()) && String.class.getName().equals(method.getParameterTypes()[1].getName())) { @@ -716,8 +717,10 @@ private static Configurable create(final Configuration conf, final Configurable } if (db != null) { - try(final DBBroker broker = db.getBroker()) { - ((LifeCycle) obj).start(broker); + try(final DBBroker broker = db.getBroker(); + final Txn transaction = broker.continueOrBeginTransaction()) { + ((LifeCycle) obj).start(broker, transaction); + transaction.commit(); } } @@ -1285,43 +1288,51 @@ public static DocumentImpl save(final Configurable instance, final DBBroker brok FullXmldbURI fullURI = null; final BrokerPool pool = broker.getBrokerPool(); final TransactionManager transact = pool.getTransactionManager(); - Txn txn = null; LOG.info("Storing configuration " + collection.getURI() + "/" + uri); - + try { broker.pushSubject(pool.getSecurityManager().getSystemSubject()); - txn = transact.beginTransaction(); - txn.acquireLock(collection.getLock(), LockMode.WRITE_LOCK); - final IndexInfo info = collection.validateXMLResource(txn, broker, uri, data); - final DocumentImpl doc = info.getDocument(); - doc.getMetadata().setMimeType(MimeType.XML_TYPE.getName()); - 
doc.getPermissions().setMode(Permission.DEFAULT_SYSTSEM_RESOURCE_PERM); - fullURI = getFullURI(pool, doc.getURI()); - saving.add(fullURI); - collection.store(txn, broker, info, data); - broker.saveCollection(txn, doc.getCollection()); - transact.commit(txn); + Txn txn = broker.getCurrentTransaction(); + final boolean txnInProgress = txn != null; + if(!txnInProgress) { + txn = transact.beginTransaction(); + } + + try { + txn.acquireCollectionLock(() -> pool.getLockManager().acquireCollectionWriteLock(collection.getURI())); + final IndexInfo info = collection.validateXMLResource(txn, broker, uri, data); + final DocumentImpl doc = info.getDocument(); + doc.getMetadata().setMimeType(MimeType.XML_TYPE.getName()); + PermissionFactory.chmod(broker, doc.getPermissions(), Optional.of(Permission.DEFAULT_SYSTSEM_RESOURCE_PERM), Optional.empty()); + fullURI = getFullURI(pool, doc.getURI()); + saving.add(fullURI); + collection.store(txn, broker, info, data); + broker.saveCollection(txn, doc.getCollection()); + if (!txnInProgress) { + transact.commit(txn); + } + } catch(final EXistException | PermissionDeniedException | SAXException | LockException e) { + if(!txnInProgress) { + transact.abort(txn); + } + throw e; + } finally { + if(!txnInProgress) { + txn.close(); + } + } saving.remove(fullURI); broker.flush(); broker.sync(Sync.MAJOR); return collection.getDocument(broker, uri.lastSegment()); - - } catch (final Exception e) { + } catch(final EXistException | PermissionDeniedException | SAXException | LockException e) { LOG.error(e); - if (fullURI != null) { saving.remove(fullURI); } - - if (txn != null) { - transact.abort(txn); - } - throw new IOException(e); - } finally { - transact.close(txn); broker.popSubject(); } } diff --git a/src/org/exist/dom/memtree/DOMIndexer.java b/src/org/exist/dom/memtree/DOMIndexer.java index 56b824d7686..6db83705279 100644 --- a/src/org/exist/dom/memtree/DOMIndexer.java +++ b/src/org/exist/dom/memtree/DOMIndexer.java @@ -45,15 +45,13 @@ import 
org.w3c.dom.Node; import javax.xml.XMLConstants; -import java.util.HashMap; -import java.util.Map; -import java.util.Stack; +import java.util.*; /** * Helper class to make a in-memory document fragment persistent. The class * directly accesses the in-memory document structure and writes it into a * temporary doc on the database. This is much faster than first serializing - * the document tree to SAX and passing it to {@link org.exist.collections.Collection#store(org.exist.storage.txn.Txn, org.exist.storage.DBBroker, org.exist.collections.IndexInfo, org.xml.sax.InputSource, boolean)}. + * the document tree to SAX and passing it to {@link org.exist.collections.Collection#store(org.exist.storage.txn.Txn, org.exist.storage.DBBroker, org.exist.collections.IndexInfo, org.xml.sax.InputSource)}. *

*

As the in-memory document fragment may not be a well-formed XML doc (having more than one root element), a wrapper element is put around the * content nodes.

@@ -71,7 +69,7 @@ public class DOMIndexer { private final org.exist.dom.persistent.DocumentImpl targetDoc; private final IndexSpec indexSpec; - private final Stack stack = new Stack<>(); + private final Deque stack = new ArrayDeque<>(); private StoredNode prevNode = null; private final TextImpl text = new TextImpl(); @@ -171,7 +169,7 @@ private void startNode(final int nodeNr, final NodePath currentPath) { case Node.ELEMENT_NODE: { final ElementImpl elem = (ElementImpl) NodePool.getInstance().borrowNode(Node.ELEMENT_NODE); - if(stack.empty()) { + if(stack.isEmpty()) { elem.setNodeId(broker.getBrokerPool().getNodeFactory().createInstance()); initElement(nodeNr, elem); stack.push(elem); @@ -219,7 +217,7 @@ private void startNode(final int nodeNr, final NodePath currentPath) { case Node.COMMENT_NODE: { comment.setData(doc.characters, doc.alpha[nodeNr], doc.alphaLen[nodeNr]); comment.setOwnerDocument(targetDoc); - if(stack.empty()) { + if(stack.isEmpty()) { comment.setNodeId(NodeId.DOCUMENT_NODE); targetDoc.appendChild((NodeHandle) comment); broker.storeNode(transaction, comment, null, indexSpec); @@ -237,7 +235,7 @@ private void startNode(final int nodeNr, final NodePath currentPath) { pi.setTarget(qn.getLocalPart()); pi.setData(new String(doc.characters, doc.alpha[nodeNr], doc.alphaLen[nodeNr])); pi.setOwnerDocument(targetDoc); - if(stack.empty()) { + if(stack.isEmpty()) { pi.setNodeId(NodeId.DOCUMENT_NODE); targetDoc.appendChild((NodeHandle) pi); } else { diff --git a/src/org/exist/dom/memtree/MemTreeBuilder.java b/src/org/exist/dom/memtree/MemTreeBuilder.java index 89a01940ceb..8cd4231c7e5 100644 --- a/src/org/exist/dom/memtree/MemTreeBuilder.java +++ b/src/org/exist/dom/memtree/MemTreeBuilder.java @@ -120,7 +120,7 @@ public int startElement(final String namespaceURI, String localName, final Strin final int prefixIdx = qname.indexOf(':'); String prefix = null; - if(context != null && !getDefaultNamespace().equals(namespaceURI == null ? 
XMLConstants.NULL_NS_URI : namespaceURI)) { + if (context != null && !getDefaultNamespace().equals(namespaceURI == null ? XMLConstants.NULL_NS_URI : namespaceURI)) { prefix = context.getPrefixForURI(namespaceURI); } @@ -128,8 +128,12 @@ public int startElement(final String namespaceURI, String localName, final Strin prefix = (prefixIdx != Constants.STRING_NOT_FOUND) ? qname.substring(0, prefixIdx) : null; } - if(localName.isEmpty() && prefixIdx > -1) { - localName = qname.substring(prefixIdx + 1); + if (localName.isEmpty()) { + if (prefixIdx > -1) { + localName = qname.substring(prefixIdx + 1); + } else { + localName = qname; + } } final QName qn = new QName(localName, namespaceURI, prefix); diff --git a/src/org/exist/dom/memtree/NodeImpl.java b/src/org/exist/dom/memtree/NodeImpl.java index 9fa71e500a6..a3e57fee379 100644 --- a/src/org/exist/dom/memtree/NodeImpl.java +++ b/src/org/exist/dom/memtree/NodeImpl.java @@ -611,7 +611,7 @@ public int getItemType() { } @Override - public SequenceIterator iterate() throws XPathException { + public SequenceIterator iterate() { return new SingleNodeIterator(this); } @@ -621,7 +621,7 @@ public SequenceIterator unorderedIterator() { } @Override - public int getItemCount() { + public long getItemCountLong() { return 1; } @@ -829,30 +829,13 @@ public void selectPreceding(final NodeTest test, final Sequence result, final in public void selectFollowingSiblings(final NodeTest test, final Sequence result) throws XPathException { final int parent = document.getParentNodeFor(nodeNumber); - if(parent == 0) { - // parent is the document node - if(getNodeType() == Node.ELEMENT_NODE) { - return; - } - NodeImpl next = (NodeImpl) getNextSibling(); - while(next != null) { - if(test.matches(next)) { - result.add(next); - } - if(next.getNodeType() == Node.ELEMENT_NODE) { - break; - } - next = (NodeImpl) next.getNextSibling(); - } - } else { - int nextNode = document.getFirstChildFor(parent); - while(nextNode > parent) { - final NodeImpl n = 
document.getNode(nextNode); - if((nextNode > nodeNumber) && test.matches(n)) { - result.add(n); - } - nextNode = document.next[nextNode]; + int nextNode = document.getFirstChildFor(parent); + while(nextNode > parent) { + final NodeImpl n = document.getNode(nextNode); + if((nextNode > nodeNumber) && test.matches(n)) { + result.add(n); } + nextNode = document.next[nextNode]; } } @@ -1000,7 +983,7 @@ protected DOMException unsupported() { } private final static class SingleNodeIterator implements SequenceIterator { - NodeImpl node; + private NodeImpl node; public SingleNodeIterator(final NodeImpl node) { this.node = node; @@ -1018,5 +1001,21 @@ public Item nextItem() { return next; } + @Override + public long skippable() { + if (node != null) { + return 1; + } + return 0; + } + + @Override + public long skip(final long n) { + final long skip = Math.min(n, node != null ? 1 : 0); + if (skip == 1) { + node = null; + } + return skip; + } } } diff --git a/src/org/exist/dom/memtree/SAXAdapter.java b/src/org/exist/dom/memtree/SAXAdapter.java index d0db503de46..2c599f08bd7 100644 --- a/src/org/exist/dom/memtree/SAXAdapter.java +++ b/src/org/exist/dom/memtree/SAXAdapter.java @@ -117,17 +117,19 @@ public void endElement(final String namespaceURI, final String localName, final public void startElement(final String namespaceURI, final String localName, final String qName, final Attributes atts) throws SAXException { builder.startElement(namespaceURI, localName, qName, atts); - if(namespaces != null) { - for(final Map.Entry entry : namespaces.entrySet()) { + if (namespaces != null) { + for (final Map.Entry entry : namespaces.entrySet()) { builder.namespaceNode(entry.getKey(), entry.getValue()); } } - for(int i = 0; i < atts.getLength(); i++) { - if(atts.getQName(i).startsWith(XMLConstants.XMLNS_ATTRIBUTE)) { - final String prefix = null; + for (int i = 0; i < atts.getLength(); i++) { + final String attQName = atts.getQName(i); + if 
(attQName.startsWith(XMLConstants.XMLNS_ATTRIBUTE)) { + final int idxPrefixSep = attQName.indexOf(":"); + final String prefix = idxPrefixSep > -1 ? attQName.substring(idxPrefixSep + 1) : null; final String uri = atts.getValue(i); - if(namespaces == null || !namespaces.containsKey(prefix)) { + if (namespaces == null || !namespaces.containsKey(prefix)) { builder.namespaceNode(prefix, uri); } } diff --git a/src/org/exist/dom/persistent/AVLTreeNodeSet.java b/src/org/exist/dom/persistent/AVLTreeNodeSet.java index 1ba5bd098df..42c909f357f 100644 --- a/src/org/exist/dom/persistent/AVLTreeNodeSet.java +++ b/src/org/exist/dom/persistent/AVLTreeNodeSet.java @@ -22,12 +22,13 @@ package org.exist.dom.persistent; import org.exist.numbering.NodeId; -import org.exist.xquery.XPathException; import org.exist.xquery.value.Item; import org.exist.xquery.value.SequenceIterator; +import javax.annotation.Nullable; +import java.util.ArrayDeque; +import java.util.Deque; import java.util.Iterator; -import java.util.Stack; public class AVLTreeNodeSet extends AbstractNodeSet { @@ -35,17 +36,14 @@ public class AVLTreeNodeSet extends AbstractNodeSet { private int size = 0; private int state = 0; - /* (non-Javadoc) - * @see org.exist.dom.persistent.NodeSet#iterate() - */ @Override - public SequenceIterator iterate() throws XPathException { - return new InorderTraversal(); + public SequenceIterator iterate() { + return new InorderTraversal(root); } @Override - public SequenceIterator unorderedIterator() throws XPathException { - return new InorderTraversal(); + public SequenceIterator unorderedIterator() { + return new InorderTraversal(root); } @Override @@ -60,9 +58,8 @@ public int getLength() { return size; } - //TODO : evaluate both semantics @Override - public int getItemCount() { + public long getItemCountLong() { return size; } @@ -81,7 +78,7 @@ public NodeProxy get(final int pos) { @Override public final NodeProxy get(final NodeProxy p) { - final Node n = searchData(p); + final Node n = 
searchData(root, p); return n == null ? null : n.getData(); } @@ -171,9 +168,8 @@ public Node getMaxNode() { } private void balance(final Node node) { - Node currentNode, currentParent; - currentNode = node; - currentParent = node.parent; + Node currentNode = node; + Node currentParent = node.parent; while(currentNode != root) { final int h = currentParent.height; currentParent.setHeight(); @@ -304,7 +300,7 @@ private void balance(final Node node) { } } - public final Node searchData(final NodeProxy proxy) { + private static @Nullable Node searchData(@Nullable final Node root, final NodeProxy proxy) { if(root == null) { return null; } @@ -353,7 +349,7 @@ public final NodeProxy get(final DocumentImpl doc, final NodeId nodeId) { @Override public final boolean contains(final NodeProxy proxy) { - return searchData(proxy) != null; + return searchData(root, proxy) != null; } public void removeNode(final Node node) { @@ -398,7 +394,7 @@ public void removeNode(final Node node) { @Override public NodeSetIterator iterator() { - return new InorderTraversal(); + return new InorderTraversal(root); } @Override @@ -420,11 +416,12 @@ private void setHasChanged() { state = (state == Integer.MAX_VALUE ? 
0 : state + 1); } - class InorderTraversal implements NodeSetIterator, SequenceIterator { + private static class InorderTraversal implements NodeSetIterator, SequenceIterator { + @Nullable private final Node root; + private final Deque nodes = new ArrayDeque<>(); - private final Stack nodes = new Stack<>(); - - public InorderTraversal() { + public InorderTraversal(@Nullable final Node root) { + this.root = root; if(root != null) { Node tempNode = root; do { @@ -436,10 +433,7 @@ public InorderTraversal() { @Override public boolean hasNext() { - if(nodes.size() == 0) { - return false; - } - return true; + return !nodes.isEmpty(); } @Override @@ -447,8 +441,7 @@ public NodeProxy next() { if(nodes.isEmpty()) { return null; } - final Node currentNode = nodes.peek(); - nodes.pop(); + final Node currentNode = nodes.pop(); if(currentNode.hasRightChild()) { Node tempNode = currentNode.rightChild; do { @@ -461,16 +454,16 @@ public NodeProxy next() { @Override public NodeProxy peekNode() { - if(nodes.isEmpty()) { + final Node currentNode = nodes.peek(); + if (currentNode == null) { return null; } - final Node currentNode = nodes.peek(); return currentNode.getData(); } @Override public void setPosition(final NodeProxy proxy) { - final Node n = searchData(proxy); + final Node n = searchData(root, proxy); nodes.clear(); if(n != null) { Node tempNode = n; @@ -491,9 +484,8 @@ public Item nextItem() { if(nodes.isEmpty()) { return null; } - final Node currentNode = nodes.peek(); - nodes.pop(); - if(currentNode.hasRightChild()) { + final Node currentNode = nodes.pop(); + if (currentNode.hasRightChild()) { Node tempNode = currentNode.rightChild; do { nodes.push(tempNode); @@ -510,7 +502,6 @@ public String toString() { } private static final class Node { - private NodeProxy data; private Node parent; private Node leftChild; diff --git a/src/org/exist/dom/persistent/AbstractArrayNodeSet.java b/src/org/exist/dom/persistent/AbstractArrayNodeSet.java index 92f4cd74550..d6ec2e58700 100644 
--- a/src/org/exist/dom/persistent/AbstractArrayNodeSet.java +++ b/src/org/exist/dom/persistent/AbstractArrayNodeSet.java @@ -154,7 +154,7 @@ public int getLength() { } @Override - public int getItemCount() { + public long getItemCountLong() { return getLength(); } diff --git a/src/org/exist/dom/persistent/AbstractNodeSet.java b/src/org/exist/dom/persistent/AbstractNodeSet.java index 658d0d029d5..218e162e6c2 100644 --- a/src/org/exist/dom/persistent/AbstractNodeSet.java +++ b/src/org/exist/dom/persistent/AbstractNodeSet.java @@ -432,10 +432,10 @@ public int getSizeHint(final DocumentImpl doc) { @Override public NodeSet intersection(final NodeSet other) { final AVLTreeNodeSet r = new AVLTreeNodeSet(); - NodeProxy l, p; for(final Iterator i = iterator(); i.hasNext(); ) { - l = i.next(); - if((p = other.get(l)) != null) { + final NodeProxy l = i.next(); + final NodeProxy p = other.get(l); + if(p != null) { l.addMatches(p); r.add(l); } @@ -446,20 +446,22 @@ public NodeSet intersection(final NodeSet other) { @Override public NodeSet deepIntersection(final NodeSet other) { final AVLTreeNodeSet r = new AVLTreeNodeSet(); - NodeProxy l, p, q; - for(final Iterator i = iterator(); i.hasNext(); ) { - l = i.next(); - if((p = other.parentWithChild(l, false, true, NodeProxy.UNKNOWN_NODE_LEVEL)) != null) { - if(p.getNodeId().equals(l.getNodeId())) { + for (final Iterator i = iterator(); i.hasNext(); ) { + final NodeProxy l = i.next(); + final NodeProxy p = other.parentWithChild(l, false, true, NodeProxy.UNKNOWN_NODE_LEVEL); + if (p != null) { + if (p.getNodeId().equals(l.getNodeId())) { p.addMatches(l); } r.add(p); } } - for(final Iterator i = other.iterator(); i.hasNext(); ) { - l = i.next(); - if((q = parentWithChild(l, false, true, NodeProxy.UNKNOWN_NODE_LEVEL)) != null) { - if((p = r.get(q)) != null) { + for (final Iterator i = other.iterator(); i.hasNext(); ) { + final NodeProxy l = i.next(); + final NodeProxy q = parentWithChild(l, false, true, NodeProxy.UNKNOWN_NODE_LEVEL); 
+ if (q != null) { + final NodeProxy p = r.get(q); + if(p != null) { p.addMatches(l); } else { r.add(l); @@ -519,10 +521,10 @@ public NodeSet union(final NodeSet other) { } else { final NewArrayNodeSet result = new NewArrayNodeSet(); result.addAll(other); - NodeProxy p, c; for(final Iterator i = iterator(); i.hasNext(); ) { - p = i.next(); - if((c = other.get(p)) != null) { + final NodeProxy p = i.next(); + final NodeProxy c = other.get(p); + if(c != null) { c.addMatches(p); } else { result.add(p); @@ -542,16 +544,14 @@ public NodeSet union(final NodeSet other) { */ @Override public NodeSet getContextNodes(final int contextId) { - NodeProxy current, context; - ContextItem contextNode; final NewArrayNodeSet result = new NewArrayNodeSet(); DocumentImpl lastDoc = null; for(final Iterator i = iterator(); i.hasNext(); ) { - current = i.next(); - contextNode = current.getContext(); + final NodeProxy current = i.next(); + ContextItem contextNode = current.getContext(); while(contextNode != null) { if(contextNode.getContextId() == contextId) { - context = contextNode.getNode(); + final NodeProxy context = contextNode.getNode(); context.addMatches(current); if(Expression.NO_CONTEXT_ID != contextId) { context.addContextNode(contextId, context); diff --git a/src/org/exist/dom/persistent/BinaryDocument.java b/src/org/exist/dom/persistent/BinaryDocument.java index 013c3b033fd..9782e9b60f5 100644 --- a/src/org/exist/dom/persistent/BinaryDocument.java +++ b/src/org/exist/dom/persistent/BinaryDocument.java @@ -48,10 +48,37 @@ public BinaryDocument(final BrokerPool pool) { super(pool); } + /** + * Creates a new persistent binary Document instance. 
+ * + * @param pool The broker pool + * @param collection The Collection which holds this document + * @param fileURI The name of the document + */ public BinaryDocument(final BrokerPool pool, final Collection collection, final XmldbURI fileURI) { super(pool, collection, fileURI); } + /** + * Creates a new persistent binary Document instance to replace an existing document instance. + * + * @param prevDoc The previous binary Document object that we are overwriting + */ + public BinaryDocument(final DocumentImpl prevDoc) { + super(prevDoc); + } + + /** + * Creates a new persistent binary Document instance to replace an existing document instance. + * + * @param pool The broker pool + * @param collection The Collection which holds this document + * @param prevDoc The previous binary Document object that we are overwriting + */ + public BinaryDocument(final BrokerPool pool, final Collection collection, final Collection.CollectionEntry prevDoc) { + super(pool, collection, prevDoc); + } + @Override public byte getResourceType() { return BINARY_FILE; diff --git a/src/org/exist/dom/persistent/DefaultDocumentSet.java b/src/org/exist/dom/persistent/DefaultDocumentSet.java index f301aeb5a3d..00db179714b 100644 --- a/src/org/exist/dom/persistent/DefaultDocumentSet.java +++ b/src/org/exist/dom/persistent/DefaultDocumentSet.java @@ -23,12 +23,12 @@ import net.jcip.annotations.NotThreadSafe; import org.exist.collections.Collection; +import org.exist.collections.ManagedLocks; import org.exist.numbering.NodeId; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.LockException; -import org.exist.util.hashtable.Int2ObjectHashMap; import org.exist.xmldb.XmldbURI; import org.w3c.dom.Node; @@ -45,29 +45,28 @@ * @author aretter */ @NotThreadSafe -public class 
DefaultDocumentSet extends Int2ObjectHashMap implements MutableDocumentSet { +public class DefaultDocumentSet implements MutableDocumentSet { - private final static int DEFAULT_SIZE = 29; - private final static double DEFAULT_GROWTH = 1.75; + private static final int DEFAULT_SIZE = 29; + private static final float DEFAULT_GROWTH = 1.75f; private final BitSet docIds = new BitSet(); + private final Map docs; private final BitSet collectionIds = new BitSet(); - private final Set collections = new TreeSet<>(); - - private final Deque lockReleasers = new ArrayDeque<>(); + private final Set collections = new LinkedHashSet<>(); public DefaultDocumentSet() { - super(DEFAULT_SIZE, DEFAULT_GROWTH); + this(DEFAULT_SIZE); } public DefaultDocumentSet(final int initialSize) { - super(initialSize, DEFAULT_GROWTH); + this.docs = new LinkedHashMap<>(initialSize, DEFAULT_GROWTH); } @Override public void clear() { - super.clear(); this.docIds.clear(); + this.docs.clear(); this.collectionIds.clear(); this.collections.clear(); } @@ -85,7 +84,7 @@ public void add(final DocumentImpl doc, final boolean checkDuplicates) { } docIds.set(docId); - put(docId, doc); + docs.put(docId, doc); final Collection collection = doc.getCollection(); if (collection != null && !collectionIds.get(collection.getId())) { collectionIds.set(collection.getId()); @@ -115,10 +114,9 @@ public void addCollection(final Collection collection) { } } - @SuppressWarnings("unchecked") @Override public Iterator getDocumentIterator() { - return valueIterator(); + return docs.values().iterator(); } @Override @@ -128,46 +126,40 @@ public Iterator getCollectionIterator() { @Override public int getDocumentCount() { - return size(); - } - - public int getCollectionCount() { - return collections.size(); + return docs.size(); } @Override public DocumentImpl getDoc(final int docId) { - return (DocumentImpl)get(docId); + return docs.get(docId); } @Override public XmldbURI[] getNames() { - final XmldbURI result[] = new 
XmldbURI[size()]; - int j = 0; - for (final Iterator i = getDocumentIterator(); i.hasNext(); j++) { - final DocumentImpl d = i.next(); - result[j] = d.getFileURI(); - } + final XmldbURI[] result = docs.values().stream() + .map(DocumentImpl::getFileURI) + .toArray(XmldbURI[]::new); Arrays.sort(result); return result; } @Override public DocumentSet intersection(final DocumentSet other) { - final DefaultDocumentSet r = new DefaultDocumentSet(); + final DefaultDocumentSet result = new DefaultDocumentSet(); + for (final Iterator i = getDocumentIterator(); i.hasNext(); ) { final DocumentImpl d = i.next(); if (other.contains(d.getDocId())) { - r.add(d); + result.add(d); } } for (final Iterator i = other.getDocumentIterator(); i.hasNext(); ) { final DocumentImpl d = i.next(); - if (contains(d.getDocId()) && (!r.contains(d.getDocId()))) { - r.add(d); + if (contains(d.getDocId()) && (!result.contains(d.getDocId()))) { + result.add(d); } } - return r; + return result; } public DocumentSet union(final DocumentSet other) { @@ -184,7 +176,7 @@ public DocumentSet union(final DocumentSet other) { @Override public boolean contains(final DocumentSet other) { - if (other.getDocumentCount() > size()) { + if (other.getDocumentCount() > getDocumentCount()) { return false; } @@ -226,16 +218,7 @@ public NodeSet docsToNodeSet() { } public int getMinDocId() { - int min = DocumentImpl.UNKNOWN_DOCUMENT_ID; - for (final Iterator i = getDocumentIterator(); i.hasNext(); ) { - final DocumentImpl d = i.next(); - if (min == DocumentImpl.UNKNOWN_DOCUMENT_ID) { - min = d.getDocId(); - } else if (d.getDocId() < min) { - min = d.getDocId(); - } - } - return min; + return docIds.nextSetBit(0); } public int getMaxDocId() { @@ -278,23 +261,28 @@ public boolean equalDocs(final DocumentSet other) { } @Override - public void lock(final DBBroker broker, final boolean exclusive) throws LockException { - for (int idx = 0; idx < tabSize; idx++) { - if (values[idx] == null || values[idx] == REMOVED) { - continue; 
+ public ManagedLocks lock(final DBBroker broker, final boolean exclusive) throws LockException { + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + final List managedDocumentLocks = new ArrayList<>(); + final Iterator documentIterator = getDocumentIterator(); + try { + while (documentIterator.hasNext()) { + final DocumentImpl document = documentIterator.next(); + final ManagedDocumentLock managedDocumentLock; + if (exclusive) { + managedDocumentLock = lockManager.acquireDocumentWriteLock(document.getURI()); + } else { + managedDocumentLock = lockManager.acquireDocumentReadLock(document.getURI()); + } + managedDocumentLocks.add(managedDocumentLock); } - final DocumentImpl d = (DocumentImpl)values[idx]; - final Lock dlock = d.getUpdateLock(); - dlock.acquire(exclusive ? LockMode.WRITE_LOCK : LockMode.READ_LOCK); - lockReleasers.push(() -> dlock.release(exclusive ? LockMode.WRITE_LOCK : LockMode.READ_LOCK)); - } - } - - @Override - public void unlock() { - // NOTE: locks are released in the reverse order that they were acquired - while(!lockReleasers.isEmpty()) { - lockReleasers.pop().run(); + return new ManagedLocks<>(managedDocumentLocks); + } catch (final LockException e) { + // unlock any previously locked documents + if(!managedDocumentLocks.isEmpty()) { + new ManagedLocks<>(managedDocumentLocks).close(); + } + throw e; } } diff --git a/src/org/exist/dom/persistent/DocumentImpl.java b/src/org/exist/dom/persistent/DocumentImpl.java index ed7d3eae572..ce35f968a24 100644 --- a/src/org/exist/dom/persistent/DocumentImpl.java +++ b/src/org/exist/dom/persistent/DocumentImpl.java @@ -21,21 +21,19 @@ */ package org.exist.dom.persistent; +import com.evolvedbinary.j8fu.tuple.Tuple2; +import net.jcip.annotations.NotThreadSafe; import org.exist.EXistException; import org.exist.Resource; +import org.exist.collections.LockedCollection; import org.exist.dom.QName; import org.exist.dom.QName.IllegalQNameException; import 
org.exist.collections.Collection; import org.exist.collections.CollectionConfiguration; import org.exist.dom.memtree.DocumentFragmentImpl; import org.exist.numbering.NodeId; -import org.exist.security.ACLPermission; -import org.exist.security.Account; -import org.exist.security.Permission; -import org.exist.security.PermissionDeniedException; -import org.exist.security.PermissionFactory; +import org.exist.security.*; import org.exist.security.SecurityManager; -import org.exist.security.UnixStylePermission; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.ElementValue; @@ -43,8 +41,9 @@ import org.exist.storage.StorageAddress; import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.MultiReadReentrantLock; +import org.exist.storage.lock.EnsureContainerLocked; +import org.exist.storage.lock.EnsureLocked; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.txn.Txn; import org.exist.util.XMLString; import org.exist.xmldb.XmldbURI; @@ -65,12 +64,16 @@ import org.w3c.dom.ProcessingInstruction; import org.w3c.dom.Text; +import javax.annotation.Nullable; import javax.xml.XMLConstants; import java.io.EOFException; import java.io.IOException; +import java.util.Optional; import static java.nio.charset.StandardCharsets.UTF_8; import static org.exist.dom.QName.Validity.ILLEGAL_FORMAT; +import static org.exist.storage.lock.Lock.LockMode.READ_LOCK; +import static org.exist.storage.lock.Lock.LockMode.WRITE_LOCK; /** * Represents a persistent document object in the database; @@ -78,6 +81,7 @@ * * @author Wolfgang Meier */ +@NotThreadSafe public class DocumentImpl extends NodeImpl implements Resource, Document { public static final int UNKNOWN_DOCUMENT_ID = -1; @@ -109,14 +113,12 @@ public class DocumentImpl extends NodeImpl implements Resource, Do private int docId = UNKNOWN_DOCUMENT_ID; /** - * the 
document's file name + * Just the document's file name */ private XmldbURI fileURI = null; private Permission permissions = null; - private transient Lock updateLock = null; - private DocumentMetadata metadata = null; /** @@ -125,23 +127,48 @@ public class DocumentImpl extends NodeImpl implements Resource, Do * @param pool a BrokerPool instance representing the db */ public DocumentImpl(final BrokerPool pool) { - this(pool, null, null); + this(pool, null, (XmldbURI)null); } /** - * Creates a new DocumentImpl instance. + * Creates a new persistent Document instance. * - * @param pool a BrokerPool instance representing the db - * @param collection a Collection value - * @param fileURI a XmldbURI value + * @param pool The broker pool + * @param collection The Collection which holds this document + * @param fileURI The name of the document */ public DocumentImpl(final BrokerPool pool, final Collection collection, final XmldbURI fileURI) { + this(pool, collection, fileURI, PermissionFactory.getDefaultResourcePermission(pool.getSecurityManager())); + } + + /** + * Creates a new persistent Document instance to replace an existing document instance. + * + * @param prevDoc The previous Document object that we are overwriting + */ + public DocumentImpl(final DocumentImpl prevDoc) { + this(prevDoc.pool, prevDoc.collection, prevDoc.fileURI, prevDoc.permissions.copy()); + } + + /** + * Creates a new persistent Document instance to replace an existing document instance. 
+ * + * @param pool The broker pool + * @param collection The Collection which holds this document + * @param prevDoc The previous Document object that we are overwriting + */ + public DocumentImpl(final BrokerPool pool, final Collection collection, final Collection.CollectionEntry prevDoc) { + this(pool, collection, prevDoc.getUri().lastSegment(), prevDoc.getPermissions().copy()); + } + + private DocumentImpl(final BrokerPool pool, final Collection collection, final XmldbURI fileURI, final Permission permissions) { this.pool = pool; - this.collection = collection; - this.fileURI = fileURI; - // the permissions assigned to this document - this.permissions = PermissionFactory.getDefaultResourcePermission(pool.getSecurityManager()); + // NOTE: We must not keep a reference to a LockedCollection in the Document object! + this.collection = LockedCollection.unwrapLocked(collection); + + this.fileURI = fileURI; + this.permissions = permissions; //inherit the group to the resource if current collection is setGid if(collection != null && collection.getPermissions().isSetGid()) { @@ -169,17 +196,19 @@ public BrokerPool getBrokerPool() { * * @return a Collection value */ + @EnsureContainerLocked(mode=READ_LOCK) public Collection getCollection() { return collection; } /** - * The method setCollection + * Set the Collection for the document * - * @param parent a Collection value + * @param collection The Collection that the document belongs to */ - public void setCollection(final Collection parent) { - this.collection = parent; + @EnsureContainerLocked(mode=WRITE_LOCK) + public void setCollection(final Collection collection) { + this.collection = collection; } /** @@ -187,6 +216,7 @@ public void setCollection(final Collection parent) { * * @return an int value */ + @EnsureContainerLocked(mode=READ_LOCK) public int getDocId() { return docId; } /** @@ -196,6 +226,7 @@ public int getDocId() { * * @param docId an int value */ + @EnsureContainerLocked(mode=WRITE_LOCK) public void 
setDocId(final int docId) { this.docId = docId; } @@ -213,8 +244,8 @@ public byte getResourceType() { * * @return a XmldbURI value */ + //@EnsureContainerLocked(mode=READ_LOCK) // TODO(AR) temporarily we need to allow some unlocked access public XmldbURI getFileURI() { - //checkAvail(); return fileURI; } @@ -223,10 +254,12 @@ public XmldbURI getFileURI() { * * @param fileURI a XmldbURI value */ + @EnsureContainerLocked(mode=WRITE_LOCK) public void setFileURI(final XmldbURI fileURI) { this.fileURI = fileURI; } + //@EnsureContainerLocked(mode=READ_LOCK) // TODO(AR) temporarily we need to allow some unlocked access @Override public XmldbURI getURI() { if(collection == null) { @@ -236,11 +269,13 @@ public XmldbURI getURI() { } } + @EnsureContainerLocked(mode=READ_LOCK) public boolean isCollectionConfig() { return fileURI.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI); } @Override + @EnsureContainerLocked(mode=READ_LOCK) public Permission getPermissions() { return permissions; } @@ -253,6 +288,7 @@ public Permission getPermissions() { * and should be removed, move code to copyOf or Constructor */ @Deprecated + @EnsureContainerLocked(mode=WRITE_LOCK) public void setPermissions(final Permission perm) { permissions = perm; } @@ -265,11 +301,13 @@ public void setPermissions(final Permission perm) { * and should be removed, move code to copyOf or Constructor */ @Deprecated + @EnsureContainerLocked(mode=WRITE_LOCK) public void setMetadata(final DocumentMetadata meta) { this.metadata = meta; } @Override + @EnsureContainerLocked(mode=READ_LOCK) public DocumentMetadata getMetadata() { return metadata; } @@ -285,76 +323,109 @@ public DocumentMetadata getMetadata() { * This is called by {@link Collection} when replacing a document. 
* * @param other a DocumentImpl value - * @param preserve Cause copyOf to preserve the following attributes of - * each source file in the copy: modification time, - * access time, file mode, user ID, and group ID, - * as allowed by permissions and Access Control - * Lists (ACLs) + * @param prev if there was an existing document which we are replacing, + * we will copy the mode, ACL, and birth time from the existing document. + */ + public void copyOf(final DBBroker broker, final DocumentImpl other, @EnsureLocked(mode=READ_LOCK) @Nullable final DocumentImpl prev) throws PermissionDeniedException { + copyOf(broker, other, prev == null ? null : new Tuple2<>(prev.getPermissions(), prev.getMetadata().getCreated())); + } + + /** + * Copy the relevant internal fields from the specified document object. + * This is called by {@link Collection} when replacing a document. + * + * @param other a DocumentImpl value + * @param prev if there was an existing document which we are replacing, + * we will copy the mode, ACL, and birth time from the existing document. + */ + public void copyOf(final DBBroker broker, final DocumentImpl other, @Nullable final Collection.CollectionEntry prev) throws PermissionDeniedException { + copyOf(broker, other, prev == null ? null : new Tuple2<>(prev.getPermissions(), prev.getCreated())); + } + + /** + * Copy the relevant internal fields from the specified document object. + * This is called by {@link Collection} when replacing a document. + * + * @param other a DocumentImpl value + * @param prev A tuple, containing the permissions and birth time of any + * previous document that we are replacing; We will copy the mode, ACL, + * and birth time from the existing document. 
*/ - public void copyOf(final DocumentImpl other, final boolean preserve) { + @EnsureContainerLocked(mode=WRITE_LOCK) + private void copyOf(final DBBroker broker, @EnsureLocked(mode=READ_LOCK) final DocumentImpl other, @Nullable final Tuple2 prev) throws PermissionDeniedException { childAddress = null; children = 0; - //XXX: why reusing? better to create new instance? -shabanovd metadata = getMetadata(); - if(metadata == null) { + if (metadata == null) { metadata = new DocumentMetadata(); } //copy metadata metadata.copyOf(other.getMetadata()); - if(preserve) { - //copy permission - permissions = ((UnixStylePermission) other.permissions).copy(); - //created and last modified are done by metadata.copyOf - //metadata.setCreated(other.getMetadata().getCreated()); - //metadata.setLastModified(other.getMetadata().getLastModified()); + final long timestamp = System.currentTimeMillis(); + if(prev != null) { + // replaced file should have same owner user:group as prev file + if (permissions.getOwner().getId() != prev._1.getOwner().getId()) { + permissions.setOwner(prev.get_1().getOwner()); + } + if (permissions.getGroup().getId() != prev._1.getGroup().getId()) { + permissions.setGroup(prev.get_1().getGroup()); + } + + //copy mode and acl from prev file + copyModeAcl(broker, prev._1, permissions); + + // set birth time to same as prev file + metadata.setCreated(prev._2); + } else { - //update timestamp - final long timestamp = System.currentTimeMillis(); + // copy mode and acl from source file + copyModeAcl(broker, other.getPermissions(), permissions); + + // set birth time to the current timestamp metadata.setCreated(timestamp); - metadata.setLastModified(timestamp); } + // always set mtime + metadata.setLastModified(timestamp); + // reset pageCount: will be updated during storage metadata.setPageCount(0); } + private void copyModeAcl(final DBBroker broker, final Permission srcPermissions, final Permission destPermissions) throws PermissionDeniedException { + 
PermissionFactory.chmod(broker, destPermissions, Optional.of(srcPermissions.getMode()), Optional.empty()); + + if (srcPermissions instanceof SimpleACLPermission && destPermissions instanceof SimpleACLPermission) { + final SimpleACLPermission srcAclPermissions = (SimpleACLPermission)srcPermissions; + final SimpleACLPermission destAclPermissions = (SimpleACLPermission)destPermissions; + if (!destAclPermissions.equalsAcl(srcAclPermissions)) { + PermissionFactory.chacl(destAclPermissions, newAcl -> + ((SimpleACLPermission)newAcl).copyAclOf(srcAclPermissions) + ); + } + } + } + /** * The method copyChildren * * @param other a DocumentImpl value */ - public void copyChildren(final DocumentImpl other) { + @EnsureContainerLocked(mode=WRITE_LOCK) + public void copyChildren(@EnsureLocked(mode=READ_LOCK) final DocumentImpl other) { childAddress = other.childAddress; children = other.children; } - /** - * Returns true if the document is currently locked for - * write. - */ - public synchronized boolean isLockedForWrite() { - return getUpdateLock().isLockedForWrite(); - } - - /** - * Returns the update lock associated with this - * resource. - */ - public synchronized Lock getUpdateLock() { - if(updateLock == null) { - updateLock = new MultiReadReentrantLock(fileURI); - } - return updateLock; - } - /** * The method setUserLock * * @param user an User value */ + @EnsureContainerLocked(mode=WRITE_LOCK) public void setUserLock(final Account user) { getMetadata().setUserLock(user == null ? 0 : user.getId()); } @@ -364,6 +435,7 @@ public void setUserLock(final Account user) { * * @return an User value */ + @EnsureContainerLocked(mode=READ_LOCK) public Account getUserLock() { final int lockOwnerId = getMetadata().getUserLock(); if(lockOwnerId == 0) { @@ -379,6 +451,7 @@ public Account getUserLock() { * As an estimation, the number of pages occupied by the document * is multiplied with the current page size. 
*/ + @EnsureContainerLocked(mode=READ_LOCK) public long getContentLength() { final long length = getMetadata().getPageCount() * pool.getPageSize(); return (length < 0) ? 0 : length; @@ -405,9 +478,6 @@ public void triggerDefrag() { * @return a Node value */ public Node getNode(final NodeId nodeId) { - if(nodeId.getTreeLevel() == 1) { - return getDocumentElement(); - } try(final DBBroker broker = pool.getBroker()) { return broker.objectWith(this, nodeId); } catch(final EXistException e) { @@ -451,6 +521,7 @@ private void resizeChildList() { * @param child a NodeHandle value * @throws DOMException if an error occurs */ + @EnsureContainerLocked(mode=WRITE_LOCK) public void appendChild(final NodeHandle child) throws DOMException { ++children; resizeChildList(); @@ -463,11 +534,9 @@ public void appendChild(final NodeHandle child) throws DOMException { * @param ostream a VariableByteOutputStream value * @throws IOException if an error occurs */ + @EnsureContainerLocked(mode=READ_LOCK) public void write(final VariableByteOutputStream ostream) throws IOException { try { - if(!getCollection().isTempCollection() && !getUpdateLock().isLockedForWrite()) { - LOG.warn("document not locked for write !"); - } ostream.writeInt(docId); ostream.writeUTF(fileURI.toString()); getPermissions().write(ostream); @@ -492,6 +561,7 @@ public void write(final VariableByteOutputStream ostream) throws IOException { * @throws IOException if an error occurs * @throws EOFException if an error occurs */ + @EnsureContainerLocked(mode=WRITE_LOCK) public void read(final VariableByteInput istream) throws IOException, EOFException { try { docId = istream.readInt(); @@ -518,7 +588,8 @@ public void read(final VariableByteInput istream) throws IOException, EOFExcepti * @return an int value */ @Override - public int compareTo(final DocumentImpl other) { + @EnsureContainerLocked(mode=READ_LOCK) + public int compareTo(@EnsureLocked(mode=READ_LOCK) final DocumentImpl other) { final long otherId = other.docId; 
if(otherId == docId) { return Constants.EQUAL; @@ -529,9 +600,6 @@ public int compareTo(final DocumentImpl other) { } } - /* (non-Javadoc) - * @see org.exist.dom.persistent.NodeImpl#updateChild(org.w3c.dom.Node, org.w3c.dom.Node) - */ @Override public IStoredNode updateChild(final Txn transaction, final Node oldChild, final Node newChild) throws DOMException { if(!(oldChild instanceof StoredNode)) { @@ -574,6 +642,7 @@ public IStoredNode updateChild(final Txn transaction, final Node oldChild, final } @Override + @EnsureContainerLocked(mode=READ_LOCK) public Node getFirstChild() { if(children == 0) { return null; @@ -587,6 +656,7 @@ public Node getFirstChild() { return null; } + @EnsureContainerLocked(mode=READ_LOCK) protected NodeProxy getFirstChildProxy() { return new NodeProxy(this, NodeId.ROOT_NODE, Node.ELEMENT_NODE, childAddress[0]); } @@ -596,6 +666,7 @@ protected NodeProxy getFirstChildProxy() { * * @return a long value */ + @EnsureContainerLocked(mode=READ_LOCK) public long getFirstChildAddress() { if(children == 0) { return StoredNode.UNKNOWN_NODE_IMPL_ADDRESS; @@ -610,6 +681,7 @@ public boolean hasChildNodes() { } @Override + @EnsureContainerLocked(mode=READ_LOCK) public NodeList getChildNodes() { final org.exist.dom.NodeListImpl list = new org.exist.dom.NodeListImpl(); try(final DBBroker broker = pool.getBroker()) { @@ -646,6 +718,7 @@ protected Node getPreviousSibling(final NodeHandle node) { * @param node a NodeHandle value * @return a Node value */ + @EnsureContainerLocked(mode=READ_LOCK) protected Node getFollowingSibling(final NodeHandle node) { final NodeList cl = getChildNodes(); for(int i = 0; i < cl.getLength(); i++) { @@ -695,6 +768,7 @@ protected NodeList findElementsByTagName(final NodeHandle root, final QName qnam * @return a DocumentType value */ @Override + @EnsureContainerLocked(mode=READ_LOCK) public DocumentType getDoctype() { return getMetadata().getDocType(); } @@ -704,6 +778,7 @@ public DocumentType getDoctype() { * * @param docType 
a DocumentType value */ + @EnsureContainerLocked(mode=WRITE_LOCK) public void setDocumentType(final DocumentType docType) { getMetadata().setDocType(docType); } @@ -990,10 +1065,12 @@ public Node getParentNode() { * @return an int value */ @Override + @EnsureContainerLocked(mode=READ_LOCK) public int getChildCount() { return children; } + @EnsureContainerLocked(mode=WRITE_LOCK) public void setChildCount(final int count) { this.children = count; if(children == 0) { @@ -1002,6 +1079,7 @@ public void setChildCount(final int count) { } @Override + @EnsureContainerLocked(mode=READ_LOCK) public boolean isSameNode(final Node other) { // This function is used by Saxon in some circumstances, and this partial implementation is required for proper Saxon operation. if(other instanceof DocumentImpl) { diff --git a/src/org/exist/dom/persistent/DocumentSet.java b/src/org/exist/dom/persistent/DocumentSet.java index 9fc2f00e1a0..40ebf19a8e5 100644 --- a/src/org/exist/dom/persistent/DocumentSet.java +++ b/src/org/exist/dom/persistent/DocumentSet.java @@ -23,7 +23,9 @@ package org.exist.dom.persistent; import org.exist.collections.Collection; +import org.exist.collections.ManagedLocks; import org.exist.storage.DBBroker; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; @@ -55,13 +57,10 @@ public interface DocumentSet { * Locks all of the documents currently in the document set. * * @param exclusive true if a WRITE_LOCK is required, false if a READ_LOCK is required + * @return The locks + * @throws LockException if locking any document fails, when thrown no locks will be held on any documents in the set */ - void lock(final DBBroker broker, final boolean exclusive) throws LockException; - - /** - * Unlocks all of the documents which were locked by the previous call(s) to {@link #lock(DBBroker, boolean)}. 
- */ - void unlock(); + ManagedLocks lock(DBBroker broker, boolean exclusive) throws LockException; boolean equalDocs(DocumentSet other); } diff --git a/src/org/exist/dom/persistent/ElementImpl.java b/src/org/exist/dom/persistent/ElementImpl.java index af5b519d874..0c110be0829 100644 --- a/src/org/exist/dom/persistent/ElementImpl.java +++ b/src/org/exist/dom/persistent/ElementImpl.java @@ -937,17 +937,10 @@ public boolean hasChildNodes() { @Override public NodeList getChildNodes() { - final org.exist.dom.NodeListImpl childList = new org.exist.dom.NodeListImpl(children); - try(final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { - for(final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(this, false); - reader.hasNext(); ) { - final int status = reader.next(); - if(status != XMLStreamConstants.END_ELEMENT && ((NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID)).isChildOf(nodeId)) { - childList.add(reader.getNode()); - } - } - } catch(final IOException | XMLStreamException | EXistException e) { - LOG.warn("Internal error while reading child nodes: " + e.getMessage(), e); + final int childNodesLen = children - attributes; + final org.exist.dom.NodeListImpl childList = new org.exist.dom.NodeListImpl(childNodesLen); + if (childNodesLen > 0) { + getChildren(false, childList); } return childList; } @@ -959,18 +952,44 @@ public NodeList getChildNodes() { */ private NodeList getAttrsAndChildNodes() { final org.exist.dom.NodeListImpl childList = new org.exist.dom.NodeListImpl(children); - try(final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { - for(final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(this, true); - reader.hasNext(); ) { + if (children > 0) { + getChildren(true, childList); + } + return childList; + } + + private void getChildren(final boolean includeAttributes, final org.exist.dom.NodeListImpl childList) { + try (final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { + 
final int thisLevel = nodeId.getTreeLevel(); + final int childLevel = thisLevel + 1; + for (final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(this, includeAttributes); reader.hasNext(); ) { final int status = reader.next(); - if(status != XMLStreamConstants.END_ELEMENT && ((NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID)).isChildOf(nodeId)) { - childList.add(reader.getNode()); + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + + //NOTE(AR): The order of the checks below has been carefully chosen to optimize non-empty children, which is likely the most common case! + + // skip descendants + if (otherLevel > childLevel) { + continue; + } + + if (status == XMLStreamConstants.END_ELEMENT) { + if (otherLevel == thisLevel) { + // finished `this` element... + break; // exit-for + } + // skip over any other END_ELEMENT(s) + } else { + if (otherLevel == childLevel) { + // child + childList.add(reader.getNode()); + } } } } catch(final IOException | XMLStreamException | EXistException e) { LOG.warn("Internal error while reading child nodes: " + e.getMessage(), e); } - return childList; } @Override diff --git a/src/org/exist/dom/persistent/EmptyDocumentSet.java b/src/org/exist/dom/persistent/EmptyDocumentSet.java index 7f2cd545f72..1b59ed0dd51 100644 --- a/src/org/exist/dom/persistent/EmptyDocumentSet.java +++ b/src/org/exist/dom/persistent/EmptyDocumentSet.java @@ -20,7 +20,9 @@ package org.exist.dom.persistent; import org.exist.collections.Collection; +import org.exist.collections.ManagedLocks; import org.exist.storage.DBBroker; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; @@ -88,12 +90,8 @@ public NodeSet docsToNodeSet() { } @Override - public void lock(final DBBroker broker, final boolean exclusive) throws - LockException { - } - - @Override - public void unlock() { 
+ public ManagedLocks lock(final DBBroker broker, final boolean exclusive) throws LockException { + return new ManagedLocks<>(Collections.emptyList()); } @Override diff --git a/src/org/exist/dom/persistent/EmptyNodeSet.java b/src/org/exist/dom/persistent/EmptyNodeSet.java index 25ab37db413..25b2d52773a 100644 --- a/src/org/exist/dom/persistent/EmptyNodeSet.java +++ b/src/org/exist/dom/persistent/EmptyNodeSet.java @@ -24,7 +24,6 @@ import org.exist.collections.Collection; import org.exist.numbering.NodeId; -import org.exist.xquery.XPathException; import org.exist.xquery.value.Item; import org.exist.xquery.value.SequenceIterator; import org.w3c.dom.Node; @@ -40,12 +39,12 @@ public NodeSetIterator iterator() { } @Override - public SequenceIterator iterate() throws XPathException { + public SequenceIterator iterate() { return SequenceIterator.EMPTY_ITERATOR; } @Override - public SequenceIterator unorderedIterator() throws XPathException { + public SequenceIterator unorderedIterator() { return SequenceIterator.EMPTY_ITERATOR; } @@ -78,7 +77,7 @@ public int getLength() { } @Override - public int getItemCount() { + public long getItemCountLong() { return 0; } diff --git a/src/org/exist/dom/persistent/ExtArrayNodeSet.java b/src/org/exist/dom/persistent/ExtArrayNodeSet.java index cccf3f7de12..19a4272c34b 100644 --- a/src/org/exist/dom/persistent/ExtArrayNodeSet.java +++ b/src/org/exist/dom/persistent/ExtArrayNodeSet.java @@ -22,10 +22,11 @@ package org.exist.dom.persistent; import org.exist.collections.Collection; +import org.exist.collections.ManagedLocks; import org.exist.numbering.NodeId; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.ArrayUtils; import org.exist.util.FastQSort; import org.exist.util.LockException; @@ -68,7 +69,6 @@ public class ExtArrayNodeSet extends 
AbstractArrayNodeSet implements DocumentSet private static final int DEFAULT_INITIAL_SIZE = 128; private final int initialSize; - private final Deque lockReleasers = new ArrayDeque<>(); private int documentIds[]; protected int lastDoc = -1; @@ -197,13 +197,13 @@ public NodeSetIterator iterator() { } @Override - public SequenceIterator iterate() throws XPathException { + public SequenceIterator iterate() { sortInDocumentOrder(); return new ExtArrayIterator(); } @Override - public SequenceIterator unorderedIterator() throws XPathException { + public SequenceIterator unorderedIterator() { if(!isSorted()) { sort(); } @@ -456,20 +456,25 @@ public NodeSet docsToNodeSet() { } @Override - public void lock(final DBBroker broker, final boolean exclusive) throws LockException { - for (int i = 0; i < partCount; i++) { - final DocumentImpl doc = parts[i].getOwnerDocument(); - final Lock docLock = doc.getUpdateLock(); - docLock.acquire(exclusive ? LockMode.WRITE_LOCK : LockMode.READ_LOCK); - lockReleasers.push(() -> docLock.release(exclusive ? 
LockMode.WRITE_LOCK : LockMode.READ_LOCK)); - } - } - - @Override - public void unlock() { - // NOTE: locks are released in the reverse order that they were acquired - while(!lockReleasers.isEmpty()) { - lockReleasers.pop().run(); + public ManagedLocks lock(final DBBroker broker, final boolean exclusive) throws LockException { + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + final ManagedDocumentLock[] managedDocumentLocks = new ManagedDocumentLock[partCount]; + try { + for (int i = 0; i < partCount; i++) { + final DocumentImpl doc = parts[i].getOwnerDocument(); + final ManagedDocumentLock docLock; + if (exclusive) { + docLock = lockManager.acquireDocumentWriteLock(doc.getURI()); + } else { + docLock = lockManager.acquireDocumentReadLock(doc.getURI()); + } + managedDocumentLocks[i] = docLock; + } + return new ManagedLocks<>(managedDocumentLocks); + } catch (final LockException e) { + // unlock any previously locked documents + new ManagedLocks<>(managedDocumentLocks).close(); + throw e; } } @@ -660,7 +665,8 @@ NodeProxy get(final int pos) { NodeProxy get(final NodeId nodeId) { int low = 0; int high = length - 1; - int mid, cmp; + int mid; + int cmp; NodeProxy p; while(low <= high) { mid = (low + high) / 2; diff --git a/src/org/exist/dom/persistent/LockToken.java b/src/org/exist/dom/persistent/LockToken.java index 42d1a9fc6ae..10e79117ee5 100644 --- a/src/org/exist/dom/persistent/LockToken.java +++ b/src/org/exist/dom/persistent/LockToken.java @@ -70,13 +70,6 @@ public class LockToken { private long timeout = -1L; private String token = null; - /** - * Creates a new instance of LockToken - */ - public LockToken() { - // Left empty intentionally - } - // Getters and setters /** diff --git a/src/org/exist/dom/persistent/LockedDocument.java b/src/org/exist/dom/persistent/LockedDocument.java new file mode 100644 index 00000000000..cc21fda269a --- /dev/null +++ b/src/org/exist/dom/persistent/LockedDocument.java @@ -0,0 +1,58 @@ +/* + * eXist 
Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.dom.persistent; + +import org.exist.storage.lock.ManagedDocumentLock; + +/** + * Just a wrapper around a {@link DocumentImpl} which allows us to also hold a lock + * lease which is released when {@link #close()} is called. This + * allows us to use ARM (Automatic Resource Management) e.g. 
try-with-resources + * with eXist Document objects + * + * @author Adam Retter + */ +public class LockedDocument implements AutoCloseable { + private final ManagedDocumentLock managedDocumentLock; + private final DocumentImpl document; + + public LockedDocument(final ManagedDocumentLock managedDocumentLock, final DocumentImpl document) { + this.managedDocumentLock = managedDocumentLock; + this.document = document; + } + + /** + * Get the document + * + * @return the locked document + */ + public DocumentImpl getDocument() { + return document; + } + + /** + * Unlocks the Document + */ + @Override + public void close() { + managedDocumentLock.close(); + } +} diff --git a/src/org/exist/dom/persistent/NewArrayNodeSet.java b/src/org/exist/dom/persistent/NewArrayNodeSet.java index 6d00caeb76e..4496b96f1ba 100644 --- a/src/org/exist/dom/persistent/NewArrayNodeSet.java +++ b/src/org/exist/dom/persistent/NewArrayNodeSet.java @@ -22,10 +22,11 @@ package org.exist.dom.persistent; import org.exist.collections.Collection; +import org.exist.collections.ManagedLocks; import org.exist.numbering.NodeId; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.FastQSort; import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; @@ -63,7 +64,6 @@ */ public class NewArrayNodeSet extends AbstractArrayNodeSet implements ExtNodeSet, DocumentSet { - private final Deque lockReleasers = new ArrayDeque<>(); private Set cachedCollections = null; private int documentIds[] = new int[16]; @@ -161,13 +161,13 @@ public NodeSetIterator iterator() { } @Override - public SequenceIterator iterate() throws XPathException { + public SequenceIterator iterate() { sortInDocumentOrder(); return new NewArrayIterator(); } @Override - public SequenceIterator unorderedIterator() throws XPathException { + public 
SequenceIterator unorderedIterator() { if(!isSorted()) { sort(); } @@ -218,7 +218,8 @@ private NodeProxy get(final int docIdx, final NodeId nodeId) { } int low = documentOffsets[docIdx]; int high = low + (documentLengths[docIdx] - 1); - int mid, cmp; + int mid; + int cmp; NodeProxy p; while(low <= high) { mid = (low + high) / 2; @@ -593,11 +594,12 @@ public NodeSet selectPrecedingSiblings(final NodeSet contextSet, final int conte final int end = low + documentLengths[docIdx]; int mid = low; int cmp; - NodeProxy p; + NodeProxy p = null; while(low <= high) { mid = (low + high) / 2; p = nodes[mid]; - if(p.getNodeId().isDescendantOf(parentId)) { + if(p.getNodeId().isDescendantOf(parentId) + || (parentId.equals(NodeId.DOCUMENT_NODE) && p.getNodeId().getTreeLevel() == 1)) { break; // found a child node, break out. } cmp = p.getNodeId().compareTo(parentId); @@ -614,11 +616,17 @@ public NodeSet selectPrecedingSiblings(final NodeSet contextSet, final int conte while(mid < end && nodes[mid].getNodeId().isDescendantOf(parentId)) { ++mid; } + + if (mid == 0 && parentId.equals(NodeId.DOCUMENT_NODE)) { + mid = getLength(); + } + --mid; + final NodeId refId = reference.getNodeId(); for(int i = mid; i >= documentOffsets[docIdx]; i--) { final NodeId currentId = nodes[i].getNodeId(); - if(!currentId.isDescendantOf(parentId)) { + if(!(currentId.isDescendantOf(parentId) || (p != null && parentId.equals(NodeId.DOCUMENT_NODE) && p.getNodeId().getTreeLevel() == 1))) { break; } if(currentId.getTreeLevel() == refId.getTreeLevel() && currentId.compareTo(refId) < 0) { @@ -660,11 +668,12 @@ public NodeSet selectFollowingSiblings(final NodeSet contextSet, final int conte final int end = low + documentLengths[docIdx]; int mid = low; int cmp; - NodeProxy p; + NodeProxy p = null; while(low <= high) { mid = (low + high) / 2; p = nodes[mid]; - if(p.getNodeId().isDescendantOf(parentId)) { + if(p.getNodeId().isDescendantOf(parentId) + || (parentId.equals(NodeId.DOCUMENT_NODE) && 
p.getNodeId().getTreeLevel() == 1)) { break; // found a child node, break out. } cmp = p.getNodeId().compareTo(parentId); @@ -684,8 +693,8 @@ public NodeSet selectFollowingSiblings(final NodeSet contextSet, final int conte final NodeId refId = reference.getNodeId(); for(int i = mid; i < end; i++) { final NodeId currentId = nodes[i].getNodeId(); - if(!currentId.isDescendantOf(parentId)) { - break; + if(!(currentId.isDescendantOf(parentId) || (p != null && parentId.equals(NodeId.DOCUMENT_NODE) && p.getNodeId().getTreeLevel() == 1))) { + continue; } if(currentId.getTreeLevel() == refId.getTreeLevel() && currentId.compareTo(refId) > 0) { if(Expression.IGNORE_CONTEXT != contextId) { @@ -1046,21 +1055,26 @@ public NodeSet docsToNodeSet() { } @Override - public void lock(final DBBroker broker, final boolean exclusive) throws LockException { + public ManagedLocks lock(final DBBroker broker, final boolean exclusive) throws LockException { sort(); - for(int idx = 0; idx < documentCount; idx++) { - final DocumentImpl doc = nodes[documentOffsets[idx]].getOwnerDocument(); - final Lock docLock = doc.getUpdateLock(); - docLock.acquire(exclusive ? LockMode.WRITE_LOCK : LockMode.READ_LOCK); - lockReleasers.push(() -> docLock.release(exclusive ? 
LockMode.WRITE_LOCK : LockMode.READ_LOCK)); - } - } - - @Override - public void unlock() { - // NOTE: locks are released in the reverse order that they were acquired - while(!lockReleasers.isEmpty()) { - lockReleasers.pop().run(); + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + final ManagedDocumentLock[] managedDocumentLocks = new ManagedDocumentLock[documentCount]; + try { + for (int idx = 0; idx < documentCount; idx++) { + final DocumentImpl doc = nodes[documentOffsets[idx]].getOwnerDocument(); + final ManagedDocumentLock managedDocumentLock; + if (exclusive) { + managedDocumentLock = lockManager.acquireDocumentWriteLock(doc.getURI()); + } else { + managedDocumentLock = lockManager.acquireDocumentReadLock(doc.getURI()); + } + managedDocumentLocks[idx] = managedDocumentLock; + } + return new ManagedLocks<>(managedDocumentLocks); + } catch (final LockException e) { + // unlock any previously locked documents + new ManagedLocks<>(managedDocumentLocks).close(); + throw e; } } @@ -1095,9 +1109,8 @@ public void clearContext(final int contextId) throws XPathException { } } - private class NewArrayIterator implements NodeSetIterator, SequenceIterator { - - int pos = 0; + protected class NewArrayIterator implements NodeSetIterator, SequenceIterator { + private int pos = 0; @Override public final boolean hasNext() { @@ -1113,6 +1126,21 @@ public final NodeProxy next() { return nodes[pos++]; } + @Override + public long skippable() { + if (pos == -1) { + return 0; + } + return size - pos; + } + + @Override + public long skip(final long n) { + final long skip = Math.min(n, pos == -1 ? 
0 : size - pos); + pos += skip; + return skip; + } + @Override public final void remove() { throw new UnsupportedOperationException(); diff --git a/src/org/exist/dom/persistent/NodeProxy.java b/src/org/exist/dom/persistent/NodeProxy.java index b7aa264f888..d37e7f873d8 100644 --- a/src/org/exist/dom/persistent/NodeProxy.java +++ b/src/org/exist/dom/persistent/NodeProxy.java @@ -21,6 +21,7 @@ import org.exist.EXistException; import org.exist.collections.Collection; +import org.exist.collections.ManagedLocks; import org.exist.dom.QName; import org.exist.dom.memtree.DocumentBuilderReceiver; import org.exist.numbering.NodeId; @@ -28,8 +29,8 @@ import org.exist.storage.DBBroker; import org.exist.storage.RangeIndexSpec; import org.exist.storage.StorageAddress; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.serializers.Serializer; import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; @@ -50,7 +51,9 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import java.io.IOException; -import java.util.*; +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.Properties; /** * Placeholder class for DOM nodes. @@ -75,8 +78,6 @@ public class NodeProxy implements NodeSet, NodeValue, NodeHandle, DocumentSet, C public static final short UNKNOWN_NODE_TYPE = -1; public static final int UNKNOWN_NODE_LEVEL = -1; - private final Deque lockReleasers = new ArrayDeque<>(); - /** * The owner document of this node. 
*/ @@ -828,7 +829,7 @@ public NodeSetIterator iterator() { } @Override - public SequenceIterator iterate() throws XPathException { + public SequenceIterator iterate() { return new SingleNodeIterator(this); } @@ -892,9 +893,8 @@ public int getLength() { return 1; } - //TODO : evaluate both semantics @Override - public int getItemCount() { + public long getItemCountLong() { return 1; } @@ -1201,6 +1201,7 @@ public NodeSet directSelectAttribute(final DBBroker broker, final NodeTest test, if(nodeType != UNKNOWN_NODE_TYPE && nodeType != Node.ELEMENT_NODE) { return NodeSet.EMPTY_SET; } + try { NewArrayNodeSet result = null; final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(this, true); @@ -1208,16 +1209,18 @@ public NodeSet directSelectAttribute(final DBBroker broker, final NodeTest test, if(status != XMLStreamReader.START_ELEMENT) { return NodeSet.EMPTY_SET; } + final int attrs = reader.getAttributeCount(); - for(int i = 0; i < attrs; i++) { + for (int i = 0; i < attrs; i++) { status = reader.next(); if(status != XMLStreamReader.ATTRIBUTE) { break; } + final AttrImpl attr = (AttrImpl) reader.getNode(); - if(test.matches(attr)) { + if (test.matches(attr)) { final NodeProxy child = new NodeProxy(attr); - if(Expression.NO_CONTEXT_ID != contextId) { + if (Expression.NO_CONTEXT_ID != contextId) { child.addContextNode(contextId, this); } else { child.copyContext(this); @@ -1232,9 +1235,7 @@ public NodeSet directSelectAttribute(final DBBroker broker, final NodeTest test, } } return result == null ? 
NodeSet.EMPTY_SET : result; - } catch(final IOException e) { - throw new RuntimeException(e.getMessage(), e); - } catch(final XMLStreamException e) { + } catch (final IOException | XMLStreamException e) { throw new RuntimeException(e.getMessage(), e); } } @@ -1302,6 +1303,23 @@ public final NodeProxy next() { } } + @Override + public long skippable() { + if (hasNext) { + return 1; + } + return 0; + } + + @Override + public long skip(final long n) { + final long skip = Math.min(n, hasNext ? 1 : 0); + if(skip == 1) { + hasNext = false; + } + return skip; + } + @Override public final NodeProxy peekNode() { return node; @@ -1420,18 +1438,15 @@ public NodeSet docsToNodeSet() { } @Override - public void lock(final DBBroker broker, final boolean exclusive) throws LockException { - final Lock docLock = doc.getUpdateLock(); - docLock.acquire(exclusive ? LockMode.WRITE_LOCK : LockMode.READ_LOCK); - lockReleasers.push(() -> docLock.release(exclusive ? LockMode.WRITE_LOCK : LockMode.READ_LOCK)); - } - - @Override - public void unlock() { - // NOTE: locks are released in the reverse order that they were acquired - while(!lockReleasers.isEmpty()) { - lockReleasers.pop().run(); + public ManagedLocks lock(final DBBroker broker, final boolean exclusive) throws LockException { + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + final ManagedDocumentLock docLock; + if(exclusive) { + docLock = lockManager.acquireDocumentWriteLock(doc.getURI()); + } else { + docLock = lockManager.acquireDocumentReadLock(doc.getURI()); } + return new ManagedLocks<>(docLock); } @Override @@ -1454,24 +1469,24 @@ public boolean directMatchAttribute(final DBBroker broker, final NodeTest test, try { final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(this, true); int status = reader.next(); - if(status != XMLStreamReader.START_ELEMENT) { + if (status != XMLStreamReader.START_ELEMENT) { return false; } + final int attrs = reader.getAttributeCount(); - for(int i = 0; i < 
attrs; i++) { + for (int i = 0; i < attrs; i++) { status = reader.next(); - if(status != XMLStreamReader.ATTRIBUTE) { + if (status != XMLStreamReader.ATTRIBUTE) { break; } + final AttrImpl attr = (AttrImpl) reader.getNode(); - if(test.matches(attr)) { + if (test.matches(attr)) { return true; } } return false; - } catch(final IOException e) { - throw new RuntimeException(e.getMessage(), e); - } catch(final XMLStreamException e) { + } catch (final IOException | XMLStreamException e) { throw new RuntimeException(e.getMessage(), e); } } diff --git a/src/org/exist/dom/persistent/SortedNodeSet.java b/src/org/exist/dom/persistent/SortedNodeSet.java index 88ddf2a6848..fb9a4a01a54 100644 --- a/src/org/exist/dom/persistent/SortedNodeSet.java +++ b/src/org/exist/dom/persistent/SortedNodeSet.java @@ -175,9 +175,8 @@ public int getLength() { return list.size(); } - //TODO : evaluate both semantics (length/item count) @Override - public int getItemCount() { + public long getItemCountLong() { return list.size(); } @@ -200,12 +199,12 @@ public NodeSetIterator iterator() { } @Override - public SequenceIterator iterate() throws XPathException { + public SequenceIterator iterate() { return new SortedNodeSetIterator(list.iterator()); } @Override - public SequenceIterator unorderedIterator() throws XPathException { + public SequenceIterator unorderedIterator() { return new SortedNodeSetIterator(list.iterator()); } diff --git a/src/org/exist/dom/persistent/StoredNode.java b/src/org/exist/dom/persistent/StoredNode.java index 5946cc82791..79327a5466e 100644 --- a/src/org/exist/dom/persistent/StoredNode.java +++ b/src/org/exist/dom/persistent/StoredNode.java @@ -282,17 +282,26 @@ public Node getPreviousSibling() { final StoredNode parent = getParentStoredNode(); if(parent != null && parent.isDirty()) { try(final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { - final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(parent, true); + final int parentLevel = 
parent.getNodeId().getTreeLevel(); final int level = nodeId.getTreeLevel(); + + final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(parent, true); + IStoredNode last = null; while(reader.hasNext()) { final int status = reader.next(); final NodeId currentId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); - if(status != XMLStreamConstants.END_ELEMENT && currentId.getTreeLevel() == level) { - if(currentId.equals(nodeId)) { - return last; + + if(status != XMLStreamConstants.END_ELEMENT) { + if (currentId.getTreeLevel() == level) { + if (currentId.equals(nodeId)) { + return last; + } + last = reader.getNode(); } - last = reader.getNode(); + } else if (currentId.getTreeLevel() == parentLevel) { + // reached the end of the parent element + break; // exit while loop } } } catch(final IOException | XMLStreamException | EXistException e) { @@ -332,15 +341,22 @@ public Node getNextSibling() { final StoredNode parent = getParentStoredNode(); if(parent != null && parent.isDirty()) { try(final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { - final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(parent, true); + final int parentLevel = parent.getNodeId().getTreeLevel(); final int level = nodeId.getTreeLevel(); + + final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(parent, true); + while(reader.hasNext()) { final int status = reader.next(); final NodeId currentId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + if(status != XMLStreamConstants.END_ELEMENT && currentId.getTreeLevel() == level && currentId.compareTo(nodeId) > 0) { return reader.getNode(); + } else if(currentId.getTreeLevel() == parentLevel) { + // reached the end of the parent element + break; // exit while loop } } } catch(final IOException | XMLStreamException | EXistException e) { @@ -356,14 +372,35 @@ public Node getNextSibling() { } protected IStoredNode getLastNode(final IStoredNode node) { + + // only 
applicable to elements with children or attributes if(!(node.hasChildNodes() || node.hasAttributes())) { return node; } - try(final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { + + try (final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) { + final int thisLevel = node.getNodeId().getTreeLevel(); + final int childLevel = thisLevel + 1; + final IEmbeddedXMLStreamReader reader = broker.getXMLStreamReader(node, true); - while(reader.hasNext()) { - reader.next(); + while (reader.hasNext()) { + final int status = reader.next(); + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + + //NOTE(AR): The order of the checks below has been carefully chosen to optimize non-empty children, which is likely the most common case! + + // skip descendants + if (otherLevel > childLevel) { + continue; + } + + if (status == XMLStreamConstants.END_ELEMENT && otherLevel == thisLevel) { + // we have finished scanning the children of the element... 
+ break; // exit-while + } } + return reader.getPreviousNode(); } catch(final IOException | XMLStreamException | EXistException e) { LOG.error("Internal error while reading child nodes: " + e.getMessage(), e); diff --git a/src/org/exist/dom/persistent/VirtualNodeSet.java b/src/org/exist/dom/persistent/VirtualNodeSet.java index 3388e9d609d..dda51e5b52b 100644 --- a/src/org/exist/dom/persistent/VirtualNodeSet.java +++ b/src/org/exist/dom/persistent/VirtualNodeSet.java @@ -24,8 +24,8 @@ import org.exist.collections.Collection; import org.exist.indexing.StructuralIndex; import org.exist.numbering.NodeId; -import org.exist.stax.EmbeddedXMLStreamReader; import org.exist.stax.ExtendedXMLStreamReader; +import org.exist.stax.EmbeddedXMLStreamReader; import org.exist.storage.DBBroker; import org.exist.storage.ElementValue; import org.exist.storage.dom.INodeIterator; @@ -472,43 +472,28 @@ private void addChildren(final NodeProxy contextNode, final NodeSet result) { try { final EmbeddedXMLStreamReader reader = (EmbeddedXMLStreamReader)broker.getXMLStreamReader(contextNode, true); int status = reader.next(); + if (status != XMLStreamConstants.START_ELEMENT) { + return; + } + int level = 0; - if(status == XMLStreamConstants.START_ELEMENT) { - while(reader.hasNext()) { - status = reader.next(); - if(axis == Constants.ATTRIBUTE_AXIS && status != XMLStreamConstants.ATTRIBUTE) { + while(reader.hasNext()) { + status = reader.next(); + if(axis == Constants.ATTRIBUTE_AXIS && status != XMLStreamConstants.ATTRIBUTE) { + break; + } + switch(status) { + case XMLStreamConstants.END_ELEMENT: + if(--level < 0) { + return; + } break; - } - switch(status) { - case XMLStreamConstants.END_ELEMENT: - if(--level < 0) { - return; - } - break; - case XMLStreamConstants.ATTRIBUTE: - if((axis == Constants.ATTRIBUTE_AXIS && level == 0) || - axis == Constants.DESCENDANT_ATTRIBUTE_AXIS) { - final AttrImpl attr = (AttrImpl) reader.getNode(); - if(test.matches(attr)) { - final NodeProxy p = new 
NodeProxy(attr); - p.deepCopyContext(contextNode); - if(useSelfAsContext && inPredicate) { - p.addContextNode(contextId, p); - } else if(inPredicate) { - p.addContextNode(contextId, contextNode); - } - result.add(p); - } - } - break; - default: - if(((axis == Constants.CHILD_AXIS && level == 0) || - axis == Constants.DESCENDANT_AXIS || - axis == Constants.DESCENDANT_SELF_AXIS) && - test.matches(reader)) { - final NodeId nodeId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); - final NodeProxy p = new NodeProxy(contextNode.getOwnerDocument(), nodeId, - reader.getNodeType(), reader.getCurrentPosition()); + case XMLStreamConstants.ATTRIBUTE: + if((axis == Constants.ATTRIBUTE_AXIS && level == 0) || + axis == Constants.DESCENDANT_ATTRIBUTE_AXIS) { + final AttrImpl attr = (AttrImpl) reader.getNode(); + if(test.matches(attr)) { + final NodeProxy p = new NodeProxy(attr); p.deepCopyContext(contextNode); if(useSelfAsContext && inPredicate) { p.addContextNode(contextId, p); @@ -517,11 +502,28 @@ private void addChildren(final NodeProxy contextNode, final NodeSet result) { } result.add(p); } - break; - } - if(status == XMLStreamConstants.START_ELEMENT) { - ++level; - } + } + break; + default: + if(((axis == Constants.CHILD_AXIS && level == 0) || + axis == Constants.DESCENDANT_AXIS || + axis == Constants.DESCENDANT_SELF_AXIS) && + test.matches(reader)) { + final NodeId nodeId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final NodeProxy p = new NodeProxy(contextNode.getOwnerDocument(), nodeId, + reader.getNodeType(), reader.getCurrentPosition()); + p.deepCopyContext(contextNode); + if(useSelfAsContext && inPredicate) { + p.addContextNode(contextId, p); + } else if(inPredicate) { + p.addContextNode(contextId, contextNode); + } + result.add(p); + } + break; + } + if(status == XMLStreamConstants.START_ELEMENT) { + ++level; } } } catch(final IOException e) { @@ -619,10 +621,10 @@ public int getItemType() { } @Override - public 
int getItemCount() { + public long getItemCountLong() { //TODO : evaluate both semantics realize(); - return realSet.getItemCount(); + return realSet.getItemCountLong(); } @Override diff --git a/src/org/exist/http/AuditTrailSessionListener.java b/src/org/exist/http/AuditTrailSessionListener.java index ed661409999..3072f64e598 100644 --- a/src/org/exist/http/AuditTrailSessionListener.java +++ b/src/org/exist/http/AuditTrailSessionListener.java @@ -22,7 +22,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Subject; import org.exist.source.DBSource; import org.exist.source.Source; @@ -92,7 +92,6 @@ private void executeXQuery(String xqueryResourcePath) { final BrokerPool pool = BrokerPool.getInstance(); final Subject sysSubject = pool.getSecurityManager().getSystemSubject(); - DocumentImpl resource = null; try (final DBBroker broker = pool.get(Optional.of(sysSubject))) { if (broker == null) { LOG.error("Unable to retrieve DBBroker for " + sysSubject.getName()); @@ -102,60 +101,58 @@ private void executeXQuery(String xqueryResourcePath) { final XmldbURI pathUri = XmldbURI.create(xqueryResourcePath); - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + try(final LockedDocument lockedResource = broker.getXMLResource(pathUri, LockMode.READ_LOCK)) { - final Source source; - if (resource != null) { - if (LOG.isTraceEnabled()) { - LOG.trace("Resource [" + xqueryResourcePath + "] exists."); + final Source source; + if (lockedResource != null) { + if (LOG.isTraceEnabled()) { + LOG.trace("Resource [" + xqueryResourcePath + "] exists."); + } + source = new DBSource(broker, (BinaryDocument) lockedResource.getDocument(), true); + } else { + LOG.error("Resource [" + xqueryResourcePath + "] does not exist."); + return; } - source = new DBSource(broker, 
(BinaryDocument) resource, true); - } else { - LOG.error("Resource [" + xqueryResourcePath + "] does not exist."); - return; - } - - final XQuery xquery = pool.getXQueryService(); - if (xquery == null) { - LOG.error("broker unable to retrieve XQueryService"); - return; - } - final XQueryPool xqpool = pool.getXQueryPool(); - CompiledXQuery compiled = xqpool.borrowCompiledXQuery(broker, source); - final XQueryContext context; - if (compiled == null) { - context = new XQueryContext(broker.getBrokerPool()); - } else { - context = compiled.getContext(); - } - context.setStaticallyKnownDocuments(new XmldbURI[]{pathUri}); - context.setBaseURI(new AnyURIValue(pathUri.toString())); - - if (compiled == null) { - compiled = xquery.compile(broker, context, source); - } else { - compiled.getContext().updateContext(context); - context.getWatchDog().reset(); - } + final XQuery xquery = pool.getXQueryService(); + if (xquery == null) { + LOG.error("broker unable to retrieve XQueryService"); + return; + } - final Properties outputProperties = new Properties(); + final XQueryPool xqpool = pool.getXQueryPool(); + CompiledXQuery compiled = xqpool.borrowCompiledXQuery(broker, source); + final XQueryContext context; + if (compiled == null) { + context = new XQueryContext(broker.getBrokerPool()); + } else { + context = compiled.getContext(); + context.prepareForReuse(); + } + context.setStaticallyKnownDocuments(new XmldbURI[]{pathUri}); + context.setBaseURI(new AnyURIValue(pathUri.toString())); + + if (compiled == null) { + compiled = xquery.compile(broker, context, source); + } else { + compiled.getContext().updateContext(context); + context.getWatchDog().reset(); + } - try { - final long startTime = System.currentTimeMillis(); - final Sequence result = xquery.execute(broker, compiled, null, outputProperties); - final long queryTime = System.currentTimeMillis() - startTime; - if (LOG.isTraceEnabled()) { - LOG.trace("XQuery execution results: " + result.toString() + " in " + queryTime + 
"ms."); + final Properties outputProperties = new Properties(); + + try { + final long startTime = System.currentTimeMillis(); + final Sequence result = xquery.execute(broker, compiled, null, outputProperties); + final long queryTime = System.currentTimeMillis() - startTime; + if (LOG.isTraceEnabled()) { + LOG.trace("XQuery execution results: " + result.toString() + " in " + queryTime + "ms."); + } + } finally { + context.runCleanupTasks(); + xqpool.returnCompiledXQuery(source, compiled); } - } finally { - context.runCleanupTasks(); - xqpool.returnCompiledXQuery(source, compiled); - } - } finally { - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); } } @@ -164,4 +161,4 @@ private void executeXQuery(String xqueryResourcePath) { } } } -} \ No newline at end of file +} diff --git a/src/org/exist/http/Descriptor.java b/src/org/exist/http/Descriptor.java index 842c6272967..b2e11fef738 100644 --- a/src/org/exist/http/Descriptor.java +++ b/src/org/exist/http/Descriptor.java @@ -331,11 +331,7 @@ public boolean requestsFiltered() { * @return The boolean value true or false indicating whether it is permissible to Log Requests */ public boolean allowRequestLogging() { - if (bufWriteReplayLog == null) { - return (false); - } else { - return (true); - } + return bufWriteReplayLog != null; } /** diff --git a/src/org/exist/http/RESTServer.java b/src/org/exist/http/RESTServer.java index 98d847f3e80..4c1002c1923 100644 --- a/src/org/exist/http/RESTServer.java +++ b/src/org/exist/http/RESTServer.java @@ -49,6 +49,7 @@ import javax.xml.stream.XMLStreamException; import javax.xml.transform.OutputKeys; import javax.xml.transform.TransformerConfigurationException; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.EXistException; @@ -57,14 +58,8 @@ import org.exist.collections.IndexInfo; import org.exist.collections.triggers.TriggerException; import org.exist.debuggee.DebuggeeFactory; -import 
org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DefaultDocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.DocumentMetadata; -import org.exist.dom.persistent.MutableDocumentSet; +import org.exist.dom.persistent.*; import org.exist.dom.QName; -import org.exist.dom.persistent.XMLUtil; - import static java.lang.invoke.MethodType.methodType; import static org.exist.http.RESTServerParameter.*; @@ -88,11 +83,9 @@ import org.exist.storage.DBBroker; import org.exist.storage.XQueryPool; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedCollectionLock; import org.exist.storage.serializers.EXistOutputKeys; import org.exist.storage.serializers.Serializer; -import org.exist.storage.serializers.Serializer.HttpContext; -import org.exist.storage.txn.TransactionException; -import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import org.exist.util.Configuration; import org.exist.util.LockException; @@ -114,9 +107,6 @@ import org.exist.xquery.XPathException; import org.exist.xquery.XQuery; import org.exist.xquery.XQueryContext; -import org.exist.xquery.functions.request.RequestModule; -import org.exist.xquery.functions.response.ResponseModule; -import org.exist.xquery.functions.session.SessionModule; import org.exist.xquery.value.*; import org.exist.xupdate.Modification; import org.exist.xupdate.XUpdateProcessor; @@ -281,7 +271,7 @@ private String getParameter(final HttpServletRequest request, final RESTServerPa * @throws PermissionDeniedException * @throws NotFoundException */ - public void doGet(final DBBroker broker, final HttpServletRequest request, + public void doGet(final DBBroker broker, final Txn transaction, final HttpServletRequest request, final HttpServletResponse response, final String path) throws BadRequestException, PermissionDeniedException, NotFoundException, IOException { @@ -399,7 +389,7 @@ public void doGet(final DBBroker broker, 
final HttpServletRequest request, if (query != null) { // query parameter specified, search method does all the rest of the work try { - search(broker, query, path, namespaces, variables, howmany, start, typed, outputProperties, + search(broker, transaction, query, path, namespaces, variables, howmany, start, typed, outputProperties, wrap, cache, request, response); } catch (final XPathException e) { @@ -412,44 +402,47 @@ public void doGet(final DBBroker broker, final HttpServletRequest request, return; } // Process the request + LockedDocument lockedDocument = null; DocumentImpl resource = null; final XmldbURI pathUri = XmldbURI.createInternal(path); try { // check if path leads to an XQuery resource final String xquery_mime_type = MimeType.XQUERY_TYPE.getName(); final String xproc_mime_type = MimeType.XPROC_TYPE.getName(); - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + lockedDocument = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + resource = lockedDocument == null ? 
null : lockedDocument.getDocument(); if (null != resource && !isExecutableType(resource)) { // return regular resource that is not an xquery and not is xproc - writeResourceAs(resource, broker, stylesheet, encoding, null, + writeResourceAs(resource, broker, transaction, stylesheet, encoding, null, outputProperties, request, response); return; } if (resource == null) { // could be request for a Collection // no document: check if path points to a collection - final Collection collection = broker.getCollection(pathUri); - if (collection != null) { - if (safeMode || !collection.getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Not allowed to read collection"); - } - // return a listing of the collection contents - try { - writeCollection(response, encoding, broker, collection); - return; - } catch (final LockException le) { - if (MimeType.XML_TYPE.getName().equals(mimeType)) { - writeXPathException(response, HttpServletResponse.SC_BAD_REQUEST, encoding, query, path, new XPathException(le.getMessage(), le)); - } else { - writeXPathExceptionHtml(response, HttpServletResponse.SC_BAD_REQUEST, encoding, query, path, new XPathException(le.getMessage(), le)); + try(final Collection collection = broker.openCollection(pathUri, LockMode.READ_LOCK)) { + if (collection != null) { + if (safeMode || !collection.getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Not allowed to read collection"); + } + // return a listing of the collection contents + try { + writeCollection(response, encoding, broker, collection); + return; + } catch (final LockException le) { + if (MimeType.XML_TYPE.getName().equals(mimeType)) { + writeXPathException(response, HttpServletResponse.SC_BAD_REQUEST, encoding, query, path, new XPathException(le.getMessage(), le)); + } else { + writeXPathExceptionHtml(response, HttpServletResponse.SC_BAD_REQUEST, encoding, query, path, 
new XPathException(le.getMessage(), le)); + } } - } - } else if (source) { - // didn't find regular resource, or user wants source - // on a possible xquery resource that was not found - throw new NotFoundException("Document " + path + " not found"); + } else if (source) { + // didn't find regular resource, or user wants source + // on a possible xquery resource that was not found + throw new NotFoundException("Document " + path + " not found"); + } } } @@ -464,7 +457,8 @@ public void doGet(final DBBroker broker, final HttpServletRequest request, break; } - resource = broker.getXMLResource(servletPath, LockMode.READ_LOCK); + lockedDocument = broker.getXMLResource(servletPath, LockMode.READ_LOCK); + resource = lockedDocument == null ? null : lockedDocument.getDocument(); if (null != resource && isExecutableType(resource)) { break; @@ -505,12 +499,12 @@ public void doGet(final DBBroker broker, final HttpServletRequest request, if (xquery_mime_type.equals(resource.getMetadata().getMimeType())) { // Show the source of the XQuery - writeResourceAs(resource, broker, stylesheet, encoding, + writeResourceAs(resource, broker, transaction, stylesheet, encoding, MimeType.TEXT_TYPE.getName(), outputProperties, request, response); } else if (xproc_mime_type.equals(resource.getMetadata().getMimeType())) { // Show the source of the XProc - writeResourceAs(resource, broker, stylesheet, encoding, + writeResourceAs(resource, broker, transaction, stylesheet, encoding, MimeType.XML_TYPE.getName(), outputProperties, request, response); } @@ -531,11 +525,11 @@ public void doGet(final DBBroker broker, final HttpServletRequest request, try { if (xquery_mime_type.equals(resource.getMetadata().getMimeType())) { // Execute the XQuery - executeXQuery(broker, resource, request, response, + executeXQuery(broker, transaction, resource, request, response, outputProperties, servletPath.toString(), pathInfo); } else if (xproc_mime_type.equals(resource.getMetadata().getMimeType())) { // Execute the 
XProc - executeXProc(broker, resource, request, response, + executeXProc(broker, transaction, resource, request, response, outputProperties, servletPath.toString(), pathInfo); } } catch (final XPathException e) { @@ -551,19 +545,19 @@ public void doGet(final DBBroker broker, final HttpServletRequest request, } } } finally { - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); + if (lockedDocument != null) { + lockedDocument.close(); } } } - public void doHead(final DBBroker broker, final HttpServletRequest request, + public void doHead(final DBBroker broker, final Txn transaction, final HttpServletRequest request, final HttpServletResponse response, final String path) throws BadRequestException, PermissionDeniedException, NotFoundException, IOException { final XmldbURI pathUri = XmldbURI.createInternal(path); - if (checkForXQueryTarget(broker, pathUri, request, response)) { + if (checkForXQueryTarget(broker, transaction, pathUri, request, response)) { return; } @@ -576,9 +570,8 @@ public void doHead(final DBBroker broker, final HttpServletRequest request, encoding = "UTF-8"; } - DocumentImpl resource = null; - try { - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + try(final LockedDocument lockedDocument = broker.getXMLResource(pathUri, LockMode.READ_LOCK)) { + final DocumentImpl resource = lockedDocument == null ? 
null : lockedDocument.getDocument(); if (resource != null) { if (!resource.getPermissions().validate(broker.getCurrentSubject(), Permission.READ)) { @@ -595,24 +588,21 @@ public void doHead(final DBBroker broker, final HttpServletRequest request, response.addHeader("Content-Length", Long.toString(resource.getContentLength())); setCreatedAndLastModifiedHeaders(response, metadata.getCreated(), metadata.getLastModified()); } else { - final Collection col = broker.getCollection(pathUri); - //no resource or collection - if (col == null) { - response.sendError(HttpServletResponse.SC_NOT_FOUND, "No resource at location: " + path); + try(final Collection col = broker.openCollection(pathUri, LockMode.READ_LOCK)) { + //no resource or collection + if (col == null) { + response.sendError(HttpServletResponse.SC_NOT_FOUND, "No resource at location: " + path); - return; - } + return; + } - if (!col.getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException( - "Permission to read resource " + path + " denied"); + if (!col.getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException( + "Permission to read resource " + path + " denied"); + } + response.setContentType(MimeType.XML_TYPE.getName() + "; charset=" + encoding); + setCreatedAndLastModifiedHeaders(response, col.getCreationTime(), col.getCreationTime()); } - response.setContentType(MimeType.XML_TYPE.getName() + "; charset=" + encoding); - setCreatedAndLastModifiedHeaders(response, col.getCreationTime(), col.getCreationTime()); - } - } finally { - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); } } } @@ -631,7 +621,7 @@ public void doHead(final DBBroker broker, final HttpServletRequest request, * @throws PermissionDeniedException * @throws NotFoundException */ - public void doPost(final DBBroker broker, final HttpServletRequest request, + public void doPost(final DBBroker broker, 
final Txn transaction, final HttpServletRequest request, final HttpServletResponse response, final String path) throws BadRequestException, PermissionDeniedException, IOException, NotFoundException { @@ -643,6 +633,7 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, final Properties outputProperties = new Properties(defaultOutputKeysProperties); final XmldbURI pathUri = XmldbURI.createInternal(path); + LockedDocument lockedDocument = null; DocumentImpl resource = null; final String encoding = outputProperties.getProperty(OutputKeys.ENCODING); @@ -652,7 +643,8 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, // if yes, the resource is loaded and the XQuery executed. final String xquery_mime_type = MimeType.XQUERY_TYPE.getName(); final String xproc_mime_type = MimeType.XPROC_TYPE.getName(); - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + lockedDocument = broker.getXMLResource(pathUri, LockMode.READ_LOCK); + resource = lockedDocument == null ? null : lockedDocument.getDocument(); XmldbURI servletPath = pathUri; @@ -665,7 +657,8 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, break; } - resource = broker.getXMLResource(servletPath, LockMode.READ_LOCK); + lockedDocument = broker.getXMLResource(servletPath, LockMode.READ_LOCK); + resource = lockedDocument == null ? null : lockedDocument.getDocument(); if (null != resource && (resource.getResourceType() == DocumentImpl.BINARY_FILE && xquery_mime_type.equals(resource.getMetadata().getMimeType()) @@ -674,10 +667,12 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, break; // found a binary file with mime-type xquery or XML file with mime-type xproc } else if (null != resource) { + // not an xquery or xproc resource. 
This means we have a path // that cannot contain an xquery or xproc object even if we keep // moving up the path, so bail out now - resource.getUpdateLock().release(LockMode.READ_LOCK); + lockedDocument.close(); + lockedDocument = null; resource = null; break; } @@ -695,11 +690,11 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, try { if (xquery_mime_type.equals(resource.getMetadata().getMimeType())) { // Execute the XQuery - executeXQuery(broker, resource, request, response, + executeXQuery(broker, transaction, resource, request, response, outputProperties, servletPath.toString(), pathInfo); } else { // Execute the XProc - executeXProc(broker, resource, request, response, + executeXProc(broker, transaction, resource, request, response, outputProperties, servletPath.toString(), pathInfo); } @@ -716,8 +711,8 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, } } finally { - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); + if (lockedDocument != null) { + lockedDocument.close(); } } @@ -742,9 +737,7 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, boolean cache = false; String query = null; - final TransactionManager transact = broker.getBrokerPool().getTransactionManager(); - - try(final Txn transaction = transact.beginTransaction()) { + try { final String content = getRequestContent(request); final NamespaceExtractor nsExtractor = new NamespaceExtractor(); final ElementImpl root = parseXML(content, nsExtractor); @@ -862,12 +855,9 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, if (query != null) { try { - search(broker, query, path, nsExtractor.getNamespaces(), variables, + search(broker, transaction, query, path, nsExtractor.getNamespaces(), variables, howmany, start, typed, outputProperties, enclose, cache, request, response); - - transact.commit(transaction); - } catch (final XPathException e) { if 
(MimeType.XML_TYPE.getName().equals(mimeType)) { writeXPathException(response, HttpServletResponse.SC_BAD_REQUEST, @@ -879,7 +869,6 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, } } else { - transact.abort(transaction); throw new BadRequestException("No query specified"); } @@ -900,37 +889,41 @@ public void doPost(final DBBroker broker, final HttpServletRequest request, } final MutableDocumentSet docs = new DefaultDocumentSet(); - final Collection collection = broker.getCollection(pathUri); - if (collection != null) { - collection.allDocs(broker, docs, true); - } else { - final DocumentImpl xupdateDoc = broker.getResource(pathUri, Permission.READ); + final boolean isCollection; + try(final Collection collection = broker.openCollection(pathUri, LockMode.READ_LOCK)) { + if (collection != null) { + isCollection = true; + collection.allDocs(broker, docs, true); + } else { + isCollection = false; + } + } + if(!isCollection) { + final DocumentImpl xupdateDoc = broker.getResource(pathUri, Permission.READ); if (xupdateDoc != null) { docs.add(xupdateDoc); - } else { broker.getAllXMLResources(docs); } } final XUpdateProcessor processor = new XUpdateProcessor(broker, docs); - final Modification modifications[] = processor.parse(new InputSource(new StringReader(content))); long mods = 0; - for (int i = 0; i < modifications.length; i++) { - mods += modifications[i].process(transaction); - broker.flush(); + try(final Reader reader = new StringReader(content)) { + final Modification modifications[] = processor.parse(new InputSource(reader)); + for (int i = 0; i < modifications.length; i++) { + mods += modifications[i].process(transaction); + broker.flush(); + } } - transact.commit(transaction); - // FD : Returns an XML doc writeXUpdateResult(response, encoding, mods); // END FD } else { - transact.abort(transaction); throw new BadRequestException("Unknown XML root element: " + root.getNodeName()); } @@ -956,7 +949,7 @@ public void doPost(final 
DBBroker broker, final HttpServletRequest request, // content type = application/x-www-form-urlencoded } else { - doGet(broker, request, response, path); + doGet(broker, transaction, request, response, path); } } @@ -1036,33 +1029,27 @@ public String getUri() { * @throws BadRequestException * @throws PermissionDeniedException */ - public void doPut(final DBBroker broker, final XmldbURI path, + public void doPut(final DBBroker broker, final Txn transaction, final XmldbURI path, final HttpServletRequest request, final HttpServletResponse response) throws BadRequestException, PermissionDeniedException, IOException, NotFoundException { - if (checkForXQueryTarget(broker, path, request, response)) { + if (checkForXQueryTarget(broker, transaction, path, request, response)) { return; } - final TransactionManager transact = broker.getBrokerPool().getTransactionManager(); - try(final Txn transaction = transact.beginTransaction()) { - // fourth, process the request + // fourth, process the request - final XmldbURI docUri = path.lastSegment(); - final XmldbURI collUri = path.removeLastSegment(); + final XmldbURI docUri = path.lastSegment(); + final XmldbURI collUri = path.removeLastSegment(); + + if (docUri == null || collUri == null) { + throw new BadRequestException("Bad path: " + path); + } + // TODO : use getOrCreateCollection() right now ? + try(final ManagedCollectionLock managedCollectionLock = broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(collUri)) { + final Collection collection = broker.getOrCreateCollection(transaction, collUri); - if (docUri == null || collUri == null) { - transact.abort(transaction); - throw new BadRequestException("Bad path: " + path); - } - // TODO : use getOrCreateCollection() right now ? 
- Collection collection = broker.getCollection(collUri); - if (collection == null) { - LOG.debug("creating collection " + collUri); - collection = broker.getOrCreateCollection(transaction, collUri); - broker.saveCollection(transaction, collection); - } MimeType mime; String contentType = request.getContentType(); String charset = null; @@ -1107,12 +1094,11 @@ public void doPut(final DBBroker broker, final XmldbURI path, } } - transact.commit(transaction); } catch (final SAXParseException e) { throw new BadRequestException("Parsing exception at " + e.getLineNumber() + "/" + e.getColumnNumber() + ": " + e.toString()); - } catch (final TriggerException e) { + } catch (final TriggerException | LockException e) { throw new PermissionDeniedException(e.getMessage()); } catch (final SAXException e) { Exception o = e.getException(); @@ -1122,49 +1108,50 @@ public void doPut(final DBBroker broker, final XmldbURI path, throw new BadRequestException("Parsing exception: " + o.getMessage()); } catch (final EXistException e) { throw new BadRequestException("Internal error: " + e.getMessage()); - } catch (final LockException e) { - throw new PermissionDeniedException(e.getMessage()); } } - public void doDelete(final DBBroker broker, final String path, final HttpServletRequest request, final HttpServletResponse response) + public void doDelete(final DBBroker broker, final Txn transaction, final String path, final HttpServletRequest request, final HttpServletResponse response) throws PermissionDeniedException, NotFoundException, IOException, BadRequestException { final XmldbURI pathURI = XmldbURI.createInternal(path); - if (checkForXQueryTarget(broker, pathURI, request, response)) { + if (checkForXQueryTarget(broker, transaction, pathURI, request, response)) { return; } - final TransactionManager transact = broker.getBrokerPool().getTransactionManager(); try { - final Collection collection = broker.getCollection(pathURI); - if (collection != null) { - // remove the collection - 
LOG.debug("removing collection " + path); - - try(final Txn txn = transact.beginTransaction()) { - broker.removeCollection(txn, collection); - transact.commit(txn); - } - response.setStatus(HttpServletResponse.SC_OK); + try(final Collection collection = broker.openCollection(pathURI, LockMode.WRITE_LOCK)) { + if (collection != null) { + // remove the collection + LOG.debug("removing collection " + path); + + broker.removeCollection(transaction, collection); + + response.setStatus(HttpServletResponse.SC_OK); - } else { - final DocumentImpl doc = (DocumentImpl) broker.getResource(pathURI, Permission.WRITE); - if (doc == null) { - //transact.abort(txn); - throw new NotFoundException("No document or collection found for path: " + path); } else { - // remove the document - LOG.debug("removing document " + path); - try(final Txn txn = transact.beginTransaction()) { - if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { - doc.getCollection().removeBinaryResource(txn, broker, pathURI.lastSegment()); + try(final LockedDocument lockedDocument = broker.getXMLResource(pathURI, LockMode.WRITE_LOCK)) { + final DocumentImpl doc = lockedDocument == null ? 
null : lockedDocument.getDocument(); + if (doc == null) { + throw new NotFoundException("No document or collection found for path: " + path); } else { - doc.getCollection().removeXMLResource(txn, broker, pathURI.lastSegment()); + if (!doc.getPermissions().validate(broker.getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Account '" + broker.getCurrentSubject().getName() + "' not allowed requested access to document '" + pathURI + "'"); + } + + // remove the document + if(LOG.isDebugEnabled()) { + LOG.debug("removing document " + path); + } + + if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { + doc.getCollection().removeBinaryResource(transaction, broker, pathURI.lastSegment()); + } else { + doc.getCollection().removeXMLResource(transaction, broker, pathURI.lastSegment()); + } + + response.setStatus(HttpServletResponse.SC_OK); } - transact.commit(txn); } - - response.setStatus(HttpServletResponse.SC_OK); } } @@ -1172,12 +1159,10 @@ public void doDelete(final DBBroker broker, final String path, final HttpServlet throw new PermissionDeniedException("Trigger failed: " + e.getMessage()); } catch (final LockException e) { throw new PermissionDeniedException("Could not acquire lock: " + e.getMessage()); - } catch (final TransactionException e) { - LOG.warn("Transaction aborted: " + e.getMessage(), e); } } - private boolean checkForXQueryTarget(final DBBroker broker, + private boolean checkForXQueryTarget(final DBBroker broker, final Txn transaction, final XmldbURI path, final HttpServletRequest request, final HttpServletResponse response) throws PermissionDeniedException, NotFoundException, IOException, BadRequestException { @@ -1189,13 +1174,15 @@ private boolean checkForXQueryTarget(final DBBroker broker, final Collection collection = broker.getCollection(path); if (collection == null) { XmldbURI servletPath = path; + LockedDocument lockedDocument = null; DocumentImpl resource = null; // work up the url path to find an // xquery 
resource while (null == resource) { // traverse up the path looking for xquery objects - resource = broker.getXMLResource(servletPath, LockMode.READ_LOCK); + lockedDocument = broker.getXMLResource(servletPath, LockMode.READ_LOCK); + resource = lockedDocument == null ? null : lockedDocument.getDocument(); if (null != resource && (resource.getResourceType() == DocumentImpl.BINARY_FILE && xqueryType.equals(resource.getMetadata().getMimeType()))) { @@ -1204,7 +1191,8 @@ private boolean checkForXQueryTarget(final DBBroker broker, // not an xquery or xproc resource. This means we have a path // that cannot contain an xquery or xproc object even if we keep // moving up the path, so bail out now - resource.getUpdateLock().release(LockMode.READ_LOCK); + lockedDocument.close(); + lockedDocument = null; resource = null; break; } @@ -1220,12 +1208,12 @@ private boolean checkForXQueryTarget(final DBBroker broker, final Properties outputProperties = new Properties(defaultOutputKeysProperties); try { // Execute the XQuery - executeXQuery(broker, resource, request, response, + executeXQuery(broker, transaction, resource, request, response, outputProperties, servletPath.toString(), pathInfo); } catch (final XPathException e) { writeXPathExceptionHtml(response, HttpServletResponse.SC_BAD_REQUEST, "UTF-8", null, path.toString(), e); } finally { - resource.getUpdateLock().release(LockMode.READ_LOCK); + lockedDocument.close(); } return true; } @@ -1258,7 +1246,7 @@ private String getRequestContent(final HttpServletRequest request) throws IOExce * * @throws XPathException */ - protected void search(final DBBroker broker, final String query, + protected void search(final DBBroker broker, final Txn transaction, final String query, final String path, final List namespaces, final ElementImpl variables, final int howmany, final int start, final boolean typed, final Properties outputProperties, @@ -1286,7 +1274,7 @@ protected void search(final DBBroker broker, final String query, final 
Sequence cached = sessionManager.get(query, sessionId); if (cached != null) { LOG.debug("Returning cached query result"); - writeResults(response, broker, cached, howmany, start, typed, outputProperties, wrap, 0, 0); + writeResults(response, broker, transaction, cached, howmany, start, typed, outputProperties, wrap, 0, 0); } else { LOG.debug("Cached query result not found. Probably timed out. Repeating query."); @@ -1310,6 +1298,7 @@ protected void search(final DBBroker broker, final String query, context = new XQueryContext(broker.getBrokerPool()); } else { context = compiled.getContext(); + context.prepareForReuse(); } context.setStaticallyKnownDocuments(new XmldbURI[]{pathUri}); @@ -1346,7 +1335,7 @@ protected void search(final DBBroker broker, final String query, } } - writeResults(response, broker, resultSequence, howmany, start, typed, outputProperties, wrap, compilationTime, executionTime); + writeResults(response, broker, transaction, resultSequence, howmany, start, typed, outputProperties, wrap, compilationTime, executionTime); } finally { context.runCleanupTasks(); @@ -1384,12 +1373,7 @@ private HttpRequestWrapper declareVariables(final XQueryContext context, final HttpRequestWrapper reqw = new HttpRequestWrapper(request, formEncoding, containerEncoding); final ResponseWrapper respw = new HttpResponseWrapper(response); - - // context.declareNamespace(RequestModule.PREFIX, - // RequestModule.NAMESPACE_URI); - context.declareVariable(RequestModule.PREFIX + ":request", reqw); - context.declareVariable(ResponseModule.PREFIX + ":response", respw); - context.declareVariable(SessionModule.PREFIX + ":session", reqw.getSession(false)); + context.setHttpContext(new XQueryContext.HttpContext(reqw, respw)); //enable EXQuery Request Module (if present) try { @@ -1476,7 +1460,7 @@ private void declareExternalAndXQJVariables(final XQueryContext context, * * @throws PermissionDeniedException */ - private void executeXQuery(final DBBroker broker, final DocumentImpl 
resource, + private void executeXQuery(final DBBroker broker, final Txn transaction, final DocumentImpl resource, final HttpServletRequest request, final HttpServletResponse response, final Properties outputProperties, final String servletPath, final String pathInfo) throws XPathException, BadRequestException, PermissionDeniedException { @@ -1492,10 +1476,10 @@ private void executeXQuery(final DBBroker broker, final DocumentImpl resource, // cache response.setHeader("X-XQuery-Cached", "false"); context = new XQueryContext(broker.getBrokerPool()); - } else { response.setHeader("X-XQuery-Cached", "true"); context = compiled.getContext(); + context.prepareForReuse(); } // TODO: don't hardcode this? @@ -1531,7 +1515,7 @@ private void executeXQuery(final DBBroker broker, final DocumentImpl resource, try { final long executeStart = System.currentTimeMillis(); final Sequence result = xquery.execute(broker, compiled, null, outputProperties); - writeResults(response, broker, result, -1, 1, false, outputProperties, wrap, compilationTime, System.currentTimeMillis() - executeStart); + writeResults(response, broker, transaction, result, -1, 1, false, outputProperties, wrap, compilationTime, System.currentTimeMillis() - executeStart); } finally { context.runCleanupTasks(); @@ -1544,7 +1528,7 @@ private void executeXQuery(final DBBroker broker, final DocumentImpl resource, * * @throws PermissionDeniedException */ - private void executeXProc(final DBBroker broker, final DocumentImpl resource, + private void executeXProc(final DBBroker broker, final Txn transaction, final DocumentImpl resource, final HttpServletRequest request, final HttpServletResponse response, final Properties outputProperties, final String servletPath, final String pathInfo) throws XPathException, BadRequestException, PermissionDeniedException { @@ -1558,6 +1542,7 @@ private void executeXProc(final DBBroker broker, final DocumentImpl resource, context = new XQueryContext(broker.getBrokerPool()); } else { 
context = compiled.getContext(); + context.prepareForReuse(); } context.declareVariable("pipeline", resource.getURI().toString()); @@ -1606,7 +1591,7 @@ private void executeXProc(final DBBroker broker, final DocumentImpl resource, try { final long executeStart = System.currentTimeMillis(); final Sequence result = xquery.execute(broker, compiled, null, outputProperties); - writeResults(response, broker, result, -1, 1, false, outputProperties, false, compilationTime, System.currentTimeMillis() - executeStart); + writeResults(response, broker, transaction, result, -1, 1, false, outputProperties, false, compilationTime, System.currentTimeMillis() - executeStart); } finally { context.runCleanupTasks(); pool.returnCompiledXQuery(source, compiled); @@ -1636,7 +1621,7 @@ public void setCreatedAndLastModifiedHeaders( // writes out a resource, uses asMimeType as the specified mime-type or if // null uses the type of the resource - private void writeResourceAs(final DocumentImpl resource, final DBBroker broker, + private void writeResourceAs(final DocumentImpl resource, final DBBroker broker, final Txn transaction, final String stylesheet, final String encoding, String asMimeType, final Properties outputProperties, final HttpServletRequest request, final HttpServletResponse response) throws BadRequestException, @@ -1717,12 +1702,9 @@ private void writeResourceAs(final DocumentImpl resource, final DBBroker broker, serializer.reset(); //setup the http context - final HttpContext httpContext = new HttpContext(); final HttpRequestWrapper reqw = new HttpRequestWrapper(request, formEncoding, containerEncoding); - httpContext.setRequest(reqw); - httpContext.setSession(reqw.getSession(false)); - serializer.setHttpContext(httpContext); - + final HttpResponseWrapper resw = new HttpResponseWrapper(response); + serializer.setHttpContext(new XQueryContext.HttpContext(reqw, resw)); // Serialize the document try { @@ -2030,7 +2012,7 @@ protected void addPermissionAttributes(final 
AttributesImpl attrs, final Permiss attrs.addAttribute("", "permissions", "permissions", "CDATA", perm.toString()); } - protected void writeResults(final HttpServletResponse response, final DBBroker broker, + protected void writeResults(final HttpServletResponse response, final DBBroker broker, final Txn transaction, final Sequence results, int howmany, final int start, final boolean typed, final Properties outputProperties, final boolean wrap, final long compilationTime, final long executionTime) throws BadRequestException { @@ -2058,7 +2040,7 @@ protected void writeResults(final HttpServletResponse response, final DBBroker b final String method = outputProperties.getProperty(SERIALIZATION_METHOD_PROPERTY, "xml"); if ("json".equals(method)) { - writeResultJSON(response, broker, results, howmany, start, outputProperties, wrap, compilationTime, executionTime); + writeResultJSON(response, broker, transaction, results, howmany, start, outputProperties, wrap, compilationTime, executionTime); } else { writeResultXML(response, broker, results, howmany, start, typed, outputProperties, wrap, compilationTime, executionTime); } @@ -2113,7 +2095,7 @@ private void writeResultXML(final HttpServletResponse response, } private void writeResultJSON(final HttpServletResponse response, - final DBBroker broker, final Sequence results, int howmany, + final DBBroker broker, final Txn transaction, final Sequence results, int howmany, int start, final Properties outputProperties, final boolean wrap, final long compilationTime, final long executionTime) throws BadRequestException { diff --git a/src/org/exist/http/jaxb/ObjectFactory.java b/src/org/exist/http/jaxb/ObjectFactory.java index ee00b36c464..3e3c55948fb 100644 --- a/src/org/exist/http/jaxb/ObjectFactory.java +++ b/src/org/exist/http/jaxb/ObjectFactory.java @@ -28,14 +28,6 @@ @XmlRegistry public class ObjectFactory { - - /** - * Create a new ObjectFactory that can be used to create new instances of schema derived classes for 
package: org.exist.http.jaxb - * - */ - public ObjectFactory() { - } - /** * Create an instance of {@link Result } * diff --git a/src/org/exist/http/run-xproc.xq b/src/org/exist/http/run-xproc.xq index e1dc8eb70d3..edb77950063 100644 --- a/src/org/exist/http/run-xproc.xq +++ b/src/org/exist/http/run-xproc.xq @@ -16,13 +16,13 @@ let $requestparams := if($autobind eq '1') then () else - {util:parse(request:get-parameter($binding,''))} + {parse-xml(request:get-parameter($binding,''))} else () let $xprocbindings := {$requestparams} - {util:parse($bindings)//binding} + {parse-xml($bindings)//binding} return -xproc:run( doc($pipeline), doc($stdin), $debug, "0", $xprocbindings, util:parse($options)) \ No newline at end of file +xproc:run( doc($pipeline), doc($stdin), $debug, "0", $xprocbindings, parse-xml($options)) diff --git a/src/org/exist/http/servlets/DigestAuthenticator.java b/src/org/exist/http/servlets/DigestAuthenticator.java index 6b6bf5b5684..66ebbfaaae9 100644 --- a/src/org/exist/http/servlets/DigestAuthenticator.java +++ b/src/org/exist/http/servlets/DigestAuthenticator.java @@ -102,7 +102,8 @@ private String createNonce(HttpServletRequest request) { private static void parseCredentials(Digest digest, String credentials) { credentials = credentials.substring("Digest ".length()); final StringBuilder current = new StringBuilder(); - String name = null, value; + String name = null; + String value; boolean inQuotedString = false; for (int i = 0; i < credentials.length(); i++) { final char ch = credentials.charAt(i); diff --git a/src/org/exist/http/servlets/EXistServlet.java b/src/org/exist/http/servlets/EXistServlet.java index 0d7ee5bd41e..3c1b36171cd 100644 --- a/src/org/exist/http/servlets/EXistServlet.java +++ b/src/org/exist/http/servlets/EXistServlet.java @@ -32,6 +32,7 @@ import org.exist.security.PermissionDeniedException; import org.exist.security.Subject; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; import 
org.exist.util.Configuration; import org.exist.validation.XmlLibraryChecker; import org.exist.xmldb.XmldbURI; @@ -137,14 +138,17 @@ protected void doPut(HttpServletRequest request, HttpServletResponse response) t return; } - try(final DBBroker broker = getPool().get(Optional.of(user))) { + try(final DBBroker broker = getPool().get(Optional.of(user)); + final Txn transaction = getPool().getTransactionManager().beginTransaction()) { final XmldbURI dbpath = XmldbURI.createInternal(path); final Collection collection = broker.getCollection(dbpath); if (collection != null) { response.sendError(400, "A PUT request is not allowed against a plain collection path."); return; } - srvREST.doPut(broker, dbpath, request, response); + srvREST.doPut(broker, transaction, dbpath, request, response); + + transaction.commit(); } catch (final BadRequestException e) { if (response.isCommitted()) { @@ -244,10 +248,10 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t } // fourth, process the request - try(final DBBroker broker = getPool().get(Optional.of(user))) { - - srvREST.doGet(broker, request, response, path); - + try(final DBBroker broker = getPool().get(Optional.of(user)); + final Txn transaction = getPool().getTransactionManager().beginTransaction()) { + srvREST.doGet(broker, transaction, request, response, path); + transaction.commit(); } catch (final BadRequestException e) { if (response.isCommitted()) { throw new ServletException(e.getMessage()); @@ -308,8 +312,10 @@ protected void doHead(HttpServletRequest request, HttpServletResponse response) } // fourth, process the request - try(final DBBroker broker = getPool().get(Optional.of(user))) { - srvREST.doHead(broker, request, response, path); + try(final DBBroker broker = getPool().get(Optional.of(user)); + final Txn transaction = getPool().getTransactionManager().beginTransaction()) { + srvREST.doHead(broker, transaction, request, response, path); + transaction.commit(); } catch (final 
BadRequestException e) { if (response.isCommitted()) { throw new ServletException(e.getMessage(), e); @@ -370,8 +376,10 @@ protected void doDelete(HttpServletRequest request, HttpServletResponse response } // fourth, process the request - try(final DBBroker broker = getPool().get(Optional.of(user))) { - srvREST.doDelete(broker, path, request, response); + try(final DBBroker broker = getPool().get(Optional.of(user)); + final Txn transaction = getPool().getTransactionManager().beginTransaction()) { + srvREST.doDelete(broker, transaction, path, request, response); + transaction.commit(); } catch (final PermissionDeniedException e) { // If the current user is the Default User and they do not have permission // then send a challenge request to prompt the client for a username/password. @@ -391,7 +399,6 @@ protected void doDelete(HttpServletRequest request, HttpServletResponse response } catch (final Throwable e) { getLog().error(e); throw new ServletException("An unknown error occurred: " + e.getMessage(), e); - } } @@ -448,8 +455,10 @@ protected void doPost(HttpServletRequest req, HttpServletResponse response) thro } // fourth, process the request - try (final DBBroker broker = getPool().get(Optional.of(user))) { - srvREST.doPost(broker, request, response, path); + try(final DBBroker broker = getPool().get(Optional.of(user)); + final Txn transaction = getPool().getTransactionManager().beginTransaction()) { + srvREST.doPost(broker, transaction, request, response, path); + transaction.commit(); } catch (final PermissionDeniedException e) { // If the current user is the Default User and they do not have permission // then send a challenge request to prompt the client for a username/password. 
diff --git a/src/org/exist/http/servlets/HttpRequestWrapper.java b/src/org/exist/http/servlets/HttpRequestWrapper.java index 260ccba7b50..24342158142 100644 --- a/src/org/exist/http/servlets/HttpRequestWrapper.java +++ b/src/org/exist/http/servlets/HttpRequestWrapper.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.Map; +import javax.servlet.RequestDispatcher; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; @@ -788,4 +789,9 @@ public boolean isFormDataParsed() { public boolean isMultipartContent() { return isMultipartContent; } + + @Override + public RequestDispatcher getRequestDispatcher(final String path) { + return servletRequest.getRequestDispatcher(path); + } } diff --git a/src/org/exist/http/servlets/HttpResponseWrapper.java b/src/org/exist/http/servlets/HttpResponseWrapper.java index 6c102c2b4ed..2faf5c1db0e 100644 --- a/src/org/exist/http/servlets/HttpResponseWrapper.java +++ b/src/org/exist/http/servlets/HttpResponseWrapper.java @@ -225,7 +225,17 @@ public void setHeader(String arg0, String arg1) { public void setIntHeader(String arg0, int arg1) { response.setIntHeader(arg0, arg1); } - + + @Override + public void sendError(final int code) throws IOException { + response.sendError(code); + } + + @Override + public void sendError(final int code, final String msg) throws IOException { + response.sendError(code, msg); + } + /** * @param arg0 */ diff --git a/src/org/exist/http/servlets/RedirectorServlet.java b/src/org/exist/http/servlets/RedirectorServlet.java index 190f79b2b88..237283e7d41 100644 --- a/src/org/exist/http/servlets/RedirectorServlet.java +++ b/src/org/exist/http/servlets/RedirectorServlet.java @@ -24,28 +24,26 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.exist.EXistException; import org.exist.Namespaces; +import org.exist.security.PermissionDeniedException; import org.exist.source.FileSource; -import 
org.exist.xmldb.EXistCollection; -import org.exist.xmldb.EXistXQueryService; +import org.exist.source.Source; +import org.exist.storage.DBBroker; +import org.exist.storage.XQueryPool; import org.exist.xmldb.XmldbURI; -import org.exist.xquery.functions.request.RequestModule; -import org.exist.xquery.functions.response.ResponseModule; -import org.exist.xquery.functions.session.SessionModule; +import org.exist.xquery.CompiledXQuery; +import org.exist.xquery.XPathException; +import org.exist.xquery.XQuery; +import org.exist.xquery.XQueryContext; +import org.exist.xquery.value.Sequence; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; -import org.xmldb.api.DatabaseManager; -import org.xmldb.api.base.Collection; -import org.xmldb.api.base.Database; -import org.xmldb.api.base.ResourceSet; -import org.xmldb.api.base.XMLDBException; -import org.xmldb.api.modules.XMLResource; import javax.servlet.RequestDispatcher; import javax.servlet.ServletConfig; import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; @@ -65,26 +63,26 @@ * <exist:add-parameter name="new-param" value="new-param-value"/> * </exist:dispatch> * - * + *

* The element should have one of three attributes: path, servlet-name or * redirect. - * + *

* If the servlet-name attribute is present, the request will be forwarded to the named servlet * (name as specified in web.xml). Alternatively, path can point to an arbitrary resource. It can be either absolute or relative. * Relative paths are resolved relative to the original request. - * + *

* The request is forwarded via {@link javax.servlet.RequestDispatcher#forward(javax.servlet.ServletRequest, javax.servlet.ServletResponse)}. * Contrary to HTTP forwarding, there is no additional roundtrip to the client. It all happens on * the server. The client will not notice the redirect. - * + *

* When forwarding to other servlets, the fields in {@link javax.servlet.http.HttpServletRequest} will be * updated to point to the new, redirected URI. However, the original request URI is stored in the * request attribute org.exist.forward.request-uri. - * + *

* If present, the "redirect" attribute causes the server to send a redirect request to the client, which will usually respond * with a new request to the redirected location. Note that this is quite different from a forwarding via RequestDispatcher, * which is completely transparent to the client. - * + *

* RedirectorServlet takes a single parameter in web.xml: "xquery". This parameter should point to an * XQuery script. It should be relative to the current web context. * @@ -106,60 +104,53 @@ * */ @Deprecated -public class RedirectorServlet extends HttpServlet { +public class RedirectorServlet extends AbstractExistHttpServlet { - private static final long serialVersionUID = 853971301553787943L; + private static final long serialVersionUID = 853971301553787943L; - private static final Logger LOG = LogManager.getLogger(RedirectorServlet.class); + private static final Logger LOG = LogManager.getLogger(RedirectorServlet.class); public final static String DEFAULT_USER = "guest"; public final static String DEFAULT_PASS = "guest"; public final static XmldbURI DEFAULT_URI = XmldbURI.EMBEDDED_SERVER_URI.append(XmldbURI.ROOT_COLLECTION_URI); - public final static String DRIVER = "org.exist.xmldb.DatabaseImpl"; - + private String user = null; private String password = null; private XmldbURI collectionURI = null; private String query = null; @Override - public void init(ServletConfig config) throws ServletException { - super.init(config); - + public void init(final ServletConfig config) throws ServletException { query = config.getInitParameter("xquery"); - if (query == null) - {throw new ServletException("RedirectorServlet requires a parameter 'xquery'.");} + if (query == null) { + throw new ServletException("RedirectorServlet requires a parameter 'xquery'."); + } user = config.getInitParameter("user"); - if(user == null) - {user = DEFAULT_USER;} + if (user == null) { + user = DEFAULT_USER; + } password = config.getInitParameter("password"); - if(password == null) - {password = DEFAULT_PASS;} + if (password == null) { + password = DEFAULT_PASS; + } final String confCollectionURI = config.getInitParameter("uri"); - if(confCollectionURI == null) { + if (confCollectionURI == null) { collectionURI = DEFAULT_URI; } else { try { collectionURI = 
XmldbURI.xmldbUriFor(confCollectionURI); } catch (final URISyntaxException e) { - throw new ServletException("Invalid XmldbURI for parameter 'uri': "+e.getMessage(),e); + throw new ServletException("Invalid XmldbURI for parameter 'uri': " + e.getMessage(), e); } } - try { - final Class driver = Class.forName(DRIVER); - final Database database = (Database)driver.newInstance(); - database.setProperty("create-database", "true"); - DatabaseManager.registerDatabase(database); - } catch(final Exception e) { - final String errorMessage="Failed to initialize database driver"; - LOG.error(errorMessage,e); - throw new ServletException(errorMessage+": " + e.getMessage(), e); - } + super.init(config); } @Override - protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { + protected void service(final HttpServletRequest req, final HttpServletResponse res) throws ServletException, IOException { + final RequestWrapper request = new HttpRequestWrapper(req); + final ResponseWrapper response = new HttpResponseWrapper(res); if (request.getCharacterEncoding() == null) try { request.setCharacterEncoding("UTF-8"); @@ -168,55 +159,47 @@ protected void service(HttpServletRequest request, HttpServletResponse response) // Try to find the XQuery final String qpath = getServletContext().getRealPath(query); final Path p = Paths.get(qpath); - if(!(Files.isReadable(p) && Files.isRegularFile(p))) { + if (!(Files.isReadable(p) && Files.isRegularFile(p))) { throw new ServletException("Cannot read XQuery source from " + p.toAbsolutePath()); } final FileSource source = new FileSource(p, true); try { // Prepare and execute the XQuery - final Collection collection = DatabaseManager.getCollection(collectionURI.toString(), user, password); - final EXistXQueryService service = (EXistXQueryService) collection.getService("XQueryService", "1.0"); - if(!((EXistCollection)collection).isRemoteCollection()) { - 
service.declareVariable(RequestModule.PREFIX + ":request", new HttpRequestWrapper(request, "UTF-8", "UTF-8")); - service.declareVariable(ResponseModule.PREFIX + ":response", new HttpResponseWrapper(response)); - service.declareVariable(SessionModule.PREFIX + ":session", new HttpSessionWrapper(request.getSession( false ))); - } - final ResourceSet result = service.execute(source); + final Sequence result = executeQuery(source, request, response); String redirectTo = null; String servletName = null; String path = null; - RequestWrapper modifiedRequest = null; + ModifiableRequestWrapper modifiedRequest = null; // parse the query result element - if (result.getSize() == 1) { - final XMLResource resource = (XMLResource) result.getResource(0); - Node node = resource.getContentAsDOM(); - if (node.getNodeType() == Node.DOCUMENT_NODE) - {node = ((Document) node).getDocumentElement();} + if (result != null && result.getItemCount() == 1) { + Node node = (Node)result.itemAt(0); + if (node.getNodeType() == Node.DOCUMENT_NODE) { + node = ((Document) node).getDocumentElement(); + } if (node.getNodeType() != Node.ELEMENT_NODE) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, - "Redirect XQuery should return an XML element. Received: " + resource.getContent()); + "Redirect XQuery should return an XML element. Received: " + node); return; } Element elem = (Element) node; final String ns = elem.getNamespaceURI(); - if (ns == null || ((!Namespaces.EXIST_NS.equals(ns)) && "dispatch".equals(elem.getLocalName()))) - { + if (ns == null || ((!Namespaces.EXIST_NS.equals(ns)) && "dispatch".equals(elem.getLocalName()))) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, - "Redirect XQuery should return an element . Received: " + resource.getContent()); + "Redirect XQuery should return an element . 
Received: " + node); return; } - if (elem.hasAttribute("path")) - {path = elem.getAttribute("path");} - else if (elem.hasAttribute("servlet-name")) - {servletName = elem.getAttribute("servlet-name");} - else if (elem.hasAttribute("redirect")) - {redirectTo = elem.getAttribute("redirect");} - else { + if (elem.hasAttribute("path")) { + path = elem.getAttribute("path"); + } else if (elem.hasAttribute("servlet-name")) { + servletName = elem.getAttribute("servlet-name"); + } else if (elem.hasAttribute("redirect")) { + redirectTo = elem.getAttribute("redirect"); + } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST, - "Element should either provide an attribute 'path' or 'servlet-name'. Received: " + - resource.getContent()); + "Element should either provide an attribute 'path' or 'servlet-name'. Received: " + + node); return; } @@ -228,8 +211,9 @@ else if (elem.hasAttribute("redirect")) if (node.getNodeType() == Node.ELEMENT_NODE && nsUri != null && Namespaces.EXIST_NS.equals(nsUri)) { elem = (Element) node; if ("add-parameter".equals(elem.getLocalName())) { - if (modifiedRequest == null) - {modifiedRequest = new RequestWrapper(request);} + if (modifiedRequest == null) { + modifiedRequest = new ModifiableRequestWrapper(req); + } modifiedRequest.addParameter(elem.getAttribute("name"), elem.getAttribute("value")); } } @@ -246,13 +230,14 @@ else if (elem.hasAttribute("redirect")) // Get a RequestDispatcher, either from the servlet context or the request RequestDispatcher dispatcher; - if (servletName != null && servletName.length() > 0) - {dispatcher = getServletContext().getNamedDispatcher(servletName);} - else { + if (servletName != null && servletName.length() > 0) { + dispatcher = getServletContext().getNamedDispatcher(servletName); + } else { LOG.debug("Dispatching to " + path); dispatcher = getServletContext().getRequestDispatcher(path); - if (dispatcher == null) - {dispatcher = request.getRequestDispatcher(path);} + if (dispatcher == null) { + dispatcher 
= request.getRequestDispatcher(path); + } } if (dispatcher == null) { response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, @@ -260,24 +245,66 @@ else if (elem.hasAttribute("redirect")) return; } - if (modifiedRequest != null) - {request = modifiedRequest;} - // store the original request URI to org.exist.forward.request-uri - request.setAttribute("org.exist.forward.request-uri", request.getRequestURI()); - request.setAttribute("org.exist.forward.servlet-path", request.getServletPath()); + if (modifiedRequest != null) { + // store the original request URI to org.exist.forward.request-uri + modifiedRequest.setAttribute("org.exist.forward.request-uri", modifiedRequest.getRequestURI()); + modifiedRequest.setAttribute("org.exist.forward.servlet-path", modifiedRequest.getServletPath()); + + // finally, execute the forward + dispatcher.forward(modifiedRequest, res); + } else { + // store the original request URI to org.exist.forward.request-uri + request.setAttribute("org.exist.forward.request-uri", request.getRequestURI()); + request.setAttribute("org.exist.forward.servlet-path", request.getServletPath()); + + // finally, execute the forward + dispatcher.forward(req, res); + } + } catch (final XPathException | EXistException | PermissionDeniedException | IOException e) { + throw new ServletException("An error occurred while executing the RedirectorServlet XQuery: " + e.getMessage(), e); + } + } + + private Sequence executeQuery(final Source source, final RequestWrapper request, final ResponseWrapper response) throws EXistException, XPathException, PermissionDeniedException, IOException { + final XQuery xquery = getPool().getXQueryService(); + final XQueryPool pool = getPool().getXQueryPool(); - // finally, execute the forward - dispatcher.forward(request, response); - } catch (final XMLDBException e) { - throw new ServletException("An error occurred while initializing RedirectorServlet: " + e.getMessage(), e); + try (final DBBroker broker = 
getPool().getBroker()) { + + final XQueryContext context; + CompiledXQuery compiled = pool.borrowCompiledXQuery(broker, source); + if (compiled == null) { + // special header to indicate that the query is not returned from + // cache + response.setHeader("X-XQuery-Cached", "false"); + context = new XQueryContext(getPool()); + context.setModuleLoadPath(XmldbURI.EMBEDDED_SERVER_URI.toString()); + compiled = xquery.compile(broker, context, source); + } else { + response.setHeader("X-XQuery-Cached", "true"); + context = compiled.getContext(); + context.prepareForReuse(); + } + + try { + return xquery.execute(broker, compiled, null, new Properties()); + } finally { + context.runCleanupTasks(); + pool.returnCompiledXQuery(source, compiled); + } } } - private static class RequestWrapper extends javax.servlet.http.HttpServletRequestWrapper { + @Override + public Logger getLog() { + return LOG; + } + + private static class ModifiableRequestWrapper extends javax.servlet.http.HttpServletRequestWrapper { - Map addedParams = new HashMap<>(); + private final Map addedParams = new HashMap<>(); - private RequestWrapper(HttpServletRequest request) { + private ModifiableRequestWrapper(final HttpServletRequest request) { super(request); // copy parameters for (final Enumeration e = request.getParameterNames(); e.hasMoreElements(); ) { @@ -287,7 +314,7 @@ private RequestWrapper(HttpServletRequest request) { } } - public void addParameter(String name, String value) { + public void addParameter(final String name, final String value) { addedParams.put(name, new String[] { value }); } @@ -321,4 +348,4 @@ public String[] getParameterValues(final String s) { return value; } } -} \ No newline at end of file +} diff --git a/src/org/exist/http/servlets/RequestWrapper.java b/src/org/exist/http/servlets/RequestWrapper.java index 93ea6aa504e..4f0c0e63d94 100644 --- a/src/org/exist/http/servlets/RequestWrapper.java +++ b/src/org/exist/http/servlets/RequestWrapper.java @@ -30,6 +30,7 @@ import 
java.util.Enumeration; import java.util.List; +import javax.servlet.RequestDispatcher; import javax.servlet.http.Cookie; /** @@ -123,5 +124,7 @@ public interface RequestWrapper { void setCharacterEncoding(String arg0) throws UnsupportedEncodingException; - boolean isMultipartContent(); + boolean isMultipartContent(); + + RequestDispatcher getRequestDispatcher(final String path); } diff --git a/src/org/exist/http/servlets/ResponseWrapper.java b/src/org/exist/http/servlets/ResponseWrapper.java index e47fd569467..e75d09a3157 100644 --- a/src/org/exist/http/servlets/ResponseWrapper.java +++ b/src/org/exist/http/servlets/ResponseWrapper.java @@ -125,6 +125,11 @@ public interface ResponseWrapper { * @param arg1 */ public void setIntHeader(String arg0, int arg1); + + void sendError(final int code) throws IOException; + + void sendError(final int code, final String msg) throws IOException; + /** * @param arg0 */ diff --git a/src/org/exist/http/servlets/XQueryServlet.java b/src/org/exist/http/servlets/XQueryServlet.java index 6250a7bf0b1..ce30b47f17c 100644 --- a/src/org/exist/http/servlets/XQueryServlet.java +++ b/src/org/exist/http/servlets/XQueryServlet.java @@ -347,6 +347,12 @@ protected void process(HttpServletRequest request, HttpServletResponse response) } else if (urlAttrib != null) { try(final DBBroker broker = getPool().get(Optional.ofNullable(user))) { source = SourceFactory.getSource(broker, moduleLoadPath, urlAttrib.toString(), true); + if (source == null) { + final String msg = "Could not read source: context=" + moduleLoadPath + ", location=" + urlAttrib.toString(); + getLog().error(msg); + response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + sendError(output, "Error", msg); + } } catch (final Exception e) { getLog().error(e.getMessage(), e); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); @@ -445,14 +451,15 @@ protected void process(HttpServletRequest request, HttpServletResponse response) } else { context = 
query.getContext(); context.setModuleLoadPath(moduleLoadPath); + context.prepareForReuse(); } final Properties outputProperties = new Properties(); outputProperties.put("base-uri", collectionURI.toString()); - - context.declareVariable(RequestModule.PREFIX + ":request", new HttpRequestWrapper(request, getFormEncoding(), getContainerEncoding())); - context.declareVariable(ResponseModule.PREFIX + ":response", new HttpResponseWrapper(response)); - context.declareVariable(SessionModule.PREFIX + ":session", ( session != null ? new HttpSessionWrapper( session ) : null ) ); + + final HttpRequestWrapper reqw = new HttpRequestWrapper(request, getFormEncoding(), getContainerEncoding()); + final ResponseWrapper respw = new HttpResponseWrapper(response); + context.setHttpContext(new XQueryContext.HttpContext(reqw, respw, session != null ? new HttpSessionWrapper( session ) : null)); final String timeoutOpt = (String) request.getAttribute(ATTR_TIMEOUT); if (timeoutOpt != null) { diff --git a/src/org/exist/http/urlrewrite/RewriteConfig.java b/src/org/exist/http/urlrewrite/RewriteConfig.java index 9967f4c5bc0..9dde38f4576 100644 --- a/src/org/exist/http/urlrewrite/RewriteConfig.java +++ b/src/org/exist/http/urlrewrite/RewriteConfig.java @@ -4,6 +4,7 @@ import org.apache.logging.log4j.Logger; import org.exist.Namespaces; import org.exist.EXistException; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import org.exist.dom.persistent.DocumentImpl; import org.exist.xmldb.XmldbURI; @@ -148,16 +149,11 @@ private void configure(final String controllerConfig) throws ServletException { if (controllerConfig.startsWith(XmldbURI.XMLDB_URI_PREFIX)) { try (final DBBroker broker = urlRewrite.getBrokerPool().get(Optional.ofNullable(urlRewrite.getDefaultUser()))) { - DocumentImpl doc = null; - try { - doc = broker.getXMLResource(XmldbURI.create(controllerConfig), LockMode.READ_LOCK); + try (final LockedDocument lockedDocument = 
broker.getXMLResource(XmldbURI.create(controllerConfig), LockMode.READ_LOCK);) { + final DocumentImpl doc = lockedDocument == null ? null : lockedDocument.getDocument(); if (doc != null) { parse(doc); } - } finally { - if (doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } } } catch (final EXistException | PermissionDeniedException e) { throw new ServletException("Failed to parse controller.xml: " + e.getMessage(), e); diff --git a/src/org/exist/http/urlrewrite/XQueryURLRewrite.java b/src/org/exist/http/urlrewrite/XQueryURLRewrite.java index 22fcaff6972..92fd7d0e6ac 100644 --- a/src/org/exist/http/urlrewrite/XQueryURLRewrite.java +++ b/src/org/exist/http/urlrewrite/XQueryURLRewrite.java @@ -27,6 +27,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.exist.dom.persistent.LockedDocument; import org.exist.http.servlets.Authenticator; import org.exist.http.servlets.BasicAuthenticator; import org.exist.security.internal.web.HttpAccount; @@ -657,7 +658,9 @@ private Sequence runQuery(final DBBroker broker, final RequestWrapper request, f queryContext = new XQueryContext(broker.getBrokerPool()); } else { queryContext = compiled.getContext(); + queryContext.prepareForReuse(); } + // Find correct module load path queryContext.setModuleLoadPath(sourceInfo.moduleLoadPath); declareVariables(queryContext, sourceInfo, staticRewrite, basePath, request, response); @@ -725,20 +728,23 @@ private SourceInfo findSource(final HttpServletRequest request, final DBBroker b private @Nullable SourceInfo findSourceFromDb(final DBBroker broker, final String basePath, final String path, final String[] components) { - DocumentImpl controllerDoc = null; + LockedDocument lockedControllerDoc = null; try { final XmldbURI locationUri = XmldbURI.xmldbUriFor(basePath); XmldbURI resourceUri = locationUri; for(final String component : components) { resourceUri = resourceUri.append(component); } - controllerDoc = 
findDbControllerXql(broker, locationUri, resourceUri); - if (controllerDoc == null) { + lockedControllerDoc = findDbControllerXql(broker, locationUri, resourceUri); + + if (lockedControllerDoc == null) { LOG.warn("XQueryURLRewrite controller could not be found for path: " + path); return null; } + final DocumentImpl controllerDoc = lockedControllerDoc.getDocument(); + if (LOG.isTraceEnabled()) { LOG.trace("Found controller file: " + controllerDoc.getURI()); } @@ -756,8 +762,8 @@ SourceInfo findSourceFromDb(final DBBroker broker, final String basePath, final LOG.warn("Bad URI for base path: " + e.getMessage(), e); return null; } finally { - if (controllerDoc != null) { - controllerDoc.getUpdateLock().release(LockMode.READ_LOCK); + if (lockedControllerDoc != null) { + lockedControllerDoc.close(); } } } @@ -781,18 +787,18 @@ SourceInfo findSourceFromDb(final DBBroker broker, final String basePath, final */ //@tailrec private @Nullable - DocumentImpl findDbControllerXql(final DBBroker broker, final XmldbURI collectionUri, final XmldbURI resourceUri) { + LockedDocument findDbControllerXql(final DBBroker broker, final XmldbURI collectionUri, final XmldbURI resourceUri) { if (collectionUri.compareTo(resourceUri) > 0) { return null; } - Collection collection = null; - try { - collection = broker.openCollection(resourceUri, LockMode.READ_LOCK); + try (final Collection collection = broker.openCollection(resourceUri, LockMode.READ_LOCK)) { if (collection != null) { - final DocumentImpl doc = collection.getDocumentWithLock(broker, XQUERY_CONTROLLER_URI, LockMode.READ_LOCK); - if (doc != null) { - return doc; + final LockedDocument lockedDoc = collection.getDocumentWithLock(broker, XQUERY_CONTROLLER_URI, LockMode.READ_LOCK); + if (lockedDoc != null) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + // collection lock will be released by the try-with-resources before the locked document is returned by this function + return lockedDoc; } } } 
catch (final PermissionDeniedException e) { @@ -805,10 +811,6 @@ DocumentImpl findDbControllerXql(final DBBroker broker, final XmldbURI collectio LOG.debug("LockException while scanning for XQueryURLRewrite controllers: " + e.getMessage(), e); } return null; - } finally { - if (collection != null) { - collection.getLock().release(LockMode.READ_LOCK); - } } if(resourceUri.numSegments() == 2) { @@ -872,12 +874,13 @@ private SourceInfo getSource(final DBBroker broker, final String moduleLoadPath) // Is the module source stored in the database? try { final XmldbURI locationUri = XmldbURI.xmldbUriFor(query); - DocumentImpl sourceDoc = null; - try { - sourceDoc = broker.getXMLResource(locationUri.toCollectionPathURI(), LockMode.READ_LOCK); - if (sourceDoc == null) { + + try (final LockedDocument lockedSourceDoc = broker.getXMLResource(locationUri.toCollectionPathURI(), LockMode.READ_LOCK);) { + if (lockedSourceDoc == null) { throw new ServletException("XQuery resource: " + query + " not found in database"); } + + final DocumentImpl sourceDoc = lockedSourceDoc.getDocument(); if (sourceDoc.getResourceType() != DocumentImpl.BINARY_FILE || !"application/xquery".equals(sourceDoc.getMetadata().getMimeType())) { throw new ServletException("XQuery resource: " + query + " is not an XQuery or " + @@ -887,10 +890,6 @@ private SourceInfo getSource(final DBBroker broker, final String moduleLoadPath) locationUri.toString()); } catch (final PermissionDeniedException e) { throw new ServletException("permission denied to read module source from " + query); - } finally { - if (sourceDoc != null) { - sourceDoc.getUpdateLock().release(LockMode.READ_LOCK); - } } } catch (final URISyntaxException e) { throw new ServletException(e.getMessage(), e); @@ -912,9 +911,7 @@ private void declareVariables(final XQueryContext context, final SourceInfo sour final HttpResponseWrapper respw = new HttpResponseWrapper(response); // context.declareNamespace(RequestModule.PREFIX, // 
RequestModule.NAMESPACE_URI); - context.declareVariable(RequestModule.PREFIX + ":request", reqw); - context.declareVariable(ResponseModule.PREFIX + ":response", respw); - context.declareVariable(SessionModule.PREFIX + ":session", reqw.getSession(false)); + context.setHttpContext(new XQueryContext.HttpContext(reqw, respw)); context.declareVariable("exist:controller", sourceInfo.controllerPath); request.setAttribute("$exist:controller", sourceInfo.controllerPath); diff --git a/src/org/exist/indexing/AbstractMatchListener.java b/src/org/exist/indexing/AbstractMatchListener.java index eda3e628598..4bfca9a8392 100644 --- a/src/org/exist/indexing/AbstractMatchListener.java +++ b/src/org/exist/indexing/AbstractMatchListener.java @@ -49,7 +49,8 @@ public Receiver getNextInChain() { @Override public Receiver getLastInChain() { - Receiver last = this, next = getNextInChain(); + Receiver last = this; + Receiver next = getNextInChain(); while (next != null) { last = next; next = ((MatchListener)next).getNextInChain(); diff --git a/src/org/exist/indexing/IndexController.java b/src/org/exist/indexing/IndexController.java index 51ea827a06c..bfe65fcc8dc 100644 --- a/src/org/exist/indexing/IndexController.java +++ b/src/org/exist/indexing/IndexController.java @@ -269,7 +269,8 @@ public IStoredNode getReindexRoot(final IStoredNode node, final NodePath path, f * @return the top-most root node to be re-indexed */ public IStoredNode getReindexRoot(final IStoredNode node, final NodePath path, final boolean insert, final boolean includeSelf) { - IStoredNode next, top = null; + IStoredNode next; + IStoredNode top = null; for (final IndexWorker indexWorker : indexWorkers.values()) { next = indexWorker.getReindexRoot(node, path, insert, includeSelf); if (next != null && (top == null || top.getNodeId().isDescendantOf(next.getNodeId()))) { diff --git a/src/org/exist/interpreter/Context.java b/src/org/exist/interpreter/Context.java index 324903a00b1..39de31e4eb2 100644 --- 
a/src/org/exist/interpreter/Context.java +++ b/src/org/exist/interpreter/Context.java @@ -1,3 +1,22 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ package org.exist.interpreter; import java.io.IOException; @@ -7,6 +26,7 @@ import java.util.TimeZone; import java.util.function.Predicate; +import javax.annotation.Nullable; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.stream.XMLStreamException; @@ -45,769 +65,778 @@ public interface Context { - /** - * Returns true if this context has a parent context (means it is a module context). - * - * @return False. - */ - public boolean hasParent(); - - public XQueryContext getRootContext(); - - public XQueryContext copyContext(); - - /** - * Update the current dynamic context using the properties of another context. This is needed by {@link org.exist.xquery.functions.util.Eval}. - * - * @param from - */ - public void updateContext(XQueryContext from); - - /** - * Prepares the current context before xquery execution. - */ - public void prepareForExecution(); - - /** - * Is profiling enabled? - * - * @return true if profiling is enabled for this context. 
- */ - public boolean isProfilingEnabled(); - - public boolean isProfilingEnabled(int verbosity); - - /** - * Returns the {@link Profiler} instance of this context if profiling is enabled. - * - * @return the profiler instance. - */ - public Profiler getProfiler(); - - /** - * Called from the XQuery compiler to set the root expression for this context. - * - * @param expr - */ - public void setRootExpression(Expression expr); - - /** - * Returns the root expression of the XQuery associated with this context. - * - * @return root expression - */ - public Expression getRootExpression(); - - /** - * Returns the number of expression objects in the internal representation of the query. Used to estimate the size of the query. - * - * @return number of expression objects - */ - public int getExpressionCount(); - - public void setSource(Source source); - - public Source getSource(); - - /** - * Declare a user-defined static prefix/namespace mapping. - * - *

eXist internally keeps a table containing all prefix/namespace mappings it found in documents, which have been previously stored into the - * database. These default mappings need not to be declared explicitely.

- * - * @param prefix - * @param uri - * - * @throws XPathException - */ - public void declareNamespace(String prefix, String uri) throws XPathException; - - public void declareNamespaces(Map namespaceMap); - - /** - * Removes the namespace URI from the prefix/namespace mappings table. - * - * @param uri - */ - public void removeNamespace(String uri); - - /** - * Declare an in-scope namespace. This is called during query execution. - * - * @param prefix - * @param uri - */ - public void declareInScopeNamespace(String prefix, String uri); - - public String getInScopeNamespace(String prefix); - - public String getInScopePrefix(String uri); - - public String getInheritedNamespace(String prefix); - - public String getInheritedPrefix(String uri); - - /** - * Return the namespace URI mapped to the registered prefix or null if the prefix is not registered. - * - * @param prefix - * - * @return namespace - */ - public String getURIForPrefix(String prefix); - - /** - * Get URI Prefix - * - * @param uri - * - * @return the prefix mapped to the registered URI or null if the URI is not registered. - */ - public String getPrefixForURI(String uri); - - /** - * Returns the current default function namespace. - * - * @return current default function namespace - */ - public String getDefaultFunctionNamespace(); - - /** - * Set the default function namespace. By default, this points to the namespace for XPath built-in functions. - * - * @param uri - * - * @throws XPathException - */ - public void setDefaultFunctionNamespace(String uri) throws XPathException; - - /** - * Returns the current default element namespace. - * - * @return current default element namespace schema - * - * @throws XPathException - */ - public String getDefaultElementNamespaceSchema() throws XPathException; - - /** - * Set the default element namespace. By default, this points to the empty uri. 
- * - * @param uri - * - * @throws XPathException - */ - public void setDefaultElementNamespaceSchema(String uri) throws XPathException; - - /** - * Returns the current default element namespace. - * - * @return current default element namespace - * - * @throws XPathException - */ - public String getDefaultElementNamespace() throws XPathException; - - /** - * Set the default element namespace. By default, this points to the empty uri. - * - * @param uri a String value - * @param schema a String value - * - * @exception XPathException if an error occurs - */ - public void setDefaultElementNamespace(String uri, String schema) throws XPathException; - - /** - * Set the default collation to be used by all operators and functions on strings. Throws an exception if the collation is unknown or cannot be - * instantiated. - * - * @param uri - * - * @throws XPathException - */ - public void setDefaultCollation(String uri) throws XPathException; - - public String getDefaultCollation(); - - public Collator getCollator(String uri) throws XPathException; - - public Collator getDefaultCollator(); - - /** - * Set the set of statically known documents for the current execution context. These documents will be processed if no explicit document set has - * been set for the current expression with fn:doc() or fn:collection(). - * - * @param docs - */ - public void setStaticallyKnownDocuments(XmldbURI[] docs); - - public void setStaticallyKnownDocuments(DocumentSet set); - - //TODO : not sure how these 2 options might/have to be related - public void setCalendar(XMLGregorianCalendar newCalendar); - - public void setTimeZone(TimeZone newTimeZone); - - public XMLGregorianCalendar getCalendar(); - - public TimeZone getImplicitTimeZone(); - - /** - * Get statically known documents - * - * @return set of statically known documents. 
- * - * @throws XPathException - */ - public DocumentSet getStaticallyKnownDocuments() throws XPathException; - - public ExtendedXMLStreamReader getXMLStreamReader(NodeValue nv) throws XMLStreamException, IOException; - - public void setProtectedDocs(LockedDocumentMap map); - - public LockedDocumentMap getProtectedDocs(); - - public boolean inProtectedMode(); - - /** - * Should loaded documents be locked? - * - *

see #setLockDocumentsOnLoad(boolean)

- */ - public boolean lockDocumentsOnLoad(); - - public void addLockedDocument(DocumentImpl doc); - - public void setShared(boolean shared); - - public boolean isShared(); - - public void addModifiedDoc(DocumentImpl document); - - public void reset(); - - /** - * Prepare this XQueryContext to be reused. This should be called when adding an XQuery to the cache. - * - * @param keepGlobals - */ - public void reset(boolean keepGlobals); - - /** - * Returns true if whitespace between constructed element nodes should be stripped by default. - */ - public boolean stripWhitespace(); - - public void setStripWhitespace(boolean strip); - - /** - * Returns true if namespaces for constructed element and document nodes should be preserved on copy by default. - */ - public boolean preserveNamespaces(); - - /** - * The method setPreserveNamespaces. - * - * @param preserve a boolean value - */ - public void setPreserveNamespaces(final boolean preserve); - - /** - * Returns true if namespaces for constructed element and document nodes should be inherited on copy by default. - */ - public boolean inheritNamespaces(); - - /** - * The method setInheritNamespaces. - * - * @param inherit a boolean value - */ - public void setInheritNamespaces(final boolean inherit); - - /** - * Returns true if order empty is set to gretest, otherwise false for order empty is least. - */ - public boolean orderEmptyGreatest(); - - /** - * The method setOrderEmptyGreatest. - * - * @param order a boolean value - */ - public void setOrderEmptyGreatest(final boolean order); - - /** - * Get modules - * - * @return iterator over all modules imported into this context - */ - public Iterator getModules(); - - /** - * Get root modules - * - * @return iterator over all modules registered in the entire context tree - */ - public Iterator getRootModules(); - - public Iterator getAllModules(); - - /** - * Get the built-in module registered for the given namespace URI. 
- * - * @param namespaceURI - * - * @return built-in module - */ - public Module getModule(String namespaceURI); - - public Module getRootModule(String namespaceURI); - - public void setModule(String namespaceURI, Module module); - - /** - * For compiled expressions: check if the source of any module imported by the current query has changed since compilation. - */ - public boolean checkModulesValid(); - - public void analyzeAndOptimizeIfModulesChanged(Expression expr) throws XPathException; - - /** - * Load a built-in module from the given class name and assign it to the namespace URI. The specified class should be a subclass of {@link - * Module}. The method will try to instantiate the class. If the class is not found or an exception is thrown, the method will silently fail. The - * namespace URI has to be equal to the namespace URI declared by the module class. Otherwise, the module is not loaded. - * - * @param namespaceURI - * @param moduleClass - */ - public Module loadBuiltInModule(String namespaceURI, String moduleClass); - - /** - * Declare a user-defined function. All user-defined functions are kept in a single hash map. - * - * @param function - * - * @throws XPathException - */ - public void declareFunction(UserDefinedFunction function) throws XPathException; - - /** - * Resolve a user-defined function. - * - * @param name - * @param argCount - * - * @return user-defined function - * - * @throws XPathException - */ - public UserDefinedFunction resolveFunction(QName name, int argCount) throws XPathException; - - public Iterator getSignaturesForFunction(QName name); - - public Iterator localFunctions(); - - /** - * Declare a local variable. This is called by variable binding expressions like "let" and "for". - * - * @param var - * - * @throws XPathException - */ - public LocalVariable declareVariableBinding(LocalVariable var) throws XPathException; - - /** - * Declare a global variable as by "declare variable". 
- * - * @param var - * - * @return variable - * - * @throws XPathException - */ - public Variable declareGlobalVariable(Variable var) throws XPathException; - - public void undeclareGlobalVariable(QName name); - - /** - * Declare a user-defined variable. - * - *

The value argument is converted into an XPath value (@see XPathUtil#javaObjectToXPath(Object)).

- * - * @param qname the qualified name of the new variable. Any namespaces should have been declared before. - * @param value a Java object, representing the fixed value of the variable - * - * @return the created Variable object - * - * @throws XPathException if the value cannot be converted into a known XPath value or the variable QName references an unknown - * namespace-prefix. - */ - public Variable declareVariable(String qname, Object value) throws XPathException; - - public Variable declareVariable(QName qn, Object value) throws XPathException; - - /** - * Try to resolve a variable. - * - * @param name the qualified name of the variable as string - * - * @return the declared Variable object - * - * @throws XPathException if the variable is unknown - */ - public Variable resolveVariable(String name) throws XPathException; - - /** - * Try to resolve a variable. - * - * @param qname the qualified name of the variable - * - * @return the declared Variable object - * - * @throws XPathException if the variable is unknown - */ - public Variable resolveVariable(QName qname) throws XPathException; - - public boolean isVarDeclared(QName qname); - - public Map getVariables(); - - public Map getLocalVariables(); - - public Map getGlobalVariables(); - - /** - * Turn on/off XPath 1.0 backwards compatibility. - * - *

If turned on, comparison expressions will behave like in XPath 1.0, i.e. if any one of the operands is a number, the other operand will be - * cast to a double.

- * - * @param backwardsCompatible - */ - public void setBackwardsCompatibility(boolean backwardsCompatible); - - /** - * XPath 1.0 backwards compatibility turned on? - * - *

In XPath 1.0 compatible mode, additional conversions will be applied to values if a numeric value is expected.

- */ - public boolean isBackwardsCompatible(); - - public boolean isRaiseErrorOnFailedRetrieval(); - - /** - * Get the DBBroker instance used for the current query. - * - *

The DBBroker is the main database access object, providing access to all internal database functions.

- * - * @return DBBroker instance - */ - public DBBroker getBroker(); - - /** - * Get the subject which executes the current query. - * - * @return subject - */ - public Subject getSubject(); - - /** - * Get the document builder currently used for creating temporary document fragments. A new document builder will be created on demand. - * - * @return document builder - */ - public MemTreeBuilder getDocumentBuilder(); - - public MemTreeBuilder getDocumentBuilder(boolean explicitCreation); - - /** - * Returns the shared name pool used by all in-memory documents which are created within this query context. Create a name pool for every document - * would be a waste of memory, especially since it is likely that the documents contain elements or attributes with similar names. - * - * @return the shared name pool - */ - public NamePool getSharedNamePool(); - - public XQueryContext getContext(); - - public void prologEnter(Expression expr); - - public void expressionStart(Expression expr) throws TerminatedException; - - public void expressionEnd(Expression expr); - - public void stackEnter(Expression expr) throws TerminatedException; - - public void stackLeave(Expression expr); - - public void proceed() throws TerminatedException; - - public void proceed(Expression expr) throws TerminatedException; - - public void proceed(Expression expr, MemTreeBuilder builder) throws TerminatedException; - - public void setWatchDog(XQueryWatchDog watchdog); - - public XQueryWatchDog getWatchDog(); - - /** - * Push any document fragment created within the current execution context on the stack. - */ - public void pushDocumentContext(); - - public void popDocumentContext(); - - /** - * Set the base URI for the evaluation context. - * - *

This is the URI returned by the fn:base-uri() function.

- * - * @param uri - */ - public void setBaseURI(AnyURIValue uri); - - /** - * Set the base URI for the evaluation context. - * - *

A base URI specified via the base-uri directive in the XQuery prolog overwrites any other setting.

- * - * @param uri - * @param setInProlog - */ - public void setBaseURI(AnyURIValue uri, boolean setInProlog); - - /** - * Set the path to a base directory where modules should be loaded from. Relative module paths will be resolved against this directory. The - * property is usually set by the XQueryServlet or XQueryGenerator, but can also be specified manually. - * - * @param path - */ - public void setModuleLoadPath(String path); - - public String getModuleLoadPath(); - - /** - * The method isBaseURIDeclared. - * - * @return a boolean value - */ - public boolean isBaseURIDeclared(); - - /** - * Get the base URI of the evaluation context. - * - *

This is the URI returned by the fn:base-uri() function.

- * - * @return base URI of the evaluation context - * - * @exception XPathException if an error occurs - */ - public AnyURIValue getBaseURI() throws XPathException; - - /** - * Set the current context position, i.e. the position of the currently processed item in the context sequence. This value is required by some - * expressions, e.g. fn:position(). - * - * @param pos - * @param sequence - */ - public void setContextSequencePosition(int pos, Sequence sequence); - - /** - * Get the current context position, i.e. the position of the currently processed item in the context sequence. - * - * @return current context position - */ - public int getContextPosition(); - - public Sequence getContextSequence(); - - public void pushInScopeNamespaces(); - - /** - * Push all in-scope namespace declarations onto the stack. - * - * @param inherit - */ - @SuppressWarnings("unchecked") - public void pushInScopeNamespaces(boolean inherit); - - public void popInScopeNamespaces(); - - @SuppressWarnings("unchecked") - public void pushNamespaceContext(); - - public void popNamespaceContext(); - - /** - * Returns the last variable on the local variable stack. The current variable context can be restored by passing the return value to {@link - * #popLocalVariables(LocalVariable)}. - * - * @param newContext - * - * @return last variable on the local variable stack - */ - public LocalVariable markLocalVariables(boolean newContext); - - /** - * Restore the local variable stack to the position marked by variable var. - * - * @param var - */ - public void popLocalVariables(LocalVariable var); - - /** - * Returns the current size of the stack. This is used to determine where a variable has been declared. - * - * @return current size of the stack - */ - public int getCurrentStackSize(); - - /** - * Report the start of a function execution. Adds the reported function signature to the function call stack. 
- * - * @param signature - */ - public void functionStart(FunctionSignature signature); - - /** - * Report the end of the currently executed function. Pops the last function signature from the function call stack. - */ - public void functionEnd(); - - /** - * Check if the specified function signature is found in the current function called stack. If yes, the function might be tail recursive and needs - * to be optimized. - * - * @param signature - */ - public boolean tailRecursiveCall(FunctionSignature signature); - - public void mapModule(String namespace, XmldbURI uri); - - /** - * Import a module and make it available in this context. The prefix and location parameters are optional. If prefix is null, the default prefix - * specified by the module is used. If location is null, the module will be read from the namespace URI. - * - * @param namespaceURI - * @param prefix - * @param location - * - * @throws XPathException - */ - public Module importModule(String namespaceURI, String prefix, String location) throws XPathException; - - /** - * Returns the static location mapped to an XQuery source module, if known. - * - * @param namespaceURI the URI of the module - * - * @return the location string - */ - @SuppressWarnings("unchecked") - public String getModuleLocation(String namespaceURI); - - /** - * Returns an iterator over all module namespace URIs which are statically mapped to a known location. - * - * @return an iterator - */ - @SuppressWarnings("unchecked") - public Iterator getMappedModuleURIs(); - - /** - * Add a forward reference to an undeclared function. Forward references will be resolved later. - * - * @param call - */ - public void addForwardReference(FunctionCall call); - - /** - * Resolve all forward references to previously undeclared functions. - * - * @throws XPathException - */ - public void resolveForwardReferences() throws XPathException; - - public boolean optimizationsEnabled(); - - /** - * for static compile-time options i.e. 
declare option - * - * @param qnameString - * @param contents - * - * @throws XPathException - */ - public void addOption(String qnameString, String contents) throws XPathException; - - /** - * for dynamic run-time options i.e. util:declare-option - * - * @param qnameString - * @param contents - * - * @throws XPathException - */ - public void addDynamicOption(String qnameString, String contents) throws XPathException; - - public Option getOption(QName qname); - - public Pragma getPragma(String name, String contents) throws XPathException; - - /** - * Store the supplied data to a temporary document fragment. - * - * @param doc - * - * @throws XPathException - */ - public DocumentImpl storeTemporaryDoc(org.exist.dom.memtree.DocumentImpl doc) throws XPathException; - - public void setAttribute(String attribute, Object value); - - public Object getAttribute(String attribute); - - /** - * Set an XQuery Context variable. General variable storage in the xquery context - * - * @param name The variable name - * @param XQvar The variable value, may be of any xs: type - */ - public void setXQueryContextVar(String name, Object XQvar); - - /** - * Get an XQuery Context variable. General variable storage in the xquery context - * - * @param name The variable name - * - * @return The variable value indicated by name. - */ - public Object getXQueryContextVar(String name); - - public void registerUpdateListener(UpdateListener listener); - - /** - * Check if the XQuery contains pragmas that define serialization settings. If yes, - * copy the corresponding settings to the current set of output properties. - * - * @param properties the properties object to which serialization parameters will be added. 
- * - * @throws XPathException if an error occurs while parsing the option - */ - public void checkOptions(Properties properties) throws XPathException; - - public void setDebuggeeJoint(DebuggeeJoint joint); - - public DebuggeeJoint getDebuggeeJoint(); - - public boolean isDebugMode(); - - public boolean requireDebugMode(); - - public void registerBinaryValueInstance(BinaryValue binaryValue); - - public void runCleanupTasks(final Predicate predicate); + /** + * Returns true if this context has a parent context (means it is a module context). + * + * @return False. + */ + boolean hasParent(); + + XQueryContext getRootContext(); + + XQueryContext copyContext(); + + /** + * Update the current dynamic context using the properties of another context. + *

+ * This is needed by {@link org.exist.xquery.functions.util.Eval}. + * + * @param from the context to update from + */ + void updateContext(XQueryContext from); + + /** + * Prepares the current context before xquery execution. + */ + void prepareForExecution(); + + /** + * Is profiling enabled? + * + * @return true if profiling is enabled for this context. + */ + boolean isProfilingEnabled(); + + boolean isProfilingEnabled(int verbosity); + + /** + * Returns the {@link Profiler} instance of this context if profiling is enabled. + * + * @return the profiler instance. + */ + Profiler getProfiler(); + + /** + * Called from the XQuery compiler to set the root expression for this context. + * + * @param expr + */ + void setRootExpression(Expression expr); + + /** + * Returns the root expression of the XQuery associated with this context. + * + * @return root expression + */ + Expression getRootExpression(); + + /** + * Returns the number of expression objects in the internal representation of the query. Used to estimate the size of the query. + * + * @return number of expression objects + */ + int getExpressionCount(); + + void setSource(Source source); + + Source getSource(); + + /** + * Declare a user-defined static prefix/namespace mapping. + * + *

eXist internally keeps a table containing all prefix/namespace mappings it found in documents, which have been previously stored into the + * database. These default mappings need not to be declared explicitely.

+ * + * @param prefix + * @param uri + * @throws XPathException + */ + void declareNamespace(String prefix, String uri) throws XPathException; + + void declareNamespaces(Map namespaceMap); + + /** + * Removes the namespace URI from the prefix/namespace mappings table. + * + * @param uri + */ + void removeNamespace(String uri); + + /** + * Declare an in-scope namespace. This is called during query execution. + * + * @param prefix the namespace prefix. + * @param uri the namespace uri. + */ + void declareInScopeNamespace(String prefix, String uri); + + String getInScopeNamespace(String prefix); + + String getInScopePrefix(String uri); + + String getInheritedNamespace(String prefix); + + String getInheritedPrefix(String uri); + + /** + * Return the namespace URI mapped to the registered prefix or null if the prefix is not registered. + * + * @param prefix + * @return namespace + */ + String getURIForPrefix(String prefix); + + /** + * Get URI Prefix + * + * @param uri + * @return the prefix mapped to the registered URI or null if the URI is not registered. + */ + String getPrefixForURI(String uri); + + /** + * Returns the current default function namespace. + * + * @return current default function namespace + */ + String getDefaultFunctionNamespace(); + + /** + * Set the default function namespace. By default, this points to the namespace for XPath built-in functions. + * + * @param uri + * @throws XPathException + */ + void setDefaultFunctionNamespace(String uri) throws XPathException; + + /** + * Returns the current default element namespace. + * + * @return current default element namespace schema + * @throws XPathException + */ + String getDefaultElementNamespaceSchema() throws XPathException; + + /** + * Set the default element namespace. By default, this points to the empty uri. 
+ * + * @param uri the default element namespace schema uri + * @throws XPathException + */ + void setDefaultElementNamespaceSchema(String uri) throws XPathException; + + /** + * Returns the current default element namespace. + * + * @return current default element namespace + * @throws XPathException + */ + String getDefaultElementNamespace() throws XPathException; + + /** + * Set the default element namespace. By default, this points to the empty uri. + * + * @param uri the namespace uri + * @param schema detail of the namespace schema, or null + * @throws XPathException if an error occurs + */ + void setDefaultElementNamespace(String uri, @Nullable String schema) throws XPathException; + + /** + * Set the default collation to be used by all operators and functions on strings. + * Throws an exception if the collation is unknown or cannot be instantiated. + * + * @param uri the collation URI + * @throws XPathException + */ + void setDefaultCollation(String uri) throws XPathException; + + String getDefaultCollation(); + + Collator getCollator(String uri) throws XPathException; + + Collator getDefaultCollator(); + + /** + * Set the set of statically known documents for the current execution context. + * These documents will be processed if no explicit document set has been set for the current expression + * with fn:doc() or fn:collection(). + * + * @param docs the statically known documents + */ + void setStaticallyKnownDocuments(XmldbURI[] docs); + + void setStaticallyKnownDocuments(DocumentSet set); + + //TODO : not sure how these 2 options might/have to be related + void setCalendar(XMLGregorianCalendar newCalendar); + + void setTimeZone(TimeZone newTimeZone); + + XMLGregorianCalendar getCalendar(); + + TimeZone getImplicitTimeZone(); + + /** + * Get statically known documents + * + * @return set of statically known documents. 
+ * @throws XPathException + */ + DocumentSet getStaticallyKnownDocuments() throws XPathException; + + ExtendedXMLStreamReader getXMLStreamReader(NodeValue nv) throws XMLStreamException, IOException; + + void setProtectedDocs(LockedDocumentMap map); + + LockedDocumentMap getProtectedDocs(); + + boolean inProtectedMode(); + + /** + * Should loaded documents be locked? + * + *

see #setLockDocumentsOnLoad(boolean)

+ */ + boolean lockDocumentsOnLoad(); + + void addLockedDocument(DocumentImpl doc); + + void setShared(boolean shared); + + boolean isShared(); + + void addModifiedDoc(DocumentImpl document); + + void reset(); + + /** + * Prepare this XQueryContext to be reused. This should be called when adding an XQuery to the cache. + * + * @param keepGlobals true if global variables should be preserved. + */ + void reset(boolean keepGlobals); + + /** + * Returns true if whitespace between constructed element nodes should be stripped by default. + * + * @return true if whitespace should be stripped, false otherwise. + */ + boolean stripWhitespace(); + + void setStripWhitespace(boolean strip); + + /** + * Returns true if namespaces for constructed element and document nodes should be preserved on copy by default. + * + * @return true if namespaces should be preserved, false otherwise. + */ + boolean preserveNamespaces(); + + /** + * The method setPreserveNamespaces. + * + * @param preserve a boolean value + */ + void setPreserveNamespaces(final boolean preserve); + + /** + * Returns true if namespaces for constructed element and document nodes should be inherited on copy by default. + */ + boolean inheritNamespaces(); + + /** + * The method setInheritNamespaces. + * + * @param inherit a boolean value + */ + void setInheritNamespaces(final boolean inherit); + + /** + * Returns true if order empty is set to greatest, otherwise false for order empty is least. + */ + boolean orderEmptyGreatest(); + + /** + * The method setOrderEmptyGreatest. + * + * @param order a boolean value + */ + void setOrderEmptyGreatest(final boolean order); + + /** + * Get modules. + * + * @return iterator over all modules imported into this context + */ + Iterator getModules(); + + /** + * Get root modules. 
+ * + * @return iterator over all modules registered in the entire context tree + */ + Iterator getRootModules(); + + Iterator getAllModules(); + + /** + * Get the built-in module registered for the given namespace URI. + * + * @param namespaceURI the namespace of the module. + * @return the module, or null + */ + @Nullable + Module getModule(String namespaceURI); + + Module getRootModule(String namespaceURI); + + void setModule(String namespaceURI, Module module); + + /** + * For compiled expressions: check if the source of any module imported by the current + * query has changed since compilation. + * + * @return true if the modules are valid, false otherwise. + */ + boolean checkModulesValid(); + + void analyzeAndOptimizeIfModulesChanged(Expression expr) throws XPathException; + + /** + * Load a built-in module from the given class name and assign it to the namespace URI. + *

+ * The specified {@code moduleClass} should be a subclass of {@link Module}. The method will try to instantiate + * the class. + *

+ * If the class is not found or an exception is thrown, the method will silently fail. The + * namespace URI has to be equal to the namespace URI declared by the module class. Otherwise, + * the module is not loaded. + * + * @param namespaceURI the namespace URI of the module to load + * @param moduleClass the Java class of the module to load + * @return the loaded module, or null + */ + @Nullable + Module loadBuiltInModule(String namespaceURI, String moduleClass); + + /** + * Declare a user-defined function. All user-defined functions are kept in a single hash map. + * + * @param function the function. + * @throws XPathException + */ + void declareFunction(UserDefinedFunction function) throws XPathException; + + /** + * Resolve a user-defined function. + * + * @param name the function name + * @param argCount the function arity + * @return the resolved function, or null + * @throws XPathException + */ + @Nullable + UserDefinedFunction resolveFunction(QName name, int argCount) throws XPathException; + + Iterator getSignaturesForFunction(QName name); + + Iterator localFunctions(); + + + /** + * Declare a local variable. This is called by variable binding expressions like "let" and "for". + * + * @param var the variable + * @return the declare variable + * @throws XPathException + */ + LocalVariable declareVariableBinding(LocalVariable var) throws XPathException; + + /** + * Declare a global variable as by "declare variable". + * + * @param var the variable + * @return variable the declared variable + * @throws XPathException + */ + Variable declareGlobalVariable(Variable var) throws XPathException; + + void undeclareGlobalVariable(QName name); + + /** + * Declare a user-defined variable. + *

+ * The value argument is converted into an XPath value (@see XPathUtil#javaObjectToXPath(Object)). + * + * @param qname the qualified name of the new variable. Any namespaces should have been declared before. + * @param value a Java object, representing the fixed value of the variable + * @return the created Variable object + * @throws XPathException if the value cannot be converted into a known XPath value or the variable QName + * references an unknown namespace-prefix. + */ + Variable declareVariable(String qname, Object value) throws XPathException; + + Variable declareVariable(QName qn, Object value) throws XPathException; + + /** + * Try to resolve a variable. + * + * @param name the qualified name of the variable as string + * @return the declared Variable object + * @throws XPathException if the variable is unknown + */ + Variable resolveVariable(String name) throws XPathException; + + + /** + * Try to resolve a variable. + * + * @param qname the qualified name of the variable + * @return the declared Variable object + * @throws XPathException if the variable is unknown + */ + Variable resolveVariable(QName qname) throws XPathException; + + boolean isVarDeclared(QName qname); + + Map getVariables(); + + Map getLocalVariables(); + + Map getGlobalVariables(); + + /** + * Turn on/off XPath 1.0 backwards compatibility. + *

+ * If turned on, comparison expressions will behave like in XPath 1.0, i.e. if any one of the operands is a number, + * the other operand will be cast to a double. + * + * @param backwardsCompatible true to enable XPath 1.0 backwards compatible mode. + */ + void setBackwardsCompatibility(boolean backwardsCompatible); + + /** + * XPath 1.0 backwards compatibility turned on? + *

+ * In XPath 1.0 compatible mode, additional conversions will be applied to values if a numeric value is expected. + * + * @return true if XPath 1.0 compatible mode is enabled. + */ + boolean isBackwardsCompatible(); + + boolean isRaiseErrorOnFailedRetrieval(); + + /** + * Get the DBBroker instance used for the current query. + *

+ * The DBBroker is the main database access object, providing access to all internal database functions. + * + * @return DBBroker instance + */ + DBBroker getBroker(); + + /** + * Get the subject which executes the current query. + * + * @return subject + */ + Subject getSubject(); + + /** + * Get the document builder currently used for creating temporary document fragments. + * A new document builder will be created on demand. + * + * @return document builder + */ + MemTreeBuilder getDocumentBuilder(); + + MemTreeBuilder getDocumentBuilder(boolean explicitCreation); + + /** + * Returns the shared name pool used by all in-memory documents which are created within this query context. + * Create a name pool for every document would be a waste of memory, especially since it is likely that the + * documents contain elements or attributes with similar names. + * + * @return the shared name pool + */ + NamePool getSharedNamePool(); + + XQueryContext getContext(); + + void prologEnter(Expression expr); + + void expressionStart(Expression expr) throws TerminatedException; + + void expressionEnd(Expression expr); + + void stackEnter(Expression expr) throws TerminatedException; + + void stackLeave(Expression expr); + + void proceed() throws TerminatedException; + + void proceed(Expression expr) throws TerminatedException; + + void proceed(Expression expr, MemTreeBuilder builder) throws TerminatedException; + + void setWatchDog(XQueryWatchDog watchdog); + + XQueryWatchDog getWatchDog(); + + /** + * Push any document fragment created within the current execution context on the stack. + */ + void pushDocumentContext(); + + /** + * Pop the last document fragment created within the current execution context off the stack. + */ + void popDocumentContext(); + + /** + * Set the base URI for the evaluation context. + *

+ * This is the URI returned by the {@code fn:base-uri()} function. + * + * @param uri the base URI + */ + void setBaseURI(AnyURIValue uri); + + /** + * Set the base URI for the evaluation context. + *

+ * A base URI specified via the base-uri directive in the XQuery prolog overwrites any other setting. + * + * @param uri the base URI + * @param setInProlog true if it was set by a declare option in the XQuery prolog + */ + void setBaseURI(AnyURIValue uri, boolean setInProlog); + + /** + * Set the path to a base directory where modules should be loaded from. Relative module paths will be resolved + * against this directory. The property is usually set by the XQueryServlet or XQueryGenerator, but can also + * be specified manually. + * + * @param path the module load path. + */ + void setModuleLoadPath(String path); + + String getModuleLoadPath(); + + /** + * Returns true if the baseURI is declared. + * + * @return true if the baseURI is declared, false otherwise. + */ + boolean isBaseURIDeclared(); + + /** + * Get the base URI of the evaluation context. + *

+ * This is the URI returned by the fn:base-uri() function. + * + * @return base URI of the evaluation context + * @throws XPathException if an error occurs + */ + AnyURIValue getBaseURI() throws XPathException; + + /** + * Set the current context position, i.e. the position of the currently processed item in the context sequence. + * This value is required by some expressions, e.g. fn:position(). + * + * @param pos the position + * @param sequence the sequence + */ + void setContextSequencePosition(int pos, Sequence sequence); + + /** + * Get the current context position, i.e. the position of the currently processed item in the context sequence. + * + * @return current context position + */ + int getContextPosition(); + + Sequence getContextSequence(); + + void pushInScopeNamespaces(); + + /** + * Push all in-scope namespace declarations onto the stack. + * + * @param inherit true if the current namespaces become inherited + * just like the previous inherited ones + */ + @SuppressWarnings("unchecked") + void pushInScopeNamespaces(boolean inherit); + + void popInScopeNamespaces(); + + @SuppressWarnings("unchecked") + void pushNamespaceContext(); + + void popNamespaceContext(); + + /** + * Returns the last variable on the local variable stack. The current variable context can be restored by + * passing the return value to {@link #popLocalVariables(LocalVariable)}. + * + * @param newContext true if there is a new context + * @return last variable on the local variable stack + */ + LocalVariable markLocalVariables(boolean newContext); + + /** + * Restore the local variable stack to the position marked by variable {@code var}. + * + * @param var only clear variables after this variable, or null + */ + void popLocalVariables(@Nullable LocalVariable var); + + /** + * Returns the current size of the stack. This is used to determine where a variable has been declared. 
+ * + * @return current size of the stack + */ + int getCurrentStackSize(); + + /** + * Report the start of a function execution. Adds the reported function signature to the function call stack. + * + * @param signature the function signature + */ + void functionStart(FunctionSignature signature); + + /** + * Report the end of the currently executed function. Pops the last function signature from the function call stack. + */ + void functionEnd(); + + /** + * Check if the specified function signature is found in the current function called stack. + * If yes, the function might be tail recursive and needs + * to be optimized. + * + * @param signature the function signature + * @return true if the function call is tail recursive + */ + boolean tailRecursiveCall(FunctionSignature signature); + + void mapModule(String namespace, XmldbURI uri); + + /** + * Import a module and make it available in this context. The prefix and location parameters are optional. If prefix is null, the default prefix + * specified by the module is used. If location is null, the module will be read from the namespace URI. + * + * @param namespaceURI the namespace URI of the module + * @param prefix the namespace prefix of the module + * @param location the location of the module + * @return the imported module + * @throws XPathException + */ + Module importModule(String namespaceURI, String prefix, String location) throws XPathException; + + /** + * Returns the static location mapped to an XQuery source module, if known. + * + * @param namespaceURI the URI of the module + * @return the location string + */ + String getModuleLocation(String namespaceURI); + + /** + * Returns an iterator over all module namespace URIs which are statically mapped to a known location. + * + * @return an iterator + */ + Iterator getMappedModuleURIs(); + + /** + * Add a forward reference to an undeclared function. Forward references will be resolved later. 
+ * + * @param call the undeclared function + */ + void addForwardReference(FunctionCall call); + + /** + * Resolve all forward references to previously undeclared functions. + * + * @throws XPathException + */ + void resolveForwardReferences() throws XPathException; + + boolean optimizationsEnabled(); + + /** + * Add a static compile-time option i.e. declare option + * + * @param name the name of the option + * @param value the value of the option + * @throws XPathException + */ + void addOption(String name, String value) throws XPathException; + + /** + * Add a dynamic run-time option i.e. util:declare-option + * + * @param name the name of the dynamic option + * @param value the value of the dynamic option + * @throws XPathException + */ + void addDynamicOption(String name, String value) throws XPathException; + + /** + * Get dynamic options that were declared at run-time + * first as these have precedence, and then if not found + * get static options that were declare at compile time + * + * @param qname option name + * @return the option + */ + Option getOption(QName qname); + + Pragma getPragma(String name, String contents) throws XPathException; + + /** + * Store the supplied in-memory document to a temporary document fragment. + * + * @param doc the in-memory document + * @return The temporary document + * @throws XPathException + */ + DocumentImpl storeTemporaryDoc(org.exist.dom.memtree.DocumentImpl doc) throws XPathException; + + void setAttribute(String attribute, Object value); + + Object getAttribute(String attribute); + + /** + * Set an XQuery Context variable. General variable storage in the xquery context + * + * @param name The variable name + * @param xqVar The variable value, may be of any xs: type + */ + void setXQueryContextVar(String name, Object xqVar); + + /** + * Get an XQuery Context variable. General variable storage in the xquery context + * + * @param name The variable name + * @return The variable value indicated by name. 
+ */ + Object getXQueryContextVar(String name); + + void registerUpdateListener(UpdateListener listener); + + /** + * Check if the XQuery contains options that define serialization settings. If yes, + * copy the corresponding settings to the current set of output properties. + * + * @param properties the properties object to which serialization parameters will be added. + * @throws XPathException if an error occurs while parsing the option + */ + void checkOptions(Properties properties) throws XPathException; + + void setDebuggeeJoint(DebuggeeJoint joint); + + DebuggeeJoint getDebuggeeJoint(); + + boolean isDebugMode(); + + boolean requireDebugMode(); + + void registerBinaryValueInstance(BinaryValue binaryValue); + + void runCleanupTasks(final Predicate predicate); } \ No newline at end of file diff --git a/src/org/exist/jetty/JettyStart.java b/src/org/exist/jetty/JettyStart.java index e6c24551e1f..a7fe16b60f2 100644 --- a/src/org/exist/jetty/JettyStart.java +++ b/src/org/exist/jetty/JettyStart.java @@ -63,6 +63,8 @@ import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Database; +import static org.exist.util.ThreadUtils.newGlobalThread; + /** * This class provides a main method to start Jetty with eXist. It registers shutdown * handlers to cleanly shut down the database and the webserver. 
@@ -77,11 +79,6 @@ public class JettyStart extends Observable implements LifeCycle.Listener { private static final String JETTY_PROPETIES_FILENAME = "jetty.properties"; private static final Logger logger = LogManager.getLogger(JettyStart.class); - public static void main(final String[] args) { - final JettyStart start = new JettyStart(); - start.run(args, null); - } - public final static String SIGNAL_STARTING = "jetty starting"; public final static String SIGNAL_STARTED = "jetty started"; public final static String SIGNAL_ERROR = "error"; @@ -92,9 +89,15 @@ public static void main(final String[] args) { private final static int STATUS_STOPPED = 3; @GuardedBy("this") private int status = STATUS_STOPPED; - @GuardedBy("this") private Optional shutdownHook = Optional.empty(); + @GuardedBy("this") private Optional shutdownHookThread = Optional.empty(); @GuardedBy("this") private int primaryPort = 8080; + + public static void main(final String[] args) { + final JettyStart start = new JettyStart(); + start.run(args, null); + } + public JettyStart() { // Additional checks XML libs @@@@ XmlLibraryChecker.check(); @@ -500,8 +503,9 @@ private Optional startJetty(final List configuredObjects) throws // register a shutdown hook for the server final BrokerPoolAndJettyShutdownHook brokerPoolAndJettyShutdownHook = new BrokerPoolAndJettyShutdownHook(_server); - Runtime.getRuntime().addShutdownHook(brokerPoolAndJettyShutdownHook); - this.shutdownHook = Optional.of(brokerPoolAndJettyShutdownHook); + final Thread shutdownHookThread = newGlobalThread("BrokerPoolsAndJetty.ShutdownHook", brokerPoolAndJettyShutdownHook); + this.shutdownHookThread = Optional.of(shutdownHookThread); + Runtime.getRuntime().addShutdownHook(shutdownHookThread); server = Optional.of(_server); } @@ -568,7 +572,7 @@ private List getEnabledConfigFiles(final Path enabledJettyConfigs) throws } public synchronized void shutdown() { - shutdownHook.ifPresent(Runtime.getRuntime()::removeShutdownHook); + 
shutdownHookThread.ifPresent(Runtime.getRuntime()::removeShutdownHook); BrokerPool.stopAll(false); @@ -615,11 +619,10 @@ public void run() { } } - private static class BrokerPoolAndJettyShutdownHook extends Thread { + private static class BrokerPoolAndJettyShutdownHook implements Runnable { private final Server server; BrokerPoolAndJettyShutdownHook(final Server server) { - super("exist-jettyStart-shutdownHook"); this.server = server; } @@ -635,7 +638,6 @@ public void run() { } catch (final Exception e) { e.printStackTrace(); } - } } diff --git a/src/org/exist/launcher/ConfigurationDialog.java b/src/org/exist/launcher/ConfigurationDialog.java index cfb9be6873a..8e98b3c5eb3 100644 --- a/src/org/exist/launcher/ConfigurationDialog.java +++ b/src/org/exist/launcher/ConfigurationDialog.java @@ -14,14 +14,13 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import javax.swing.*; -import javax.xml.stream.XMLStreamException; import javax.xml.transform.TransformerException; import org.apache.commons.configuration2.PropertiesConfiguration; import org.apache.commons.configuration2.ex.ConfigurationException; import org.apache.commons.lang3.SystemUtils; +import org.exist.collections.CollectionCache; import org.exist.storage.BrokerPool; -import org.exist.storage.CollectionCacheManager; import org.exist.storage.DefaultCacheManager; import org.exist.util.Configuration; import org.exist.util.ConfigurationHelper; @@ -64,7 +63,7 @@ public ConfigurationDialog(Consumer callback) { final int cacheSizeProp = existConfig.getInteger(DefaultCacheManager.PROPERTY_CACHE_SIZE); cacheSize.setValue(Integer.valueOf(cacheSizeProp)); - final int collectionCacheProp = existConfig.getInteger(CollectionCacheManager.PROPERTY_CACHE_SIZE_BYTES); + final int collectionCacheProp = existConfig.getInteger(CollectionCache.PROPERTY_CACHE_SIZE_BYTES); collectionCache.setValue(Integer.valueOf(collectionCacheProp / 1024 / 1024)); // show in MB final Path dir = 
(Path)existConfig.getProperty(BrokerPool.PROPERTY_DATA_DIR); @@ -256,7 +255,7 @@ private void initComponents() { gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST; getContentPane().add(collectionCache, gridBagConstraints); - jLabel8.setText("

Memory settings only become effective after restart and only apply when eXist is started via the system tray launcher.

"); + jLabel8.setText("

Memory settings only become effective after restart and only apply when eXist-db is started via the system tray launcher.

"); jLabel8.setPreferredSize(new java.awt.Dimension(280, 48)); gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.gridx = 4; @@ -278,7 +277,7 @@ private void initComponents() { getContentPane().add(lbCurrentUsage, gridBagConstraints); lbStartupMsg.setFont(lbStartupMsg.getFont().deriveFont(lbStartupMsg.getFont().getStyle() & ~java.awt.Font.BOLD)); - lbStartupMsg.setText("

It seems you are starting eXist for the first time. Please configure your memory settings below.

"); + lbStartupMsg.setText("

It seems you are starting eXist-db for the first time. Please configure your memory settings below.

"); lbStartupMsg.setMinimumSize(new java.awt.Dimension(60, 64)); lbStartupMsg.setPreferredSize(new java.awt.Dimension(300, 32)); gridBagConstraints = new java.awt.GridBagConstraints(); diff --git a/src/org/exist/launcher/Launcher.java b/src/org/exist/launcher/Launcher.java index 062cfed3aa4..fc639ba93a2 100644 --- a/src/org/exist/launcher/Launcher.java +++ b/src/org/exist/launcher/Launcher.java @@ -52,6 +52,8 @@ import java.util.*; import java.util.concurrent.locks.ReentrantLock; +import static org.exist.util.ThreadUtils.newGlobalThread; + /** * A launcher for the eXist-db server integrated with the desktop. * Shows a splash screen during startup and registers a tray icon @@ -157,7 +159,7 @@ public void windowOpened(WindowEvent windowEvent) { } void startJetty() { - new Thread(() -> { + final Runnable runnable = () -> { serviceLock.lock(); try { if (!jetty.isPresent()) { @@ -170,7 +172,8 @@ void startJetty() { } finally { serviceLock.unlock(); } - }).start(); + }; + newGlobalThread("launcher.startJetty", runnable).start(); } boolean isSystemTraySupported() { @@ -233,11 +236,11 @@ private PopupMenu createMenu() { serviceLock.lock(); try { if (serviceManager.isInstalled()) { - showTrayMessage("Starting the eXistdb service. Please wait...", TrayIcon.MessageType.INFO); + showTrayMessage("Starting the eXist-db service. 
Please wait...", TrayIcon.MessageType.INFO); if (serviceManager.start()) { - showTrayMessage("eXistdb service started", TrayIcon.MessageType.INFO); + showTrayMessage("eXist-db service started", TrayIcon.MessageType.INFO); } else { - showTrayMessage("Starting eXistdb service failed", TrayIcon.MessageType.ERROR); + showTrayMessage("Starting eXist-db service failed", TrayIcon.MessageType.ERROR); } } else if (jetty.isPresent()) { jetty.ifPresent(server -> { @@ -271,9 +274,9 @@ private PopupMenu createMenu() { showTrayMessage("eXist-db stopped", TrayIcon.MessageType.INFO); } else if (serviceManager.isRunning()) { if (serviceManager.stop()) { - showTrayMessage("eXistdb service stopped", TrayIcon.MessageType.INFO); + showTrayMessage("eXist-db service stopped", TrayIcon.MessageType.INFO); } else { - showTrayMessage("Stopping eXistdb service failed", TrayIcon.MessageType.ERROR); + showTrayMessage("Stopping eXist-db service failed", TrayIcon.MessageType.ERROR); } } } finally { @@ -374,7 +377,7 @@ private void installAsService() { try { jetty.ifPresent(server -> { if (server.isStarted()) { - showTrayMessage("Stopping eXistdb...", TrayIcon.MessageType.INFO); + showTrayMessage("Stopping eXist-db...", TrayIcon.MessageType.INFO); server.shutdown(); } }); @@ -532,9 +535,9 @@ void signalStarted() { if (SystemUtils.IS_OS_WINDOWS && !isInstallingService && !serviceManager.isInstalled()) { isInstallingService = true; SwingUtilities.invokeLater(() -> { - if (JOptionPane.showConfirmDialog(splash, "It is recommended to run eXist as a service on " + + if (JOptionPane.showConfirmDialog(splash, "It is recommended to run eXist-db as a service on " + "Windows.\nNot doing so may lead to data loss if you shut down the computer before " + - "eXist.\n\nWould you like to install the service?", "Install as Service?", + "eXist-db.\n\nWould you like to install the service?", "Install as Service?", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION) { 
SwingUtilities.invokeLater(() -> installAsService()); } diff --git a/src/org/exist/launcher/ServiceManager.java b/src/org/exist/launcher/ServiceManager.java index 2b23075b50a..74ab32efb94 100644 --- a/src/org/exist/launcher/ServiceManager.java +++ b/src/org/exist/launcher/ServiceManager.java @@ -114,7 +114,7 @@ void queryState() { } void installAsService() { - launcher.showTrayMessage("Installing service and starting eXistdb ...", TrayIcon.MessageType.INFO); + launcher.showTrayMessage("Installing service and starting eXist-db ...", TrayIcon.MessageType.INFO); if (canUseServices) { runWrapperCmd("install", (code, output) -> { diff --git a/src/org/exist/launcher/vm.properties b/src/org/exist/launcher/vm.properties index 5db513e76f5..724bf02c8de 100644 --- a/src/org/exist/launcher/vm.properties +++ b/src/org/exist/launcher/vm.properties @@ -9,4 +9,4 @@ memory.max=2048 vmoptions=-Dfile.encoding=UTF-8 # Mac specific properties -vmoptions.mac=-Xdock:name="eXist-db" -Xdock:icon="icon.png" -Dapple.laf.useScreenMenuBar="true" \ No newline at end of file +vmoptions.mac=-Xdock:name=eXist-db -Xdock:icon=icon.png -Dapple.laf.useScreenMenuBar=true \ No newline at end of file diff --git a/src/org/exist/management/Agent.java b/src/org/exist/management/Agent.java index 3243826ebc3..ce94af0c722 100644 --- a/src/org/exist/management/Agent.java +++ b/src/org/exist/management/Agent.java @@ -21,6 +21,7 @@ */ package org.exist.management; +import org.exist.management.impl.PerInstanceMBean; import org.exist.storage.BrokerPool; import org.exist.util.DatabaseConfigurationException; @@ -34,7 +35,7 @@ public interface Agent { void closeDBInstance(BrokerPool instance); - void addMBean(String dbInstance, String name, Object mbean) throws DatabaseConfigurationException; + void addMBean(PerInstanceMBean mbean) throws DatabaseConfigurationException; void changeStatus(BrokerPool instance, TaskStatus actualStatus); diff --git a/src/org/exist/management/AgentFactory.java 
b/src/org/exist/management/AgentFactory.java index a4724a8034f..061a2e36d50 100644 --- a/src/org/exist/management/AgentFactory.java +++ b/src/org/exist/management/AgentFactory.java @@ -24,6 +24,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; + +import static java.lang.invoke.MethodType.methodType; + public class AgentFactory { private final static Logger LOG = LogManager.getLogger(AgentFactory.class); @@ -38,16 +43,33 @@ public static Agent getInstance() { if (!Agent.class.isAssignableFrom(clazz)) { LOG.warn("Class " + className + " does not implement interface Agent. Using fallback."); } else { - instance = (Agent) clazz.newInstance(); + final MethodHandles.Lookup lookup = MethodHandles.publicLookup(); + + // 1. try for default constructor + try { + final MethodHandle mhConstructor = lookup.findConstructor(clazz, methodType(void.class)); + instance = (Agent) mhConstructor.invokeExact(); + } catch (final NoSuchMethodException | IllegalAccessException e) { + LOG.warn("No default constructor found for Agent: " + className + ". Will try singleton pattern..."); + + // 2. try for singleton with static getInstance() + try { + final MethodHandle methodHandle = lookup.findStatic(clazz, "getInstance", methodType(Agent.class)); + instance = (Agent) methodHandle.invokeExact(); + } catch (final NoSuchMethodException | IllegalAccessException e2) { + LOG.warn("No singleton pattern found for Agent: " + className); + } + } } - } catch (final ClassNotFoundException e) { - LOG.warn("Class not found for JMX agent: " + className); - } catch (final IllegalAccessException | InstantiationException e) { - LOG.warn("Failed to instantiate class for JMX agent: " + className); + } catch (final Throwable e) { + LOG.error("Unable to instantiate JMX agent: " + className + ". 
JMX will be unavailable!", e); + } + + if (instance == null) { + instance = new DummyAgent(); } - if (instance == null) - {instance = new DummyAgent();} } + return instance; } } diff --git a/src/org/exist/management/Cache.java b/src/org/exist/management/Cache.java index 295187a97a7..7752cb96479 100644 --- a/src/org/exist/management/Cache.java +++ b/src/org/exist/management/Cache.java @@ -21,16 +21,38 @@ */ package org.exist.management; -public class Cache implements CacheMXBean { +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; +public class Cache implements CacheMXBean { + private final String instanceId; private final org.exist.storage.cache.Cache cache; - public Cache(org.exist.storage.cache.Cache cache) { + public Cache(final String instanceId, final org.exist.storage.cache.Cache cache) { + this.instanceId = instanceId; this.cache = cache; } + public static String getAllInstancesQuery() { + return "org.exist.management." + '*' + ":type=CacheManager.Cache," + '*'; + } + + private static ObjectName getName(final String instanceId, final String cacheName, final String cacheType) throws MalformedObjectNameException { + return new ObjectName("org.exist.management." 
+ instanceId + ":type=CacheManager.Cache,name=" + cacheName + ",cache-type=" + cacheType); + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return getName(instanceId, cache.getName(), cache.getType().toString()); + } + + @Override + public String getInstanceId() { + return instanceId; + } + @Override - public String getType() { + public org.exist.storage.cache.Cache.CacheType getType() { return cache.getType(); } @@ -55,7 +77,7 @@ public int getFails() { } @Override - public String getName() { + public String getCacheName() { return cache.getName(); } } diff --git a/src/org/exist/management/CacheMXBean.java b/src/org/exist/management/CacheMXBean.java index fbafe854bfd..db06e997da2 100644 --- a/src/org/exist/management/CacheMXBean.java +++ b/src/org/exist/management/CacheMXBean.java @@ -19,14 +19,17 @@ */ package org.exist.management; +import org.exist.management.impl.PerInstanceMBean; +import org.exist.storage.cache.Cache; + /** * Provides access to some properties of the internal page caches * ({@link org.exist.storage.cache.Cache}). */ -public interface CacheMXBean { +public interface CacheMXBean extends PerInstanceMBean { + + Cache.CacheType getType(); - String getType(); - int getSize(); int getUsed(); @@ -35,5 +38,5 @@ public interface CacheMXBean { int getFails(); - String getName(); + String getCacheName(); } \ No newline at end of file diff --git a/src/org/exist/management/CacheManager.java b/src/org/exist/management/CacheManager.java index 4779e796457..6ab4d7c2444 100644 --- a/src/org/exist/management/CacheManager.java +++ b/src/org/exist/management/CacheManager.java @@ -1,20 +1,35 @@ package org.exist.management; -/** - * Created by IntelliJ IDEA. - * User: wolf - * Date: Jun 10, 2007 - * Time: 8:31:15 AM - * To change this template use File | Settings | File Templates. 
- */ -public class CacheManager implements CacheManagerMXBean { +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; +public class CacheManager implements CacheManagerMXBean { + private final String instanceId; private final org.exist.storage.CacheManager manager; - public CacheManager(org.exist.storage.CacheManager manager) { + public CacheManager(final String instanceId, final org.exist.storage.CacheManager manager) { + this.instanceId = instanceId; this.manager = manager; } + public static String getAllInstancesQuery() { + return "org.exist.management." + '*' + ":type=CacheManager"; + } + + private static ObjectName getName(final String instanceId) throws MalformedObjectNameException { + return new ObjectName("org.exist.management." + instanceId + ":type=CacheManager"); + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return getName(instanceId); + } + + @Override + public String getInstanceId() { + return instanceId; + } + @Override public long getMaxTotal() { return manager.getMaxTotal(); diff --git a/src/org/exist/management/CacheManagerMXBean.java b/src/org/exist/management/CacheManagerMXBean.java index b889d6d22ba..0189d4b50b8 100644 --- a/src/org/exist/management/CacheManagerMXBean.java +++ b/src/org/exist/management/CacheManagerMXBean.java @@ -1,13 +1,8 @@ package org.exist.management; -/** - * Created by IntelliJ IDEA. - * User: wolf - * Date: Jun 9, 2007 - * Time: 10:33:21 PM - * To change this template use File | Settings | File Templates. 
- */ -public interface CacheManagerMXBean { +import org.exist.management.impl.PerInstanceMBean; + +public interface CacheManagerMXBean extends PerInstanceMBean { long getMaxTotal(); diff --git a/src/org/exist/management/DummyAgent.java b/src/org/exist/management/DummyAgent.java index e0016288d0c..f9e82f3f784 100644 --- a/src/org/exist/management/DummyAgent.java +++ b/src/org/exist/management/DummyAgent.java @@ -21,6 +21,7 @@ */ package org.exist.management; +import org.exist.management.impl.PerInstanceMBean; import org.exist.storage.BrokerPool; import org.exist.util.DatabaseConfigurationException; @@ -31,27 +32,27 @@ public class DummyAgent implements Agent { @Override - public void initDBInstance(BrokerPool instance) { + public void initDBInstance(final BrokerPool instance) { // do nothing } @Override - public void closeDBInstance(BrokerPool instance) { + public void closeDBInstance(final BrokerPool instance) { // nothing to do } @Override - public void addMBean(String dbInstance, String name, Object mbean) throws DatabaseConfigurationException { + public void addMBean(final PerInstanceMBean mbean) throws DatabaseConfigurationException { // just do nothing } @Override - public void changeStatus(BrokerPool instance, TaskStatus actualStatus) { + public void changeStatus(final BrokerPool instance, final TaskStatus actualStatus) { // nothing to do } @Override - public void updateStatus(BrokerPool instance, int percentage) { + public void updateStatus(final BrokerPool instance, final int percentage) { // nothing to do } } diff --git a/src/org/exist/management/client/JMXServlet.java b/src/org/exist/management/client/JMXServlet.java index 78d2c7c3595..414c0000517 100644 --- a/src/org/exist/management/client/JMXServlet.java +++ b/src/org/exist/management/client/JMXServlet.java @@ -48,6 +48,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.exist.storage.BrokerPool; import 
org.exist.util.serializer.DOMSerializer; import org.w3c.dom.Element; @@ -132,7 +133,7 @@ private void writeXmlData(HttpServletRequest request, HttpServletResponse respon } } - final long responseTime = client.ping("exist", timeout); + final long responseTime = client.ping(BrokerPool.DEFAULT_INSTANCE_NAME, timeout); if (responseTime == JMXtoXML.PING_TIMEOUT) { root = client.generateXMLReport(String.format("no response on ping after %sms", timeout), new String[]{"sanity", "locking", "processes", "instances", "memory"}); diff --git a/src/org/exist/management/client/JMXtoXML.java b/src/org/exist/management/client/JMXtoXML.java index 89abafc449c..7b7daa045a2 100644 --- a/src/org/exist/management/client/JMXtoXML.java +++ b/src/org/exist/management/client/JMXtoXML.java @@ -24,18 +24,15 @@ import java.io.IOException; import java.io.StringWriter; import java.lang.management.ManagementFactory; + import static java.lang.management.ManagementFactory.CLASS_LOADING_MXBEAN_NAME; import static java.lang.management.ManagementFactory.MEMORY_MXBEAN_NAME; import static java.lang.management.ManagementFactory.OPERATING_SYSTEM_MXBEAN_NAME; import static java.lang.management.ManagementFactory.RUNTIME_MXBEAN_NAME; import static java.lang.management.ManagementFactory.THREAD_MXBEAN_NAME; + import java.net.MalformedURLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.TreeMap; +import java.util.*; import java.util.concurrent.*; import javax.management.*; import javax.management.openmbean.CompositeData; @@ -47,10 +44,13 @@ import javax.xml.XMLConstants; import javax.xml.transform.OutputKeys; import javax.xml.transform.TransformerException; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.dom.QName; -import org.exist.management.impl.SanityReport; +import org.exist.management.Cache; +import org.exist.management.CacheManager; +import 
org.exist.management.impl.*; import org.exist.dom.memtree.MemTreeBuilder; import org.exist.util.NamedThreadFactory; import org.exist.util.serializer.DOMSerializer; @@ -61,7 +61,6 @@ * Utility class to output database status information from eXist's JMX interface as XML. * * @author wolf - * */ public class JMXtoXML { @@ -69,62 +68,66 @@ public class JMXtoXML { private final static Map CATEGORIES = new TreeMap<>(); - static { + private static void putCategory(final String categoryName, final String... objectNames) { + final ObjectName[] aryObjectNames = new ObjectName[objectNames.length]; try { - // Java - CATEGORIES.put("memory", new ObjectName[]{new ObjectName(MEMORY_MXBEAN_NAME)}); - CATEGORIES.put("runtime", new ObjectName[]{new ObjectName(RUNTIME_MXBEAN_NAME)}); - CATEGORIES.put("operatingsystem", new ObjectName[]{new ObjectName(OPERATING_SYSTEM_MXBEAN_NAME)}); - CATEGORIES.put("thread", new ObjectName[]{new ObjectName(THREAD_MXBEAN_NAME)}); - CATEGORIES.put("classloading", new ObjectName[]{new ObjectName(CLASS_LOADING_MXBEAN_NAME)}); - - // eXist - CATEGORIES.put("instances", new ObjectName[]{new ObjectName("org.exist.management.*:type=Database")}); - CATEGORIES.put("disk", new ObjectName[]{new ObjectName("org.exist.management.*:type=DiskUsage")}); - CATEGORIES.put("system", new ObjectName[]{new ObjectName("org.exist.management:type=SystemInfo")}); - CATEGORIES.put("caches", new ObjectName[]{ - new ObjectName("org.exist.management.exist:type=CacheManager"), - new ObjectName("org.exist.management.exist:type=CollectionCacheManager"), - new ObjectName("org.exist.management.exist:type=CacheManager.Cache,*") - }); - CATEGORIES.put("locking", new ObjectName[]{new ObjectName("org.exist.management:type=LockManager")}); - CATEGORIES.put("processes", new ObjectName[]{new ObjectName("org.exist.management.*:type=ProcessReport")}); - CATEGORIES.put("sanity", new ObjectName[]{new ObjectName("org.exist.management.*.tasks:type=SanityReport")}); - - // Jetty - 
CATEGORIES.put("jetty.threads", new ObjectName[] { new ObjectName("org.eclipse.jetty.util.thread:type=queuedthreadpool,id=0")}); - CATEGORIES.put("jetty.nio", new ObjectName[] { new ObjectName("org.eclipse.jetty.server.nio:type=selectchannelconnector,id=0")}); - - // Special case: all data - CATEGORIES.put("all", new ObjectName[]{new ObjectName("org.exist.*:*"), new ObjectName("java.lang:*")}); - + for (int i = 0; i < aryObjectNames.length; i++) { + aryObjectNames[i] = new ObjectName(objectNames[i]); + } } catch (final MalformedObjectNameException | NullPointerException e) { LOG.warn("Error in initialization: " + e.getMessage(), e); } + + CATEGORIES.put(categoryName, aryObjectNames); + } + static { + // Java + putCategory("memory", MEMORY_MXBEAN_NAME); + putCategory("runtime", RUNTIME_MXBEAN_NAME); + putCategory("operatingsystem", OPERATING_SYSTEM_MXBEAN_NAME); + putCategory("thread", THREAD_MXBEAN_NAME); + putCategory("classloading", CLASS_LOADING_MXBEAN_NAME); + + // eXist cross-instance + putCategory("system", SystemInfo.OBJECT_NAME); + + // eXist per-instance + putCategory("instances", Database.getAllInstancesQuery()); + putCategory("locking", LockTable.getAllInstancesQuery()); + putCategory("disk", DiskUsage.getAllInstancesQuery()); + putCategory("collectioncaches", CollectionCache.getAllInstancesQuery()); + putCategory("caches", + CacheManager.getAllInstancesQuery(), + Cache.getAllInstancesQuery() + ); + putCategory("binarystreamcaches", BinaryValues.getAllInstancesQuery()); + putCategory("processes", ProcessReport.getAllInstancesQuery()); + putCategory("sanity", SanityReport.getAllInstancesQuery()); + + // Jetty + putCategory("jetty.threads", "org.eclipse.jetty.util.thread:type=queuedthreadpool,*"); + putCategory("jetty.nio", "org.eclipse.jetty.server.nio:type=selectchannelconnector,id=0"); + + // Special case: all data + putCategory("all", "org.exist.*:*", "java.lang:*"); } - private final static Properties defaultProperties = new Properties(); + private 
static final Properties defaultProperties = new Properties(); static { defaultProperties.setProperty(OutputKeys.INDENT, "yes"); defaultProperties.setProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); } - public final static String JMX_NAMESPACE = "http://exist-db.org/jmx"; - public final static String JMX_PREFIX = "jmx"; + public static final String JMX_NAMESPACE = "http://exist-db.org/jmx"; + public static final String JMX_PREFIX = "jmx"; private static final QName ROW_ELEMENT = new QName("row", JMX_NAMESPACE, JMX_PREFIX); - - public final static QName JMX_ELEMENT = new QName("jmx", JMX_NAMESPACE, JMX_PREFIX); - - public final static QName JMX_RESULT = new QName("result", JMX_NAMESPACE, JMX_PREFIX); - + private static final QName JMX_ELEMENT = new QName("jmx", JMX_NAMESPACE, JMX_PREFIX); + private static final QName JMX_RESULT = new QName("result", JMX_NAMESPACE, JMX_PREFIX); private static final QName JMX_RESULT_TYPE_ATTR = new QName("class", JMX_NAMESPACE, JMX_PREFIX); - private static final QName JMX_CONNECTION_ATTR = new QName("connection", XMLConstants.NULL_NS_URI); - private static final QName JMX_ERROR = new QName("error", JMX_NAMESPACE, JMX_PREFIX); - private static final QName VERSION_ATTR = new QName("version", XMLConstants.NULL_NS_URI); public static final long PING_TIMEOUT = -99; @@ -134,16 +137,14 @@ public class JMXtoXML { private final MBeanServerConnection platformConnection = ManagementFactory.getPlatformMBeanServer(); private MBeanServerConnection connection; private JMXServiceURL url; - private final ThreadFactory jmxPingFactory = new NamedThreadFactory("jmx-ping"); - /** * Connect to the local JMX instance. 
*/ public void connect() { - final ArrayList servers = MBeanServerFactory.findMBeanServer(null); + final List servers = MBeanServerFactory.findMBeanServer(null); if (servers.size() > 0) { - connection = servers.get(0); + this.connection = servers.get(0); } } @@ -151,27 +152,29 @@ public void connect() { * Connect to a remote JMX instance using address and port. * * @param address The remote address - * @param port The report port + * @param port The report port * @throws MalformedURLException The RMI url could not be constructed - * @throws IOException An IO error occurred + * @throws IOException An IO error occurred */ - public void connect(String address, int port) throws MalformedURLException, IOException { - url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://" + address + ":" + port + "/jmxrmi"); + public void connect(final String address, final int port) throws MalformedURLException, IOException { + this.url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://" + address + ":" + port + "/jmxrmi"); final Map env = new HashMap<>(); final String[] creds = {"guest", "guest"}; env.put(JMXConnector.CREDENTIALS, creds); final JMXConnector jmxc = JMXConnectorFactory.connect(url, env); - connection = jmxc.getMBeanServerConnection(); + this.connection = jmxc.getMBeanServerConnection(); - LOG.debug("Connected to JMX server at " + url.toString()); + if (LOG.isDebugEnabled()) { + LOG.debug("Connected to JMX server at " + url.toString()); + } } /** * Retrieve JMX output for the given categories and return a string of XML. Valid categories are "memory", * "instances", "disk", "system", "caches", "locking", "processes", "sanity", "all". 
*/ - public String generateReport(String categories[]) throws TransformerException { + public String generateReport(final String categories[]) throws TransformerException { final Element root = generateXMLReport(null, categories); final StringWriter writer = new StringWriter(); final DOMSerializer streamer = new DOMSerializer(writer, defaultProperties); @@ -186,16 +189,17 @@ public String generateReport(String categories[]) throws TransformerException { * than 0. Otherwise the return value is the response time in milliseconds. * * @param instance the name of the database instance (default instance is "exist") - * @param timeout a timeout in milliseconds + * @param timeout a timeout in milliseconds * @return Response time in msec, less than 0 in case of an error on server or PING_TIMEOUT when server does not * respond in time */ public long ping(final String instance, final long timeout) { final long start = System.currentTimeMillis(); + final ThreadFactory jmxPingFactory = new NamedThreadFactory(instance, "jmx.ping"); final ExecutorService executorService = Executors.newSingleThreadExecutor(jmxPingFactory); final Future futurePing = executorService.submit(new Ping(instance, connection)); - while(true) { + while (true) { try { return futurePing.get(timeout, TimeUnit.MILLISECONDS); } catch (final ExecutionException e) { @@ -224,11 +228,11 @@ public Ping(final String instance, final MBeanServerConnection connection) { @Override public Long call() { try { - final ObjectName name = new ObjectName("org.exist.management." 
+ instance + ".tasks:type=SanityReport"); + final ObjectName name = SanityReport.getName(instance); return (Long) connection.invoke(name, "ping", new Object[]{Boolean.TRUE}, new String[]{boolean.class.getName()}); } catch (final Exception e) { LOG.warn(e.getMessage(), e); - return (long)SanityReport.PING_ERROR; + return (long) SanityReport.PING_ERROR; } } } @@ -237,11 +241,11 @@ public Long call() { * Retrieve JMX output for the given categories and return it as an XML DOM. Valid categories are "memory", * "instances", "disk", "system", "caches", "locking", "processes", "sanity", "all". * - * @param errcode an optional error description - * @param categories + * @param errcode an optional error description + * @param categories the categories to generate the report for * @return xml report */ - public Element generateXMLReport(String errcode, String categories[]) { + public Element generateXMLReport(final String errcode, final String categories[]) { final MemTreeBuilder builder = new MemTreeBuilder(); try { @@ -281,13 +285,13 @@ public String getDataDir() { try { final Object dir = connection.getAttribute(new ObjectName("org.exist.management.exist:type=DiskUsage"), "DataDirectory"); return dir == null ? 
null : dir.toString(); - } catch (MBeanException | AttributeNotFoundException | InstanceNotFoundException | ReflectionException | IOException | MalformedObjectNameException e) { + } catch (final MBeanException | AttributeNotFoundException | InstanceNotFoundException | ReflectionException | IOException | MalformedObjectNameException e) { return null; } } - public Element invoke(String objectName, String operation, String[] args) throws InstanceNotFoundException, MalformedObjectNameException, MBeanException, IOException, ReflectionException, IntrospectionException { - ObjectName name = new ObjectName(objectName); + public Element invoke(final String objectName, final String operation, String[] args) throws InstanceNotFoundException, MalformedObjectNameException, MBeanException, IOException, ReflectionException, IntrospectionException { + final ObjectName name = new ObjectName(objectName); MBeanServerConnection conn = connection; MBeanInfo info; try { @@ -296,18 +300,18 @@ public Element invoke(String objectName, String operation, String[] args) throws conn = platformConnection; info = conn.getMBeanInfo(name); } - MBeanOperationInfo[] operations = info.getOperations(); - for (MBeanOperationInfo op: operations) { + final MBeanOperationInfo[] operations = info.getOperations(); + for (final MBeanOperationInfo op : operations) { if (operation.equals(op.getName())) { - MBeanParameterInfo[] sig = op.getSignature(); - Object[] params = new Object[sig.length]; - String[] types = new String[sig.length]; + final MBeanParameterInfo[] sig = op.getSignature(); + final Object[] params = new Object[sig.length]; + final String[] types = new String[sig.length]; for (int i = 0; i < sig.length; i++) { String type = sig[i].getType(); types[i] = type; params[i] = mapParameter(type, args[i]); } - Object result = conn.invoke(name, operation, params, types); + final Object result = conn.invoke(name, operation, params, types); final MemTreeBuilder builder = new MemTreeBuilder(); @@ -339,9 
+343,8 @@ public Element invoke(String objectName, String operation, String[] args) throws return null; } - private void queryMBeans(MemTreeBuilder builder, ObjectName query) - throws IOException, InstanceNotFoundException, IntrospectionException, ReflectionException, - SAXException, AttributeNotFoundException, MBeanException, MalformedObjectNameException, NullPointerException { + private void queryMBeans(final MemTreeBuilder builder, final ObjectName query) + throws IOException, InstanceNotFoundException, IntrospectionException, ReflectionException, NullPointerException { MBeanServerConnection conn = connection; Set beans = conn.queryNames(query, null); @@ -362,7 +365,7 @@ private void queryMBeans(MemTreeBuilder builder, ObjectName query) final QName qname = new QName(className, JMX_NAMESPACE, JMX_PREFIX); builder.startElement(qname, null); - builder.addAttribute(new QName("name", XMLConstants.NULL_NS_URI), name.toString()); + builder.addAttribute(new QName("name", XMLConstants.NULL_NS_URI), name.toString()); final MBeanAttributeInfo[] beanAttribs = info.getAttributes(); for (int i = 0; i < beanAttribs.length; i++) { @@ -383,7 +386,7 @@ private void queryMBeans(MemTreeBuilder builder, ObjectName query) } } - private void serializeObject(MemTreeBuilder builder, Object object) throws SAXException { + private void serializeObject(final MemTreeBuilder builder, final Object object) throws SAXException { if (object == null) { return; } @@ -404,13 +407,13 @@ private void serializeObject(MemTreeBuilder builder, Object object) throws SAXEx } } - private void serialize(MemTreeBuilder builder, Object[] data) throws SAXException { + private void serialize(final MemTreeBuilder builder, final Object[] data) throws SAXException { for (final Object o : data) { serializeObject(builder, o); } } - private void serialize(MemTreeBuilder builder, CompositeData data) throws SAXException { + private void serialize(final MemTreeBuilder builder, final CompositeData data) throws 
SAXException { final CompositeType type = data.getCompositeType(); for (final String key : type.keySet()) { final QName qname = new QName(key, JMX_NAMESPACE, JMX_PREFIX); @@ -420,7 +423,7 @@ private void serialize(MemTreeBuilder builder, CompositeData data) throws SAXExc } } - private void serialize(MemTreeBuilder builder, TabularData data) throws SAXException { + private void serialize(final MemTreeBuilder builder, final TabularData data) throws SAXException { final CompositeType rowType = data.getTabularType().getRowType(); for (final Object rowObj : data.values()) { final CompositeData row = (CompositeData) rowObj; @@ -436,7 +439,7 @@ private void serialize(MemTreeBuilder builder, TabularData data) throws SAXExcep } } - private void serialize(MemTreeBuilder builder, CompositeData[] array) throws SAXException { + private void serialize(final MemTreeBuilder builder, final CompositeData[] array) throws SAXException { for (final CompositeData data : array) { builder.startElement(ROW_ELEMENT, null); serialize(builder, data); @@ -444,7 +447,7 @@ private void serialize(MemTreeBuilder builder, CompositeData[] array) throws SAX } } - private Object mapParameter(String type, String value) { + private Object mapParameter(final String type, final String value) { if (type.equals("int") || type.equals(Integer.class.getName())) { return Integer.parseInt(value); } else if (type.equals("long") || type.equals(Long.class.getName())) { @@ -461,18 +464,15 @@ private Object mapParameter(String type, String value) { } /** - * @param args + * @param args program arguments */ - public static void main(String[] args) { - + public static void main(final String[] args) { final JMXtoXML client = new JMXtoXML(); try { client.connect("localhost", 1099); System.out.println(client.generateReport(args)); - } catch (final IOException | TransformerException e) { e.printStackTrace(); } } - } diff --git a/src/org/exist/management/impl/BinaryValues.java 
b/src/org/exist/management/impl/BinaryValues.java index 523ab15196c..b6fb480c3ae 100644 --- a/src/org/exist/management/impl/BinaryValues.java +++ b/src/org/exist/management/impl/BinaryValues.java @@ -19,6 +19,7 @@ */ package org.exist.management.impl; +import org.exist.storage.BrokerPool; import org.exist.util.io.FileFilterInputStreamCache; import org.exist.util.io.FilterInputStreamCache; import org.exist.util.io.FilterInputStreamCacheMonitor; @@ -26,12 +27,37 @@ import org.exist.util.io.MemoryMappedFileFilterInputStreamCache; import org.exist.management.impl.BinaryInputStreamCacheInfo.CacheType; +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Optional; public class BinaryValues implements BinaryValuesMXBean { + private final String instanceId; + + public BinaryValues(final BrokerPool pool) { + this.instanceId = pool.getId(); + } + + public static String getAllInstancesQuery() { + return getName("*"); + } + + private static String getName(final String instanceId) { + return "org.exist.management." 
+ instanceId + ":type=BinaryValues"; + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return new ObjectName(getName(instanceId)); + } + + @Override + public String getInstanceId() { + return instanceId; + } @Override public List getCacheInstances() { @@ -39,16 +65,16 @@ public List getCacheInstances() { final Collection cacheInstances = monitor.getActive(); final List results = new ArrayList<>(); - for(final FilterInputStreamCacheInfo cacheInstance : cacheInstances) { + for (final FilterInputStreamCacheInfo cacheInstance : cacheInstances) { final BinaryInputStreamCacheInfo result; final FilterInputStreamCache cache = cacheInstance.getCache(); - if(cache instanceof FileFilterInputStreamCache) { + if (cache instanceof FileFilterInputStreamCache) { result = new BinaryInputStreamCacheInfo(CacheType.FILE, cacheInstance.getRegistered(), - Optional.of(((FileFilterInputStreamCache)cache).getFilePath()), cache.getLength()); - } else if(cache instanceof MemoryMappedFileFilterInputStreamCache) { + Optional.of(((FileFilterInputStreamCache) cache).getFilePath()), cache.getLength()); + } else if (cache instanceof MemoryMappedFileFilterInputStreamCache) { result = new BinaryInputStreamCacheInfo(CacheType.MEMORY_MAPPED_FILE, cacheInstance.getRegistered(), - Optional.of(((MemoryMappedFileFilterInputStreamCache)cache).getFilePath()), cache.getLength()); + Optional.of(((MemoryMappedFileFilterInputStreamCache) cache).getFilePath()), cache.getLength()); } else { result = new BinaryInputStreamCacheInfo(CacheType.MEMORY, cacheInstance.getRegistered(), Optional.empty(), cache.getLength()); diff --git a/src/org/exist/management/impl/BinaryValuesMXBean.java b/src/org/exist/management/impl/BinaryValuesMXBean.java index 14ccd272096..b8b30ebe335 100644 --- a/src/org/exist/management/impl/BinaryValuesMXBean.java +++ b/src/org/exist/management/impl/BinaryValuesMXBean.java @@ -22,6 +22,6 @@ import java.util.List; -public interface BinaryValuesMXBean { 
+public interface BinaryValuesMXBean extends PerInstanceMBean { List getCacheInstances(); } diff --git a/src/org/exist/management/impl/CollectionCache.java b/src/org/exist/management/impl/CollectionCache.java new file mode 100644 index 00000000000..c8d45487082 --- /dev/null +++ b/src/org/exist/management/impl/CollectionCache.java @@ -0,0 +1,63 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.management.impl; + +import org.exist.storage.BrokerPool; + +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; + +/** + * JMX MXBean for examining the CollectionCache + * + * @author Adam Retter + */ +public class CollectionCache implements CollectionCacheMXBean { + + private final BrokerPool instance; + + public CollectionCache(final BrokerPool instance) { + this.instance = instance; + } + + public static String getAllInstancesQuery() { + return getName("*"); + } + + private static String getName(final String instanceId) { + return "org.exist.management." 
+ instanceId + ":type=CollectionCache"; + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return new ObjectName(getName(instance.getId())); + } + + @Override + public String getInstanceId() { + return instance.getId(); + } + + @Override + public org.exist.collections.CollectionCache.Statistics getStatistics() { + return instance.getCollectionsCache().getStatistics(); + } +} diff --git a/src/org/exist/management/impl/CollectionCacheMXBean.java b/src/org/exist/management/impl/CollectionCacheMXBean.java new file mode 100644 index 00000000000..a96c209d8f1 --- /dev/null +++ b/src/org/exist/management/impl/CollectionCacheMXBean.java @@ -0,0 +1,38 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.management.impl; + +import org.exist.collections.CollectionCache; + +/** + * JMX MXBean interface for examining the CollectionCache + * + * @author Adam Retter + */ +public interface CollectionCacheMXBean extends PerInstanceMBean { + + /** + * Get a statistics snapshot of the Collection Cache + * + * @return Statistics for the Collection Cache + */ + CollectionCache.Statistics getStatistics(); +} diff --git a/src/org/exist/management/impl/Database.java b/src/org/exist/management/impl/Database.java index c252c89d225..65f9dff3cd6 100644 --- a/src/org/exist/management/impl/Database.java +++ b/src/org/exist/management/impl/Database.java @@ -25,8 +25,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Optional; -import javax.management.openmbean.*; +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; + import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; @@ -34,10 +35,23 @@ public class Database implements DatabaseMXBean { private final BrokerPool pool; - public Database(BrokerPool pool) { + public Database(final BrokerPool pool) { this.pool = pool; } + public static String getAllInstancesQuery() { + return getName("*"); + } + + private static String getName(final String instanceId) { + return "org.exist.management." 
+ instanceId + ":type=Database"; + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return new ObjectName(getName(pool.getId())); + } + @Override public String getInstanceId() { return pool.getId(); @@ -60,9 +74,9 @@ public int getActiveBrokers() { @Override public int getTotalBrokers() { - return pool.total(); + return pool.total(); } - + @Override public List getActiveBrokersMap() { final List brokersList = new ArrayList<>(); @@ -89,7 +103,7 @@ public long getCacheMem() { @Override public long getCollectionCacheMem() { - return pool.getCollectionCacheMgr().getMaxTotal(); + return pool.getCollectionsCache().getMaxCacheSize(); } @Override @@ -101,14 +115,14 @@ public long getUptime() { public String getExistHome() { return pool.getConfiguration().getExistHome().map(p -> p.toAbsolutePath().toString()).orElse(null); } - - public String printStackTrace(Thread thread) { + + public String printStackTrace(final Thread thread) { final StackTraceElement[] stackElements = thread.getStackTrace(); - final StringWriter writer = new StringWriter(); + final StringWriter writer = new StringWriter(); final int showItems = stackElements.length > 20 ? 
20 : stackElements.length; - for (int i = 0; i < showItems; i++) { + for (int i = 0; i < showItems; i++) { writer.append(stackElements[i].toString()).append('\n'); - } - return writer.toString(); + } + return writer.toString(); } } diff --git a/src/org/exist/management/impl/DatabaseMXBean.java b/src/org/exist/management/impl/DatabaseMXBean.java index c9575fb6048..abec0c81cdf 100644 --- a/src/org/exist/management/impl/DatabaseMXBean.java +++ b/src/org/exist/management/impl/DatabaseMXBean.java @@ -23,9 +23,7 @@ * * $Id$ */ -public interface DatabaseMXBean { - - String getInstanceId(); +public interface DatabaseMXBean extends PerInstanceMBean { int getMaxBrokers(); diff --git a/src/org/exist/management/impl/DiskUsage.java b/src/org/exist/management/impl/DiskUsage.java index ec496034342..574e75375cb 100644 --- a/src/org/exist/management/impl/DiskUsage.java +++ b/src/org/exist/management/impl/DiskUsage.java @@ -36,6 +36,9 @@ import org.exist.util.FileUtils; import com.evolvedbinary.j8fu.function.FunctionE; +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; + /** * Class DiskUsage. 
Retrieves data from the java File object * @@ -45,20 +48,40 @@ public class DiskUsage implements DiskUsageMXBean { private final static Logger LOG = LogManager.getLogger(DiskUsage.class); - private Optional journalDir; - private Optional dataDir; + private final String instanceId; + private final Optional journalDir; + private final Optional dataDir; public DiskUsage(final BrokerPool pool) { + this.instanceId = pool.getId(); final Configuration config = pool.getConfiguration(); - this.journalDir = Optional.ofNullable((Path)config.getProperty(Journal.PROPERTY_RECOVERY_JOURNAL_DIR)) + this.journalDir = Optional.ofNullable((Path) config.getProperty(Journal.PROPERTY_RECOVERY_JOURNAL_DIR)) .filter(Files::isDirectory); - this.dataDir = Optional.ofNullable((Path)config.getProperty(BrokerPool.PROPERTY_DATA_DIR)) + this.dataDir = Optional.ofNullable((Path) config.getProperty(BrokerPool.PROPERTY_DATA_DIR)) .filter(Files::isDirectory); } + public static String getAllInstancesQuery() { + return getName("*"); + } + + private static String getName(final String instanceId) { + return "org.exist.management." + instanceId + ":type=DiskUsage"; + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return new ObjectName(getName(instanceId)); + } + + @Override + public String getInstanceId() { + return instanceId; + } + @Override public String getDataDirectory() { return dataDir.map(d -> d.toAbsolutePath().toString()).orElse(NOT_CONFIGURED); @@ -98,7 +121,7 @@ public long getJournalDirectoryUsableSpace() { @Override public long getDataDirectoryUsedSpace() { return dataDir.map(d -> { - try(final Stream files = Files.list(d)) { + try (final Stream files = Files.list(d)) { return files .filter(this::isDbxFile) .mapToLong(p -> { @@ -106,7 +129,7 @@ public long getDataDirectoryUsedSpace() { return size == NO_VALUE ? 
0 : size; }) .sum(); - } catch(final IOException ioe) { + } catch (final IOException ioe) { LOG.error(ioe); return NO_VALUE; } @@ -116,7 +139,7 @@ public long getDataDirectoryUsedSpace() { @Override public long getJournalDirectoryUsedSpace() { return dataDir.map(d -> { - try(final Stream files = Files.list(d)) { + try (final Stream files = Files.list(d)) { return files .filter(this::isJournalFile) .mapToLong(p -> { @@ -124,7 +147,7 @@ public long getJournalDirectoryUsedSpace() { return size == NO_VALUE ? 0 : size; }) .sum(); - } catch(final IOException ioe) { + } catch (final IOException ioe) { LOG.error(ioe); return NO_VALUE; } @@ -134,7 +157,7 @@ public long getJournalDirectoryUsedSpace() { @Override public long getJournalDirectoryNumberOfFiles() { return journalDir.map(j -> { - try(final Stream files = Files.list(j)) { + try (final Stream files = Files.list(j)) { return files .filter(this::isJournalFile) .count(); diff --git a/src/org/exist/management/impl/DiskUsageMXBean.java b/src/org/exist/management/impl/DiskUsageMXBean.java index 9151ec7dfc9..23d0ac5af34 100644 --- a/src/org/exist/management/impl/DiskUsageMXBean.java +++ b/src/org/exist/management/impl/DiskUsageMXBean.java @@ -27,37 +27,33 @@ * * @author dizzzz@exist-db.org */ -public interface DiskUsageMXBean -{ +public interface DiskUsageMXBean extends PerInstanceMBean { /** * No disk space could be determined. */ - public static final long NO_VALUE = -1; + long NO_VALUE = -1; /** * Directory is not defined. 
*/ - public static final String NOT_CONFIGURED = "NOT_CONFIGURED"; - - public String getDataDirectory(); - - public long getDataDirectoryUsableSpace(); - - public long getDataDirectoryTotalSpace(); - - public long getDataDirectoryUsedSpace(); - - public String getJournalDirectory(); - - public long getJournalDirectoryUsableSpace(); - - public long getJournalDirectoryTotalSpace(); - - public long getJournalDirectoryUsedSpace(); - - public long getJournalDirectoryNumberOfFiles(); - -} + String NOT_CONFIGURED = "NOT_CONFIGURED"; + + String getDataDirectory(); + + long getDataDirectoryUsableSpace(); + + long getDataDirectoryTotalSpace(); + + long getDataDirectoryUsedSpace(); + String getJournalDirectory(); + long getJournalDirectoryUsableSpace(); + + long getJournalDirectoryTotalSpace(); + + long getJournalDirectoryUsedSpace(); + + long getJournalDirectoryNumberOfFiles(); +} diff --git a/src/org/exist/management/impl/JMXAgent.java b/src/org/exist/management/impl/JMXAgent.java index 6c8006e2c28..06386a1a92b 100755 --- a/src/org/exist/management/impl/JMXAgent.java +++ b/src/org/exist/management/impl/JMXAgent.java @@ -36,10 +36,7 @@ import javax.management.MalformedObjectNameException; import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; -import java.util.Stack; +import java.util.*; /** * Real implementation of interface {@link org.exist.management.Agent} @@ -47,30 +44,28 @@ */ public class JMXAgent implements Agent { - private final static Logger LOG = LogManager.getLogger(JMXAgent.class); + private static final Logger LOG = LogManager.getLogger(JMXAgent.class); + private static final JMXAgent instance = new JMXAgent(); - private static volatile Agent agent = null; + private final MBeanServer server; + private final Map> registeredMBeans = new HashMap<>(); + private final Map beanInstances = new HashMap<>(); public static Agent getInstance() { - if (agent == 
null) { - agent = new JMXAgent(); - } - return agent; + return instance; } - private MBeanServer server; - private Map> registeredMBeans = new HashMap<>(); - private Map beanInstances = new HashMap<>(); - - public JMXAgent() { - if (LOG.isDebugEnabled()) - {LOG.debug("Creating the JMX MBeanServer.");} + private JMXAgent() { + if (LOG.isDebugEnabled()) { + LOG.debug("Creating the JMX MBeanServer."); + } final ArrayList servers = MBeanServerFactory.findMBeanServer(null); - if (servers.size() > 0) - {server = servers.get(0);} - else - {server = MBeanServerFactory.createMBeanServer();} + if (servers.size() > 0) { + server = servers.get(0); + } else { + server = MBeanServerFactory.createMBeanServer(); + } // try { // JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://127.0.0.1:9999/server"); @@ -82,51 +77,47 @@ public JMXAgent() { registerSystemMBeans(); } - public synchronized void registerSystemMBeans() { + private void registerSystemMBeans() { try { - ObjectName name = new ObjectName("org.exist.management:type=LockManager"); - addMBean(name, new org.exist.management.impl.LockManager()); - - name = new ObjectName("org.exist.management:type=SystemInfo"); - addMBean(name, new org.exist.management.impl.SystemInfo()); - + addMBean(new ObjectName(SystemInfo.OBJECT_NAME), new org.exist.management.impl.SystemInfo()); } catch (final MalformedObjectNameException | DatabaseConfigurationException e) { LOG.warn("Exception while registering cache mbean.", e); } } @Override - public void initDBInstance(BrokerPool instance) { - try { - addMBean(instance.getId(), "org.exist.management." + instance.getId() + ":type=Database", - new org.exist.management.impl.Database(instance)); - - addMBean(instance.getId(), "org.exist.management." + instance.getId() + ".tasks:type=SanityReport", - new SanityReport(instance)); - - addMBean(instance.getId(), "org.exist.management." 
+ instance.getId() + ":type=DiskUsage", - new DiskUsage(instance)); - - addMBean(instance.getId(), "org.exist.management." + instance.getId() + ":type=ProcessReport", - new ProcessReport(instance)); - - addMBean(instance.getId(), "org.exist.management." + instance.getId() + ":type=BinaryValues", - new BinaryValues()); - - } catch (final DatabaseConfigurationException e) { - LOG.warn("Exception while registering database mbean.", e); + public synchronized void initDBInstance(final BrokerPool instance) { + final List perInstanceMBeans = Arrays.asList( + new Database(instance), + new LockTable(instance), + new SanityReport(instance), + new DiskUsage(instance), + new ProcessReport(instance), + new BinaryValues(instance), + new CollectionCache(instance) + ); + + for (final PerInstanceMBean perInstanceMBean : perInstanceMBeans) { + try { + addMBean(perInstanceMBean); + } catch (final DatabaseConfigurationException e) { + LOG.warn("Exception while registering JMX MBean: " + perInstanceMBean.getClass().getName() + ", for database: " + instance.getId() + ".", e); + } } } @Override - public synchronized void closeDBInstance(BrokerPool instance) { + public synchronized void closeDBInstance(final BrokerPool instance) { try { - final Stack stack = registeredMBeans.get(instance.getId()); + final Deque stack = registeredMBeans.get(instance.getId()); while (!stack.isEmpty()) { - final ObjectName on = (ObjectName) stack.pop(); - LOG.debug("deregistering JMX MBean: " + on); - if (server.isRegistered(on)) - {server.unregisterMBean(on);} + final ObjectName on = stack.pop(); + if (LOG.isDebugEnabled()) { + LOG.debug("Unregistering JMX MBean: " + on); + } + if (server.isRegistered(on)) { + server.unregisterMBean(on); + } } } catch (final InstanceNotFoundException | MBeanRegistrationException e) { LOG.warn("Problem found while unregistering JMX", e); @@ -134,39 +125,38 @@ public synchronized void closeDBInstance(BrokerPool instance) { } @Override - public synchronized void 
addMBean(String dbInstance, String name, Object mbean) throws DatabaseConfigurationException { + public synchronized void addMBean(final PerInstanceMBean mbean) throws DatabaseConfigurationException { try { - final ObjectName on = new ObjectName(name); - addMBean(on, mbean); - if (dbInstance != null) { - Stack stack = registeredMBeans.get(dbInstance); + addMBean(mbean.getName(), mbean); + if (mbean.getInstanceId() != null) { + Deque stack = registeredMBeans.get(mbean.getInstanceId()); if (stack == null) { - stack = new Stack<>(); - registeredMBeans.put(dbInstance, stack); + stack = new ArrayDeque<>(); + registeredMBeans.put(mbean.getInstanceId(), stack); } - stack.push(on); + stack.push(mbean.getName()); } - beanInstances.put(on, mbean); + beanInstances.put(mbean.getName(), mbean); } catch (final MalformedObjectNameException e) { - LOG.warn("Problem registering mbean: " + e.getMessage(), e); - throw new DatabaseConfigurationException("Exception while registering JMX mbean: " + e.getMessage()); + LOG.warn("Problem registering JMX MBean: " + e.getMessage(), e); + throw new DatabaseConfigurationException("Exception while registering JMX MBean: " + e.getMessage()); } } - private void addMBean(ObjectName name, Object mbean) throws DatabaseConfigurationException { + private void addMBean(final ObjectName name, final Object mbean) throws DatabaseConfigurationException { try { if (!server.isRegistered(name)) { server.registerMBean(mbean, name); } } catch (final InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) { - LOG.warn("Problem registering mbean: " + e.getMessage(), e); - throw new DatabaseConfigurationException("Exception while registering JMX mbean: " + e.getMessage()); + LOG.warn("Problem registering JMX MBean: " + e.getMessage(), e); + throw new DatabaseConfigurationException("Exception while registering JMX MBean: " + e.getMessage()); } } @Override - public synchronized void changeStatus(BrokerPool instance, TaskStatus 
actualStatus) { + public synchronized void changeStatus(final BrokerPool instance, final TaskStatus actualStatus) { try { final ObjectName name = new ObjectName("org.exist.management." + instance.getId() + ".tasks:type=SanityReport"); final SanityReport report = (SanityReport) beanInstances.get(name); @@ -174,12 +164,12 @@ public synchronized void changeStatus(BrokerPool instance, TaskStatus actualStat report.changeStatus(actualStatus); } } catch (final MalformedObjectNameException e) { - LOG.warn("Problem calling mbean: " + e.getMessage(), e); + LOG.warn("Problem calling JMX MBean: " + e.getMessage(), e); } } @Override - public synchronized void updateStatus(BrokerPool instance, int percentage) { + public synchronized void updateStatus(final BrokerPool instance, final int percentage) { try { final ObjectName name = new ObjectName("org.exist.management." + instance.getId() + ".tasks:type=SanityReport"); final SanityReport report = (SanityReport) beanInstances.get(name); @@ -187,7 +177,7 @@ public synchronized void updateStatus(BrokerPool instance, int percentage) { report.updateStatus(percentage); } } catch (final MalformedObjectNameException e) { - LOG.warn("Problem calling mbean: " + e.getMessage(), e); + LOG.warn("Problem calling JMX MBean: " + e.getMessage(), e); } } } diff --git a/src/org/exist/management/impl/LockManager.java b/src/org/exist/management/impl/LockManager.java deleted file mode 100644 index 32917a5da09..00000000000 --- a/src/org/exist/management/impl/LockManager.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2001-07 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - * $Id$ - */ -package org.exist.management.impl; - -import org.exist.storage.lock.DeadlockDetection; -import org.exist.storage.lock.LockInfo; - -import javax.management.openmbean.*; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * Returns information from the lock manager. Very useful to check for deadlocks. - */ -public class LockManager implements LockManagerMXBean { - - @Override - public List getWaitingThreads() { - final List lockList = new ArrayList<>(); - final Map map = DeadlockDetection.getWaitingThreads(); - for (final Map.Entry entry : map.entrySet()) { - lockList.add(new Lock(entry.getKey(), entry.getValue())); - } - return lockList; - } -} \ No newline at end of file diff --git a/src/org/exist/management/impl/LockManagerMXBean.java b/src/org/exist/management/impl/LockManagerMXBean.java deleted file mode 100644 index 7d606cf6034..00000000000 --- a/src/org/exist/management/impl/LockManagerMXBean.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2001-07 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * - * $Id$ - */ -package org.exist.management.impl; - -import java.util.List; - -/** - * - */ -public interface LockManagerMXBean { - - public List getWaitingThreads(); -} diff --git a/src/org/exist/management/impl/LockTable.java b/src/org/exist/management/impl/LockTable.java new file mode 100644 index 00000000000..0edf928a379 --- /dev/null +++ b/src/org/exist/management/impl/LockTable.java @@ -0,0 +1,87 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.management.impl; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.exist.storage.BrokerPool; +import org.exist.storage.lock.Lock; +import org.exist.storage.lock.Lock.LockType; +import org.exist.storage.lock.LockTable.LockModeOwner; +import org.exist.storage.lock.LockTableUtils; + +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; +import java.util.List; +import java.util.Map; + +/** + * JMX MXBean for examining the LockTable + * + * @author Adam Retter + */ +public class LockTable implements LockTableMXBean { + + private final BrokerPool pool; + + public LockTable(final BrokerPool brokerPool) { + this.pool = brokerPool; + } + + public static String getAllInstancesQuery() { + return getName("*"); + } + + private static String getName(final String instanceId) { + return "org.exist.management." 
+ instanceId + ":type=LockTable"; + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return new ObjectName(getName(pool.getId())); + } + + @Override + public String getInstanceId() { + return pool.getId(); + } + + @Override + public Map>>> getAcquired() { + return pool.getLockManager().getLockTable().getAcquired(); + } + + @Override + public Map>> getAttempting() { + return pool.getLockManager().getLockTable().getAttempting(); + } + + @Override + public void dumpToConsole() { + System.out.println(LockTableUtils.stateToString(pool.getLockManager().getLockTable())); + } + + private final static Logger LOCK_LOG = LogManager.getLogger(org.exist.storage.lock.LockTable.class); + + @Override + public void dumpToLog() { + LOCK_LOG.info(LockTableUtils.stateToString(pool.getLockManager().getLockTable())); + } +} diff --git a/src/org/exist/management/impl/LockTableMXBean.java b/src/org/exist/management/impl/LockTableMXBean.java new file mode 100644 index 00000000000..bfd73cad9f6 --- /dev/null +++ b/src/org/exist/management/impl/LockTableMXBean.java @@ -0,0 +1,55 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.management.impl; + +import org.exist.storage.lock.Lock; +import org.exist.storage.lock.LockTable; +import org.exist.storage.lock.LockTable.LockModeOwner; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * JMX MXBean interface for examining the LockTable + * + * @author Adam Retter + */ +public interface LockTableMXBean extends PerInstanceMBean { + + /** + * Get information about acquired locks + * + * @return information about acquired locks + */ + Map>>> getAcquired(); + + /** + * Get information about outstanding attempts to acquire locks + * + * @return information about outstanding attempts to acquire locks + */ + Map>> getAttempting(); + + void dumpToConsole(); + + void dumpToLog(); +} diff --git a/src/org/exist/management/impl/PerInstanceMBean.java b/src/org/exist/management/impl/PerInstanceMBean.java new file mode 100644 index 00000000000..18c289b6a19 --- /dev/null +++ b/src/org/exist/management/impl/PerInstanceMBean.java @@ -0,0 +1,29 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.management.impl; + +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; + +public interface PerInstanceMBean { + String getInstanceId(); + ObjectName getName() throws MalformedObjectNameException; +} diff --git a/src/org/exist/management/impl/ProcessReport.java b/src/org/exist/management/impl/ProcessReport.java index 7f172b4721a..497088f3b54 100644 --- a/src/org/exist/management/impl/ProcessReport.java +++ b/src/org/exist/management/impl/ProcessReport.java @@ -22,9 +22,7 @@ package org.exist.management.impl; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.exist.scheduler.ScheduledJobInfo; import org.exist.scheduler.Scheduler; @@ -32,30 +30,39 @@ import org.exist.storage.ProcessMonitor; import org.exist.xquery.XQueryContext; import org.exist.xquery.XQueryWatchDog; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import javax.management.openmbean.TabularData; -import javax.management.openmbean.OpenType; -import javax.management.openmbean.SimpleType; -import javax.management.openmbean.CompositeType; -import javax.management.openmbean.TabularType; -import javax.management.openmbean.TabularDataSupport; -import javax.management.openmbean.OpenDataException; -import javax.management.openmbean.CompositeDataSupport; + +import javax.management.MalformedObjectNameException; +import javax.management.ObjectName; + import org.exist.storage.ProcessMonitor.QueryHistory; public class ProcessReport implements ProcessReportMXBean { + private final String instanceId; + private final ProcessMonitor processMonitor; + private final Scheduler scheduler; + + public ProcessReport(final BrokerPool pool) { + 
this.instanceId = pool.getId(); + this.processMonitor = pool.getProcessMonitor(); + this.scheduler = pool.getScheduler(); + } - private final static Logger LOG = LogManager.getLogger(ProcessReport.class); + public static String getAllInstancesQuery() { + return getName("*"); + } - private ProcessMonitor processMonitor; + private static String getName(final String instanceId) { + return "org.exist.management." + instanceId + ":type=ProcessReport"; + } - private Scheduler scheduler; + @Override + public ObjectName getName() throws MalformedObjectNameException { + return new ObjectName(getName(instanceId)); + } - public ProcessReport(BrokerPool pool) { - processMonitor = pool.getProcessMonitor(); - scheduler = pool.getScheduler(); + @Override + public String getInstanceId() { + return instanceId; } @Override @@ -96,13 +103,14 @@ public List getRunningQueries() { return queries; } - public void killQuery(int id) { + @Override + public void killQuery(final int id) { final XQueryWatchDog[] watchdogs = processMonitor.getRunningXQueries(); for (XQueryWatchDog watchdog : watchdogs) { final XQueryContext context = watchdog.getContext(); - if( id == context.hashCode() ) { - if( !watchdog.isTerminating() ) { + if (id == context.hashCode()) { + if (!watchdog.isTerminating()) { watchdog.kill(1000); } break; @@ -115,7 +123,7 @@ public List getRecentQueryHistory() { final List history = new ArrayList<>(); final QueryHistory[] queryHistories = processMonitor.getRecentQueryHistory(); int i = 0; - for(final QueryHistory queryHistory : queryHistories) { + for (final QueryHistory queryHistory : queryHistories) { history.add(new RecentQueryHistory(i++, queryHistory)); } return history; @@ -128,7 +136,7 @@ public List getRecentQueryHistory() { * @param time */ @Override - public void setHistoryTimespan(long time) { + public void setHistoryTimespan(final long time) { processMonitor.setHistoryTimespan(time); } @@ -144,7 +152,7 @@ public long getHistoryTimespan() { * @param time */ 
@Override - public void setMinTime(long time) { + public void setMinTime(final long time) { processMonitor.setMinTime(time); } @@ -161,7 +169,7 @@ public long getMinTime() { * @param track */ @Override - public void setTrackRequestURI(boolean track) { + public void setTrackRequestURI(final boolean track) { processMonitor.setTrackRequestURI(track); } @@ -177,11 +185,11 @@ public boolean getTrackRequestURI() { * (see {@link ProcessMonitor#setMinTime(long)}). * @param historyTimespan The max duration (in milliseconds) for which queries are tracked in the query history * (see {@link ProcessMonitor#setHistoryTimespan(long)}). - * @param trackURI Set to true if the class should attempt to determine the HTTP URI through which the query was triggered - * (see {@link ProcessMonitor#setHistoryTimespan(long)}). + * @param trackURI Set to true if the class should attempt to determine the HTTP URI through which the query was triggered + * (see {@link ProcessMonitor#setHistoryTimespan(long)}). */ @Override - public void configure(long minTimeRecorded, long historyTimespan, boolean trackURI) { + public void configure(final long minTimeRecorded, final long historyTimespan, final boolean trackURI) { processMonitor.setMinTime(minTimeRecorded); processMonitor.setHistoryTimespan(historyTimespan); processMonitor.setTrackRequestURI(trackURI); diff --git a/src/org/exist/management/impl/ProcessReportMXBean.java b/src/org/exist/management/impl/ProcessReportMXBean.java index b7061358706..b3034f4cd47 100644 --- a/src/org/exist/management/impl/ProcessReportMXBean.java +++ b/src/org/exist/management/impl/ProcessReportMXBean.java @@ -23,7 +23,7 @@ import java.util.List; -public interface ProcessReportMXBean { +public interface ProcessReportMXBean extends PerInstanceMBean { List getScheduledJobs(); @@ -40,9 +40,9 @@ public interface ProcessReportMXBean { * * @param minTimeRecorded minimum execution time of queries recorded in the recent query history * @param historyTimespan time span (in 
milliseconds) for which the stats for an executed query should - * be kept in the recent query history - * @param trackURI Enable request tracking: for every executed query, try to figure out which HTTP - * URL triggered it (if applicable) + * be kept in the recent query history + * @param trackURI Enable request tracking: for every executed query, try to figure out which HTTP + * URL triggered it (if applicable) */ void configure(long minTimeRecorded, long historyTimespan, boolean trackURI); diff --git a/src/org/exist/management/impl/SanityReport.java b/src/org/exist/management/impl/SanityReport.java index bdada32d398..d75006387f7 100644 --- a/src/org/exist/management/impl/SanityReport.java +++ b/src/org/exist/management/impl/SanityReport.java @@ -23,18 +23,7 @@ import java.util.*; -import javax.management.AttributeChangeNotification; -import javax.management.MBeanNotificationInfo; -import javax.management.Notification; -import javax.management.NotificationBroadcasterSupport; -import javax.management.openmbean.CompositeDataSupport; -import javax.management.openmbean.CompositeType; -import javax.management.openmbean.OpenDataException; -import javax.management.openmbean.OpenType; -import javax.management.openmbean.SimpleType; -import javax.management.openmbean.TabularData; -import javax.management.openmbean.TabularDataSupport; -import javax.management.openmbean.TabularType; +import javax.management.*; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -59,11 +48,11 @@ public class SanityReport extends NotificationBroadcasterSupport implements Sani public final static String STATUS_FAIL = "FAIL"; public final static StringSource TEST_XQUERY = new StringSource("{current-dateTime()}"); - + public final static int PING_WAITING = -1; public final static int PING_ERROR = -2; - private static List NO_ERRORS = new LinkedList(); + private static List NO_ERRORS = new LinkedList<>(); private int seqNum = 0; @@ -76,7 +65,7 @@ public class 
SanityReport extends NotificationBroadcasterSupport implements Sani private String lastActionInfo = "nothing done"; private long lastPingRespTime = 0; - + private String output = ""; private TaskStatus taskstatus = new TaskStatus(TaskStatus.Status.NEVER_RUN); @@ -84,18 +73,36 @@ public class SanityReport extends NotificationBroadcasterSupport implements Sani private List errors = NO_ERRORS; private BrokerPool pool; - + public SanityReport(BrokerPool pool) { this.pool = pool; } + public static String getAllInstancesQuery() { + return "org.exist.management." + '*' + ":type=SanityReport"; + } + + public static ObjectName getName(final String instanceId) throws MalformedObjectNameException { + return new ObjectName("org.exist.management." + instanceId + ".tasks:type=SanityReport"); + } + + @Override + public ObjectName getName() throws MalformedObjectNameException { + return getName(pool.getId()); + } + + @Override + public String getInstanceId() { + return pool.getId(); + } + @Override public MBeanNotificationInfo[] getNotificationInfo() { - final String[] types = new String[] { AttributeChangeNotification.ATTRIBUTE_CHANGE }; + final String[] types = new String[]{AttributeChangeNotification.ATTRIBUTE_CHANGE}; final String name = AttributeChangeNotification.class.getName(); final String description = "The status attribute of this MBean has changed"; final MBeanNotificationInfo info = new MBeanNotificationInfo(types, name, description); - return new MBeanNotificationInfo[] { info }; + return new MBeanNotificationInfo[]{info}; } @Override @@ -125,9 +132,9 @@ public String getLastActionInfo() { @Override public long getPingTime() { - return lastPingRespTime; + return lastPingRespTime; } - + @Override public List getErrors() { final List errorList = new ArrayList<>(); @@ -151,9 +158,9 @@ public void triggerCheck(String output, String backup, String incremental) { final List errors = new ArrayList<>(); errors.add( - new ErrorReport( - ErrorReport.CONFIGURATION_FAILD, - 
existException.getMessage(), existException)); + new ErrorReport( + ErrorReport.CONFIGURATION_FAILD, + existException.getMessage(), existException)); taskstatus.setReason(errors); changeStatus(taskstatus); @@ -165,48 +172,50 @@ public void triggerCheck(String output, String backup, String incremental) { @Override public long ping(boolean checkQueryEngine) { - final long start = System.currentTimeMillis(); - lastPingRespTime = -1; - lastActionInfo = "Ping"; - - taskstatus.setStatus(TaskStatus.Status.PING_WAIT); + final long start = System.currentTimeMillis(); + lastPingRespTime = -1; + lastActionInfo = "Ping"; + + taskstatus.setStatus(TaskStatus.Status.PING_WAIT); // try to acquire a broker. If the db is deadlocked or not responsive, // this will block forever. - try(final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getGuestSubject()))) { - - if (checkQueryEngine) { - final XQuery xquery = pool.getXQueryService(); - final XQueryPool xqPool = pool.getXQueryPool(); - CompiledXQuery compiled = xqPool.borrowCompiledXQuery(broker, TEST_XQUERY); - if (compiled == null) { - final XQueryContext context = new XQueryContext(pool); - compiled = xquery.compile(broker, context, TEST_XQUERY); - } - try { - xquery.execute(broker, compiled, null); - } finally { - compiled.getContext().runCleanupTasks(); - xqPool.returnCompiledXQuery(TEST_XQUERY, compiled); - } - } - } catch (final Exception e) { - lastPingRespTime = -2; - taskstatus.setStatus(TaskStatus.Status.PING_ERROR); - taskstatus.setStatusChangeTime(); - taskstatus.setReason(e.getMessage()); + try (final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getGuestSubject()))) { + + if (checkQueryEngine) { + final XQuery xquery = pool.getXQueryService(); + final XQueryPool xqPool = pool.getXQueryPool(); + CompiledXQuery compiled = xqPool.borrowCompiledXQuery(broker, TEST_XQUERY); + if (compiled == null) { + final XQueryContext context = new XQueryContext(pool); + compiled = 
xquery.compile(broker, context, TEST_XQUERY); + } else { + compiled.getContext().prepareForReuse(); + } + try { + xquery.execute(broker, compiled, null); + } finally { + compiled.getContext().runCleanupTasks(); + xqPool.returnCompiledXQuery(TEST_XQUERY, compiled); + } + } + } catch (final Exception e) { + lastPingRespTime = -2; + taskstatus.setStatus(TaskStatus.Status.PING_ERROR); + taskstatus.setStatusChangeTime(); + taskstatus.setReason(e.getMessage()); changeStatus(taskstatus); - } finally { - lastPingRespTime = System.currentTimeMillis() - start; - taskstatus.setStatus(TaskStatus.Status.PING_OK); - taskstatus.setStatusChangeTime(); - taskstatus.setReason("ping response time: " + lastPingRespTime); - changeStatus(taskstatus); - } - return lastPingRespTime; + } finally { + lastPingRespTime = System.currentTimeMillis() - start; + taskstatus.setStatus(TaskStatus.Status.PING_OK); + taskstatus.setStatusChangeTime(); + taskstatus.setReason("ping response time: " + lastPingRespTime); + changeStatus(taskstatus); + } + return lastPingRespTime; } - + private Properties parseParameter(String output, String backup, String incremental) { final Properties properties = new Properties(); final boolean doBackup = backup.equalsIgnoreCase("YES"); @@ -282,7 +291,7 @@ protected void updateStatus(int percentage) { taskstatus.setPercentage(percentage); final Notification event = new AttributeChangeNotification(this, seqNum++, taskstatus.getStatusChangeTime().getTime(), "Work percentage change", "status", "int", String.valueOf(oldPercentage), String.valueOf(taskstatus - .getPercentage())); + .getPercentage())); event.setUserData(taskstatus.getCompositeData()); sendNotification(event); } catch (final Exception e) { diff --git a/src/org/exist/management/impl/SanityReportMXBean.java b/src/org/exist/management/impl/SanityReportMXBean.java index 0d35987701e..ca26d1d25d8 100644 --- a/src/org/exist/management/impl/SanityReportMXBean.java +++ 
b/src/org/exist/management/impl/SanityReportMXBean.java @@ -24,23 +24,23 @@ import java.util.Date; import java.util.List; -public interface SanityReportMXBean { +public interface SanityReportMXBean extends PerInstanceMBean { - public Date getLastCheckStart(); + Date getLastCheckStart(); - public Date getLastCheckEnd(); + Date getLastCheckEnd(); - public String getLastActionInfo(); + String getLastActionInfo(); - public Date getActualCheckStart(); + Date getActualCheckStart(); - public String getStatus(); + String getStatus(); - public long getPingTime(); + long getPingTime(); - public List getErrors(); + List getErrors(); - public void triggerCheck(String output, String backup, String incremental); + void triggerCheck(String output, String backup, String incremental); - public long ping(boolean checkQueryEngine); + long ping(boolean checkQueryEngine); } \ No newline at end of file diff --git a/src/org/exist/management/impl/SystemInfo.java b/src/org/exist/management/impl/SystemInfo.java index d48772f3f18..2e1f55dba29 100644 --- a/src/org/exist/management/impl/SystemInfo.java +++ b/src/org/exist/management/impl/SystemInfo.java @@ -26,6 +26,8 @@ import org.exist.SystemProperties; +import javax.management.ObjectName; + /** * Class SystemInfo * @@ -34,8 +36,7 @@ */ public class SystemInfo implements SystemInfoMXBean { - public SystemInfo() { - } + public static final String OBJECT_NAME = "org.exist.management:type=SystemInfo"; @Override public String getExistVersion() { diff --git a/src/org/exist/management/impl/SystemInfoMXBean.java b/src/org/exist/management/impl/SystemInfoMXBean.java index 9b6cac6b580..a0222ac69a2 100644 --- a/src/org/exist/management/impl/SystemInfoMXBean.java +++ b/src/org/exist/management/impl/SystemInfoMXBean.java @@ -26,20 +26,19 @@ * @author wessels * @author ljo */ -public interface SystemInfoMXBean -{ - public String getExistVersion(); - - public String getExistBuild(); +public interface SystemInfoMXBean { + String getExistVersion(); + + 
String getExistBuild(); @Deprecated - public String getSvnRevision(); - - public String getGitCommit(); + String getSvnRevision(); + + String getGitCommit(); + + String getOperatingSystem(); + + String getDefaultLocale(); - public String getOperatingSystem(); - - public String getDefaultLocale(); - - public String getDefaultEncoding(); + String getDefaultEncoding(); } diff --git a/src/org/exist/numbering/DLNBase.java b/src/org/exist/numbering/DLNBase.java index cf795d31641..7d0e4ca8503 100644 --- a/src/org/exist/numbering/DLNBase.java +++ b/src/org/exist/numbering/DLNBase.java @@ -70,7 +70,8 @@ public class DLNBase { private static int[] initComponents() { final int size[] = new int[10]; size[0] = 7; // = Math.pow(2, 3) - 1; - int components, numBits; + int components; + int numBits; for (int i = 1; i < size.length; i++) { components = i + 1; numBits = components * BITS_PER_UNIT - components; diff --git a/src/org/exist/plugin/PluginsManagerImpl.java b/src/org/exist/plugin/PluginsManagerImpl.java index 02cf3c99b1f..3b47758a50c 100644 --- a/src/org/exist/plugin/PluginsManagerImpl.java +++ b/src/org/exist/plugin/PluginsManagerImpl.java @@ -37,16 +37,18 @@ import org.exist.backup.BackupHandler; import org.exist.backup.RestoreHandler; import org.exist.collections.Collection; +import org.exist.collections.triggers.TriggerException; import org.exist.config.*; import org.exist.config.Configuration; import org.exist.config.annotation.*; import org.exist.security.Permission; +import org.exist.security.PermissionDeniedException; import org.exist.storage.BrokerPool; import org.exist.storage.BrokerPoolService; import org.exist.storage.BrokerPoolServiceException; import org.exist.storage.DBBroker; -import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; +import org.exist.util.LockException; import org.exist.util.serializer.SAXSerializer; import org.exist.xmldb.XmldbURI; import org.w3c.dom.Document; @@ -96,38 +98,34 @@ public void prepare(final 
BrokerPool brokerPool) { } @Override - public void startSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + public void startSystem(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { try { - start(systemBroker); + start(systemBroker, transaction); } catch(final EXistException e) { throw new BrokerPoolServiceException(e); } } @Override - public void start(DBBroker broker) throws EXistException { - final TransactionManager transaction = broker.getBrokerPool().getTransactionManager(); - + public void start(final DBBroker broker, final Txn transaction) throws EXistException { boolean interrupted = false; try { - try (final Txn txn = transaction.beginTransaction()) { + try { collection = broker.getCollection(COLLETION_URI); if (collection == null) { - collection = broker.getOrCreateCollection(txn, COLLETION_URI); + collection = broker.getOrCreateCollection(transaction, COLLETION_URI); if (collection == null) { return; } //if db corrupted it can lead to unrunnable issue //throw new ConfigurationException("Collection '/db/system/plugins' can't be created."); - collection.setPermissions(Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); - broker.saveCollection(txn, collection); + collection.setPermissions(broker, Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); + broker.saveCollection(transaction, collection); } - transaction.commit(txn); - } catch (final Exception e) { - e.printStackTrace(); - LOG.debug("loading configuration failed: " + e.getMessage()); + } catch (final TriggerException | PermissionDeniedException | IOException | LockException e) { + LOG.warn("Loading PluginsManager configuration failed: " + e.getMessage()); } final Configuration _config_ = Configurator.parse(this, broker, collection, CONFIG_FILE_URI); @@ -154,12 +152,11 @@ public void start(DBBroker broker) throws EXistException { // NOTE: must set interrupted flag interrupted = true; } - - e.printStackTrace(); + LOG.error(e); } } } catch 
(final Throwable e) { - e.printStackTrace(); + LOG.error(e); } //UNDERSTAND: call save? @@ -170,7 +167,7 @@ public void start(DBBroker broker) throws EXistException { // } for (final Plug jack : jacks.values()) { - jack.start(broker); + jack.start(broker, transaction); } } finally { if (interrupted) { @@ -239,7 +236,7 @@ public void addPlugin(final String className) { // NOTE: must set interrupted flag Thread.currentThread().interrupt(); } -// e.printStackTrace(); + LOG.error(e); } } diff --git a/src/org/exist/protocolhandler/URLStreamHandlerStartupTrigger.java b/src/org/exist/protocolhandler/URLStreamHandlerStartupTrigger.java index c5b7cd4b9ba..098ad16f848 100644 --- a/src/org/exist/protocolhandler/URLStreamHandlerStartupTrigger.java +++ b/src/org/exist/protocolhandler/URLStreamHandlerStartupTrigger.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger; import org.exist.storage.DBBroker; import org.exist.storage.StartupTrigger; +import org.exist.storage.txn.Txn; import java.net.URL; import java.util.List; @@ -50,7 +51,7 @@ public class URLStreamHandlerStartupTrigger implements StartupTrigger { private final static AtomicBoolean registered = new AtomicBoolean(); @Override - public void execute(final DBBroker sysBroker, final Map> params) { + public void execute(final DBBroker sysBroker, final Txn transaction, final Map> params) { String mode = null; if (params != null) { List list = params.get("mode"); diff --git a/src/org/exist/protocolhandler/embedded/EmbeddedInputStream.java b/src/org/exist/protocolhandler/embedded/EmbeddedInputStream.java index 5a443700a3e..29b21a2b1b1 100644 --- a/src/org/exist/protocolhandler/embedded/EmbeddedInputStream.java +++ b/src/org/exist/protocolhandler/embedded/EmbeddedInputStream.java @@ -37,6 +37,7 @@ import org.exist.collections.Collection; import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import 
org.exist.protocolhandler.xmldb.XmldbURL; import org.exist.security.PermissionDeniedException; import org.exist.storage.BrokerPool; @@ -104,23 +105,23 @@ private static Either openStream(final BrokerPool pool try (final DBBroker broker = pool.getBroker()) { - DocumentImpl resource = null; - Collection collection = null; - try { - resource = broker.getXMLResource(path, Lock.LockMode.READ_LOCK); - if (resource == null) { - // Test for collection - collection = broker.openCollection(path, Lock.LockMode.READ_LOCK); - if (collection == null) { - // No collection, no document - return Left(new IOException("Resource " + url.getPath() + " not found.")); + try(final LockedDocument lockedResource = broker.getXMLResource(path, Lock.LockMode.READ_LOCK)) { - } else { - // Collection - return Left(new IOException("Resource " + url.getPath() + " is a collection.")); + if (lockedResource == null) { + // Test for collection + try(final Collection collection = broker.openCollection(path, Lock.LockMode.READ_LOCK)) { + if (collection == null) { + // No collection, no document + return Left(new IOException("Resource " + url.getPath() + " not found.")); + + } else { + // Collection + return Left(new IOException("Resource " + url.getPath() + " is a collection.")); + } } } else { + final DocumentImpl resource = lockedResource.getDocument(); if (resource.getResourceType() == DocumentImpl.XML_FILE) { final Serializer serializer = broker.getSerializer(); serializer.reset(); @@ -146,14 +147,6 @@ private static Either openStream(final BrokerPool pool } } } finally { - if (collection != null) { - collection.release(Lock.LockMode.READ_LOCK); - } - - if (resource != null) { - resource.getUpdateLock().release(Lock.LockMode.READ_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("End document download"); } diff --git a/src/org/exist/protocolhandler/embedded/EmbeddedOutputStream.java b/src/org/exist/protocolhandler/embedded/EmbeddedOutputStream.java index 99ddfd88140..2e828d8a045 100644 --- 
a/src/org/exist/protocolhandler/embedded/EmbeddedOutputStream.java +++ b/src/org/exist/protocolhandler/embedded/EmbeddedOutputStream.java @@ -117,62 +117,58 @@ private static Either openStream(final BrokerPool poo } private static void uploadToDb(final BrokerPool pool, final XmldbURL url, final Path tempFile) throws IOException { - Collection collection = null; try(final DBBroker broker = pool.getBroker()) { final XmldbURI collectionUri = XmldbURI.create(url.getCollection()); final XmldbURI documentUri = XmldbURI.create(url.getDocumentName()); - collection = broker.openCollection(collectionUri, Lock.LockMode.WRITE_LOCK); + try(final Collection collection = broker.openCollection(collectionUri, Lock.LockMode.WRITE_LOCK)) { - if (collection == null) { - throw new IOException("Resource " + collectionUri.toString() + " is not a collection."); - } - - if (collection.hasChildCollection(broker, documentUri)) { - throw new IOException("Resource " + documentUri.toString() + " is a collection."); - } + if (collection == null) { + throw new IOException("Resource " + collectionUri.toString() + " is not a collection."); + } - MimeType mime = MimeTable.getInstance().getContentTypeFor(documentUri); - String contentType = null; - if (mime != null) { - contentType = mime.getName(); - } else { - mime = MimeType.BINARY_TYPE; - } + if (collection.hasChildCollection(broker, documentUri)) { + throw new IOException("Resource " + documentUri.toString() + " is a collection."); + } - final TransactionManager transact = pool.getTransactionManager(); - try (final Txn txn = transact.beginTransaction()) { + MimeType mime = MimeTable.getInstance().getContentTypeFor(documentUri); + String contentType = null; + if (mime != null) { + contentType = mime.getName(); + } else { + mime = MimeType.BINARY_TYPE; + } - if (mime.isXMLType()) { - if (LOG.isDebugEnabled()) { - LOG.debug("Storing XML resource"); + final TransactionManager transact = pool.getTransactionManager(); + try (final Txn txn = 
transact.beginTransaction()) { + + if (mime.isXMLType()) { + if (LOG.isDebugEnabled()) { + LOG.debug("Storing XML resource"); + } + final InputSource inputsource = new FileInputSource(tempFile); + final IndexInfo info = collection.validateXMLResource(txn, broker, documentUri, inputsource); + final DocumentImpl doc = info.getDocument(); + doc.getMetadata().setMimeType(contentType); + collection.store(txn, broker, info, inputsource); + + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("Storing Binary resource"); + } + try (final InputStream is = Files.newInputStream(tempFile)) { + collection.addBinaryResource(txn, broker, documentUri, is, contentType, FileUtils.sizeQuietly(tempFile)); + } } - final InputSource inputsource = new FileInputSource(tempFile); - final IndexInfo info = collection.validateXMLResource(txn, broker, documentUri, inputsource); - final DocumentImpl doc = info.getDocument(); - doc.getMetadata().setMimeType(contentType); - collection.store(txn, broker, info, inputsource); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Storing Binary resource"); - } - try (final InputStream is = Files.newInputStream(tempFile)) { - collection.addBinaryResource(txn, broker, documentUri, is, contentType, FileUtils.sizeQuietly(tempFile)); - } + txn.commit(); } - - txn.commit(); } } catch (final EXistException | PermissionDeniedException | LockException | SAXException e) { LOG.error(e); throw new IOException(e.getMessage(), e); } finally { - if(collection != null) { - collection.release(Lock.LockMode.WRITE_LOCK); - } - if (LOG.isDebugEnabled()) { LOG.debug("End document upload"); } diff --git a/src/org/exist/protocolhandler/embedded/InMemoryInputStream.java b/src/org/exist/protocolhandler/embedded/InMemoryInputStream.java index ff475dc30af..232839d1a05 100644 --- a/src/org/exist/protocolhandler/embedded/InMemoryInputStream.java +++ b/src/org/exist/protocolhandler/embedded/InMemoryInputStream.java @@ -30,6 +30,7 @@ import org.exist.collections.Collection; import 
org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.protocolhandler.xmldb.XmldbURL; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; @@ -55,61 +56,51 @@ public static InputStream stream(XmldbURL xmldbURL) throws IOException { throw new IOException(e); } - try (final FastByteArrayOutputStream os = new FastByteArrayOutputStream()) { + try (final FastByteArrayOutputStream os = new FastByteArrayOutputStream(); + final DBBroker broker = db.getBroker()) { + final XmldbURI path = XmldbURI.create(xmldbURL.getPath()); - try (DBBroker broker = db.getBroker()) { - final XmldbURI path = XmldbURI.create(xmldbURL.getPath()); + // Test for collection + try(final Collection collection = broker.openCollection(path, LockMode.READ_LOCK)) { + if(collection != null) { + // Collection + throw new IOException("Resource " + xmldbURL.getPath() + " is a collection."); + } - DocumentImpl resource = null; - Collection collection = null; - try { - resource = broker.getXMLResource(path, LockMode.READ_LOCK); - if (resource == null) { - // Test for collection - collection = broker.openCollection(path, LockMode.READ_LOCK); - if (collection == null) { - // No collection, no document - throw new IOException("Resource " + xmldbURL.getPath() + " not found."); + try (final LockedDocument lockedDocument = broker.getXMLResource(path, LockMode.READ_LOCK)) { - } else { - // Collection - throw new IOException("Resource " + xmldbURL.getPath() + " is a collection."); - } +// // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme +// collection.close(); - } else { - if (resource.getResourceType() == DocumentImpl.XML_FILE) { - final Serializer serializer = broker.getSerializer(); - serializer.reset(); + if(lockedDocument == null) { + // No collection, no document + throw new IOException("Resource " + xmldbURL.getPath() + " not found."); + } - // Preserve doctype 
- serializer.setProperty(EXistOutputKeys.OUTPUT_DOCTYPE, "yes"); - try (final Writer w = new OutputStreamWriter(os, "UTF-8")) { - serializer.serialize(resource, w); - } + final DocumentImpl document = lockedDocument.getDocument(); + if (document.getResourceType() == DocumentImpl.XML_FILE) { + final Serializer serializer = broker.getSerializer(); + serializer.reset(); - } else { - broker.readBinaryResource((BinaryDocument) resource, os); + // Preserve doctype + serializer.setProperty(EXistOutputKeys.OUTPUT_DOCTYPE, "yes"); + try(final Writer w = new OutputStreamWriter(os, "UTF-8")) { + serializer.serialize(document, w); } - } - } finally { - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); + } else { + broker.readBinaryResource((BinaryDocument) document, os); } + + return os.toFastByteInputStream(); } - } catch (final IOException ex) { - LOG.error(ex, ex); - throw ex; - } catch (final Exception ex) { - LOG.error(ex, ex); - throw new IOException(ex.getMessage(), ex); } - - return os.toFastByteInputStream(); + } catch (final IOException ex) { + LOG.error(ex,ex); + throw ex; + } catch (final Exception ex) { + LOG.error(ex,ex); + throw new IOException(ex.getMessage(), ex); } } - } diff --git a/src/org/exist/protocolhandler/embedded/InMemoryOutputStream.java b/src/org/exist/protocolhandler/embedded/InMemoryOutputStream.java index 39222ef9933..cb763ba3485 100644 --- a/src/org/exist/protocolhandler/embedded/InMemoryOutputStream.java +++ b/src/org/exist/protocolhandler/embedded/InMemoryOutputStream.java @@ -31,8 +31,8 @@ import org.exist.protocolhandler.xmldb.XmldbURL; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.txn.TransactionManager; import 
org.exist.storage.txn.Txn; import org.exist.util.MimeTable; @@ -76,20 +76,15 @@ public void stream(XmldbURL xmldbURL, byte[] data) throws IOException { final XmldbURI documentUri = XmldbURI.create(xmldbURL.getDocumentName()); final TransactionManager transact = db.getTransactionManager(); - try (final Txn txn = transact.beginTransaction()) { - -// Collection collection = broker.openCollection(collectionUri, LockMode.WRITE_LOCK); - Collection collection = broker.getOrCreateCollection(txn, collectionUri); + try (final Txn txn = transact.beginTransaction(); + final Collection collection = broker.getOrCreateCollection(txn, collectionUri)) { if (collection == null) { throw new IOException("Resource " + collectionUri.toString() + " is not a collection."); } - - Lock lock = collection.getLock(); - if (!lock.isLockedForWrite()) { - txn.acquireLock(lock, LockMode.WRITE_LOCK); - } + final LockManager lockManager = db.getLockManager(); + txn.acquireCollectionLock(() -> lockManager.acquireCollectionWriteLock(collectionUri)); if (collection.hasChildCollection(broker, documentUri)) { throw new IOException("Resource " + documentUri.toString() + " is a collection."); @@ -103,16 +98,19 @@ public void stream(XmldbURL xmldbURL, byte[] data) throws IOException { mime = MimeType.BINARY_TYPE; } - if (mime.isXMLType()) { - final InputSource inputsource = new InputSource(new FastByteArrayInputStream(data)); - final IndexInfo info = collection.validateXMLResource(txn, broker, documentUri, inputsource); - final DocumentImpl doc = info.getDocument(); - doc.getMetadata().setMimeType(contentType); - collection.store(txn, broker, info, inputsource); - - } else { - try (final InputStream is = new FastByteArrayInputStream(data)) { - collection.addBinaryResource(txn, broker, documentUri, is, contentType, data.length); + try(final ManagedDocumentLock lock = lockManager.acquireDocumentWriteLock(documentUri)) { + if (mime.isXMLType()) { + try (final InputStream is = new 
FastByteArrayInputStream(data)) { + final InputSource inputsource = new InputSource(is); + final IndexInfo info = collection.validateXMLResource(txn, broker, documentUri, inputsource); + final DocumentImpl doc = info.getDocument(); + doc.getMetadata().setMimeType(contentType); + collection.store(txn, broker, info, inputsource); + } + } else { + try (final InputStream is = new FastByteArrayInputStream(data)) { + collection.addBinaryResource(txn, broker, documentUri, is, contentType, data.length); + } } } diff --git a/src/org/exist/protocolhandler/protocols/xmldb/EmbeddedURLConnection.java b/src/org/exist/protocolhandler/protocols/xmldb/EmbeddedURLConnection.java index 1ce3ce58656..18076b68859 100644 --- a/src/org/exist/protocolhandler/protocols/xmldb/EmbeddedURLConnection.java +++ b/src/org/exist/protocolhandler/protocols/xmldb/EmbeddedURLConnection.java @@ -45,18 +45,18 @@ * @author Dannes Wessels */ public class EmbeddedURLConnection extends URLConnection { - - private final static Logger LOG = LogManager.getLogger(EmbeddedURLConnection.class); - + private static final Logger LOG = LogManager.getLogger(EmbeddedURLConnection.class); + private final ThreadGroup threadGroup; + /** * Constructs a URL connection to the specified URL. 
*/ - protected EmbeddedURLConnection(final URL url) { + protected EmbeddedURLConnection(final ThreadGroup threadGroup, final URL url) { super(url); if (LOG.isDebugEnabled()) { LOG.debug(url); } - + this.threadGroup = threadGroup; setDoInput(true); setDoOutput(true); } @@ -80,7 +80,7 @@ public InputStream getInputStream() throws IOException { if(xmldbURL.isEmbedded()){ inputstream = new EmbeddedInputStream( xmldbURL ); } else { - inputstream = new XmlrpcInputStream( xmldbURL ); + inputstream = new XmlrpcInputStream(threadGroup, xmldbURL); } return inputstream; @@ -98,7 +98,7 @@ public OutputStream getOutputStream() throws IOException { if(xmldbURL.isEmbedded()){ outputstream = new EmbeddedOutputStream( xmldbURL ); } else { - outputstream = new XmlrpcOutputStream( xmldbURL ); + outputstream = new XmlrpcOutputStream(threadGroup, xmldbURL); } return outputstream; diff --git a/src/org/exist/protocolhandler/protocols/xmldb/Handler.java b/src/org/exist/protocolhandler/protocols/xmldb/Handler.java index fbf1fa1ecbb..171604efb21 100644 --- a/src/org/exist/protocolhandler/protocols/xmldb/Handler.java +++ b/src/org/exist/protocolhandler/protocols/xmldb/Handler.java @@ -40,7 +40,8 @@ */ public class Handler extends URLStreamHandler { - private final static Logger LOG = LogManager.getLogger(Handler.class); + private static final Logger LOG = LogManager.getLogger(Handler.class); + private static final ThreadGroup threadGroup = new ThreadGroup("exist.url-stream-handler"); public static final String XMLDB_EXIST = "xmldb:exist:"; public static final String XMLDB = "xmldb:"; @@ -107,9 +108,9 @@ protected URLConnection openConnection(final URL u) throws IOException { switch (mode) { case THREADS: case DISK: - return new EmbeddedURLConnection(u); + return new EmbeddedURLConnection(threadGroup, u); case MEMORY: - return new InMemoryURLConnection(u); + return new InMemoryURLConnection(threadGroup, u); } throw new IOException("unsupported mode "+mode); } diff --git 
a/src/org/exist/protocolhandler/protocols/xmldb/InMemoryURLConnection.java b/src/org/exist/protocolhandler/protocols/xmldb/InMemoryURLConnection.java index 27a4d8c479a..3bc663bef16 100644 --- a/src/org/exist/protocolhandler/protocols/xmldb/InMemoryURLConnection.java +++ b/src/org/exist/protocolhandler/protocols/xmldb/InMemoryURLConnection.java @@ -37,14 +37,15 @@ * resource stream. */ public class InMemoryURLConnection extends URLConnection { - - private final static Logger LOG = LogManager.getLogger(InMemoryURLConnection.class); + private static final Logger LOG = LogManager.getLogger(InMemoryURLConnection.class); + private final ThreadGroup threadGroup; /** * Constructs a URL connection to the specified URL. */ - protected InMemoryURLConnection(final URL url) { + protected InMemoryURLConnection(final ThreadGroup threadGroup, final URL url) { super(url); + this.threadGroup = threadGroup; setDoInput(true); setDoOutput(true); @@ -64,7 +65,7 @@ public InputStream getInputStream() throws IOException { if(xmldbURL.isEmbedded()){ return InMemoryInputStream.stream( xmldbURL ); } else { - return new XmlrpcInputStream( xmldbURL ); + return new XmlrpcInputStream(threadGroup, xmldbURL ); } } @@ -75,7 +76,7 @@ public OutputStream getOutputStream() throws IOException { if(xmldbURL.isEmbedded()){ return new InMemoryOutputStream( xmldbURL ); } else { - return new XmlrpcOutputStream( xmldbURL ); + return new XmlrpcOutputStream(threadGroup, xmldbURL ); } } } diff --git a/src/org/exist/protocolhandler/xmlrpc/XmlrpcDownloadThread.java b/src/org/exist/protocolhandler/xmlrpc/XmlrpcDownloadRunnable.java similarity index 65% rename from src/org/exist/protocolhandler/xmlrpc/XmlrpcDownloadThread.java rename to src/org/exist/protocolhandler/xmlrpc/XmlrpcDownloadRunnable.java index 862c5645e20..07fc58c1614 100644 --- a/src/org/exist/protocolhandler/xmlrpc/XmlrpcDownloadThread.java +++ b/src/org/exist/protocolhandler/xmlrpc/XmlrpcDownloadRunnable.java @@ -23,7 +23,6 @@ package 
org.exist.protocolhandler.xmlrpc; import java.io.IOException; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -31,52 +30,47 @@ import org.exist.storage.io.BlockingOutputStream; /** - * Wrap XmlrpcDownload class into a thread for XmlrpcInputStream. + * Wrap XmlrpcDownload class into a runnable for XmlrpcInputStream. * * @author Dannes Wessels */ -public class XmlrpcDownloadThread extends Thread { - - private final static Logger logger = LogManager.getLogger(XmlrpcDownloadThread.class); - private XmldbURL xmldbURL; - private BlockingOutputStream bos; +public class XmlrpcDownloadRunnable implements Runnable { - private static final AtomicInteger threadInitNumber = new AtomicInteger(); + private static final Logger logger = LogManager.getLogger(XmlrpcDownloadRunnable.class); + private final XmldbURL url; + private final BlockingOutputStream bos; /** - * Constructor of XmlrpcDownloadThread. - * + * Constructor of XmlrpcDownloadRunnable. + * + * @param url Document location in database. * @param bos Stream to which the document is written. */ - public XmlrpcDownloadThread(XmldbURL url, BlockingOutputStream bos) { - super("exist-xmlrpcDownloadThread-" + threadInitNumber.getAndIncrement()); - xmldbURL=url; - this.bos=bos; + public XmlrpcDownloadRunnable(final XmldbURL url, final BlockingOutputStream bos) { + this.url = url; + this.bos = bos; } - + /** * Write resource to the output stream. */ + @Override public void run() { - logger.debug("Thread started." ); - IOException exception=null; + IOException exception = null; try { final XmlrpcDownload xuc = new XmlrpcDownload(); - xuc.stream(xmldbURL, bos); - - } catch (IOException ex) { + xuc.stream(url, bos); + + } catch (final IOException ex) { logger.error(ex); exception = ex; - + } finally { try { // NEEDED! bos.close(exception); } catch (final IOException ex) { - logger.debug(ex); + logger.warn(ex); } - logger.debug("Thread stopped." 
); } } - } diff --git a/src/org/exist/protocolhandler/xmlrpc/XmlrpcInputStream.java b/src/org/exist/protocolhandler/xmlrpc/XmlrpcInputStream.java index e66b10738ef..58295d30667 100644 --- a/src/org/exist/protocolhandler/xmlrpc/XmlrpcInputStream.java +++ b/src/org/exist/protocolhandler/xmlrpc/XmlrpcInputStream.java @@ -25,72 +25,67 @@ import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; +import java.util.concurrent.atomic.AtomicInteger; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.exist.protocolhandler.xmldb.XmldbURL; import org.exist.storage.io.BlockingInputStream; -import org.exist.storage.io.BlockingOutputStream; /** * Read document from remote database (using xmlrpc) as a input stream. * * @author Dannes Wessels */ -public class XmlrpcInputStream extends InputStream { - - private final static Logger logger = LogManager.getLogger(XmlrpcInputStream.class); - private BlockingInputStream bis; - private BlockingOutputStream bos; - private XmlrpcDownloadThread rt; - +public class XmlrpcInputStream extends InputStream { + private static final AtomicInteger downloadThreadId = new AtomicInteger(); + private final BlockingInputStream bis; + /** - * Constructor of XmlrpcInputStream. + * Constructor of XmlrpcInputStream. * - * @param xmldbURL Location of document in database. - * @throws MalformedURLException Thrown for illegalillegal URLs. + * @param threadGroup the group for the threads created by this stream. + * @param url Location of document in database. + * @throws MalformedURLException Thrown for illegal URLs. 
*/ - public XmlrpcInputStream(XmldbURL xmldbURL) { - - logger.debug("Initializing ResourceInputStream"); - - bis = new BlockingInputStream(); - bos = bis.getOutputStream(); - - rt = new XmlrpcDownloadThread(xmldbURL , bos); - - rt.start(); - - logger.debug("Initializing ResourceInputStream done"); - + public XmlrpcInputStream(final ThreadGroup threadGroup, final XmldbURL url) { + this.bis = new BlockingInputStream(); + + final Runnable runnable = new XmlrpcDownloadRunnable(url, bis.getOutputStream()); + final Thread thread = new Thread(threadGroup, runnable, threadGroup.getName() + ".xmlrpc.download-" + downloadThreadId.getAndIncrement()); + thread.start(); } - - public int read(byte[] b, int off, int len) throws IOException { + + @Override + public int read(final byte[] b, final int off, final int len) throws IOException { return bis.read(b, off, len); } - - public int read(byte[] b) throws IOException { + + @Override + public int read(final byte[] b) throws IOException { return bis.read(b, 0, b.length); } - - public long skip(long n) throws IOException { + + @Override + public long skip(final long n) throws IOException { return bis.skip(n); } - + + @Override public void reset() throws IOException { bis.reset(); } - + + @Override public int read() throws IOException { return bis.read(); } - + + @Override public void close() throws IOException { bis.close(); } - + + @Override public int available() throws IOException { return bis.available(); } - } diff --git a/src/org/exist/protocolhandler/xmlrpc/XmlrpcOutputStream.java b/src/org/exist/protocolhandler/xmlrpc/XmlrpcOutputStream.java index 2e98c5f0365..2987cfb9cb2 100644 --- a/src/org/exist/protocolhandler/xmlrpc/XmlrpcOutputStream.java +++ b/src/org/exist/protocolhandler/xmlrpc/XmlrpcOutputStream.java @@ -25,9 +25,8 @@ import java.io.IOException; import java.io.OutputStream; import java.net.MalformedURLException; +import java.util.concurrent.atomic.AtomicInteger; -import org.apache.logging.log4j.LogManager; -import 
org.apache.logging.log4j.Logger; import org.exist.protocolhandler.xmldb.XmldbURL; import org.exist.storage.io.BlockingInputStream; import org.exist.storage.io.BlockingOutputStream; @@ -37,50 +36,47 @@ * * @author Dannes Wessels */ -public class XmlrpcOutputStream extends OutputStream { - - - private final static Logger logger = LogManager.getLogger(XmlrpcOutputStream.class); - private BlockingInputStream bis; - private BlockingOutputStream bos; - private XmlrpcUploadThread rt; - +public class XmlrpcOutputStream extends OutputStream { + private static final AtomicInteger uploadThreadId = new AtomicInteger(); + private final BlockingOutputStream bos; + /** - * Constructor of XmlrpcOutputStream. - * - * @param xmldbURL Location of document in database. - * @throws MalformedURLException Thrown for illegalillegal URLs. + * Constructor of XmlrpcOutputStream. + * + * @param threadGroup the group for the threads created by this stream. + * @param url Location of document in database. + * @throws MalformedURLException Thrown for illegal URLs. 
*/ - public XmlrpcOutputStream(XmldbURL xmldbURL) { - - logger.debug("Initializing XmlrpcOutputStream"); - - bis = new BlockingInputStream(); - bos = bis.getOutputStream(); - - rt = new XmlrpcUploadThread(xmldbURL, bis); - rt.start(); - - logger.debug("Initializing XmlrpcOutputStream done"); + public XmlrpcOutputStream(final ThreadGroup threadGroup, final XmldbURL url) { + final BlockingInputStream bis = new BlockingInputStream(); + this.bos = bis.getOutputStream(); + + final Runnable runnable = new XmlrpcUploadRunnable(url, bis); + final Thread thread = new Thread(threadGroup, runnable, threadGroup.getName() + ".xmlrpc.upload-" + uploadThreadId.getAndIncrement()); + thread.start(); } - - public void write(int b) throws IOException { + @Override + public void write(final int b) throws IOException { bos.write(b); } - public void write(byte[] b) throws IOException { - bos.write(b,0,b.length); + @Override + public void write(final byte[] b) throws IOException { + bos.write(b, 0, b.length); } - public void write(byte[] b, int off, int len) throws IOException { - bos.write(b,off,len); + @Override + public void write(final byte[] b, final int off, final int len) throws IOException { + bos.write(b, off, len); } + @Override public void close() throws IOException { bos.close(); } + @Override public void flush() throws IOException { bos.flush(); } diff --git a/src/org/exist/protocolhandler/xmlrpc/XmlrpcUpload.java b/src/org/exist/protocolhandler/xmlrpc/XmlrpcUpload.java index 433e2867e20..5c5a797b303 100644 --- a/src/org/exist/protocolhandler/xmlrpc/XmlrpcUpload.java +++ b/src/org/exist/protocolhandler/xmlrpc/XmlrpcUpload.java @@ -105,7 +105,7 @@ public void stream(XmldbURL xmldbURL, InputStream is) params.clear(); params.add(handle); params.add(xmldbURL.getCollectionPath() ); - params.add(Boolean.valueOf(true)); + params.add(Boolean.TRUE); params.add(contentType); final Boolean result =(Boolean)client.execute("parseLocal", params); diff --git 
a/src/org/exist/protocolhandler/xmlrpc/XmlrpcUploadThread.java b/src/org/exist/protocolhandler/xmlrpc/XmlrpcUploadRunnable.java similarity index 65% rename from src/org/exist/protocolhandler/xmlrpc/XmlrpcUploadThread.java rename to src/org/exist/protocolhandler/xmlrpc/XmlrpcUploadRunnable.java index 545bd6aee6b..5ceb1092af5 100644 --- a/src/org/exist/protocolhandler/xmlrpc/XmlrpcUploadThread.java +++ b/src/org/exist/protocolhandler/xmlrpc/XmlrpcUploadRunnable.java @@ -17,13 +17,12 @@ * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * - * $Id: XmlrpcUploadThread.java 223 2007-04-21 22:13:05Z dizzzz $ + * $Id: XmlrpcUploadRunnable.java 223 2007-04-21 22:13:05Z dizzzz $ */ package org.exist.protocolhandler.xmlrpc; import java.io.IOException; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -31,41 +30,37 @@ import org.exist.storage.io.BlockingInputStream; /** - * Wrap XmlrpcUpload class into a thread for XmlrpcOutputStream. + * Wrap XmlrpcUpload class into a runnable for XmlrpcOutputStream. 
 * * @author Dannes Wessels */ -public class XmlrpcUploadThread extends Thread { - - private final static Logger logger = LogManager.getLogger(XmlrpcUploadThread.class); - private XmldbURL xmldbURL; - private BlockingInputStream bis; +public class XmlrpcUploadRunnable implements Runnable { - private static final AtomicInteger threadInitNumber = new AtomicInteger(); - - public XmlrpcUploadThread(XmldbURL url, BlockingInputStream bis) { - super("exist-xmlrpcUploadThread-" + threadInitNumber.getAndIncrement()); - xmldbURL=url; - this.bis=bis; + private static final Logger logger = LogManager.getLogger(XmlrpcUploadRunnable.class); + private final XmldbURL url; + private final BlockingInputStream bis; + + public XmlrpcUploadRunnable(final XmldbURL url, final BlockingInputStream bis) { + this.url = url; + this.bis = bis; } - + /** * Start Thread. */ + @Override public void run() { - logger.debug("Thread started." ); - Exception exception=null; + Exception exception = null; try { final XmlrpcUpload uploader = new XmlrpcUpload(); - uploader.stream(xmldbURL, bis); - + uploader.stream(url, bis); + } catch (IOException ex) { logger.error(ex); exception = ex; - + } finally { bis.close(exception); - logger.debug("Thread stopped." 
); } } } diff --git a/src/org/exist/repo/AutoDeploymentTrigger.java b/src/org/exist/repo/AutoDeploymentTrigger.java index 5421562a77c..0fbc445f0ce 100644 --- a/src/org/exist/repo/AutoDeploymentTrigger.java +++ b/src/org/exist/repo/AutoDeploymentTrigger.java @@ -29,6 +29,7 @@ import org.apache.logging.log4j.Logger; import org.exist.storage.DBBroker; import org.exist.storage.StartupTrigger; +import org.exist.storage.txn.Txn; import org.exist.util.FileUtils; import org.expath.pkg.repo.*; @@ -45,7 +46,7 @@ public class AutoDeploymentTrigger implements StartupTrigger { public final static String AUTODEPLOY_PROPERTY = "exist.autodeploy"; @Override - public void execute(final DBBroker sysBroker, final Map> params) { + public void execute(final DBBroker sysBroker, final Txn transaction, final Map> params) { // do not process if the system property exist.autodeploy=off final String property = System.getProperty(AUTODEPLOY_PROPERTY, "on"); if (property.equalsIgnoreCase("off")) { @@ -66,13 +67,13 @@ public void execute(final DBBroker sysBroker, final Map file so we can resolve dependencies final Map packages = new HashMap<>(); for (final Path xar : xars) { try { - final Optional name = deployment.getNameFromDescriptor(xar); + final Optional name = deployment.getNameFromDescriptor(sysBroker, xar); if(name.isPresent()) { packages.put(name.get(), xar); } else { @@ -90,7 +91,7 @@ public void execute(final DBBroker sysBroker, final Map processorDeps = pkg.getProcessorDeps(); final String procVersion = SystemProperties.getInstance().getSystemProperty("product-version", "1.0"); PackageLoader.Version processorVersion = DEFAULT_VERSION; diff --git a/src/org/exist/repo/Deployment.java b/src/org/exist/repo/Deployment.java index b63a7b55cdf..51412a7666a 100644 --- a/src/org/exist/repo/Deployment.java +++ b/src/org/exist/repo/Deployment.java @@ -32,12 +32,12 @@ import org.exist.dom.persistent.BinaryDocument; import org.exist.security.Permission; import 
org.exist.security.PermissionDeniedException; +import org.exist.security.PermissionFactory; +import org.exist.security.UnixStylePermission; import org.exist.security.internal.aider.GroupAider; import org.exist.security.internal.aider.UserAider; import org.exist.source.FileSource; import org.exist.storage.DBBroker; -import org.exist.storage.txn.TransactionException; -import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import org.exist.util.*; import org.exist.util.serializer.AttrList; @@ -94,8 +94,6 @@ public class Deployment { private static final QName RESOURCES_ELEMENT = new QName("resources", REPO_NAMESPACE); private static final String RESOURCES_PATH_ATTRIBUTE = "path"; - private final DBBroker broker; - private static class RequestedPerms { final String user; final String password; @@ -112,10 +110,6 @@ private RequestedPerms(final String user, final String password, final Optional< // private Optional requestedPerms = Optional.empty(); - public Deployment(final DBBroker broker) { - this.broker = broker; - } - protected Optional getPackageDir(final String pkgName, final Optional repo) throws PackageException { Optional packageDir = Optional.empty(); @@ -147,7 +141,7 @@ protected Optional getPackage(final String pkgName, return Optional.empty(); } - protected DocumentImpl getRepoXML(final Path packageDir) throws PackageException { + protected DocumentImpl getRepoXML(final DBBroker broker, final Path packageDir) throws PackageException { // find and parse the repo.xml descriptor final Path repoFile = packageDir.resolve("repo.xml"); if (!Files.isReadable(repoFile)) { @@ -160,21 +154,23 @@ protected DocumentImpl getRepoXML(final Path packageDir) throws PackageException } } - public Optional installAndDeploy(final Path xar, final PackageLoader loader) throws PackageException, IOException { - return installAndDeploy(xar, loader, true); + public Optional installAndDeploy(final DBBroker broker, final Txn transaction, final Path xar, final 
PackageLoader loader) throws PackageException, IOException { + return installAndDeploy(broker, transaction, xar, loader, true); } /** * Install and deploy a give xar archive. Dependencies are installed from * the PackageLoader. * + * @param broker + * @param transaction * @param xar the .xar file to install * @param loader package loader to use * @param enforceDeps when set to true, the method will throw an exception if a dependency could not be resolved * or an older version of the required dependency is installed and needs to be replaced. */ - public Optional installAndDeploy(final Path xar, final PackageLoader loader, boolean enforceDeps) throws PackageException, IOException { - final Optional descriptor = getDescriptor(xar); + public Optional installAndDeploy(final DBBroker broker, final Txn transaction, final Path xar, final PackageLoader loader, boolean enforceDeps) throws PackageException, IOException { + final Optional descriptor = getDescriptor(broker, xar); if(!descriptor.isPresent()) { throw new PackageException("Missing descriptor from package: " + xar.toAbsolutePath()); } @@ -191,7 +187,7 @@ public Optional installAndDeploy(final Path xar, final PackageLoader loa if (packages != null && (!enforceDeps || pkgVersion.equals(packages.latest().getVersion()))) { LOG.info("Application package " + name + " already installed. Skipping."); final Package pkg = packages.latest(); - return Optional.of(getTargetCollection(pkg, getPackageDir(pkg))); + return Optional.of(getTargetCollection(broker, pkg, getPackageDir(pkg))); } InMemoryNodeSet deps; @@ -248,7 +244,7 @@ public Optional installAndDeploy(final Path xar, final PackageLoader loa if (!isInstalled && loader != null) { final Path depFile = loader.load(pkgName, version); if (depFile != null) { - installAndDeploy(depFile, loader); + installAndDeploy(broker, transaction, depFile, loader); } else { if (enforceDeps) { LOG.warn("Missing dependency: package " + pkgName + " could not be resolved. 
This error " + @@ -279,7 +275,7 @@ public Optional installAndDeploy(final Path xar, final PackageLoader loa repo.get().reportAction(ExistRepository.Action.INSTALL, pkg.getName()); LOG.info("Deploying package " + pkgName); - return deploy(pkgName, repo, null); + return deploy(broker, transaction, pkgName, repo, null); } // Totally unneccessary to do the above if repo is unavailable. @@ -291,12 +287,12 @@ private void checkProcessorVersion(final PackageLoader.Version version) throws P final DependencyVersion depVersion = version.getDependencyVersion(); if (!depVersion.isCompatible(procVersion)) { - throw new PackageException("Package requires eXistdb version " + version.toString() + ". " + + throw new PackageException("Package requires eXist-db version " + version.toString() + ". " + "Installed version is " + procVersion); } } - public Optional undeploy(final String pkgName, final Optional repo) throws PackageException { + public Optional undeploy(final DBBroker broker, final Txn transaction, final String pkgName, final Optional repo) throws PackageException { final Optional maybePackageDir = getPackageDir(pkgName, repo); if (!maybePackageDir.isPresent()) { // fails silently if package dir is not found? 
@@ -307,25 +303,24 @@ public Optional undeploy(final String pkgName, final Optional pkg = getPackage(pkgName, repo); final DocumentImpl repoXML; try { - repoXML = getRepoXML(packageDir); + repoXML = getRepoXML(broker, packageDir); } catch (PackageException e) { if (pkg.isPresent()) { - uninstall(pkg.get(), Optional.empty()); + uninstall(broker, transaction, pkg.get(), Optional.empty()); } throw new PackageException("Failed to remove package from database " + "due to error in repo.xml: " + e.getMessage(), e); } - if (repoXML != null) { try { final Optional cleanup = findElement(repoXML, CLEANUP_ELEMENT); if(cleanup.isPresent()) { - runQuery(null, packageDir, cleanup.get().getStringValue(), false); + runQuery(broker, null, packageDir, cleanup.get().getStringValue(), false); } final Optional target = findElement(repoXML, TARGET_COLL_ELEMENT); if (pkg.isPresent()) { - uninstall(pkg.get(), target); + uninstall(broker, transaction, pkg.get(), target); } return target.map(e -> Optional.ofNullable(e.getStringValue())).orElseGet(() -> Optional.of(getTargetFallback(pkg.get()).getCollectionPath())); @@ -337,13 +332,13 @@ public Optional undeploy(final String pkgName, final Optional deploy(final String pkgName, final Optional repo, final String userTarget) throws PackageException, IOException { + public Optional deploy(final DBBroker broker, final Txn transaction, final String pkgName, final Optional repo, final String userTarget) throws PackageException, IOException { final Optional maybePackageDir = getPackageDir(pkgName, repo); if (!maybePackageDir.isPresent()) { throw new PackageException("Package not found: " + pkgName); @@ -351,7 +346,7 @@ public Optional deploy(final String pkgName, final Optional deploy(final String pkgName, final Optional setupPath = setup.map(ElementImpl::getStringValue).filter(s -> !s.isEmpty()); if (setupPath.isPresent()) { - runQuery(null, packageDir, setupPath.get(), true); + runQuery(broker, null, packageDir, setupPath.get(), true); return 
Optional.empty(); } else { // otherwise copy all child directories to the target collection @@ -379,7 +374,7 @@ public Optional deploy(final String pkgName, final Optional element: " + targetPath.get(), e); } @@ -387,7 +382,6 @@ public Optional deploy(final String pkgName, final Optional in its repo.xml, no files will be deployed to /apps"); } } - if (targetCollection == null) { // no target means: package does not need to be deployed into database // however, we need to preserve a copy for backup purposes @@ -431,19 +425,19 @@ public Optional deploy(final String pkgName, final Optional preSetupPath = preSetup.map(ElementImpl::getStringValue).filter(s -> !s.isEmpty()); if(preSetupPath.isPresent()) { - runQuery(targetCollection, packageDir, preSetupPath.get(), true); + runQuery(broker, targetCollection, packageDir, preSetupPath.get(), true); } // any required users and group should have been created by the pre-setup query. // check for invalid users now. if(requestedPerms.isPresent()) { - checkUserSettings(requestedPerms.get()); + checkUserSettings(broker, requestedPerms.get()); } final InMemoryNodeSet resources = findElements(repoXML,RESOURCES_ELEMENT); // install - final List errors = scanDirectory(packageDir, targetCollection, resources, true, false, + final List errors = scanDirectory(broker, transaction, packageDir, targetCollection, resources, true, false, requestedPerms); // run the post-setup query if present @@ -451,10 +445,10 @@ public Optional deploy(final String pkgName, final Optional postSetupPath = postSetup.map(ElementImpl::getStringValue).filter(s -> !s.isEmpty()); if(postSetupPath.isPresent()) { - runQuery(targetCollection, packageDir, postSetupPath.get(), false); + runQuery(broker, targetCollection, packageDir, postSetupPath.get(), false); } - storeRepoXML(repoXML, targetCollection, requestedPerms); + storeRepoXML(broker, transaction, repoXML, targetCollection, requestedPerms); // TODO: it should be safe to clean up the file system after a package 
// has been deployed. Might be enabled after 2.0 @@ -521,17 +515,18 @@ private void cleanup(final String pkgName, final Optional repo) * Get the target collection for the given package, which resides in pkgDir. * Returns path to cached .xar for library packages. * + * @param broker * @param pkg * @param pkgDir * @return * @throws PackageException */ - private String getTargetCollection(final Package pkg, final Path pkgDir) throws PackageException { - final DocumentImpl repoXML = getRepoXML(pkgDir); + private String getTargetCollection(final DBBroker broker, final Package pkg, final Path pkgDir) throws PackageException { + final DocumentImpl repoXML = getRepoXML(broker, pkgDir); if (repoXML != null) { try { final Optional target = findElement(repoXML, TARGET_COLL_ELEMENT); - return target.map(ElementImpl::getStringValue).map(this::getTargetCollection).map(XmldbURI::create).map(XmldbURI::getCollectionPath) + return target.map(ElementImpl::getStringValue).map(s -> getTargetCollection(broker, s)).map(XmldbURI::create).map(XmldbURI::getCollectionPath) .orElseGet(() -> getTargetFallback(pkg).getCollectionPath()); } catch (XPathException e) { throw new PackageException("Failed to determine target collection"); @@ -546,7 +541,7 @@ private XmldbURI getTargetFallback(final Package pkg) { return XmldbURI.SYSTEM.append("repo/" + pkgColl); } - private String getTargetCollection(String targetFromRepo) { + private String getTargetCollection(final DBBroker broker, String targetFromRepo) { final String appRoot = (String) broker.getConfiguration().getProperty(PROPERTY_APP_ROOT); if (appRoot != null) { if (targetFromRepo.startsWith("/db/")) { @@ -569,15 +564,14 @@ private String getTargetCollection(String targetFromRepo) { * @param target * @throws PackageException */ - private void uninstall(final Package pkg, final Optional target) + private void uninstall(final DBBroker broker, final Txn transaction, final Package pkg, final Optional target) throws PackageException { // determine 
target collection final Optional targetPath = target.map(ElementImpl::getStringValue).filter(s -> !s.isEmpty()); - final XmldbURI targetCollection = targetPath.map(s -> XmldbURI.create(getTargetCollection(s))) + final XmldbURI targetCollection = targetPath.map(s -> XmldbURI.create(getTargetCollection(broker, s))) .orElseGet(() -> getTargetFallback(pkg)); - final TransactionManager mgr = broker.getBrokerPool().getTransactionManager(); - try(final Txn transaction = mgr.beginTransaction()) { + try { Collection collection = broker.getOrCreateCollection(transaction, targetCollection); if (collection != null) { broker.removeCollection(transaction, collection); @@ -589,8 +583,7 @@ private void uninstall(final Package pkg, final Optional target) broker.removeCollection(transaction, collection); } } - mgr.commit(transaction); - } catch (final PermissionDeniedException | IOException | TriggerException | TransactionException e) { + } catch (final PermissionDeniedException | IOException | TriggerException e) { LOG.error("Exception occurred while removing package.", e); } } @@ -602,7 +595,7 @@ private void uninstall(final Package pkg, final Optional target) * @param targetCollection * @throws XPathException */ - private void storeRepoXML(final DocumentImpl repoXML, final XmldbURI targetCollection, final Optional requestedPerms) + private void storeRepoXML(final DBBroker broker, final Txn transaction, final DocumentImpl repoXML, final XmldbURI targetCollection, final Optional requestedPerms) throws PackageException, XPathException { // Store repo.xml final DateTimeValue time = new DateTimeValue(new Date()); @@ -617,24 +610,20 @@ private void storeRepoXML(final DocumentImpl repoXML, final XmldbURI targetColle builder.endDocument(); final DocumentImpl updatedXML = builder.getDocument(); - final TransactionManager mgr = broker.getBrokerPool().getTransactionManager(); - try(final Txn transaction = mgr.beginTransaction()) { + try { final Collection collection = 
broker.getOrCreateCollection(transaction, targetCollection); final XmldbURI name = XmldbURI.createInternal("repo.xml"); final IndexInfo info = collection.validateXMLResource(transaction, broker, name, updatedXML); final Permission permission = info.getDocument().getPermissions(); - setPermissions(requestedPerms, false, MimeType.XML_TYPE, permission); + setPermissions(broker, requestedPerms, false, MimeType.XML_TYPE, permission); collection.store(transaction, broker, info, updatedXML); - - mgr.commit(transaction); } catch (final PermissionDeniedException | IOException | SAXException | LockException | EXistException e) { throw new PackageException("Error while storing updated repo.xml: " + e.getMessage(), e); } } - private void checkUserSettings(final RequestedPerms requestedPerms) throws PackageException { - Objects.requireNonNull(requestedPerms); + private void checkUserSettings(final DBBroker broker, final RequestedPerms requestedPerms) throws PackageException { final org.exist.security.SecurityManager secman = broker.getBrokerPool().getSecurityManager(); try { if (requestedPerms.group.filter(g -> !secman.hasGroup(g)).isPresent()) { @@ -652,7 +641,7 @@ private void checkUserSettings(final RequestedPerms requestedPerms) throws Packa } } - private Sequence runQuery(final XmldbURI targetCollection, final Path tempDir, final String fileName, final boolean preInstall) + private Sequence runQuery(final DBBroker broker, final XmldbURI targetCollection, final Path tempDir, final String fileName, final boolean preInstall) throws PackageException, IOException, XPathException { final Path xquery = tempDir.resolve(fileName); if (!Files.isReadable(xquery)) { @@ -691,25 +680,25 @@ private Sequence runQuery(final XmldbURI targetCollection, final Path tempDir, f * Scan a directory and import all files and sub directories into the target * collection. 
* + * @param broker + * @param transaction * @param directory * @param target */ - private List scanDirectory(final Path directory, final XmldbURI target, final InMemoryNodeSet resources, + private List scanDirectory(final DBBroker broker, final Txn transaction, final Path directory, final XmldbURI target, final InMemoryNodeSet resources, final boolean inRootDir, final boolean isResourcesDir, final Optional requestedPerms) { - return scanDirectory(directory, target, resources, inRootDir, isResourcesDir, requestedPerms, new ArrayList<>()); + return scanDirectory(broker, transaction, directory, target, resources, inRootDir, isResourcesDir, requestedPerms, new ArrayList<>()); } - private List scanDirectory(final Path directory, final XmldbURI target, final InMemoryNodeSet resources, + private List scanDirectory(final DBBroker broker, final Txn transaction, final Path directory, final XmldbURI target, final InMemoryNodeSet resources, final boolean inRootDir, final boolean isResourcesDir, final Optional requestedPerms, final List errors) { - final TransactionManager mgr = broker.getBrokerPool().getTransactionManager(); Collection collection = null; - try(final Txn transaction = mgr.beginTransaction()) { + try { collection = broker.getOrCreateCollection(transaction, target); - setPermissions(requestedPerms, true, null, collection.getPermissionsNoLock()); + setPermissions(broker, requestedPerms, true, null, collection.getPermissionsNoLock()); broker.saveCollection(transaction, collection); - mgr.commit(transaction); - } catch (final PermissionDeniedException | TriggerException | IOException | TransactionException e) { + } catch (final PermissionDeniedException | TriggerException | IOException e) { LOG.warn(e); errors.add(e.getMessage()); } @@ -719,17 +708,17 @@ private List scanDirectory(final Path directory, final XmldbURI target, // the root dir is not allowed to be a resources directory if (!inRootDir && isResources) { try { - storeBinaryResources(directory, 
collection, requestedPerms, errors); + storeBinaryResources(broker, transaction, directory, collection, requestedPerms, errors); } catch (Exception e) { LOG.error(e.getMessage(), e); } } else { - storeFiles(directory, collection, inRootDir, requestedPerms, errors); + storeFiles(broker, transaction, directory, collection, inRootDir, requestedPerms, errors); } // scan sub directories try(final Stream subDirs = Files.find(directory, 1, (path, attrs) -> (!path.equals(directory)) && attrs.isDirectory())) { - subDirs.forEach(path -> scanDirectory(path, target.append(FileUtils.fileName(path)), resources, false, + subDirs.forEach(path -> scanDirectory(broker, transaction, path, target.append(FileUtils.fileName(path)), resources, false, isResources, requestedPerms, errors)); } catch(final IOException ioe) { LOG.warn("Unable to scan sub-directories", ioe); @@ -737,18 +726,14 @@ private List scanDirectory(final Path directory, final XmldbURI target, return errors; } - private boolean isResourceDir(XmldbURI target, InMemoryNodeSet resources) { + private boolean isResourceDir(final XmldbURI target, final InMemoryNodeSet resources) { // iterate here or pass into scandirectory directly or even save as class property??? 
- try { - for (final SequenceIterator i = resources.iterate(); i.hasNext(); ) { - final ElementImpl child = (ElementImpl) i.nextItem(); - final String resourcePath = child.getAttribute(RESOURCES_PATH_ATTRIBUTE); - if (target.toString().endsWith(resourcePath)) { - return true; - } + for (final SequenceIterator i = resources.iterate(); i.hasNext(); ) { + final ElementImpl child = (ElementImpl) i.nextItem(); + final String resourcePath = child.getAttribute(RESOURCES_PATH_ATTRIBUTE); + if (target.toString().endsWith(resourcePath)) { + return true; } - } catch (XPathException e) { - LOG.warn("Caught exception while reading resource list in repo.xml: " + e.getMessage(), e); } return false; } @@ -756,11 +741,13 @@ private boolean isResourceDir(XmldbURI target, InMemoryNodeSet resources) { /** * Import all files in the given directory into the target collection * + * @param broker + * @param transaction * @param directory * @param targetCollection */ - private void storeFiles(final Path directory, final Collection targetCollection, final boolean inRootDir, final - Optional requestedPerms, final List errors) { + private void storeFiles(final DBBroker broker, final Txn transaction, final Path directory, final Collection targetCollection, final boolean inRootDir, + final Optional requestedPerms, final List errors) { List files; try { files = FileUtils.list(directory); @@ -771,7 +758,6 @@ private void storeFiles(final Path directory, final Collection targetCollection, } final MimeTable mimeTab = MimeTable.getInstance(); - final TransactionManager mgr = broker.getBrokerPool().getTransactionManager(); for (final Path file : files) { if (inRootDir && FileUtils.fileName(file).equals("repo.xml")) { @@ -784,7 +770,7 @@ private void storeFiles(final Path directory, final Collection targetCollection, } final XmldbURI name = XmldbURI.create(FileUtils.fileName(file)); - try(final Txn transaction = mgr.beginTransaction()) { + try { if (mime.isXMLType()) { final InputSource is = new 
FileInputSource(file); IndexInfo info = null; @@ -794,7 +780,7 @@ private void storeFiles(final Path directory, final Collection targetCollection, //check for .html ending if(mime.getName().equals(MimeType.HTML_TYPE.getName())){ //store it as binary resource - storeBinary(targetCollection, file, mime, name, requestedPerms, transaction); + storeBinary(broker, transaction, targetCollection, file, mime, name, requestedPerms); } else { // could neither store as xml nor binary: give up and report failure in outer catch throw new EXistException(FileUtils.fileName(file) + " cannot be stored"); @@ -803,7 +789,7 @@ private void storeFiles(final Path directory, final Collection targetCollection, if (info != null) { info.getDocument().getMetadata().setMimeType(mime.getName()); final Permission permission = info.getDocument().getPermissions(); - setPermissions(requestedPerms, false, mime, permission); + setPermissions(broker, requestedPerms, false, mime, permission); targetCollection.store(transaction, broker, info, is); } @@ -814,12 +800,11 @@ private void storeFiles(final Path directory, final Collection targetCollection, targetCollection.addBinaryResource(transaction, broker, name, is, mime.getName(), size); final Permission permission = doc.getPermissions(); - setPermissions(requestedPerms, false, mime, permission); + setPermissions(broker, requestedPerms, false, mime, permission); doc.getMetadata().setMimeType(mime.getName()); broker.storeXMLResource(transaction, doc); } } - mgr.commit(transaction); } catch (final SAXException | EXistException | PermissionDeniedException | LockException | IOException e) { LOG.error(e.getMessage(), e); errors.add(FileUtils.fileName(file) + ": " + e.getMessage()); @@ -828,7 +813,7 @@ private void storeFiles(final Path directory, final Collection targetCollection, } } - private void storeBinary(final Collection targetCollection, final Path file, final MimeType mime, final XmldbURI name, final Optional requestedPerms, final Txn transaction) 
throws + private void storeBinary(final DBBroker broker, final Txn transaction, final Collection targetCollection, final Path file, final MimeType mime, final XmldbURI name, final Optional requestedPerms) throws IOException, EXistException, PermissionDeniedException, LockException, TriggerException { final long size = Files.size(file); try (final InputStream is = Files.newInputStream(file)) { @@ -836,23 +821,21 @@ private void storeBinary(final Collection targetCollection, final Path file, fin targetCollection.addBinaryResource(transaction, broker, name, is, mime.getName(), size); final Permission permission = doc.getPermissions(); - setPermissions(requestedPerms, false, mime, permission); + setPermissions(broker, requestedPerms, false, mime, permission); doc.getMetadata().setMimeType(mime.getName()); broker.storeXMLResource(transaction, doc); } } - private void storeBinaryResources(final Path directory, final Collection targetCollection, final - Optional requestedPerms, final List errors) throws IOException, EXistException, + private void storeBinaryResources(final DBBroker broker, final Txn transaction, final Path directory, final Collection targetCollection, + final Optional requestedPerms, final List errors) throws IOException, EXistException, PermissionDeniedException, LockException, TriggerException { - final TransactionManager mgr = broker.getBrokerPool().getTransactionManager(); try(DirectoryStream stream = Files.newDirectoryStream(directory)) { for (final Path entry: stream) { if (!Files.isDirectory(entry)) { final XmldbURI name = XmldbURI.create(FileUtils.fileName(entry)); - try(final Txn transaction = mgr.beginTransaction()) { - storeBinary(targetCollection, entry, MimeType.BINARY_TYPE, name, requestedPerms, transaction); - mgr.commit(transaction); + try { + storeBinary(broker, transaction, targetCollection, entry, MimeType.BINARY_TYPE, name, requestedPerms); } catch (final Exception e) { LOG.error(e.getMessage(), e); errors.add(e.getMessage()); @@ 
-867,20 +850,18 @@ private void storeBinaryResources(final Path directory, final Collection targetC * @param mime * @param permission */ - private void setPermissions(final Optional requestedPerms, final boolean isCollection, final MimeType mime, final Permission permission) throws PermissionDeniedException { + private void setPermissions(final DBBroker broker, final Optional requestedPerms, final boolean isCollection, final MimeType mime, final Permission permission) throws PermissionDeniedException { int mode = permission.getMode(); - if(requestedPerms.isPresent()) { + if (requestedPerms.isPresent()) { final RequestedPerms perms = requestedPerms.get(); - permission.setOwner(perms.user); - if(perms.group.isPresent()) { - permission.setGroup(perms.group.get()); - } + PermissionFactory.chown(broker, permission, Optional.of(perms.user), perms.group); mode = perms.permissions.map(permStr -> { try { - permission.setMode(permStr); - return permission.getMode(); + final UnixStylePermission other = new UnixStylePermission(broker.getBrokerPool().getSecurityManager()); + other.setMode(permStr); + return other.getMode(); } catch (final PermissionDeniedException | SyntaxException e) { LOG.warn("Unable to set permissions string: " + permStr + ". Falling back to default."); return permission.getMode(); @@ -891,7 +872,8 @@ private void setPermissions(final Optional requestedPerms, final if (isCollection || (mime != null && mime.getName().equals(MimeType.XQUERY_TYPE.getName()))) { mode = mode | 0111; //TODO(AR) Whoever did this - this is a really bad idea. 
You are circumventing the security of the system } - permission.setMode(mode); + + PermissionFactory.chmod(broker, permission, Optional.of(mode), Optional.empty()); } private Optional findElement(final NodeImpl root, final QName qname) throws XPathException { @@ -909,12 +891,12 @@ private InMemoryNodeSet findElements(final NodeImpl root, final QName qname) thr return setupNodes; } - public Optional getNameFromDescriptor(final Path xar) throws IOException, PackageException { - final Optional doc = getDescriptor(xar); + public Optional getNameFromDescriptor(final DBBroker broker, final Path xar) throws IOException, PackageException { + final Optional doc = getDescriptor(broker, xar); return doc.map(DocumentImpl::getDocumentElement).map(root -> root.getAttribute("name")); } - public Optional getDescriptor(final Path jar) throws IOException, PackageException { + public Optional getDescriptor(final DBBroker broker, final Path jar) throws IOException, PackageException { try(final JarInputStream jis = new JarInputStream(Files.newInputStream(jar))) { JarEntry entry; while ((entry = jis.getNextJarEntry()) != null) { diff --git a/src/org/exist/repo/RepoBackup.java b/src/org/exist/repo/RepoBackup.java index 0dc91634bd2..b16bd449958 100644 --- a/src/org/exist/repo/RepoBackup.java +++ b/src/org/exist/repo/RepoBackup.java @@ -1,6 +1,7 @@ package org.exist.repo; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import org.exist.storage.DBBroker; import org.exist.storage.NativeBroker; @@ -37,20 +38,18 @@ public static Path backup(final DBBroker broker) throws IOException { public static void restore(final DBBroker broker) throws IOException, PermissionDeniedException { final XmldbURI docPath = XmldbURI.createInternal(XmldbURI.ROOT_COLLECTION + "/" + REPO_ARCHIVE); - DocumentImpl doc = null; - try { - doc = broker.getXMLResource(docPath, LockMode.READ_LOCK); - if (doc == null) - 
{return;} + try(final LockedDocument lockedDoc = broker.getXMLResource(docPath, LockMode.READ_LOCK)) { + if (lockedDoc == null) { + return; + } + + final DocumentImpl doc = lockedDoc.getDocument(); if (doc.getResourceType() != DocumentImpl.BINARY_FILE) {throw new IOException(docPath + " is not a binary resource");} final Path file = ((NativeBroker)broker).getCollectionBinaryFileFsPath(doc.getURI()); final Path directory = ExistRepository.getRepositoryDir(broker.getConfiguration()); unzip(file, directory); - } finally { - if (doc != null) - {doc.getUpdateLock().release(LockMode.READ_LOCK);} } } diff --git a/src/org/exist/scheduler/UserXQueryJob.java b/src/org/exist/scheduler/UserXQueryJob.java index f9a9119d28b..004100c1c75 100644 --- a/src/org/exist/scheduler/UserXQueryJob.java +++ b/src/org/exist/scheduler/UserXQueryJob.java @@ -27,7 +27,7 @@ import org.apache.logging.log4j.Logger; import org.exist.EXistException; import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.PermissionDeniedException; import org.exist.security.Subject; import org.exist.source.DBSource; @@ -134,7 +134,7 @@ public final void execute(final JobExecutionContext jec) throws JobExecutionExce final JobDataMap jobDataMap = jec.getJobDetail().getJobDataMap(); //TODO why are these values not used from the class members? 
- final String xqueryresource = (String)jobDataMap.get(XQUERY_SOURCE); + final String xqueryResource = (String)jobDataMap.get(XQUERY_SOURCE); final Subject user = (Subject)jobDataMap.get(ACCOUNT); final BrokerPool pool = (BrokerPool)jobDataMap.get(DATABASE); @@ -142,109 +142,105 @@ public final void execute(final JobExecutionContext jec) throws JobExecutionExce final boolean unschedule = ((Boolean)jobDataMap.get(UNSCHEDULE)); //if invalid arguments then abort - if((pool == null) || (xqueryresource == null) || (user == null)) { + if((pool == null) || (xqueryResource == null) || (user == null)) { abort("BrokerPool or XQueryResource or User was null!"); } - DocumentImpl resource = null; - Source source = null; - XQueryPool xqPool = null; - CompiledXQuery compiled = null; - XQueryContext context = null; - try (final DBBroker broker = pool.get(Optional.of(user))) { - - if(xqueryresource.indexOf(':') > 0) { - source = SourceFactory.getSource(broker, "", xqueryresource, true); + if(xqueryResource.indexOf(':') > 0) { + final Source source = SourceFactory.getSource(broker, "", xqueryResource, true); + if(source != null) { + executeXQuery(pool, broker, source, params); + return; + } } else { - final XmldbURI pathUri = XmldbURI.create(xqueryresource); - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); - - if(resource != null) { - source = new DBSource(broker, (BinaryDocument)resource, true); + final XmldbURI pathUri = XmldbURI.create(xqueryResource); + try(final LockedDocument lockedResource = broker.getXMLResource(pathUri, LockMode.READ_LOCK)) { + if (lockedResource != null) { + final Source source = new DBSource(broker, (BinaryDocument) lockedResource.getDocument(), true); + executeXQuery(pool, broker, source, params); + return; + } } } - if(source != null) { + LOG.warn("XQuery User Job not found: " + xqueryResource + ", job not scheduled"); + } catch(final EXistException ee) { + abort("Could not get DBBroker!"); + } catch(final PermissionDeniedException pde) 
{ + abort("Permission denied for the scheduling user: " + user.getName() + "!"); + } catch(final XPathException xpe) { + abort("XPathException in the Job: " + xpe.getMessage() + "!", unschedule); + } catch(final IOException e) { + abort("Could not load XQuery: " + e.getMessage()); + } + } - //execute the xquery - final XQuery xquery = pool.getXQueryService(); - xqPool = pool.getXQueryPool(); + private void executeXQuery(final BrokerPool pool, final DBBroker broker, final Source source, final Properties params) throws PermissionDeniedException, XPathException, JobExecutionException { + XQueryPool xqPool = null; + CompiledXQuery compiled = null; + XQueryContext context = null; - //try and get a pre-compiled query from the pool - compiled = xqPool.borrowCompiledXQuery(broker, source); + try { + //execute the xquery + final XQuery xquery = pool.getXQueryService(); + xqPool = pool.getXQueryPool(); - if(compiled == null) { - context = new XQueryContext(pool); - } else { - context = compiled.getContext(); - } + //try and get a pre-compiled query from the pool + compiled = xqPool.borrowCompiledXQuery(broker, source); - //TODO: don't hardcode this? 
- if(resource != null) { - context.setModuleLoadPath(XmldbURI.EMBEDDED_SERVER_URI.append(resource.getCollection().getURI()).toString()); - context.setStaticallyKnownDocuments(new XmldbURI[] { - resource.getCollection().getURI() - }); - } + if (compiled == null) { + context = new XQueryContext(pool); + } else { + context = compiled.getContext(); + context.prepareForReuse(); + } - if(compiled == null) { + if(source instanceof DBSource) { + final XmldbURI collectionUri = ((DBSource)source).getDocumentPath().removeLastSegment(); + context.setModuleLoadPath(XmldbURI.EMBEDDED_SERVER_URI.append(collectionUri.getCollectionPath()).toString()); + context.setStaticallyKnownDocuments(new XmldbURI[] { collectionUri }); + } - try { - compiled = xquery.compile(broker, context, source); - } - catch(final IOException e) { - abort("Failed to read query from " + xqueryresource); - } - } + if (compiled == null) { - //declare any parameters as external variables - if(params != null) { - String bindingPrefix = params.getProperty("bindingPrefix"); + try { + compiled = xquery.compile(broker, context, source); + } catch (final IOException e) { + abort("Failed to read query from " + xqueryResource); + } + } - if(bindingPrefix == null) { - bindingPrefix = "local"; - } - + //declare any parameters as external variables + if (params != null) { + String bindingPrefix = params.getProperty("bindingPrefix"); - for(final Entry param : params.entrySet()) { - final String key = (String)param.getKey(); - final String value = (String)param.getValue(); - context.declareVariable( bindingPrefix + ":" + key, new StringValue(value)); - } + if (bindingPrefix == null) { + bindingPrefix = "local"; } - xquery.execute(broker, compiled, null); - } else { - LOG.warn("XQuery User Job not found: " + xqueryresource + ", job not scheduled"); + for (final Entry param : params.entrySet()) { + final String key = (String) param.getKey(); + final String value = (String) param.getValue(); + 
context.declareVariable(bindingPrefix + ":" + key, new StringValue(value)); + } } - } catch(final EXistException ee) { - abort("Could not get DBBroker!"); - } catch(final PermissionDeniedException pde) { - abort("Permission denied for the scheduling user: " + user.getName() + "!"); - } catch(final XPathException xpe) { - abort("XPathException in the Job: " + xpe.getMessage() + "!", unschedule); - } catch(final IOException e) { - abort("Could not load XQuery: " + e.getMessage()); - } finally { + xquery.execute(broker, compiled, null); + } finally { if(context != null) { context.runCleanupTasks(); } - + //return the compiled query to the pool if(xqPool != null && source != null && compiled != null) { xqPool.returnCompiledXQuery(source, compiled); } - - //release the lock on the xquery resource - if(resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); - } } } + private void abort(final String message) throws JobExecutionException { abort(message, true); } diff --git a/src/org/exist/scheduler/impl/QuartzSchedulerImpl.java b/src/org/exist/scheduler/impl/QuartzSchedulerImpl.java index 50633b7de13..57fdc614a0a 100644 --- a/src/org/exist/scheduler/impl/QuartzSchedulerImpl.java +++ b/src/org/exist/scheduler/impl/QuartzSchedulerImpl.java @@ -28,6 +28,7 @@ import java.util.Properties; import java.util.stream.Collectors; +import com.evolvedbinary.j8fu.Either; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.scheduler.*; @@ -41,17 +42,23 @@ import org.quartz.*; +import static org.exist.util.ThreadUtils.nameInstanceSchedulerThread; +import static org.exist.util.ThreadUtils.nameInstanceThread; +import static org.exist.util.ThreadUtils.newInstanceSubThreadGroup; import static org.quartz.CronScheduleBuilder.cronSchedule; import static org.quartz.JobBuilder.newJob; import static org.quartz.SimpleScheduleBuilder.simpleSchedule; import static org.quartz.TriggerBuilder.newTrigger; import static 
org.exist.scheduler.JobDescription.*; +import static org.quartz.impl.StdSchedulerFactory.*; import org.quartz.Job; import org.quartz.impl.StdSchedulerFactory; import org.quartz.impl.matchers.GroupMatcher; +import javax.annotation.Nullable; + /** * A Scheduler to trigger Startup, System and User defined jobs. * @@ -79,14 +86,25 @@ public void configure(final Configuration configuration) throws BrokerPoolServic @Override public void prepare(final BrokerPool brokerPool) throws BrokerPoolServiceException { + + // NOTE: we create the scheduler in a separate thread with its own thread-group so that the thread group is used by Quartz + final ThreadGroup instanceQuartzThreadGroup = newInstanceSubThreadGroup(brokerPool, "scheduler.quartz-simple-thread-pool"); + final QuartzSchedulerCreator creator = new QuartzSchedulerCreator(); + final Thread schedulerCreatorThread = new Thread(instanceQuartzThreadGroup, creator, nameInstanceThread(brokerPool, "prepare-quartz-scheduler")); + schedulerCreatorThread.start(); + try { - final SchedulerFactory schedulerFactory = new StdSchedulerFactory(getQuartzProperties()); - this.scheduler = schedulerFactory.getScheduler(); - } catch(final SchedulerException e) { + schedulerCreatorThread.join(); + this.scheduler = creator + .getScheduler() + .valueOrThrow(e -> new BrokerPoolServiceException("Unable to create Scheduler: " + e.getMessage(), e)); + + } catch (final InterruptedException e) { + // restore interrupted state + Thread.currentThread().interrupt(); throw new BrokerPoolServiceException("Unable to create Scheduler: " + e.getMessage(), e); } } - @Override public void startMultiUser(final BrokerPool brokerPool) throws BrokerPoolServiceException { run(); // start running all the defined jobs @@ -95,16 +113,15 @@ public void startMultiUser(final BrokerPool brokerPool) throws BrokerPoolService private final static Properties defaultQuartzProperties = new Properties(); static { - 
defaultQuartzProperties.setProperty("org.quartz.scheduler.instanceName", "DefaultQuartzScheduler"); - defaultQuartzProperties.setProperty("org.quartz.scheduler.rmi.export", "false"); - defaultQuartzProperties.setProperty("org.quartz.scheduler.rmi.proxy", "false"); - defaultQuartzProperties.setProperty("org.quartz.scheduler.wrapJobExecutionInUserTransaction", "false"); - defaultQuartzProperties.setProperty("org.quartz.scheduler.skipUpdateCheck", "true"); - defaultQuartzProperties.setProperty("org.quartz.threadPool.class", "org.quartz.simpl.SimpleThreadPool"); + defaultQuartzProperties.setProperty(PROP_SCHED_RMI_EXPORT, "false"); + defaultQuartzProperties.setProperty(PROP_SCHED_RMI_PROXY, "false"); + defaultQuartzProperties.setProperty(PROP_SCHED_WRAP_JOB_IN_USER_TX, "false"); + defaultQuartzProperties.setProperty(PROP_THREAD_POOL_CLASS, "org.exist.scheduler.impl.ExistQuartzSimpleThreadPool"); defaultQuartzProperties.setProperty("org.quartz.threadPool.threadCount", "4"); defaultQuartzProperties.setProperty("org.quartz.threadPool.threadPriority", "5"); + defaultQuartzProperties.setProperty("org.quartz.threadPool.threadsInheritGroupOfInitializingThread", "true"); defaultQuartzProperties.setProperty("org.quartz.threadPool.threadsInheritContextClassLoaderOfInitializingThread", "true"); - defaultQuartzProperties.setProperty("org.quartz.jobStore.class", "org.quartz.simpl.RAMJobStore"); + defaultQuartzProperties.setProperty(PROP_JOB_STORE_CLASS, "org.quartz.simpl.RAMJobStore"); defaultQuartzProperties.setProperty("org.quartz.jobStore.misfireThreshold", "60000"); } @@ -125,10 +142,14 @@ private Properties getQuartzProperties() { LOG.warn("Using default properties for Quartz scheduler"); properties.putAll(defaultQuartzProperties); } - if (!properties.containsKey(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME)) { - properties.setProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, - brokerPool.getId() + "_QuartzScheduler"); - } + + // always set the scheduler name + 
properties.setProperty(PROP_SCHED_INSTANCE_NAME, nameInstanceSchedulerThread(brokerPool, "quartz-scheduler")); + + // always set the thread prefix for the thread pool + final String schedulerThreadNamePrefix = nameInstanceSchedulerThread(brokerPool, "quartz-worker"); + properties.setProperty(PROP_THREAD_POOL_PREFIX + ".threadNamePrefix", schedulerThreadNamePrefix); + return properties; } @@ -562,4 +583,27 @@ private void setupJobDataMap(final JobDescription job, final JobDataMap jobDataM //Store the value of the unschedule setting jobDataMap.put(UNSCHEDULE, Boolean.valueOf(unschedule)); } + + /** + * Creates a new Scheduler + */ + private class QuartzSchedulerCreator implements Runnable { + @Nullable + private volatile Either scheduler = null; + + @Nullable + public Either getScheduler() { + return scheduler; + } + + @Override + public void run() { + try { + final SchedulerFactory schedulerFactory = new StdSchedulerFactory(getQuartzProperties()); + this.scheduler = Either.Right(schedulerFactory.getScheduler()); + } catch(final SchedulerException e) { + this.scheduler = Either.Left(e); + } + } + } } diff --git a/src/org/exist/scheduler/impl/ShutdownTask.java b/src/org/exist/scheduler/impl/ShutdownTask.java index 62709d6fe02..c7338f09e6d 100644 --- a/src/org/exist/scheduler/impl/ShutdownTask.java +++ b/src/org/exist/scheduler/impl/ShutdownTask.java @@ -37,12 +37,6 @@ */ public class ShutdownTask implements SystemTask { - private final ThreadGroup threadGroup; - - public ShutdownTask() { - this.threadGroup = Thread.currentThread().getThreadGroup(); - } - @Override public String getName() { return "Database Shutdown"; @@ -57,7 +51,7 @@ public void execute(final DBBroker broker) throws EXistException { //NOTE - shutdown must be executed asynchronously from the scheduler, to avoid a deadlock with shutting down the scheduler final Callable shutdownCallable = new AsyncShutdown(broker.getBrokerPool()); - Executors.newSingleThreadExecutor(new 
NamedThreadFactory("shutdownTask-asyncShutdown")).submit(shutdownCallable); + Executors.newSingleThreadExecutor(new NamedThreadFactory(broker.getBrokerPool(), "shutdown-task-async-shutdown")).submit(shutdownCallable); } @Override diff --git a/src/org/exist/scheduler/impl/quartz.properties b/src/org/exist/scheduler/impl/quartz.properties index 8a1a15c0b92..4ebd73d27f4 100644 --- a/src/org/exist/scheduler/impl/quartz.properties +++ b/src/org/exist/scheduler/impl/quartz.properties @@ -2,16 +2,14 @@ # to create a Quartz Scheduler Instance, if a different # properties file is not explicitly specified. # - -org.quartz.scheduler.instanceName = DefaultQuartzScheduler org.quartz.scheduler.rmi.export = false org.quartz.scheduler.rmi.proxy = false org.quartz.scheduler.wrapJobExecutionInUserTransaction = false -org.quartz.scheduler.skipUpdateCheck = true org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool org.quartz.threadPool.threadCount = 4 org.quartz.threadPool.threadPriority = 5 +org.quartz.threadPool.threadsInheritGroupOfInitializingThread = true org.quartz.threadPool.threadsInheritContextClassLoaderOfInitializingThread = true org.quartz.jobStore.misfireThreshold = 60000 diff --git a/src/org/exist/security/ACLPermission.java b/src/org/exist/security/ACLPermission.java index 4ee78a2b1cd..0c71b03df6e 100644 --- a/src/org/exist/security/ACLPermission.java +++ b/src/org/exist/security/ACLPermission.java @@ -27,7 +27,7 @@ */ public interface ACLPermission { - public static enum ACE_ACCESS_TYPE { + enum ACE_ACCESS_TYPE { DENIED(01), ALLOWED(02); private final int val; @@ -51,7 +51,7 @@ public static ACE_ACCESS_TYPE fromVal(int val) { } } - public static enum ACE_TARGET { + enum ACE_TARGET { USER(01), GROUP(02); private final int val; @@ -75,28 +75,47 @@ public static ACE_TARGET fromVal(int val) { } } - public short getVersion(); + short getVersion(); - public void addACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, String name, int mode) throws 
PermissionDeniedException; + void addACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, String name, String modeStr) throws PermissionDeniedException; - public int getACECount(); + void addACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, String name, int mode) throws PermissionDeniedException; - public ACE_ACCESS_TYPE getACEAccessType(int index); + void insertACE(int index, ACE_ACCESS_TYPE access_type, ACE_TARGET target, String name, String modeStr) throws PermissionDeniedException; - public ACE_TARGET getACETarget(int index); + void modifyACE(int index, ACE_ACCESS_TYPE access_type, String modeStr) throws PermissionDeniedException; + + void modifyACE(int index, ACE_ACCESS_TYPE access_type, int mode) throws PermissionDeniedException; + + void removeACE(int index) throws PermissionDeniedException; + + int getACECount(); + + ACE_ACCESS_TYPE getACEAccessType(int index); + + ACE_TARGET getACETarget(int index); /** * Convenience method for getting the name of the user or group * of which this ace is applied to */ - public String getACEWho(int index); + String getACEWho(int index); - public int getACEMode(int index); + int getACEMode(int index); /** * Clears all ACE's */ - public void clear() throws PermissionDeniedException; + void clear() throws PermissionDeniedException; - public boolean isCurrentSubjectCanWriteACL(); -} \ No newline at end of file + boolean isCurrentSubjectCanWriteACL(); + + /** + * Determines if this ACL is equal to another ACL. + * + * @param other Another ACL to compare against. + * + * @return true if this ACL is equal to the other ACL. 
+ */ + boolean aclEquals(final ACLPermission other); +} diff --git a/src/org/exist/security/AbstractRealm.java b/src/org/exist/security/AbstractRealm.java index 24b36a210ea..e14c8c4d20c 100644 --- a/src/org/exist/security/AbstractRealm.java +++ b/src/org/exist/security/AbstractRealm.java @@ -1,23 +1,21 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2010-2011 The eXist Project + * Copyright (C) 2001-2016 The eXist Project * http://exist-db.org - * + * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 
- * - * $Id$ + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ package org.exist.security; @@ -28,11 +26,8 @@ import java.util.List; import java.util.Map; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; -import java.util.function.Consumer; -import java.util.function.Function; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.exist.Database; import org.exist.EXistException; import org.exist.collections.Collection; @@ -44,15 +39,14 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.security.internal.AccountImpl; import org.exist.security.internal.GroupImpl; -import org.exist.security.internal.SecurityManagerImpl; import org.exist.security.realm.Realm; import org.exist.security.utils.Utils; import org.exist.storage.DBBroker; -import org.exist.storage.txn.TransactionManager; +import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.Txn; +import org.exist.util.ConcurrentValueWrapper; import org.exist.util.LockException; -import com.evolvedbinary.j8fu.function.Consumer2E; -import com.evolvedbinary.j8fu.function.ConsumerE; import org.exist.xmldb.XmldbURI; /** @@ -61,12 +55,12 @@ */ public abstract class AbstractRealm implements Realm, Configurable { + private static final Logger LOG = LogManager.getLogger(AbstractRealm.class); + protected final PrincipalDbByName usersByName = new PrincipalDbByName<>(); protected final PrincipalDbByName groupsByName = new PrincipalDbByName<>(); - - - private SecurityManager sm; + private final SecurityManager sm; protected Configuration configuration; protected Collection collectionRealm = null; @@ 
-75,7 +69,7 @@ public abstract class AbstractRealm implements Realm, Configurable { protected Collection collectionRemovedAccounts = null; protected Collection collectionRemovedGroups = null; - public AbstractRealm(SecurityManager sm, Configuration config) { + public AbstractRealm(final SecurityManager sm, final Configuration config) { this.sm = sm; this.configuration = Configurator.configure(this, config); } @@ -90,21 +84,16 @@ public SecurityManager getSecurityManager() { return sm; } - protected void initialiseRealmStorage(final DBBroker broker) throws EXistException { - + private void initialiseRealmStorage(final DBBroker broker, final Txn transaction) throws EXistException { final XmldbURI realmCollectionURL = SecurityManager.SECURITY_COLLECTION_URI.append(getId()); - - final TransactionManager transact = broker.getBrokerPool().getTransactionManager(); - try(final Txn txn = transact.beginTransaction()) { - collectionRealm = Utils.getOrCreateCollection(broker, txn, realmCollectionURL); - - collectionAccounts = Utils.getOrCreateCollection(broker, txn, realmCollectionURL.append("accounts")); - collectionGroups = Utils.getOrCreateCollection(broker, txn, realmCollectionURL.append("groups")); + try { + collectionRealm = Utils.getOrCreateCollection(broker, transaction, realmCollectionURL); - collectionRemovedAccounts = Utils.getOrCreateCollection(broker, txn, realmCollectionURL.append("accounts").append("removed")); - collectionRemovedGroups = Utils.getOrCreateCollection(broker, txn, realmCollectionURL.append("groups").append("removed")); + collectionAccounts = Utils.getOrCreateCollection(broker, transaction, realmCollectionURL.append("accounts")); + collectionGroups = Utils.getOrCreateCollection(broker, transaction, realmCollectionURL.append("groups")); - transact.commit(txn); + collectionRemovedAccounts = Utils.getOrCreateCollection(broker, transaction, realmCollectionURL.append("accounts").append("removed")); + collectionRemovedGroups = 
Utils.getOrCreateCollection(broker, transaction, realmCollectionURL.append("groups").append("removed")); } catch(final PermissionDeniedException | IOException | TriggerException | LockException e) { throw new EXistException(e); @@ -120,18 +109,18 @@ private void loadGroupsFromRealmStorage(final DBBroker broker) throws Configurat final Configuration conf = Configurator.parse(broker.getBrokerPool(), i.next()); final String name = conf.getProperty("name"); - groupsByName.modifyE(principalDb -> { + groupsByName.writeE(principalDb -> { if(name != null && !principalDb.containsKey(name)) { //Group group = instantiateGroup(this, conf); final GroupImpl group = new GroupImpl(r, conf); - getSecurityManager().addGroup(group.getId(), group); + getSecurityManager().registerGroup(group); principalDb.put(group.getName(), group); //set collection if(group.getId() > 0) { - ((AbstractPrincipal)group).setCollection(broker, collectionGroups); + group.setCollection(broker, collectionGroups); } } }); @@ -152,7 +141,7 @@ private void loadRemovedGroupsFromRealmStorage(final DBBroker broker) throws Con final GroupImpl group = new GroupImpl(this, conf); group.removed = true; - getSecurityManager().addGroup(group.getId(), group); + getSecurityManager().registerGroup(group); } } } @@ -169,18 +158,18 @@ private void loadAccountsFromRealmStorage(final DBBroker broker) throws Configur final Configuration conf = Configurator.parse(broker.getBrokerPool(), doc); final String name = conf.getProperty("name"); - usersByName.modifyE(principalDb -> { + usersByName.writeE(principalDb -> { if(name != null && !principalDb.containsKey(name)) { //A account = instantiateAccount(this, conf); final Account account; try { account = new AccountImpl(r, conf); } catch (Throwable e) { - SecurityManagerImpl.LOG.error("Account object can't build up from '"+doc.getFileURI()+"'", e); + LOG.error("Account object can't be built from '" + doc.getFileURI() + "'", e); return; } - getSecurityManager().addUser(account.getId(), 
account); + getSecurityManager().registerAccount(account); principalDb.put(account.getName(), account); //set collection @@ -203,10 +192,10 @@ private void loadRemovedAccountsFromRealmStorage(final DBBroker broker) throws C if (id != null && !getSecurityManager().hasUser(id)) { //A account = instantiateAccount(this, conf, true); - final AccountImpl account = new AccountImpl( this, conf ); - account.removed = true; + final AccountImpl account = new AccountImpl( this, conf ); + account.removed = true; - getSecurityManager().addUser(account.getId(), account); + getSecurityManager().registerAccount(account); } } } @@ -214,9 +203,9 @@ private void loadRemovedAccountsFromRealmStorage(final DBBroker broker) throws C @Override - public void start(final DBBroker broker) throws EXistException { + public void start(final DBBroker broker, final Txn transaction) throws EXistException { - initialiseRealmStorage(broker); + initialiseRealmStorage(broker, transaction); try { loadGroupsFromRealmStorage(broker); @@ -231,20 +220,20 @@ public void start(final DBBroker broker) throws EXistException { @Override - public void sync(DBBroker broker) throws EXistException { + public void sync(final DBBroker broker) { } @Override - public void stop(DBBroker broker) throws EXistException { + public void stop(final DBBroker broker) { } - public void save() throws PermissionDeniedException, EXistException, IOException { + public void save() throws PermissionDeniedException, EXistException { configuration.save(); } //Accounts management methods public final Account registerAccount(final Account account) { - usersByName.modify(principalDb -> { + usersByName.write(principalDb -> { if(principalDb.containsKey(account.getName())) { throw new IllegalArgumentException("Account " + account.getName() + " exist."); } @@ -256,7 +245,7 @@ public final Account registerAccount(final Account account) { } public final Group registerGroup(final Group group) { - groupsByName.modify(principalDb -> { + 
groupsByName.write(principalDb -> { if(principalDb.containsKey(group.getName())) { throw new IllegalArgumentException("Group " + group.getName() + " already exists."); } @@ -278,13 +267,13 @@ public final boolean hasAccount(final String accountName) { } @Override - public final boolean hasAccount(Account account) { + public final boolean hasAccount(final Account account) { return hasAccount(account.getName()); } @Override public final java.util.Collection getAccounts() { - return usersByName.read(principalDb -> principalDb.values()); + return usersByName.read(Map::values); } //Groups management methods @@ -294,7 +283,7 @@ public final boolean hasGroup(final String groupName) { } @Override - public final boolean hasGroup(Group role) { + public final boolean hasGroup(final Group role) { return hasGroup(role.getName()); } @@ -310,7 +299,7 @@ public final java.util.Collection getRoles() { @Override public final java.util.Collection getGroups() { - return groupsByName.read(principalDb -> principalDb.values()); + return groupsByName.read(Map::values); } //collections related methods @@ -333,7 +322,7 @@ public Group addGroup(final DBBroker broker, final Group group) throws Permissio } @Override - public Account addAccount(Account account) throws PermissionDeniedException, EXistException, ConfigurationException { + public Account addAccount(final Account account) throws PermissionDeniedException, EXistException { if(account.getRealmId() == null) { throw new ConfigurationException("Account's realmId is null."); } @@ -360,17 +349,17 @@ public boolean updateAccount(final Account account) throws PermissionDeniedExcep //check: add account to group String[] groups = account.getGroups(); - for (int i = 0; i < groups.length; i++) { - if (!(updatingAccount.hasGroup(groups[i]))) { - updatingAccount.addGroup(groups[i]); + for (final String group : groups) { + if (!(updatingAccount.hasGroup(group))) { + updatingAccount.addGroup(group); } } //check: remove account from group groups = 
updatingAccount.getGroups(); - for (int i = 0; i < groups.length; i++) { - if(!(account.hasGroup(groups[i]))) { - updatingAccount.remGroup(groups[i]); + for (final String group : groups) { + if (!(account.hasGroup(group))) { + updatingAccount.remGroup(group); } } @@ -391,7 +380,7 @@ public boolean updateAccount(final Account account) throws PermissionDeniedExcep } - ((AbstractPrincipal)updatingAccount).save(); + updatingAccount.save(); return true; } @@ -454,84 +443,47 @@ public Configuration getConfiguration() { @Override public List findUsernamesWhereNameStarts(final String startsWith) { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public List findUsernamesWhereUsernameStarts(final String startsWith) { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public List findAllGroupNames() { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public List findAllUserNames() { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public List findAllGroupMembers(final String groupName) { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public List findUsernamesWhereNamePartStarts(final String startsWith) { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public java.util.Collection findGroupnamesWhereGroupnameStarts(final String startsWith) { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } @Override public java.util.Collection findGroupnamesWhereGroupnameContains(final String fragment) { - return Collections.EMPTY_LIST; + return Collections.emptyList(); } - protected static class PrincipalDbByName { - private final Map db = new HashMap<>(65); - private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - private final ReadLock readLock = lock.readLock(); - private final WriteLock writeLock = lock.writeLock(); - - public R read(final Function, R> readOp) { - 
readLock.lock(); - try { - return readOp.apply(db); - } finally { - readLock.unlock(); - } - } - - public final void modify(final Consumer> writeOp) { - writeLock.lock(); - try { - writeOp.accept(db); - } finally { - writeLock.unlock(); - } - } - - public final void modifyE(final ConsumerE, E> writeOp) throws E { - writeLock.lock(); - try { - writeOp.accept(db); - } finally { - writeLock.unlock(); - } - } - - public final void modify2E(final Consumer2E, E1, E2> writeOp) throws E1, E2 { - writeLock.lock(); - try { - writeOp.accept(db); - } finally { - writeLock.unlock(); - } + protected static class PrincipalDbByName extends ConcurrentValueWrapper> { + public PrincipalDbByName() { + super(new HashMap<>(65)); } } -} \ No newline at end of file +} diff --git a/src/org/exist/security/AbstractUnixStylePermission.java b/src/org/exist/security/AbstractUnixStylePermission.java index 6c9e79e3a6f..533ffe3d6b0 100644 --- a/src/org/exist/security/AbstractUnixStylePermission.java +++ b/src/org/exist/security/AbstractUnixStylePermission.java @@ -243,9 +243,9 @@ private void setSimpleSymbolicMode(final String simpleSymbolicMode) setMode(simpleSymbolicModeToInt(simpleSymbolicMode)); } - private final static Pattern unixSymbolicModePattern = Pattern.compile("((?:[augo]*(?:[+\\-=](?:[" + READ_CHAR + SETUID_CHAR + STICKY_CHAR + WRITE_CHAR + EXECUTE_CHAR + "])+)+),?)+"); - private final static Pattern existSymbolicModePattern = Pattern.compile("(?:(?:" + USER_STRING + "|" + GROUP_STRING + "|" + OTHER_STRING + ")=(?:[+-](?:" + READ_STRING + "|" + WRITE_STRING + "|" + EXECUTE_STRING + "),?)+)+"); - private final static Pattern simpleSymbolicModePattern = Pattern.compile("(?:(?:" + READ_CHAR + "|" + UNSET_CHAR + ")(?:" + WRITE_CHAR + "|" + UNSET_CHAR + ")(?:[" + EXECUTE_CHAR + SETUID_CHAR + SETUID_CHAR_NO_EXEC + "]|" + UNSET_CHAR + ")){2}(?:" + READ_CHAR + "|" + UNSET_CHAR + ")(?:" + WRITE_CHAR + "|" + UNSET_CHAR + ")(?:[" + EXECUTE_CHAR + STICKY_CHAR + "]|" + UNSET_CHAR + ")"); + public 
static final Pattern UNIX_SYMBOLIC_MODE_PATTERN = Pattern.compile("((?:[augo]*(?:[+\\-=](?:[" + READ_CHAR + SETUID_CHAR + STICKY_CHAR + WRITE_CHAR + EXECUTE_CHAR + "])+)+),?)+"); + public static final Pattern EXIST_SYMBOLIC_MODE_PATTERN = Pattern.compile("(?:(?:" + USER_STRING + "|" + GROUP_STRING + "|" + OTHER_STRING + ")=(?:[+-](?:" + READ_STRING + "|" + WRITE_STRING + "|" + EXECUTE_STRING + "),?)+)+"); + public static final Pattern SIMPLE_SYMBOLIC_MODE_PATTERN = Pattern.compile("(?:(?:" + READ_CHAR + "|" + UNSET_CHAR + ")(?:" + WRITE_CHAR + "|" + UNSET_CHAR + ")(?:[" + EXECUTE_CHAR + SETUID_CHAR + SETUID_CHAR_NO_EXEC + "]|" + UNSET_CHAR + ")){2}(?:" + READ_CHAR + "|" + UNSET_CHAR + ")(?:" + WRITE_CHAR + "|" + UNSET_CHAR + ")(?:[" + EXECUTE_CHAR + STICKY_CHAR + "]|" + UNSET_CHAR + ")"); /** * Note: we don't need @PermissionRequired(user = IS_DBA | IS_OWNER) here @@ -263,16 +263,16 @@ private void setSimpleSymbolicMode(final String simpleSymbolicMode) @Override public final void setMode(final String modeStr) throws SyntaxException, PermissionDeniedException { - final Matcher simpleSymbolicModeMatcher = simpleSymbolicModePattern.matcher(modeStr); + final Matcher simpleSymbolicModeMatcher = SIMPLE_SYMBOLIC_MODE_PATTERN.matcher(modeStr); if(simpleSymbolicModeMatcher.matches()) { setSimpleSymbolicMode(modeStr); } else { - final Matcher unixSymbolicModeMatcher = unixSymbolicModePattern.matcher(modeStr); + final Matcher unixSymbolicModeMatcher = UNIX_SYMBOLIC_MODE_PATTERN.matcher(modeStr); if(unixSymbolicModeMatcher.matches()){ setUnixSymbolicMode(modeStr); } else { - final Matcher existSymbolicModeMatcher = existSymbolicModePattern.matcher(modeStr); + final Matcher existSymbolicModeMatcher = EXIST_SYMBOLIC_MODE_PATTERN.matcher(modeStr); if(existSymbolicModeMatcher.matches()) { setExistSymbolicMode(modeStr); } else { diff --git a/src/org/exist/security/GnuCryptoJceProviderStartupTrigger.java b/src/org/exist/security/GnuCryptoJceProviderStartupTrigger.java index 
376ee65befb..2e9d7345bdf 100644 --- a/src/org/exist/security/GnuCryptoJceProviderStartupTrigger.java +++ b/src/org/exist/security/GnuCryptoJceProviderStartupTrigger.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger; import org.exist.storage.DBBroker; import org.exist.storage.StartupTrigger; +import org.exist.storage.txn.Txn; import java.util.List; import java.util.Map; @@ -38,7 +39,7 @@ public class GnuCryptoJceProviderStartupTrigger implements StartupTrigger { GnuCryptoJceProviderStartupTrigger.class); @Override - public void execute(final DBBroker sysBroker, + public void execute(final DBBroker sysBroker, final Txn transaction, final Map> params) { java.security.Security.addProvider(new gnu.crypto.jce.GnuCrypto()); diff --git a/src/org/exist/security/NFSv4ACL.java b/src/org/exist/security/NFSv4ACL.java deleted file mode 100644 index 374f250a170..00000000000 --- a/src/org/exist/security/NFSv4ACL.java +++ /dev/null @@ -1,123 +0,0 @@ -package org.exist.security; - -/** - * - * http://tools.ietf.org/html/rfc3530#page-50 - * - * @author Adam Retter - */ -public class NFSv4ACL { - - private static class nfs4ace { - int type; - int flag; - int access_mask; - String who; - } - - - public final static int ACL4_SUPPORT_ALLOW_ACL = 0x00000001; - public final static int ACL4_SUPPORT_DENY_ACL = 0x00000002; - public final static int ACL4_SUPPORT_AUDIT_ACL = 0x00000004; - public final static int ACL4_SUPPORT_ALARM_ACL = 0x00000008; - - //TODO add support for ALARM_ACL - public final static int getaclsupport = ACL4_SUPPORT_ALLOW_ACL | ACL4_SUPPORT_DENY_ACL | ACL4_SUPPORT_AUDIT_ACL; - - //nfs4ace.type - public final static int ACE4_ACCESS_ALLOWED_ACE_TYPE = 0x00000000; - public final static int ACE4_ACCESS_DENIED_ACE_TYPE = 0x00000001; - public final static int ACE4_SYSTEM_AUDIT_ACE_TYPE = 0x00000002; - public final static int ACE4_SYSTEM_ALARM_ACE_TYPE = 0x00000003; - - /* - Clients should not attempt to set an ACE unless the server claims - support for that ACE 
type. If the server receives a request to set - an ACE that it cannot store, it MUST reject the request with - NFS4ERR_ATTRNOTSUPP. If the server receives a request to set an ACE - that it can store but cannot enforce, the server SHOULD reject the - request with NFS4ERR_ATTRNOTSUPP. - - Example: suppose a server can enforce NFS ACLs for NFS access but - cannot enforce ACLs for local access. If arbitrary processes can run - on the server, then the server SHOULD NOT indicate ACL support. On - the other hand, if only trusted administrative programs run locally, - then the server may indicate ACL support. - */ - //NFS4ERR_ATTRNOTSUPP - //NFS4ERR_ATTRNOTSUPP - - - //nfs4ace.access_mask - public final static int ACE4_READ_DATA = 0x00000001; - public final static int ACE4_LIST_DIRECTORY = 0x00000001; - public final static int ACE4_WRITE_DATA = 0x00000002; - public final static int ACE4_ADD_FILE = 0x00000002; - public final static int ACE4_APPEND_DATA = 0x00000004; - public final static int ACE4_ADD_SUBDIRECTORY = 0x00000004; - public final static int ACE4_READ_NAMED_ATTRS = 0x00000008; - public final static int ACE4_WRITE_NAMED_ATTRS = 0x00000010; - public final static int ACE4_EXECUTE = 0x00000020; - public final static int ACE4_DELETE_CHILD = 0x00000040; - public final static int ACE4_READ_ATTRIBUTES = 0x00000080; - public final static int ACE4_WRITE_ATTRIBUTES = 0x00000100; - public final static int ACE4_DELETE = 0x00010000; - public final static int ACE4_READ_ACL = 0x00020000; - public final static int ACE4_WRITE_ACL = 0x00040000; - public final static int ACE4_WRITE_OWNER = 0x00080000; - public final static int ACE4_SYNCHRONIZE = 0x00100000; - - - //nfs4ace.flag - public final static int ACE4_FILE_INHERIT_ACE = 0x00000001; - public final static int ACE4_DIRECTORY_INHERIT_ACE = 0x00000002; - public final static int ACE4_NO_PROPAGATE_INHERIT_ACE = 0x00000004; - public final static int ACE4_INHERIT_ONLY_ACE = 0x00000008; - public final static int 
ACE4_SUCCESSFUL_ACCESS_ACE_FLAG = 0x00000010; - public final static int ACE4_FAILED_ACCESS_ACE_FLAG = 0x00000020; - public final static int ACE4_IDENTIFIER_GROUP = 0x00000040; - - /* - A server need not support any of these flags. If the server supports - flags that are similar to, but not exactly the same as, these flags, - the implementation may define a mapping between the protocol-defined - flags and the implementation-defined flags. Again, the guiding - principle is that the file not appear to be more secure than it - really is. - - For example, suppose a client tries to set an ACE with - ACE4_FILE_INHERIT_ACE set but not ACE4_DIRECTORY_INHERIT_ACE. If the - server does not support any form of ACL inheritance, the server - should reject the request with NFS4ERR_ATTRNOTSUPP. If the server - supports a single "inherit ACE" flag that applies to both files and - directories, the server may reject the request (i.e., requiring the - client to set both the file and directory inheritance flags). The - server may also accept the request and silently turn on the - ACE4_DIRECTORY_INHERIT_ACE flag. 
- */ - - - public final static String WHO4_OWNER = "OWNER@"; - public final static String WHO4_GROUP = "GROUP@"; - public final static String WHO4_EVERYONE = "EVERYONE@"; - public final static String WHO4_INTERACTIVE = "INTERACTIVE@"; - public final static String WHO4_NETWORK = "NETWORK@"; - public final static String WHO4_DIALUP = "DIALUP@"; - public final static String WHO4_BATCH = "BATCH@"; - public final static String WHO4_ANONYMOUS = "ANONYMOUS@"; - public final static String WHO4_AUTHENTICATED = "AUTHENTICATED@"; - public final static String WHO4_SERVICE = "SERVICE@"; - - public final static int MODE4_SUID = 0x800; /* set user id on execution */ - public final static int MODE4_SGID = 0x400; /* set group id on execution */ - public final static int MODE4_SVTX = 0x200; /* save text even after use */ - public final static int MODE4_RUSR = 0x100; /* read permission: owner */ - public final static int MODE4_WUSR = 0x080; /* write permission: owner */ - public final static int MODE4_XUSR = 0x040; /* execute permission: owner */ - public final static int MODE4_RGRP = 0x020; /* read permission: group */ - public final static int MODE4_WGRP = 0x010; /* write permission: group */ - public final static int MODE4_XGRP = 0x008; /* execute permission: group */ - public final static int MODE4_ROTH = 0x004; /* read permission: other */ - public final static int MODE4_WOTH = 0x002; /* write permission: other */ - public final static int MODE4_XOTH = 0x001; /* execute permission: other */ -} \ No newline at end of file diff --git a/src/org/exist/security/Permission.java b/src/org/exist/security/Permission.java index d47a03cb869..e7d0e38b230 100644 --- a/src/org/exist/security/Permission.java +++ b/src/org/exist/security/Permission.java @@ -38,7 +38,8 @@ public interface Permission { int DEFAULT_SYSTEM_ETC_COLLECTION_PERM = 0755; int DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM = 0770; - + + int DEFAULT_TEMPORARY_COLLECTION_PERM = 0771; int DEFAULT_TEMPORARY_DOCUMENT_PERM = 0771; 
int SET_UID = 04; @@ -185,7 +186,7 @@ public interface Permission { * The eXist symbolic format should be avoided * in new applications as it is deprecated * - * @param str The new mode + * @param modeStr The new mode * @exception SyntaxException Description of the Exception */ void setMode(String modeStr) throws SyntaxException, PermissionDeniedException; @@ -239,4 +240,8 @@ public interface Permission { boolean isCurrentSubjectInGroup(); boolean isCurrentSubjectInGroup(int groupId); + + boolean isPosixChownRestricted(); + + Permission copy(); } diff --git a/src/org/exist/security/PermissionFactory.java b/src/org/exist/security/PermissionFactory.java index 0ac6b61001e..50d2ef5e83d 100644 --- a/src/org/exist/security/PermissionFactory.java +++ b/src/org/exist/security/PermissionFactory.java @@ -22,22 +22,33 @@ package org.exist.security; import java.io.IOException; +import java.util.List; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import com.evolvedbinary.j8fu.Either; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.collections.Collection; -import org.exist.collections.triggers.TriggerException; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; +import org.exist.security.internal.aider.ACEAider; +import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; -import org.exist.storage.txn.TransactionException; -import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import com.evolvedbinary.j8fu.function.ConsumerE; import org.exist.util.LockException; +import org.exist.util.SyntaxException; import org.exist.xmldb.XmldbURI; import org.exist.xquery.XPathException; +import static org.exist.security.AbstractUnixStylePermission.SIMPLE_SYMBOLIC_MODE_PATTERN; +import static org.exist.security.AbstractUnixStylePermission.UNIX_SYMBOLIC_MODE_PATTERN; 
+import static org.exist.security.Permission.*; +import static org.exist.storage.DBBroker.POSIX_CHOWN_RESTRICTED_PROPERTY; + /** * Instantiates an appropriate Permission class based on the current configuration * @@ -53,7 +64,7 @@ public class PermissionFactory { */ public static Permission getDefaultResourcePermission(final SecurityManager sm) { - //TODO consider loading Permission.DEFAULT_PERM from conf.xml instead + //TODO(AR) consider loading Permission.DEFAULT_PERM from conf.xml instead final Subject currentSubject = sm.getDatabase().getActiveBroker().getCurrentSubject(); final int mode = Permission.DEFAULT_RESOURCE_PERM & ~ currentSubject.getUserMask(); @@ -67,7 +78,7 @@ public static Permission getDefaultResourcePermission(final SecurityManager sm) */ public static Permission getDefaultCollectionPermission(final SecurityManager sm) { - //TODO consider loading Permission.DEFAULT_PERM from conf.xml instead + //TODO(AR) consider loading Permission.DEFAULT_PERM from conf.xml instead final Subject currentSubject = sm.getDatabase().getActiveBroker().getCurrentSubject(); final int mode = Permission.DEFAULT_COLLECTION_PERM & ~ currentSubject.getUserMask(); @@ -110,53 +121,503 @@ public static Permission getPermission(final SecurityManager sm, final String us return permission; } - public static void updatePermissions(final DBBroker broker, final XmldbURI pathUri, final ConsumerE permissionModifier) throws PermissionDeniedException { - final TransactionManager transact = broker.getBrokerPool().getTransactionManager(); - try(final Txn transaction = transact.beginTransaction()) { - Collection collection = null; - try { - collection = broker.openCollection(pathUri, LockMode.WRITE_LOCK); + private static void updatePermissions(final DBBroker broker, final Txn transaction, final XmldbURI pathUri, final ConsumerE permissionModifier) throws PermissionDeniedException { + final BrokerPool brokerPool = broker.getBrokerPool(); + try { + try(final Collection collection = 
broker.openCollection(pathUri, LockMode.WRITE_LOCK)) { if (collection == null) { - DocumentImpl doc = null; - try { - doc = broker.getXMLResource(pathUri, LockMode.WRITE_LOCK); - if (doc == null) { - transact.abort(transaction); + try(final LockedDocument lockedDoc = broker.getXMLResource(pathUri, LockMode.WRITE_LOCK)) { + + if (lockedDoc == null) { throw new XPathException("Resource or collection '" + pathUri.toString() + "' does not exist."); } + final DocumentImpl doc = lockedDoc.getDocument(); + // keep a write lock in the transaction - transaction.acquireLock(doc.getUpdateLock(), LockMode.WRITE_LOCK); + transaction.acquireDocumentLock(() -> brokerPool.getLockManager().acquireDocumentWriteLock(doc.getURI())); + final Permission permissions = doc.getPermissions(); permissionModifier.accept(permissions); broker.storeXMLResource(transaction, doc); - } finally { - if(doc != null) { - doc.getUpdateLock().release(LockMode.WRITE_LOCK); - } } - transact.commit(transaction); - broker.flush(); } else { // keep a write lock in the transaction - transaction.acquireLock(collection.getLock(), LockMode.WRITE_LOCK); + transaction.acquireCollectionLock(() -> brokerPool.getLockManager().acquireCollectionWriteLock(collection.getURI())); final Permission permissions = collection.getPermissionsNoLock(); permissionModifier.accept(permissions); broker.saveCollection(transaction, collection); - transact.commit(transaction); - broker.flush(); - } - } finally { - if(collection != null) { - collection.release(LockMode.WRITE_LOCK); } + broker.flush(); } - } catch(final XPathException | PermissionDeniedException | IOException | TriggerException | TransactionException | LockException e) { + } catch(final XPathException | PermissionDeniedException | IOException | LockException e) { throw new PermissionDeniedException("Permission to modify permissions is denied for user '" + broker.getCurrentSubject().getName() + "' on '" + pathUri.toString() + "': " + e.getMessage(), e); } } -} \ No newline 
at end of file + + /** + * Changes the ownership of a resource in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param transaction the database transaction; + * @param pathUri the URI to a resource in the database. + * @param owner the new owner for the resource. + * @param group thr new group for the resource. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chown(final DBBroker broker, final Txn transaction, final XmldbURI pathUri, final Optional owner, final Optional group) throws PermissionDeniedException { + updatePermissions(broker, transaction, pathUri, permission -> chown(broker, permission, owner, group)); + } + + /** + * Changes the ownership of a Collection in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param collection the URI to a Collection in the database. + * @param owner the new owner for the collection. + * @param group thr new group for the collection. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chown(final DBBroker broker, final Collection collection, final Optional owner, final Optional group) throws PermissionDeniedException { + chown(broker, collection.getPermissions(), owner, group); + } + + /** + * Changes the ownership of a Document in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param document the URI to a Document in the database. + * @param owner the new owner for the document. + * @param group thr new group for the document. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. 
+ */ + public static void chown(final DBBroker broker, final DocumentImpl document, final Optional owner, final Optional group) throws PermissionDeniedException { + chown(broker, document.getPermissions(), owner, group); + } + + public static void chown(final DBBroker broker, final Permission permission, final Optional owner, final Optional group) throws PermissionDeniedException { + if ((!owner.isPresent()) && !group.isPresent()) { + throw new IllegalArgumentException("Either owner or group must be provided"); + } + + final boolean changeOwner = owner.map(desiredOwner -> !permission.getOwner().getName().equals(desiredOwner)).orElse(false); + final boolean changeGroup = group.map(desiredGroup -> !permission.getGroup().getName().equals(desiredGroup)).orElse(false); + + // enforce security checks + final boolean posixChownRestricted = broker.getConfiguration().getProperty(POSIX_CHOWN_RESTRICTED_PROPERTY, true); + if (posixChownRestricted) { + if (changeOwner && !permission.isCurrentSubjectDBA()) { + // Only a superuser process can change the user ID of the file. + + throw new PermissionDeniedException("Only a DBA can change the user ID of a resource when posix-chown-restricted is in effect."); + } + + if (changeGroup && !permission.isCurrentSubjectDBA()) { + + /* + A non-superuser process can change the group ID of the file if the process owns the file + (the effective user ID equals the user ID of the file) + */ + if (!permission.isCurrentSubjectOwner()) { + throw new PermissionDeniedException("You cannot change the group ID of a file you do not own when posix-chown-restricted is in effect."); + } + // and, group equals either the effective group ID of the process or one of the process’s supplementary group IDs. 
+ final int desiredGroupId = broker.getBrokerPool().getSecurityManager().getGroup(group.get()).getId(); + if (!permission.isCurrentSubjectInGroup(desiredGroupId)) { + throw new PermissionDeniedException("You cannot change the group ID of a file to a group of which you are not a member when posix-chown-restricted is in effect."); + } + } + } else { + if (changeOwner) { + if ((!permission.isCurrentSubjectDBA()) && !permission.isCurrentSubjectOwner()) { + throw new PermissionDeniedException("Only a DBA or the resources owner can change the user ID of a resource."); + } + } + + if (changeGroup) { + if ((!permission.isCurrentSubjectDBA()) && !permission.isCurrentSubjectOwner()) { + throw new PermissionDeniedException("Only a DBA or the resources owner can change the group ID of a resource."); + } + } + } + + if (!permission.isCurrentSubjectDBA()) { + /* + If this is called by a process other than a superuser process, on successful return, + both the set-user-ID and the set-group-ID bits are cleared. + + MUST be done before changing the owner or group to prevent a privilege escalation attack + */ + + if (permission.isSetUid()) { + permission.setSetUid(false); + } + + if (permission.isSetGid()) { + permission.setSetGid(false); + } + } + + // change the owner + if (changeOwner) { + permission.setOwner(owner.get()); + } + + // change the group + if (changeGroup) { + permission.setGroup(group.get()); + } + } + + /** + * Changes the mode of a resource in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param transaction the database transaction. + * @param pathUri the URI to a resource in the database. + * @param modeStr the new mode for the resource. + * @param acl the new ACL for the resource. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. 
+ */ + public static void chmod_str(final DBBroker broker, final Txn transaction, final XmldbURI pathUri, final Optional modeStr, final Optional> acl) throws PermissionDeniedException { + updatePermissions(broker, transaction, pathUri, permission -> chmod_impl(broker, permission, modeStr.map(Either::Left), acl)); + } + + /** + * Changes the mode of a Collection in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param collection the URI to a Collection in the database. + * @param modeStr the new mode for the collection. + * @param acl the new ACL for the collection. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod_str(final DBBroker broker, final Collection collection, final Optional modeStr, final Optional> acl) throws PermissionDeniedException { + chmod_impl(broker, collection.getPermissions(), modeStr.map(Either::Left), acl); + } + + /** + * Changes the mode of a Document in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param document the URI to a Document in the database. + * @param modeStr the new mode for the document. + * @param acl the new ACL for the document. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod_str(final DBBroker broker, final DocumentImpl document, final Optional modeStr, final Optional> acl) throws PermissionDeniedException { + chmod_impl(broker, document.getPermissions(), modeStr.map(Either::Left), acl); + } + + /** + * Changes the mode of a resource in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param transaction the database transaction. + * @param pathUri the URI to a resource in the database. + * @param mode the new mode for the resource. 
+ * @param acl the new ACL for the resource. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod(final DBBroker broker, final Txn transaction, final XmldbURI pathUri, final Optional mode, final Optional> acl) throws PermissionDeniedException { + updatePermissions(broker, transaction, pathUri, permission -> chmod_impl(broker, permission, mode.map(Either::Right), acl)); + } + + /** + * Changes the mode of a Collection in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param collection the URI to a Collection in the database. + * @param mode the new mode for the collection. + * @param acl the new ACL for the collection. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod(final DBBroker broker, final Collection collection, final Optional mode, final Optional> acl) throws PermissionDeniedException { + chmod_impl(broker, collection.getPermissions(), mode.map(Either::Right), acl); + } + + /** + * Changes the mode of a Document in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param document the URI to a Document in the database. + * @param mode the new mode for the document. + * @param acl the new ACL for the document. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod(final DBBroker broker, final DocumentImpl document, final Optional mode, final Optional> acl) throws PermissionDeniedException { + chmod_impl(broker, document.getPermissions(), mode.map(Either::Right), acl); + } + + /** + * Changes the mode of permissions in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. 
+ * @param permissions the permissions in the database. + * @param mode the new mode for the permissions. + * @param acl the new ACL for the permissions. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod_str(final DBBroker broker, final Permission permissions, final Optional mode, final Optional> acl) throws PermissionDeniedException { + chmod_impl(broker, permissions, mode.map(Either::Left), acl); + } + + /** + * Changes the mode of permissions in the database + * inline with the rules of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param permissions the permissions in the database. + * @param mode the new mode for the permissions. + * @param acl the new ACL for the permissions. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chmod(final DBBroker broker, final Permission permissions, final Optional mode, final Optional> acl) throws PermissionDeniedException { + chmod_impl(broker, permissions, mode.map(Either::Right), acl); + } + + private static void chmod_impl(final DBBroker broker, final Permission permission, final Optional> mode, final Optional> acl) throws PermissionDeniedException { + if ((!mode.isPresent()) && !acl.isPresent()) { + throw new IllegalArgumentException("Either mode or acl must be provided"); + } + + try { + final boolean changeMode; + if (mode.isPresent()) { + if (mode.get().isLeft()) { + final Subject effectiveUser = broker.getCurrentSubject(); + final Permission other = new UnixStylePermission(broker.getBrokerPool().getSecurityManager(), effectiveUser.getId(), effectiveUser.getDefaultGroup().getId(), 0); + other.setMode(mode.get().left().get()); + changeMode = permission.getMode() != other.getMode(); + } else { + changeMode = permission.getMode() != mode.get().right().get().intValue(); + } + } else { + changeMode = false; + } + final boolean changeAcl = 
acl.map(desiredAces -> !aclEquals(permission, desiredAces)).orElse(false); + + /* + To change the permission bits of a file, the effective user ID of the process must be equal to the owner ID + of the file, or the process must have superuser permissions. + */ + if ((changeMode || changeAcl) && (!permission.isCurrentSubjectDBA()) && !permission.isCurrentSubjectOwner()) { + throw new PermissionDeniedException("Only a DBA or the resources owner can change the mode of a resource."); + } + + // change the mode + if (changeMode) { + + final boolean matchedGroup = permission.isCurrentSubjectInGroup(); + if (permission.isCurrentSubjectDBA() || matchedGroup) { + if (mode.get().isLeft()) { + permission.setMode(mode.get().left().get()); + } else { + permission.setMode(mode.get().right().get()); + } + + } else { + /* + If the group ID of the file does not equal either the effective group ID of the process or one of + the process’s supplementary group IDs and if the process does not have superuser privileges, + then the set-group-ID bit is automatically turned off. + This prevents a user from creating a set-group-ID file owned by a group that the user doesn’t + belong to. + */ + if (mode.get().isLeft()) { + permission.setMode(removeSetGid(mode.get().left().get())); + } else { + permission.setMode(removeSetGid(mode.get().right().get())); + } + } + } + + // change the acl + if (changeAcl) { + final ACLPermission aclPermission = (ACLPermission) permission; + aclPermission.clear(); + for (final ACEAider ace : acl.get()) { + aclPermission.addACE(ace.getAccessType(), ace.getTarget(), ace.getWho(), ace.getMode()); + } + } + } catch (final SyntaxException se) { + throw new PermissionDeniedException("Unrecognised mode syntax: " + se.getMessage(), se); + } + } + + /** + * Changes the ACL of a permissioned object in the database + * inline with the rules for chmod of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param permission the permissions of the object in the database. 
+ * @param permissionModifier a function which will modify the ACL. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chacl(final Permission permission, final ConsumerE permissionModifier) throws PermissionDeniedException { + if(permission instanceof SimpleACLPermission) { + chacl((SimpleACLPermission)permission, permissionModifier); + } else { + throw new PermissionDeniedException("ACL like permissions have not been enabled"); + } + } + + /** + * Changes the ACL of permissions in the database + * inline with the rules for chmod of POSIX.1-2017 (Issue 7, 2018 edition). + * + * @param broker the database broker. + * @param transaction the database transaction. + * @param pathUri the URI to a resource in the database. + * @param permissionModifier a function which will modify the ACL. + * + * @throws PermissionDeniedException if the calling process has insufficient permissions. + */ + public static void chacl(final DBBroker broker, final Txn transaction, final XmldbURI pathUri, final ConsumerE permissionModifier) throws PermissionDeniedException { + updatePermissions(broker, transaction, pathUri, permission -> { + if(permission instanceof SimpleACLPermission) { + chacl((SimpleACLPermission)permission, permissionModifier); + } else { + throw new PermissionDeniedException("ACL like permissions have not been enabled"); + } + }); + } + + public static void chacl(final SimpleACLPermission permission, final ConsumerE permissionModifier) throws PermissionDeniedException { + if (permissionModifier == null) { + throw new IllegalArgumentException("permissionModifier must be provided"); + } + + /* + To change the permission bits of a file, the effective user ID of the process must be equal to the owner ID + of the file, or the process must have superuser permissions. 
+ */ + if ((!permission.isCurrentSubjectDBA()) && !permission.isCurrentSubjectOwner()) { + throw new PermissionDeniedException("Only a DBA or the resources owner can change the ACL of a resource."); + } + + // change the acl + permissionModifier.accept(permission); + } + + /** + * Compares the ACEs in a permission's ACL against the provides ACEs. + * + * @param permission The permission ACL to compare against the otherAces. + * @param otherAces The ACEs to compare against the permissions's ACL. + * + * @return true if the {@code permission}'s ACL has the same ACEs as {@code otherAces}, false otherwise. + */ + private static boolean aclEquals(final Permission permission, final List otherAces) { + if (!(permission instanceof ACLPermission)) { + return false; + } + + final ACLPermission aclPermission = (ACLPermission)permission; + if (aclPermission.getACECount() != otherAces.size()) { + return false; + } + + for (int i = 0; i < otherAces.size(); i++) { + final ACEAider other = otherAces.get(i); + + if (aclPermission.getACEAccessType(i) != other.getAccessType() + || aclPermission.getACETarget(i) != other.getTarget() + || (!aclPermission.getACEWho(i).equals(other.getWho())) + || aclPermission.getACEMode(i) != other.getMode()) { + return false; + } + } + + return true; + } + + /** + * Removes any setGid bit from the provided mode string. + * + * @param modeStr The provided mode string. + * + * @return The mode string without a setGid bit. 
+ */ + private static String removeSetGid(final String modeStr) { + if (SIMPLE_SYMBOLIC_MODE_PATTERN.matcher(modeStr).matches()) { + final char groupExecute = modeStr.charAt(5); + if (groupExecute == SETGID_CHAR_NO_EXEC) { + return modeStr.substring(0, 5) + UNSET_CHAR + modeStr.substring(5); + } else if (groupExecute == SETGID_CHAR) { + return modeStr.substring(0, 5) + EXECUTE_CHAR + modeStr.substring(5); + } + } else { + if (UNIX_SYMBOLIC_MODE_PATTERN.matcher(modeStr).matches()) { + + // check for 'g+s' or 'g=s' + final Pattern ptnExtractGroupMode = Pattern.compile("[^g]*(g\\+|=)([^,s]*s[^,s]*)[^g]*"); + final Matcher mtcExtractGroupMode = ptnExtractGroupMode.matcher(modeStr); + if (mtcExtractGroupMode.matches()) { + final String requestedGroupOp = mtcExtractGroupMode.group(1); + final String requestedGroupMode = mtcExtractGroupMode.group(2); + final String noSetGidGroupMode = requestedGroupMode.replace("s", ""); + if (noSetGidGroupMode.isEmpty()) { + return modeStr.replace(requestedGroupOp + requestedGroupMode, ""); + } else { + return modeStr.replace(requestedGroupOp + requestedGroupMode, requestedGroupOp + noSetGidGroupMode); + } + } else { + // check for 'a+s' or 'a=s' + final Pattern ptnExtractAllMode = Pattern.compile("[^a]*a(\\+|=)([^,s]*s[^,s]*)[^a]*"); + final Matcher mtcExtractAllMode = ptnExtractAllMode.matcher(modeStr); + if (mtcExtractAllMode.matches()) { + final String requestedAllOpSymbol = mtcExtractAllMode.group(1); + final String requestedAllMode = mtcExtractAllMode.group(2); + final String noSetGidGroupMode = requestedAllMode.replace("s", ""); + + return + USER_CHAR + requestedAllOpSymbol + requestedAllMode + "," + + (noSetGidGroupMode.isEmpty() ? 
"" : (GROUP_CHAR + requestedAllOpSymbol + noSetGidGroupMode + ",")) + + OTHER_CHAR + requestedAllMode + requestedAllMode; + } + } + } + + // NOTE: we don't need to do anything for EXIST_SYMBOLIC_MODE_PATTERN as it does not support setting setGid + } + + return modeStr; + } + + /** + * Removes any setGid bit from the provided mode. + * + * @param mode The provided mode. + * + * @return The mode without a setGid bit. + */ + private static int removeSetGid(final int mode) { + return mode & ~0x800; + } +} diff --git a/src/org/exist/security/PermissionRequired.java b/src/org/exist/security/PermissionRequired.java index d3e217286b3..2afcbc59aee 100644 --- a/src/org/exist/security/PermissionRequired.java +++ b/src/org/exist/security/PermissionRequired.java @@ -33,22 +33,24 @@ @Target(value = {ElementType.METHOD, ElementType.PARAMETER}) public @interface PermissionRequired { - //int mode() default UNDEFINED; - int user() default UNDEFINED; - int group() default UNDEFINED; - int mode() default UNDEFINED; + // int mode() default UNDEFINED; + byte user() default UNDEFINED; + byte group() default UNDEFINED; + byte mode() default UNDEFINED; - public final static int UNDEFINED = 0; + int UNDEFINED = 0; - //user flags - public final static int IS_DBA = 04; - public final static int IS_OWNER = 02; + // test that POSIX_CHOWN_RESTRICTED is not set + byte NOT_POSIX_CHOWN_RESTRICTED = (byte)0x80; + + // user and group flags + byte IS_MEMBER = 0x4; + byte IS_DBA = 0x2; + byte IS_OWNER = 0x1; - //group flags - public final static int IS_MEMBER = 40; - //mode flags - public final static int ACL_WRITE = 04; - public final static int IS_SET_UID = 02; - public final static int IS_SET_GID = 01; + // mode flags + byte ACL_WRITE = 0x4; + byte IS_SET_UID = 0x2; + byte IS_SET_GID = 0x1; } \ No newline at end of file diff --git a/src/org/exist/security/PermissionRequiredAspect.java b/src/org/exist/security/PermissionRequiredAspect.java index 6cf0a31077e..315a4115d19 100644 --- 
a/src/org/exist/security/PermissionRequiredAspect.java +++ b/src/org/exist/security/PermissionRequiredAspect.java @@ -27,12 +27,7 @@ import org.aspectj.lang.annotation.Pointcut; import org.aspectj.lang.reflect.MethodSignature; -import static org.exist.security.PermissionRequired.UNDEFINED; -import static org.exist.security.PermissionRequired.IS_DBA; -import static org.exist.security.PermissionRequired.IS_OWNER; -import static org.exist.security.PermissionRequired.IS_MEMBER; -import static org.exist.security.PermissionRequired.ACL_WRITE; -import static org.exist.security.PermissionRequired.IS_SET_GID; +import static org.exist.security.PermissionRequired.*; /** * @author Adam Retter @@ -50,24 +45,33 @@ public void methodParameterWithPermissionRequired(Permission permission, Object @Before("methodParameterWithPermissionRequired(permission, o)") public void enforcePermissionsOnParameter(JoinPoint joinPoint, Permission permission, Object o) throws PermissionDeniedException { - //the next two lines can be replaced when this aspectj bug is closed - https://bugs.eclipse.org/bugs/show_bug.cgi?id=259416 + //TODO(AR) the next two lines can be replaced when this aspectj bug is closed - https://bugs.eclipse.org/bugs/show_bug.cgi?id=259416 final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); final PermissionRequired parameterPermissionRequired = (PermissionRequired)ms.getMethod().getParameterAnnotations()[0][0]; - //1) check if we should allow DBA access + // 1) check if we should allow DBA access if(((parameterPermissionRequired.user() & IS_DBA) == IS_DBA) && permission.isCurrentSubjectDBA()) { return; } - //2) check if the user is in the target group + // 2) check if the user is in the target group if((parameterPermissionRequired.user() & IS_MEMBER) == IS_MEMBER) { final Integer groupId = (Integer)o; if(permission.isCurrentSubjectInGroup(groupId)) { return; } } - - //3) check if we are looking for setGID + + // 3) check if we should allow access when 
POSIX_CHOWN_RESTRICTED is not set + if((parameterPermissionRequired.user() & NOT_POSIX_CHOWN_RESTRICTED) == NOT_POSIX_CHOWN_RESTRICTED + && !permission.isPosixChownRestricted()) { + final PermissionRequired methodPermissionRequired = ms.getMethod().getAnnotation(PermissionRequired.class); + if ((methodPermissionRequired.user() & IS_OWNER) == IS_OWNER && permission.isCurrentSubjectOwner()) { + return; + } + } + + // 4) check if we are looking for setGID if((parameterPermissionRequired.mode() & IS_SET_GID) == IS_SET_GID) { final Permission other = (Permission)o; if(other.isSetGid()) { @@ -119,7 +123,7 @@ public void enforcePermissions(JoinPoint joinPoint, Permission permission, Permi throw new PermissionDeniedException("You do not have appropriate access rights to modify permissions on this object"); } - //TODO change Pointcut so that @annotation values are directly bound. see - https://bugs.eclipse.org/bugs/show_bug.cgi?id=347684 + //TODO(AR) change Pointcut so that @annotation values are directly bound. 
see - https://bugs.eclipse.org/bugs/show_bug.cgi?id=347684 /* @Pointcut("execution(@org.exist.security.PermissionRequired * *(..)) && this(permission) && @annotation(org.exist.security.PermissionRequired(mode,user,group))") public void methodWithPermissionRequired(Permission permission, int mode, int user, int group) { diff --git a/src/org/exist/security/SecurityManager.java b/src/org/exist/security/SecurityManager.java index e2d3e507912..2f3abe7a28d 100644 --- a/src/org/exist/security/SecurityManager.java +++ b/src/org/exist/security/SecurityManager.java @@ -1,6 +1,6 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2001-2015 The eXist Project + * Copyright (C) 2001-2016 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or @@ -27,6 +27,7 @@ import org.exist.dom.persistent.DocumentImpl; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; import org.exist.xmldb.XmldbURI; /** @@ -51,11 +52,15 @@ public interface SecurityManager extends Configurable { String GUEST_GROUP = "guest"; String GUEST_USER = "guest"; - void attach(DBBroker broker) throws EXistException; + void attach(DBBroker broker, Txn transaction) throws EXistException; Database getDatabase(); Database database(); + void registerAccount(Account account); + + void registerGroup(Group group); + Account getAccount(int id); boolean hasAccount(String name); @@ -108,10 +113,6 @@ public interface SecurityManager extends Configurable { @Deprecated Subject getSubjectBySessionId(String sessionid); - void addGroup(int id, Group group); - - void addUser(int id, Account account); - boolean hasGroup(int id); boolean hasUser(int id); @@ -152,8 +153,8 @@ public interface SecurityManager extends Configurable { * @param document * @throws ConfigurationException */ - void processPramatter(DBBroker broker, DocumentImpl document) throws ConfigurationException; - void processPramatterBeforeSave(DBBroker 
broker, DocumentImpl document) throws ConfigurationException; + void processParameter(DBBroker broker, DocumentImpl document) throws ConfigurationException; + void processParameterBeforeSave(DBBroker broker, DocumentImpl document) throws ConfigurationException; /** * Particular web page for authentication. diff --git a/src/org/exist/security/SimpleACLPermission.java b/src/org/exist/security/SimpleACLPermission.java index 01823bbd978..55938878885 100644 --- a/src/org/exist/security/SimpleACLPermission.java +++ b/src/org/exist/security/SimpleACLPermission.java @@ -23,8 +23,10 @@ import java.io.IOException; import java.util.Arrays; + import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; + import static org.exist.security.PermissionRequired.IS_DBA; import static org.exist.security.PermissionRequired.IS_OWNER; import static org.exist.security.PermissionRequired.ACL_WRITE; @@ -32,7 +34,7 @@ /** * A simple ACL (Access Control List) implementation * which extends UnixStylePermission with additional - * ACEs (Access Control Entries) + * ACEs (Access Control Entries). 
* * everyone has READ_ACL * WRITE access implies WRITE_ACL @@ -41,93 +43,94 @@ */ public class SimpleACLPermission extends UnixStylePermission implements ACLPermission { - public final static short VERSION = 1; + public static final short VERSION = 1; - private final static int MAX_ACL_LENGTH = 255; //restrict to sizeof 1 byte + private static final int MAX_ACL_LENGTH = 255; //restrict to sizeof 1 byte private int acl[] = new int[0]; - public SimpleACLPermission(SecurityManager sm) { + public SimpleACLPermission(final SecurityManager sm) { super(sm); } - public SimpleACLPermission(SecurityManager sm, long vector) { + public SimpleACLPermission(final SecurityManager sm, final long vector) { super(sm, vector); } - - public SimpleACLPermission(SecurityManager sm, int ownerId, int groupId, int mode) { + + public SimpleACLPermission(final SecurityManager sm, final int ownerId, final int groupId, final int mode) { super(sm, ownerId, groupId, mode); } - - public void addUserACE(ACE_ACCESS_TYPE access_type, int userId, int mode) throws PermissionDeniedException { + + public void addUserACE(final ACE_ACCESS_TYPE access_type, final int userId, final int mode) throws PermissionDeniedException { addACE(access_type, ACE_TARGET.USER, userId, mode); } - - public void addGroupACE(ACE_ACCESS_TYPE access_type, int groupId, int mode) throws PermissionDeniedException { + + public void addGroupACE(final ACE_ACCESS_TYPE access_type, final int groupId, final int mode) throws PermissionDeniedException { addACE(access_type, ACE_TARGET.GROUP, groupId, mode); } - public void addACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, String name, String modeStr) throws PermissionDeniedException { + @Override + public void addACE(final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final String name, final String modeStr) throws PermissionDeniedException { addACE(access_type, target, lookupTargetId(target, name), modeStrToMode(modeStr)); } @Override - public void addACE(ACE_ACCESS_TYPE 
access_type, ACE_TARGET target, String name, int mode) throws PermissionDeniedException { + public void addACE(final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final String name, final int mode) throws PermissionDeniedException { addACE(access_type, target, lookupTargetId(target, name), mode); } - + @PermissionRequired(user = IS_DBA | IS_OWNER, mode = ACL_WRITE) - private void addACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, int id, int mode) throws PermissionDeniedException { - if(acl.length >= MAX_ACL_LENGTH) { + private void addACE(final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final int id, final int mode) throws PermissionDeniedException { + if (acl.length >= MAX_ACL_LENGTH) { throw new PermissionDeniedException("Maximum of " + MAX_ACL_LENGTH + " ACEs has been reached."); } - int newAcl[] = new int[acl.length + 1]; + final int newAcl[] = new int[acl.length + 1]; System.arraycopy(acl, 0, newAcl, 0, acl.length); newAcl[newAcl.length - 1] = encodeAsACE(access_type, target, id, mode); this.acl = newAcl; } - public void insertUserACE(int index, ACE_ACCESS_TYPE access_type, int userId, int mode) throws PermissionDeniedException { + public void insertUserACE(final int index, final ACE_ACCESS_TYPE access_type, final int userId, final int mode) throws PermissionDeniedException { insertACE(index, access_type, ACE_TARGET.USER, userId, mode); } - public void insertGroupACE(int index, ACE_ACCESS_TYPE access_type, int groupId, int mode) throws PermissionDeniedException { + public void insertGroupACE(final int index, final ACE_ACCESS_TYPE access_type, final int groupId, final int mode) throws PermissionDeniedException { insertACE(index, access_type, ACE_TARGET.GROUP, groupId, mode); } - public void insertACE(int index, ACE_ACCESS_TYPE access_type, ACE_TARGET target, String name, String modeStr) throws PermissionDeniedException { + @Override + public void insertACE(final int index, final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final 
String name, final String modeStr) throws PermissionDeniedException { insertACE(index, access_type, target, lookupTargetId(target, name), modeStrToMode(modeStr)); } @PermissionRequired(user = IS_DBA | IS_OWNER, mode = ACL_WRITE) - private void insertACE(int index, ACE_ACCESS_TYPE access_type, ACE_TARGET target, int id, int mode) throws PermissionDeniedException { - - if(acl.length >= MAX_ACL_LENGTH) { + private void insertACE(final int index, final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final int id, final int mode) throws PermissionDeniedException { + if (acl.length >= MAX_ACL_LENGTH) { throw new PermissionDeniedException("Maximum of " + MAX_ACL_LENGTH + " ACEs has been reached."); } - if(index < 0 || (acl.length > 0 && acl.length <= index)) { + if (index < 0 || (acl.length > 0 && acl.length <= index)) { throw new PermissionDeniedException("No Such ACE index " + index + " in ACL."); } - int newAcl[] = new int[acl.length + 1]; + final int newAcl[] = new int[acl.length + 1]; System.arraycopy(acl, 0, newAcl, 0, index); newAcl[index] = encodeAsACE(access_type, target, id, mode); - if(acl.length > 0) { - System.arraycopy(acl, index, newAcl, index+1, newAcl.length - index - 1); + if (acl.length > 0) { + System.arraycopy(acl, index, newAcl, index + 1, newAcl.length - index - 1); } this.acl = newAcl; } - private int modeStrToMode(String modeStr) throws PermissionDeniedException { - if(modeStr == null || modeStr.length() == 0 || modeStr.length() > 3) { + private int modeStrToMode(final String modeStr) throws PermissionDeniedException { + if (modeStr == null || modeStr.length() == 0 || modeStr.length() > 3) { throw new PermissionDeniedException("Invalid mode string '" + modeStr + "'"); } int mode = 0; - for(final char c : modeStr.toCharArray()) { - switch(c) { + for (final char c : modeStr.toCharArray()) { + switch (c) { case READ_CHAR: mode |= READ; break; @@ -146,17 +149,17 @@ private int modeStrToMode(String modeStr) throws PermissionDeniedException { 
return mode; } - private int lookupTargetId(ACE_TARGET target, String targetName) throws PermissionDeniedException { + private int lookupTargetId(final ACE_TARGET target, final String targetName) throws PermissionDeniedException { final int id; - if(target == ACE_TARGET.USER) { + if (target == ACE_TARGET.USER) { final Account account = sm.getAccount(targetName); - if(account == null) { + if (account == null) { throw new PermissionDeniedException("User Account for username '" + targetName + "' is unknown."); } id = account.getId(); - } else if(target == ACE_TARGET.GROUP) { + } else if (target == ACE_TARGET.GROUP) { final Group group = sm.getGroup(targetName); - if(group == null) { + if (group == null) { throw new PermissionDeniedException("User Group for groupname '" + targetName + "' is unknown."); } id = group.getId(); @@ -170,37 +173,40 @@ private int lookupTargetId(ACE_TARGET target, String targetName) throws Permissi * should return max of 29 bits - e.g. The maximum numeric value - 536870911 * exact encoding is [target(3),id(20),mode(3),access_type(3)] */ - private int encodeAsACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, int id, int mode) { - //ensure mode is just 3 bits max (rwu) - TODO maybe error if not 20 bits + private int encodeAsACE(final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, int id, int mode) { + //ensure mode is just 3 bits max (rwu) - TODO(AR) maybe error if not 20 bits mode = mode & 7; - //makes sure id is only 20 bits max - TODO maybe error if not 20 bits + //makes sure id is only 20 bits max - TODO(AR) maybe error if not 20 bits id = id & 1048575; return (target.getVal() << 26) | (id << 6) | (mode << 3) | access_type.getVal(); } @PermissionRequired(user = IS_DBA | IS_OWNER, mode = ACL_WRITE) - public void removeACE(int index) throws PermissionDeniedException { + @Override + public void removeACE(final int index) throws PermissionDeniedException { - if(index < 0 || index >= acl.length) { + if (index < 0 || index >= acl.length) 
{ throw new PermissionDeniedException("ACL Entry does not exist"); } - int newAcl[] = new int[acl.length - 1]; + final int newAcl[] = new int[acl.length - 1]; System.arraycopy(acl, 0, newAcl, 0, index); - System.arraycopy(acl, index+1, newAcl, index, newAcl.length - index); + System.arraycopy(acl, index + 1, newAcl, index, newAcl.length - index); this.acl = newAcl; } - public void modifyACE(int index, ACE_ACCESS_TYPE access_type, String modeStr) throws PermissionDeniedException { + @Override + public void modifyACE(final int index, final ACE_ACCESS_TYPE access_type, final String modeStr) throws PermissionDeniedException { modifyACE(index, access_type, modeStrToMode(modeStr)); } @PermissionRequired(user = IS_DBA | IS_OWNER, mode = ACL_WRITE) - public void modifyACE(int index, ACE_ACCESS_TYPE access_type, int mode) throws PermissionDeniedException { + @Override + public void modifyACE(final int index, final ACE_ACCESS_TYPE access_type, final int mode) throws PermissionDeniedException { - if(index < 0 || index >= acl.length) { + if (index < 0 || index >= acl.length) { throw new PermissionDeniedException("ACL Entry does not exist"); } @@ -213,11 +219,11 @@ public void modifyACE(int index, ACE_ACCESS_TYPE access_type, int mode) throws P */ @PermissionRequired(user = IS_DBA | IS_OWNER, mode = ACL_WRITE) @Override - public void clear() throws PermissionDeniedException { + public void clear() { acl = new int[0]; } - public int getACEId(int index) { + public int getACEId(final int index) { return (acl[index] >>> 6) & 1048575; } @@ -226,8 +232,8 @@ public int getACEId(int index) { * of which this ace is applied to */ @Override - public String getACEWho(int index) { - switch(getACETarget(index)) { + public String getACEWho(final int index) { + switch (getACETarget(index)) { case USER: return sm.getAccount(getACEId(index)).getName(); case GROUP: @@ -236,30 +242,30 @@ public String getACEWho(int index) { return null; } } - + @Override - public int getACEMode(int index) { + 
public int getACEMode(final int index) { return (acl[index] >>> 3) & 7; } - public String getACEModeString(int index) { + public String getACEModeString(final int index) { final int aceMode = getACEMode(index); - final char ch[] = new char[] { - (aceMode & READ) != READ ? UNSET_CHAR : READ_CHAR, - (aceMode & WRITE) != WRITE ? UNSET_CHAR : WRITE_CHAR, - (aceMode & EXECUTE) != EXECUTE ? UNSET_CHAR : EXECUTE_CHAR + final char ch[] = new char[]{ + (aceMode & READ) != READ ? UNSET_CHAR : READ_CHAR, + (aceMode & WRITE) != WRITE ? UNSET_CHAR : WRITE_CHAR, + (aceMode & EXECUTE) != EXECUTE ? UNSET_CHAR : EXECUTE_CHAR }; return String.valueOf(ch); } @Override - public ACE_TARGET getACETarget(int index) { + public ACE_TARGET getACETarget(final int index) { return ACE_TARGET.fromVal(acl[index] >>> 26); } @Override - public ACE_ACCESS_TYPE getACEAccessType(int index) { + public ACE_ACCESS_TYPE getACEAccessType(final int index) { return ACE_ACCESS_TYPE.fromVal(acl[index] & 7); } @@ -269,94 +275,103 @@ public int getACECount() { } @Override - public void read(VariableByteInput istream) throws IOException { + public void read(final VariableByteInput istream) throws IOException { super.read(istream); final int aclLength = istream.read(); acl = new int[aclLength]; - for(int i = 0; i < aclLength; i++) { + for (int i = 0; i < aclLength; i++) { acl[i] = istream.readInt(); } } @Override - public void write(VariableByteOutputStream ostream) throws IOException { + public void write(final VariableByteOutputStream ostream) throws IOException { super.write(ostream); ostream.write(acl.length); - for(int i = 0; i < acl.length; i++) { - ostream.writeInt(acl[i]); + for (final int ace : acl) { + ostream.writeInt(ace); } } /** - * Evaluation order is - - * + * Evaluation order is - + * * 1) ACL ACEs are evaluated first * 2) Classic Unix Style Permissions are evaluated second - * + * * The first match is considered the authority */ @Override - public boolean validate(Subject user, int mode) { + 
public boolean validate(final Subject user, final int mode) { //group dba has full access - if(user.hasDbaRole()) { + if (user.hasDbaRole()) { return true; } - + final int userId = user.getId(); final int userGroupIds[] = user.getGroupIds(); - - /*** START EXTENDED ACL VALIDATION ***/ - //exact encoding is [target(3),id(20),mode(3),access_type(3)] + + /* + * START EXTENDED ACL VALIDATION. + * + * exact encoding is [target(3),id(20),mode(3),access_type(3)] + */ //check ACL - for(final int ace : acl) { + for (final int ace : acl) { final int aceTarget = ace >>> 26; final int id = (ace >>> 6) & 1048575; final int aceMode = (ace >>> 3) & 7; final int accessType = ace & 7; - if((aceTarget & ACE_TARGET.USER.getVal()) == ACE_TARGET.USER.getVal()){ + if ((aceTarget & ACE_TARGET.USER.getVal()) == ACE_TARGET.USER.getVal()) { //check for a user - if(id == userId && (aceMode & mode) == mode) { - return(accessType == ACE_ACCESS_TYPE.ALLOWED.getVal()); + if (id == userId && (aceMode & mode) == mode) { + return (accessType == ACE_ACCESS_TYPE.ALLOWED.getVal()); } - } else if((aceTarget & ACE_TARGET.GROUP.getVal()) == ACE_TARGET.GROUP.getVal()){ + } else if ((aceTarget & ACE_TARGET.GROUP.getVal()) == ACE_TARGET.GROUP.getVal()) { //check for a group - for(final int userGroupId : userGroupIds) { - if(userGroupId == id && (aceMode & mode) == mode) { - return(accessType == ACE_ACCESS_TYPE.ALLOWED.getVal()); + for (final int userGroupId : userGroupIds) { + if (userGroupId == id && (aceMode & mode) == mode) { + return (accessType == ACE_ACCESS_TYPE.ALLOWED.getVal()); } } } } - /*** END EXTENDED ACL VALIDATION ***/ - - - /*** FALLBACK to UNIX STYLE VALIDATION ***/ + /* + * END EXTENDED ACL VALIDATION + */ + + + /* + * FALLBACK to UNIX STYLE VALIDATION + */ //check owner - if(userId == (vector >>> 32)) { //check owner + if (userId == (vector >>> 32)) { //check owner return (mode & ((vector >>> 28) & 7)) == mode; //check owner mode } //check group - - final int groupId = (int)((vector >>> 
8) & 1048575); - for(final int userGroupId : userGroupIds) { - if(userGroupId == groupId) { + + final int groupId = (int) ((vector >>> 8) & 1048575); + for (final int userGroupId : userGroupIds) { + if (userGroupId == groupId) { return (mode & ((vector >>> 4) & 7)) == mode; } } //check other - if((mode & (vector & 7)) == mode) { + if ((mode & (vector & 7)) == mode) { return true; } - /*** END FALLBACK to UNIX STYLE VALIDATION ***/ + /* + * END FALLBACK to UNIX STYLE VALIDATION + */ return false; } @@ -370,19 +385,72 @@ public short getVersion() { public boolean isCurrentSubjectCanWriteACL() { return validate(getCurrentSubject(), WRITE); } - + @Override public SimpleACLPermission copy() { - SimpleACLPermission prm = new SimpleACLPermission(sm, vector); - + final SimpleACLPermission prm = new SimpleACLPermission(sm, vector); + prm.acl = new int[acl.length]; System.arraycopy(acl, 0, prm.acl, 0, acl.length); - + return prm; } + /** + * Determines if this permisisons ACL is equal to that + * of another permissions ACL. + * + * @param other the other ACL to check equality against. 
+ * + * @return true if the ACLs are equal + */ + public boolean equalsAcl(final SimpleACLPermission other) { + if (other == null || other.getACECount() != getACECount()) { + return false; + } + + for (int i = 0; i < getACECount(); i++) { + + if(getACEAccessType(i) != other.getACEAccessType(i) + || getACETarget(i) != other.getACETarget(i) + || (!getACEWho(i).equals(other.getACEWho(i))) + || getACEMode(i) != other.getACEMode(i)) { + return false; + } + } + + return true; + } + @PermissionRequired(user = IS_DBA | IS_OWNER, mode = ACL_WRITE) public void copyAclOf(final SimpleACLPermission simpleACLPermission) { this.acl = Arrays.copyOf(simpleACLPermission.acl, simpleACLPermission.acl.length); } -} \ No newline at end of file + + @Override + public boolean aclEquals(final ACLPermission other) { + if (other == null) { + return false; + } + + if (other instanceof SimpleACLPermission) { + // optimisation for when both are the same type + return Arrays.equals(acl, ((SimpleACLPermission) other).acl); + } else { + if (getACECount() != other.getACECount()) { + return false; + } + + for (int i = 0; i < getACECount(); i++) { + if (getACEAccessType(i) != other.getACEAccessType(i) + || getACETarget(i) != other.getACETarget(i) + || (!getACEWho(i).equals(other.getACEWho(i))) + || getACEMode(i) != other.getACEMode(i)) { + return false; + } + } + + return true; + } + } +} diff --git a/src/org/exist/security/UnixStylePermission.java b/src/org/exist/security/UnixStylePermission.java index 3d9fbcef948..4e1ed550d1c 100644 --- a/src/org/exist/security/UnixStylePermission.java +++ b/src/org/exist/security/UnixStylePermission.java @@ -20,17 +20,16 @@ package org.exist.security; import java.io.IOException; -import static org.exist.security.PermissionRequired.IS_DBA; -import static org.exist.security.PermissionRequired.IS_MEMBER; -import static org.exist.security.PermissionRequired.IS_OWNER; -import static org.exist.security.PermissionRequired.IS_SET_GID; import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.security.internal.RealmImpl; +import org.exist.storage.DBBroker; import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; +import static org.exist.security.PermissionRequired.*; + /** * Manages the permissions assigned to a resource. This includes * the user who owns the resource, the owner group and the permissions @@ -149,8 +148,8 @@ public void setOwner(final String name) { } } - @PermissionRequired(user = IS_DBA) - private void setOwnerId(final int ownerId) { + @PermissionRequired(user = IS_DBA | IS_OWNER) + private void setOwnerId(@PermissionRequired(user = IS_DBA | NOT_POSIX_CHOWN_RESTRICTED) final int ownerId) { this.vector = ((long)ownerId << 32) | //left shift new ownerId into position (vector & 4294967295L); //extract everything from current permission except ownerId @@ -209,7 +208,7 @@ public void setGroup(final int id) { } @PermissionRequired(user = IS_DBA | IS_OWNER) - private void setGroupId(@PermissionRequired(user = IS_DBA | IS_MEMBER) final int groupId) { + private void setGroupId(@PermissionRequired(user = IS_DBA | IS_MEMBER | NOT_POSIX_CHOWN_RESTRICTED) final int groupId) { /* This function wrapper is really just used as a place to focus PermissionRequired checks for several public @@ -492,13 +491,7 @@ public boolean isCurrentSubjectOwner() { @Override public boolean isCurrentSubjectInGroup() { - final int groupId = getGroupId(); - for(final int currentSubjectGroupId : getCurrentSubject().getGroupIds()) { - if(groupId == currentSubjectGroupId) { - return true; - } - } - return false; + return isCurrentSubjectInGroup(getGroupId()); } @Override @@ -511,7 +504,13 @@ public boolean isCurrentSubjectInGroup(final int groupId) { return false; } + @Override public UnixStylePermission copy() { return new UnixStylePermission(sm, vector); } + + @Override + public boolean isPosixChownRestricted() { + return 
sm.getDatabase().getConfiguration().getProperty(DBBroker.POSIX_CHOWN_RESTRICTED_PROPERTY, true); + } } \ No newline at end of file diff --git a/src/org/exist/security/internal/EXistDBLoginModule.java b/src/org/exist/security/internal/EXistDBLoginModule.java index 62101084a1d..a585ebdd514 100644 --- a/src/org/exist/security/internal/EXistDBLoginModule.java +++ b/src/org/exist/security/internal/EXistDBLoginModule.java @@ -187,7 +187,7 @@ public boolean login() throws LoginException { * succeeded, or false otherwise. */ public boolean commit() throws LoginException { - if (succeeded == false) { + if (!succeeded) { return false; } else { // add a Principal (authenticated identity) @@ -228,7 +228,7 @@ public boolean commit() throws LoginException { public boolean abort() throws LoginException { if (succeeded == false) { return false; - } else if (succeeded == true && commitSucceeded == false) { + } else if (succeeded && !commitSucceeded) { // login succeeded but overall authentication failed succeeded = false; userPrincipal = null; diff --git a/src/org/exist/security/internal/RealmImpl.java b/src/org/exist/security/internal/RealmImpl.java index d21c3264432..8614240a705 100644 --- a/src/org/exist/security/internal/RealmImpl.java +++ b/src/org/exist/security/internal/RealmImpl.java @@ -1,30 +1,26 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2010-2011 The eXist Project + * Copyright (C) 2001-2016 The eXist Project * http://exist-db.org - * + * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - * - * $Id$ + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ package org.exist.security.internal; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; +import java.security.Principal; +import java.util.*; import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -57,12 +53,12 @@ * */ public class RealmImpl extends AbstractRealm { - - public static String ID = "exist"; //TODO: final "eXist-db"; private final static Logger LOG = LogManager.getLogger(RealmImpl.class); - static public void setPasswordRealm(String value) { + public static String ID = "exist"; //TODO: final "eXist-db"; + + static public void setPasswordRealm(final String value) { ID = value; } @@ -70,22 +66,20 @@ static public void setPasswordRealm(String value) { public final static int ADMIN_ACCOUNT_ID = 1048574; public final static int GUEST_ACCOUNT_ID = 1048573; public final static int UNKNOWN_ACCOUNT_ID = 1048572; - public final static int INITIAL_LAST_ACCOUNT_ID = 10; public final static int DBA_GROUP_ID = 1048575; public final static int GUEST_GROUP_ID = 1048574; public final static int UNKNOWN_GROUP_ID = 1048573; - public final static int INITIAL_LAST_GROUP_ID = 10; - protected final AccountImpl ACCOUNT_SYSTEM; - protected final AccountImpl ACCOUNT_UNKNOWN; + final AccountImpl ACCOUNT_SYSTEM; + final AccountImpl ACCOUNT_UNKNOWN; - protected final GroupImpl GROUP_DBA; - protected final GroupImpl GROUP_GUEST; - protected final GroupImpl GROUP_UNKNOWN; + final GroupImpl GROUP_DBA; + final GroupImpl 
GROUP_GUEST; + final GroupImpl GROUP_UNKNOWN; - private final static String DEFAULT_ADMIN_PASSWORD = ""; - private final static String DEFAULT_GUEST_PASSWORD = "guest"; + private static final String DEFAULT_ADMIN_PASSWORD = ""; + private static final String DEFAULT_GUEST_PASSWORD = "guest"; //@ConfigurationFieldAsElement("allow-guest-authentication") public boolean allowGuestAuthentication = true; @@ -94,55 +88,44 @@ protected RealmImpl(final DBBroker broker, final SecurityManagerImpl sm, final C super(sm, config); - sm.lastUserId = INITIAL_LAST_ACCOUNT_ID; //TODO this is horrible! - sm.lastGroupId = INITIAL_LAST_GROUP_ID; //TODO this is horrible! - //DBA group GROUP_DBA = new GroupImpl(broker, this, DBA_GROUP_ID, SecurityManager.DBA_GROUP); - GROUP_DBA.setManagers(new ArrayList>(){ - { add(new ReferenceImpl<>(sm, "getAccount", SecurityManager.DBA_USER)); } - }); + GROUP_DBA.setManagers(new ArrayList<>(Arrays.asList(new ReferenceImpl<>(sm, "getAccount", SecurityManager.DBA_USER)))); GROUP_DBA.setMetadataValue(EXistSchemaType.DESCRIPTION, "Database Administrators"); - sm.addGroup(GROUP_DBA.getId(), GROUP_DBA); + sm.registerGroup(GROUP_DBA); registerGroup(GROUP_DBA); - //sm.groupsById.put(GROUP_DBA.getId(), GROUP_DBA); - //groupsByName.put(GROUP_DBA.getName(), GROUP_DBA); - + //System account ACCOUNT_SYSTEM = new AccountImpl(broker, this, SYSTEM_ACCOUNT_ID, SecurityManager.SYSTEM, "", GROUP_DBA, true); ACCOUNT_SYSTEM.setMetadataValue(AXSchemaType.FULLNAME, SecurityManager.SYSTEM); ACCOUNT_SYSTEM.setMetadataValue(EXistSchemaType.DESCRIPTION, "System Internals"); - sm.addUser(ACCOUNT_SYSTEM.getId(), ACCOUNT_SYSTEM); + sm.registerAccount(ACCOUNT_SYSTEM); registerAccount(ACCOUNT_SYSTEM); - //sm.usersById.put(ACCOUNT_SYSTEM.getId(), ACCOUNT_SYSTEM); - //usersByName.put(ACCOUNT_SYSTEM.getName(), ACCOUNT_SYSTEM); - + //guest group GROUP_GUEST = new GroupImpl(broker, this, GUEST_GROUP_ID, SecurityManager.GUEST_GROUP); GROUP_GUEST.setManagers(new ArrayList>(){ { add(new 
ReferenceImpl<>(sm, "getAccount", SecurityManager.DBA_USER)); } }); GROUP_GUEST.setMetadataValue(EXistSchemaType.DESCRIPTION, "Anonymous Users"); - sm.addGroup(GROUP_GUEST.getId(), GROUP_GUEST); + sm.registerGroup(GROUP_GUEST); registerGroup(GROUP_GUEST); - //sm.groupsById.put(GROUP_GUEST.getId(), GROUP_GUEST); - //groupsByName.put(GROUP_GUEST.getName(), GROUP_GUEST); - + //unknown account and group GROUP_UNKNOWN = new GroupImpl(broker, this, UNKNOWN_GROUP_ID, ""); - ACCOUNT_UNKNOWN = new AccountImpl(broker, this, UNKNOWN_ACCOUNT_ID, "", (String)null, GROUP_UNKNOWN); + ACCOUNT_UNKNOWN = new AccountImpl(broker, this, UNKNOWN_ACCOUNT_ID, "", null, GROUP_UNKNOWN); //XXX: GROUP_DBA._addManager(ACCOUNT_ADMIN); //XXX: GROUP_GUEST._addManager(ACCOUNT_ADMIN); } @Override - public void start(final DBBroker broker) throws EXistException { - super.start(broker); + public void start(final DBBroker broker, final Txn transaction) throws EXistException { + super.start(broker, transaction); try { createAdminAndGuestIfNotExist(broker); } catch(final PermissionDeniedException pde) { - final boolean exportOnly = (Boolean) broker.getConfiguration().getProperty(BrokerPool.PROPERTY_EXPORT_ONLY, false); + final boolean exportOnly = broker.getConfiguration().getProperty(BrokerPool.PROPERTY_EXPORT_ONLY, false); if(!exportOnly) { throw new EXistException(pde.getMessage(), pde); } @@ -153,7 +136,6 @@ private void createAdminAndGuestIfNotExist(final DBBroker broker) throws EXistEx //Admin account if(getSecurityManager().getAccount(ADMIN_ACCOUNT_ID) == null) { - //AccountImpl actAdmin = new AccountImpl(broker, this, ADMIN_ACCOUNT_ID, SecurityManager.DBA_USER, "", GROUP_DBA, true); final UserAider actAdmin = new UserAider(ADMIN_ACCOUNT_ID, getId(), SecurityManager.DBA_USER); actAdmin.setPassword(DEFAULT_ADMIN_PASSWORD); actAdmin.setMetadataValue(AXSchemaType.FULLNAME, SecurityManager.DBA_USER); @@ -164,7 +146,6 @@ private void createAdminAndGuestIfNotExist(final DBBroker broker) throws EXistEx 
//Guest account if(getSecurityManager().getAccount(GUEST_ACCOUNT_ID) == null) { - //AccountImpl actGuest = new AccountImpl(broker, this, GUEST_ACCOUNT_ID, SecurityManager.GUEST_USER, SecurityManager.GUEST_USER, GROUP_GUEST, false); final UserAider actGuest = new UserAider(GUEST_ACCOUNT_ID, getId(), SecurityManager.GUEST_USER); actGuest.setMetadataValue(AXSchemaType.FULLNAME, SecurityManager.GUEST_USER); actGuest.setMetadataValue(EXistSchemaType.DESCRIPTION, "Anonymous User"); @@ -185,7 +166,7 @@ public boolean deleteAccount(final Account account) throws PermissionDeniedExcep return false; } - usersByName.modify2E(principalDb -> { + usersByName.write2E(principalDb -> { final AbstractAccount remove_account = (AbstractAccount)principalDb.get(account.getName()); if(remove_account == null){ throw new IllegalArgumentException("No such account exists!"); @@ -195,22 +176,21 @@ public boolean deleteAccount(final Account account) throws PermissionDeniedExcep final Account user = broker.getCurrentSubject(); if(!(account.getName().equals(user.getName()) || user.hasDbaRole()) ) { - throw new PermissionDeniedException("You are not allowed to delete '" +account.getName() + "' user"); + throw new PermissionDeniedException("You are not allowed to delete '" + account.getName() + "' user"); } remove_account.setRemoved(true); remove_account.setCollection(broker, collectionRemovedAccounts, XmldbURI.create(UUIDGenerator.getUUID()+".xml")); - final TransactionManager transaction = getDatabase().getTransactionManager(); - try(final Txn txn = transaction.beginTransaction()) { - collectionAccounts.removeXMLResource(txn, broker, XmldbURI.create( remove_account.getName() + ".xml")); + try(final Txn txn = broker.continueOrBeginTransaction()) { + collectionAccounts.removeXMLResource(txn, broker, XmldbURI.create( remove_account.getName() + ".xml")); - transaction.commit(txn); - } catch(final Exception e) { - LOG.warn(e.getMessage(), e); - } + txn.commit(); + } catch(final Exception e) { + 
LOG.warn(e.getMessage(), e); + } - getSecurityManager().addUser(remove_account.getId(), remove_account); + getSecurityManager().registerAccount(remove_account); principalDb.remove(remove_account.getName()); } }); @@ -223,10 +203,10 @@ public boolean deleteGroup(final Group group) throws PermissionDeniedException, if(group == null) {return false;} - groupsByName.modify2E(principalDb -> { + groupsByName.write2E(principalDb -> { final AbstractPrincipal remove_group = (AbstractPrincipal)principalDb.get(group.getName()); if(remove_group == null) - {throw new IllegalArgumentException("Group does '"+group.getName()+"' not exist!");} + {throw new IllegalArgumentException("Group does '" + group.getName() + "' not exist!");} final DBBroker broker = getDatabase().getActiveBroker(); final Subject subject = broker.getCurrentSubject(); @@ -235,18 +215,16 @@ public boolean deleteGroup(final Group group) throws PermissionDeniedException, remove_group.setRemoved(true); remove_group.setCollection(broker, collectionRemovedGroups, XmldbURI.create(UUIDGenerator.getUUID() + ".xml")); - - final TransactionManager transaction = getDatabase().getTransactionManager(); - try(final Txn txn = transaction.beginTransaction()) { + try(final Txn txn = broker.continueOrBeginTransaction()) { collectionGroups.removeXMLResource(txn, broker, XmldbURI.create(remove_group.getName() + ".xml" )); - transaction.commit(txn); + txn.commit(); } catch (final Exception e) { LOG.warn(e.getMessage(), e); } - getSecurityManager().addGroup(remove_group.getId(), (Group)remove_group); + getSecurityManager().registerGroup((Group)remove_group); principalDb.remove(remove_group.getName()); }); @@ -323,18 +301,18 @@ public List findAllGroupMembers(final String groupName) { principalDb.values() .stream() .filter(account -> account.hasGroup(groupName)) - .map(account -> account.getName()) + .map(Principal::getName) .collect(Collectors.toList()) ); } @Override public List findUsernamesWhereNameStarts(final String startsWith) 
{ - return Collections.EMPTY_LIST; //TODO at present exist users cannot have personal name details + return Collections.emptyList(); //TODO at present exist users cannot have personal name details, used in LDAP realm } @Override public List findUsernamesWhereNamePartStarts(final String startsWith) { - return Collections.EMPTY_LIST; //TODO at present exist users cannot have personal name details + return Collections.emptyList(); //TODO at present exist users cannot have personal name details, used in LDAP realm } -} \ No newline at end of file +} diff --git a/src/org/exist/security/internal/SMEvents.java b/src/org/exist/security/internal/SMEvents.java index 3678631a2cf..1bd1553f896 100644 --- a/src/org/exist/security/internal/SMEvents.java +++ b/src/org/exist/security/internal/SMEvents.java @@ -33,6 +33,7 @@ import org.exist.config.annotation.ConfigurationFieldAsElement; import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.dom.persistent.NodeSet; import org.exist.dom.QName; import org.exist.security.PermissionDeniedException; @@ -153,20 +154,15 @@ protected void runScript(Subject subject, String scriptURI, String script, QName private Source getQuerySource(DBBroker broker, String scriptURI, String script) { if(scriptURI != null) { - DocumentImpl resource = null; - try { - final XmldbURI pathUri = XmldbURI.create(scriptURI); - - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); - if (resource != null) - {return new DBSource(broker, (BinaryDocument)resource, true);} - + + final XmldbURI pathUri = XmldbURI.create(scriptURI); + try(final LockedDocument lockedResource = broker.getXMLResource(pathUri, LockMode.READ_LOCK)) { + if (lockedResource != null) { + return new DBSource(broker, (BinaryDocument)lockedResource.getDocument(), true); + } } catch (final PermissionDeniedException e) { //XXX: log e.printStackTrace(); - } finally { - if(resource != null) 
- {resource.getUpdateLock().release(LockMode.READ_LOCK);} } // try { diff --git a/src/org/exist/security/internal/SecurityManagerImpl.java b/src/org/exist/security/internal/SecurityManagerImpl.java index 034ea444989..9df3b6118cb 100644 --- a/src/org/exist/security/internal/SecurityManagerImpl.java +++ b/src/org/exist/security/internal/SecurityManagerImpl.java @@ -1,6 +1,6 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2001-2015 The eXist Project + * Copyright (C) 2001-2016 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or @@ -19,21 +19,20 @@ */ package org.exist.security.internal; +import com.evolvedbinary.j8fu.lazy.AtomicLazyVal; +import net.jcip.annotations.ThreadSafe; import org.exist.scheduler.JobDescription; import org.exist.security.AbstractRealm; import java.util.ArrayList; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; -import java.util.concurrent.locks.Lock; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; -import java.util.function.Consumer; -import java.util.function.Function; +import java.util.function.BiFunction; import java.util.stream.Collectors; import org.apache.logging.log4j.LogManager; @@ -62,13 +61,15 @@ import org.exist.storage.BrokerPoolService; import org.exist.storage.BrokerPoolServiceException; import org.exist.storage.DBBroker; -import org.exist.storage.txn.TransactionManager; +import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.Txn; +import org.exist.util.ConcurrentValueWrapper; +import org.exist.util.WeakLazyStripes; import org.exist.util.hashtable.Int2ObjectHashMap; import 
org.exist.xmldb.XmldbURI; import org.quartz.JobDataMap; import org.quartz.JobExecutionContext; -import org.quartz.JobExecutionException; import org.quartz.SimpleTrigger; /** @@ -86,35 +87,33 @@ @ConfigurationClass("security-manager") public class SecurityManagerImpl implements SecurityManager, BrokerPoolService { + private static final Logger LOG = LogManager.getLogger(SecurityManager.class); - public final static int MAX_USER_ID = 1048571; //1 less than RealmImpl.UNKNOWN_ACCOUNT_ID - public final static int MAX_GROUP_ID = 1048572; //1 less than RealmImpl.UNKNOWN_GROUP_ID - - public final static Logger LOG = LogManager.getLogger(SecurityManager.class); - - private Database db; - - protected PrincipalDbById groupsById = new PrincipalDbById<>(); - protected PrincipalDbById usersById = new PrincipalDbById<>(); + public static final int MAX_USER_ID = 1048571; //1 less than RealmImpl.UNKNOWN_ACCOUNT_ID + public static final int MAX_GROUP_ID = 1048572; //1 less than RealmImpl.UNKNOWN_GROUP_ID + static final int INITIAL_LAST_ACCOUNT_ID = 10; + static final int INITIAL_LAST_GROUP_ID = 10; + private final PrincipalDbById groupsById = new PrincipalDbById<>(INITIAL_LAST_GROUP_ID); + private final PrincipalDbById usersById = new PrincipalDbById<>(INITIAL_LAST_ACCOUNT_ID); private final PrincipalLocks accountLocks = new PrincipalLocks<>(); private final PrincipalLocks groupLocks = new PrincipalLocks<>(); + private final SessionDb sessions = new SessionDb(); - //TODO: validate & remove if session timeout - private SessionDb sessions = new SessionDb(); + private Database db; - @ConfigurationFieldAsAttribute("last-account-id") - protected int lastUserId = 0; + private AtomicLazyVal systemSubject; + private AtomicLazyVal guestSubject; - @ConfigurationFieldAsAttribute("last-group-id") - protected int lastGroupId = 0; + private final Map saving = new ConcurrentHashMap<>(); @ConfigurationFieldAsAttribute("version") - private String version = "2.0"; + @SuppressWarnings("unused") + 
private String version = "2.1"; @ConfigurationFieldAsElement("authentication-entry-point") - public final static String authenticationEntryPoint = "/authentication/login"; - + private static final String authenticationEntryPoint = "/authentication/login"; + private RealmImpl defaultRealm; @ConfigurationFieldAsElement("realm") @@ -138,25 +137,27 @@ public void prepare(final BrokerPool brokerPool) throws BrokerPoolServiceExcepti try { this.defaultRealm = new RealmImpl(null, this, null); realms.add(defaultRealm); + this.systemSubject = new AtomicLazyVal<>(() -> new SubjectAccreditedImpl(defaultRealm.ACCOUNT_SYSTEM, this)); + this.guestSubject = new AtomicLazyVal<>(() -> new SubjectAccreditedImpl((AccountImpl) defaultRealm.getAccount(SecurityManager.GUEST_USER), this)); } catch(final EXistException e) { throw new BrokerPoolServiceException(e); } } @Override - public void startSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + public void startSystem(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { try { - attach(systemBroker); + attach(systemBroker, transaction); } catch(final EXistException e) { throw new BrokerPoolServiceException(e); } } @Override - public void startPreMultiUserSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + public void startPreMultiUserSystem(final DBBroker systemBroker, final Txn transaction) { final Properties params = new Properties(); params.put(getClass().getName(), this); - db.getScheduler().createPeriodicJob(TIMEOUT_CHECK_PERIOD, new SessionsCheck(), TIMEOUT_CHECK_PERIOD, params, SimpleTrigger.REPEAT_INDEFINITELY, false); + db.getScheduler().createPeriodicJob(SessionsCheck.TIMEOUT_CHECK_PERIOD, new SessionsCheck(), SessionsCheck.TIMEOUT_CHECK_PERIOD, params, SimpleTrigger.REPEAT_INDEFINITELY, false); } /** @@ -165,61 +166,50 @@ public void startPreMultiUserSystem(final DBBroker systemBroker) throws BrokerPo * Checks if the file users.xml exists in the 
system collection of the database. * If not, it is created with two default users: admin and guest. * - * @param broker + * @param broker the database broker */ @Override - public void attach(final DBBroker broker) throws EXistException { - //groups = new Int2ObjectHashMap(65); - //users = new Int2ObjectHashMap(65); - + public void attach(final DBBroker broker, final Txn transaction) throws EXistException { db = broker.getDatabase(); //TODO: check that db is same? - final TransactionManager transaction = db.getTransactionManager(); - Collection systemCollection = null; - try(final Txn txn = transaction.beginTransaction()) { + try { systemCollection = broker.getCollection(XmldbURI.SYSTEM_COLLECTION_URI); if(systemCollection == null) { - systemCollection = broker.getOrCreateCollection(txn, XmldbURI.SYSTEM_COLLECTION_URI); + systemCollection = broker.getOrCreateCollection(transaction, XmldbURI.SYSTEM_COLLECTION_URI); if (systemCollection == null) { return; } - systemCollection.setPermissions(Permission.DEFAULT_SYSTEM_COLLECTION_PERM); - broker.saveCollection(txn, systemCollection); + systemCollection.setPermissions(broker, Permission.DEFAULT_SYSTEM_COLLECTION_PERM); + broker.saveCollection(transaction, systemCollection); } - transaction.commit(txn); } catch (final Exception e) { - e.printStackTrace(); - LOG.debug("loading acl failed: " + e.getMessage()); + LOG.error("Setting /db/system permissions failed: " + e.getMessage(), e); } - try(final Txn txn = transaction.beginTransaction()) { + try { collection = broker.getCollection(SECURITY_COLLECTION_URI); if (collection == null) { - collection = broker.getOrCreateCollection(txn, SECURITY_COLLECTION_URI); + collection = broker.getOrCreateCollection(transaction, SECURITY_COLLECTION_URI); if (collection == null) { + LOG.error("Collection '/db/system/security' can't be created. 
Database may be corrupt!"); return; } - //if db corrupted it can lead to unrunnable issue - //throw new ConfigurationException("Collection '/db/system/security' can't be created."); - - collection.setPermissions(Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); - broker.saveCollection(txn, collection); + collection.setPermissions(broker, Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); + broker.saveCollection(transaction, collection); } - transaction.commit(txn); } catch (final Exception e) { e.printStackTrace(); - LOG.debug("loading configuration failed: " + e.getMessage()); + LOG.error("Loading security configuration failed: " + e.getMessage(), e); } final Configuration _config_ = Configurator.parse(this, broker, collection, CONFIG_FILE_URI); configuration = Configurator.configure(this, _config_); - for (final Realm realm : realms) { - realm.start(broker); + realm.start(broker, transaction); } } @@ -233,12 +223,8 @@ public boolean updateAccount(final Account account) throws PermissionDeniedExcep throw new ConfigurationException("Account must have realm id."); } - final Lock lock = accountLocks.getWriteLock(account); - lock.lock(); - try { + try (final ManagedLock lock = ManagedLock.acquire(accountLocks.getLock(account), LockMode.WRITE_LOCK)) { return findRealmForRealmId(account.getRealmId()).updateAccount(account); - } finally { - lock.unlock(); } } @@ -252,12 +238,8 @@ public boolean updateGroup(final Group group) throws PermissionDeniedException, throw new ConfigurationException("Group must have realm id."); } - final Lock lock = groupLocks.getWriteLock(group); - lock.lock(); - try { + try (final ManagedLock lock = ManagedLock.acquire(groupLocks.getLock(group), LockMode.WRITE_LOCK)) { return findRealmForRealmId(group.getRealmId()).updateGroup(group); - } finally { - lock.unlock(); } } @@ -272,12 +254,8 @@ public boolean deleteGroup(final String name) throws PermissionDeniedException, throw new ConfigurationException("Group must have realm id."); } - 
final Lock lock = groupLocks.getWriteLock(group); - lock.lock(); - try { + try (final ManagedLock lock = ManagedLock.acquire(groupLocks.getLock(group), LockMode.WRITE_LOCK)) { return findRealmForRealmId(group.getRealmId()).deleteGroup(group); - } finally { - lock.unlock(); } } @@ -296,29 +274,23 @@ public boolean deleteAccount(final Account account) throws PermissionDeniedExcep throw new ConfigurationException("Account must have realm id."); } - final Lock lock = accountLocks.getWriteLock(account); - lock.lock(); - try { + try (final ManagedLock lock = ManagedLock.acquire(accountLocks.getLock(account), LockMode.WRITE_LOCK)) { return findRealmForRealmId(account.getRealmId()).deleteAccount(account); - } finally { - lock.unlock(); } } @Override public Account getAccount(final String name) { -// if (SYSTEM.equals(name)) { -// return defaultRealm.ACCOUNT_SYSTEM; -// } - for(final Realm realm : realms) { final Account account = realm.getAccount(name); if (account != null) { return account; } } - - LOG.debug("Account for '" + name + "' not found!"); + + if (LOG.isDebugEnabled()) { + LOG.debug("Account for '" + name + "' not found!"); + } return null; } @@ -360,12 +332,8 @@ public final Group getGroup(final int id) { @Override public boolean hasAdminPrivileges(final Account user) { - final Lock lock = accountLocks.getReadLock(user); - lock.lock(); - try { + try (final ManagedLock lock = ManagedLock.acquire(accountLocks.getLock(user), LockMode.READ_LOCK)) { return user.hasDbaRole(); - } finally { - lock.unlock(); } } @@ -382,7 +350,7 @@ public boolean hasAccount(final String name) { @Override public Subject authenticate(final String username, final Object credentials) throws AuthenticationException { if (LOG.isDebugEnabled()) { - LOG.debug("Authentication try for '"+username+"'."); + LOG.debug("Authentication try for '" + username + "'."); } if (username == null) { @@ -403,7 +371,7 @@ public Subject authenticate(final String username, final Object credentials) thr } final 
Subject subject = sessions.read(db1 -> { - final Session session = db1.get(credentials); + final Session session = db1.get(credentials.toString()); if (session == null) { return null; } @@ -462,32 +430,15 @@ public Subject authenticate(final String username, final Object credentials) thr "Account [" + username + "] not found" ); } - - protected Subject systemSubject = null; - protected Subject guestSubject = null; @Override public Subject getSystemSubject() { - if (systemSubject == null) { - synchronized (this) { - if (systemSubject == null) { - systemSubject = new SubjectAccreditedImpl(defaultRealm.ACCOUNT_SYSTEM, this); - } - } - } - return systemSubject; + return systemSubject.get(); } @Override public Subject getGuestSubject() { - if (guestSubject == null) { - synchronized (this) { - if (guestSubject == null) { - guestSubject = new SubjectAccreditedImpl((AccountImpl) defaultRealm.getAccount(SecurityManager.GUEST_USER), this); - } - } - } - return guestSubject; + return guestSubject.get(); } @Override @@ -505,18 +456,22 @@ public Database database() { return db; } - private synchronized int getNextGroupId() { - if(lastGroupId + 1 == MAX_GROUP_ID) { - throw new RuntimeException("System has no more group-ids available"); - } - return ++lastGroupId; + /** + * For internal testing use only! + * + * @return The last group id + */ + int getLastGroupId() { + return groupsById.getCurrentPrincipalId(); } - private synchronized int getNextAccountId() { - if(lastUserId +1 == MAX_USER_ID) { - throw new RuntimeException("System has no more user-ids available"); - } - return ++lastUserId; + /** + * For internal testing use only! 
+ * + * @return The last account id + */ + int getLastAccountId() { + return usersById.getCurrentPrincipalId(); } @Override @@ -573,7 +528,7 @@ public Group addGroup(final DBBroker broker, final Group group) throws Permissio if(group.getId() != Group.UNDEFINED_ID) { id = group.getId(); } else { - id = getNextGroupId(); + id = groupsById.getNextPrincipalId(); } final AbstractRealm registeredRealm = (AbstractRealm)findRealmForRealmId(group.getRealmId()); @@ -587,24 +542,18 @@ public Group addGroup(final DBBroker broker, final Group group) throws Permissio newGroup.setMetadataValue(metadataKey, metadataValue); } - final Lock lock = groupLocks.getWriteLock(newGroup); - lock.lock(); - try { - groupsById.modify(principalDb -> principalDb.put(id, newGroup)); - + try(final ManagedLock lock = ManagedLock.acquire(groupLocks.getLock(newGroup), LockMode.WRITE_LOCK)) { + registerGroup(newGroup); registeredRealm.registerGroup(newGroup); - save(broker); newGroup.save(broker); return newGroup; - } finally { - lock.unlock(); } } @Override - public final Account addAccount(final Account account) throws PermissionDeniedException, EXistException { + public final Account addAccount(final Account account) throws PermissionDeniedException, EXistException{ try(final DBBroker broker = db.getBroker()) { return addAccount(broker, account); } @@ -624,38 +573,19 @@ public final Account addAccount(final DBBroker broker, final Account account) th if(account.getId() != Account.UNDEFINED_ID) { id = account.getId(); } else { - id = getNextAccountId(); + id = usersById.getNextPrincipalId(); } final AbstractRealm registeredRealm = (AbstractRealm) findRealmForRealmId(account.getRealmId()); final AccountImpl newAccount = new AccountImpl(broker, registeredRealm, id, account); - final Lock lock = accountLocks.getWriteLock(newAccount); - lock.lock(); - try { - usersById.modify(principalDb -> principalDb.put(id, newAccount)); - + try (final ManagedLock lock = 
ManagedLock.acquire(accountLocks.getLock(newAccount), LockMode.WRITE_LOCK)) { + registerAccount(newAccount); registeredRealm.registerAccount(newAccount); - //XXX: one transaction? - save(broker); newAccount.save(broker); return newAccount; - } finally { - lock.unlock(); - } - } - - private void save() throws PermissionDeniedException, EXistException { - if (configuration != null) { - configuration.save(); - } - } - - private void save(final DBBroker broker) throws PermissionDeniedException, EXistException { - if (configuration != null) { - configuration.save(broker); } } @@ -670,15 +600,10 @@ public Configuration getConfiguration() { } //Session management part - - public final static long TIMEOUT_CHECK_PERIOD = 20000; //20 sec - public static class SessionsCheck implements JobDescription, org.quartz.Job { + public static final long TIMEOUT_CHECK_PERIOD = 20000; //20 sec - boolean firstRun = true; - - public SessionsCheck() {} - + @Override public String getGroup() { return "eXist.Security"; } @@ -689,11 +614,11 @@ public String getName() { } @Override - public void setName(String name) { + public void setName(final String name) { } @Override - public final void execute(final JobExecutionContext jec) throws JobExecutionException { + public final void execute(final JobExecutionContext jec) { final JobDataMap jobDataMap = jec.getJobDetail().getJobDataMap(); final Properties params = (Properties) jobDataMap.get("params"); @@ -706,27 +631,19 @@ public final void execute(final JobExecutionContext jec) throws JobExecutionExce return; } - sm.sessions.modify(db -> { - final Iterator> it = db.entrySet().iterator(); - while (it.hasNext()) { - final Map.Entry entry = it.next(); - if (entry == null || !entry.getValue().isValid()) { - it.remove(); - } - } - }); + sm.sessions.write(db -> db.entrySet().removeIf(entry -> entry == null || !entry.getValue().isValid())); } } @Override public void registerSession(final Session session) { - sessions.modify(db -> db.put(session.getId(), 
session)); + sessions.write(db -> db.put(session.getId(), session)); } @Override - public Subject getSubjectBySessionId(String sessionId) { + public Subject getSubjectBySessionId(final String sessionId) { return sessions.read(db -> { - Session session = db.get(sessionId); + final Session session = db.get(sessionId); if (session != null) { return session.getSubject(); } @@ -743,15 +660,45 @@ private Realm findRealmForRealmId(final String realmId) throws ConfigurationExce } throw new ConfigurationException("Realm id = '" + realmId + "' not found."); } - + + /** + * Register mapping id to group. + * + * @param group thr group. + */ @Override - public void addGroup(final int id, final Group group) { - groupsById.modify(principalDb -> principalDb.put(id, group)); + public void registerGroup(final Group group) { + groupsById.update((principalDb, principalId) -> { + final int id = group.getId(); + + principalDb.put(id, group); + + if (id < MAX_GROUP_ID) { + return Math.max(principalId, id); + } else { + return principalId; + } + }); } + /** + * Register mapping id to account. + * + * @param account the account. 
+ */ @Override - public void addUser(final int id, final Account account) { - usersById.modify(principalDb -> principalDb.put(id, account)); + public void registerAccount(final Account account) { + usersById.update((principalDb, principalId) -> { + final int id = account.getId(); + + principalDb.put(id, account); + + if (id < MAX_USER_ID) { + return Math.max(principalId, id); + } else { + return principalId; + } + }); } @Override @@ -827,10 +774,8 @@ public List findAllUserNames() { return userNames; } - private Map saving = new HashMap<>(); - @Override - public void processPramatterBeforeSave(final DBBroker broker, final DocumentImpl document) throws ConfigurationException { + public void processParameterBeforeSave(final DBBroker broker, final DocumentImpl document) { XmldbURI uri = document.getCollection().getURI(); final boolean isRemoved = uri.endsWith(SecurityManager.REMOVED_COLLECTION_URI); @@ -852,11 +797,9 @@ public void processPramatterBeforeSave(final DBBroker broker, final DocumentImpl } @Override - public void processPramatter(DBBroker broker, DocumentImpl document) throws ConfigurationException { + public void processParameter(final DBBroker broker, final DocumentImpl document) throws ConfigurationException { XmldbURI uri = document.getCollection().getURI(); - - //System.out.println(document); final boolean isRemoved = uri.endsWith(SecurityManager.REMOVED_COLLECTION_URI); if(isRemoved) { @@ -885,7 +828,7 @@ public void processPramatter(DBBroker broker, DocumentImpl document) throws Conf if (isRemoved && id > 2 && !hasUser(id)) { final AccountImpl account = new AccountImpl( realm, conf ); account.removed = true; - addUser(account.getId(), account); + registerAccount(account); } else if(name != null) { if (realm.hasAccount(name)) { final Integer oldId = saving.get(document.getURI()); @@ -896,38 +839,35 @@ public void processPramatter(DBBroker broker, DocumentImpl document) throws Conf if (!newId.equals(oldId)) { final Account current = 
realm.getAccount(name); - accountLocks.getWriteLock(current).lock(); - try { - usersById.modify(principalDb -> { + try (final ManagedLock lock = ManagedLock.acquire(accountLocks.getLock(current), LockMode.WRITE_LOCK)) { + usersById.write(principalDb -> { principalDb.remove(oldId); principalDb.put(newId, current); }); - } finally { - accountLocks.getWriteLock(current).unlock(); } } } else { final Account account = new AccountImpl( realm, conf ); - addUser(account.getId(), account); + registerAccount(account); realm.registerAccount(account); } } else { //this can't be! log any way - LOG.error("Account '"+name+"' pressent at '"+realmId+"' realm, but get event that new one created."); + LOG.error("Account '" + name + "' already exists in realm: '" + realmId + "', but received notification that a new one was created."); } } else if(isGroup) { if (isRemoved && id > 2 && !hasGroup(id)) { final GroupImpl group = new GroupImpl( realm, conf ); group.removed = true; - addGroup(group.getId(), group); + registerGroup(group); } else if (name != null && !realm.hasGroup(name)) { final GroupImpl group = new GroupImpl( realm, conf ); - addGroup(group.getId(), group); + registerGroup(group); realm.registerGroup(group); } else { //this can't be! 
log any way - LOG.error("Group '"+name+"' pressent at '"+realmId+"' realm, but get event that new one created."); + LOG.error("Group '" + name + "' already exists in realm: '" + realmId + "', but received notification that a new one was created."); } } @@ -937,78 +877,57 @@ public void processPramatter(DBBroker broker, DocumentImpl document) throws Conf @Override public String getAuthenticationEntryPoint() { - return authenticationEntryPoint; + return authenticationEntryPoint; } + @ThreadSafe private static class PrincipalLocks { - private final Map locks = new HashMap<>(); - - private synchronized ReentrantReadWriteLock getLock(final T principal) { - ReentrantReadWriteLock lock = locks.get(principal.getId()); - if(lock == null) { - lock = new ReentrantReadWriteLock(); - locks.put(principal.getId(), lock); - } - return lock; - } + private final WeakLazyStripes lockStripes = new WeakLazyStripes<>(id -> new ReentrantReadWriteLock()); - public ReadLock getReadLock(T principal) { - return getLock(principal).readLock(); + public ReadWriteLock getLock(final T principal) { + return lockStripes.get(principal.getId()); } + } - public WriteLock getWriteLock(T principal) { - return getLock(principal).writeLock(); + @ThreadSafe + private static class SessionDb extends ConcurrentValueWrapper> { + public SessionDb() { + super(new HashMap<>()); } } - - protected static class SessionDb { - private final Map db = new HashMap<>(); - private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - private final ReadLock readLock = lock.readLock(); - private final WriteLock writeLock = lock.writeLock(); - - public R read(final Function, R> readFn) { - readLock.lock(); - try { - return readFn.apply(db); - } finally { - readLock.unlock(); - } + + @ThreadSafe + private static class PrincipalDbById extends ConcurrentValueWrapper> { + private int principalId; + + public PrincipalDbById(final int initialLastId) { + super(new Int2ObjectHashMap<>(65)); + this.principalId = 
initialLastId; } - public final void modify(final Consumer> modifyFn) { - writeLock.lock(); - try { - modifyFn.accept(db); - } finally { - writeLock.unlock(); - } + public int getNextPrincipalId() { + return writeAndReturn(principalDb -> { + if(principalId + 1 >= MAX_GROUP_ID) { + throw new RuntimeException("System has no more ids available for principal type"); + } + return ++principalId; + }); } - } - - protected static class PrincipalDbById { - - private final Int2ObjectHashMap db = new Int2ObjectHashMap<>(65); - private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - private final ReadLock readLock = lock.readLock(); - private final WriteLock writeLock = lock.writeLock(); - public R read(final Function, R> readFn) { - readLock.lock(); - try { - return readFn.apply(db); - } finally { - readLock.unlock(); - } + private int getCurrentPrincipalId() { + return read(principalDb -> principalId); } - public final void modify(final Consumer> writeOp) { - writeLock.lock(); - try { - writeOp.accept(db); - } finally { - writeLock.unlock(); - } + /** + * Allows updates to the principal db, + * and principal id. + * + * @param updateFn A function which updates the principal db and returns a new principal id. 
+ */ + public void update(final BiFunction, Integer, Integer> updateFn) { + write(principalDb -> { + this.principalId = updateFn.apply(principalDb, principalId); + }); } } @@ -1018,16 +937,12 @@ public Subject getCurrentSubject() { } @Override - public final synchronized void preAllocateAccountId(final PrincipalIdReceiver receiver) throws PermissionDeniedException, EXistException { - final int id = getNextAccountId(); - save(); - receiver.allocate(id); + public final void preAllocateAccountId(final PrincipalIdReceiver receiver) { + receiver.allocate(usersById.getNextPrincipalId()); } @Override - public final synchronized void preAllocateGroupId(final PrincipalIdReceiver receiver) throws PermissionDeniedException, EXistException { - final int id = getNextGroupId(); - save(); - receiver.allocate(id); + public final void preAllocateGroupId(final PrincipalIdReceiver receiver) { + receiver.allocate(groupsById.getNextPrincipalId()); } } diff --git a/src/org/exist/security/internal/aider/SimpleACLPermissionAider.java b/src/org/exist/security/internal/aider/SimpleACLPermissionAider.java index e529b0fdcff..32933444f91 100644 --- a/src/org/exist/security/internal/aider/SimpleACLPermissionAider.java +++ b/src/org/exist/security/internal/aider/SimpleACLPermissionAider.java @@ -23,27 +23,27 @@ import java.util.ArrayList; import java.util.List; + import org.exist.security.ACLPermission; import org.exist.security.PermissionDeniedException; import org.exist.security.SimpleACLPermission; /** - * * @author Adam Retter */ public class SimpleACLPermissionAider extends UnixStylePermissionAider implements ACLPermission { - private final List aces = new ArrayList(); + private final List aces = new ArrayList<>(); public SimpleACLPermissionAider() { super(); } - public SimpleACLPermissionAider(int mode) { + public SimpleACLPermissionAider(final int mode) { super(mode); } - public SimpleACLPermissionAider(String user, String group, int mode) { + public SimpleACLPermissionAider(final String 
user, final String group, final int mode) { super(user, group, mode); } @@ -53,33 +53,60 @@ public short getVersion() { } @Override - public void addACE(ACE_ACCESS_TYPE access_type, ACE_TARGET target, String who, int mode) throws PermissionDeniedException { + public void addACE(final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final String who, final int mode) throws PermissionDeniedException { //TODO validate() aces.add(new ACEAider(access_type, target, who, mode)); } + @Override + public void addACE(final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final String name, final String modeStr) throws PermissionDeniedException { + addACE(access_type, target, name, modeStrToMode(modeStr)); + } + + @Override + public void insertACE(final int index, final ACE_ACCESS_TYPE access_type, final ACE_TARGET target, final String name, final String modeStr) throws PermissionDeniedException { + aces.add(index, new ACEAider(access_type, target, name, modeStrToMode(modeStr))); + } + + @Override + public void modifyACE(final int index, final ACE_ACCESS_TYPE access_type, final String modeStr) throws PermissionDeniedException { + modifyACE(index, access_type, modeStrToMode(modeStr)); + } + + @Override + public void modifyACE(final int index, final ACE_ACCESS_TYPE access_type, final int mode) throws PermissionDeniedException { + final ACEAider ace = aces.get(index); + ace.setAccessType(access_type); + ace.setMode(mode); + } + + @Override + public void removeACE(final int index) throws PermissionDeniedException { + aces.remove(index); + } + @Override public int getACECount() { return aces.size(); } @Override - public ACE_ACCESS_TYPE getACEAccessType(int index) { + public ACE_ACCESS_TYPE getACEAccessType(final int index) { return aces.get(index).getAccessType(); } @Override - public ACE_TARGET getACETarget(int index) { + public ACE_TARGET getACETarget(final int index) { return aces.get(index).getTarget(); } @Override - public String getACEWho(int index) { + public 
String getACEWho(final int index) { return aces.get(index).getWho(); } @Override - public int getACEMode(int index) { + public int getACEMode(final int index) { return aces.get(index).getMode(); } @@ -95,5 +122,51 @@ public boolean isCurrentSubjectCanWriteACL() { return true; } + @Override + public boolean aclEquals(final ACLPermission other) { + if (other == null) { + return false; + } + + if (getACECount() != other.getACECount()) { + return false; + } + + for (int i = 0; i < getACECount(); i++) { + if (getACEAccessType(i) != other.getACEAccessType(i) + || getACETarget(i) != other.getACETarget(i) + || (!getACEWho(i).equals(other.getACEWho(i))) + || getACEMode(i) != other.getACEMode(i)) { + return false; + } + } -} \ No newline at end of file + return true; + } + + private int modeStrToMode(final String modeStr) throws PermissionDeniedException { + if (modeStr == null || modeStr.length() == 0 || modeStr.length() > 3) { + throw new PermissionDeniedException("Invalid mode string '" + modeStr + "'"); + } + + int mode = 0; + for (final char c : modeStr.toCharArray()) { + switch (c) { + case READ_CHAR: + mode |= READ; + break; + case WRITE_CHAR: + mode |= WRITE; + break; + case EXECUTE_CHAR: + mode |= EXECUTE; + break; + case UNSET_CHAR: + break; + default: + throw new PermissionDeniedException("Unknown char '" + c + "' in mode string '" + modeStr + "'"); + } + } + return mode; + } +} diff --git a/src/org/exist/security/internal/aider/UnixStylePermissionAider.java b/src/org/exist/security/internal/aider/UnixStylePermissionAider.java index 7353c0d26ef..7bdd6c876c8 100644 --- a/src/org/exist/security/internal/aider/UnixStylePermissionAider.java +++ b/src/org/exist/security/internal/aider/UnixStylePermissionAider.java @@ -22,14 +22,10 @@ package org.exist.security.internal.aider; import java.io.IOException; -import org.exist.security.AbstractUnixStylePermission; -import org.exist.security.Group; +import org.exist.security.*; + import org.exist.security.SecurityManager; 
-import org.exist.security.Account; -import org.exist.security.Permission; -import org.exist.security.PermissionDeniedException; -import org.exist.security.Subject; import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; import org.exist.util.SyntaxException; @@ -382,4 +378,14 @@ public boolean isCurrentSubjectInGroup() { public boolean isCurrentSubjectInGroup(final int groupId) { throw new UnsupportedOperationException("Not supported yet."); } -} \ No newline at end of file + + @Override + public boolean isPosixChownRestricted() { + throw new UnsupportedOperationException("Not supported yet."); + } + + @Override + public Permission copy() { + throw new UnsupportedOperationException(); + } +} diff --git a/src/org/exist/security/realm/Realm.java b/src/org/exist/security/realm/Realm.java index 0431dcdd474..7ea710ad7d1 100644 --- a/src/org/exist/security/realm/Realm.java +++ b/src/org/exist/security/realm/Realm.java @@ -24,14 +24,12 @@ import java.util.Collection; import java.util.List; import org.exist.Database; -import org.exist.EXistException; import org.exist.LifeCycle; import org.exist.security.Account; import org.exist.security.Group; import org.exist.security.SecurityManager; import org.exist.security.management.AccountsManagement; import org.exist.security.management.GroupsManagement; -import org.exist.storage.DBBroker; /** * @author Dmitriy Shabanov @@ -39,27 +37,31 @@ */ public interface Realm extends AuthenticatingRealm, AuthorizingRealm, AccountsManagement, GroupsManagement, LifeCycle { - public String getId(); + String getId(); - public Collection getAccounts(); + Collection getAccounts(); - public Collection getGroups(); + Collection getGroups(); + + /** + * @deprecated Use {@link #getGroups()}. 
+ */ @Deprecated //use getGroups (remove after 1.6) - public Collection getRoles(); + Collection getRoles(); - public Database getDatabase(); + Database getDatabase(); - public Group getExternalGroup(final String name); + Group getExternalGroup(final String name); - public List findUsernamesWhereNameStarts(String startsWith); - public List findUsernamesWhereNamePartStarts(String startsWith); - public List findUsernamesWhereUsernameStarts(String startsWith); - public List findAllGroupNames(); - public List findAllGroupMembers(final String groupName); - public List findAllUserNames(); + List findUsernamesWhereNameStarts(String startsWith); + List findUsernamesWhereNamePartStarts(String startsWith); + List findUsernamesWhereUsernameStarts(String startsWith); + List findAllGroupNames(); + List findAllGroupMembers(final String groupName); + List findAllUserNames(); - public SecurityManager getSecurityManager(); + SecurityManager getSecurityManager(); - public Collection findGroupnamesWhereGroupnameStarts(String startsWith); - public Collection findGroupnamesWhereGroupnameContains(String fragment); + Collection findGroupnamesWhereGroupnameStarts(String startsWith); + Collection findGroupnamesWhereGroupnameContains(String fragment); } diff --git a/src/org/exist/security/utils/Utils.java b/src/org/exist/security/utils/Utils.java index e771ed4ace4..77b23c1474f 100644 --- a/src/org/exist/security/utils/Utils.java +++ b/src/org/exist/security/utils/Utils.java @@ -45,7 +45,7 @@ public static Collection createCollection(DBBroker broker, Txn txn, XmldbURI uri throw new IOException("Collection " + uri + " cannot be created."); } - collection.setPermissions(Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); + collection.setPermissions(broker, Permission.DEFAULT_SYSTEM_SECURITY_COLLECTION_PERM); broker.saveCollection(txn, collection); return collection; diff --git a/src/org/exist/source/DBSource.java b/src/org/exist/source/DBSource.java index a0fdc3adf1a..ccdc6935eba 100644 --- 
a/src/org/exist/source/DBSource.java +++ b/src/org/exist/source/DBSource.java @@ -26,6 +26,7 @@ import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.QName; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; import org.exist.security.Subject; @@ -86,26 +87,19 @@ public long getLastModified() { @Override public Validity isValid(final DBBroker broker) { - DocumentImpl d = null; Validity result; - try { - d = broker.getXMLResource(key, LockMode.READ_LOCK); - - if(d == null) { + try(final LockedDocument lockedDoc = broker.getXMLResource(key, LockMode.READ_LOCK);) { + if(lockedDoc == null) { result = Validity.INVALID; - } else if(d.getMetadata().getLastModified() > lastModified) { + } else if(lockedDoc.getDocument().getMetadata().getLastModified() > lastModified) { result = Validity.INVALID; } else { result = Validity.VALID; } } catch(final PermissionDeniedException pde) { result = Validity.INVALID; - } finally { - if(d != null) { - d.getUpdateLock().release(LockMode.READ_LOCK); - } } - + return result; } @@ -198,4 +192,4 @@ public void validate(final Subject subject, final int mode) throws PermissionDen public Permission getPermissions() { return doc.getPermissions(); } -} \ No newline at end of file +} diff --git a/src/org/exist/source/SourceFactory.java b/src/org/exist/source/SourceFactory.java index 49dd109c045..fbc8a93a2a2 100644 --- a/src/org/exist/source/SourceFactory.java +++ b/src/org/exist/source/SourceFactory.java @@ -21,7 +21,6 @@ */ package org.exist.source; -import java.io.FileNotFoundException; import java.io.IOException; import java.net.MalformedURLException; import java.net.URI; @@ -37,6 +36,7 @@ import org.exist.EXistException; import org.exist.dom.persistent.BinaryDocument; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import 
org.exist.security.PermissionDeniedException; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; @@ -45,6 +45,8 @@ import org.exist.xmldb.XmldbURI; import org.xml.sax.SAXException; +import javax.annotation.Nullable; + /** * Factory to create a {@link org.exist.source.Source} object for a given * URL. @@ -56,18 +58,22 @@ public class SourceFactory { private final static Logger LOG = LogManager.getLogger(SourceFactory.class); /** - * Create a {@link Source} object for the given URL. - *

+ * Create a {@link Source} object for the given resource URL. + * * As a special case, if the URL starts with "resource:", the resource * will be read from the current context class loader. * - * @param broker broker, can be null if not asking for a database resource - * @param contextPath - * @param location - * @throws MalformedURLException - * @throws IOException + * @param contextPath the context path of the resource. + * @param location the location of the resource (relative to the {@code contextPath}). + * @param checkXQEncoding where we need to check the encoding of the XQuery. + * + * @return The Source of the resource, or null if the resource cannot be found. + * + * @throws PermissionDeniedException if the resource resides in the database, + * but the calling user does not have permission to access it. + * @throws IOException if a general I/O error occurs whilst accessing the resource. */ - public static final Source getSource(DBBroker broker, String contextPath, String location, boolean checkXQEncoding) throws IOException, PermissionDeniedException { + public static @Nullable Source getSource(final DBBroker broker, final String contextPath, final String location, final boolean checkXQEncoding) throws IOException, PermissionDeniedException { Source source = null; /* resource: */ @@ -80,188 +86,250 @@ public static final Source getSource(DBBroker broker, String contextPath, String resolvedURL = resolvedURL.replaceFirst("file://", ClassLoaderSource.PROTOCOL); source = new ClassLoaderSource(resolvedURL); } - /* file:// or location without scheme (:/) is assumed to be a file */ - else if (location.startsWith("file:/") || !location.contains(":/")) { - location = location.replaceAll("^(file:)?/*(.*)$", "$2"); + /* xmldb */ + if (source == null + && (location.startsWith(XmldbURI.XMLDB_URI_PREFIX) + || (contextPath != null && contextPath.startsWith(XmldbURI.XMLDB_URI_PREFIX)))) { + + XmldbURI pathUri; try { - final Path p = Paths.get(contextPath, location); - if 
(Files.isReadable(p)) { - location = p.toUri().toASCIIString(); - source = new FileSource(p, checkXQEncoding); + if (contextPath == null) { + pathUri = XmldbURI.create(location); + } else { + pathUri = XmldbURI.create(contextPath).append(location); } - } catch (InvalidPathException e) { - // continue trying + } catch (final IllegalArgumentException e) { + // this is allowed if the location is already an absolute URI, below we will try using other schemes + pathUri = null; } - if (source == null) { - try { - final Path p2 = Paths.get(location); - if (Files.isReadable(p2)) { - location = p2.toUri().toASCIIString(); - source = new FileSource(p2, checkXQEncoding); - } - } catch (InvalidPathException e) { - // continue trying - } + if (pathUri != null) { + source = getSource_fromDb(broker, pathUri); } + } - if (source == null && contextPath != null) { - try { - final Path p3 = Paths.get(contextPath).toAbsolutePath().resolve(location); - if (Files.isReadable(p3)) { - location = p3.toUri().toASCIIString(); - source = new FileSource(p3, checkXQEncoding); - } - } catch (InvalidPathException e) { - // continue trying - } + /* /db */ + if (source == null + && ((location.startsWith("/db") && !Files.exists(Paths.get(firstPathSegment(location)))) + || (contextPath != null && contextPath.startsWith("/db") && !Files.exists(Paths.get(firstPathSegment(contextPath)))))) { + final XmldbURI pathUri; + if (contextPath == null) { + pathUri = XmldbURI.create(location); + } else { + pathUri = XmldbURI.create(contextPath).append(location); } + source = getSource_fromDb(broker, pathUri); + } - if (source == null) { - /* - * Try to load as an absolute path - */ - try { - final Path p4 = Paths.get("/" + location); - if (Files.isReadable(p4)) { - location = p4.toUri().toASCIIString(); - source = new FileSource(p4, checkXQEncoding); - } - } catch (InvalidPathException e) { - // continue trying - } + /* file:// or location without scheme (:/) is assumed to be a file */ + if (source == null + && 
(location.startsWith("file:/") + || !location.contains(":/"))) { + source = getSource_fromFile(contextPath, location, checkXQEncoding); + } + + /* final attempt - any other URL */ + if (source == null + && !( + location.startsWith(ClassLoaderSource.PROTOCOL) + || location.startsWith(XmldbURI.XMLDB_URI_PREFIX) + || location.startsWith("file:/")) + ) { + try { + final URL url = new URL(location); + source = new URLSource(url); + } catch (final MalformedURLException e) { + return null; } + } - if (source == null) { - /* - * Try to load from the folder of the contextPath - */ - try { - final Path p5 = Paths.get(contextPath).resolveSibling(location); - if (Files.isReadable(p5)) { - location = p5.toUri().toASCIIString(); - source = new FileSource(p5, checkXQEncoding); + return source; + } + + private static String firstPathSegment(final String path) { + return XmldbURI + .create(path) + .getPathSegments()[0] + .getRawCollectionPath(); + } + + /** + * Get the resource source from the database. + * + * @param broker The database broker. + * @param path The path to the resource in the database. + * + * @return the source, or null if there is no such resource in the db indicated by {@code path}. 
+ */ + private static @Nullable Source getSource_fromDb(final DBBroker broker, final XmldbURI path) throws PermissionDeniedException, IOException { + Source source = null; + try(final LockedDocument lockedResource = broker.getXMLResource(path, LockMode.READ_LOCK)) { + if (lockedResource != null) { + final DocumentImpl resource = lockedResource.getDocument(); + if (resource.getResourceType() == DocumentImpl.BINARY_FILE) { + source = new DBSource(broker, (BinaryDocument) resource, true); + } else { + try { + // XML document: serialize to string source so it can be read as a stream + // by fn:unparsed-text and friends + source = new StringSource(broker.getSerializer().serialize(resource)); + } catch (final SAXException e) { + throw new IOException(e.getMessage()); } - } catch (InvalidPathException e) { - // continue trying } } + } + return source; + } - if (source == null) { - /* - * Try to load from the parent folder of the contextPath URL - */ - try { - Path p6 = null; - if(contextPath.startsWith("file:/")) { - try { - p6 = Paths.get(new URI(contextPath)).resolveSibling(location); - } catch (final URISyntaxException e) { - // continue trying - } - } + /** + * Get the resource source from the filesystem. + * + * @param contextPath the context path of the resource. + * @param location the location of the resource (relative to the {@code contextPath}). + * @param checkXQEncoding where we need to check the encoding of the XQuery. + * + * @return the source, or null if there is no such resource in the db indicated by {@code path}. 
+ */ + private static @Nullable Source getSource_fromFile(final String contextPath, final String location, final boolean checkXQEncoding) { + String locationPath = location.replaceAll("^(file:)?/*(.*)$", "$2"); - if(p6 == null) { - p6 = Paths.get(contextPath.replaceFirst("^file:/*(/.*)$", "$1")).resolveSibling(location); - } + Source source = null; + try { + final Path p = Paths.get(contextPath, locationPath); + if (Files.isReadable(p)) { + locationPath = p.toUri().toASCIIString(); + source = new FileSource(p, checkXQEncoding); + } + } catch (final InvalidPathException e) { + // continue trying + } - if (Files.isReadable(p6)) { - location = p6.toUri().toASCIIString(); - source = new FileSource(p6, checkXQEncoding); - } - } catch (InvalidPathException e) { - // continue trying + if (source == null) { + try { + final Path p2 = Paths.get(locationPath); + if (Files.isReadable(p2)) { + locationPath = p2.toUri().toASCIIString(); + source = new FileSource(p2, checkXQEncoding); } + } catch (final InvalidPathException e) { + // continue trying } + } - if (source == null) { - /* - * Try to load from the contextPath URL folder - */ - try { - Path p7 = null; - if(contextPath.startsWith("file:/")) { - try { - p7 = Paths.get(new URI(contextPath)).resolve(location); - } catch (final URISyntaxException e) { - // continue trying - } - } + if (source == null && contextPath != null) { + try { + final Path p3 = Paths.get(contextPath).toAbsolutePath().resolve(locationPath); + if (Files.isReadable(p3)) { + locationPath = p3.toUri().toASCIIString(); + source = new FileSource(p3, checkXQEncoding); + } + } catch (final InvalidPathException e) { + // continue trying + } + } - if(p7 == null) { - p7 = Paths.get(contextPath.replaceFirst("^file:/*(/.*)$", "$1")).resolve(location); - } + if (source == null) { + /* + * Try to load as an absolute path + */ + try { + final Path p4 = Paths.get("/" + locationPath); + if (Files.isReadable(p4)) { + locationPath = p4.toUri().toASCIIString(); + source = 
new FileSource(p4, checkXQEncoding); + } + } catch (final InvalidPathException e) { + // continue trying + } + } - if (Files.isReadable(p7)) { - location = p7.toUri().toASCIIString(); - source = new FileSource(p7, checkXQEncoding); - } - } catch (InvalidPathException e) { - // continue trying + if (source == null && contextPath != null) { + /* + * Try to load from the folder of the contextPath + */ + try { + final Path p5 = Paths.get(contextPath).resolveSibling(locationPath); + if (Files.isReadable(p5)) { + locationPath = p5.toUri().toASCIIString(); + source = new FileSource(p5, checkXQEncoding); } + } catch (final InvalidPathException e) { + // continue trying } + } - if (source == null) { - /* - * Lastly we try to load it using EXIST_HOME as the reference point - */ - Path p8 = null; - try { - p8 = FileUtils.resolve(BrokerPool.getInstance().getConfiguration().getExistHome(), location); - if (Files.isReadable(p8)) { - location = p8.toUri().toASCIIString(); - source = new FileSource(p8, checkXQEncoding); + if (source == null && contextPath != null) { + /* + * Try to load from the parent folder of the contextPath URL + */ + try { + Path p6 = null; + if(contextPath.startsWith("file:/")) { + try { + p6 = Paths.get(new URI(contextPath)).resolveSibling(locationPath); + } catch (final URISyntaxException e) { + // continue trying } - } catch (final EXistException e) { - LOG.warn(e); - } catch (InvalidPathException e) { - // continue and abort below } - } - if (source == null) { - throw new FileNotFoundException("cannot read module source from file at " + location + ". 
\n"); + if(p6 == null) { + p6 = Paths.get(contextPath.replaceFirst("^file:/*(/.*)$", "$1")).resolveSibling(locationPath); + } + + if (Files.isReadable(p6)) { + locationPath = p6.toUri().toASCIIString(); + source = new FileSource(p6, checkXQEncoding); + } + } catch (final InvalidPathException e) { + // continue trying } } - - /* xmldb: */ - else if (location.startsWith(XmldbURI.XMLDB_URI_PREFIX)) { - DocumentImpl resource = null; + + if (source == null && contextPath != null) { + /* + * Try to load from the contextPath URL folder + */ try { - final XmldbURI pathUri = XmldbURI.create(location); - resource = broker.getXMLResource(pathUri, LockMode.READ_LOCK); - if (resource != null) { - if (resource.getResourceType() == DocumentImpl.BINARY_FILE) { - source = new DBSource(broker, (BinaryDocument) resource, true); - } else { - try { - // XML document: serialize to string source so it can be read as a stream - // by fn:unparsed-text and friends - source = new StringSource(broker.getSerializer().serialize(resource)); - } catch (SAXException e) { - throw new IOException(e.getMessage()); - } + Path p7 = null; + if(contextPath.startsWith("file:/")) { + try { + p7 = Paths.get(new URI(contextPath)).resolve(locationPath); + } catch (final URISyntaxException e) { + // continue trying } } - } finally { - //TODO: this is nasty!!! 
as we are unlocking the resource whilst there - //is still a source - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); + + if(p7 == null) { + p7 = Paths.get(contextPath.replaceFirst("^file:/*(/.*)$", "$1")).resolve(locationPath); } + + if (Files.isReadable(p7)) { + locationPath = p7.toUri().toASCIIString(); + source = new FileSource(p7, checkXQEncoding); + } + } catch (final InvalidPathException e) { + // continue trying } } - - /* resource: */ - else if (location.startsWith(ClassLoaderSource.PROTOCOL)) { - source = new ClassLoaderSource(location); - } - /* any other URL */ - else { - final URL url = new URL(location); - source = new URLSource(url); + if (source == null) { + /* + * Lastly we try to load it using EXIST_HOME as the reference point + */ + Path p8 = null; + try { + p8 = FileUtils.resolve(BrokerPool.getInstance().getConfiguration().getExistHome(), locationPath); + if (Files.isReadable(p8)) { + locationPath = p8.toUri().toASCIIString(); + source = new FileSource(p8, checkXQEncoding); + } + } catch (final EXistException e) { + LOG.warn(e); + } catch (final InvalidPathException e) { + // continue and abort below + } } return source; diff --git a/src/org/exist/start/start.config b/src/org/exist/start/start.config index c8b858a9bf5..81f8baa941b 100644 --- a/src/org/exist/start/start.config +++ b/src/org/exist/start/start.config @@ -36,12 +36,14 @@ test/classes mode == other lib/endorsed/* always lib/core/antlr-%latest%.jar always lib/core/gnu-crypto-%latest%.jar always +lib/core/caffeine-%latest%.jar always lib/core/commons-codec-%latest%.jar always lib/core/commons-collections-%latest%.jar always lib/core/commons-configuration2-%latest%.jar always lib/core/commons-io-%latest%.jar always lib/core/commons-logging-%latest%.jar always lib/core/commons-pool-%latest%.jar always +lib/core/deuce-annotations-%latest%.jar always lib/core/jargo-%latest%.jar mode != jetty lib/core/jackson-core-%latest%.jar always 
lib/core/rsyntaxtextarea-%latest%.jar mode == client @@ -51,6 +53,7 @@ lib/core/log4j-api-%latest%.jar always lib/core/log4j-core-%latest%.jar always lib/core/log4j-jul-%latest%.jar always lib/core/log4j-slf4j-impl-%latest%.jar always +lib/core/multilock-%latest%.jar always lib/core/slf4j-api-%latest%.jar always lib/core/pkg-java-fork.jar always lib/core/quartz-%latest%.jar always @@ -78,6 +81,7 @@ lib/optional/httpclient-%latest%.jar mode == client lib/optional/httpclient-cache-%latest%.jar mode == client lib/optional/httpmime-%latest%.jar mode == client lib/optional/isorelax-%latest%.jar mode == client +lib/optional/jaxb-api-%latest%.jar always lib/optional/jing-%latest%.jar mode == client lib/optional/servlet-api-%latest%.jar mode == client lib/optional/commons-compress-%latest%.jar mode == client diff --git a/src/org/exist/stax/AbstractEmbeddedXMLStreamReader.java b/src/org/exist/stax/AbstractEmbeddedXMLStreamReader.java deleted file mode 100644 index 63c01b5efb5..00000000000 --- a/src/org/exist/stax/AbstractEmbeddedXMLStreamReader.java +++ /dev/null @@ -1,718 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2001-2014 The eXist team - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
- * - * $Id$ - */ -package org.exist.stax; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.exist.dom.persistent.*; -import org.exist.numbering.NodeId; -import org.exist.storage.DBBroker; -import org.exist.storage.Signatures; -import org.exist.storage.btree.Value; -import org.exist.storage.dom.IRawNodeIterator; -import org.exist.util.ByteConversion; -import org.exist.util.XMLString; -import org.exist.util.serializer.AttrList; -import org.w3c.dom.Node; -import org.w3c.dom.ProcessingInstruction; - -import javax.xml.namespace.NamespaceContext; -import javax.xml.namespace.QName; -import javax.xml.stream.Location; -import javax.xml.stream.StreamFilter; -import javax.xml.stream.XMLStreamException; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Stack; - -/** - * Lazy implementation of a StAX {@link javax.xml.stream.XMLStreamReader}, which directly reads - * information from the persistent DOM. The class is optimized to support fast scanning of the DOM, where only - * a few selected node properties are requested. Node properties are extracted on demand. For example, the QName of - * an element will not be read unless {@link #getText()} is called. 
- */ -public abstract class AbstractEmbeddedXMLStreamReader implements IEmbeddedXMLStreamReader, ExtendedXMLStreamReader { - - private static final Logger LOG = LogManager.getLogger(AbstractEmbeddedXMLStreamReader.class); - - protected final T iterator; - private Value current = null; - private Value previous = null; - - private final Stack elementStack = new Stack<>(); - - private int state = START_DOCUMENT; - - private boolean beforeRoot = false; - - private DocumentImpl document; - protected NodeId nodeId; - protected NodeHandle origin; - private QName qname = null; - - private final XMLString text = new XMLString(256); - - private final List namespaces = new ArrayList<>(6); - private boolean nsRead = false; - - private AttrList attributes = null; - private boolean reportAttribs = false; - - private DBBroker broker; - - /** - * Construct an EmbeddedXMLStreamReader. - * - * @param doc the document to which the start node belongs. - * @param iterator a RawNodeIterator positioned on the start node. - * @param origin an optional NodeHandle whose nodeId should match the first node in the stream - * (or null if no need to check) - * @param reportAttributes if set to true, attributes will be reported as top-level events. 
- * @throws javax.xml.stream.XMLStreamException - */ - public AbstractEmbeddedXMLStreamReader(final DBBroker broker, final DocumentImpl doc, final T iterator, final NodeHandle origin, final boolean reportAttributes) - throws XMLStreamException { - this.broker = broker; - this.document = doc; - this.iterator = iterator; - this.reportAttribs = reportAttributes; - this.origin = origin; - } - - @Override - public void filter(final StreamFilter filter) throws XMLStreamException { - while(hasNext()) { - next(); - if(!filter.accept(this)) { - break; - } - } - } - - @Override - public void reposition(final DBBroker broker, final NodeHandle node, final boolean reportAttributes) throws IOException { - this.broker = broker; - // Seeking to a node with unknown address will reuse this reader, so do it before setting all - // the fields otherwise they could get overwritten. - iterator.seek(node); - reset(); - this.current = null; - this.previous = null; - this.elementStack.clear(); - this.state = START_DOCUMENT; - this.reportAttribs = reportAttributes; - this.document = node.getOwnerDocument(); - this.origin = node; - } - - @Override - public short getNodeType() { - return Signatures.getType(current.data()[current.start()]); - } - - private void initNode() { - final short type = Signatures.getType(current.data()[current.start()]); // TODO: remove potential NPE - if(state == START_DOCUMENT && type != Node.ELEMENT_NODE) { - beforeRoot = true; - } - switch(type) { - case Node.ELEMENT_NODE: - state = START_ELEMENT; - elementStack.push(new ElementEvent(current)); - beforeRoot = false; - break; - case Node.ATTRIBUTE_NODE: - state = ATTRIBUTE; - break; - case Node.TEXT_NODE: - state = CHARACTERS; - break; - case Node.COMMENT_NODE: - state = COMMENT; - break; - case Node.CDATA_SECTION_NODE: - state = CDATA; - break; - case Node.PROCESSING_INSTRUCTION_NODE: - state = PROCESSING_INSTRUCTION; - break; - } - reset(); - readNodeId(); - } - - private void skipAttributes() throws 
XMLStreamException { - if(attributes == null) { - // attributes were not yet read. skip them... - final ElementEvent parent = elementStack.peek(); - final int attrs = getAttributeCount(); - for(int i = 0; i < attrs; i++) { - iterator.next(); - parent.incrementChild(); - } - } - } - - private void readAttributes() { - if(attributes == null) { - final ElementEvent parent = elementStack.peek(); - final int count = getAttributeCount(); - attributes = new AttrList(); - for(int i = 0; i < count; i++) { - final Value v = iterator.next(); - AttrImpl.addToList(broker, v.data(), v.start(), v.getLength(), attributes); - parent.incrementChild(); - } - } - } - - private void readNodeId() { - int offset = current.start() + StoredNode.LENGTH_SIGNATURE_LENGTH; - if(state == START_ELEMENT || state == END_ELEMENT) { - offset += ElementImpl.LENGTH_ELEMENT_CHILD_COUNT; - } - final int dlnLen = ByteConversion.byteToShort(current.data(), offset); - offset += NodeId.LENGTH_NODE_ID_UNITS; - nodeId = broker.getBrokerPool().getNodeFactory().createFromData(dlnLen, current.data(), offset); - } - - @Override - public int next() throws XMLStreamException { - if(state != END_ELEMENT) { - previous = current; - } - if(state == START_ELEMENT && !reportAttribs) { - skipAttributes(); - } - if(!elementStack.isEmpty()) { - final ElementEvent parent = elementStack.peek(); - if(parent.getChildCount() == parent.getCurrentChild()) { - elementStack.pop(); - state = END_ELEMENT; - current = parent.data; - reset(); - return state; - } else { - parent.incrementChild(); - } - } else if(state != START_DOCUMENT && !beforeRoot) { - throw new NoSuchElementException(); - } - final boolean first = state == START_DOCUMENT; - current = iterator.next(); - initNode(); - if(first && origin != null) { - verifyOriginNodeId(); - origin = null; - } - return state; - } - - protected void verifyOriginNodeId() throws XMLStreamException { - if(!nodeId.equals(origin.getNodeId())) { - // Node got moved, we had the wrong address. 
Resync iterator by nodeid. - LOG.warn("expected node id " + origin.getNodeId() + ", got " + nodeId + "; resyncing address"); - origin.setInternalAddress(StoredNode.UNKNOWN_NODE_IMPL_ADDRESS); - final boolean reportAttribsBackup = reportAttribs; - DocumentImpl documentBackup = document; - try { - iterator.seek(origin); - } catch(final IOException e) { - throw new XMLStreamException(e); - } - // Seeking the iterator might've reused this reader, so reset all fields. - reset(); - previous = null; - elementStack.clear(); - reportAttribs = reportAttribsBackup; - document = documentBackup; - current = iterator.next(); - initNode(); - } - } - - private void reset() { - nodeId = null; - qname = null; - attributes = null; - text.reuse(); - if(state != END_ELEMENT) { - namespaces.clear(); - nsRead = false; - } - } - - @Override - public void require(final int type, final String namespaceURI, final String localName) throws XMLStreamException { - throw new UnsupportedOperationException(); - } - - @Override - public String getElementText() throws XMLStreamException { - if(getEventType() != START_ELEMENT) { - throw new XMLStreamException( - "parser must be on START_ELEMENT to read next text"); - } - int eventType = next(); - final StringBuilder content = new StringBuilder(); - while(eventType != END_ELEMENT) { - if(eventType == CHARACTERS - || eventType == CDATA - || eventType == SPACE - || eventType == ENTITY_REFERENCE) { - content.append(getText()); - } else if(eventType == PROCESSING_INSTRUCTION - || eventType == COMMENT) { - // skipping - } else if(eventType == END_DOCUMENT) { - throw new XMLStreamException("unexpected end of document when reading element text content"); - } else if(eventType == START_ELEMENT) { - throw new XMLStreamException( - "element text content may not contain START_ELEMENT"); - } else { - throw new XMLStreamException( - "Unexpected event type " + eventType); - } - eventType = next(); - } - return content.toString(); - } - - @Override - public Object 
getProperty(final String string) throws IllegalArgumentException { - if(string.equals(PROPERTY_NODE_ID)) { - if(nodeId == null) { - readNodeId(); - } - return nodeId; - } - return null; - } - - @Override - public int nextTag() throws XMLStreamException { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasNext() throws XMLStreamException { - return state == START_DOCUMENT || beforeRoot || !elementStack.isEmpty(); - } - - @Override - public void close() throws XMLStreamException { - iterator.close(); - } - - @Override - public boolean isStartElement() { - return state == START_ELEMENT; - } - - @Override - public boolean isEndElement() { - return state == END_ELEMENT; - } - - @Override - public boolean isCharacters() { - return state == CHARACTERS; - } - - @Override - public boolean isWhiteSpace() { - return false; - } - - @Override - public String getAttributeValue(final String namespaceURI, final String localName) { - readAttributes(); - for(int i = 0; i < attributes.getLength(); i++) { - final org.exist.dom.QName qn = attributes.getQName(i); - if(qn.getNamespaceURI().equals(namespaceURI) && qn.getLocalPart().equals(localName)) { - return attributes.getValue(i); - } - } - return null; - } - - @Override - public int getAttributeCount() { - final int offset = current.start() + StoredNode.LENGTH_SIGNATURE_LENGTH + ElementImpl.LENGTH_ELEMENT_CHILD_COUNT + NodeId.LENGTH_NODE_ID_UNITS + nodeId.size(); - return ByteConversion.byteToShort(current.data(), offset); - } - - @Override - public QName getAttributeName(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getQName(index).toJavaQName(); - } - - @Override - public org.exist.dom.QName getAttributeQName(final int index) { - if(state != START_ELEMENT) 
{ - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getQName(index); - } - - @Override - public String getAttributeNamespace(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getQName(index).getNamespaceURI(); - } - - @Override - public String getAttributeLocalName(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getQName(index).getLocalPart(); - } - - @Override - public String getAttributePrefix(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getQName(index).getPrefix(); - } - - @Override - public String getAttributeType(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - final int type = attributes.getType(index); - return AttrImpl.getAttributeType(type); - } - - @Override - public String getAttributeValue(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - 
readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getValue(index); - } - - @Override - public NodeId getAttributeId(final int index) { - if(state != START_ELEMENT) { - throw new IllegalStateException("Cursor is not at an element"); - } - readAttributes(); - if(index > attributes.getLength()) { - throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); - } - return attributes.getNodeId(index); - } - - @Override - public boolean isAttributeSpecified(final int index) { - return false; - } - - @Override - public int getNamespaceCount() { - readNamespaceDecls(); - return namespaces.size(); - } - - @Override - public String getNamespacePrefix(final int index) { - readNamespaceDecls(); - if(index < 0 || index > namespaces.size()) { - return null; - } - final String[] decl = namespaces.get(index); - return decl[0]; - } - - @Override - public String getNamespaceURI(int i) { - readNamespaceDecls(); - if(i < 0 || i > namespaces.size()) { - return null; - } - final String[] decl = namespaces.get(i); - return decl[1]; - } - - @Override - public NamespaceContext getNamespaceContext() { - throw new UnsupportedOperationException(); - } - - @Override - public int getEventType() { - return state; - } - - @Override - public XMLString getXMLText() { - if(state == CHARACTERS || state == COMMENT || state == CDATA) { - if(text.length() == 0) { - AbstractCharacterData.readData(nodeId, current, text); - } - return text; - } - return new XMLString(); - } - - @Override - public String getText() { - return getXMLText().toString(); - } - - @Override - public char[] getTextCharacters() { - final String s = getText(); - final char[] dst = new char[s.length()]; - s.getChars(0, dst.length, dst, 0); - return dst; - } - - @Override - public int getTextCharacters(final int sourceStart, final char[] chars, final int targetStart, final int length) 
throws XMLStreamException { - throw new UnsupportedOperationException(); - } - - @Override - public int getTextStart() { - throw new UnsupportedOperationException(); - } - - @Override - public int getTextLength() { - if(state == CHARACTERS || state == COMMENT || state == CDATA) { - if(text.length() == 0) { - return AbstractCharacterData.getStringLength(nodeId, current); - } - return text.length(); - } - return 0; - } - - @Override - public String getEncoding() { - throw new UnsupportedOperationException(); - } - - @Override - public boolean hasText() { - return state == CHARACTERS || state == COMMENT || state == CDATA; - } - - @Override - public Location getLocation() { - throw new UnsupportedOperationException(); - } - - @Override - public String getNamespaceURI(final String prefix) { - return null; - } - - @Override - public QName getName() { - if(qname != null) { - return qname; - } - if(state == START_ELEMENT || state == END_ELEMENT) { - if(nodeId == null) { - readNodeId(); - } - qname = ElementImpl.readQName(current, document, nodeId).toJavaQName(); - } - return qname; - } - - @Override - public org.exist.dom.QName getQName() { - if(state == START_ELEMENT || state == END_ELEMENT) { - if(nodeId == null) { - readNodeId(); - } - return ElementImpl.readQName(current, document, nodeId); - } - return null; - } - - /** - * Read all namespace declarations defined on the current element. - * Cache them in the namespaces map. 
- */ - private void readNamespaceDecls() { - if(nsRead) { - return; - } - if(state == START_ELEMENT || state == END_ELEMENT) { - if(nodeId == null) { - readNodeId(); - } - ElementImpl.readNamespaceDecls(namespaces, current, document, nodeId); - } - nsRead = true; - } - - @Override - public String getPrefix() { - return getName().getPrefix(); - } - - @Override - public String getLocalName() { - return getName().getLocalPart(); - } - - @Override - public String getNamespaceURI() { - return getName().getNamespaceURI(); - } - - @Override - public boolean hasName() { - return (state == START_ELEMENT || state == END_ELEMENT); - } - - @Override - public IStoredNode getNode() { - final IStoredNode node = StoredNode.deserialize(current.data(), current.start(), current.getLength(), document); - node.setOwnerDocument(document); - node.setInternalAddress(current.getAddress()); - return node; - } - - @Override - public IStoredNode getPreviousNode() { - final StoredNode node = StoredNode.deserialize(previous.data(), previous.start(), previous.getLength(), document); - node.setOwnerDocument(document); - node.setInternalAddress(previous.getAddress()); - return node; - } - - @Override - public String getVersion() { - return "1.0"; - } - - @Override - public boolean isStandalone() { - return false; - } - - @Override - public boolean standaloneSet() { - return false; - } - - @Override - public String getCharacterEncodingScheme() { - return null; - } - - @Override - public String getPITarget() { - readPI(); - return qname.getLocalPart(); - } - - @Override - public String getPIData() { - readPI(); - return text.toString(); - } - - private void readPI() { - if(qname == null) { - if(state != PROCESSING_INSTRUCTION) { - throw new IllegalStateException("Cursor is not at a processing instruction"); - } - final ProcessingInstruction pi = (ProcessingInstruction) - StoredNode.deserialize(current.data(), current.start(), current.getLength(), document); - qname = new QName("", pi.getTarget(), 
""); - text.append(pi.getData()); - } - } - - private static final class ElementEvent { - - private final Value data; - private final int childCount; - private int currentChild = 0; - - public ElementEvent(Value data) { - this.data = data; - this.childCount = ByteConversion.byteToInt(data.data(), data.start() + StoredNode.LENGTH_SIGNATURE_LENGTH); - } - - public Value getData() { - return data; - } - - public int getChildCount() { - return childCount; - } - - public int getCurrentChild() { - return currentChild; - } - - public void incrementChild() { - currentChild++; - } - } -} \ No newline at end of file diff --git a/src/org/exist/stax/EmbeddedXMLStreamReader.java b/src/org/exist/stax/EmbeddedXMLStreamReader.java index ec01261440d..2ccb04b273b 100644 --- a/src/org/exist/stax/EmbeddedXMLStreamReader.java +++ b/src/org/exist/stax/EmbeddedXMLStreamReader.java @@ -1,56 +1,692 @@ /* - * eXist Open Source Native XML Database - * Copyright (C) 2001-2014 The eXist team - * http://exist-db.org + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
+ * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * - * $Id$ + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ + package org.exist.stax; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.NodeHandle; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.exist.dom.persistent.*; +import org.exist.numbering.NodeId; import org.exist.storage.DBBroker; +import org.exist.storage.Signatures; +import org.exist.storage.btree.Value; import org.exist.storage.dom.RawNodeIterator; +import org.exist.util.ByteConversion; +import org.exist.util.XMLString; +import org.exist.util.serializer.AttrList; +import org.w3c.dom.Node; +import org.w3c.dom.ProcessingInstruction; +import javax.annotation.Nullable; +import javax.xml.namespace.NamespaceContext; +import javax.xml.namespace.QName; +import javax.xml.stream.Location; +import javax.xml.stream.StreamFilter; import javax.xml.stream.XMLStreamException; +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Deque; +import java.util.List; -/** - * Lazy implementation of a StAX {@link javax.xml.stream.XMLStreamReader}, which directly reads - * information from the persistent DOM. The class is optimized to support fast scanning of the DOM, where only - * a few selected node properties are requested. 
Node properties are extracted on demand. For example, the QName of - * an element will not be read unless {@link #getText()} is called. - */ -public class EmbeddedXMLStreamReader extends AbstractEmbeddedXMLStreamReader { +public class EmbeddedXMLStreamReader implements IEmbeddedXMLStreamReader, ExtendedXMLStreamReader { + + private static final Logger LOG = LogManager.getLogger(EmbeddedXMLStreamReader.class); + + // states outside the range of events defined in {@link XMLStreamConstants} + private static int BEFORE = -1; + private static int AFTER = -2; + + // members which don't (generally) change! + private DBBroker broker; + private DocumentImpl document; + private final RawNodeIterator iterator; + private boolean reportAttributes; + + // mutable members which hold the current state of the stream + private int state = BEFORE; // initial state! + private boolean consumedState = false; + @Nullable private NodeHandle origin; + @Nullable private Value previous = null; + @Nullable private Value current = null; + @Nullable private NodeId nodeId = null; + private final Deque elementStack = new ArrayDeque<>(); + private boolean nsRead = false; + private final List namespaces = new ArrayList<>(6); + @Nullable private QName qname = null; + @Nullable private AttrList attributes = null; + private final XMLString text = new XMLString(256); + + + public EmbeddedXMLStreamReader(final DBBroker broker, final DocumentImpl document, final RawNodeIterator iterator, @Nullable final NodeHandle origin, final boolean reportAttributes) + throws XMLStreamException { + this.broker = broker; + this.document = document; + this.iterator = iterator; + this.reportAttributes = reportAttributes; + this.origin = origin; + } + + @Override + public void reposition(final DBBroker broker, final NodeHandle node, final boolean reportAttributes) throws IOException { + this.broker = broker; + // Seeking to a node with unknown address will reuse this reader, so do it before setting all + // the fields 
otherwise they could get overwritten. + iterator.seek(node); + reset(); + this.current = null; + this.previous = null; + this.elementStack.clear(); + this.state = BEFORE; + this.consumedState = false; + this.reportAttributes = reportAttributes; + this.document = node.getOwnerDocument(); + this.origin = node; + } + + @Override + public boolean hasNext() throws XMLStreamException { + if (consumedState || state == BEFORE) { + getNext(); + + consumedState = false; // mark that we have a new state available + + // NOTE: this intentionally prevents END_DOCUMENT event being returned from {@link #next()}. + if (state == END_DOCUMENT) { + state = AFTER; + } + } + + return state != AFTER; + } + + @Override + public int next() throws XMLStreamException { + if (!hasNext()) { + throw new IllegalStateException("hasNext()==false"); + } + consumedState = true; // mark that we have consumed the current state + return state; + } + + private void getNext() throws XMLStreamException { + if(state != END_ELEMENT) { + previous = current; + } + + if(state == START_ELEMENT && !reportAttributes) { + skipAttributes(); + } + if(!elementStack.isEmpty()) { + final ElementEvent parent = elementStack.peek(); + if(parent.getChildCount() == parent.getCurrentChild()) { + elementStack.pop(); + state = END_ELEMENT; + current = parent.data; + reset(); + return; + } else { + parent.incrementChild(); + } + } + final boolean first = state == BEFORE; + current = iterator.next(); + if (current == null) { + state = END_DOCUMENT; + reset(); + return; + } + initNode(); + if(first && origin != null) { + verifyOriginNodeId(); + origin = null; + } + return; + } + + @Override + public void filter(final StreamFilter filter) throws XMLStreamException { + while (hasNext()) { + next(); + if (!filter.accept(this)) { + break; + } + } + } + + private void initNode() { + final short type = Signatures.getType(current.data()[current.start()]); + switch (type) { + case Node.ELEMENT_NODE: + state = START_ELEMENT; + 
elementStack.push(new ElementEvent(current)); + break; + case Node.ATTRIBUTE_NODE: + state = ATTRIBUTE; + break; + case Node.TEXT_NODE: + state = CHARACTERS; + break; + case Node.COMMENT_NODE: + state = COMMENT; + break; + case Node.CDATA_SECTION_NODE: + state = CDATA; + break; + case Node.PROCESSING_INSTRUCTION_NODE: + state = PROCESSING_INSTRUCTION; + break; + } + reset(); + readNodeId(); + } + + private void skipAttributes() throws XMLStreamException { + if(attributes == null) { + // attributes were not yet read. skip them... + final ElementEvent parent = elementStack.peek(); + final int attrs = getAttributeCount(); + for(int i = 0; i < attrs; i++) { + iterator.next(); + parent.incrementChild(); + } + } + } + + private void readAttributes() { + if(attributes == null) { + final ElementEvent parent = elementStack.peek(); + final int count = getAttributeCount(); + attributes = new AttrList(); + for(int i = 0; i < count; i++) { + final Value v = iterator.next(); + AttrImpl.addToList(broker, v.data(), v.start(), v.getLength(), attributes); + parent.incrementChild(); + } + } + } + + private void readNodeId() { + int offset = current.start() + StoredNode.LENGTH_SIGNATURE_LENGTH; + if(state == START_ELEMENT || state == END_ELEMENT) { + offset += ElementImpl.LENGTH_ELEMENT_CHILD_COUNT; + } + final int dlnLen = ByteConversion.byteToShort(current.data(), offset); + offset += NodeId.LENGTH_NODE_ID_UNITS; + nodeId = broker.getBrokerPool().getNodeFactory().createFromData(dlnLen, current.data(), offset); + } + + private void verifyOriginNodeId() throws XMLStreamException { + if(!nodeId.equals(origin.getNodeId())) { + // Node got moved, we had the wrong address. Resync iterator by nodeid. 
+ LOG.warn("Expected node id " + origin.getNodeId() + ", got " + nodeId + "; resyncing address"); + origin.setInternalAddress(StoredNode.UNKNOWN_NODE_IMPL_ADDRESS); + final boolean reportAttributesBackup = reportAttributes; + DocumentImpl documentBackup = document; + try { + iterator.seek(origin); + } catch(final IOException e) { + throw new XMLStreamException(e); + } + // Seeking the iterator might've reused this reader, so reset all fields. + reset(); + previous = null; + elementStack.clear(); + reportAttributes = reportAttributesBackup; + document = documentBackup; + current = iterator.next(); + initNode(); + + origin.setInternalAddress(iterator.currentAddress()); + } + } + + private void reset() { + nodeId = null; + qname = null; + attributes = null; + text.reuse(); + if(state != END_ELEMENT) { + namespaces.clear(); + nsRead = false; + } + } + + @Override + public void require(final int type, final String namespaceURI, final String localName) throws XMLStreamException { + throw new UnsupportedOperationException(); + } + + @Override + public short getNodeType() { + return Signatures.getType(current.data()[current.start()]); + } + + @Override + public String getElementText() throws XMLStreamException { + if (state != START_ELEMENT) { + throw new XMLStreamException("parser must be on START_ELEMENT to read next text"); + } + int eventType = next(); + final StringBuilder content = new StringBuilder(); + while(eventType != END_ELEMENT) { + if(eventType == CHARACTERS + || eventType == CDATA + || eventType == SPACE + || eventType == ENTITY_REFERENCE) { + content.append(getText()); + } else if(eventType == PROCESSING_INSTRUCTION + || eventType == COMMENT) { + // skipping + } else if(eventType == END_DOCUMENT) { + throw new XMLStreamException("unexpected end of document when reading element text content"); + } else if(eventType == START_ELEMENT) { + throw new XMLStreamException("element text content may not contain START_ELEMENT"); + } else { + throw new 
XMLStreamException("Unexpected event type " + eventType); + } + eventType = next(); + } + return content.toString(); + } + + @Override + public Object getProperty(final String string) throws IllegalArgumentException { + if(string.equals(PROPERTY_NODE_ID)) { + if(nodeId == null) { + readNodeId(); + } + return nodeId; + } + return null; + } + + @Override + public int nextTag() throws XMLStreamException { + throw new UnsupportedOperationException(); + } + + @Override + public void close() throws XMLStreamException { + iterator.close(); + } + + @Override + public boolean isStartElement() { + return state == START_ELEMENT; + } + + @Override + public boolean isEndElement() { + return state == END_ELEMENT; + } + + @Override + public boolean isCharacters() { + return state == CHARACTERS; + } + + @Override + public boolean isWhiteSpace() { + return false; + } + + @Override + public String getAttributeValue(final String namespaceURI, final String localName) { + readAttributes(); + for(int i = 0; i < attributes.getLength(); i++) { + final org.exist.dom.QName qn = attributes.getQName(i); + if(qn.getNamespaceURI().equals(namespaceURI) && qn.getLocalPart().equals(localName)) { + return attributes.getValue(i); + } + } + return null; + } + + @Override + public int getAttributeCount() { + final int offset = current.start() + StoredNode.LENGTH_SIGNATURE_LENGTH + ElementImpl.LENGTH_ELEMENT_CHILD_COUNT + NodeId.LENGTH_NODE_ID_UNITS + nodeId.size(); + return ByteConversion.byteToShort(current.data(), offset); + } + + @Override + public javax.xml.namespace.QName getAttributeName(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getQName(index).toJavaQName(); + } + + @Override + public org.exist.dom.QName getAttributeQName(final int index) { + if 
(state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getQName(index); + } + + @Override + public String getAttributeNamespace(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getQName(index).getNamespaceURI(); + } + + @Override + public String getAttributeLocalName(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getQName(index).getLocalPart(); + } + + @Override + public String getAttributePrefix(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getQName(index).getPrefix(); + } + + @Override + public String getAttributeType(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + final int type = attributes.getType(index); + return AttrImpl.getAttributeType(type); + } + + @Override + public String getAttributeValue(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not 
at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getValue(index); + } + + @Override + public NodeId getAttributeId(final int index) { + if (state != START_ELEMENT) { + throw new IllegalStateException("Cursor is not at an element"); + } + readAttributes(); + if(index > attributes.getLength()) { + throw new ArrayIndexOutOfBoundsException("index should be < " + attributes.getLength()); + } + return attributes.getNodeId(index); + } + + @Override + public boolean isAttributeSpecified(final int index) { + return false; + } + + @Override + public int getNamespaceCount() { + readNamespaceDecls(); + return namespaces.size(); + } + + @Override + public String getNamespacePrefix(final int index) { + readNamespaceDecls(); + if(index < 0 || index > namespaces.size()) { + return null; + } + final String[] decl = namespaces.get(index); + return decl[0]; + } + + @Override + public String getNamespaceURI(int i) { + readNamespaceDecls(); + if(i < 0 || i > namespaces.size()) { + return null; + } + final String[] decl = namespaces.get(i); + return decl[1]; + } + + @Override + public NamespaceContext getNamespaceContext() { + throw new UnsupportedOperationException(); + } + + @Override + public int getEventType() { + return state; + } + + @Override + public XMLString getXMLText() { + if(state == CHARACTERS || state == COMMENT || state == CDATA) { + if(text.length() == 0) { + AbstractCharacterData.readData(nodeId, current, text); + } + return text; + } + return new XMLString(); + } + + @Override + public String getText() { + return getXMLText().toString(); + } + + @Override + public char[] getTextCharacters() { + return getText().toCharArray(); + } + + @Override + public int getTextCharacters(final int sourceStart, final char[] chars, final int targetStart, final int length) throws XMLStreamException { + throw new 
UnsupportedOperationException(); + } + + @Override + public int getTextStart() { + throw new UnsupportedOperationException(); + } + + @Override + public int getTextLength() { + if(state == CHARACTERS || state == COMMENT || state == CDATA) { + if(text.length() == 0) { + return AbstractCharacterData.getStringLength(nodeId, current); + } + return text.length(); + } + return 0; + } + + @Override + public String getEncoding() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean hasText() { + return state == CHARACTERS || state == COMMENT || state == CDATA; + } + + @Override + public Location getLocation() { + throw new UnsupportedOperationException(); + } + + @Override + public String getNamespaceURI(final String prefix) { + return null; + } + + @Override + public javax.xml.namespace.QName getName() { + if(qname != null) { + return qname; + } + if (state == START_ELEMENT || state == END_ELEMENT) { + if(nodeId == null) { + readNodeId(); + } + qname = ElementImpl.readQName(current, document, nodeId).toJavaQName(); + } + return qname; + } + + @Override + public org.exist.dom.QName getQName() { + if (state == START_ELEMENT || state == END_ELEMENT) { + if(nodeId == null) { + readNodeId(); + } + return ElementImpl.readQName(current, document, nodeId); + } + return null; + } /** - * Construct an EmbeddedXMLStreamReader. - * - * @param doc the document to which the start node belongs. - * @param iterator a RawNodeIterator positioned on the start node. - * @param origin an optional NodeHandle whose nodeId should match the first node in the stream - * (or null if no need to check) - * @param reportAttributes if set to true, attributes will be reported as top-level events. - * @throws XMLStreamException + * Read all namespace declarations defined on the current element. + * Cache them in the namespaces map. 
*/ - public EmbeddedXMLStreamReader(final DBBroker broker, final DocumentImpl doc, final RawNodeIterator iterator, final NodeHandle origin, final boolean reportAttributes) - throws XMLStreamException { - super(broker, doc, iterator, origin, reportAttributes); + private void readNamespaceDecls() { + if(nsRead) { + return; + } + if (state == START_ELEMENT || state == END_ELEMENT) { + if(nodeId == null) { + readNodeId(); + } + ElementImpl.readNamespaceDecls(namespaces, current, document, nodeId); + } + nsRead = true; + } + + @Override + public String getPrefix() { + return getName().getPrefix(); + } + + @Override + public String getLocalName() { + return getName().getLocalPart(); + } + + @Override + public String getNamespaceURI() { + return getName().getNamespaceURI(); + } + + @Override + public boolean hasName() { + return state == START_ELEMENT || state == END_ELEMENT; + } + + @Override + public IStoredNode getNode() { + final IStoredNode node = StoredNode.deserialize(current.data(), current.start(), current.getLength(), document); + node.setOwnerDocument(document); + node.setInternalAddress(current.getAddress()); + return node; + } + + @Override + public IStoredNode getPreviousNode() { + final StoredNode node = StoredNode.deserialize(previous.data(), previous.start(), previous.getLength(), document); + node.setOwnerDocument(document); + node.setInternalAddress(previous.getAddress()); + return node; + } + + @Override + public String getVersion() { + return "1.0"; + } + + @Override + public boolean isStandalone() { + return false; + } + + @Override + public boolean standaloneSet() { + return false; } + @Override + public String getCharacterEncodingScheme() { + return null; + } + + @Override + public String getPITarget() { + readPI(); + return qname.getLocalPart(); + } + + @Override + public String getPIData() { + readPI(); + return text.toString(); + } + + private void readPI() { + if(qname == null) { + if(state != PROCESSING_INSTRUCTION) { + throw new 
IllegalStateException("Cursor is not at a processing instruction"); + } + final ProcessingInstruction pi = (ProcessingInstruction) + StoredNode.deserialize(current.data(), current.start(), current.getLength(), document); + qname = new javax.xml.namespace.QName("", pi.getTarget(), ""); + text.append(pi.getData()); + } + } /** * Returns the (internal) address of the node at the cursor's current @@ -62,11 +698,30 @@ public long getCurrentPosition() { return iterator.currentAddress(); } - @Override - protected void verifyOriginNodeId() throws XMLStreamException { - if(!nodeId.equals(origin.getNodeId())) { - super.verifyOriginNodeId(); - origin.setInternalAddress(iterator.currentAddress()); + private static final class ElementEvent { + private final Value data; + private final int childCount; + private int currentChild = 0; + + public ElementEvent(final Value data) { + this.data = data; + this.childCount = ByteConversion.byteToInt(data.data(), data.start() + StoredNode.LENGTH_SIGNATURE_LENGTH); + } + + public Value getData() { + return data; + } + + public int getChildCount() { + return childCount; + } + + public int getCurrentChild() { + return currentChild; + } + + public void incrementChild() { + currentChild++; } } -} \ No newline at end of file +} diff --git a/src/org/exist/stax/StaXUtil.java b/src/org/exist/stax/StaXUtil.java index 03b4c4f91b8..cf3dc3b1bc4 100644 --- a/src/org/exist/stax/StaXUtil.java +++ b/src/org/exist/stax/StaXUtil.java @@ -29,52 +29,86 @@ public class StaXUtil { - public static int streamType2Type(int type) { + public static int streamType2Type(final int type) { int xpathType; switch (type) { case XMLStreamConstants.START_ELEMENT : xpathType = Type.ELEMENT; break; - case XMLStreamConstants.ATTRIBUTE : - xpathType = Type.ATTRIBUTE; + + case XMLStreamConstants.PROCESSING_INSTRUCTION : + xpathType = Type.PROCESSING_INSTRUCTION; break; + case XMLStreamConstants.CHARACTERS : - case XMLStreamConstants.CDATA : xpathType = Type.TEXT; break; + case 
XMLStreamConstants.COMMENT : xpathType = Type.COMMENT; break; - case XMLStreamConstants.PROCESSING_INSTRUCTION : - xpathType = Type.PROCESSING_INSTRUCTION; + + case XMLStreamConstants.START_DOCUMENT: + xpathType = Type.DOCUMENT; break; + + case XMLStreamConstants.ATTRIBUTE: + xpathType = Type.ATTRIBUTE; + break; + + case XMLStreamConstants.CDATA: + xpathType = Type.TEXT; + break; + default: xpathType = Type.UNTYPED; } return xpathType; } - public static short streamType2DOM(int type) { + public static short streamType2DOM(final int type) { short domType; switch (type) { - case XMLStreamConstants.START_ELEMENT : + case XMLStreamConstants.START_ELEMENT: domType = Node.ELEMENT_NODE; break; - case XMLStreamConstants.ATTRIBUTE : - domType = Node.ATTRIBUTE_NODE; + + case XMLStreamConstants.PROCESSING_INSTRUCTION: + domType = Node.PROCESSING_INSTRUCTION_NODE; break; - case XMLStreamConstants.CHARACTERS : + + case XMLStreamConstants.CHARACTERS: domType = Node.TEXT_NODE; break; - case XMLStreamConstants.CDATA : - domType = Node.CDATA_SECTION_NODE; - break; - case XMLStreamConstants.COMMENT : + + case XMLStreamConstants.COMMENT: domType = Node.COMMENT_NODE; break; - case XMLStreamConstants.PROCESSING_INSTRUCTION : - domType = Node.PROCESSING_INSTRUCTION_NODE; + + case XMLStreamConstants.START_DOCUMENT: + domType = Node.DOCUMENT_NODE; break; + + case XMLStreamConstants.ENTITY_REFERENCE: + domType = Node.ENTITY_REFERENCE_NODE; + break; + + case XMLStreamConstants.ATTRIBUTE: + domType = Node.ATTRIBUTE_NODE; + break; + + case XMLStreamConstants.DTD: + domType = Node.DOCUMENT_TYPE_NODE; + break; + + case XMLStreamConstants.CDATA: + domType = Node.CDATA_SECTION_NODE; + break; + + case XMLStreamConstants.ENTITY_DECLARATION: + domType = Node.ENTITY_NODE; + break; + default: domType = -1; } diff --git a/src/org/exist/storage/BrokerPool.java b/src/org/exist/storage/BrokerPool.java index 684e520175f..2e20b67ac53 100644 --- a/src/org/exist/storage/BrokerPool.java +++ 
b/src/org/exist/storage/BrokerPool.java @@ -54,8 +54,8 @@ import org.exist.security.SecurityManager; import org.exist.security.internal.SecurityManagerImpl; import org.exist.storage.journal.JournalManager; -import org.exist.storage.lock.DeadlockDetection; import org.exist.storage.lock.FileLockService; +import org.exist.storage.lock.LockManager; import org.exist.storage.recovery.RecoveryManager; import org.exist.storage.sync.Sync; import org.exist.storage.sync.SyncTask; @@ -85,6 +85,8 @@ import java.util.concurrent.locks.ReentrantLock; import static com.evolvedbinary.j8fu.fsm.TransitionTable.transitionTable; +import static org.exist.util.ThreadUtils.nameInstanceThreadGroup; +import static org.exist.util.ThreadUtils.newInstanceThread; /** * This class controls all available instances of the database. @@ -130,6 +132,15 @@ public class BrokerPool extends BrokerPools implements BrokerPoolConstants, Data */ private final String instanceName; + private final int concurrencyLevel; + private LockManager lockManager; + + /** + * Root thread group for all threads related + * to this instance. 
+ */ + private final ThreadGroup instanceThreadGroup; + /** * State of the BrokerPool instance */ @@ -299,8 +310,6 @@ private enum Event { private DefaultCacheManager cacheManager; - private CollectionCacheManager collectionCacheMgr; - private long reservedMem; /** @@ -373,6 +382,7 @@ private enum Event { this.classLoader = Thread.currentThread().getContextClassLoader(); this.instanceName = instanceName; + this.instanceThreadGroup = new ThreadGroup(nameInstanceThreadGroup(instanceName)); this.maxShutdownWait = conf.getProperty(BrokerPool.PROPERTY_SHUTDOWN_DELAY, DEFAULT_MAX_SHUTDOWN_WAIT); LOG.info("database instance '" + instanceName + "' will wait " + nf.format(this.maxShutdownWait) + " ms during shutdown"); @@ -382,11 +392,9 @@ private enum Event { this.minBrokers = conf.getProperty(PROPERTY_MIN_CONNECTIONS, minBrokers); this.maxBrokers = conf.getProperty(PROPERTY_MAX_CONNECTIONS, maxBrokers); - LOG.info("database instance '" + instanceName + "' will have between " + nf.format(this.minBrokers) + " and " + nf.format(this.maxBrokers) + " brokers"); this.majorSyncPeriod = conf.getProperty(PROPERTY_SYNC_PERIOD, DEFAULT_SYNCH_PERIOD); - LOG.info("database instance '" + instanceName + "' will be synchronized every " + nf.format(/*this.*/majorSyncPeriod) + " ms"); // convert from bytes to megabytes: 1024 * 1024 @@ -397,6 +405,8 @@ private enum Event { //Configuration is valid, save it this.conf = conf; + this.concurrencyLevel = Math.max(maxBrokers, 2 * Runtime.getRuntime().availableProcessors()); + statusObserver.ifPresent(this.statusObservers::add); this.watchdog = Optional.ofNullable(System.getProperty(BrokerWatchdog.TRACE_BROKERS_PROPERTY_NAME)) @@ -432,6 +442,8 @@ void initialize() throws EXistException, DatabaseConfigurationException { } private void _initialize() throws EXistException, DatabaseConfigurationException { + this.lockManager = new LockManager(instanceName, instanceThreadGroup, concurrencyLevel); + //Flag to indicate that we are initializing 
status.process(Event.INITIALIZE); @@ -456,8 +468,7 @@ private void _initialize() throws EXistException, DatabaseConfigurationException final int bufferSize = Optional.of(conf.getInteger(PROPERTY_COLLECTION_CACHE_SIZE)) .filter(size -> size != -1) .orElse(DEFAULT_COLLECTION_BUFFER_SIZE); - this.collectionCache = servicesManager.register(new CollectionCache(this, bufferSize, 0.000001)); - this.collectionCacheMgr = servicesManager.register(new CollectionCacheManager(this, collectionCache)); + this.collectionCache = servicesManager.register(new CollectionCache()); this.notificationService = servicesManager.register(new NotificationService()); this.journalManager = recoveryEnabled ? Optional.of(new JournalManager()) : Optional.empty(); @@ -497,7 +508,7 @@ private void _initialize() throws EXistException, DatabaseConfigurationException final Runtime rt = Runtime.getRuntime(); final long maxMem = rt.maxMemory(); final long minFree = maxMem / 5; - reservedMem = cacheManager.getTotalMem() + collectionCacheMgr.getMaxTotal() + minFree; + reservedMem = cacheManager.getTotalMem() + collectionCache.getMaxCacheSize() + minFree; LOG.debug("Reserved memory: " + reservedMem + "; max: " + maxMem + "; min: " + minFree); //prepare the registered services, before entering system (single-user) mode @@ -518,7 +529,7 @@ private void _initialize() throws EXistException, DatabaseConfigurationException statusReporter = new StatusReporter(SIGNAL_STARTUP); statusObservers.forEach(statusReporter::addObserver); - final Thread statusThread = new Thread(statusReporter, "exist-broker-" + getId() + "-initialize-statusReporter"); + final Thread statusThread = newInstanceThread(this, "startup-status-reporter", statusReporter); statusThread.start(); // statusReporter may have to be terminated or the thread can/will hang. 
@@ -574,8 +585,9 @@ private void _initialize() throws EXistException, DatabaseConfigurationException statusReporter.setStatus(SIGNAL_READINESS); - try { - servicesManager.startSystemServices(systemBroker); + try(final Txn transaction = transactionManager.beginTransaction()) { + servicesManager.startSystemServices(systemBroker, transaction); + transaction.commit(); } catch(final BrokerPoolServiceException e) { throw new EXistException(e); } @@ -627,8 +639,9 @@ private void _initialize() throws EXistException, DatabaseConfigurationException // we have completed all system mode operations // we can now prepare those services which need // system mode before entering multi-user mode - try { - servicesManager.startPreMultiUserSystemServices(systemBroker); + try(final Txn transaction = transactionManager.beginTransaction()) { + servicesManager.startPreMultiUserSystemServices(systemBroker, transaction); + transaction.commit(); } catch(final BrokerPoolServiceException e) { throw new EXistException(e); } @@ -692,7 +705,7 @@ private void initialiseSystemCollection(final DBBroker sysBroker, final XmldbURI if(collection == null) { throw new IOException("Could not create system collection: " + sysCollectionUri); } - collection.setPermissions(permissions); + collection.setPermissions(sysBroker, permissions); sysBroker.saveCollection(txn, collection); transact.commit(txn); @@ -729,6 +742,15 @@ private void initialiseTriggersForCollections(final DBBroker broker, final Xmldb } } + /** + * Get the LockManager for this database instance + * + * @return The lock manager + */ + public LockManager getLockManager() { + return lockManager; + } + /** * Run a database recovery if required. This method is called once during * startup from {@link org.exist.storage.BrokerPool}. 
@@ -784,6 +806,11 @@ public String getId() { return instanceName; } + @Override + public ThreadGroup getThreadGroup() { + return instanceThreadGroup; + } + /** * Returns the number of brokers currently serving requests for the database instance. * @@ -968,10 +995,6 @@ public DefaultCacheManager getCacheManager() { return cacheManager; } - public CollectionCacheManager getCollectionCacheMgr() { - return collectionCacheMgr; - } - /** * Returns the index manager which handles all additional indexes not * being part of the database core. @@ -1540,7 +1563,7 @@ public void shutdown(final boolean killed) { statusObservers.forEach(statusReporter::addObserver); synchronized (this) { - final Thread statusThread = new Thread(statusReporter, "exist-broker-" + getId() + "-shutdown-statusReporter"); + final Thread statusThread = newInstanceThread(this, "shutdown-status-reporter", statusReporter); statusThread.start(); // release transaction log to allow remaining brokers to complete @@ -1626,6 +1649,8 @@ public void shutdown(final boolean killed) { } } + collectionCache.invalidateAll(); + // final notification to database services to shutdown servicesManager.shutdown(); @@ -1657,7 +1682,6 @@ public void shutdown(final boolean killed) { Configurator.clear(this); transactionManager = null; collectionCache = null; - collectionCacheMgr = null; xQueryPool = null; processMonitor = null; collectionConfigurationManager = null; @@ -1666,11 +1690,19 @@ public void shutdown(final boolean killed) { xmlReaderPool = null; shutdownListener = null; securityManager = null; + + if (lockManager != null) { + lockManager.getLockTable().shutdown(); + lockManager = null; + } + notificationService = null; statusObservers.clear(); statusReporter.terminate(); statusReporter = null; + +// instanceThreadGroup.destroy(); } } finally { status.process(Event.FINISHED_SHUTDOWN); @@ -1770,7 +1802,6 @@ public void printSystemInfo() { writer.format("Database instance: %s\n", getId()); 
writer.println("-------------------------------------------------------------------"); watchdog.ifPresent(wd -> wd.dump(writer)); - DeadlockDetection.debug(writer); final String s = sout.toString(); LOG.info(s); diff --git a/src/org/exist/storage/BrokerPoolService.java b/src/org/exist/storage/BrokerPoolService.java index 6e1c454831c..a4a33b52885 100644 --- a/src/org/exist/storage/BrokerPoolService.java +++ b/src/org/exist/storage/BrokerPoolService.java @@ -19,6 +19,7 @@ */ package org.exist.storage; +import org.exist.storage.txn.Txn; import org.exist.util.Configuration; /** @@ -65,10 +66,11 @@ default void prepare(final BrokerPool brokerPool) throws BrokerPoolServiceExcept * and the only system broker is passed to this function * * @param systemBroker The system mode broker + * @param transaction The transaction for the system service * * @throws BrokerPoolServiceException if an error occurs when starting the system service */ - default void startSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + default void startSystem(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { // nothing to start } @@ -78,16 +80,17 @@ default void startSystem(final DBBroker systemBroker) throws BrokerPoolServiceEx * mode * * As this point the database is not generally available, - * {@link #startSystem(DBBroker)} has already been called + * {@link #startSystem(DBBroker, Txn)} has already been called * for all services, any reindexing and recovery has completed * but there is still only a system broker which is passed to this * function * * @param systemBroker The system mode broker + * @param transaction The transaction for the pre-multi-user system service * * @throws BrokerPoolServiceException if an error occurs when starting the pre-multi-user system service */ - default void startPreMultiUserSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + default void startPreMultiUserSystem(final DBBroker 
systemBroker, final Txn transaction) throws BrokerPoolServiceException { //nothing to start } @@ -96,7 +99,7 @@ default void startPreMultiUserSystem(final DBBroker systemBroker) throws BrokerP * start of multi-user mode * * As this point the database is generally available, - * {@link #startPreMultiUserSystem(DBBroker)} has already been called + * {@link #startPreMultiUserSystem(DBBroker, Txn)} has already been called * for all services. You may be competing with other services and/or * users for database access * diff --git a/src/org/exist/storage/BrokerPoolServicesManager.java b/src/org/exist/storage/BrokerPoolServicesManager.java index 6f5dc70ec7e..fe5ab4caa63 100644 --- a/src/org/exist/storage/BrokerPoolServicesManager.java +++ b/src/org/exist/storage/BrokerPoolServicesManager.java @@ -20,6 +20,7 @@ package org.exist.storage; import net.jcip.annotations.NotThreadSafe; +import org.exist.storage.txn.Txn; import org.exist.util.Configuration; import com.evolvedbinary.j8fu.fsm.AtomicFSM; import com.evolvedbinary.j8fu.fsm.FSM; @@ -168,20 +169,21 @@ void prepareServices(final BrokerPool brokerPool) throws BrokerPoolServiceExcept * * @param systemBroker The System Broker which is available for * services to use to access the database + * @param transaction The transaction for the system services * * @throws BrokerPoolServiceException if any service causes an error during starting the system mode * * @throws IllegalStateException Thrown if there is an attempt to start a service * after any other service has entered the start pre-multi-user system mode. 
*/ - void startSystemServices(final DBBroker systemBroker) throws BrokerPoolServiceException { + void startSystemServices(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { states.process(ManagerEvent.ENTER_SYSTEM_MODE); for(final BrokerPoolService brokerPoolService : brokerPoolServices) { if(LOG.isTraceEnabled()) { LOG.trace("Notifying service: " + brokerPoolService.getClass().getSimpleName() + " of start system..."); } - brokerPoolService.startSystem(systemBroker); + brokerPoolService.startSystem(systemBroker, transaction); } } @@ -196,20 +198,21 @@ void startSystemServices(final DBBroker systemBroker) throws BrokerPoolServiceEx * * @param systemBroker The System Broker which is available for * services to use to access the database + * @param transaction The transaction for the pre-multi-user system services * * @throws BrokerPoolServiceException if any service causes an error during starting the pre-multi-user mode * * @throws IllegalStateException Thrown if there is an attempt to start pre-multi-user system a service * after any other service has entered multi-user. 
*/ - void startPreMultiUserSystemServices(final DBBroker systemBroker) throws BrokerPoolServiceException { + void startPreMultiUserSystemServices(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { states.process(ManagerEvent.PREPARE_ENTER_MULTI_USER_MODE); for(final BrokerPoolService brokerPoolService : brokerPoolServices) { if(LOG.isTraceEnabled()) { LOG.trace("Notifying service: " + brokerPoolService.getClass().getSimpleName() + " of start pre-multi-user..."); } - brokerPoolService.startPreMultiUserSystem(systemBroker); + brokerPoolService.startPreMultiUserSystem(systemBroker, transaction); } } diff --git a/src/org/exist/storage/BrokerPools.java b/src/org/exist/storage/BrokerPools.java index 826672828b6..1bc732dbcb0 100644 --- a/src/org/exist/storage/BrokerPools.java +++ b/src/org/exist/storage/BrokerPools.java @@ -23,15 +23,18 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.EXistException; +import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedLock; import org.exist.util.Configuration; import org.exist.util.DatabaseConfigurationException; import com.evolvedbinary.j8fu.function.ConsumerE; import java.util.*; -import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import static org.exist.util.ThreadUtils.newGlobalThread; + /** * This abstract class really just contains the static * methods for {@link BrokerPool} to help us organise the @@ -58,16 +61,13 @@ abstract class BrokerPools { // register a shutdown hook static { try { - Runtime.getRuntime().addShutdownHook(new Thread("BrokerPools-ShutdownHook") { + Runtime.getRuntime().addShutdownHook(newGlobalThread("BrokerPools.ShutdownHook", () -> { /** * Make sure that all instances are cleanly shut down. 
*/ - @Override - public void run() { - LOG.info("Executing shutdown thread"); - BrokerPools.stopAll(true); - } - }); + LOG.info("Executing shutdown thread"); + BrokerPools.stopAll(true); + })); LOG.debug("Shutdown hook registered"); } catch(final IllegalArgumentException e) { LOG.warn("Shutdown hook already registered"); @@ -151,21 +151,15 @@ public static void configure(final String instanceName, final int minBrokers, fi final Configuration config, final Optional statusObserver) throws EXistException { // optimize for read-concurrency as instances are configured (created) once and used many times - final Lock readLock = instancesLock.readLock(); - readLock.lock(); - try { + try(final ManagedLock readLock = ManagedLock.acquire(instancesLock, LockMode.READ_LOCK)) { if (instances.containsKey(instanceName)) { LOG.warn("Database instance '" + instanceName + "' is already configured"); return; } - } finally { - readLock.unlock(); } // fallback to probably having to create a new BrokerPool instance - final Lock writeLock = instancesLock.writeLock(); - writeLock.lock(); - try { + try(final ManagedLock writeLock = ManagedLock.acquire(instancesLock, LockMode.WRITE_LOCK)) { // check again, as another thread may have preempted us since we released the read-lock if (instances.containsKey(instanceName)) { LOG.warn("Database instance '" + instanceName + "' is already configured"); @@ -193,8 +187,6 @@ public static void configure(final String instanceName, final int minBrokers, fi } throw ee; } - } finally { - writeLock.unlock(); } } @@ -214,17 +206,13 @@ public static boolean isConfigured() { * @return true if it is configured */ public static boolean isConfigured(final String instanceName) { - final Lock readLock = instancesLock.readLock(); - readLock.lock(); - try { + try(final ManagedLock readLock = ManagedLock.acquire(instancesLock, LockMode.READ_LOCK)) { final BrokerPool instance = instances.get(instanceName); if (instance == null) { return false; } else { return 
instance.isInstanceConfigured(); } - } finally { - readLock.unlock(); } } @@ -249,9 +237,7 @@ public static BrokerPool getInstance() throws EXistException { */ public static BrokerPool getInstance(final String instanceName) throws EXistException { //Check if there is a database instance with the same id - final Lock readLock = instancesLock.readLock(); - readLock.lock(); - try { + try(final ManagedLock readLock = ManagedLock.acquire(instancesLock, LockMode.READ_LOCK)) { final BrokerPool instance = instances.get(instanceName); if (instance != null) { //TODO : call isConfigured(id) and throw an EXistException if relevant ? @@ -259,40 +245,26 @@ public static BrokerPool getInstance(final String instanceName) throws EXistExce } else { throw new EXistException("Database instance '" + instanceName + "' is not available"); } - } finally { - readLock.unlock(); } } static void removeInstance(final String instanceName) { - final Lock writeLock = instancesLock.writeLock(); - writeLock.lock(); - try { + try(final ManagedLock writeLock = ManagedLock.acquire(instancesLock, LockMode.WRITE_LOCK)) { instances.remove(instanceName); - } finally { - writeLock.unlock(); } } public static void readInstances(final ConsumerE reader) throws E { - final Lock readLock = instancesLock.readLock(); - readLock.lock(); - try { + try(final ManagedLock readLock = ManagedLock.acquire(instancesLock, LockMode.READ_LOCK)) { for (final BrokerPool instance : instances.values()) { reader.accept(instance); } - } finally { - readLock.unlock(); } } static int instancesCount() { - final Lock readLock = instancesLock.readLock(); - readLock.lock(); - try { + try(final ManagedLock readLock = ManagedLock.acquire(instancesLock, LockMode.READ_LOCK)) { return instances.size(); - } finally { - readLock.unlock(); } } @@ -303,9 +275,7 @@ static int instancesCount() { * @param killed true when invoked by an exiting JVM */ public static void stopAll(final boolean killed) { - final Lock writeLock = 
instancesLock.writeLock(); - writeLock.lock(); - try { + try(final ManagedLock writeLock = ManagedLock.acquire(instancesLock, LockMode.WRITE_LOCK)) { for (final BrokerPool instance : instances.values()) { if (instance.isInstanceConfigured()) { //Shut it down @@ -316,8 +286,6 @@ public static void stopAll(final boolean killed) { // Clear the living instances container : they are all sentenced to death... assert(instances.size() == 0); // should have all been removed by BrokerPool#shutdown(boolean) instances.clear(); - } finally { - writeLock.unlock(); } } } diff --git a/src/org/exist/storage/CacheManager.java b/src/org/exist/storage/CacheManager.java index 96bfd847bb3..0a8b4eecf75 100644 --- a/src/org/exist/storage/CacheManager.java +++ b/src/org/exist/storage/CacheManager.java @@ -23,9 +23,6 @@ public interface CacheManager { - String BTREE_CACHE = "BTREE"; - String DATA_CACHE = "DATA"; - /** * Register a cache, i.e. put it under control of * the cache manager. diff --git a/src/org/exist/storage/CollectionCacheManager.java b/src/org/exist/storage/CollectionCacheManager.java deleted file mode 100644 index 7969f5426ab..00000000000 --- a/src/org/exist/storage/CollectionCacheManager.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2001-2016 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - */ -package org.exist.storage; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.exist.collections.CollectionCache; -import org.exist.storage.cache.Cache; -import org.exist.util.Configuration; -import org.exist.util.DatabaseConfigurationException; -import org.exist.management.AgentFactory; -import org.exist.management.Agent; - -import java.util.Optional; - -public class CollectionCacheManager implements CacheManager, BrokerPoolService { - - private static final Logger LOG = LogManager.getLogger(CollectionCacheManager.class); - private static final int DEFAULT_CACHE_SIZE_BYTES = 64 * 1024 * 1024; // 64 MB - public static final String CACHE_SIZE_ATTRIBUTE = "collectionCache"; - public static final String PROPERTY_CACHE_SIZE_BYTES = "db-connection.collection-cache-mem"; - - private final String brokerPoolId; - private CollectionCache collectionCache; - private int maxCacheSize; - - public CollectionCacheManager(final BrokerPool pool, final CollectionCache cache) { - this.brokerPoolId = pool.getId(); - this.collectionCache = cache; - this.collectionCache.setCacheManager(this); - } - - @Override - public void configure(final Configuration configuration) throws BrokerPoolServiceException { - final int cacheSize = Optional.of(configuration.getInteger(PROPERTY_CACHE_SIZE_BYTES)).filter(size -> size > 0) - .orElse(DEFAULT_CACHE_SIZE_BYTES); - this.maxCacheSize = cacheSize; - - if(LOG.isDebugEnabled()){ - LOG.debug("collection collectionCache will be using " + this.maxCacheSize + " bytes max."); - } - - //TODO(AR) move to some start method... 
- registerMBean(brokerPoolId); - } - - @Override - public void shutdown() { - deregisterCache(collectionCache); - } - - @Override - public void registerCache(final Cache cache) { - } - - @Override - public void deregisterCache(final Cache cache) { - this.collectionCache = null; - } - - @Override - public int requestMem(final Cache cache) { - final int realSize = collectionCache.getRealSize(); - if (realSize < maxCacheSize) { - synchronized (this) { - final int newCacheSize = (int)(collectionCache.getBuffers() * collectionCache.getGrowthFactor()); - if (LOG.isDebugEnabled()) { - LOG.debug("Growing cache {} (a {}) from {} to {} bytes. Current memory usage = {}", - collectionCache.getName(), collectionCache.getClass().getName(), - collectionCache.getBuffers(), newCacheSize, realSize); - } - collectionCache.resize(newCacheSize); - return newCacheSize; - } - } - if(LOG.isDebugEnabled()) { - LOG.debug("Cache has reached max. size: " + realSize); - } - return -1; - } - - @Override - public void checkCaches() { - } - - @Override - public void checkDistribution() { - } - - /** - * @return Maximum size of all Caches in bytes - */ - @Override - public long getMaxTotal() { - return maxCacheSize; - } - - /** - * @return Maximum size of a single Cache in bytes - */ - @Override - public long getMaxSingle() { - return maxCacheSize; - } - - /** - * @return Current size of all Caches in bytes - */ - @Override - public long getCurrentSize() { - return collectionCache.getRealSize(); - } - - @Override - public int getDefaultInitialSize() { - return DEFAULT_CACHE_SIZE_BYTES; - } - - private void registerMBean(final String instanceName) { - final Agent agent = AgentFactory.getInstance(); - try { - agent.addMBean(instanceName, "org.exist.management." 
+ instanceName + - ":type=CollectionCacheManager", new org.exist.management.CacheManager(this)); - } catch (final DatabaseConfigurationException e) { - LOG.warn("Exception while registering cache mbean.", e); - } - } -} diff --git a/src/org/exist/storage/DBBroker.java b/src/org/exist/storage/DBBroker.java index 6e3e0381d26..99edef00020 100644 --- a/src/org/exist/storage/DBBroker.java +++ b/src/org/exist/storage/DBBroker.java @@ -19,6 +19,7 @@ */ package org.exist.storage; +import com.evolvedbinary.j8fu.tuple.Tuple2; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.Database; @@ -27,37 +28,39 @@ import org.exist.collections.Collection; import org.exist.collections.Collection.SubCollectionEntry; import org.exist.collections.triggers.TriggerException; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.IStoredNode; -import org.exist.dom.persistent.MutableDocumentSet; -import org.exist.dom.persistent.NodeHandle; -import org.exist.dom.persistent.NodeProxy; +import org.exist.dom.persistent.*; import org.exist.indexing.Index; import org.exist.indexing.IndexController; import org.exist.indexing.StreamListener; import org.exist.indexing.StructuralIndex; import org.exist.numbering.NodeId; +import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; import org.exist.security.Subject; import org.exist.stax.IEmbeddedXMLStreamReader; import org.exist.storage.btree.BTreeCallback; import org.exist.storage.dom.INodeIterator; +import org.exist.storage.lock.EnsureLocked; +import org.exist.storage.lock.EnsureUnlocked; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.Lock.LockType; import org.exist.storage.serializers.Serializer; import org.exist.storage.sync.Sync; +import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import org.exist.util.*; import 
org.exist.xmldb.XmldbURI; import org.exist.xquery.TerminatedException; import org.w3c.dom.Document; +import javax.annotation.Nullable; import javax.xml.stream.XMLStreamException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Path; import java.util.*; +import java.util.function.Function; /** * This is the base class for all database backends. All the basic database @@ -91,6 +94,11 @@ public abstract class DBBroker extends Observable implements AutoCloseable { //TODO : move elsewhere public final static String PROPERTY_XUPDATE_CONSISTENCY_CHECKS = "xupdate.consistency-checks"; + public static final String POSIX_CHOWN_RESTRICTED_ATTRIBUTE = "posix-chown-restricted"; + public static final String POSIX_CHOWN_RESTRICTED_PROPERTY = "db-connection.posix-chown-restricted"; + public static final String PRESERVE_ON_COPY_ATTRIBUTE = "preserve-on-copy"; + public static final String PRESERVE_ON_COPY_PROPERTY = "db-connection.preserve-on-copy"; + protected final static Logger LOG = LogManager.getLogger(DBBroker.class); protected boolean caseSensitive = true; @@ -117,6 +125,8 @@ public abstract class DBBroker extends Observable implements AutoCloseable { private final TimestampedReference indexController = new TimestampedReference<>(); + private final PreserveType preserveOnCopy; + public DBBroker(final BrokerPool pool, final Configuration config) { this.config = config; final Boolean temp = (Boolean) config.getProperty(NativeValueIndex.PROPERTY_INDEX_CASE_SENSITIVE); @@ -124,6 +134,7 @@ public DBBroker(final BrokerPool pool, final Configuration config) { caseSensitive = temp.booleanValue(); } this.pool = pool; + this.preserveOnCopy = config.getProperty(PRESERVE_ON_COPY_PROPERTY, PreserveType.NO_PRESERVE); } /** @@ -244,127 +255,96 @@ public void removeContentLoadingObserver(ContentLoadingObserver observer) { /** * Adds all the documents in the database to the specified DocumentSet. 
- * - * @param docs - * a (possibly empty) document set to which the found documents - * are added. - * + * + * WARNING: This is an incredibly expensive operation as it requires recursing through the Collection hierarchy and + * accessing every document. + * + * @param docs a (possibly empty) document set to which the found documents are added. */ - public abstract MutableDocumentSet getAllXMLResources(MutableDocumentSet docs) throws PermissionDeniedException; + public abstract MutableDocumentSet getAllXMLResources(MutableDocumentSet docs) throws PermissionDeniedException, LockException; public abstract void getResourcesFailsafe(BTreeCallback callback, boolean fullScan) throws TerminatedException; public abstract void getCollectionsFailsafe(BTreeCallback callback) throws TerminatedException; /** - * Returns the database collection identified by the specified path. The - * path should be absolute, e.g. /db/shakespeare. - * - * @return collection or null if no collection matches the path - * - * deprecated Use XmldbURI instead! - * - * public abstract Collection getCollection(String name); - */ - - /** - * Returns the database collection identified by the specified path. The - * path should be absolute, e.g. /db/shakespeare. - * - * @return collection or null if no collection matches the path - */ - public abstract Collection getCollection(XmldbURI uri) throws PermissionDeniedException; - - /** - * Returns the database collection identified by the specified path. The - * storage address is used to locate the collection without looking up the - * path in the btree. - * - * @return deprecated Use XmldbURI instead! - * - * public abstract Collection getCollection(String name, long address); - */ - - /** - * Returns the database collection identified by the specified path. The - * storage address is used to locate the collection without looking up the - * path in the btree. 
- * - * @return Database collection - * - * public abstract Collection getCollection(XmldbURI uri, long address); - */ - - /** - * Open a collection for reading or writing. The collection is identified by - * its absolute path, e.g. /db/shakespeare. It will be loaded and locked - * according to the lockMode argument. - * - * The caller should take care to release the collection lock properly. - * - * @param name - * the collection path - * @param lockMode - * one of the modes specified in class - * {@link org.exist.storage.lock.Lock} - * @return collection or null if no collection matches the path - * - * deprecated Use XmldbURI instead! + * Gets a database Collection. + * + * The Collection is identified by its absolute path, e.g. /db/shakespeare. + * The returned Collection will NOT HAVE a lock. + * + * The caller should take care to release any associated resource by + * calling {@link Collection#close()} + * + * In general, accessing Collections without a lock provides no consistency guarantees. + * This function should only be used where estimated reads are needed, no writes should + * be performed on a Collection retrieved by this function. + * If you are uncertain whether this function is safe for you to use, you should always + * use {@link #openCollection(XmldbURI, LockMode)} instead. * - * public abstract Collection openCollection(String name, LockMode lockMode); + * @return the Collection, or null if no Collection matches the path */ + @Nullable @EnsureUnlocked public abstract Collection getCollection(XmldbURI uri) throws PermissionDeniedException; /** - * Open a collection for reading or writing. The collection is identified by - * its absolute path, e.g. /db/shakespeare. It will be loaded and locked - * according to the lockMode argument. - * - * The caller should take care to release the collection lock properly. + * Open a Collection for reading or writing. + * + * The Collection is identified by its absolute path, e.g. /db/shakespeare. 
+ * It will be loaded and locked according to the lockMode argument. * - * @param uri - * The collection path - * @param lockMode - * one of the modes specified in class - * {@link org.exist.storage.lock.Lock} - * @return collection or null if no collection matches the path + * The caller should take care to release the Collection lock properly by + * calling {@link Collection#close()} * + * @param uri The Collection's path + * @param lockMode the mode for locking the Collection, as specified in {@link LockMode} + * + * @return the Collection, or null if no Collection matches the path */ - public abstract Collection openCollection(XmldbURI uri, LockMode lockMode) throws PermissionDeniedException; + @Nullable @EnsureLocked public abstract Collection openCollection(XmldbURI uri, + LockMode lockMode) throws PermissionDeniedException; public abstract List findCollectionsMatching(String regexp); - + /** - * Returns the database collection identified by the specified path. If the - * collection does not yet exist, it is created - including all ancestors. - * The path should be absolute, e.g. /db/shakespeare. - * - * @return collection or null if no collection matches the path - * - * deprecated Use XmldbURI instead! + * Gets the database Collection identified by the specified path. + * If the Collection does not yet exist, it is created - including all ancestors. + * The Collection is identified by its absolute path, e.g. /db/shakespeare. + * The returned Collection will NOT HAVE a lock. 
+ * + * The caller should take care to release any associated resource by + * calling {@link Collection#close()} * - * public Collection getOrCreateCollection(Txn transaction, String name) - * throws PermissionDeniedException { return null; } + * @param transaction The current transaction + * @param uri The Collection's URI + * + * @return The existing or created Collection + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws IOException If an error occurs whilst reading (get) or writing (create) a Collection to disk + * @throws TriggerException If a CollectionTrigger throws an exception */ + public abstract @EnsureUnlocked Collection getOrCreateCollection(Txn transaction, XmldbURI uri) + throws PermissionDeniedException, IOException, TriggerException; /** * Returns the database collection identified by the specified path. If the * collection does not yet exist, it is created - including all ancestors. * The path should be absolute, e.g. /db/shakespeare. - * + * * @param transaction The transaction, which registers the acquired write locks. The locks should be released on commit/abort. * @param uri The collection's URI + * @param creationAttributes the attributes to use if the collection needs to be created. * @return The collection or null if no collection matches the path * @throws PermissionDeniedException * @throws IOException - * @throws TriggerException + * @throws TriggerException */ - public abstract Collection getOrCreateCollection(Txn transaction, XmldbURI uri) - throws PermissionDeniedException, IOException, TriggerException; + public abstract Collection getOrCreateCollection(Txn transaction, XmldbURI uri, Optional> creationAttributes) + throws PermissionDeniedException, IOException, TriggerException; /** * Returns the configuration object used to initialize the current database * instance. 
- * */ public Configuration getConfiguration() { return config; @@ -388,12 +368,10 @@ public INodeIterator getNodeIterator(NodeHandle node) { * @return the document or null if no document could be found at the * specified location. * - * deprecated Use XmldbURI instead! - * * public abstract Document getXMLResource(String path) throws * PermissionDeniedException; */ - public abstract Document getXMLResource(XmldbURI docURI) throws PermissionDeniedException; + public abstract @EnsureUnlocked Document getXMLResource(XmldbURI docURI) throws PermissionDeniedException; /** * Get a document by its file name. The document's file name is used to @@ -404,16 +382,9 @@ public INodeIterator getNodeIterator(NodeHandle node) { * @param accessType The access mode for the resource e.g. {@link org.exist.security.Permission#READ} * @return The document value or null if no document could be found */ - public abstract DocumentImpl getResource(XmldbURI docURI, int accessType) throws PermissionDeniedException; + public abstract @EnsureUnlocked DocumentImpl getResource(XmldbURI docURI, int accessType) throws PermissionDeniedException; - public abstract DocumentImpl getResourceById(int collectionId, byte resourceType, int documentId) throws PermissionDeniedException; - - /** - * deprecated Use XmldbURI instead! - * - * public abstract DocumentImpl getXMLResource(String docPath, LockMode lockMode) - * throws PermissionDeniedException; - */ + public abstract @EnsureUnlocked DocumentImpl getResourceById(int collectionId, byte resourceType, int documentId) throws PermissionDeniedException; /** * Return the document stored at the specified path. The path should be @@ -422,14 +393,14 @@ public INodeIterator getNodeIterator(NodeHandle node) { * @return the document or null if no document could be found at the * specified location. 
*/ - public abstract DocumentImpl getXMLResource(XmldbURI docURI, LockMode lockMode) + @Nullable @EnsureLocked public abstract LockedDocument getXMLResource(XmldbURI docURI, LockMode lockMode) throws PermissionDeniedException; /** * Get a new document id that does not yet exist within the collection. * @throws EXistException */ - public abstract int getNextResourceId(Txn transaction, Collection collection) throws EXistException; + public abstract int getNextResourceId(Txn transaction) throws EXistException, LockException; /** * Get the string value of the specified node. @@ -462,38 +433,56 @@ public String getNodeValue(IStoredNode node, boolean addWhitespace) { * @param nodeId * the node's unique identifier */ - public abstract IStoredNode objectWith(Document doc, NodeId nodeId); + public abstract IStoredNode objectWith(@EnsureLocked(mode=LockMode.READ_LOCK) Document doc, NodeId nodeId); public abstract IStoredNode objectWith(NodeProxy p); /** - * Remove the collection and all its subcollections from the database. - * - * @throws PermissionDeniedException - * @throws IOException - * @throws TriggerException - * + * Remove the Collection and all of its sub-Collections from the database. + * + * @param transaction The current transaction + * @param collection The Collection to remove from the database + * + * @return true if the Collection was removed, false otherwise + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws IOException If an error occurs whilst removing the Collection from disk + * @throws TriggerException If a CollectionTrigger throws an exception */ public abstract boolean removeCollection(Txn transaction, - Collection collection) throws PermissionDeniedException, IOException, TriggerException; + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection collection) + throws PermissionDeniedException, IOException, TriggerException; /** * Remove a document from the database. 
* */ - public abstract void removeResource(Txn tx, DocumentImpl doc) throws IOException, PermissionDeniedException; + public abstract void removeResource(Txn tx, @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl doc) + throws IOException, PermissionDeniedException; /** * Remove a XML document from the database. * + * NOTE Should never be called directly, + * only for use from {@link Collection#removeXMLResource(Txn, DBBroker, XmldbURI)} + * or {@link DBBroker}. + * */ - public void removeXMLResource(Txn transaction, DocumentImpl document) + public void removeXMLResource(Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl document) throws PermissionDeniedException, IOException { removeXMLResource(transaction, document, true); } + /** + * Remove a XML document from the database. + * + * NOTE Should never be called directly, + * only for use from {@link Collection#removeXMLResource(Txn, DBBroker, XmldbURI)} + * or {@link DBBroker}. + * + */ public abstract void removeXMLResource(Txn transaction, - DocumentImpl document, boolean freeDocId) throws PermissionDeniedException, IOException; + @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl document, boolean freeDocId) throws PermissionDeniedException, IOException; public enum IndexMode { STORE, @@ -502,28 +491,36 @@ public enum IndexMode { } /** - * Reindex a collection. 
- * - * @param collectionName - * @throws PermissionDeniedException - * - * public abstract void reindexCollection(String collectionName) throws - * PermissionDeniedException; + * Reindex a Collection and its descendants + * + * NOTE: Read locks will be taken in a top-down, left-right manner + * on Collections as they are indexed + * + * @param transaction + * @param collectionUri The URI of the Collection to reindex + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst reindexing the Collection on disk */ - public abstract void reindexCollection(XmldbURI collectionName) - throws PermissionDeniedException, IOException; + public abstract void reindexCollection(Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK, type=LockType.COLLECTION) XmldbURI collectionUri) + throws PermissionDeniedException, IOException, LockException; - public abstract void reindexXMLResource(Txn txn, DocumentImpl doc); + public abstract void reindexXMLResource(final Txn txn, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl doc); - public abstract void reindexXMLResource(final Txn transaction, final DocumentImpl doc, final IndexMode mode); + public abstract void reindexXMLResource(final Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl doc, final IndexMode mode); /** * Repair indexes. Should delete all secondary indexes and rebuild them. * This method will be called after the recovery run has completed. 
* - * @throws PermissionDeniedException + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst repairing indexes the database */ - public abstract void repair() throws PermissionDeniedException, IOException; + public abstract void repair() throws PermissionDeniedException, IOException, LockException; /** * Repair core indexes (dom, collections ...). This method is called immediately @@ -532,31 +529,31 @@ public abstract void reindexCollection(XmldbURI collectionName) public abstract void repairPrimary(); /** - * Saves the specified collection to storage. Collections are usually cached - * in memory. If a collection is modified, this method needs to be called to - * make the changes persistent. Note: appending a new document to a - * collection does not require a save. - * - * @param transaction - * @param collection Collection to store - * @throws org.exist.security.PermissionDeniedException - * @throws IOException - * @throws TriggerException + * Saves the specified Collection to disk. Collections are usually cached in + * memory. If a Collection is modified, this method needs to be called to make + * the changes persistent. + * + * Note: adding or removing a document to a Collection does not require a save. However, + * modifying a Collection's metadata or adding or removing a sub-Collection does require + * a save. 
+ * + * NOTE: It is assumed that the caller holds a {@link LockMode#WRITE_LOCK} on the Collection + * + * @param transaction The current transaction + * @param collection The Collection to persist + * + * @throws IOException If an error occurs whilst writing the Collection to disk */ - public abstract void saveCollection(Txn transaction, Collection collection) - throws PermissionDeniedException, IOException, TriggerException; + public abstract void saveCollection(Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection collection) + throws IOException; - public void closeDocument() { - //Nothing to do - } + public abstract void closeDocument(); /** * Shut down the database instance. All open files, jdbc connections etc. * should be closed. */ - public void shutdown() { - //Nothing to do - } + public abstract void shutdown(); /** * Store a node into the database. This method is called by the parser to @@ -582,9 +579,9 @@ public void endElement(final IStoredNode node, NodePa * @param doc * the document's metadata to store. 
*/ - public abstract void storeXMLResource(Txn transaction, DocumentImpl doc); + public abstract void storeXMLResource(Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl doc); - public abstract void storeMetadata(Txn transaction, DocumentImpl doc) throws TriggerException; + public abstract void storeMetadata(Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl doc) throws TriggerException; /** * Stores the given data under the given binary resource descriptor @@ -597,7 +594,7 @@ public void endElement(final IStoredNode node, NodePa */ @Deprecated public abstract void storeBinaryResource(Txn transaction, - BinaryDocument blob, byte[] data) throws IOException; + @EnsureLocked(mode=LockMode.WRITE_LOCK) BinaryDocument blob, byte[] data) throws IOException; /** * Stores the given data under the given binary resource descriptor @@ -609,19 +606,19 @@ public abstract void storeBinaryResource(Txn transaction, * the document binary data as input stream */ public abstract void storeBinaryResource(Txn transaction, - BinaryDocument blob, InputStream is) throws IOException; + @EnsureLocked(mode=LockMode.WRITE_LOCK) BinaryDocument blob, InputStream is) throws IOException; public abstract void getCollectionResources(Collection.InternalAccess collectionInternalAccess); - public abstract void readBinaryResource(final BinaryDocument blob, + public abstract void readBinaryResource(@EnsureLocked(mode=LockMode.READ_LOCK) final BinaryDocument blob, final OutputStream os) throws IOException; - public abstract Path getBinaryFile(final BinaryDocument blob) throws IOException; + public abstract Path getBinaryFile(@EnsureLocked(mode=LockMode.READ_LOCK) final BinaryDocument blob) throws IOException; - public abstract InputStream getBinaryResource(final BinaryDocument blob) + public abstract InputStream getBinaryResource(@EnsureLocked(mode=LockMode.READ_LOCK) final BinaryDocument blob) throws IOException; - public abstract long getBinaryResourceSize(final 
BinaryDocument blob) + public abstract long getBinaryResourceSize(@EnsureLocked(mode=LockMode.READ_LOCK) final BinaryDocument blob) throws IOException; /** @@ -633,85 +630,143 @@ public abstract long getBinaryResourceSize(final BinaryDocument blob) * if you don't have the right to do this */ public abstract void removeBinaryResource(Txn transaction, - BinaryDocument blob) throws PermissionDeniedException,IOException; + @EnsureLocked(mode=LockMode.WRITE_LOCK) BinaryDocument blob) throws PermissionDeniedException,IOException; /** - * Move a collection and all its subcollections to another collection and + * Move a collection and all its sub-Collections to another Collection and * rename it. Moving a collection just modifies the collection path and all * resource paths. The data itself remains in place. - * - * @param collection - * the collection to move - * @param destination - * the destination collection - * @param newName - * the new name the collection should have in the destination - * collection - * - * @throws PermissionDeniedException - * @throws LockException - * @throws IOException - * @throws TriggerException + * + * NOTE: It is assumed that the caller holds a {@link LockMode#WRITE_LOCK} on both the + * `sourceCollection` and the `targetCollection` + * + * @param transaction The current transaction + * @param sourceCollection The Collection to move + * @param targetCollection The target Collection to move the sourceCollection into + * @param newName The new name the sourceCollection should have in the targetCollection + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst moving the Collection on disk + * @throws TriggerException If a CollectionTrigger throws an exception */ - public abstract void moveCollection(Txn transaction, Collection collection, - Collection destination, XmldbURI 
newName) - throws PermissionDeniedException, LockException, IOException, TriggerException; + public abstract void moveCollection(Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection sourceCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection targetCollection, XmldbURI newName) + throws PermissionDeniedException, LockException, IOException, TriggerException; /** - * Move a resource to the destination collection and rename it. - * - * @param doc - * the resource to move - * @param destination - * the destination collection - * @param newName - * the new name the resource should have in the destination - * collection - * - * @throws PermissionDeniedException - * @throws LockException - * @throws IOException - * @throws TriggerException + * Move a resource to the target Collection and rename it. + * + * NOTE: It is assumed that the caller holds a {@link LockMode#WRITE_LOCK} on the + * `sourceDocument` and its parent Collection, and the `targetCollection` + * + * @param transaction The current transaction + * @param sourceDocument The document to move + * @param targetCollection The target Collection to move the sourceDocument into + * @param newName The new name the sourceDocument should have in the targetCollection + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst moving the Document on disk + * @throws TriggerException If a CollectionTrigger throws an exception */ - public abstract void moveResource(Txn transaction, DocumentImpl doc, - Collection destination, XmldbURI newName) + public abstract void moveResource(Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl sourceDocument, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection targetCollection, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException; /** * 
Copy a collection to the destination collection and rename it. - * + * + * NOTE: It is assumed that the caller holds a {@link LockMode#READ_LOCK} + * `sourceCollection` and a {@link LockMode#WRITE_LOCK} on the `targetCollection` + * * @param transaction The transaction, which registers the acquired write locks. The locks should be released on commit/abort. - * @param collection The origin collection - * @param destination The destination parent collection + * @param sourceCollection The origin collection + * @param targetCollection The destination parent collection * @param newName The new name of the collection - * - * @throws PermissionDeniedException - * @throws LockException - * @throws IOException - * @throws TriggerException - * @throws EXistException + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst copying the Collection on disk + * @throws TriggerException If a CollectionTrigger throws an exception + * + * @deprecated Use {@link #copyCollection(Txn, Collection, Collection, XmldbURI, PreserveType)} */ - public abstract void copyCollection(Txn transaction, Collection collection, - Collection destination, XmldbURI newName) + @Deprecated + public abstract void copyCollection(Txn transaction, @EnsureLocked(mode=LockMode.READ_LOCK) Collection sourceCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException; + /** + * Copy a collection to the destination collection and rename it. + * + * NOTE: It is assumed that the caller holds a {@link LockMode#READ_LOCK} + * `sourceCollection` and a {@link LockMode#WRITE_LOCK} on the `targetCollection` + * + * @param transaction The transaction, which registers the acquired write locks. 
The locks should be released on commit/abort. + * @param sourceCollection The origin collection + * @param targetCollection The destination parent collection + * @param newName The new name of the collection + * @param preserve Cause the copy process to preserve the following attributes of each source in the copy: + * modification time, file mode, user ID, and group ID, as allowed by permissions. Access Control Lists (ACLs) + * will also be preserved. + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst copying the Collection on disk + * @throws TriggerException If a CollectionTrigger throws an exception + */ + public abstract void copyCollection(Txn transaction, @EnsureLocked(mode=LockMode.READ_LOCK) Collection sourceCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection targetCollection, XmldbURI newName, final PreserveType preserve) + throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException; + + /** * Copy a resource to the destination collection and rename it. 
- * - * @param doc - * the resource to copy - * @param destination - * the destination collection - * @param newName - * the new name the resource should have in the destination - * collection + * + * NOTE: It is assumed that the caller holds a {@link LockMode#READ_LOCK} on the + * `sourceDocument` and its parent Collection, + * and a {@link LockMode#WRITE_LOCK} on the `targetCollection` + * + * @param sourceDocument the resource to copy + * @param targetCollection the destination collection + * @param newName the new name the resource should have in the destination collection + * + * @throws PermissionDeniedException * @throws LockException - * @throws EXistException + * @throws EXistException + * + * @deprecated Use {@link #copyResource(Txn, DocumentImpl, Collection, XmldbURI, PreserveType)} */ - public abstract void copyResource(Txn transaction, DocumentImpl doc, - Collection destination, XmldbURI newName) - throws PermissionDeniedException, LockException, EXistException, IOException; + @Deprecated + public abstract void copyResource(Txn transaction, @EnsureLocked(mode=LockMode.READ_LOCK) DocumentImpl sourceDocument, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection targetCollection, XmldbURI newName) + throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException; + + /** + * Copy a resource to the destination collection and rename it. + * + * + * NOTE: It is assumed that the caller holds a {@link LockMode#READ_LOCK} on the + * `sourceDocument` and its parent Collection, + * and a {@link LockMode#WRITE_LOCK} on the `targetCollection` + * + * @param sourceDocument the resource to copy + * @param targetCollection the destination collection + * @param newName the new name the resource should have in the destination collection + * @param preserve Cause the copy process to preserve the following attributes of each source in the copy: + * modification time, file mode, user ID, and group ID, as allowed by permissions. 
Access Control Lists (ACLs) + * will also be preserved. + * + * @throws PermissionDeniedException + * @throws LockException + * @throws EXistException + */ + public abstract void copyResource(Txn transaction, @EnsureLocked(mode=LockMode.READ_LOCK) DocumentImpl sourceDocument, + @EnsureLocked(mode=LockMode.WRITE_LOCK) Collection targetCollection, XmldbURI newName, final PreserveType preserve) + throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException; /** * Defragment pages of this document. This will minimize the number of split @@ -720,7 +775,7 @@ public abstract void copyResource(Txn transaction, DocumentImpl doc, * @param doc * to defrag */ - public abstract void defragXMLResource(Txn transaction, DocumentImpl doc); + public abstract void defragXMLResource(Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) DocumentImpl doc); /** * Perform a consistency check on the specified document. @@ -729,9 +784,9 @@ public abstract void copyResource(Txn transaction, DocumentImpl doc, * * @param doc */ - public abstract void checkXMLResourceTree(DocumentImpl doc); + public abstract void checkXMLResourceTree(@EnsureLocked(mode=LockMode.READ_LOCK) DocumentImpl doc); - public abstract void checkXMLResourceConsistency(DocumentImpl doc) + public abstract void checkXMLResourceConsistency(@EnsureLocked(mode=LockMode.READ_LOCK) DocumentImpl doc) throws EXistException; /** @@ -797,7 +852,7 @@ public abstract void removeAllNodes(Txn transaction, IStoredNode node, * @throws PermissionDeniedException * @throws LockException */ - public abstract DocumentImpl storeTempResource( + public abstract @EnsureUnlocked DocumentImpl storeTempResource( org.exist.dom.memtree.DocumentImpl doc) throws EXistException, PermissionDeniedException, LockException; @@ -813,10 +868,19 @@ public abstract DocumentImpl storeTempResource( */ public abstract void checkAvailableMemory(); - /** - * - */ - public abstract MutableDocumentSet getXMLResourcesByDoctype(String 
doctype, MutableDocumentSet result) throws PermissionDeniedException; + /** + * Get all the documents in this database matching the given + * document-type's name. + * + * WARNING: This is an incredibly expensive operation as it requires recursing through the Collection hierarchy and + * accessing every document. + * + * @param doctype The doctype to match documents against + * @param result a (possibly empty) document set to which the found documents are added. + * + * @return The result + */ + public abstract MutableDocumentSet getXMLResourcesByDoctype(String doctype, MutableDocumentSet result) throws PermissionDeniedException, LockException; public int getReferenceCount() { return referenceCount; @@ -853,7 +917,40 @@ public abstract IEmbeddedXMLStreamReader newXMLStreamReader(NodeHandle node, boo public abstract void backupToArchive(RawDataBackup backup) throws IOException, EXistException; - public abstract void readCollectionEntry(SubCollectionEntry entry); + /** + * Reads and populates the metadata for a sub-Collection + * + * The entry to read is determined by {@link SubCollectionEntry#uri} + * + * NOTE: It is assumed that the caller holds a {@link LockMode#READ_LOCK} (or better) + * on the Collection indicated in `entry`. + * + * @param entry The sub-Collection entry to populate + * + * @throws IOException If an error occurs whilst reading (get) or writing (create) a Collection to disk + * @throws LockException if we are unable to obtain a lock on the collections.dbx + */ + public abstract void readCollectionEntry(SubCollectionEntry entry) throws IOException, LockException; + + /** + * Determines if Collection or Document attributes be preserved on copy, + * by comparing the argument with the global system settings. + * + * Returns true if either: + * 1.) The {@code preserve} argument is {@link PreserveType#PRESERVE}. + * 2.) The {@code preserve} argument is {@link PreserveType#DEFAULT}, + * and the global system setting is {@link PreserveType#PRESERVE}. 
+ * + * @param preserve The call-specific preserve flag. + * + * @return true if attributes should be preserved. + */ + public boolean preserveOnCopy(final PreserveType preserve) { + Objects.requireNonNull(preserve); + + return PreserveType.PRESERVE == preserve || + (PreserveType.DEFAULT == preserve && PreserveType.PRESERVE == this.preserveOnCopy); + } @Override public void close() { @@ -868,6 +965,67 @@ public void release() { pool.release(this); } + public final static String PROP_DISABLE_SINGLE_THREAD_OVERLAPPING_TRANSACTION_CHECKS = "exist.disable-single-thread-overlapping-transaction-checks"; + private final static boolean DISABLE_SINGLE_THREAD_OVERLAPPING_TRANSACTION_CHECKS = Boolean.valueOf(System.getProperty(PROP_DISABLE_SINGLE_THREAD_OVERLAPPING_TRANSACTION_CHECKS, "false")); + private Txn currentTransaction = null; + public synchronized void setCurrentTransaction(final Txn transaction) { + if (DISABLE_SINGLE_THREAD_OVERLAPPING_TRANSACTION_CHECKS) { + currentTransaction = transaction; + } else { + if (currentTransaction == null ^ transaction == null) { + currentTransaction = transaction; + } else { + throw new IllegalStateException("Broker already has a transaction set"); + } + } + } + + public synchronized Txn getCurrentTransaction() { + return currentTransaction; + } + + /** + * Gets the current transaction, or if there is no current transaction + * for this thread (i.e. broker), then we begin a new transaction. + * + * The caller is *always* responsible for calling .close on the transaction + * + * Note - When there is an existing transaction, calling .close on the object + * returned (e.g. ReusableTxn) from this function will only cause a minor state + * change and not close the original transaction. That is intentional, as it will + * eventually be closed by the creator of the original transaction (i.e. 
the code + * site that began the first transaction) + * + * @Deprecated This is a stepping-stone; Transactions should be explicitly passed + * around. This will be removed in the near future. + */ + @Deprecated + public synchronized Txn continueOrBeginTransaction() { + final Txn currentTransaction = getCurrentTransaction(); + if(currentTransaction != null) { + return new Txn.ReusableTxn(currentTransaction); + } else { + final TransactionManager tm = getBrokerPool().getTransactionManager(); + return tm.beginTransaction(); //TransactionManager will call this#setCurrentTransaction + } + } + + //TODO the object passed to the function e.g. Txn should not implement .close + //if we are using a function passing approach like this, i.e. one point of + //responsibility and WE HERE should be responsible for closing the transaction. + //we could return a sub-class of Txn which is uncloseable like Txn.reuseable or similar + //also getCurrentTransaction should then be made private +// private T transact(final Function transactee) throws EXistException { +// final Txn existing = getCurrentTransaction(); +// if(existing == null) { +// try(final Txn txn = pool.getTransactionManager().beginTransaction()) { +// return transactee.apply(txn); +// } +// } else { +// return transactee.apply(existing); +// } +// } + /** * Represents a {@link Subject} change * made to a broker @@ -905,5 +1063,29 @@ final static TraceableSubjectChange pop(final Subject subject, final String id) return new TraceableSubjectChange(Change.POP, subject, id); } } -} + /** + * Indicates the behaviour for not preserving or + * preserving Collection of Document attributes + * when making a copy. + */ + public enum PreserveType { + /** + * Implies whatever the default is, + * as configured in conf.xml: /exist/db-connection/@preserve-on-copy + */ + DEFAULT, + + /** + * Collection or Document attributes are not preserved + * when making a copy. 
+ */ + NO_PRESERVE, + + /** + * Collection or Document attributes are preserved + * when making a copy. + */ + PRESERVE + } +} diff --git a/src/org/exist/storage/DefaultCacheManager.java b/src/org/exist/storage/DefaultCacheManager.java index 365344360ce..b7853b3cbcf 100644 --- a/src/org/exist/storage/DefaultCacheManager.java +++ b/src/org/exist/storage/DefaultCacheManager.java @@ -68,7 +68,7 @@ public class DefaultCacheManager implements CacheManager, BrokerPoolService public static final int DEFAULT_CACHE_SIZE = 64; public static final String CACHE_SIZE_ATTRIBUTE = "cacheSize"; public static final String PROPERTY_CACHE_SIZE = "db-connection.cache-size"; - + public static final String DEFAULT_CACHE_CHECK_MAX_SIZE_STRING = "true"; public static final String CACHE_CHECK_MAX_SIZE_ATTRIBUTE = "checkMaxCacheSize"; public static final String PROPERTY_CACHE_CHECK_MAX_SIZE = "db-connection.check-max-cache-size"; @@ -126,36 +126,36 @@ public DefaultCacheManager( BrokerPool pool ) shrinkThreshold = configuration.getInteger( SHRINK_THRESHOLD_PROPERTY ); totalMem = cacheSize * 1024L * 1024L; - + final Boolean checkMaxCache = (Boolean)configuration.getProperty( PROPERTY_CACHE_CHECK_MAX_SIZE ); - + if( checkMaxCache == null || checkMaxCache.booleanValue() ) { - final long max = Runtime.getRuntime().maxMemory(); - long maxCache = ( max >= ( 768 * 1024 * 1024 ) ) ? ( max / 2 ) : ( max / 3 ); - - if( totalMem > maxCache ) { - totalMem = maxCache; - - LOG.warn( "The cacheSize=\"" + cacheSize + - "\" setting in conf.xml is too large. Java has only " + ( max / 1024 ) + "k available. Cache manager will not use more than " + ( totalMem / 1024L ) + "k " + - "to avoid memory issues which may lead to database corruptions." - ); - } - } else { - LOG.warn( "Checking of Max Cache Size disabled by user, this could cause memory issues which may lead to database corruptions if you don't have enough memory allocated to your JVM!" 
); - } - + final long max = Runtime.getRuntime().maxMemory(); + long maxCache = ( max >= ( 768 * 1024 * 1024 ) ) ? ( max / 2 ) : ( max / 3 ); + + if( totalMem > maxCache ) { + totalMem = maxCache; + + LOG.warn( "The cacheSize=\"" + cacheSize + + "\" setting in conf.xml is too large. Java has only " + ( max / 1024 ) + "k available. Cache manager will not use more than " + ( totalMem / 1024L ) + "k " + + "to avoid memory issues which may lead to database corruptions." + ); + } + } else { + LOG.warn( "Checking of Max Cache Size disabled by user, this could cause memory issues which may lead to database corruptions if you don't have enough memory allocated to your JVM!" ); + } + int buffers = (int)( totalMem / pageSize ); this.totalPageCount = buffers; this.maxCacheSize = (int)( totalPageCount * MAX_MEM_USE ); final NumberFormat nf = NumberFormat.getNumberInstance(); - - LOG.info( "Cache settings: " + nf.format( totalMem / 1024L ) + "k; totalPages: " + nf.format( totalPageCount ) + - "; maxCacheSize: " + nf.format( maxCacheSize ) + - "; cacheShrinkThreshold: " + nf.format( shrinkThreshold ) + + LOG.info( "Cache settings: " + nf.format( totalMem / 1024L ) + "k; totalPages: " + nf.format( totalPageCount ) + + "; maxCacheSize: " + nf.format( maxCacheSize ) + + "; cacheShrinkThreshold: " + nf.format( shrinkThreshold ) ); - + registerMBean(); } @@ -350,28 +350,21 @@ public int getDefaultInitialSize() } - private void registerMBean() - { + private void registerMBean() { final Agent agent = AgentFactory.getInstance(); - try { - agent.addMBean( instanceName, "org.exist.management." 
+ instanceName + ":type=CacheManager", new org.exist.management.CacheManager( this ) ); - } - catch( final DatabaseConfigurationException e ) { - LOG.warn( "Exception while registering cache mbean.", e ); + agent.addMBean(new org.exist.management.CacheManager(instanceName,this)); + } catch (final DatabaseConfigurationException e) { + LOG.warn("Exception while registering JMX CacheManager MBean.", e); } } - - private void registerMBean( Cache cache ) - { + private void registerMBean(final Cache cache) { final Agent agent = AgentFactory.getInstance(); - try { - agent.addMBean( instanceName, "org.exist.management." + instanceName + ":type=CacheManager.Cache,name=" + cache.getName() + ",cache-type=" + cache.getType(), new org.exist.management.Cache( cache ) ); - } - catch( final DatabaseConfigurationException e ) { - LOG.warn( "Exception while registering cache mbean.", e ); + agent.addMBean(new org.exist.management.Cache(instanceName, cache)); + } catch (final DatabaseConfigurationException e) { + LOG.warn("Exception while registering JMX Cache MBean.", e); } } } diff --git a/src/org/exist/storage/FluentBrokerAPI.java b/src/org/exist/storage/FluentBrokerAPI.java new file mode 100644 index 00000000000..36407d0be52 --- /dev/null +++ b/src/org/exist/storage/FluentBrokerAPI.java @@ -0,0 +1,505 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.storage; + +import com.evolvedbinary.j8fu.tuple.Tuple2; +import com.evolvedbinary.j8fu.tuple.Tuple3; +import org.exist.EXistException; +import org.exist.collections.Collection; +import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; +import org.exist.security.PermissionDeniedException; +import org.exist.storage.lock.Lock.LockMode; +import org.exist.util.LockException; +import org.exist.xmldb.XmldbURI; + +import javax.annotation.Nullable; +import java.util.Optional; +import java.util.function.BiFunction; +import java.util.function.Function; + +/** + * A fluent lambda API for working + * with Documents and Collections. + * + * @author cf.apply(c)); + + if(collectionDocumentFun.isPresent() || documentFun.isPresent()) { + + final Tuple2 docAccess = documentLookupFun.apply(collection); + + try(final LockedDocument lockedDocument = collection.getDocumentWithLock(broker, docAccess._1, docAccess._2)) { + final DocumentImpl document = lockedDocument.getDocument(); + + collectionDocumentFunResult = collectionDocumentFun.map(cdf -> cdf.apply(c, document)); + + // release the Collection lock early + collection.close(); + collection = null; // signal closed + + documentFunResult = documentFun.map(df -> df.apply(document)); + } + } else { + collectionDocumentFunResult = Optional.empty(); + documentFunResult = Optional.empty(); + } + } finally { + // catch-all to close the collection in case of an exception and it hasn't been closed + if(collection != null) { + collection.close(); + collection = null; + } + } + } + + return new Tuple3<>(collectionFunResult, collectionDocumentFunResult, documentFunResult); + } + + + +// public class FluentBrokerAPIBuilder_ColN { +// private final Tuple2 
collectionsAndLockModes[]; +// +// private FluentBrokerAPIBuilder_ColN(final Tuple2... collectionsAndLockModes) { +// this.collectionsAndLockModes = collectionsAndLockModes; +// } +// +// public Object[] execute(final Function... collectionOps) { +// if(collectionsAndLockModes.length != collectionOps.length) { +// throw new IllegalStateException(); +// } +// } +// } + } +} diff --git a/src/org/exist/storage/NativeBroker.java b/src/org/exist/storage/NativeBroker.java index e08f4142c78..52937892384 100644 --- a/src/org/exist/storage/NativeBroker.java +++ b/src/org/exist/storage/NativeBroker.java @@ -19,24 +19,14 @@ */ package org.exist.storage; +import com.evolvedbinary.j8fu.function.FunctionE; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.collections.*; import org.exist.collections.Collection; import org.exist.dom.memtree.DOMIndexer; -import org.exist.dom.persistent.AttrImpl; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.AbstractCharacterData; -import org.exist.dom.persistent.DefaultDocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.DocumentMetadata; -import org.exist.dom.persistent.ElementImpl; -import org.exist.dom.persistent.IStoredNode; -import org.exist.dom.persistent.MutableDocumentSet; -import org.exist.dom.persistent.NodeHandle; -import org.exist.dom.persistent.NodeProxy; +import org.exist.dom.persistent.*; import org.exist.dom.QName; -import org.exist.dom.persistent.TextImpl; import org.exist.EXistException; import org.exist.Indexer; import org.exist.backup.RawDataBackup; @@ -48,6 +38,7 @@ import org.exist.indexing.StructuralIndex; import org.exist.numbering.NodeId; import org.exist.security.*; +import org.exist.security.internal.aider.ACEAider; import org.exist.stax.EmbeddedXMLStreamReader; import org.exist.stax.IEmbeddedXMLStreamReader; import org.exist.storage.btree.*; @@ -61,8 +52,9 @@ import 
org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; import org.exist.storage.journal.*; -import org.exist.storage.lock.Lock; +import org.exist.storage.lock.*; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.Lock.LockType; import org.exist.storage.serializers.NativeSerializer; import org.exist.storage.serializers.Serializer; import org.exist.storage.sync.Sync; @@ -79,22 +71,26 @@ import org.w3c.dom.Node; import org.w3c.dom.NodeList; +import javax.annotation.Nullable; import javax.xml.stream.XMLStreamException; import java.io.*; +import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.text.NumberFormat; import java.util.*; +import java.util.concurrent.locks.ReentrantLock; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.exist.dom.persistent.StoredNode; import org.exist.storage.dom.INodeIterator; import com.evolvedbinary.j8fu.tuple.Tuple2; +import static org.exist.security.Permission.DEFAULT_TEMPORARY_COLLECTION_PERM; + /** * Main class for the native XML storage backend. * By "native" it is meant file-based, embedded backend. @@ -192,7 +188,9 @@ public class NativeBroker extends DBBroker { private NodeProcessor nodeProcessor = new NodeProcessor(); private IEmbeddedXMLStreamReader streamReader = null; + private IEmbeddedXMLStreamReader streamReaderNG = null; + private final LockManager lockManager; private final Optional logManager; private boolean incrementalDocIds = false; @@ -200,6 +198,7 @@ public class NativeBroker extends DBBroker { /** initialize database; read configuration, etc. 
*/ public NativeBroker(final BrokerPool pool, final Configuration config) throws EXistException { super(pool, config); + this.lockManager = pool.getLockManager(); this.logManager = pool.getJournalManager(); LOG.debug("Initializing broker " + hashCode()); @@ -352,7 +351,7 @@ private void notifyDropIndex(final Collection collection) { } } - private void notifyDropIndex(final DocumentImpl doc) throws ReadOnlyException { + private void notifyDropIndex(final DocumentImpl doc) { for(final ContentLoadingObserver observer : contentLoadingObservers) { observer.dropIndex(doc); } @@ -562,19 +561,19 @@ public NativeValueIndex getValueIndex() { @Override public IEmbeddedXMLStreamReader getXMLStreamReader(final NodeHandle node, final boolean reportAttributes) - throws IOException, XMLStreamException { - if(streamReader == null) { + throws IOException, XMLStreamException { + if(streamReaderNG == null) { final RawNodeIterator iterator = new RawNodeIterator(this, domDb, node); - streamReader = new EmbeddedXMLStreamReader(this, node.getOwnerDocument(), iterator, node, reportAttributes); + streamReaderNG = new EmbeddedXMLStreamReader(this, node.getOwnerDocument(), iterator, node, reportAttributes); } else { - streamReader.reposition(this, node, reportAttributes); + streamReaderNG.reposition(this, node, reportAttributes); } - return streamReader; + return streamReaderNG; } @Override public IEmbeddedXMLStreamReader newXMLStreamReader(final NodeHandle node, final boolean reportAttributes) - throws IOException, XMLStreamException { + throws IOException, XMLStreamException { final RawNodeIterator iterator = new RawNodeIterator(this, domDb, node); return new EmbeddedXMLStreamReader(this, node.getOwnerDocument(), iterator, null, reportAttributes); } @@ -630,13 +629,13 @@ public XmldbURI prepend(final XmldbURI uri) { * @throws IOException * @throws TriggerException */ - private Tuple2 getOrCreateTempCollection(final Txn transaction) + private @EnsureUnlocked Tuple2 
getOrCreateTempCollection(final Txn transaction) throws LockException, PermissionDeniedException, IOException, TriggerException { try { pushSubject(pool.getSecurityManager().getSystemSubject()); - final Tuple2 temp = getOrCreateCollectionExplicit(transaction, XmldbURI.TEMP_COLLECTION_URI); + final Tuple2 temp = getOrCreateCollectionExplicit(transaction, XmldbURI.TEMP_COLLECTION_URI, Optional.empty()); if (temp._1) { - temp._2.setPermissions(0771); + temp._2.setPermissions(this, DEFAULT_TEMPORARY_COLLECTION_PERM); saveCollection(transaction, temp._2); } return temp; @@ -671,143 +670,229 @@ private final String readInitCollectionConfig() { @Override public Collection getOrCreateCollection(final Txn transaction, XmldbURI name) throws PermissionDeniedException, IOException, TriggerException { - return getOrCreateCollectionExplicit(transaction, name)._2; + return getOrCreateCollectionExplicit(transaction, name, Optional.empty())._2; + } + + @Override + public Collection getOrCreateCollection(final Txn transaction, XmldbURI name, final Optional> creationAttributes) throws PermissionDeniedException, IOException, TriggerException { + return getOrCreateCollectionExplicit(transaction, name, creationAttributes)._2; } /** + * Gets the database Collection identified by the specified path. + * If the Collection does not yet exist, it is created - including all ancestors. + * The Collection is identified by its absolute path, e.g. /db/shakespeare. + * The returned Collection will NOT HAVE a lock. + * + * The caller should take care to release any associated resource by + * calling {@link Collection#close()} + * + * @param transaction The current transaction + * @param path The Collection's URI + * * @return A tuple whose first boolean value is set to true if the - * collection was created, or false if the collection already existed + * collection was created, or false if the collection already existed. 
The + * second value is the existing or created Collection + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws IOException If an error occurs whilst reading (get) or writing (create) a Collection to disk + * @throws TriggerException If a CollectionTrigger throws an exception */ - private Tuple2 getOrCreateCollectionExplicit(final Txn transaction, XmldbURI name) throws PermissionDeniedException, IOException, TriggerException { - name = prepend(name.normalizeCollectionPath()); + private Tuple2 getOrCreateCollectionExplicit(final Txn transaction, final XmldbURI path, final Optional> creationAttributes) throws PermissionDeniedException, IOException, TriggerException { + final XmldbURI collectionUri = prepend(path.normalizeCollectionPath()); + final XmldbURI parentCollectionUri = collectionUri.removeLastSegment(); final CollectionCache collectionsCache = pool.getCollectionsCache(); - boolean created = false; - synchronized(collectionsCache) { - try { - //TODO : resolve URIs ! 
- final XmldbURI[] segments = name.getPathSegments(); - XmldbURI path = XmldbURI.ROOT_COLLECTION_URI; - Collection sub; - Collection current = getCollection(XmldbURI.ROOT_COLLECTION_URI); - if(current == null) { - - if(LOG.isDebugEnabled()) { - LOG.debug("Creating root collection '" + XmldbURI.ROOT_COLLECTION_URI + "'"); - } - - final CollectionTrigger trigger = new CollectionTriggers(this); - trigger.beforeCreateCollection(this, transaction, XmldbURI.ROOT_COLLECTION_URI); + try { - current = new MutableCollection(this, XmldbURI.ROOT_COLLECTION_URI); - current.setId(getNextCollectionId(transaction)); - current.setCreationTime(System.currentTimeMillis()); + // 1) optimize for the existence of the Collection in the cache + try (final ManagedCollectionLock collectionLock = readLockCollection(collectionUri)) { + final Collection collection = collectionsCache.getIfPresent(collectionUri); + if (collection != null) { + return new Tuple2<>(false, collection); + } + } - if(transaction != null) { - transaction.acquireLock(current.getLock(), LockMode.WRITE_LOCK); - } + // 2) try and read the Collection from disk, if not on disk then create it + try (final ManagedCollectionLock parentCollectionLock = writeLockCollection(parentCollectionUri.numSegments() == 0 ? XmldbURI.ROOT_COLLECTION_URI : parentCollectionUri)) { // we write lock the parent (as we may need to add a new Collection to it) - //TODO : acquire lock manually if transaction is null ? - saveCollection(transaction, current); - created = true; + // check for preemption between READ -> WRITE lock, is the Collection now in the cache? + final Collection collection = collectionsCache.getIfPresent(collectionUri); + if (collection != null) { + return new Tuple2<>(false, collection); + } - //adding to make it available @ afterCreateCollection - collectionsCache.add(current); + // is the parent Collection in the cache? + if (parentCollectionUri == XmldbURI.EMPTY_URI) { + // no parent... so, this is the root collection! 
+ return getOrCreateCollectionExplicit_rootCollection(transaction, collectionUri, collectionsCache); + } else { + final Collection parentCollection = collectionsCache.getIfPresent(parentCollectionUri); + if (parentCollection != null) { + // parent collection is in cache, is our Collection present on disk? + final Collection loadedCollection = loadCollection(collectionUri, BFile.UNKNOWN_ADDRESS); - trigger.afterCreateCollection(this, transaction, current); + if (loadedCollection != null) { + // loaded it from disk - //import an initial collection configuration - try { - final String initCollectionConfig = readInitCollectionConfig(); - if(initCollectionConfig != null) { - CollectionConfigurationManager collectionConfigurationManager = pool.getConfigurationManager(); - if(collectionConfigurationManager == null) { - if(pool.getConfigurationManager() == null) { - throw new IllegalStateException(); - //might not yet have been initialised - //pool.initCollectionConfigurationManager(this); - } - collectionConfigurationManager = pool.getConfigurationManager(); - } + // add it to the cache and return it + collectionsCache.put(loadedCollection); + return new Tuple2<>(false, loadedCollection); - if(collectionConfigurationManager != null) { - collectionConfigurationManager.addConfiguration(transaction, this, current, initCollectionConfig); - } + } else { + // not on disk, create the collection + return new Tuple2<>(true, createCollection(transaction, parentCollection, collectionUri, collectionsCache, creationAttributes)); } - } catch(final CollectionConfigurationException cce) { - LOG.error("Could not load initial collection configuration for /db: " + cce.getMessage(), cce); + + } else { + /* + * No parent Collection in the cache so that needs to be loaded/created + * (or will be read from cache if we are pre-empted) before we can create this Collection. 
+ * However to do this, we need to yield the collectionLock, so we will continue outside + * the ManagedCollectionLock at (3) + */ } } + } - for(int i = 1; i < segments.length; i++) { - final XmldbURI temp = segments[i]; - path = path.append(temp); - if(current.hasChildCollectionNoLock(this, temp)) { - current = getCollection(path); - if(current == null) { - LOG.error("Collection '" + path + "' found in subCollections set but is missing from collections.dbx!"); - } - } else { + //TODO(AR) below, should we just fall back to recursive descent creating the collection hierarchy in the same manner that getOrCreateCollection used to do? - if(isReadOnly()) { - throw new IOException(DATABASE_IS_READ_ONLY); - } + // 3) No parent collection was previously found in cache so we need to call this function for the parent Collection and then ourselves + final Tuple2 newOrExistingParentCollection = getOrCreateCollectionExplicit(transaction, parentCollectionUri, creationAttributes); + return getOrCreateCollectionExplicit(transaction, collectionUri, creationAttributes); - if(!current.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { - LOG.error("Permission denied to create collection '" + path + "'"); - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' not allowed to write to collection '" + current.getURI() + "'"); - } + } catch(final ReadOnlyException e) { + throw new PermissionDeniedException(DATABASE_IS_READ_ONLY); + } catch(final LockException e) { + throw new IOException(e); + } + } - if(!current.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { - LOG.error("Permission denied to create collection '" + path + "'"); - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' not allowed to execute to collection '" + current.getURI() + "'"); - } + private Tuple2 getOrCreateCollectionExplicit_rootCollection(final Txn transaction, final XmldbURI collectionUri, final 
CollectionCache collectionsCache) throws PermissionDeniedException, IOException, LockException, ReadOnlyException, TriggerException { + // this is the root collection, so no parent, is the Collection present on disk? - if(current.hasDocument(this, path.lastSegment())) { - LOG.error("Collection '" + current.getURI() + "' have document '" + path.lastSegment() + "'"); - throw new PermissionDeniedException("Collection '" + current.getURI() + "' have document '" + path.lastSegment() + "'."); - } + final Collection loadedRootCollection = loadCollection(collectionUri, BFile.UNKNOWN_ADDRESS); - if(LOG.isDebugEnabled()) { - LOG.debug("Creating collection '" + path + "'..."); - } + if (loadedRootCollection != null) { + // loaded it from disk - final CollectionTrigger trigger = new CollectionTriggers(this, current); - trigger.beforeCreateCollection(this, transaction, path); + // add it to the cache and return it + collectionsCache.put(loadedRootCollection); + return new Tuple2<>(false, loadedRootCollection); + } else { + // not on disk, create the root collection + final Collection rootCollection = createCollection(transaction, null, collectionUri, collectionsCache, Optional.empty()); - sub = new MutableCollection(this, path); - //inherit the group to the sub-collection if current collection is setGid - if(current.getPermissions().isSetGid()) { - sub.getPermissions().setGroupFrom(current.getPermissions()); //inherit group - sub.getPermissions().setSetGid(true); //inherit setGid bit + //import an initial collection configuration + try { + final String initCollectionConfig = readInitCollectionConfig(); + if(initCollectionConfig != null) { + CollectionConfigurationManager collectionConfigurationManager = pool.getConfigurationManager(); + if(collectionConfigurationManager == null) { + if(pool.getConfigurationManager() == null) { + throw new IllegalStateException(); + //might not yet have been initialised + //pool.initCollectionConfigurationManager(this, transaction); } - 
sub.setId(getNextCollectionId(transaction)); + collectionConfigurationManager = pool.getConfigurationManager(); + } - if(transaction != null) { - transaction.acquireLock(sub.getLock(), LockMode.WRITE_LOCK); - } + if(collectionConfigurationManager != null) { + collectionConfigurationManager.addConfiguration(transaction, this, rootCollection, initCollectionConfig); + } + } + } catch(final CollectionConfigurationException cce) { + LOG.error("Could not load initial collection configuration for /db: " + cce.getMessage(), cce); + } - //TODO : acquire lock manually if transaction is null ? - current.addCollection(this, sub, true); - saveCollection(transaction, current); - created = true; + return new Tuple2<>(true, rootCollection); + } + } - //adding to make it available @ afterCreateCollection - collectionsCache.add(sub); + /** + * NOTE - When this is called there must be a WRITE_LOCK on collectionUri + * and a WRITE_LOCK on parentCollection (if it is not null) + */ + private @EnsureUnlocked Collection createCollection(final Txn transaction, + @Nullable @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection parentCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK, type=LockType.COLLECTION) final XmldbURI collectionUri, + final CollectionCache collectionCache, final Optional> creationAttributes) + throws TriggerException, ReadOnlyException, PermissionDeniedException, LockException, IOException { + + final CollectionTrigger trigger; + if(parentCollection == null) { + trigger = new CollectionTriggers(this, transaction); + } else { + trigger = new CollectionTriggers(this, transaction, parentCollection); + } + trigger.beforeCreateCollection(this, transaction, collectionUri); - trigger.afterCreateCollection(this, transaction, sub); + final Collection collectionObj = createCollectionObject(transaction, parentCollection, collectionUri, creationAttributes); + saveCollection(transaction, collectionObj); - current = sub; - } - } - return new Tuple2<>(created, current); - } 
catch(final LockException e) { - LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); + if(parentCollection != null) { + parentCollection.addCollection(this, collectionObj); + saveCollection(transaction, parentCollection); + } + + collectionCache.put(collectionObj); + + trigger.afterCreateCollection(this, transaction, collectionObj); + + return collectionObj; + } + + /** + * NOTE - When this is called there must be a WRITE_LOCK on collectionUri + * and at least a READ_LOCK on parentCollection (if it is not null) + */ + private Collection createCollectionObject(final Txn transaction, + @Nullable @EnsureLocked(mode=LockMode.READ_LOCK) final Collection parentCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK, type=LockType.COLLECTION) final XmldbURI collectionUri, + final Optional> creationAttributes) + throws ReadOnlyException, PermissionDeniedException, LockException { + + final Collection collection = creationAttributes.map(attrs -> new MutableCollection(this, collectionUri, attrs._1, attrs._2)).orElseGet(() -> new MutableCollection(this, collectionUri)); + collection.setId(getNextCollectionId(transaction)); + + //inherit the group to collection if parent-collection is setGid + if(parentCollection != null) { + final Permission parentPermissions = parentCollection.getPermissionsNoLock(); + if(parentPermissions.isSetGid()) { + final Permission collectionPermissions = collection.getPermissionsNoLock(); + collectionPermissions.setGroupFrom(parentPermissions); //inherit group + collectionPermissions.setSetGid(true); //inherit setGid bit + } + } + + return collection; + } + + /** + * Loads a Collection from disk + * + * @param collectionUri The URI of the Collection to load + * @param address The virtual address in the storage of the Collection if known, else {@link BFile#UNKNOWN_ADDRESS} + * + * @return The Collection object loaded from disk, or null if the record does not exist on disk + */ + private @Nullable 
@EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) Collection loadCollection( + @EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) final XmldbURI collectionUri, + final long address) throws PermissionDeniedException, LockException, IOException { + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { + VariableByteInput is; + if (address == BFile.UNKNOWN_ADDRESS) { + final Value key = new CollectionStore.CollectionKey(collectionUri.toString()); + is = collectionsDb.getAsStream(key); + } else { + is = collectionsDb.getAsStream(address); + } + if (is == null) { return null; - } catch(final ReadOnlyException e) { - throw new PermissionDeniedException(DATABASE_IS_READ_ONLY); } + + return MutableCollection.load(this, collectionUri, is); } } @@ -821,6 +906,94 @@ public Collection openCollection(final XmldbURI uri, final LockMode lockMode) th return openCollection(uri, BFile.UNKNOWN_ADDRESS, lockMode); } + /** + * Open a Collection for reading or writing. + * + * The Collection is identified by its absolute path, e.g. /db/shakespeare. + * It will be loaded and locked according to the lockMode argument. 
+ * + * The caller should take care to release the Collection lock properly by + * calling {@link Collection#close()} + * + * @param path The Collection's path + * @param address + * @param lockMode the mode for locking the Collection, as specified in {@link LockMode} + * + * @return the Collection, or null if no Collection matches the path + */ + private @Nullable @EnsureLocked Collection openCollection(final XmldbURI path, final long address, final LockMode lockMode) + throws PermissionDeniedException { + final XmldbURI collectionUri = prepend(path.normalizeCollectionPath()); + + final ManagedCollectionLock collectionLock; + final Runnable unlockFn; // we unlock on error, or if there is no Collection + try { + switch (lockMode) { + case WRITE_LOCK: + collectionLock = writeLockCollection(collectionUri); + unlockFn = collectionLock::close; + break; + + case READ_LOCK: + collectionLock = readLockCollection(collectionUri); + unlockFn = collectionLock::close; + break; + + case NO_LOCK: + default: + collectionLock = ManagedCollectionLock.notLocked(collectionUri); + unlockFn = () -> {}; + } + } catch(final LockException e) { + LOG.error("Failed to acquire lock on Collection: {}", collectionUri); + return null; + } + + // 1) optimize for reading from the Collection from the cache + final CollectionCache collectionsCache = pool.getCollectionsCache(); + final Collection collection = collectionsCache.getIfPresent(collectionUri); + if (collection != null) { + + // sanity check + if(!collection.getURI().equalsInternal(collectionUri)) { + LOG.error("openCollection: The Collection received from the cache: {} is not the requested: {}", collection.getURI(), collectionUri); + unlockFn.run(); + throw new IllegalStateException(); + } + + // does the user have permission to access the Collection + if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { + unlockFn.run(); + throw new PermissionDeniedException("Permission denied to open collection: " + 
collection.getURI().toString() + " by " + getCurrentSubject().getName()); + } + + return new LockedCollection(collectionLock, collection); + } + + // 2) if not in the cache, load from disk + final Collection loadedCollection; + try { + loadedCollection = loadCollection(collectionUri, address); + } catch(final IOException e) { + LOG.error(e.getMessage(), e); + unlockFn.run(); + return null; + } catch(final LockException e) { + LOG.error("Failed to acquire lock on: {}", FileUtils.fileName(collectionsDb.getFile())); + unlockFn.run(); + return null; + } + + // if we loaded a Collection add it to the cache (if it isn't already there) + if(loadedCollection != null) { + final Collection cachedCollection = collectionsCache.getOrCreate(collectionUri, uri -> loadedCollection); + return new LockedCollection(collectionLock, cachedCollection); + } else { + unlockFn.run(); + return null; + } + } + @Override public List findCollectionsMatching(final String regexp) { @@ -830,9 +1003,7 @@ public List findCollectionsMatching(final String regexp) { final Pattern p = Pattern.compile(regexp); final Matcher m = p.matcher(""); - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { //TODO write a regexp lookup for key data in BTree.query //final IndexQuery idxQuery = new IndexQuery(IndexQuery.REGEXP, regexp); @@ -859,464 +1030,577 @@ public List findCollectionsMatching(final String regexp) { } catch(final TerminatedException | IOException | BTreeException e) { LOG.error(e.getMessage(), e); //return null; - } finally { - lock.release(LockMode.READ_LOCK); } return collections; } @Override - public void readCollectionEntry(final SubCollectionEntry entry) { - + public void readCollectionEntry(final SubCollectionEntry entry) throws IOException, LockException { final XmldbURI uri = prepend(entry.getUri().toCollectionPathURI()); - Collection collection; 
final CollectionCache collectionsCache = pool.getCollectionsCache(); - synchronized(collectionsCache) { - collection = collectionsCache.get(uri); - if(collection == null) { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); - - final Value key = new CollectionStore.CollectionKey(uri.toString()); - final VariableByteInput is = collectionsDb.getAsStream(key); - if(is == null) { - LOG.error("Could not read collection entry for: " + uri); - return; - } - - //read the entry details - entry.read(is); - - } catch(final UnsupportedEncodingException e) { - LOG.error("Unable to encode '" + uri + "' in UTF-8"); - } catch(final LockException e) { - LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); - } catch(final IOException e) { - LOG.error(e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); - } - } else { + final Collection collection = collectionsCache.getIfPresent(uri); + if(collection == null) { + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { - if(!collection.getURI().equalsInternal(uri)) { - LOG.error("The collection received from the cache is not the requested: " + uri + - "; received: " + collection.getURI()); - return; + final Value key = new CollectionStore.CollectionKey(uri.toString()); + final VariableByteInput is = collectionsDb.getAsStream(key); + if(is == null) { + throw new IOException("Could not find collection entry for: " + uri); } - entry.read(collection); + //read the entry details + entry.read(is); + } + } else { - collectionsCache.add(collection); + if(!collection.getURI().equalsInternal(uri)) { + throw new IOException(String.format("readCollectionEntry: The Collection received from the cache: %s is not the requested: %s", collection.getURI(), uri)); } + + entry.read(collection); } } - /** - * Get collection object. If the collection does not exist, null is - * returned. 
- * - * @param uri collection URI - * @return The collection value - */ - private Collection openCollection(XmldbURI uri, final long address, final LockMode lockMode) throws PermissionDeniedException { - uri = prepend(uri.toCollectionPathURI()); - //We *must* declare it here (see below) - Collection collection; - final CollectionCache collectionsCache = pool.getCollectionsCache(); - synchronized(collectionsCache) { - collection = collectionsCache.get(uri); - if(collection == null) { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); - VariableByteInput is; - if(address == BFile.UNKNOWN_ADDRESS) { - final Value key = new CollectionStore.CollectionKey(uri.toString()); - is = collectionsDb.getAsStream(key); - } else { - is = collectionsDb.getAsStream(address); - } - if(is == null) { - return null; - } - collection = MutableCollection.load(this, uri, is); + @Override + public void copyCollection(final Txn transaction, final Collection collection, final Collection destination, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException { + copyCollection(transaction, collection, destination, newName, PreserveType.DEFAULT); + } - collectionsCache.add(collection); + @Override + public void copyCollection(final Txn transaction, final Collection sourceCollection, final Collection targetCollection, final XmldbURI newName, final PreserveType preserve) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException { + assert(sourceCollection != null); + assert(targetCollection != null); + assert(newName != null); - //TODO : rethrow exceptions ? 
-pb - } catch(final UnsupportedEncodingException e) { - LOG.error("Unable to encode '" + uri + "' in UTF-8"); - return null; - } catch(final LockException e) { - LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); - return null; - } catch(final IOException e) { - LOG.error(e.getMessage(), e); - return null; - } finally { - lock.release(LockMode.READ_LOCK); - } - } else { - if(!collection.getURI().equalsInternal(uri)) { - LOG.error("The collection received from the cache is not the requested: " + uri + - "; received: " + collection.getURI()); - } - collectionsCache.add(collection); + if(isReadOnly()) { + throw new IOException(DATABASE_IS_READ_ONLY); + } - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { - throw new PermissionDeniedException("Permission denied to open collection: " + collection.getURI().toString() + " by " + getCurrentSubject().getName()); - } - } + if(newName.numSegments() != 1) { + throw new IOException("newName name must be just a name i.e. an XmldbURI with one segment!"); } - //Important : - //This code must remain outside of the synchronized block - //because another thread may already own a lock on the collection - //This would result in a deadlock... until the time-out raises the Exception - //TODO : make an attempt to an immediate lock ? - //TODO : manage a collection of requests for locks ? - //TODO : another yet smarter solution ? 
- if(lockMode != LockMode.NO_LOCK) { - try { - collection.getLock().acquire(lockMode); - } catch(final LockException e) { - LOG.error("Failed to acquire lock on collection '" + uri + "'"); + final XmldbURI sourceCollectionUri = sourceCollection.getURI(); + final XmldbURI targetCollectionUri = targetCollection.getURI(); + final XmldbURI destinationCollectionUri = targetCollectionUri.append(newName); + + if(sourceCollection.getId() == targetCollection.getId()) { + throw new PermissionDeniedException("Cannot copy collection to itself '" + sourceCollectionUri + "'."); + } + if(sourceCollectionUri.equals(destinationCollectionUri)) { + throw new PermissionDeniedException("Cannot copy collection to itself '" + sourceCollectionUri + "'."); + } + if(isSubCollection(sourceCollectionUri, targetCollectionUri)) { + throw new PermissionDeniedException("Cannot copy collection '" + sourceCollectionUri + "' inside itself '" + targetCollectionUri + "'."); + } + + if(!sourceCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " has insufficient privileges on collection to copy collection " + sourceCollectionUri); + } + if(!targetCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " has insufficient privileges on target collection " + targetCollectionUri + " to copy collection " + sourceCollectionUri); + } + + /* + * At this point this thread should hold: + * READ_LOCK on: + * 1) sourceCollection + * + * WRITE_LOCK on: + * 1) targetCollection + * + * Remember a lock on a node in the Collection tree, + * implies locking the entire sub-tree, therefore + * we don't need to explicitly lock sub-collections (just documents). 
+ */ + + pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_COPY_COLLECTION, sourceCollection.getURI()); + try { + + final XmldbURI sourceCollectionParentUri = sourceCollection.getParentURI(); + // READ_LOCK the parent of the source Collection for the triggers + try(final Collection sourceCollectionParent = sourceCollectionParentUri == null ? sourceCollection : openCollection(sourceCollectionParentUri, LockMode.READ_LOCK)) { + // fire before copy collection triggers + final CollectionTrigger trigger = new CollectionTriggers(this, transaction, sourceCollectionParent); + trigger.beforeCopyCollection(this, transaction, sourceCollection, destinationCollectionUri); + + final DocumentTrigger docTrigger = new DocumentTriggers(this, transaction); + + // pessimistically obtain READ_LOCKs on all descendant documents of sourceCollection, and WRITE_LOCKs on all target documents + final Collection newCollection; + try(final ManagedLocks sourceDocLocks = new ManagedLocks(lockDescendantDocuments(sourceCollection, lockManager::acquireDocumentReadLock)); + final ManagedLocks targetDocLocks = new ManagedLocks(lockTargetDocuments(sourceCollectionUri, sourceDocLocks, destinationCollectionUri, lockManager::acquireDocumentWriteLock))) { + + // check all permissions in the tree to ensure a copy operation will succeed before starting copying + checkPermissionsForCopy(sourceCollection, targetCollection, newName); + newCollection = doCopyCollection(transaction, docTrigger, sourceCollection, targetCollection, destinationCollectionUri, true, preserve); + } + // fire after copy collection triggers + trigger.afterCopyCollection(this, transaction, newCollection, sourceCollectionUri); } + + } finally { + pool.getProcessMonitor().endJob(); } - return collection; } /** - * Checks all permissions in the tree to ensure that a copy operation will succeed + * Checks all permissions in the tree to ensure that a copy operation + * will not fail due to a lack of rights + * + * @param sourceCollection 
The Collection to copy + * @param targetCollection The target Collection to copy the sourceCollection into + * @param newName The new name the sourceCollection should have in the targetCollection + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks */ - protected void checkPermissionsForCopy(final Collection src, final XmldbURI destUri, final XmldbURI newName) throws PermissionDeniedException, LockException { + protected void checkPermissionsForCopy(@EnsureLocked(mode=LockMode.READ_LOCK) final Collection sourceCollection, + @EnsureLocked(mode=LockMode.READ_LOCK) @Nullable final Collection targetCollection, final XmldbURI newName) + throws PermissionDeniedException, LockException { - if(!src.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.READ)) { - throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " by " + getCurrentSubject().getName()); + if(!sourceCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.READ)) { + throw new PermissionDeniedException("Permission denied to copy collection " + sourceCollection.getURI() + " by " + getCurrentSubject().getName()); } - final Collection dest = getCollection(destUri); - final XmldbURI newDestUri = destUri.append(newName); - final Collection newDest = getCollection(newDestUri); + final XmldbURI destinationCollectionUri = targetCollection == null ? null : targetCollection.getURI().append(newName); + final Collection destinationCollection = destinationCollectionUri == null ? null : getCollection(destinationCollectionUri); // NOTE: we already have a WRITE_LOCK on destinationCollectionUri - if(dest != null) { - //if(!dest.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.WRITE | Permission.READ)) { - //TODO do we really need WRITE permission on the dest? 
- if(!dest.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getCurrentSubject().getName()); + if(targetCollection != null) { + if(!targetCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.WRITE)) { + throw new PermissionDeniedException("Permission denied to copy collection " + sourceCollection.getURI() + " to " + targetCollection.getURI() + " by " + getCurrentSubject().getName()); } - if(newDest != null) { - //TODO why do we need READ access on the dest collection? - /*if(!dest.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.READ)) { - throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + dest.getURI() + " by " + getCurrentSubject().getName()); - }*/ - - //if(newDest.isEmpty(this)) { - if(!newDest.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " to " + newDest.getURI() + " by " + getCurrentSubject().getName()); + if(destinationCollection != null) { + if(!destinationCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE | Permission.WRITE)) { + throw new PermissionDeniedException("Permission denied to copy collection " + sourceCollection.getURI() + " to " + destinationCollection.getURI() + " by " + getCurrentSubject().getName()); } - //} } } - for(final Iterator itSrcSubDoc = src.iterator(this); itSrcSubDoc.hasNext(); ) { + // check document permissions + for(final Iterator itSrcSubDoc = sourceCollection.iteratorNoLock(this); itSrcSubDoc.hasNext(); ) { // NOTE: we already have a READ lock on sourceCollection implicitly final DocumentImpl srcSubDoc = itSrcSubDoc.next(); 
if(!srcSubDoc.getPermissions().validate(getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " for resource " + srcSubDoc.getURI() + " by " + getCurrentSubject().getName()); + throw new PermissionDeniedException("Permission denied to copy collection " + sourceCollection.getURI() + " for resource " + srcSubDoc.getURI() + " by " + getCurrentSubject().getName()); } //if the destination resource exists, we must have write access to replace it's metadata etc. (this follows the Linux convention) - if(newDest != null && !newDest.isEmpty(this)) { - final DocumentImpl newDestSubDoc = newDest.getDocument(this, srcSubDoc.getFileURI()); //TODO check this uri is just the filename! + if(destinationCollection != null && !destinationCollection.isEmpty(this)) { + final DocumentImpl newDestSubDoc = destinationCollection.getDocument(this, srcSubDoc.getFileURI()); //TODO check this uri is just the filename! if(newDestSubDoc != null) { if(!newDestSubDoc.getPermissions().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Permission denied to copy collection " + src.getURI() + " for resource " + newDestSubDoc.getURI() + " by " + getCurrentSubject().getName()); + throw new PermissionDeniedException("Permission denied to copy collection " + sourceCollection.getURI() + " for resource " + newDestSubDoc.getURI() + " by " + getCurrentSubject().getName()); } } } } - for(final Iterator itSrcSubColUri = src.collectionIterator(this); itSrcSubColUri.hasNext(); ) { + // descend into sub-collections + for(final Iterator itSrcSubColUri = sourceCollection.collectionIteratorNoLock(this); itSrcSubColUri.hasNext(); ) { // NOTE: we already have a READ lock on sourceCollection implicitly final XmldbURI srcSubColUri = itSrcSubColUri.next(); - final Collection srcSubCol = getCollection(src.getURI().append(srcSubColUri)); + final Collection srcSubCol = 
getCollection(sourceCollection.getURI().append(srcSubColUri)); // NOTE: we already have a READ_LOCK on destinationCollectionUri - checkPermissionsForCopy(srcSubCol, newDestUri, srcSubColUri); + checkPermissionsForCopy(srcSubCol, destinationCollection, srcSubColUri); } } - /* (non-Javadoc) - * @see org.exist.storage.DBBroker#copyCollection(org.exist.storage.txn.Txn, org.exist.collections.Collection, org.exist.collections.Collection, org.exist.xmldb.XmldbURI) + /** + * Copy a collection and all its sub-Collections. + * + * @param transaction The current transaction + * @param documentTrigger The trigger to use for document events + * @param sourceCollection The Collection to copy + * @param destinationCollectionUri The destination Collection URI for the sourceCollection copy + * @param copyCollectionMode false on the first call, true on recursive calls + * + * @return A reference to the Collection, no additional locks are held on the Collection + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst copying the Collection on disk + * @throws TriggerException If a CollectionTrigger throws an exception + * @throws EXistException If no more Document IDs are available */ - @Override - public void copyCollection(final Txn transaction, final Collection collection, final Collection destination, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException { - if(isReadOnly()) { - throw new IOException(DATABASE_IS_READ_ONLY); + private Collection doCopyCollection(final Txn transaction, final DocumentTrigger documentTrigger, + @EnsureLocked(mode=LockMode.READ_LOCK) final Collection sourceCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection destinationParentCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK, type=LockType.COLLECTION) 
final XmldbURI destinationCollectionUri, + final boolean copyCollectionMode, final PreserveType preserve) + throws PermissionDeniedException, IOException, EXistException, TriggerException, LockException { + if(LOG.isDebugEnabled()) { + LOG.debug("Copying collection to '{}'", destinationCollectionUri); } - //TODO : resolve URIs !!! - if(newName != null && newName.numSegments() != 1) { - throw new PermissionDeniedException("New collection name must have one segment!"); + // permissions and attributes for the destCollection (if we have to create it) + final Permission createCollectionPerms = PermissionFactory.getDefaultCollectionPermission(getBrokerPool().getSecurityManager()); + copyModeAndAcl(sourceCollection.getPermissions(), createCollectionPerms); + final long created; + if (preserveOnCopy(preserve)) { + // only copy the owner and group from the source if we are creating a new collection and we are the DBA + if (getCurrentSubject().hasDbaRole()) { + PermissionFactory.chown(this, createCollectionPerms, Optional.of(sourceCollection.getPermissions().getOwner().getName()), Optional.of(sourceCollection.getPermissions().getGroup().getName())); + } + + created = sourceCollection.getMetadata().getCreated(); + } else { + created = 0; } - final XmldbURI srcURI = collection.getURI(); - final XmldbURI dstURI = destination.getURI().append(newName); + final Tuple2 destinationCollection = getOrCreateCollectionExplicit(transaction, destinationCollectionUri, Optional.of(new Tuple2<>(createCollectionPerms, created))); - if(collection.getURI().equals(dstURI)) { - throw new PermissionDeniedException("Cannot copy collection to itself '" + collection.getURI() + "'."); - } - if(collection.getId() == destination.getId()) { - throw new PermissionDeniedException("Cannot copy collection to itself '" + collection.getURI() + "'."); + // if we didn't create destCollection but we need to preserve the attributes + if((!destinationCollection._1) && preserveOnCopy(preserve)) { + 
copyModeAndAcl(sourceCollection.getPermissions(), destinationCollection._2.getPermissions()); } - if(isSubCollection(collection, destination)) { - throw new PermissionDeniedException("Cannot copy collection '" + collection.getURI() + "' to it child collection '"+destination.getURI()+"'."); + + // inherit the group to the destinationCollection if parent is setGid + if (destinationParentCollection != null && destinationParentCollection.getPermissions().isSetGid()) { + destinationCollection._2.getPermissions().setGroupFrom(destinationParentCollection.getPermissions()); //inherit group + destinationCollection._2.getPermissions().setSetGid(true); //inherit setGid bit } - final CollectionCache collectionsCache = pool.getCollectionsCache(); - synchronized(collectionsCache) { - final Lock lock = collectionsDb.getLock(); - try { - pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_COPY_COLLECTION, collection.getURI()); - lock.acquire(LockMode.WRITE_LOCK); + doCopyCollectionDocuments(transaction, documentTrigger, sourceCollection, destinationCollection._2, preserve); - //recheck here because now under 'synchronized(collectionsCache)' - if(isSubCollection(collection, destination)) { - throw new PermissionDeniedException("Cannot copy collection '" + collection.getURI() + "' to it child collection '"+destination.getURI()+"'."); + final XmldbURI sourceCollectionUri = sourceCollection.getURI(); + for(final Iterator i = sourceCollection.collectionIterator(this); i.hasNext(); ) { + final XmldbURI childName = i.next(); + final XmldbURI childUri = sourceCollectionUri.append(childName); + try (final Collection child = getCollection(childUri)) { // NOTE: we already have a READ lock on child implicitly + if (child == null) { + throw new IOException("Child collection " + childUri + " not found"); + } else { + doCopyCollection(transaction, documentTrigger, child, destinationCollection._2, destinationCollection._2.getURI().append(childName), true, preserve); } - - final XmldbURI 
parentName = collection.getParentURI(); - final Collection parent = parentName == null ? collection : getCollection(parentName); - - final CollectionTrigger trigger = new CollectionTriggers(this, parent); - trigger.beforeCopyCollection(this, transaction, collection, dstURI); - - //atomically check all permissions in the tree to ensure a copy operation will succeed before starting copying - checkPermissionsForCopy(collection, destination.getURI(), newName); - - final DocumentTrigger docTrigger = new DocumentTriggers(this); - - final Collection newCollection = doCopyCollection(transaction, docTrigger, collection, destination, newName, false); - - trigger.afterCopyCollection(this, transaction, newCollection, srcURI); - } finally { - lock.release(LockMode.WRITE_LOCK); - pool.getProcessMonitor().endJob(); } } - } - - private Collection doCopyCollection(final Txn transaction, final DocumentTrigger trigger, final Collection collection, final Collection destination, XmldbURI newName, final boolean copyCollectionMode) throws PermissionDeniedException, IOException, EXistException, TriggerException, LockException { - - if(newName == null) { - newName = collection.getURI().lastSegment(); - } - newName = destination.getURI().append(newName); - - if(LOG.isDebugEnabled()) { - LOG.debug("Copying collection to '" + newName + "'"); - } - final Tuple2 destCollection = getOrCreateCollectionExplicit(transaction, newName); - - //if required, copy just the mode and acl of the permissions to the dest collection - if(copyCollectionMode && destCollection._1) { - final Permission srcPerms = collection.getPermissions(); - final Permission destPerms = destCollection._2.getPermissions(); - copyModeAndAcl(srcPerms, destPerms); - } + return destinationCollection._2; + } - for(final Iterator i = collection.iterator(this); i.hasNext(); ) { + /** + * Copy the documents in one Collection to another (non-recursive) + * + * @param transaction The current transaction + * @param documentTrigger The 
trigger to use for document events + * @param sourceCollection The Collection to copy documents from + * @param destinationCollection The Collection to copy documents to + * + * @throws PermissionDeniedException If the current user does not have appropriate permissions + * @throws LockException If an exception occurs whilst acquiring locks + * @throws IOException If an error occurs whilst copying the Collection on disk + * @throws TriggerException If a CollectionTrigger throws an exception + * @throws EXistException If no more Document IDs are available + */ + private void doCopyCollectionDocuments(final Txn transaction, final DocumentTrigger documentTrigger, + @EnsureLocked(mode=LockMode.READ_LOCK) final Collection sourceCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection destinationCollection, + final PreserveType preserve) + throws LockException, PermissionDeniedException, IOException, TriggerException, EXistException { + for(final Iterator i = sourceCollection.iterator(this); i.hasNext(); ) { final DocumentImpl child = i.next(); if(LOG.isDebugEnabled()) { - LOG.debug("Copying resource: '" + child.getURI() + "'"); + LOG.debug("Copying resource: '{}'", child.getURI()); } - //TODO The code below seems quite different to that in NativeBroker#copyResource presumably should be the same? + // TODO(AR) The code below seems quite different to that in NativeBroker#copyResource presumably should be the same? - - final XmldbURI newUri = destCollection._2.getURI().append(child.getFileURI()); - trigger.beforeCopyDocument(this, transaction, child, newUri); + final XmldbURI newDocName = child.getFileURI(); + final XmldbURI newDocUri = destinationCollection.getURI().append(newDocName); + documentTrigger.beforeCopyDocument(this, transaction, child, newDocUri); //are we overwriting an existing document? 
final CollectionEntry oldDoc; - if(destCollection._2.hasDocument(this, child.getFileURI())) { - oldDoc = destCollection._2.getResourceEntry(this, child.getFileURI().toString()); + if(destinationCollection.hasDocument(this, child.getFileURI())) { + oldDoc = destinationCollection.getResourceEntry(this, newDocName.toString()); } else { oldDoc = null; } - DocumentImpl createdDoc; - if(child.getResourceType() == DocumentImpl.XML_FILE) { - //TODO : put a lock on newDoc ? - final DocumentImpl newDoc = new DocumentImpl(pool, destCollection._2, child.getFileURI()); - newDoc.copyOf(child, false); - if(oldDoc != null) { - //preserve permissions from existing doc we are replacing - newDoc.setPermissions(oldDoc.getPermissions()); //TODO use newDoc.copyOf(oldDoc) ideally, but we cannot currently access oldDoc without READ access to it, which we may not have (and should not need for this)! + final DocumentImpl createdDoc; + if(child.getResourceType() == DocumentImpl.BINARY_FILE) { + final BinaryDocument newDoc; + if (oldDoc != null) { + newDoc = new BinaryDocument(pool, destinationCollection, oldDoc); } else { - //copy just the mode and acl of the permissions to the dest document - final Permission srcPerm = child.getPermissions(); - final Permission destPerm = newDoc.getPermissions(); - copyModeAndAcl(srcPerm, destPerm); + newDoc = new BinaryDocument(pool, destinationCollection, child.getFileURI()); } - newDoc.setDocId(getNextResourceId(transaction, destination)); - copyXMLResource(transaction, child, newDoc); - storeXMLResource(transaction, newDoc); - destCollection._2.addDocument(transaction, this, newDoc); + newDoc.copyOf(this, child, oldDoc); + newDoc.setDocId(getNextResourceId(transaction)); - createdDoc = newDoc; - } else { - final BinaryDocument newDoc = new BinaryDocument(pool, destCollection._2, child.getFileURI()); - newDoc.copyOf(child, false); - if(oldDoc != null) { - //preserve permissions from existing doc we are replacing - 
newDoc.setPermissions(oldDoc.getPermissions()); //TODO use newDoc.copyOf(oldDoc) ideally, but we cannot currently access oldDoc without READ access to it, which we may not have (and should not need for this)! + if(preserveOnCopy(preserve)) { + copyResource_preserve(this, child, newDoc, oldDoc != null); } - newDoc.setDocId(getNextResourceId(transaction, destination)); - try(final InputStream is = getBinaryResource((BinaryDocument) child)) { + try (final InputStream is = getBinaryResource((BinaryDocument) child)) { storeBinaryResource(transaction, newDoc, is); } storeXMLResource(transaction, newDoc); - destCollection._2.addDocument(transaction, this, newDoc); + destinationCollection.addDocument(transaction, this, newDoc); createdDoc = newDoc; - } - - trigger.afterCopyDocument(this, transaction, createdDoc, child.getURI()); - } - saveCollection(transaction, destCollection._2); - - final XmldbURI name = collection.getURI(); - for(final Iterator i = collection.collectionIterator(this); i.hasNext(); ) { - final XmldbURI childName = i.next(); - //TODO : resolve URIs ! collection.getURI().resolve(childName) - Collection child = null; - try { - child = openCollection(name.append(childName), LockMode.READ_LOCK); - if (child == null) { - LOG.error("Child collection '" + childName + "' not found"); + } else { + //TODO : put a lock on newDoc ? 
+ final DocumentImpl newDoc; + if (oldDoc != null) { + newDoc = new DocumentImpl(pool, destinationCollection, oldDoc); } else { - doCopyCollection(transaction, trigger, child, destCollection._2, childName, true); + newDoc = new DocumentImpl(pool, destinationCollection, child.getFileURI()); } - } finally { - if(child != null) { - child.release(LockMode.READ_LOCK); + newDoc.copyOf(this, child, oldDoc); + newDoc.setDocId(getNextResourceId(transaction)); + copyXMLResource(transaction, child, newDoc); + if (preserveOnCopy(preserve)) { + copyResource_preserve(this, child, newDoc, oldDoc != null); } + storeXMLResource(transaction, newDoc); + destinationCollection.addDocument(transaction, this, newDoc); + + createdDoc = newDoc; } - } - saveCollection(transaction, destCollection._2); - saveCollection(transaction, destination); - return destCollection._2; + documentTrigger.afterCopyDocument(this, transaction, createdDoc, child.getURI()); + } } /** * Copies just the mode and ACL from the src to the dest + * + * @param srcPermission The source to copy from + * @param destPermission The destination to copy to */ private void copyModeAndAcl(final Permission srcPermission, final Permission destPermission) throws PermissionDeniedException { - destPermission.setMode(srcPermission.getMode()); + final List aces = new ArrayList<>(); if(srcPermission instanceof SimpleACLPermission && destPermission instanceof SimpleACLPermission) { - ((SimpleACLPermission)destPermission).copyAclOf((SimpleACLPermission)srcPermission); + final SimpleACLPermission srcAclPermission = (SimpleACLPermission) srcPermission; + for (int i = 0; i < srcAclPermission.getACECount(); i++) { + aces.add(new ACEAider(srcAclPermission.getACEAccessType(i), srcAclPermission.getACETarget(i), srcAclPermission.getACEWho(i), srcAclPermission.getACEMode(i))); + } } + PermissionFactory.chmod(this, destPermission, Optional.of(srcPermission.getMode()), Optional.of(aces)); } - private boolean isSubCollection(final Collection col, 
final Collection sub) { - return sub.getURI().startsWith(col.getURI()); + private boolean isSubCollection(@EnsureLocked(mode=LockMode.READ_LOCK) final Collection col, + @EnsureLocked(mode=LockMode.READ_LOCK) final Collection sub) { + return isSubCollection(col.getURI(), sub.getURI()); + } + + private boolean isSubCollection(final XmldbURI col, final XmldbURI sub) { + return sub.startsWith(col); } @Override - public void moveCollection(final Txn transaction, final Collection collection, final Collection destination, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException { + public void moveCollection(final Txn transaction, final Collection sourceCollection, + final Collection targetCollection, final XmldbURI newName) + throws PermissionDeniedException, LockException, IOException, TriggerException { + assert(sourceCollection != null); + assert(targetCollection != null); + assert(newName != null); if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } - if(newName != null && newName.numSegments() != 1) { - throw new PermissionDeniedException("New collection name must have one segment!"); + if(newName.numSegments() != 1) { + throw new IOException("newName name must be just a name i.e. 
an XmldbURI with one segment!"); } - if(collection.getId() == destination.getId()) { - throw new PermissionDeniedException("Cannot move collection to itself '" + collection.getURI() + "'."); + final XmldbURI sourceCollectionUri = sourceCollection.getURI(); + final XmldbURI targetCollectionUri = targetCollection.getURI(); + final XmldbURI destinationCollectionUri = targetCollectionUri.append(newName); + + if(sourceCollection.getId() == targetCollection.getId()) { + throw new PermissionDeniedException("Cannot move collection to itself '" + sourceCollectionUri + "'."); } - if(collection.getURI().equals(destination.getURI().append(newName))) { - throw new PermissionDeniedException("Cannot move collection to itself '" + collection.getURI() + "'."); + if(sourceCollectionUri.equals(destinationCollectionUri)) { + throw new PermissionDeniedException("Cannot move collection to itself '" + sourceCollectionUri + "'."); } - if(collection.getURI().equals(XmldbURI.ROOT_COLLECTION_URI)) { - throw new PermissionDeniedException("Cannot move the db root collection"); + if(sourceCollectionUri.equals(XmldbURI.ROOT_COLLECTION_URI)) { + throw new PermissionDeniedException("Cannot move the db root collection /db"); } - if(isSubCollection(collection, destination)) { - throw new PermissionDeniedException("Cannot move collection '" + collection.getURI() + "' to it child collection '"+destination.getURI()+"'."); + if(isSubCollection(sourceCollectionUri, targetCollectionUri)) { + throw new PermissionDeniedException("Cannot move collection '" + sourceCollectionUri + "' inside itself '" + targetCollectionUri + "'."); } - final XmldbURI parentName = collection.getParentURI(); - final Collection parent = parentName == null ? 
collection : getCollection(parentName); - if(!parent.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { - throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on collection " + parent.getURI() + " to move collection " + collection.getURI()); + if(!sourceCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " has insufficient privileges on collection to move collection " + sourceCollectionUri); } - - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on collection to move collection " + collection.getURI()); + if(!targetCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " has insufficient privileges on destination collection " + destinationCollectionUri + " to move collection " + sourceCollectionUri); } - if(!destination.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { - throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on collection " + parent.getURI() + " to move collection " + collection.getURI()); - } - - /* - * If replacing another collection in the move i.e. /db/col1/A -> /db/col2 (where /db/col2/A exists) - * we have to make sure the permissions to remove /db/col2/A are okay! - * - * So we must call removeCollection on /db/col2/A - * Which will ensure that collection can be removed and then remove it. 
- */ - final XmldbURI movedToCollectionUri = destination.getURI().append(newName); - final Collection existingMovedToCollection = getCollection(movedToCollectionUri); - if(existingMovedToCollection != null) { - removeCollection(transaction, existingMovedToCollection); + + + // WRITE LOCK the parent of the sourceCollection (as we will want to remove the sourceCollection from it eventually) + final XmldbURI sourceCollectionParentUri = sourceCollectionUri.removeLastSegment(); + try (final Collection sourceCollectionParent = openCollection(sourceCollectionParentUri, LockMode.WRITE_LOCK)) { + + if(!sourceCollectionParent.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on collection " + sourceCollectionParentUri + " to move collection " + sourceCollectionUri); + } + + /* + * If replacing another collection in the move + * i.e. sourceCollection=/db/col1/A, targetCollection=/db/col2, newName=A + * where /db/col2/A already exists we have to make sure the permissions to + * remove /db/col2/A are okay! + * + * So we must call removeCollection on /db/col2/A + * Which will ensure that collection can be removed and then remove it. 
+ */ + try(final Collection existingDestinationCollection = getCollection(destinationCollectionUri)) { // NOTE: we already have a WRITE lock on destinationCollection (implicitly as targetCollection is locked) + if(existingDestinationCollection != null) { + if (!removeCollection(transaction, existingDestinationCollection)) { + throw new IOException("Destination collection '" + destinationCollectionUri + "' already exists and cannot be removed"); + } + } + } + + /* + * At this point this thread should hold WRITE_LOCKs on: + * 1) parent of sourceCollection + * 2) sourceCollection + * 3) targetCollection + * + * Remember a lock on a node in the Collection tree, + * implies locking the entire sub-tree, therefore + * we don't need to explicitly lock sub-collections (just documents). + */ + + pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_MOVE_COLLECTION, sourceCollection.getURI()); + try { + final CollectionTrigger trigger = new CollectionTriggers(this, transaction, sourceCollectionParent); + trigger.beforeMoveCollection(this, transaction, sourceCollection, destinationCollectionUri); + + // pessimistically obtain WRITE_LOCKs on all descendant documents of sourceCollection, and WRITE_LOCKs on all target documents + // we do this as whilst the document objects won't change, their method getURI() will return a different URI after the move + try(final ManagedLocks sourceDocLocks = new ManagedLocks(lockDescendantDocuments(sourceCollection, lockManager::acquireDocumentWriteLock)); + final ManagedLocks targetDocLocks = new ManagedLocks(lockTargetDocuments(sourceCollectionUri, sourceDocLocks, destinationCollectionUri, lockManager::acquireDocumentWriteLock))) { + + // Need to move each collection in the source tree individually, so recurse. + moveCollectionRecursive(transaction, trigger, sourceCollectionParent, sourceCollection, targetCollection, newName, false); + + // For binary resources, though, just move the top level directory and all descendants come with it. 
+ final Path fsSourceDir = getCollectionFile(getFsDir(), sourceCollectionUri, false); + moveBinaryFork(transaction, fsSourceDir, targetCollection, newName); + } + + trigger.afterMoveCollection(this, transaction, sourceCollection, sourceCollectionUri); + } finally { + pool.getProcessMonitor().endJob(); + } } + } - pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_MOVE_COLLECTION, collection.getURI()); + /** + * Acquires locks on all descendant Collections of a specific Collection + * + * Locks are acquired in a top-down, left-to-right order + * + * NOTE: It is assumed that the caller holds a lock on the + * `collection` of the same mode as those that we should acquire on the descendants + * + * @param collection The Collection whose descendant locks should be acquired + * @param lockFn A function for acquiring a lock + * + * @return A list of locks in the same order as collectionUris. Note that these should be released in reverse order + */ + private List lockDescendantDocuments(final Collection collection, final FunctionE lockFn) throws LockException, PermissionDeniedException { + final List locks = new ArrayList<>(); try { + final Iterator itDoc = collection.iteratorNoLock(this); + while(itDoc.hasNext()) { + final DocumentImpl doc = itDoc.next(); + final ManagedDocumentLock docLock = lockFn.apply(doc.getURI()); + locks.add(docLock); + } - final XmldbURI srcURI = collection.getURI(); - final XmldbURI dstURI = destination.getURI().append(newName); + final XmldbURI collectionUri = collection.getURI(); + final Iterator it = collection.collectionIteratorNoLock(this); // NOTE: we should already have a lock on collection + while (it.hasNext()) { + final XmldbURI childCollectionName = it.next(); + final XmldbURI childCollectionUri = collectionUri.append(childCollectionName); + final Collection childCollection = getCollection(childCollectionUri); // NOTE: we don't need to lock the collection as we should already implicitly have a lock on the collection sub-tree + 
final List descendantLocks = lockDescendantDocuments(childCollection, lockFn); + locks.addAll(descendantLocks); + } + } catch (final PermissionDeniedException | LockException e) { + // unlock in reverse order + try { + ManagedLocks.closeAll(locks); + } catch (final RuntimeException re) { + LOG.error(re); + } - final CollectionTrigger trigger = new CollectionTriggers(this, parent); - trigger.beforeMoveCollection(this, transaction, collection, dstURI); + throw e; + } - // sourceDir must be known in advance, because once moveCollectionRecursive - // is called, both collection and destination can point to the same resource - final Path fsSourceDir = getCollectionFile(getFsDir(), collection.getURI(), false); + return locks; + } - // Need to move each collection in the source tree individually, so recurse. - moveCollectionRecursive(transaction, trigger, collection, destination, newName, false); + /** + * Locks target documents (useful for copy/move operations). + * + * @param sourceCollectionUri The source collection URI root of the copy/move operation + * @param sourceDocumentLocks Locks on the source documents, for which target document locks should be acquired + * @param targetCollectionUri The target collection URI root of the copy/move operation + * @param lockFn The function for locking the target document. + * + * @return A list of locks on the target documents. 
+ */ + private List lockTargetDocuments(final XmldbURI sourceCollectionUri, final ManagedLocks sourceDocumentLocks, final XmldbURI targetCollectionUri, final FunctionE lockFn) throws LockException { + final List locks = new ArrayList<>(); + try { + for (final ManagedDocumentLock sourceDocumentLock : sourceDocumentLocks) { + final XmldbURI sourceDocumentUri = sourceDocumentLock.getPath(); + final URI relativeDocumentUri = sourceCollectionUri.relativizeCollectionPath(sourceDocumentUri.getURI()); + final XmldbURI targetDocumentUri = XmldbURI.create(targetCollectionUri.resolveCollectionPath(relativeDocumentUri)); - // For binary resources, though, just move the top level directory and all descendants come with it. - moveBinaryFork(transaction, fsSourceDir, destination, newName); - trigger.afterMoveCollection(this, transaction, collection, srcURI); + final ManagedDocumentLock documentLock = lockFn.apply(targetDocumentUri); + locks.add(documentLock); - } finally { - pool.getProcessMonitor().endJob(); + } + } catch(final LockException e) { + // unlock in reverse order + try { + ManagedLocks.closeAll(locks); + } catch (final RuntimeException re) { + LOG.error(re); + } + + throw e; } + return locks; } - private void moveBinaryFork(final Txn transaction, final Path sourceDir, final Collection destination, final XmldbURI newName) throws IOException { - final Path targetDir = getCollectionFile(getFsDir(), destination.getURI().append(newName), false); + /** + * Moves the binary objects for a Collection Move operation, only meant to be + * called from {@link #moveCollection(Txn, Collection, Collection, XmldbURI)} + * + * @param transaction The current transaction + * @param sourceDir The source directory (containing the binary objects) which is to be moved + * @param targetCollection The target Collection which the source collection is to be moved to + * @param newName The name of the source collection in the target Collection + */ + private void moveBinaryFork(final Txn 
transaction, final Path sourceDir, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection targetCollection, final XmldbURI newName) + throws IOException { + final XmldbURI destinationCollectionUri = targetCollection.getURI().append(newName); + + final Path targetDir = getCollectionFile(getFsDir(), destinationCollectionUri, false); if(Files.exists(sourceDir)) { if(Files.exists(targetDir)) { if(fsJournalDir.isPresent()) { - final Path targetDelDir = getCollectionFile(fsJournalDir.get(), transaction, destination.getURI().append(newName), true); + final Path targetDelDir = getCollectionFile(fsJournalDir.get(), transaction, destinationCollectionUri, true); Files.createDirectories(targetDelDir); Files.move(targetDir, targetDelDir, StandardCopyOption.ATOMIC_MOVE); @@ -1346,404 +1630,368 @@ private void moveBinaryFork(final Txn transaction, final Path sourceDir, final C } } - //TODO bug the trigger param is reused as this is a recursive method, but in the current design triggers - // are only meant to be called once for each action and then destroyed! + //TODO bug the trigger param is reused as this is a recursive method, but in the current design triggers are only meant to be called once for each action and then destroyed! /** - * @param transaction - * @param trigger - * @param collection - * @param destination - * @param newName + * Recursive-descent Collection move, only meant to be + * called from {@link #moveCollection(Txn, Collection, Collection, XmldbURI)} + * + * @param transaction The current transaction + * @param trigger The trigger to fire on Collection events + * @param sourceCollection The Collection to move + * @param targetCollection The target Collection to move the sourceCollection into + * @param newName The new name the sourceCollection should have in the targetCollection * @param fireTrigger Indicates whether the CollectionTrigger should be fired - * on the collection the first time this function is called. 
- * Triggers will always be fired for recursive calls of this - * function. + * on the Collection the first time this function is called. Triggers will always + * be fired for recursive calls of this function. */ - private void moveCollectionRecursive(final Txn transaction, final CollectionTrigger trigger, final Collection collection, final Collection destination, final XmldbURI newName, final boolean fireTrigger) throws PermissionDeniedException, IOException, LockException, TriggerException { - - final XmldbURI uri = collection.getURI(); - final CollectionCache collectionsCache = pool.getCollectionsCache(); - synchronized(collectionsCache) { + private void moveCollectionRecursive(final Txn transaction, final CollectionTrigger trigger, + @Nullable @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection sourceCollectionParent, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection sourceCollection, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection targetCollection, final XmldbURI newName, + final boolean fireTrigger) throws PermissionDeniedException, IOException, LockException, TriggerException { + final XmldbURI sourceCollectionUri = sourceCollection.getURI(); + final XmldbURI destinationCollectionUri = targetCollection.getURI().append(newName); - final XmldbURI srcURI = collection.getURI(); - final XmldbURI dstURI = destination.getURI().append(newName); + if(fireTrigger) { + trigger.beforeMoveCollection(this, transaction, sourceCollection, destinationCollectionUri); + } - //recheck here because now under 'synchronized(collectionsCache)' - if(isSubCollection(collection, destination)) { - throw new PermissionDeniedException("Cannot move collection '" + srcURI + "' to it child collection '"+dstURI+"'."); - } + // remove source from parent + if (sourceCollectionParent != null) { + final XmldbURI sourceCollectionName = sourceCollectionUri.lastSegment(); + sourceCollectionParent.removeCollection(this, sourceCollectionName); + saveCollection(transaction, 
sourceCollectionParent); + } - if(fireTrigger) { - trigger.beforeMoveCollection(this, transaction, collection, dstURI); - } + // remove source from cache + final CollectionCache collectionsCache = pool.getCollectionsCache(); + collectionsCache.invalidate(sourceCollection.getURI()); - final XmldbURI parentName = collection.getParentURI(); - final Collection parent = openCollection(parentName, LockMode.WRITE_LOCK); + // remove source from disk + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { + final Value key = new CollectionStore.CollectionKey(sourceCollectionUri.toString()); + collectionsDb.remove(transaction, key); + } - if(parent != null) { - try { - //TODO : resolve URIs - parent.removeCollection(this, uri.lastSegment()); - } finally { - parent.release(LockMode.WRITE_LOCK); - } - } + // set source path to destination... source is now the destination + sourceCollection.setPath(destinationCollectionUri); + saveCollection(transaction, sourceCollection); - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - collectionsCache.remove(collection); - final Value key = new CollectionStore.CollectionKey(uri.toString()); - collectionsDb.remove(transaction, key); - //TODO : resolve URIs destination.getURI().resolve(newName) - collection.setPath(destination.getURI().append(newName)); - collection.setCreationTime(System.currentTimeMillis()); - destination.addCollection(this, collection, false); - if(parent != null) { - saveCollection(transaction, parent); - } - if(parent != destination) { - saveCollection(transaction, destination); - } - saveCollection(transaction, collection); - //} catch (ReadOnlyException e) { - //throw new PermissionDeniedException(DATABASE_IS_READ_ONLY); - } finally { - lock.release(LockMode.WRITE_LOCK); - } + // add destination to target + targetCollection.addCollection(this, sourceCollection); + if (sourceCollectionParent != targetCollection) { + 
saveCollection(transaction, targetCollection); + } - if(fireTrigger) { - trigger.afterMoveCollection(this, transaction, collection, srcURI); - } + if(fireTrigger) { + trigger.afterMoveCollection(this, transaction, sourceCollection, sourceCollectionUri); + } - for(final Iterator i = collection.collectionIterator(this); i.hasNext(); ) { - final XmldbURI childName = i.next(); - //TODO : resolve URIs !!! name.resolve(childName) - final Collection child = openCollection(uri.append(childName), LockMode.WRITE_LOCK); - if(child == null) { - LOG.error("Child collection " + childName + " not found"); + // move the descendants + for(final Iterator i = sourceCollection.collectionIteratorNoLock(this); i.hasNext(); ) { // NOTE: we already have a WRITE lock on sourceCollection + final XmldbURI childName = i.next(); + final XmldbURI childUri = sourceCollectionUri.append(childName); + try(final Collection child = getCollection(childUri)) { // NOTE: we already have a WRITE lock on child + if (child == null) { + throw new IOException("Child collection " + childUri + " not found"); } else { - try { - moveCollectionRecursive(transaction, trigger, child, collection, childName, true); - } finally { - child.release(LockMode.WRITE_LOCK); - } + moveCollectionRecursive(transaction, trigger, null, child, sourceCollection, childName, true); } } } } - /** - * Removes a collection and all child collections and resources - * - * We first traverse down the Collection tree to ensure that the Permissions - * enable the Collection Tree to be removed. We then return back up the Collection - * tree, removing each child as we progresses upwards. 
- * - * @param transaction the transaction to use - * @param collection the collection to remove - * @return true if the collection was removed, false otherwise - * @throws TriggerException - */ @Override public boolean removeCollection(final Txn transaction, final Collection collection) throws PermissionDeniedException, IOException, TriggerException { - if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } - final XmldbURI parentName = collection.getParentURI(); - final boolean isRoot = parentName == null; - final Collection parent = isRoot ? collection : getCollection(parentName); - - //parent collection permissions - if(!parent.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'"); + // WRITE LOCK the collection's parent (as we will remove this collection from it) + final XmldbURI parentCollectionUri = collection.getParentURI() == null ? 
XmldbURI.ROOT_COLLECTION_URI : collection.getParentURI(); + try(final ManagedCollectionLock parentCollectionLock = writeLockCollection(parentCollectionUri)) { + return _removeCollection(transaction, collection); + } catch(final LockException e) { + LOG.error("Unable to lock Collection: {}", collection.getURI(), e); + return false; } + } - if(!parent.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'"); - } + private boolean _removeCollection(final Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection collection) throws PermissionDeniedException, TriggerException, IOException { + final XmldbURI collectionUri = collection.getURI(); - //this collection permissions - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'"); - } + getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_COLLECTION, collectionUri); - if(!collection.isEmpty(this)) { - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'"); - } + try { - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { + @Nullable final Collection parentCollection = collection.getParentURI() == null ? 
null : getCollection(collection.getParentURI()); // NOTE: we already have a WRITE lock on the parent of the Collection we set out to remove + + // 1) check the current user has permission to delete the Collection + //TODO(AR) the below permissions check could be optimised when descending the tree so we don't check the same collection(s) twice in some cases + if(!checkRemoveCollectionPermissions(parentCollection, collection)) { throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' is not allowed to remove collection '" + collection.getURI() + "'"); } - } - try { + final CollectionTrigger colTrigger = new CollectionTriggers(this, transaction, parentCollection == null ? collection : parentCollection); + colTrigger.beforeDeleteCollection(this, transaction, collection); - pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_COLLECTION, collection.getURI()); + // 2) remove descendant collections + for (final Iterator subCollectionName = collection.collectionIteratorNoLock(this); subCollectionName.hasNext(); ) { // NOTE: we already have a WRITE lock on the parent of the Collection we set out to remove + final XmldbURI subCollectionUri = collectionUri.append(subCollectionName.next()); + final boolean removedSubCollection = _removeCollection(transaction, getCollection(subCollectionUri)); // NOTE: we already have a WRITE lock on the parent of the Collection we set out to remove + if(!removedSubCollection) { + LOG.error("Unable to remove Collection: {}", subCollectionUri); + return false; + } + } - final CollectionTrigger colTrigger = new CollectionTriggers(this, parent); + //TODO(AR) this can be executed asynchronously as a task, Do we need to await the completion before unlocking the collection? or just await completion before returning from the first call to _removeCollection? 
+ // 3) drop indexes for this Collection + notifyDropIndex(collection); + getIndexController().removeCollection(collection, this, false); - colTrigger.beforeDeleteCollection(this, transaction, collection); + // 4) remove this Collection from the parent Collection + if(parentCollection != null) { + parentCollection.removeCollection(this, collectionUri.lastSegment()); + saveCollection(transaction, parentCollection); + } - final long start = System.currentTimeMillis(); - final CollectionCache collectionsCache = pool.getCollectionsCache(); + // 5) remove Collection from collections.dbx + if(parentCollection != null) { + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { + final Value key = new CollectionStore.CollectionKey(collectionUri.getRawCollectionPath()); + collectionsDb.remove(transaction, key); + + //TODO(AR) is this the correct place to invalidate the config? + // Notify the collection configuration manager + final CollectionConfigurationManager manager = pool.getConfigurationManager(); + if(manager != null) { + manager.invalidate(collectionUri, getBrokerPool()); + } + } - synchronized(collectionsCache) { - final XmldbURI uri = collection.getURI(); - final String collName = uri.getRawCollectionPath(); + // invalidate the cache entry + final CollectionCache collectionsCache = pool.getCollectionsCache(); + collectionsCache.invalidate(collection.getURI()); + } else { + // if this is the root collection we just have to save + // it to persist the removal of any subCollections to collections.dbx + saveCollection(transaction, collection); + } - // Notify the collection configuration manager - final CollectionConfigurationManager manager = pool.getConfigurationManager(); - if(manager != null) { - manager.invalidate(uri, getBrokerPool()); + //TODO(AR) this could possibly be executed asynchronously as a task, we don't need to know when it completes (this is because access to documents is through a Collection, and 
the Collection was removed above), however we cannot recycle the collectionId until all docs are gone + // 6) unlink all documents from the Collection + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { + final Value docKey = new CollectionStore.DocumentKey(collection.getId()); + final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, docKey); + collectionsDb.removeAll(transaction, query); + if(parentCollection != null) { // we must not free the root collection id! + collectionsDb.freeCollectionId(collection.getId()); } + } catch(final BTreeException | IOException e) { + LOG.error("Unable to unlink documents from the Collection: {}", collectionUri, e); + } - if(LOG.isDebugEnabled()) { - LOG.debug("Removing children collections from their parent '" + collName + "'..."); - } + //TODO(AR) this can be executed asynchronously as a task, we need to await the completion before unlocking the collection + // 7) remove the documents nodes of the Collection from dom.dbx + removeCollectionsDocumentNodes(transaction, collection); - try { - for (final Iterator i = collection.collectionIterator(this); i.hasNext(); ) { - final XmldbURI childName = i.next(); - //TODO : resolve from collection's base URI - //TODO : resolve URIs !!! (uri.resolve(childName)) - final Collection childCollection = openCollection(uri.append(childName), LockMode.WRITE_LOCK); - try { - removeCollection(transaction, childCollection); - } catch (final NullPointerException npe) { - LOG.error("childCollection '" + childName + "' is corrupted. 
Caught NPE to be able to actually remove the parent."); - } finally { - if (childCollection != null) { - childCollection.getLock().release(LockMode.WRITE_LOCK); - } else { - LOG.warn("childCollection is null !"); - } - } - } - } catch(final LockException e) { - LOG.error("LockException while removing collection '" + collName + "'", e); - return false; - } + //TODO(AR) this can be executed asynchronously as a task, we need to await the completion before unlocking the collection + //TODO(AR) could optimise by only calling at the highest level (i.e. the first call to _removeCollection) + // 8) remove any binary files that were in the Collection + removeCollectionBinaries(transaction, collection); + + colTrigger.afterDeleteCollection(this, transaction, collectionUri); - //Drop all index entries - notifyDropIndex(collection); + return true; - // Drop custom indexes - getIndexController().removeCollection(collection, this, false); + } catch(final LockException e) { + LOG.error("Unable to lock Collection: {}", collectionUri, e); + return false; + } finally { + getBrokerPool().getProcessMonitor().endJob(); + } + } - if(!isRoot) { - // remove from parent collection - //TODO : resolve URIs ! 
(uri.resolve("..")) - Collection parentCollection = null; - try { - parentCollection = openCollection(collection.getParentURI(), LockMode.WRITE_LOCK); - if(parentCollection != null) { - // keep a lock for the transaction - if(transaction != null) { - transaction.acquireLock(parentCollection.getLock(), LockMode.WRITE_LOCK); - } + private void removeCollectionBinaries(final Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection collection) throws IOException { + final Path fsSourceDir = getCollectionFile(getFsDir(), collection.getURI(), false); + if(fsJournalDir.isPresent()) { + final Path fsTargetDir = getCollectionFile(fsJournalDir.get(), transaction, collection.getURI(), true); - LOG.debug("Removing collection '" + collName + "' from its parent..."); - //TODO : resolve from collection's base URI - parentCollection.removeCollection(this, uri.lastSegment()); - saveCollection(transaction, parentCollection); - } - } catch(final LockException e) { - LOG.error("LockException while removing collection '" + collName + "'"); - } finally { - if(parentCollection != null) { - parentCollection.getLock().release(LockMode.WRITE_LOCK); - } - } - } + // remove child binary collections + if (Files.exists(fsSourceDir)) { + Files.createDirectories(fsTargetDir.getParent()); - //Update current state - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - // remove the metadata of all documents in the collection - final Value docKey = new CollectionStore.DocumentKey(collection.getId()); - final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, docKey); - collectionsDb.removeAll(transaction, query); - // if this is not the root collection remove it... - if(!isRoot) { - final Value key = new CollectionStore.CollectionKey(collName); - //... from the disk - collectionsDb.remove(transaction, key); - //... 
from the cache - collectionsCache.remove(collection); - //and free its id for any further use - collectionsDb.freeCollectionId(collection.getId()); - } else { - //Simply save the collection on disk - //It will remain cached - //and its id well never be made available - saveCollection(transaction, collection); + //TODO(DS) log first, rename second ??? + //TODO(DW) not sure a Fatal is required here. Copy and delete maybe? + Files.move(fsSourceDir, fsTargetDir, StandardCopyOption.ATOMIC_MOVE); + + if (logManager.isPresent()) { + final Loggable loggable = new RenameBinaryLoggable(this, transaction, fsSourceDir, fsTargetDir); + try { + logManager.get().journal(loggable); + } catch (final JournalException e) { + LOG.error(e.getMessage(), e); } - } catch(final LockException e) { - LOG.error("Failed to acquire lock on '" + FileUtils.fileName(collectionsDb.getFile()) + "'"); - } - //catch(ReadOnlyException e) { - //throw new PermissionDeniedException(DATABASE_IS_READ_ONLY); - //} - catch(final BTreeException | IOException e) { - LOG.error("Exception while removing collection: " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } + } + } else { + FileUtils.delete(fsSourceDir); + } + } - //Remove child resources - if(LOG.isDebugEnabled()) { - LOG.debug("Removing resources in '" + collName + "'..."); - } - final DocumentTrigger docTrigger = new DocumentTriggers(this, collection); + private void removeCollectionsDocumentNodes(final Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection collection) + throws TriggerException, PermissionDeniedException, LockException { + final DocumentTrigger docTrigger = new DocumentTriggers(this, transaction, collection); - try { - for (final Iterator i = collection.iterator(this); i.hasNext(); ) { - final DocumentImpl doc = i.next(); - - docTrigger.beforeDeleteDocument(this, transaction, doc); - - //Remove doc's metadata - // WM: now removed in one step. see above. 
- //removeResourceMetadata(transaction, doc); - //Remove document nodes' index entries - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { - @Override - public Object start() { - try { - final Value ref = new NodeRef(doc.getDocId()); - final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); - domDb.remove(transaction, query, null); - } catch (final BTreeException e) { - LOG.error("btree error while removing document", e); - } catch (final IOException e) { - LOG.error("io error while removing document", e); - } catch (final TerminatedException e) { - LOG.error("method terminated", e); - } - return null; - } - }.run(); - //Remove nodes themselves - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { - @Override - public Object start() { - if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { - final long page = ((BinaryDocument) doc).getPage(); - if (page > Page.NO_PAGE) { - domDb.removeOverflowValue(transaction, page); - } - } else { - final NodeHandle node = (NodeHandle) doc.getFirstChild(); - domDb.removeAll(transaction, node.getInternalAddress()); - } - return null; - } - }.run(); + for (final Iterator itDocument = collection.iteratorNoLock(this); itDocument.hasNext(); ) { // NOTE: we already have a WRITE_LOCK on the collection + final DocumentImpl doc = itDocument.next(); - docTrigger.afterDeleteDocument(this, transaction, doc.getURI()); + docTrigger.beforeDeleteDocument(this, transaction, doc); - //Make doc's id available again - collectionsDb.freeResourceId(doc.getDocId()); + //Remove doc's metadata + // WM: now removed in one step. see above. 
+ //removeResourceMetadata(transaction, doc); + //Remove document nodes' index entries + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { + @Override + public Object start() { + try { + final Value ref = new NodeRef(doc.getDocId()); + final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); + domDb.remove(transaction, query, null); + } catch (final BTreeException e) { + LOG.error("btree error while removing document", e); + } catch (final IOException e) { + LOG.error("io error while removing document", e); + } catch (final TerminatedException e) { + LOG.error("method terminated", e); } - } catch(final LockException e) { - LOG.error("LockException while removing documents from collection '" + collection.getURI() + "'.", e); - return false; + return null; } + }.run(); - //now that the database has been updated, update the binary collections on disk - final Path fsSourceDir = getCollectionFile(getFsDir(), collection.getURI(), false); - if(fsJournalDir.isPresent()) { - final Path fsTargetDir = getCollectionFile(fsJournalDir.get(), transaction, collection.getURI(), true); - - // remove child binary collections - if (Files.exists(fsSourceDir)) { - Files.createDirectories(fsTargetDir.getParent()); - - //TODO(DS) log first, rename second ??? - //TODO(DW) not sure a Fatal is required here. Copy and delete maybe? 
- Files.move(fsSourceDir, fsTargetDir, StandardCopyOption.ATOMIC_MOVE); - - if (logManager.isPresent()) { - final Loggable loggable = new RenameBinaryLoggable(this, transaction, fsSourceDir, fsTargetDir); - try { - logManager.get().journal(loggable); - } catch (final JournalException e) { - LOG.error(e.getMessage(), e); - } + //Remove nodes themselves + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { + @Override + public Object start() { + if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { + final long page = ((BinaryDocument) doc).getPage(); + if (page > Page.NO_PAGE) { + domDb.removeOverflowValue(transaction, page); } + } else { + final NodeHandle node = (NodeHandle) doc.getFirstChild(); + domDb.removeAll(transaction, node.getInternalAddress()); } - } else { - FileUtils.delete(fsSourceDir); + return null; } + }.run(); + docTrigger.afterDeleteDocument(this, transaction, doc.getURI()); - if(LOG.isDebugEnabled()) { - LOG.debug("Removing collection '" + collName + "' took " + (System.currentTimeMillis() - start)); - } + //Make doc's id available again + collectionsDb.freeResourceId(doc.getDocId()); + } + } + + /** + * Checks that the current user has permissions to remove the Collection + * + * @param parentCollection The parent Collection or null if we are testing the root Collection + * @param collection The Collection to check permissions for removal + * + * @return true if the current user is allowed to remove the Collection + */ + private boolean checkRemoveCollectionPermissions( + @Nullable @EnsureLocked(mode=LockMode.READ_LOCK) final Collection parentCollection, + @EnsureLocked(mode=LockMode.READ_LOCK) final Collection collection) throws PermissionDeniedException { + // parent collection permissions + if(parentCollection != null) { + if (!parentCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { + return false; + } + if 
(!parentCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { + return false; + } + } - colTrigger.afterDeleteCollection(this, transaction, collection.getURI()); + // collection permissions + if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.READ)) { + return false; + } - return true; + if(!collection.isEmpty(this)) { + if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { + return false; + } + if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { + return false; } - } finally { - pool.getProcessMonitor().endJob(); } + + return true; + } + + /** + * Acquires a write lock on a Collection + * + * @param collectionUri The uri of the collection to lock + * + * @return A managed lock for the Collection + */ + private ManagedCollectionLock writeLockCollection(final XmldbURI collectionUri) throws LockException { + return lockManager.acquireCollectionWriteLock(collectionUri); } /** - * Saves the specified collection to storage. Collections are usually cached in - * memory. If a collection is modified, this method needs to be called to make - * the changes persistent. + * Acquires a READ lock on a Collection * - * Note: appending a new document to a collection does not require a save. 
+ * @param collectionUri The uri of the collection to lock * - * @throws PermissionDeniedException - * @throws IOException - * @throws TriggerException + * @return A managed lock for the Collection */ + private ManagedCollectionLock readLockCollection(final XmldbURI collectionUri) throws LockException { + return lockManager.acquireCollectionReadLock(collectionUri); + } + @Override - public void saveCollection(final Txn transaction, final Collection collection) throws PermissionDeniedException, IOException, TriggerException { + public void saveCollection(final Txn transaction, final Collection collection) throws IOException { if(collection == null) { LOG.error("NativeBroker.saveCollection called with collection == null! Aborting."); return; } + if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } - pool.getCollectionsCache().add(collection); + final CollectionCache collectionsCache = pool.getCollectionsCache(); + collectionsCache.put(collection); - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { if(collection.getId() == Collection.UNKNOWN_COLLECTION_ID) { collection.setId(getNextCollectionId(transaction)); } + final Value name = new CollectionStore.CollectionKey(collection.getURI().toString()); try(final VariableByteOutputStream os = new VariableByteOutputStream(8)) { collection.serialize(os); final long address = collectionsDb.put(transaction, name, os.data(), true); if (address == BFile.UNKNOWN_ADDRESS) { - //TODO : exception !!! 
-pb - LOG.error("could not store collection data for '" + collection.getURI() + "'"); - return; + throw new IOException("Could not store collection data for '" + collection.getURI() + "', address=BFile.UNKNOWN_ADDRESS"); } collection.setAddress(address); } } catch(final ReadOnlyException e) { - LOG.warn(DATABASE_IS_READ_ONLY); + throw new IOException(DATABASE_IS_READ_ONLY, e); } catch(final LockException e) { - LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile()), e); - } finally { - lock.release(LockMode.WRITE_LOCK); + throw new IOException(e); } } @@ -1753,14 +2001,12 @@ public void saveCollection(final Txn transaction, final Collection collection) t * @return next available unique collection id * @throws ReadOnlyException */ - public int getNextCollectionId(final Txn transaction) throws ReadOnlyException { + public int getNextCollectionId(final Txn transaction) throws ReadOnlyException, LockException { int nextCollectionId = collectionsDb.getFreeCollectionId(); if(nextCollectionId != Collection.UNKNOWN_COLLECTION_ID) { return nextCollectionId; } - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { final Value key = new CollectionStore.CollectionKey(CollectionStore.NEXT_COLLECTION_ID_KEY); final Value data = collectionsDb.get(key); if(data != null) { @@ -1771,91 +2017,84 @@ public int getNextCollectionId(final Txn transaction) throws ReadOnlyException { ByteConversion.intToByte(nextCollectionId, d, OFFSET_COLLECTION_ID); collectionsDb.put(transaction, key, d, true); return nextCollectionId; - } catch(final LockException e) { - LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile()), e); - return Collection.UNKNOWN_COLLECTION_ID; - //TODO : rethrow ? 
-pb - } finally { - lock.release(LockMode.WRITE_LOCK); } } @Override - public void reindexCollection(XmldbURI collectionName) throws PermissionDeniedException, IOException { + public void reindexCollection(final Txn transaction, final XmldbURI collectionUri) throws PermissionDeniedException, IOException, LockException { if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } - collectionName = prepend(collectionName.toCollectionPathURI()); - final Collection collection = getCollection(collectionName); - if(collection == null) { - LOG.debug("collection " + collectionName + " not found!"); - return; - } - reindexCollection(collection, IndexMode.STORE); - } - - public void reindexCollection(final Collection collection, final IndexMode mode) throws PermissionDeniedException { - final TransactionManager transact = pool.getTransactionManager(); + final XmldbURI fqUri = prepend(collectionUri.toCollectionPathURI()); final long start = System.currentTimeMillis(); + try(final Collection collection = openCollection(fqUri, LockMode.READ_LOCK)) { + if (collection == null) { + LOG.warn("Collection {} not found!", fqUri); + return; + } - try(final Txn transaction = transact.beginTransaction()) { - LOG.info(String.format("Start indexing collection %s", collection.getURI().toString())); + LOG.info("Start indexing collection {}", collection.getURI().toString()); pool.getProcessMonitor().startJob(ProcessMonitor.ACTION_REINDEX_COLLECTION, collection.getURI()); - reindexCollection(transaction, collection, mode); - transact.commit(transaction); - - } catch(final Exception e) { + reindexCollection(transaction, collection, IndexMode.STORE); + } catch(final PermissionDeniedException | IOException e) { LOG.error("An error occurred during reindex: " + e.getMessage(), e); - } finally { pool.getProcessMonitor().endJob(); LOG.info(String.format("Finished indexing collection %s in %s ms.", - collection.getURI().toString(), System.currentTimeMillis() - start)); + fqUri, 
System.currentTimeMillis() - start)); } } - public void reindexCollection(final Txn transaction, final Collection collection, final IndexMode mode) throws PermissionDeniedException, IOException { - final CollectionCache collectionsCache = pool.getCollectionsCache(); - synchronized(collectionsCache) { - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on collection " + collection.getURI()); - } - LOG.debug("Reindexing collection " + collection.getURI()); - if(mode == IndexMode.STORE) { - dropCollectionIndex(transaction, collection, true); - } - try { - for (final Iterator i = collection.iterator(this); i.hasNext(); ) { - final DocumentImpl next = i.next(); - reindexXMLResource(transaction, next, mode); - } - } catch(final LockException e) { - LOG.error("LockException while reindexing documents of collection '" + collection.getURI() + ". Skipping...", e); + private void reindexCollection(final Txn transaction, + @EnsureLocked(mode=LockMode.READ_LOCK) final Collection collection, final IndexMode mode) + throws PermissionDeniedException, IOException, LockException { + if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on collection " + collection.getURI()); + } + + LOG.debug("Reindexing collection " + collection.getURI()); + if(mode == IndexMode.STORE) { + dropCollectionIndex(transaction, collection, true); + } + + // reindex documents + try { + for (final Iterator i = collection.iterator(this); i.hasNext(); ) { + final DocumentImpl next = i.next(); + reindexXMLResource(transaction, next, mode); } + } catch(final LockException e) { + LOG.error("LockException while reindexing documents of collection '{}'. 
Skipping...", collection.getURI(), e); + } - try { - for (final Iterator i = collection.collectionIterator(this); i.hasNext(); ) { - final XmldbURI next = i.next(); - //TODO : resolve URIs !!! (collection.getURI().resolve(next)) - final Collection child = getCollection(collection.getURI().append(next)); + // descend into child collections + try { + for (final Iterator i = collection.collectionIterator(this); i.hasNext(); ) { + final XmldbURI childName = i.next(); + final XmldbURI childUri = collection.getURI().append(childName); + try(final Collection child = openCollection(childUri, LockMode.READ_LOCK)) { if (child == null) { - LOG.error("Collection '" + next + "' not found"); + throw new IOException("Collection '" + childUri + "' not found"); } else { reindexCollection(transaction, child, mode); } } - } catch(final LockException e) { - LOG.error("LockException while reindexing child collections of collection '" + collection.getURI() + ". Skipping...", e); } + } catch(final LockException e) { + LOG.error("LockException while reindexing child collections of collection '" + collection.getURI() + ". 
Skipping...", e); } } - public void dropCollectionIndex(final Txn transaction, final Collection collection) throws PermissionDeniedException, IOException { + private void dropCollectionIndex(final Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection collection) + throws PermissionDeniedException, IOException, LockException { dropCollectionIndex(transaction, collection, false); } - public void dropCollectionIndex(final Txn transaction, final Collection collection, final boolean reindex) throws PermissionDeniedException, IOException { + private void dropCollectionIndex(final Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection collection, final boolean reindex) + throws PermissionDeniedException, IOException, LockException { if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } @@ -1864,34 +2103,24 @@ public void dropCollectionIndex(final Txn transaction, final Collection collecti } notifyDropIndex(collection); getIndexController().removeCollection(collection, this, reindex); - try { - for (final Iterator i = collection.iterator(this); i.hasNext(); ) { - final DocumentImpl doc = i.next(); - LOG.debug("Dropping index for document " + doc.getFileURI()); - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { - @Override - public Object start() { - try { - final Value ref = new NodeRef(doc.getDocId()); - final IndexQuery query = - new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); - domDb.remove(transaction, query, null); - domDb.flush(); - } catch (final BTreeException e) { - LOG.error("btree error while removing document", e); - } catch (final DBException e) { - LOG.error("db error while removing document", e); - } catch (final IOException e) { - LOG.error("io error while removing document", e); - } catch (final TerminatedException e) { - LOG.error("method terminated", e); - } - return null; + for (final Iterator i = collection.iterator(this); i.hasNext(); ) { + final DocumentImpl doc = i.next(); + 
LOG.debug("Dropping index for document " + doc.getFileURI()); + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { + @Override + public Object start() { + try { + final Value ref = new NodeRef(doc.getDocId()); + final IndexQuery query = + new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); + domDb.remove(transaction, query, null); + domDb.flush(); + } catch (final TerminatedException | IOException | DBException e) { + LOG.error("Error while removing Document '{}' from Collection index: {}", doc.getURI().lastSegment(), collection.getURI(), e); } - }.run(); - } - } catch(final LockException e) { - LOG.error("LockException while removing index of collection '" + collection.getURI(), e); + return null; + } + }.run(); } } @@ -1918,44 +2147,51 @@ public DocumentImpl storeTempResource(final org.exist.dom.memtree.DocumentImpl d final XmldbURI docName = XmldbURI.create(MessageDigester.md5(Thread.currentThread().getName() + Long.toString(System.currentTimeMillis()), false) + ".xml"); //get the temp collection - try (final Txn transaction = transact.beginTransaction()) { - Tuple2 tuple = getOrCreateTempCollection(transaction); - Collection temp = tuple._2; - if (!tuple._1) { - transaction.acquireLock(temp.getLock(), LockMode.WRITE_LOCK); + try(final Txn transaction = transact.beginTransaction(); + final ManagedCollectionLock tempCollectionLock = lockManager.acquireCollectionWriteLock(XmldbURI.TEMP_COLLECTION_URI)) { + + // if temp collection does not exist, creates temp collection (with write lock in Txn) + final Tuple2 createdOrExistingTemp = getOrCreateTempCollection(transaction); + if (createdOrExistingTemp == null) { + LOG.error("Failed to create temporary collection"); + transact.abort(transaction); + return null; } - //create a temporary document - final DocumentImpl targetDoc = new DocumentImpl(pool, temp, docName); - targetDoc.getPermissions().setMode(Permission.DEFAULT_TEMPORARY_DOCUMENT_PERM); - - final long now = 
System.currentTimeMillis(); - final DocumentMetadata metadata = new DocumentMetadata(); - metadata.setLastModified(now); - metadata.setCreated(now); - targetDoc.setMetadata(metadata); - targetDoc.setDocId(getNextResourceId(transaction, temp)); - - //index the temporary document - final DOMIndexer indexer = new DOMIndexer(this, transaction, doc, targetDoc); - indexer.scan(); - indexer.store(); - - //store the temporary document - temp.addDocument(transaction, this, targetDoc); - - storeXMLResource(transaction, targetDoc); + final Collection temp = createdOrExistingTemp._2; - saveCollection(transaction, temp); + //create a temporary document + try (final ManagedDocumentLock docLock = lockManager.acquireDocumentWriteLock(temp.getURI().append(docName))) { + final DocumentImpl targetDoc = new DocumentImpl(pool, temp, docName); + PermissionFactory.chmod(this, targetDoc, Optional.of(Permission.DEFAULT_TEMPORARY_DOCUMENT_PERM), Optional.empty()); + final long now = System.currentTimeMillis(); + final DocumentMetadata metadata = new DocumentMetadata(); + metadata.setLastModified(now); + metadata.setCreated(now); + targetDoc.setMetadata(metadata); + targetDoc.setDocId(getNextResourceId(transaction)); + //index the temporary document + final DOMIndexer indexer = new DOMIndexer(this, transaction, doc, targetDoc); + indexer.scan(); + indexer.store(); + //store the temporary document + temp.addDocument(transaction, this, targetDoc); + + storeXMLResource(transaction, targetDoc); + + saveCollection(transaction, temp); + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + temp.close(); - flush(); - closeDocument(); - //commit the transaction - transact.commit(transaction); - return targetDoc; + flush(); + closeDocument(); + //commit the transaction + transact.commit(transaction); + return targetDoc; + } } catch (final Exception e) { LOG.error("Failed to store temporary fragment: " + e.getMessage(), e); - //abort the transaction } } finally { 
//restore the user @@ -1972,25 +2208,24 @@ public DocumentImpl storeTempResource(final org.exist.dom.memtree.DocumentImpl d */ @Override public void cleanUpTempResources(final boolean forceRemoval) throws PermissionDeniedException { - final Collection temp = getCollection(XmldbURI.TEMP_COLLECTION_URI); - if(temp == null) { - return; - } - final TransactionManager transact = pool.getTransactionManager(); - try(final Txn transaction = transact.beginTransaction()) { - removeCollection(transaction, temp); - transact.commit(transaction); - } catch(final Exception e) { - LOG.error("Failed to remove temp collection: " + e.getMessage(), e); + try (final Collection temp = openCollection(XmldbURI.TEMP_COLLECTION_URI, LockMode.WRITE_LOCK)) { + if (temp == null) { + return; + } + final TransactionManager transact = pool.getTransactionManager(); + try (final Txn transaction = transact.beginTransaction()) { + removeCollection(transaction, temp); + transact.commit(transaction); + } catch (final Exception e) { + LOG.error("Failed to remove temp collection: " + e.getMessage(), e); + } } } @Override public DocumentImpl getResourceById(final int collectionId, final byte resourceType, final int documentId) throws PermissionDeniedException { XmldbURI uri = null; - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { //final VariableByteOutputStream os = new VariableByteOutputStream(8); //doc.write(os); //Value key = new CollectionStore.DocumentKey(doc.getCollection().getId(), doc.getResourceType(), doc.getDocId()); @@ -2040,8 +2275,6 @@ public DocumentImpl getResourceById(final int collectionId, final byte resourceT } catch(final IOException e) { LOG.error("IOException while reading resource data", e); return null; - } finally { - lock.release(LockMode.READ_LOCK); } return getResource(uri, Permission.READ); @@ -2052,11 +2285,8 @@ public 
DocumentImpl getResourceById(final int collectionId, final byte resourceT */ @Override public void storeXMLResource(final Txn transaction, final DocumentImpl doc) { - - - final Lock lock = collectionsDb.getLock(); - try(final VariableByteOutputStream os = new VariableByteOutputStream(8)) { - lock.acquire(LockMode.WRITE_LOCK); + try(final VariableByteOutputStream os = new VariableByteOutputStream(8); + final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { doc.write(os); final Value key = new CollectionStore.DocumentKey(doc.getCollection().getId(), doc.getResourceType(), doc.getDocId()); collectionsDb.put(transaction, key, os.data(), true); @@ -2066,14 +2296,13 @@ public void storeXMLResource(final Txn transaction, final DocumentImpl doc) { LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); } catch(final IOException e) { LOG.error("IOException while writing document data: " + doc.getURI(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } + @Override public void storeMetadata(final Txn transaction, final DocumentImpl doc) throws TriggerException { final Collection col = doc.getCollection(); - final DocumentTrigger trigger = new DocumentTriggers(this, col); + final DocumentTrigger trigger = new DocumentTriggers(this, transaction, col); trigger.beforeUpdateDocumentMetadata(this, transaction, doc); @@ -2082,11 +2311,14 @@ public void storeMetadata(final Txn transaction, final DocumentImpl doc) throws trigger.afterUpdateDocumentMetadata(this, transaction, doc); } - protected Path getCollectionFile(final Path dir, final XmldbURI uri, final boolean create) throws IOException { + protected Path getCollectionFile(final Path dir, + @EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) final XmldbURI uri, final boolean create) + throws IOException { return getCollectionFile(dir, null, uri, create); } - public Path getCollectionBinaryFileFsPath(final XmldbURI uri) { + 
public Path getCollectionBinaryFileFsPath( + @EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) final XmldbURI uri) { String suri = uri.getURI().toString(); if(suri.startsWith("/")) { suri = suri.substring(1); @@ -2094,7 +2326,8 @@ public Path getCollectionBinaryFileFsPath(final XmldbURI uri) { return getFsDir().resolve(suri); } - private Path getCollectionFile(Path dir, final Txn transaction, final XmldbURI uri, final boolean create) + private Path getCollectionFile(Path dir, final Txn transaction, + @EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) final XmldbURI uri, final boolean create) throws IOException { if(transaction != null) { dir = dir.resolve("txn." + transaction.getId()); @@ -2145,7 +2378,9 @@ public void storeBinaryResource(final Txn transaction, final BinaryDocument blob * @param blob The binary document to store * @param fWriteData A function that given the destination path, writes the document data to that path */ - private void storeBinaryResource(final Txn transaction, final BinaryDocument blob, final ConsumerE fWriteData) throws IOException { + private void storeBinaryResource(final Txn transaction, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final BinaryDocument blob, + final ConsumerE fWriteData) throws IOException { blob.setPage(Page.NO_PAGE); final Path binFile = getCollectionFile(getFsDir(), blob.getURI(), true); final boolean exists = Files.exists(binFile); @@ -2187,39 +2422,48 @@ public DocumentImpl getResource(XmldbURI fileName, final int accessType) throws //TODO : resolve URIs !!! 
final XmldbURI collUri = fileName.removeLastSegment(); final XmldbURI docUri = fileName.lastSegment(); - final Collection collection = getCollection(collUri); - if(collection == null) { - LOG.debug("collection '" + collUri + "' not found!"); - return null; - } + try(final Collection collection = openCollection(collUri, LockMode.READ_LOCK)) { + if (collection == null) { + LOG.debug("collection '" + collUri + "' not found!"); + return null; + } - //if(!collection.getPermissions().validate(getCurrentSubject(), Permission.READ)) { - //throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getCurrentSubject().getName()); - //} + //if(!collection.getPermissions().validate(getCurrentSubject(), Permission.READ)) { + //throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getCurrentSubject().getName()); + //} - final DocumentImpl doc = collection.getDocument(this, docUri); - if(doc == null) { - LOG.debug("document '" + fileName + "' not found!"); - return null; - } + try(final LockedDocument lockedDocument = collection.getDocumentWithLock(this, docUri, LockMode.READ_LOCK)) { - if(!doc.getPermissions().validate(getCurrentSubject(), accessType)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' not allowed requested access to document '" + fileName + "'"); - } + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); - if(doc.getResourceType() == DocumentImpl.BINARY_FILE) { - final BinaryDocument bin = (BinaryDocument) doc; - try { - bin.setContentLength(getBinaryResourceSize(bin)); - } catch(final IOException ex) { - LOG.fatal("Cannot get content size for " + bin.getURI(), ex); + if (lockedDocument == null) { + LOG.debug("document '" + fileName + "' not found!"); + return null; + } + + final DocumentImpl doc = lockedDocument.getDocument(); + if (!doc.getPermissions().validate(getCurrentSubject(), 
accessType)) { + throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' not allowed requested access to document '" + fileName + "'"); + } + + if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { + final BinaryDocument bin = (BinaryDocument) doc; + try { + bin.setContentLength(getBinaryResourceSize(bin)); + } catch (final IOException ex) { + LOG.fatal("Cannot get content size for " + bin.getURI(), ex); + } + } + return doc; + } catch(final LockException e) { + throw new PermissionDeniedException(e); } } - return doc; } @Override - public DocumentImpl getXMLResource(XmldbURI fileName, final LockMode lockMode) throws PermissionDeniedException { + public LockedDocument getXMLResource(XmldbURI fileName, final LockMode lockMode) throws PermissionDeniedException { if(fileName == null) { return null; } @@ -2227,38 +2471,40 @@ public DocumentImpl getXMLResource(XmldbURI fileName, final LockMode lockMode) t //TODO : resolve URIs ! final XmldbURI collUri = fileName.removeLastSegment(); final XmldbURI docUri = fileName.lastSegment(); - Collection collection = null; - try { - collection = openCollection(collUri, LockMode.READ_LOCK); - if(collection == null) { + try(final Collection collection = openCollection(collUri, LockMode.READ_LOCK)) { + if (collection == null) { LOG.debug("Collection '" + collUri + "' not found!"); return null; } - //if (!collection.getPermissions().validate(getCurrentSubject(), Permission.EXECUTE)) { - // throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getCurrentSubject().getName()); - //} - final DocumentImpl doc = collection.getDocumentWithLock(this, docUri, lockMode); - if(doc == null) { - //LOG.debug("document '" + fileName + "' not found!"); - return null; - } - //if (!doc.getMode().validate(getUser(), Permission.READ)) - //throw new PermissionDeniedException("not allowed to read document"); - if(doc.getResourceType() == DocumentImpl.BINARY_FILE) { - final 
BinaryDocument bin = (BinaryDocument) doc; - try { - bin.setContentLength(getBinaryResourceSize(bin)); - } catch(final IOException ex) { - LOG.fatal("Cannot get content size for " + bin.getURI(), ex); + try { + //if (!collection.getPermissions().validate(getCurrentSubject(), Permission.EXECUTE)) { + // throw new PermissionDeniedException("Permission denied to read collection '" + collUri + "' by " + getCurrentSubject().getName()); + //} + final LockedDocument lockedDocument = collection.getDocumentWithLock(this, docUri, lockMode); + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + if (lockedDocument == null) { + //LOG.debug("document '" + fileName + "' not found!"); + return null; } - } - return doc; - } catch(final LockException e) { - LOG.error("Could not acquire lock on document " + fileName, e); - //TODO : exception ? -pb - } finally { - if(collection != null) { - collection.release(LockMode.READ_LOCK); + //if (!doc.getMode().validate(getUser(), Permission.READ)) + //throw new PermissionDeniedException("not allowed to read document"); + final DocumentImpl doc = lockedDocument.getDocument(); + if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { + final BinaryDocument bin = (BinaryDocument) doc; + try { + bin.setContentLength(getBinaryResourceSize(bin)); + } catch (final IOException ex) { + LOG.fatal("Cannot get content size for " + bin.getURI(), ex); + //TODO : exception + } + } + return lockedDocument; + } catch (final LockException e) { + LOG.error("Could not acquire lock on document " + fileName, e); + //TODO : exception ? 
-pb } } return null; @@ -2292,9 +2538,7 @@ public InputStream getBinaryResource(final BinaryDocument blob) //TODO : consider a better cooperation with Collection -pb @Override public void getCollectionResources(final Collection.InternalAccess collectionInternalAccess) { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { final Value key = new CollectionStore.DocumentKey(collectionInternalAccess.getId()); final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key); @@ -2303,16 +2547,12 @@ public void getCollectionResources(final Collection.InternalAccess collectionInt LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); } catch(final IOException | BTreeException | TerminatedException e) { LOG.error("Exception while reading document data", e); - } finally { - lock.release(LockMode.READ_LOCK); } } @Override public void getResourcesFailsafe(final BTreeCallback callback, final boolean fullScan) throws TerminatedException { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { final Value key = new CollectionStore.DocumentKey(); final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key); if(fullScan) { @@ -2324,16 +2564,12 @@ public void getResourcesFailsafe(final BTreeCallback callback, final boolean ful LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); } catch(final IOException | BTreeException e) { LOG.error("Exception while reading document data", e); - } finally { - lock.release(LockMode.READ_LOCK); } } @Override public void getCollectionsFailsafe(final BTreeCallback callback) throws TerminatedException { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); 
+ try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeReadLock(collectionsDb.getLockName())) { final Value key = new CollectionStore.CollectionKey(); final IndexQuery query = new IndexQuery(IndexQuery.TRUNC_RIGHT, key); collectionsDb.query(query, callback); @@ -2341,47 +2577,33 @@ public void getCollectionsFailsafe(final BTreeCallback callback) throws Terminat LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); } catch(final IOException | BTreeException e) { LOG.error("Exception while reading document data", e); - } finally { - lock.release(LockMode.READ_LOCK); } } - /** - * Get all the documents in this database matching the given - * document-type's name. - * - * @return The documentsByDoctype value - */ @Override - public MutableDocumentSet getXMLResourcesByDoctype(final String doctypeName, final MutableDocumentSet result) throws PermissionDeniedException { + public MutableDocumentSet getXMLResourcesByDoctype(final String doctypeName, final MutableDocumentSet result) throws PermissionDeniedException, LockException { final MutableDocumentSet docs = getAllXMLResources(new DefaultDocumentSet()); for(final Iterator i = docs.getDocumentIterator(); i.hasNext(); ) { final DocumentImpl doc = i.next(); - final DocumentType doctype = doc.getDoctype(); - if(doctype == null) { - continue; - } - if(doctypeName.equals(doctype.getName()) - && doc.getCollection().getPermissionsNoLock().validate(getCurrentSubject(), Permission.READ) - && doc.getPermissions().validate(getCurrentSubject(), Permission.READ)) { - result.add(doc); + try(final ManagedDocumentLock documentLock = lockManager.acquireDocumentReadLock(doc.getURI())) { + final DocumentType doctype = doc.getDoctype(); + if (doctype == null) { + continue; + } + if (doctypeName.equals(doctype.getName()) + && doc.getCollection().getPermissionsNoLock().validate(getCurrentSubject(), Permission.READ) + && doc.getPermissions().validate(getCurrentSubject(), Permission.READ)) { + 
result.add(doc); + } } } return result; } - /** - * Adds all the documents in the database to the specified DocumentSet. - * - * @param docs a (possibly empty) document set to which the found - * documents are added. - */ @Override - public MutableDocumentSet getAllXMLResources(final MutableDocumentSet docs) throws PermissionDeniedException { + public MutableDocumentSet getAllXMLResources(final MutableDocumentSet docs) throws PermissionDeniedException, LockException { final long start = System.currentTimeMillis(); - Collection rootCollection = null; - try { - rootCollection = openCollection(XmldbURI.ROOT_COLLECTION_URI, LockMode.READ_LOCK); + try(final Collection rootCollection = openCollection(XmldbURI.ROOT_COLLECTION_URI, LockMode.READ_LOCK)) { rootCollection.allDocs(this, docs, true); if(LOG.isDebugEnabled()) { LOG.debug("getAllDocuments(DocumentSet) - end - " @@ -2392,121 +2614,179 @@ public MutableDocumentSet getAllXMLResources(final MutableDocumentSet docs) thro + "ms."); } return docs; - } finally { - if(rootCollection != null) { - rootCollection.release(LockMode.READ_LOCK); - } } } - /** - * @param doc src document - * @param destination destination collection - * @param newName the new name for the document - */ @Override - public void copyResource(final Txn transaction, final DocumentImpl doc, final Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, EXistException, IOException { + public void copyResource(final Txn transaction, final DocumentImpl sourceDocument, final Collection targetCollection, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException, EXistException { + copyResource(transaction, sourceDocument, targetCollection, newName, PreserveType.DEFAULT); + } + @Override + public void copyResource(final Txn transaction, final DocumentImpl sourceDocument, final Collection targetCollection, final XmldbURI newName, final PreserveType preserve) throws 
PermissionDeniedException, LockException, IOException, TriggerException, EXistException { + assert(sourceDocument != null); + assert(targetCollection != null); + assert(newName != null); if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } - final Collection collection = doc.getCollection(); + if(newName.numSegments() != 1) { + throw new IOException("newName name must be just a name i.e. an XmldbURI with one segment!"); + } + + final XmldbURI sourceDocumentUri = sourceDocument.getURI(); + final XmldbURI targetCollectionUri = targetCollection.getURI(); + final XmldbURI destinationDocumentUri = targetCollectionUri.append(newName); - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' has insufficient privileges to copy the resource '" + doc.getFileURI() + "'."); + if(!sourceDocument.getPermissions().validate(getCurrentSubject(), Permission.READ)) { + throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' has insufficient privileges to copy the resource '" + sourceDocumentUri + "'."); } - if(!doc.getPermissions().validate(getCurrentSubject(), Permission.READ)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' has insufficient privileges to copy the resource '" + doc.getFileURI() + "'."); + // we assume the caller holds a READ_LOCK (or better) on sourceDocument#getCollection() + final Collection sourceCollection = sourceDocument.getCollection(); + if (!sourceCollection.getPermissions().validate(getCurrentSubject(), Permission.EXECUTE)) { + throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' has insufficient privileges to copy the resource '" + sourceDocumentUri + "'."); } - if(newName == null) { - newName = doc.getFileURI(); + if(!targetCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { + throw 
new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' does not have execute access on the destination collection '" + targetCollectionUri + "'."); } - final CollectionCache collectionsCache = pool.getCollectionsCache(); - synchronized(collectionsCache) { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - final DocumentImpl oldDoc = destination.getDocument(this, newName); + if(targetCollection.hasChildCollection(this, newName.lastSegment())) { + throw new EXistException("The collection '" + targetCollectionUri + "' already has a sub-collection named '" + newName.lastSegment() + "', you cannot create a Document with the same name as an existing collection."); + } + + try(final LockedDocument oldLockedDoc = targetCollection.getDocumentWithLock(this, newName, LockMode.WRITE_LOCK)) { + final DocumentTrigger trigger = new DocumentTriggers(this, transaction, targetCollection); - if(!destination.getPermissionsNoLock().validate(getCurrentSubject(), Permission.EXECUTE)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' does not have execute access on the destination collection '" + destination.getURI() + "'."); + final DocumentImpl oldDoc = oldLockedDoc == null ? null : oldLockedDoc.getDocument(); + if (oldDoc == null) { + if (!targetCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' does not have write access on the destination collection '" + targetCollectionUri + "'."); } + } else { + //overwrite existing document - if(destination.hasChildCollection(this, newName.lastSegment())) { - throw new EXistException( - "The collection '" + destination.getURI() + "' already has a sub-collection named '" + newName.lastSegment() + "', you cannot create a Document with the same name as an existing collection." 
- ); + if (sourceDocument.getDocId() == oldDoc.getDocId()) { + throw new PermissionDeniedException("Cannot copy resource to itself '" + sourceDocumentUri + "'."); + } + + if (!oldDoc.getPermissions().validate(getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("A resource with the same name already exists in the target collection '" + oldDoc.getURI() + "', and you do not have write access on that resource."); } - final XmldbURI newURI = destination.getURI().append(newName); - final XmldbURI oldUri = doc.getURI(); + trigger.beforeDeleteDocument(this, transaction, oldDoc); + trigger.afterDeleteDocument(this, transaction, destinationDocumentUri); + } - final DocumentTrigger trigger = new DocumentTriggers(this, collection); + trigger.beforeCopyDocument(this, transaction, sourceDocument, destinationDocumentUri); - if(oldDoc == null) { - if(!destination.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Account '" + getCurrentSubject().getName() + "' does not have write access on the destination collection '" + destination.getURI() + "'."); + DocumentImpl newDocument = null; + if (sourceDocument.getResourceType() == DocumentImpl.BINARY_FILE) { + final LockManager lockManager = getBrokerPool().getLockManager(); + try (final ManagedDocumentLock newDocLock = lockManager.acquireDocumentWriteLock(destinationDocumentUri); + final InputStream is = getBinaryResource((BinaryDocument) sourceDocument)) { + final BinaryDocument newDoc; + if (oldDoc != null) { + newDoc = new BinaryDocument(oldDoc); + } else { + newDoc = new BinaryDocument(getBrokerPool(), targetCollection, newName); } - } else { - //overwrite existing document - - if(doc.getDocId() == oldDoc.getDocId()) { - throw new EXistException("Cannot copy resource to itself '" + doc.getURI() + "'."); + newDoc.copyOf(this, sourceDocument, oldDoc); + newDoc.setDocId(getNextResourceId(transaction)); + final Date created; + final Date 
lastModified; + if(preserveOnCopy(preserve)) { + copyResource_preserve(this, sourceDocument, newDoc, oldDoc != null); + if (oldDoc != null) { + created = new Date(oldDoc.getMetadata().getLastModified()); + } else { + created = new Date(sourceDocument.getMetadata().getLastModified()); + } + lastModified = new Date(sourceDocument.getMetadata().getLastModified()); + } else { + created = null; + lastModified = null; } - if(!oldDoc.getPermissions().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("A resource with the same name already exists in the target collection '" + oldDoc.getURI() + "', and you do not have write access on that resource."); - } + targetCollection.addBinaryResource(transaction, this, newDoc, is, sourceDocument.getMetadata().getMimeType(), -1, created, lastModified, preserve); - trigger.beforeDeleteDocument(this, transaction, oldDoc); - trigger.afterDeleteDocument(this, transaction, newURI); + newDocument = newDoc; } - - trigger.beforeCopyDocument(this, transaction, doc, newURI); - - DocumentImpl newDocument = null; - if(doc.getResourceType() == DocumentImpl.BINARY_FILE) { - InputStream is = null; - try { - is = getBinaryResource((BinaryDocument) doc); - newDocument = destination.addBinaryResource(transaction, this, newName, is, doc.getMetadata().getMimeType(), -1); - } finally { - if(is != null) { - is.close(); - } + } else { + final LockManager lockManager = getBrokerPool().getLockManager(); + try (final ManagedDocumentLock newDocLock = lockManager.acquireDocumentWriteLock(destinationDocumentUri)) { + final DocumentImpl newDoc; + if (oldDoc != null) { + newDoc = new DocumentImpl(oldDoc); + } else { + newDoc = new DocumentImpl(pool, targetCollection, newName); } - } else { - final DocumentImpl newDoc = new DocumentImpl(pool, destination, newName); - newDoc.copyOf(doc, oldDoc != null); - newDoc.setDocId(getNextResourceId(transaction, destination)); - newDoc.getUpdateLock().acquire(LockMode.WRITE_LOCK); - try { 
- copyXMLResource(transaction, doc, newDoc); - destination.addDocument(transaction, this, newDoc); - storeXMLResource(transaction, newDoc); - } finally { - newDoc.getUpdateLock().release(LockMode.WRITE_LOCK); + newDoc.copyOf(this, sourceDocument, oldDoc); + newDoc.setDocId(getNextResourceId(transaction)); + copyXMLResource(transaction, sourceDocument, newDoc); + if (preserveOnCopy(preserve)) { + copyResource_preserve(this, sourceDocument, newDoc, oldDoc != null); } + targetCollection.addDocument(transaction, this, newDoc); + storeXMLResource(transaction, newDoc); + newDocument = newDoc; } + } - trigger.afterCopyDocument(this, transaction, newDocument, oldUri); + trigger.afterCopyDocument(this, transaction, newDocument, sourceDocumentUri); + } + } - } catch(final IOException e) { - LOG.error("An error occurred while copying resource", e); - } catch(final TriggerException e) { - throw new PermissionDeniedException(e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); - } + /** + * Preserves attributes when copying a resource. + * e.g. `cp --preserve` + * + * @param srcDocument The source document. + * @param destDocument The destination document. 
+ */ + public static void copyResource_preserve(final DBBroker broker, final DocumentImpl srcDocument, final DocumentImpl destDocument, final boolean overwrittingDest) throws PermissionDeniedException { + final Permission srcPermissions = srcDocument.getPermissions(); + final Permission destPermissions = destDocument.getPermissions(); + + // only copy the owner and group from the source if we are creating a new file and we are the DBA + if ((!overwrittingDest) && broker.getCurrentSubject().hasDbaRole()) { + PermissionFactory.chown(broker, destPermissions, Optional.of(srcPermissions.getOwner().getName()), Optional.of(srcPermissions.getGroup().getName())); + } + + copyModeAcl(broker, srcPermissions, destPermissions); + + // btime (birth time) + if (!overwrittingDest) { + destDocument.getMetadata().setCreated(srcDocument.getMetadata().getLastModified()); // Indeed! ...the birth time of the dest file is the last modified time of the source file + } + + // mtime (modified time) + destDocument.getMetadata().setLastModified(srcDocument.getMetadata().getLastModified()); + + } + + /** + * Copies the Mode and ACL (if present) from one + * object to another. + * + * @param srcPermissions The permissions of the source object. + * @param destPermissions The permissions of the destination object. 
+ */ + private static void copyModeAcl(final DBBroker broker, final Permission srcPermissions, final Permission destPermissions) throws PermissionDeniedException { + PermissionFactory.chmod(broker, destPermissions, Optional.of(srcPermissions.getMode()), Optional.empty()); + if (srcPermissions instanceof SimpleACLPermission && destPermissions instanceof SimpleACLPermission) { + PermissionFactory.chacl(destPermissions, newAcl -> + ((SimpleACLPermission)newAcl).copyAclOf((SimpleACLPermission)srcPermissions) + ); } } - private void copyXMLResource(final Txn transaction, final DocumentImpl oldDoc, final DocumentImpl newDoc) throws IOException { + private void copyXMLResource(final Txn transaction, + @EnsureLocked(mode=LockMode.READ_LOCK) final DocumentImpl oldDoc, + @EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl newDoc) throws IOException { if (LOG.isDebugEnabled()) LOG.debug("Copying document " + oldDoc.getFileURI() + " to " + newDoc.getURI()); final long start = System.currentTimeMillis(); @@ -2525,32 +2805,29 @@ private void copyXMLResource(final Txn transaction, final DocumentImpl oldDoc, f LOG.debug("Copy took " + (System.currentTimeMillis() - start) + "ms."); } - /** - * Move (and/or rename) a Resource to another collection - * - * @param doc source document - * @param destination the destination collection - * @param newName the new name for the resource - * @throws TriggerException - */ @Override - public void moveResource(final Txn transaction, final DocumentImpl doc, final Collection destination, XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException { + public void moveResource(final Txn transaction, final DocumentImpl sourceDocument, final Collection targetCollection, final XmldbURI newName) throws PermissionDeniedException, LockException, IOException, TriggerException { + assert(sourceDocument != null); + assert(targetCollection != null); + assert(newName != null); if(isReadOnly()) { throw new 
IOException(DATABASE_IS_READ_ONLY); } - final Account docUser = doc.getUserLock(); - if(docUser != null) { - if(!(getCurrentSubject().getName()).equals(docUser.getName())) { - throw new PermissionDeniedException("Cannot move '" + doc.getFileURI() + " because is locked by getUser() '" + docUser.getName() + "'"); - } + if(newName.numSegments() != 1) { + throw new IOException("newName name must be just a name i.e. an XmldbURI with one segment!"); } - final Collection collection = doc.getCollection(); + final XmldbURI sourceDocumentUri = sourceDocument.getURI(); + final XmldbURI targetCollectionUri = targetCollection.getURI(); + final XmldbURI destinationDocumentUri = targetCollectionUri.append(newName); - if(!collection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { - throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on source Collection to move resource " + doc.getFileURI()); + final Account docUser = sourceDocument.getUserLock(); + if(docUser != null) { + if(!getCurrentSubject().getName().equals(docUser.getName())) { + throw new PermissionDeniedException("Cannot move '" + sourceDocumentUri + " because is locked by getUser() '" + docUser.getName() + "'"); + } } /** @@ -2563,111 +2840,94 @@ public void moveResource(final Txn transaction, final DocumentImpl doc, final Co * * - Adam 2013-03-26 */ - //must be owner of have execute access for the rename -// if(!((doc.getPermissions().getOwner().getId() != getCurrentSubject().getId()) | (doc.getPermissions().validate(getCurrentSubject(), Permission.EXECUTE)))) { -// throw new PermissionDeniedException("Account "+getCurrentSubject().getName()+" have insufficient privileges on destination Collection to move resource " + doc.getFileURI()); -// } - if(!destination.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { - throw new PermissionDeniedException("Account " + 
getCurrentSubject().getName() + " have insufficient privileges on destination Collection to move resource " + doc.getFileURI()); + // we assume the caller holds a WRITE_LOCK on sourceDocument#getCollection() + final Collection sourceCollection = sourceDocument.getCollection(); + if(!sourceCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on source Collection to move resource: " + sourceDocumentUri); } - - - /* Copy reference to original document */ - final Path fsOriginalDocument = getCollectionFile(getFsDir(), doc.getURI(), true); - - final XmldbURI oldName = doc.getFileURI(); - if(newName == null) { - newName = oldName; + if(!targetCollection.getPermissionsNoLock().validate(getCurrentSubject(), Permission.WRITE | Permission.EXECUTE)) { + throw new PermissionDeniedException("Account " + getCurrentSubject().getName() + " have insufficient privileges on destination Collection '" + targetCollectionUri + "' to move resource: " + sourceDocumentUri); } - try { - if(destination.hasChildCollection(this, newName.lastSegment())) { - throw new PermissionDeniedException( - "The collection '" + destination.getURI() + "' have collection '" + newName.lastSegment() + "'. " + - "Document with same name can't be created." - ); - } - - final DocumentTrigger trigger = new DocumentTriggers(this, collection); + if(targetCollection.hasChildCollection(this, newName.lastSegment())) { + throw new PermissionDeniedException( + "The Collection '" + targetCollectionUri + "' has a sub-collection '" + newName + "'; cannot create a Document with the same name!" 
+ ); + } - // check if the move would overwrite a collection - //TODO : resolve URIs : destination.getURI().resolve(newName) - final DocumentImpl oldDoc = destination.getDocument(this, newName); - if(oldDoc != null) { + final DocumentTrigger trigger = new DocumentTriggers(this, transaction, sourceCollection); - if(doc.getDocId() == oldDoc.getDocId()) { - throw new PermissionDeniedException("Cannot move resource to itself '" + doc.getURI() + "'."); - } + // check if the move would overwrite a collection + final DocumentImpl oldDoc = targetCollection.getDocument(this, newName); + if(oldDoc != null) { - // GNU mv command would prompt for Confirmation here, you can say yes or pass the '-f' flag. As we cant prompt for confirmation we assume OK - /* if(!oldDoc.getPermissions().validate(getCurrentSubject(), Permission.WRITE)) { - throw new PermissionDeniedException("Resource with same name exists in target collection and write is denied"); - } - */ + if(sourceDocument.getDocId() == oldDoc.getDocId()) { + throw new PermissionDeniedException("Cannot move resource to itself '" + sourceDocumentUri + "'."); + } - removeResource(transaction, oldDoc); + // GNU mv command would prompt for Confirmation here, you can say yes or pass the '-f' flag. 
As we cant prompt for confirmation we assume OK + /* if(!oldDoc.getPermissions().validate(getCurrentSubject(), Permission.WRITE)) { + throw new PermissionDeniedException("Resource with same name exists in target collection and write is denied"); } + */ - boolean renameOnly = collection.getId() == destination.getId(); + // remove the old resource + removeResource(transaction, oldDoc); + } - final XmldbURI oldURI = doc.getURI(); - final XmldbURI newURI = destination.getURI().append(newName); + final boolean renameOnly = sourceCollection.getId() == targetCollection.getId(); - trigger.beforeMoveDocument(this, transaction, doc, newURI); + trigger.beforeMoveDocument(this, transaction, sourceDocument, destinationDocumentUri); - if(doc.getResourceType() == DocumentImpl.XML_FILE) { - if (!renameOnly) { - dropIndex(transaction, doc); - } + if(sourceDocument.getResourceType() == DocumentImpl.XML_FILE) { + if (!renameOnly) { + dropIndex(transaction, sourceDocument); } + } - collection.unlinkDocument(this, doc); - if(!renameOnly) { - saveCollection(transaction, collection); - } + sourceCollection.unlinkDocument(this, sourceDocument); + if(!renameOnly) { + saveCollection(transaction, sourceCollection); + } - removeResourceMetadata(transaction, doc); + removeResourceMetadata(transaction, sourceDocument); - doc.setFileURI(newName); - doc.setCollection(destination); - destination.addDocument(transaction, this, doc); + sourceDocument.setFileURI(newName); + sourceDocument.setCollection(targetCollection); + targetCollection.addDocument(transaction, this, sourceDocument); - if(doc.getResourceType() == DocumentImpl.XML_FILE) { - if(!renameOnly) { - // reindexing - reindexXMLResource(transaction, doc, IndexMode.REPAIR); - } - } else { - // binary resource - final Path colDir = getCollectionFile(getFsDir(), destination.getURI(), true); - final Path binFile = colDir.resolve(newName.lastSegment().toString()); - final Path sourceFile = getCollectionFile(getFsDir(), doc.getURI(), false); + 
if(sourceDocument.getResourceType() == DocumentImpl.XML_FILE) { + if(!renameOnly) { + // reindexing + reindexXMLResource(transaction, sourceDocument, IndexMode.REPAIR); + } + } else { + // binary resource + final Path fsSourceDocument = getCollectionFile(getFsDir(), sourceDocumentUri, false); + final Path fsTargetCollection = getCollectionFile(getFsDir(), targetCollectionUri, true); + final Path fsDestinationDocument = fsTargetCollection.resolve(newName.lastSegment().toString()); - /* Create required directories */ - Files.createDirectories(binFile.getParent()); + /* Create required directories */ + Files.createDirectories(fsTargetCollection); - /* Rename original file to new location */ - Files.move(fsOriginalDocument, binFile, StandardCopyOption.ATOMIC_MOVE); + /* Rename original file to new location */ + Files.move(fsSourceDocument, fsDestinationDocument, StandardCopyOption.ATOMIC_MOVE); - if(logManager.isPresent()) { - final Loggable loggable = new RenameBinaryLoggable(this, transaction, sourceFile, binFile); - try { - logManager.get().journal(loggable); - } catch (final JournalException e) { - LOG.error(e.getMessage(), e); - } + if(logManager.isPresent()) { + final Loggable loggable = new RenameBinaryLoggable(this, transaction, fsSourceDocument, fsDestinationDocument); + try { + logManager.get().journal(loggable); + } catch (final JournalException e) { + LOG.error(e.getMessage(), e); } } - storeXMLResource(transaction, doc); - saveCollection(transaction, destination); - - trigger.afterMoveDocument(this, transaction, doc, oldURI); - - } catch(final ReadOnlyException e) { - throw new PermissionDeniedException(e.getMessage(), e); } + storeXMLResource(transaction, sourceDocument); + saveCollection(transaction, targetCollection); + + trigger.afterMoveDocument(this, transaction, sourceDocument, sourceDocumentUri); } @Override @@ -2681,7 +2941,7 @@ public void removeXMLResource(final Txn transaction, final DocumentImpl document " (" + document.getDocId() + ") ..."); 
} - final DocumentTrigger trigger = new DocumentTriggers(this); + final DocumentTrigger trigger = new DocumentTriggers(this, transaction); if(freeDocId) { trigger.beforeDeleteDocument(this, transaction, document); @@ -2693,7 +2953,7 @@ public void removeXMLResource(final Txn transaction, final DocumentImpl document } try { if(!document.getMetadata().isReferenced()) { - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() { final NodeHandle node = (NodeHandle) document.getFirstChild(); @@ -2708,7 +2968,7 @@ public Object start() { final NodeRef ref = new NodeRef(document.getDocId()); final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() { try { @@ -2727,15 +2987,12 @@ public Object start() { trigger.afterDeleteDocument(this, transaction, document.getURI()); } - - } catch(final ReadOnlyException e) { - LOG.error("removeDocument(String) - " + DATABASE_IS_READ_ONLY); } catch(final TriggerException e) { LOG.error(e); } } - private void dropIndex(final Txn transaction, final DocumentImpl document) throws ReadOnlyException { + private void dropIndex(final Txn transaction, @EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl document) { final StreamListener listener = getIndexController().getStreamListener(document, ReindexMode.REMOVE_ALL_NODES); listener.startIndexDocument(transaction); final NodeList nodes = document.getChildNodes(); @@ -2791,11 +3048,10 @@ public void removeBinaryResource(final Txn transaction, final BinaryDocument blo * @param transaction * @param document */ - private void removeResourceMetadata(final Txn transaction, final DocumentImpl document) { + private void removeResourceMetadata(final Txn transaction, + 
@EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl document) { // remove document metadata - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { if(LOG.isDebugEnabled()) { LOG.debug("Removing resource metadata for " + document.getDocId()); } @@ -2803,11 +3059,10 @@ private void removeResourceMetadata(final Txn transaction, final DocumentImpl do collectionsDb.remove(transaction, key); } catch(final LockException e) { LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile())); - } finally { - lock.release(LockMode.WRITE_LOCK); } } + @Override public void removeResource(Txn tx, DocumentImpl doc) throws IOException, PermissionDeniedException { if (doc instanceof BinaryDocument) { removeBinaryResource(tx, (BinaryDocument) doc); @@ -2822,15 +3077,13 @@ public void removeResource(Txn tx, DocumentImpl doc) throws IOException, Permiss * @throws EXistException If there's no free document id */ @Override - public int getNextResourceId(final Txn transaction, final Collection collection) throws EXistException { + public int getNextResourceId(final Txn transaction) throws EXistException, LockException { int nextDocId = collectionsDb.getFreeResourceId(); if(nextDocId != DocumentImpl.UNKNOWN_DOCUMENT_ID) { return nextDocId; } nextDocId = 1; - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { final Value key = new CollectionStore.CollectionKey(CollectionStore.NEXT_DOC_ID_KEY); final Value data = collectionsDb.get(key); if(data != null) { @@ -2850,11 +3103,6 @@ public int getNextResourceId(final Txn transaction, final Collection collection) //LOG.warn("Database is read-only"); //return DocumentImpl.UNKNOWN_DOCUMENT_ID; //TODO : rethrow ? 
-pb - } catch(final LockException e) { - LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile()), e); - //TODO : rethrow ? -pb - } finally { - lock.release(LockMode.WRITE_LOCK); } return nextDocId; } @@ -2903,7 +3151,7 @@ public void defragXMLResource(final Txn transaction, final DocumentImpl doc) { // dropping dom index final NodeRef ref = new NodeRef(doc.getDocId()); final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() { try { @@ -2919,7 +3167,7 @@ public Object start() { }.run(); // create a copy of the old doc to copy the nodes into it final DocumentImpl tempDoc = new DocumentImpl(pool, doc.getCollection(), doc.getFileURI()); - tempDoc.copyOf(doc, true); + tempDoc.copyOf(this, doc, doc); tempDoc.setDocId(doc.getDocId()); final StreamListener listener = getIndexController().getStreamListener(doc, ReindexMode.STORE); // copy the nodes @@ -2933,7 +3181,7 @@ public Object start() { } flush(); // remove the old nodes - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() { domDb.removeAll(transaction, firstChild); @@ -2950,11 +3198,10 @@ public Object start() { doc.getMetadata().setPageCount(tempDoc.getMetadata().getPageCount()); storeXMLResource(transaction, doc); closeDocument(); - if (LOG.isDebugEnabled()) + if (LOG.isDebugEnabled()) { LOG.debug("Defragmentation took " + (System.currentTimeMillis() - start) + "ms."); - } catch(final ReadOnlyException e) { - LOG.warn(DATABASE_IS_READ_ONLY, e); - } catch(final IOException e) { + } + } catch(final PermissionDeniedException | IOException e) { LOG.error(e); } } @@ -2989,7 +3236,7 @@ public void checkXMLResourceTree(final DocumentImpl doc) { 
xupdateConsistencyChecks = ((Boolean) property).booleanValue(); } if(xupdateConsistencyChecks) { - new DOMTransaction(this, domDb, LockMode.READ_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeReadLock(domDb.getLockName())) { @Override public Object start() throws ReadOnlyException { LOG.debug("Pages used: " + domDb.debugPages(doc, false)); @@ -3013,7 +3260,7 @@ public Object start() throws ReadOnlyException { } final NodeRef ref = new NodeRef(doc.getDocId()); final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref); - new DOMTransaction(this, domDb, LockMode.READ_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeReadLock(domDb.getLockName())) { @Override public Object start() { try { @@ -3041,7 +3288,7 @@ public void storeNode(final Txn transaction, final IStor final DocumentImpl doc = node.getOwnerDocument(); final short nodeType = node.getNodeType(); final byte data[] = node.serialize(); - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK, doc) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName()), doc) { @Override public Object start() throws ReadOnlyException { long address; @@ -3073,7 +3320,7 @@ public void updateNode(final Txn transaction, final ISto final DocumentImpl doc = node.getOwnerDocument(); final long internalAddress = node.getInternalAddress(); final byte[] data = node.serialize(); - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() throws ReadOnlyException { if(StorageAddress.hasAddress(internalAddress)) { @@ -3109,7 +3356,7 @@ public Object start() throws ReadOnlyException { public void insertNodeAfter(final Txn transaction, final NodeHandle previous, final IStoredNode node) { final byte data[] = node.serialize(); final DocumentImpl doc = previous.getOwnerDocument(); - new DOMTransaction(this, domDb, 
LockMode.WRITE_LOCK, doc) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName()), doc) { @Override public Object start() { long address = previous.getInternalAddress(); @@ -3126,13 +3373,13 @@ public Object start() { } private void copyNodes(final Txn transaction, final INodeIterator iterator, final IStoredNode node, - final NodePath currentPath, final DocumentImpl newDoc, final boolean defragment, + final NodePath currentPath, @EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl newDoc, final boolean defragment, final StreamListener listener) { copyNodes(transaction, iterator, node, currentPath, newDoc, defragment, listener, null); } private void copyNodes(final Txn transaction, INodeIterator iterator, final IStoredNode node, - final NodePath currentPath, final DocumentImpl newDoc, final boolean defragment, + final NodePath currentPath, @EnsureLocked(mode=LockMode.WRITE_LOCK) final DocumentImpl newDoc, final boolean defragment, final StreamListener listener, NodeId oldNodeId) { if(node.getNodeType() == Node.ELEMENT_NODE) { currentPath.addComponent(node.getQName()); @@ -3209,10 +3456,10 @@ private void copyNodes(final Txn transaction, INodeItera * for later removal. 
*/ @Override - public void removeNode(final Txn transaction, final IStoredNode node, final NodePath currentPath, - final String content) { + public void removeNode(final Txn transaction, final IStoredNode node, + final NodePath currentPath, final String content) { final DocumentImpl doc = node.getOwnerDocument(); - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK, doc) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName()), doc) { @Override public Object start() { final long address = node.getInternalAddress(); @@ -3289,12 +3536,12 @@ public Object start() { @Override public void removeAllNodes(final Txn transaction, final IStoredNode node, final NodePath currentPath, - final StreamListener listener) { + final StreamListener listener) { try(final INodeIterator iterator = getNodeIterator(node)) { iterator.next(); - final Stack stack = new Stack<>(); + final Deque stack = new ArrayDeque<>(); collectNodesForRemoval(transaction, stack, iterator, listener, node, currentPath); while(!stack.isEmpty()) { final RemovedNode next = stack.pop(); @@ -3305,8 +3552,8 @@ public void removeAllNodes(final Txn transaction, final IStoredNode node, final } } - private void collectNodesForRemoval(final Txn transaction, final Stack stack, - final INodeIterator iterator, final StreamListener listener, final IStoredNode node, final NodePath currentPath) { + private void collectNodesForRemoval(final Txn transaction, final Deque stack, + final INodeIterator iterator, final StreamListener listener, final IStoredNode node, final NodePath currentPath) { RemovedNode removed; switch(node.getNodeType()) { case Node.ELEMENT_NODE: @@ -3476,7 +3723,7 @@ private void scanNodes(final Txn transaction, final INodeIterator iterator, fina @Override public String getNodeValue(final IStoredNode node, final boolean addWhitespace) { - return (String) new DOMTransaction(this, domDb, LockMode.READ_LOCK) { + return (String) new DOMTransaction(this, domDb, () -> 
lockManager.acquireBtreeReadLock(domDb.getLockName())) { @Override public Object start() { return domDb.getNodeValue(NativeBroker.this, node, addWhitespace); @@ -3486,7 +3733,7 @@ public Object start() { @Override public IStoredNode objectWith(final Document doc, final NodeId nodeId) { - return (IStoredNode) new DOMTransaction(this, domDb, LockMode.READ_LOCK) { + return (IStoredNode) new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeReadLock(domDb.getLockName())) { @Override public Object start() { final Value val = domDb.get(NativeBroker.this, new NodeProxy((DocumentImpl) doc, nodeId)); @@ -3509,7 +3756,7 @@ public IStoredNode objectWith(final NodeProxy p) { if(!StorageAddress.hasAddress(p.getInternalAddress())) { return objectWith(p.getOwnerDocument(), p.getNodeId()); } - return (IStoredNode) new DOMTransaction(this, domDb, LockMode.READ_LOCK) { + return (IStoredNode) new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeReadLock(domDb.getLockName())) { @Override public Object start() { // DocumentImpl sets the nodeId to DOCUMENT_NODE when it's trying to find its top-level @@ -3551,7 +3798,7 @@ public Object start() { } @Override - public void repair() throws PermissionDeniedException, IOException { + public void repair() throws PermissionDeniedException, IOException, LockException { if(isReadOnly()) { throw new IOException(DATABASE_IS_READ_ONLY); } @@ -3591,17 +3838,12 @@ public void repairPrimary() { protected void rebuildIndex(final byte indexId) { final BTree btree = getStorage(indexId); - final Lock lock = btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(btree.getLockName())) { LOG.info("Rebuilding index " + FileUtils.fileName(btree.getFile())); btree.rebuild(); LOG.info("Index " + FileUtils.fileName(btree.getFile()) + " was rebuilt."); } catch(LockException | IOException | TerminatedException | DBException e) { LOG.error("Caught error while rebuilding 
core index " + FileUtils.fileName(btree.getFile()) + ": " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } @@ -3625,7 +3867,7 @@ public void sync(final Sync syncEvent) { return; } try { - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() { try { @@ -3637,14 +3879,10 @@ public Object start() { } }.run(); if(syncEvent == Sync.MAJOR) { - final Lock lock = collectionsDb.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock collectionsDbLock = lockManager.acquireBtreeWriteLock(collectionsDb.getLockName())) { collectionsDb.flush(); } catch(final LockException e) { LOG.error("Failed to acquire lock on " + FileUtils.fileName(collectionsDb.getFile()), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } notifySync(); pool.getIndexManager().sync(); @@ -3678,7 +3916,6 @@ public void shutdown() { } catch(final Exception e) { LOG.error(e.getMessage(), e); } - super.shutdown(); } /** @@ -3702,7 +3939,7 @@ public void checkAvailableMemory() { //TODO UNDERSTAND : why not use shutdown ? -pb @Override public void closeDocument() { - new DOMTransaction(this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() { domDb.closeDocument(); @@ -3893,9 +4130,10 @@ public void doIndex() { */ public void store() { final DocumentImpl doc = node.getOwnerDocument(); - if(indexMode == IndexMode.STORE && node.getNodeType() == Node.ELEMENT_NODE && level <= defaultIndexDepth) { + // we store all nodes at level 1 (see - https://github.com/eXist-db/exist/issues/1691), and only element nodes after! 
+ if(indexMode == IndexMode.STORE && (level == 1 || (node.getNodeType() == Node.ELEMENT_NODE && level <= defaultIndexDepth))) { //TODO : used to be this, but NativeBroker.this avoids an owner change - new DOMTransaction(NativeBroker.this, domDb, LockMode.WRITE_LOCK) { + new DOMTransaction(NativeBroker.this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) { @Override public Object start() throws ReadOnlyException { try { diff --git a/src/org/exist/storage/NativeValueIndex.java b/src/org/exist/storage/NativeValueIndex.java index 5a5402c9382..6270dc591b4 100644 --- a/src/org/exist/storage/NativeValueIndex.java +++ b/src/org/exist/storage/NativeValueIndex.java @@ -43,8 +43,8 @@ import org.exist.storage.io.VariableByteArrayInput; import org.exist.storage.io.VariableByteInput; import org.exist.storage.io.VariableByteOutputStream; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.Txn; import org.exist.xquery.Constants; import org.exist.xquery.Constants.Comparison; @@ -62,6 +62,7 @@ import java.io.IOException; import java.nio.file.Path; import java.util.*; +import java.util.concurrent.locks.ReentrantLock; /** * Maintains an index on typed node values (optionally by QName). @@ -133,16 +134,6 @@ public class NativeValueIndex implements ContentLoadingObserver { public final static String INDEX_CASE_SENSITIVE_ATTRIBUTE = "caseSensitive"; public final static String PROPERTY_INDEX_CASE_SENSITIVE = "indexer.case-sensitive"; - public enum IndexType { - GENERIC((byte)0x0), - QNAME((byte)0x1); - final byte val; - - IndexType(final byte val) { - this.val = val; - } - } - /** * The broker that is using this value index. 
*/ @@ -154,15 +145,6 @@ public enum IndexType { @GuardedBy("dbValues#getLock()") final BFile dbValues; private final Configuration config; - private static class PendingChanges { - final IndexType indexType; - final Map> changes = new TreeMap<>(); - - PendingChanges(final IndexType indexType) { - this.indexType = indexType; - } - } - /** * A collection of key-value pairs that pending modifications for this value index. * The keys are {@link org.exist.xquery.value.AtomicValue atomic values} @@ -173,6 +155,8 @@ private static class PendingChanges { private final PendingChanges pendingGeneric = new PendingChanges<>(IndexType.GENERIC); private final PendingChanges pendingQName = new PendingChanges<>(IndexType.QNAME); + private final LockManager lockManager; + /** * The current document. */ @@ -187,6 +171,7 @@ private static class PendingChanges { public NativeValueIndex(final DBBroker broker, final byte id, final Path dataDir, final Configuration config) throws DBException { this.broker = broker; + this.lockManager = broker.getBrokerPool().getLockManager(); this.config = config; final double cacheGrowth = NativeValueIndex.DEFAULT_VALUE_CACHE_GROWTH; final double cacheValueThresHold = NativeValueIndex.DEFAULT_VALUE_VALUE_THRESHOLD; @@ -348,18 +333,17 @@ public void reindex(final IStoredNode node) { @Override public void storeText(final TextImpl node, final NodePath currentPath) { + //no-op } @Override public void removeNode(final NodeHandle node, final NodePath currentPath, final String content) { + //no-op } @Override public void sync() { - final Lock lock = dbValues.getLock(); - - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { dbValues.flush(); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); @@ -367,8 +351,6 @@ public void sync() { } catch (final DBException e) { LOG.error(e.getMessage(), e); //TODO 
: throw an exception ? -pb - } finally { - lock.release(LockMode.WRITE_LOCK); } } @@ -416,10 +398,7 @@ private void flush(final PendingChanges pending, final FunctionE bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { final Value v = dbKeyFn.apply(key); if (dbValues.append(v, os.data()) == BFile.UNKNOWN_ADDRESS) { @@ -437,7 +416,6 @@ private void flush(final PendingChanges pending, final FunctionE void remove(final PendingChanges pending, final FunctionE newGIDList = new ArrayList<>(); os.clear(); - final Lock lock = dbValues.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { //Compute a key for the value final Value searchKey = dbKeyFn.apply(key); @@ -557,7 +533,6 @@ private void remove(final PendingChanges pending, final FunctionE list, final NodeId nodeId @Override public void dropIndex(final Collection collection) { - final Lock lock = dbValues.getLock(); - - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { flush(); @@ -588,18 +560,13 @@ public void dropIndex(final Collection collection) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); } catch (final BTreeException | IOException e) { LOG.error(e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } @Override public void dropIndex(final DocumentImpl document) { final int collectionId = document.getCollection().getId(); - final Lock lock = dbValues.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { dropIndex(document.getDocId(), pendingGeneric, key -> new SimpleValue(collectionId, (Indexable) key)); dropIndex(document.getDocId(), pendingQName, key -> new QNameValue(collectionId, key.qname, key.value, 
broker.getBrokerPool().getSymbols())); } catch (final LockException e) { @@ -610,7 +577,6 @@ public void dropIndex(final DocumentImpl document) { LOG.warn("Exception while removing range index: " + e.getMessage(), e); } finally { os.clear(); - lock.release(LockMode.WRITE_LOCK); } } @@ -716,7 +682,6 @@ public NodeSet findAll(final XQueryWatchDog watchDog, final Comparison compariso */ private NodeSet findAll(final XQueryWatchDog watchDog, final Comparison comparison, final DocumentSet docs, final NodeSet contextSet, final int axis, final List qnames, final Indexable value, final NodeSet result) throws TerminatedException { final SearchCallback cb = new SearchCallback(docs, contextSet, result, axis == NodeSet.ANCESTOR); - final Lock lock = dbValues.getLock(); final int idxOp = toIndexQueryOp(comparison); @@ -726,8 +691,7 @@ private NodeSet findAll(final XQueryWatchDog watchDog, final Comparison comparis watchDog.proceed(null); if (qnames == null) { - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(dbValues.getLockName())) { final Value searchKey = new SimpleValue(collectionId, value); final IndexQuery query = new IndexQuery(idxOp, searchKey); @@ -741,13 +705,10 @@ private NodeSet findAll(final XQueryWatchDog watchDog, final Comparison comparis LOG.error(e.getMessage(), e); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } else { for (final QName qname : qnames) { - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(dbValues.getLockName())) { //Compute a key for the value in the collection final Value searchKey = new QNameValue(collectionId, qname, value, broker.getBrokerPool().getSymbols()); @@ -763,8 +724,6 @@ private NodeSet findAll(final XQueryWatchDog watchDog, final Comparison comparis LOG.error(e.getMessage(), 
e); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } } @@ -882,15 +841,13 @@ public NodeSet matchAll(final XQueryWatchDog watchDog, final DocumentSet docs, f } final MatcherCallback cb = new MatcherCallback(docs, contextSet, result, matcher, axis == NodeSet.ANCESTOR); - final Lock lock = dbValues.getLock(); for (final Iterator iter = docs.getCollectionIterator(); iter.hasNext(); ) { final int collectionId = iter.next().getId(); watchDog.proceed(null); if (qnames == null) { - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(dbValues.getLockName())) { final Value searchKey; if (startTerm != null) { @@ -906,13 +863,10 @@ public NodeSet matchAll(final XQueryWatchDog watchDog, final DocumentSet docs, f LOG.error(e.getMessage(), e); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } else { for (final QName qname : qnames) { - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(dbValues.getLockName())) { final Value searchKey; if (startTerm != null) { @@ -927,8 +881,6 @@ public NodeSet matchAll(final XQueryWatchDog watchDog, final DocumentSet docs, f LOG.error(e.getMessage(), e); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } } @@ -940,12 +892,10 @@ public ValueOccurrences[] scanIndexKeys(final DocumentSet docs, final NodeSet co final int type = start.getType(); final boolean stringType = Type.subTypeOf(type, Type.STRING); final IndexScanCallback cb = new IndexScanCallback(docs, contextSet, type, false); - final Lock lock = dbValues.getLock(); for (final Iterator i = 
docs.getCollectionIterator(); i.hasNext(); ) { - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(dbValues.getLockName())) { final Collection c = i.next(); final int collectionId = c.getId(); @@ -963,8 +913,6 @@ public ValueOccurrences[] scanIndexKeys(final DocumentSet docs, final NodeSet co LOG.error(e.getMessage(), e); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } final Map map = cb.map; @@ -993,13 +941,11 @@ public ValueOccurrences[] scanIndexKeys(final DocumentSet docs, final NodeSet co final int type = start.getType(); final boolean stringType = Type.subTypeOf(type, Type.STRING); final IndexScanCallback cb = new IndexScanCallback(docs, contextSet, type, true); - final Lock lock = dbValues.getLock(); for (final QName qname : qnames) { for (final Iterator i = docs.getCollectionIterator(); i.hasNext(); ) { - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(dbValues.getLockName())) { final int collectionId = i.next().getId(); //Compute a key for the start value in the collection @@ -1016,8 +962,6 @@ public ValueOccurrences[] scanIndexKeys(final DocumentSet docs, final NodeSet co LOG.error(e.getMessage(), e); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.READ_LOCK); } } } @@ -1103,29 +1047,21 @@ private AtomicValue convertToAtomic(final int xpathType, final String value) { @Override public void closeAndRemove() { - final Lock lock = dbValues.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { config.setProperty(getConfigKeyForFile(), null); dbValues.closeAndRemove(); } catch (final LockException 
e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } @Override public void close() throws DBException { - final Lock lock = dbValues.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(dbValues.getLockName())) { config.setProperty(getConfigKeyForFile(), null); dbValues.close(); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(dbValues.getFile()) + "'", e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } @@ -1668,4 +1604,23 @@ public IndexWorker getWorker() { return null; } } + + public enum IndexType { + GENERIC((byte)0x0), + QNAME((byte)0x1); + final byte val; + + IndexType(final byte val) { + this.val = val; + } + } + + private static class PendingChanges { + final IndexType indexType; + final Map> changes = new TreeMap<>(); + + PendingChanges(final IndexType indexType) { + this.indexType = indexType; + } + } } diff --git a/src/org/exist/storage/NotificationService.java b/src/org/exist/storage/NotificationService.java index b2b137f9d15..f5bb3ffb4ec 100644 --- a/src/org/exist/storage/NotificationService.java +++ b/src/org/exist/storage/NotificationService.java @@ -21,80 +21,81 @@ */ package org.exist.storage; +import net.jcip.annotations.ThreadSafe; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.dom.persistent.DocumentImpl; import org.exist.numbering.NodeId; import java.util.IdentityHashMap; +import java.util.Map; + import org.exist.dom.persistent.IStoredNode; /** * Global notification service for document updates. Other classes * can subscribe to this service to be notified of document modifications, * removals or additions. 
- * - * @author wolf * + * @author wolf */ -public class NotificationService extends IdentityHashMap implements BrokerPoolService { +@ThreadSafe +public class NotificationService implements BrokerPoolService { + + private static final long serialVersionUID = -3629584664969740903L; + private static final Logger LOG = LogManager.getLogger(NotificationService.class); + + private final Map listeners = new IdentityHashMap<>(); + + public NotificationService() { + super(); + } - private static final long serialVersionUID = -3629584664969740903L; + /** + * Subscribe an {@link UpdateListener} to receive notifications. + * + * @param listener + */ + public synchronized void subscribe(final UpdateListener listener) { + listeners.put(listener, new Object()); + } - private final static Logger LOG = LogManager.getLogger(NotificationService.class); - - public NotificationService() { - super(); - } - - /** - * Subscribe an {@link UpdateListener} to receive notifications. - * - * @param listener - */ - public synchronized void subscribe(UpdateListener listener) { - put(listener, new Object()); - } - - /** - * Unsubscribe an {@link UpdateListener}. - * - * @param listener - */ - public synchronized void unsubscribe(UpdateListener listener) { - final Object i = remove(listener); - if (i == null) - {throw new RuntimeException(hashCode() + " listener not found: " + listener.hashCode());} + /** + * Unsubscribe an {@link UpdateListener}. + * + * @param listener + */ + public synchronized void unsubscribe(final UpdateListener listener) { + final Object i = listeners.remove(listener); + if (i == null) { + throw new RuntimeException(hashCode() + " listener not found: " + listener.hashCode()); + } listener.unsubscribe(); } - /** - * Notify all subscribers that a document has been updated/removed or - * a new document has been added. 
- * - * @param document - * @param event - */ - public synchronized void notifyUpdate(DocumentImpl document, int event) { - for (final UpdateListener listener : keySet()) { - listener.documentUpdated(document, event); - } - } + /** + * Notify all subscribers that a document has been updated/removed or + * a new document has been added. + * + * @param document + * @param event + */ + public synchronized void notifyUpdate(final DocumentImpl document, final int event) { + listeners.keySet().forEach(listener -> listener.documentUpdated(document, event)); + } /** - * Notify all subscribers that a node has been moved. Nodes may be moved during a + * Notify all subscribers that a node has been moved. Nodes may be moved during a * defragmentation run. - */ - public synchronized void notifyMove(NodeId oldNodeId, IStoredNode newNode) { - for (final UpdateListener listener : keySet()) { - listener.nodeMoved(oldNodeId, newNode); - } - } + */ + public synchronized void notifyMove(final NodeId oldNodeId, final IStoredNode newNode) { + listeners.keySet().forEach(listener -> listener.nodeMoved(oldNodeId, newNode)); + } - public void debug() { - LOG.debug("Registered UpdateListeners:"); - for (final UpdateListener listener : keySet()) { - listener.debug(); - } - } + public synchronized void debug() { + if (LOG.isDebugEnabled()) { + LOG.debug("Registered UpdateListeners:"); + } + listeners.keySet().forEach(UpdateListener::debug); + } } diff --git a/src/org/exist/storage/ProcessMonitor.java b/src/org/exist/storage/ProcessMonitor.java index 2dc18d677c6..bbd7f7fa481 100644 --- a/src/org/exist/storage/ProcessMonitor.java +++ b/src/org/exist/storage/ProcessMonitor.java @@ -28,20 +28,16 @@ import org.exist.http.urlrewrite.XQueryURLRewrite; import org.exist.source.Source; import org.exist.util.Configuration; -import org.exist.xquery.Variable; -import org.exist.xquery.XPathException; +import org.exist.xquery.XQueryContext; import org.exist.xquery.XQueryWatchDog; import 
org.exist.xquery.functions.request.RequestModule; import org.exist.xquery.util.ExpressionDumper; -import org.exist.xquery.value.JavaObjectValue; -import org.exist.xquery.value.Type; import java.util.*; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; -import java.util.stream.StreamSupport; /** * Class to keep track of all running queries in a database instance. The main @@ -389,35 +385,26 @@ public static String getRequestURI(XQueryWatchDog watchdog) { if (reqModule == null) { return null; } - try { - final Variable var = reqModule.resolveVariable(RequestModule.REQUEST_VAR); - if(var == null || var.getValue() == null) { - return null; - } + final Optional maybeRequest = Optional.ofNullable(watchdog.getContext()) + .map(XQueryContext::getHttpContext) + .map(XQueryContext.HttpContext::getRequest); - if (var.getValue().getItemType() != Type.JAVA_OBJECT) { - return null; - } + if (!maybeRequest.isPresent()) { + return null; + } - final JavaObjectValue value = (JavaObjectValue) var.getValue().itemAt(0); - if (value.getObject() instanceof RequestWrapper) { - final RequestWrapper wrapper = (RequestWrapper) value.getObject(); - final Object attr = wrapper.getAttribute(XQueryURLRewrite.RQ_ATTR_REQUEST_URI); - String uri; - if (attr == null) { - uri = wrapper.getRequestURI(); - } else { - uri = attr.toString(); - } - String queryString = wrapper.getQueryString(); - if (queryString != null) { - uri += "?" + queryString; - } - return uri; - } - } catch (XPathException e) { - // ignore and return null + final RequestWrapper request = maybeRequest.get(); + final Object attr = request.getAttribute(XQueryURLRewrite.RQ_ATTR_REQUEST_URI); + String uri; + if (attr == null) { + uri = request.getRequestURI(); + } else { + uri = attr.toString(); + } + String queryString = request.getQueryString(); + if (queryString != null) { + uri += "?" 
+ queryString; } - return null; + return uri; } } diff --git a/src/org/exist/storage/StartupTrigger.java b/src/org/exist/storage/StartupTrigger.java index 41730d44f15..b8c086678c1 100644 --- a/src/org/exist/storage/StartupTrigger.java +++ b/src/org/exist/storage/StartupTrigger.java @@ -1,5 +1,7 @@ package org.exist.storage; +import org.exist.storage.txn.Txn; + import java.util.List; import java.util.Map; @@ -27,5 +29,5 @@ public interface StartupTrigger { * @param sysBroker The single system broker available during database startup * @param params Key, Values */ - public void execute(final DBBroker sysBroker, final Map> params); + public void execute(final DBBroker sysBroker, final Txn transaction, final Map> params); } diff --git a/src/org/exist/storage/StartupTriggersManager.java b/src/org/exist/storage/StartupTriggersManager.java index 1434065231a..6d8c0a81ba4 100644 --- a/src/org/exist/storage/StartupTriggersManager.java +++ b/src/org/exist/storage/StartupTriggersManager.java @@ -22,6 +22,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.plugin.PluginsManagerImpl; +import org.exist.storage.txn.Txn; import org.exist.util.Configuration; import java.util.ArrayList; @@ -50,12 +51,12 @@ public void configure(final Configuration configuration) throws BrokerPoolServic } @Override - public void startPreMultiUserSystem(final DBBroker systemBroker) throws BrokerPoolServiceException { + public void startPreMultiUserSystem(final DBBroker systemBroker, final Txn transaction) throws BrokerPoolServiceException { for(final Configuration.StartupTriggerConfig startupTriggerConfig : startupTriggerConfigs) { try { final Class clazz = (Class) Class.forName(startupTriggerConfig.getClazz()); final StartupTrigger startupTrigger = clazz.newInstance(); - startupTrigger.execute(systemBroker, startupTriggerConfig.getParams()); + startupTrigger.execute(systemBroker, transaction, startupTriggerConfig.getParams()); } catch(final 
ClassNotFoundException | IllegalAccessException | InstantiationException e) { LOG.error("Could not call StartupTrigger class: " + startupTriggerConfig + ". SKIPPING! " + e.getMessage(), e); } catch(final RuntimeException re) { diff --git a/src/org/exist/storage/btree/BTree.java b/src/org/exist/storage/btree/BTree.java index 787d934dc66..6c4bc856b8b 100644 --- a/src/org/exist/storage/btree/BTree.java +++ b/src/org/exist/storage/btree/BTree.java @@ -76,12 +76,11 @@ import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; -import org.exist.storage.CacheManager; + import org.exist.storage.DefaultCacheManager; import org.exist.storage.NativeBroker; import org.exist.storage.cache.*; import org.exist.storage.journal.*; -import org.exist.storage.lock.Lock; import org.exist.storage.txn.Txn; import org.exist.util.ByteConversion; import org.exist.util.FileUtils; @@ -229,19 +228,14 @@ public void closeAndRemove() { cacheManager.deregisterCache(cache); } - /** - * Get the active Lock object for this file. 
- * - * @see org.exist.util.Lockable#getLock() - */ @Override - public Lock getLock() { + public String getLockName() { return null; } protected void initCache() { this.cache = new BTreeCache<>(FileUtils.fileName(getFile()), cacheManager.getDefaultInitialSize(), 1.5, - 0, CacheManager.BTREE_CACHE); + 0, Cache.CacheType.BTREE); cacheManager.registerCache(cache); } diff --git a/src/org/exist/storage/btree/Repair.java b/src/org/exist/storage/btree/Repair.java index 440ab2eccc3..02604d3e9d3 100644 --- a/src/org/exist/storage/btree/Repair.java +++ b/src/org/exist/storage/btree/Repair.java @@ -6,14 +6,15 @@ import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.NativeBroker; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.structural.NativeStructuralIndexWorker; import org.exist.util.Configuration; import org.exist.util.DatabaseConfigurationException; import org.exist.util.FileUtils; import java.util.Optional; +import java.util.concurrent.locks.ReentrantLock; /** @@ -53,15 +54,12 @@ public void repair(String id) { System.console().printf("Unkown index: %s\n", id); return; } - final Lock lock = btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + try(final ManagedLock btreeLock = lockManager.acquireBtreeWriteLock(btree.getLockName())) { System.console().printf("Rebuilding %15s ...", FileUtils.fileName(btree.getFile())); btree.rebuild(); System.out.println("Done"); - } finally { - lock.release(LockMode.WRITE_LOCK); } } catch (Exception e) { diff --git a/src/org/exist/storage/cache/BTreeCache.java b/src/org/exist/storage/cache/BTreeCache.java index d155015958d..a55ba5c9808 100755 --- a/src/org/exist/storage/cache/BTreeCache.java +++ b/src/org/exist/storage/cache/BTreeCache.java @@ -29,7 +29,7 @@ 
@NotThreadSafe public class BTreeCache extends LRUCache { - public BTreeCache(final String name, final int size, final double growthFactor, final double growthThreshold, final String type) { + public BTreeCache(final String name, final int size, final double growthFactor, final double growthThreshold, final CacheType type) { super(name, size, growthFactor, growthThreshold, type); } diff --git a/src/org/exist/storage/cache/Cache.java b/src/org/exist/storage/cache/Cache.java index a9e28cc9666..274598ef468 100644 --- a/src/org/exist/storage/cache/Cache.java +++ b/src/org/exist/storage/cache/Cache.java @@ -31,13 +31,18 @@ */ public interface Cache { + enum CacheType { + BTREE, + DATA + } + /** * Returns the type of this cache. Should be one of the - * constants defined in {@link org.exist.storage.CacheManager}. + * constants defined in {@link CacheType}. * * @return the type of this cache */ - String getType(); + CacheType getType(); /** * Add the item to the cache. If it is already in the cache, diff --git a/src/org/exist/storage/cache/GClockCache.java b/src/org/exist/storage/cache/GClockCache.java index b13ccf771b3..cbebc7556c8 100644 --- a/src/org/exist/storage/cache/GClockCache.java +++ b/src/org/exist/storage/cache/GClockCache.java @@ -51,7 +51,7 @@ public class GClockCache implements Cache { protected int size; private final double growthFactor; Accounting accounting; - private final String type; + private final CacheType type; protected T[] items; protected Long2ObjectHashMap map; protected int count = 0; @@ -59,7 +59,7 @@ public class GClockCache implements Cache { private int hitsOld = 0; protected CacheManager cacheManager = null; - public GClockCache(final String name, final Class cacheableClazz, final int size, final double growthFactor, final double growthThreshold, final String type) { + public GClockCache(final String name, final Class cacheableClazz, final int size, final double growthFactor, final double growthThreshold, final CacheType type) { 
this.name = name; this.cacheableClazz = cacheableClazz; this.size = size; @@ -82,7 +82,7 @@ public String getName() { } @Override - public String getType() { + public CacheType getType() { return type; } diff --git a/src/org/exist/storage/cache/LRDCache.java b/src/org/exist/storage/cache/LRDCache.java index 74647133d94..85bfd2e102e 100644 --- a/src/org/exist/storage/cache/LRDCache.java +++ b/src/org/exist/storage/cache/LRDCache.java @@ -48,7 +48,7 @@ public class LRDCache extends GClockCache { private int totalReferences = 0; private int nextCleanup; - public LRDCache(final String name, final Class cacheableClazz, final int size, final double growthFactor, final double growthThreshold, final String type) { + public LRDCache(final String name, final Class cacheableClazz, final int size, final double growthFactor, final double growthThreshold, final CacheType type) { super(name, cacheableClazz, size, growthFactor, growthThreshold, type); maxReferences = size * 10000; ageingPeriod = size * 5000; @@ -82,7 +82,8 @@ public void add(final T item, final int initialRefCount) { @Override protected T removeOne(final T item) { T old; - double rd = 0, minRd = -1; + double rd = 0; + double minRd = -1; int bucket = -1; final int len = items.length; for (int i = 0; i < len; i++) { diff --git a/src/org/exist/storage/cache/LRUCache.java b/src/org/exist/storage/cache/LRUCache.java index 2a94811eba3..a43a8ea9f36 100644 --- a/src/org/exist/storage/cache/LRUCache.java +++ b/src/org/exist/storage/cache/LRUCache.java @@ -43,11 +43,11 @@ public class LRUCache implements Cache { protected final double growthFactor; protected final Accounting accounting; protected SequencedLongHashMap map; - private final String type; + private final CacheType type; private int hitsOld = -1; protected CacheManager cacheManager = null; - public LRUCache(final String name, final int size, final double growthFactor, final double growthThreshold, final String type) { + public LRUCache(final String name, final 
int size, final double growthFactor, final double growthThreshold, final CacheType type) { this.name = name; this.max = size; this.growthFactor = growthFactor; @@ -68,7 +68,7 @@ public void add(final T item, final int initialRefCount) { } @Override - public String getType() { + public CacheType getType() { return type; } diff --git a/src/org/exist/storage/dom/DOMFile.java b/src/org/exist/storage/dom/DOMFile.java index 4b596a612cc..87d7a8b38b5 100644 --- a/src/org/exist/storage/dom/DOMFile.java +++ b/src/org/exist/storage/dom/DOMFile.java @@ -29,6 +29,7 @@ import java.text.NumberFormat; import java.util.ArrayList; import java.util.List; +import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import static java.nio.charset.StandardCharsets.UTF_8; @@ -43,10 +44,10 @@ import org.exist.dom.persistent.StoredNode; import org.exist.numbering.DLNBase; import org.exist.numbering.NodeId; +import org.exist.stax.ExtendedXMLStreamReader; import org.exist.stax.EmbeddedXMLStreamReader; import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; -import org.exist.storage.CacheManager; import org.exist.storage.DBBroker; import org.exist.storage.NativeBroker; import org.exist.storage.NativeBroker.NodeRef; @@ -65,8 +66,7 @@ import org.exist.storage.journal.LogEntryTypes; import org.exist.storage.journal.Loggable; import org.exist.storage.journal.Lsn; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.ReentrantReadWriteLock; +import org.exist.storage.lock.LockManager; import org.exist.storage.txn.Txn; import org.exist.util.*; import org.exist.util.hashtable.Object2LongIdentityHashMap; @@ -164,6 +164,8 @@ public class DOMFile extends BTree implements Lockable { public final static short FILE_FORMAT_VERSION_ID = 9; + private final LockManager lockManager; + //Page types public final static byte LOB = 21; public final static byte RECORD = 20; @@ -178,8 +180,6 @@ public class DOMFile 
extends BTree implements Lockable { private Object owner = null; - private final Lock lock; - private final Object2LongIdentityHashMap pages = new Object2LongIdentityHashMap<>(64); private DocumentImpl currentDocument = null; @@ -188,11 +188,11 @@ public class DOMFile extends BTree implements Lockable { public DOMFile(final BrokerPool pool, final byte id, final Path dataDir, final Configuration config) throws DBException { super(pool, id, true, pool.getCacheManager()); - lock = new ReentrantReadWriteLock(getFileName()); + this.lockManager = pool.getLockManager(); fileHeader = (BTreeFileHeader)getFileHeader(); fileHeader.setPageCount(0); fileHeader.setTotalCount(0); - dataCache = new LRUCache<>(getFileName(), 256, 0.0, 1.0, CacheManager.DATA_CACHE); + dataCache = new LRUCache<>(getFileName(), 256, 0.0, 1.0, Cache.CacheType.DATA); cacheManager.registerCache(dataCache); final Path file = dataDir.resolve(getFileName()); setFile(file); @@ -267,8 +267,8 @@ public boolean open() throws DBException { } public void closeDocument() { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } pages.remove(owner); } @@ -309,8 +309,8 @@ public void close() throws DBException { @Override public void closeAndRemove() { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } super.closeAndRemove(); cacheManager.deregisterCache(dataCache); @@ -333,8 +333,8 @@ public void setCurrentDocument(final DocumentImpl doc) { * @return the virtual storage address of the value */ public long add(final Txn transaction, final byte[] value) throws ReadOnlyException { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && 
!lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } if (value == null || value.length == 0) { @@ -366,8 +366,8 @@ public long add(final Txn transaction, final byte[] value) throws ReadOnlyExcept * @throws ReadOnlyException */ private long add(final Txn transaction, final byte[] value, final boolean overflowPage) throws ReadOnlyException { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } final int valueLength = value.length; //Always append data to the end of the file @@ -438,8 +438,8 @@ private void writeToLog(final Loggable loggable, final Page page) { * @param value Binary resource as byte array */ public long addBinary(final Txn transaction, final DocumentImpl doc, final byte[] value) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } final OverflowDOMPage overflowPage = new OverflowDOMPage(); final int pagesCount = overflowPage.write(transaction, value); @@ -456,8 +456,8 @@ public long addBinary(final Txn transaction, final DocumentImpl doc, final byte[ * @param is Binary resource as stream. 
*/ public long addBinary(final Txn transaction, final DocumentImpl doc, final InputStream is) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } final OverflowDOMPage overflowPage = new OverflowDOMPage(); final int pagesCount = overflowPage.write(transaction, is); @@ -471,15 +471,15 @@ public long addBinary(final Txn transaction, final DocumentImpl doc, final Input * @param pageNum */ public byte[] getBinary(final long pageNum) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } return getOverflowValue(pageNum); } public void readBinary(final long pageNum, final OutputStream os) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } try { final OverflowDOMPage overflowPage = new OverflowDOMPage(pageNum); @@ -496,8 +496,8 @@ public void readBinary(final long pageNum, final OutputStream os) { * @param value */ public long insertAfter(final Txn transaction, final DocumentImpl doc, final Value key, final byte[] value) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } try { final long address = findValue(key); @@ -527,8 +527,8 @@ public long insertAfter(final Txn transaction, final DocumentImpl doc, final Val * @param value the value of the new node. 
*/ public long insertAfter(final Txn transaction, final DocumentImpl doc, final long address, byte[] value) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } // check if we need an overflow page boolean isOverflow = false; @@ -1241,8 +1241,8 @@ public PageHeader createPageHeader() { public List findKeys(final IndexQuery query) throws IOException, BTreeException { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } final FindCallback callBack = new FindCallback(FindCallback.KEYS); try { @@ -1262,8 +1262,8 @@ public List findKeys(final IndexQuery query) */ protected long findValue(final DBBroker broker, final NodeProxy node) throws IOException, BTreeException { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } final DocumentImpl doc = node.getOwnerDocument(); final NodeRef nodeRef = new NativeBroker.NodeRef(doc.getDocId(), node.getNodeId()); @@ -1277,14 +1277,12 @@ protected long findValue(final DBBroker broker, final NodeProxy node) do { nodeID = nodeID.getParentId(); if (nodeID == null) { - SanityCheck.TRACE("Node " + node.getOwnerDocument().getDocId() + ":" + - nodeID + " not found."); + SanityCheck.TRACE("Node " + node.getOwnerDocument().getDocId() + ":" + nodeID + " not found."); throw new BTreeException("Node not found."); } if (nodeID == NodeId.DOCUMENT_NODE) { - SanityCheck.TRACE("Node " + node.getOwnerDocument().getDocId() + ":" + - nodeID + " not found."); - throw new BTreeException("Node " + nodeID + " not found."); + SanityCheck.TRACE("Node " + node.getOwnerDocument().getDocId() + ":" + nodeID + " not found."); + return 
KEY_NOT_FOUND; } final NativeBroker.NodeRef parentRef = new NativeBroker.NodeRef(doc.getDocId(), nodeID); try { @@ -1294,17 +1292,37 @@ protected long findValue(final DBBroker broker, final NodeProxy node) } } while (parentPointer == KEY_NOT_FOUND); try { + + final int thisLevel = nodeID.getTreeLevel(); + Integer childLevel = null; // lazily initialized below + final NodeProxy parent = new NodeProxy(doc, nodeID, parentPointer); - final EmbeddedXMLStreamReader cursor = (EmbeddedXMLStreamReader)broker.getXMLStreamReader(parent, true); - while(cursor.hasNext()) { - final int status = cursor.next(); + final EmbeddedXMLStreamReader reader = (EmbeddedXMLStreamReader)broker.getXMLStreamReader(parent, true); + + while (reader.hasNext()) { + final int status = reader.next(); + if (status != XMLStreamReader.END_ELEMENT) { - final NodeId nextId = (NodeId) cursor.getProperty(EmbeddedXMLStreamReader.PROPERTY_NODE_ID); - if (nextId.equals(node.getNodeId())) { - return cursor.getCurrentPosition(); + if (childLevel == null) { + childLevel = reader.getNode().getNodeType() == Node.ELEMENT_NODE ? thisLevel + 1 : thisLevel; + } + + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + if (otherId.equals(node.getNodeId())) { + return reader.getCurrentPosition(); + } + } + + if (status == XMLStreamConstants.END_ELEMENT) { + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + if (childLevel != null && childLevel != otherLevel && otherLevel == thisLevel) { + // finished `this` element... + break; // exit-while } } } + if (LOG.isDebugEnabled()) { LOG.debug("Node " + node.getNodeId() + " could not be found. Giving up. 
This is usually not an error."); } @@ -1328,8 +1346,8 @@ protected long findValue(final DBBroker broker, final NodeProxy node) */ public List findValues(final IndexQuery query) throws IOException, BTreeException { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } final FindCallback callBack = new FindCallback(FindCallback.VALUES); try { @@ -1397,8 +1415,8 @@ public BufferStats getDataBufferStats() { * @return Description of the Return Value */ public Value get(final Value key) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } try { final long pointer = findValue(key); @@ -1414,8 +1432,8 @@ public Value get(final Value key) { } public Value get(final DBBroker broker, final NodeProxy node) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } try { final long pointer = findValue(broker, node); @@ -1448,8 +1466,8 @@ public Value get(final long pointer) { * @return The node */ public Value get(final long pointer, final boolean warnIfMissing) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } final RecordPos rec = findRecord(pointer); if (rec == null) { @@ -1507,8 +1525,8 @@ protected void dumpValue(final Writer writer, final Value key, final int status) */ public long put(final Txn transaction, final Value key, final byte[] value) throws ReadOnlyException { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file 
doesn't own a write lock"); } final long pointer = add(transaction, value); try { @@ -1533,8 +1551,8 @@ public long put(final Txn transaction, final Value key, final byte[] value) //} public void remove(final Txn transaction, final Value key) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } try { final long pointer = findValue(key); @@ -1552,8 +1570,8 @@ public void remove(final Txn transaction, final Value key) { protected byte[] getOverflowValue(final long pointer) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } try { final OverflowDOMPage overflow = new OverflowDOMPage(pointer); @@ -1572,8 +1590,8 @@ protected byte[] getOverflowValue(final long pointer) { * @param pointer The pointer to the value */ public void removeOverflowValue(final Txn transaction, final long pointer) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } try { final OverflowDOMPage overflow = new OverflowDOMPage(pointer); @@ -1640,8 +1658,8 @@ private void removeLink(final Txn transaction, final long pointer) { //} public void removeNode(final Txn transaction, final long pointer) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } final RecordPos rec = findRecord(pointer); //Position the stream at the very beginning of the record @@ -1736,8 +1754,8 @@ public void remove(final Txn transaction, final Value key, final long pointer) { * @param page */ private 
void removePage(final DOMPage page) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } final DOMFilePageHeader pageHeader = page.getPageHeader(); if (pageHeader.getNextDataPage() != Page.NO_PAGE) { @@ -1775,8 +1793,8 @@ private void removePage(final DOMPage page) { * address pointer p. */ public void removeAll(final Txn transaction, final long pointer) { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } long pageNum = StorageAddress.pageFromPointer(pointer); if (pageNum == Page.NO_PAGE) { @@ -1860,8 +1878,8 @@ public boolean update(final Txn transaction, final Value key, final byte[] value * Update the key/value pair where the value is found at address p. */ public void update(final Txn transaction, final long pointer, final byte[] value) throws ReadOnlyException { - if (!lock.isLockedForWrite()) { - LOG.warn("The file doesn't own a write lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLockedForWrite(getLockName())) { + LOG.debug("The file doesn't own a write lock"); } final RecordPos recordPos = findRecord(pointer); final short valueLength = ByteConversion.byteToShort(recordPos.getPage().data, recordPos.offset); @@ -1904,8 +1922,8 @@ public void update(final Txn transaction, final long pointer, final byte[] value * @return string value of the specified node */ public String getNodeValue(final DBBroker broker, final IStoredNode node, final boolean addWhitespace) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } try { long address = node.getInternalAddress(); @@ -1954,8 +1972,8 @@ private 
void getNodeValue(final BrokerPool pool, final DocumentImpl doc, final FastByteArrayOutputStream os, final RecordPos rec, final boolean isTopNode, final boolean addWhitespace) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } //Locate the next real node, skipping relocated nodes boolean foundNext = false; @@ -2107,8 +2125,8 @@ protected RecordPos findRecord(final long pointer) { * @return The record position in the page */ protected RecordPos findRecord(final long pointer, final boolean skipLinks) { - if (!lock.hasLock()) { - LOG.warn("The file doesn't own a lock"); + if(LOG.isDebugEnabled() && !lockManager.isBtreeLocked(getLockName())) { + LOG.debug("The file doesn't own a lock"); } long pageNum = StorageAddress.pageFromPointer(pointer); short tupleID = StorageAddress.tidFromPointer(pointer); @@ -2139,8 +2157,8 @@ protected RecordPos findRecord(final long pointer, final boolean skipLinks) { } @Override - public Lock getLock() { - return lock; + public String getLockName() { + return getFileName(); } /** diff --git a/src/org/exist/storage/dom/DOMTransaction.java b/src/org/exist/storage/dom/DOMTransaction.java index ea773563201..29686807d39 100644 --- a/src/org/exist/storage/dom/DOMTransaction.java +++ b/src/org/exist/storage/dom/DOMTransaction.java @@ -21,15 +21,21 @@ */ package org.exist.storage.dom; +import com.evolvedbinary.j8fu.function.SupplierE; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.dom.persistent.DocumentImpl; +import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedLock; import org.exist.util.FileUtils; import org.exist.util.LockException; import org.exist.util.ReadOnlyException; +import java.util.concurrent.locks.ReentrantLock; +import 
java.util.function.Supplier; + /** * DOMTransaction controls access to the DOM file * @@ -46,25 +52,18 @@ public abstract class DOMTransaction { private final Object ownerObject; private final DOMFile file; - private final LockMode mode; + private final SupplierE, LockException> acquireFn; private final DocumentImpl document; - /** - * @deprecated : use other constructors - */ - public DOMTransaction(final Object owner, final DOMFile file) { - this(owner, file, LockMode.READ_LOCK); - } - /** * Creates a new DOMTransaction instance. * * @param owner an Object value * @param file a DOMFile value - * @param mode an int value + * @param acquireFn a Supplier value */ - public DOMTransaction(final Object owner, final DOMFile file, final LockMode mode) { - this(owner, file, mode, null); + public DOMTransaction(final Object owner, final DOMFile file, final SupplierE, LockException> acquireFn) { + this(owner, file, acquireFn, null); } /** @@ -72,13 +71,13 @@ public DOMTransaction(final Object owner, final DOMFile file, final LockMode mod * * @param owner an Object value * @param file a DOMFile value - * @param mode an int value + * @param acquireFn a Supplier value * @param doc a DocumentImpl value */ - public DOMTransaction(final Object owner, final DOMFile file, final LockMode mode, final DocumentImpl doc) { + public DOMTransaction(final Object owner, final DOMFile file, final SupplierE, LockException> acquireFn, final DocumentImpl doc) { this.ownerObject = owner; this.file = file; - this.mode = mode; + this.acquireFn = acquireFn; this.document = doc; } @@ -96,22 +95,16 @@ public DOMTransaction(final Object owner, final DOMFile file, final LockMode mod * @return an Object value */ public Object run() { - final Lock lock = file.getLock(); - try { - // try to acquire a lock on the file - try { - lock.acquire( mode ); - } catch( final LockException e ) { - LOG.error("Failed to acquire read lock on " + FileUtils.fileName(file.getFile()), e); - return null; - } + // try to 
acquire a lock on the file + try(final ManagedLock domFileLock = acquireFn.get()) { file.setOwnerObject(ownerObject); file.setCurrentDocument(document); return start(); + } catch(final LockException e) { + LOG.error("Failed to acquire read lock on " + FileUtils.fileName(file.getFile()), e); + return null; } catch(final ReadOnlyException e) { LOG.error(e.getMessage(), e); - } finally { - lock.release(mode); } return null; } diff --git a/src/org/exist/storage/dom/NodeIterator.java b/src/org/exist/storage/dom/NodeIterator.java index 683f117eec4..ad2f0277edb 100644 --- a/src/org/exist/storage/dom/NodeIterator.java +++ b/src/org/exist/storage/dom/NodeIterator.java @@ -11,14 +11,16 @@ import org.exist.storage.btree.BTree; import org.exist.storage.btree.BTreeException; import org.exist.storage.btree.Paged.Page; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.util.ByteConversion; import org.exist.util.FileUtils; import org.exist.util.LockException; import org.exist.util.sanity.SanityCheck; import java.io.IOException; +import java.util.concurrent.locks.ReentrantLock; + import org.exist.dom.persistent.NodeHandle; /** @@ -42,6 +44,7 @@ public final class NodeIterator implements INodeIterator { private long pageNum; private long startAddress = StoredNode.UNKNOWN_NODE_IMPL_ADDRESS; private DBBroker broker; + private final LockManager lockManager; private boolean useNodePool = false; public NodeIterator(DBBroker broker, DOMFile db, NodeHandle node, boolean poolable) @@ -51,6 +54,7 @@ public NodeIterator(DBBroker broker, DOMFile db, NodeHandle node, boolean poolab this.useNodePool = poolable; this.node = node; this.broker = broker; + this.lockManager = broker.getBrokerPool().getLockManager(); } /** @@ -70,15 +74,7 @@ public long currentAddress() { */ @Override public boolean hasNext() { - final Lock lock = db.getLock(); - try { - try { - 
lock.acquire(LockMode.READ_LOCK); - } catch (final LockException e) { - LOG.warn("Failed to acquire read lock on " + FileUtils.fileName(db.getFile())); - //TODO : throw exception here ? -pb - return false; - } + try(final ManagedLock domFileLock = lockManager.acquireBtreeReadLock(db.getLockName())) { db.setOwnerObject(broker); if (gotoNextPosition()) { db.getPageBuffer().add(page); @@ -91,14 +87,13 @@ else if (pageHeader.getNextDataPage() == Page.NO_PAGE) //Mmmmh... strange -pb {return true;} } - } catch (final BTreeException e) { - LOG.warn(e); + } catch (final LockException e) { + LOG.warn("Failed to acquire read lock on " + FileUtils.fileName(db.getFile())); //TODO : throw exception here ? -pb - } catch (final IOException e) { + return false; + } catch (final BTreeException | IOException e) { LOG.warn(e); //TODO : throw exception here ? -pb - } finally { - lock.release(LockMode.READ_LOCK); } return false; } @@ -108,15 +103,7 @@ else if (pageHeader.getNextDataPage() == Page.NO_PAGE) */ @Override public IStoredNode next() { - final Lock lock = db.getLock(); - try { - try { - lock.acquire(LockMode.READ_LOCK); - } catch (final LockException e) { - LOG.warn("Failed to acquire read lock on " + FileUtils.fileName(db.getFile())); - //TODO : throw exception here ? -pb - return null; - } + try(final ManagedLock domFileLock = lockManager.acquireBtreeReadLock(db.getLockName())) { db.setOwnerObject(broker); IStoredNode nextNode = null; if (gotoNextPosition()) { @@ -211,14 +198,13 @@ public IStoredNode next() { } while (nextNode == null); } return nextNode; - } catch (final BTreeException e) { - LOG.error(e.getMessage(), e); - //TODO : re-throw exception ? -pb - } catch (final IOException e) { + } catch (final LockException e) { + LOG.warn("Failed to acquire read lock on " + FileUtils.fileName(db.getFile())); + //TODO : throw exception here ? -pb + return null; + } catch (final BTreeException | IOException e) { LOG.error(e.getMessage(), e); //TODO : re-throw exception ? 
-pb - } finally { - lock.release(LockMode.READ_LOCK); } return null; } diff --git a/src/org/exist/storage/dom/RawNodeIterator.java b/src/org/exist/storage/dom/RawNodeIterator.java index f89cb1dad0e..d985bdf4a9d 100644 --- a/src/org/exist/storage/dom/RawNodeIterator.java +++ b/src/org/exist/storage/dom/RawNodeIterator.java @@ -31,14 +31,15 @@ import org.exist.storage.btree.BTreeException; import org.exist.storage.btree.Paged; import org.exist.storage.btree.Value; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.util.ByteConversion; import org.exist.util.FileUtils; import org.exist.util.LockException; import org.exist.util.sanity.SanityCheck; import java.io.IOException; +import java.util.concurrent.locks.ReentrantLock; /** * An iterator that walks through the raw node data items in a document. The class @@ -51,6 +52,7 @@ public class RawNodeIterator implements IRawNodeIterator { private final static Logger LOG = LogManager.getLogger(RawNodeIterator.class); private DBBroker broker; + private final LockManager lockManager; private final DOMFile db; private int offset; @@ -69,15 +71,14 @@ public class RawNodeIterator implements IRawNodeIterator { */ public RawNodeIterator(final DBBroker broker, final DOMFile db, final NodeHandle node) throws IOException { this.broker = broker; + this.lockManager = broker.getBrokerPool().getLockManager(); this.db = db; seek(node); } @Override public final void seek(final NodeHandle node) throws IOException { - final Lock lock = db.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock domFileLock = lockManager.acquireBtreeReadLock(db.getLockName())) { RecordPos rec = null; if (StorageAddress.hasAddress(node.getInternalAddress())) {rec = db.findRecord(node.getInternalAddress());} @@ -97,23 +98,14 @@ public final void seek(final NodeHandle node) throws IOException { page = 
rec.getPage(); } catch (final LockException e) { throw new IOException("Exception while scanning document: " + e.getMessage()); - } finally { - lock.release(LockMode.READ_LOCK); } } @Override public Value next() { Value nextValue = null; - final Lock lock = db.getLock(); - try { - try { - lock.acquire(LockMode.READ_LOCK); - } catch (final LockException e) { - LOG.error("Failed to acquire read lock on " + FileUtils.fileName(db.getFile())); - //TODO : throw exception here ? -pb - return null; - } + try(final ManagedLock domFileLock = lockManager.acquireBtreeReadLock(db.getLockName())) { + db.setOwnerObject(broker); long backLink = 0; do { @@ -199,8 +191,10 @@ public Value next() { } } while (nextValue == null); return nextValue; - } finally { - lock.release(LockMode.READ_LOCK); + } catch (final LockException e) { + LOG.error("Failed to acquire read lock on " + FileUtils.fileName(db.getFile())); + //TODO : throw exception here ? -pb + return null; } } diff --git a/src/org/exist/storage/index/BFile.java b/src/org/exist/storage/index/BFile.java index 2356f05c713..07336829473 100644 --- a/src/org/exist/storage/index/BFile.java +++ b/src/org/exist/storage/index/BFile.java @@ -25,7 +25,6 @@ import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; -import org.exist.storage.CacheManager; import org.exist.storage.DefaultCacheManager; import org.exist.storage.NativeBroker; import org.exist.storage.StorageAddress; @@ -45,9 +44,8 @@ import org.exist.storage.journal.LogEntryTypes; import org.exist.storage.journal.Loggable; import org.exist.storage.journal.Lsn; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; -import org.exist.storage.lock.ReentrantReadWriteLock; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.Txn; import org.exist.util.*; import org.exist.util.io.FastByteArrayOutputStream; @@ -61,6 +59,7 @@ import java.text.NumberFormat; import 
java.util.ArrayList; import java.util.Arrays; +import java.util.concurrent.locks.ReentrantLock; import static java.nio.charset.StandardCharsets.UTF_8; @@ -135,10 +134,10 @@ public class BFile extends BTree { LogEntryTypes.addEntryType(LOG_OVERFLOW_REMOVE, OverflowRemoveLoggable::new); } + protected final LockManager lockManager; protected final BFileHeader fileHeader; protected final int minFree; protected final Cache dataCache; - protected final Lock lock; public final int fixedKeyLen = -1; protected final int maxValueSize; @@ -146,11 +145,11 @@ public class BFile extends BTree { public BFile(final BrokerPool pool, final byte fileId, final boolean recoveryEnabled, final Path file, final DefaultCacheManager cacheManager, final double cacheGrowth, final double thresholdData) throws DBException { super(pool, fileId, recoveryEnabled, cacheManager, file); + lockManager = pool.getLockManager(); fileHeader = (BFileHeader) getFileHeader(); - dataCache = new LRUCache<>(FileUtils.fileName(file), 64, cacheGrowth, thresholdData, CacheManager.DATA_CACHE); + dataCache = new LRUCache<>(FileUtils.fileName(file), 64, cacheGrowth, thresholdData, Cache.CacheType.DATA); cacheManager.registerCache(dataCache); minFree = PAGE_MIN_FREE; - lock = new ReentrantReadWriteLock(FileUtils.fileName(file)); maxValueSize = fileHeader.getWorkSize() / 2; if(exists()) { @@ -177,8 +176,8 @@ public short getFileVersion() { * @return Lock */ @Override - public Lock getLock() { - return lock; + public String getLockName() { + return FileUtils.fileName(getFile()); } protected long getDataSyncPeriod() { @@ -2333,8 +2332,8 @@ private final void advance() throws IOException { throw new EOFException(); } - try { - lock.acquire(LockMode.READ_LOCK); + + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(getLockName())) { nextPage = (SinglePage) getDataPage(next, false); pageLen = nextPage.ph.getDataLength(); offset = 0; @@ -2342,8 +2341,6 @@ private final void advance() throws IOException { } 
catch (final LockException e) { throw new IOException("failed to acquire a read lock on " + FileUtils.fileName(getFile())); - } finally { - lock.release(LockMode.READ_LOCK); } } @@ -2454,8 +2451,7 @@ public long position() { public void seek(final long position) throws IOException { final int newPage = StorageAddress.pageFromPointer(position); final short newOffset = StorageAddress.tidFromPointer(position); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock bfileLock = lockManager.acquireBtreeReadLock(getLockName())) { nextPage = getSinglePage(newPage); pageLen = nextPage.ph.getDataLength(); if (pageLen > fileHeader.getWorkSize()) { @@ -2465,8 +2461,6 @@ public void seek(final long position) throws IOException { dataCache.add(nextPage); } catch (final LockException e) { throw new IOException("Failed to acquire a read lock on " + FileUtils.fileName(getFile())); - } finally { - lock.release(LockMode.READ_LOCK); } } } diff --git a/src/org/exist/storage/index/BTreeStore.java b/src/org/exist/storage/index/BTreeStore.java index f194c8a5c67..f629af256bc 100644 --- a/src/org/exist/storage/index/BTreeStore.java +++ b/src/org/exist/storage/index/BTreeStore.java @@ -4,8 +4,6 @@ import org.exist.storage.DefaultCacheManager; import org.exist.storage.btree.BTree; import org.exist.storage.btree.DBException; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.ReentrantReadWriteLock; import org.exist.util.FileUtils; import java.nio.file.Path; @@ -14,11 +12,8 @@ public class BTreeStore extends BTree { public final static short FILE_FORMAT_VERSION_ID = 2; - protected Lock lock = null; - public BTreeStore(final BrokerPool pool, final byte fileId, final boolean recoverEnabled, final Path file, final DefaultCacheManager cacheManager) throws DBException { super(pool, fileId, recoverEnabled, cacheManager, file); - lock = new ReentrantReadWriteLock(FileUtils.fileName(file)); if(exists()) { open(FILE_FORMAT_VERSION_ID); @@ -32,8 +27,8 @@ public 
BTreeStore(final BrokerPool pool, final byte fileId, final boolean recove } @Override - public Lock getLock() { - return lock; + public String getLockName() { + return FileUtils.fileName(getFile()); } public short getFileVersion() { diff --git a/src/org/exist/storage/index/CollectionStore.java b/src/org/exist/storage/index/CollectionStore.java index f6aeb5aa276..3fd7da5ee0c 100644 --- a/src/org/exist/storage/index/CollectionStore.java +++ b/src/org/exist/storage/index/CollectionStore.java @@ -10,14 +10,15 @@ import org.exist.storage.BrokerPool; import org.exist.storage.btree.DBException; import org.exist.storage.btree.Value; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedLock; import org.exist.util.*; import java.io.IOException; import java.io.Writer; import java.nio.file.Path; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.concurrent.locks.ReentrantLock; /** * Handles access to the central collection storage file (collections.dbx). 
@@ -37,8 +38,8 @@ public class CollectionStore extends BFile { public final static byte KEY_TYPE_COLLECTION = 0; public final static byte KEY_TYPE_DOCUMENT = 1; - private Stack freeResourceIds = new Stack<>(); - private Stack freeCollectionIds = new Stack<>(); + private Deque freeResourceIds = new ArrayDeque<>(); + private Deque freeCollectionIds = new ArrayDeque<>(); /** * @param pool @@ -80,24 +81,16 @@ public boolean flush() throws DBException { } public void freeResourceId(int id) { - final Lock lock = getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { freeResourceIds.push(id); } catch (LockException e) { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } public int getFreeResourceId() { int freeDocId = DocumentImpl.UNKNOWN_DOCUMENT_ID; - final Lock lock = getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { if (!freeResourceIds.isEmpty()) { freeDocId = freeResourceIds.pop(); } @@ -105,31 +98,21 @@ public int getFreeResourceId() { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); return DocumentImpl.UNKNOWN_DOCUMENT_ID; //TODO : rethrow ? 
-pb - } finally { - lock.release(LockMode.WRITE_LOCK); } return freeDocId; } public void freeCollectionId(int id) { - final Lock lock = getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { freeCollectionIds.push(id); } catch (LockException e) { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } public int getFreeCollectionId() { int freeCollectionId = Collection.UNKNOWN_COLLECTION_ID; - final Lock lock = getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); - + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(getLockName())) { if (!freeCollectionIds.isEmpty()) { freeCollectionId = freeCollectionIds.pop(); } @@ -137,8 +120,6 @@ public int getFreeCollectionId() { LOG.warn("Failed to acquire lock on " + FileUtils.fileName(getFile()), e); return Collection.UNKNOWN_COLLECTION_ID; //TODO : rethrow ? -pb - } finally { - lock.release(LockMode.WRITE_LOCK); } return freeCollectionId; } diff --git a/src/org/exist/storage/index/FreeList.java b/src/org/exist/storage/index/FreeList.java index 815ab1ab94d..bcda7444c0d 100644 --- a/src/org/exist/storage/index/FreeList.java +++ b/src/org/exist/storage/index/FreeList.java @@ -52,10 +52,6 @@ public class FreeList { protected FreeSpace last = null; protected int size = 0; - public FreeList() { - //Nothing to do - } - /** * Append a new {@link FreeSpace} object to the list, * describing the amount of free space available on a page. 
diff --git a/src/org/exist/storage/io/AbstractVariableByteInput.java b/src/org/exist/storage/io/AbstractVariableByteInput.java index 5f9b8a3944c..79c2922e381 100644 --- a/src/org/exist/storage/io/AbstractVariableByteInput.java +++ b/src/org/exist/storage/io/AbstractVariableByteInput.java @@ -32,10 +32,6 @@ */ public abstract class AbstractVariableByteInput implements VariableByteInput { - public AbstractVariableByteInput() { - //Nothing to do - } - @Override public byte readByte() throws IOException { final int i = read(); diff --git a/src/org/exist/storage/journal/FileSyncThread.java b/src/org/exist/storage/journal/FileSyncRunnable.java similarity index 74% rename from src/org/exist/storage/journal/FileSyncThread.java rename to src/org/exist/storage/journal/FileSyncRunnable.java index efe75a76b21..ce022ca3b0a 100644 --- a/src/org/exist/storage/journal/FileSyncThread.java +++ b/src/org/exist/storage/journal/FileSyncRunnable.java @@ -20,6 +20,8 @@ package org.exist.storage.journal; import net.jcip.annotations.GuardedBy; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import java.io.IOException; import java.nio.channels.FileChannel; @@ -36,7 +38,9 @@ * * @author wolf */ -public class FileSyncThread extends Thread { +public class FileSyncRunnable implements Runnable { + + private static final Logger LOG = LogManager.getLogger(FileSyncRunnable.class); @GuardedBy("latch") private FileChannel endOfLog; private final Object latch; @@ -51,10 +55,10 @@ public class FileSyncThread extends Thread { * Create a new FileSyncThread, using the specified latch * to synchronize on. * - * @param latch The object to synchronize on + * @param latch The object to synchronize on for + * accessing the file channel in {@link #setChannel(FileChannel)}. 
*/ - public FileSyncThread(final Object latch) { - super("exist-fileSyncThread"); + public FileSyncRunnable(final Object latch) { this.latch = latch; } @@ -75,18 +79,21 @@ public void setChannel(final FileChannel channel) { * Trigger a sync on the journal. If a sync is already in progress, * the method will just wait until the sync has completed. */ - public synchronized void triggerSync() { - // trigger a sync - syncTriggered = true; - notifyAll(); + public void triggerSync() { + synchronized (this) { + syncTriggered = true; + notifyAll(); + } } /** - * Shutdown the sync thread. + * Request to shutdown the sync runnable. + * + * NOTE: calling thread should call Thread#interrupt() on + * the thread running the FileSyncRunnable. */ public void shutdown() { shutdown = true; - interrupt(); } /** @@ -99,13 +106,14 @@ public void closeChannel() { endOfLog.close(); } catch (final IOException e) { // may occur during shutdown + LOG.error(e); } } } } /** - * Wait for a sync event or shutdown. + * Process sync events, or shutdown. */ @Override public void run() { @@ -114,15 +122,29 @@ public void run() { try { wait(); } catch (final InterruptedException e) { - //Nothing to do + // likely (but not definitely) caused by request to {@link #shutdown()} + + // restore interrupted status + Thread.currentThread().interrupt(); + + if (shutdown) { + // avoid double sync on shutdown + break; + } } + if (syncTriggered) { sync(); + syncTriggered = false; } } } - // shutdown: sync the file and close it - sync(); + + // shutdown... 
always sync the file and close it + synchronized(this) { + sync(); + syncTriggered = false; + } closeChannel(); } @@ -134,9 +156,9 @@ private void sync() { endOfLog.force(false); } catch (final IOException e) { // may occur during shutdown + LOG.error(e); } } - syncTriggered = false; } } } diff --git a/src/org/exist/storage/journal/Journal.java b/src/org/exist/storage/journal/Journal.java index f9e755b34d4..93c3532d801 100644 --- a/src/org/exist/storage/journal/Journal.java +++ b/src/org/exist/storage/journal/Journal.java @@ -44,6 +44,8 @@ import org.exist.util.ReadOnlyException; import org.exist.util.sanity.SanityCheck; +import static org.exist.util.ThreadUtils.newInstanceThread; + /** * Manages the journalling log. The database uses one central journal for * all data files. If the journal exceeds the predefined maximum size, a new file is created. @@ -124,7 +126,8 @@ public final class Journal { private FileChannel channel; /** Synching the journal is done by a background thread */ - private final FileSyncThread syncThread; + private final FileSyncRunnable fileSyncRunnable; + private final Thread fileSyncThread; /** latch used to synchronize writes to the channel */ private final Object latch = new Object(); @@ -172,10 +175,11 @@ public Journal(final BrokerPool pool, final Path directory) throws EXistExceptio this.pool = pool; this.fsJournalDir = directory.resolve("fs.journal"); // we use a 1 megabyte buffer: - currentBuffer = ByteBuffer.allocateDirect(1024 * 1024); + this.currentBuffer = ByteBuffer.allocateDirect(1024 * 1024); - syncThread = new FileSyncThread(latch); - syncThread.start(); //this makes us to use class as a final only - no inheritance allowed + this.fileSyncRunnable = new FileSyncRunnable(latch); + this.fileSyncThread = newInstanceThread(pool, "file-sync-thread", fileSyncRunnable); + fileSyncThread.start(); //this makes us to use class as a final only - no inheritance allowed this.syncOnCommit = 
pool.getConfiguration().getProperty(PROPERTY_RECOVERY_SYNC_ON_COMMIT, DEFAULT_SYNC_ON_COMMIT); if (LOG.isDebugEnabled()) { @@ -295,7 +299,7 @@ public synchronized void flushToLog(final boolean fsync, final boolean forceSync } flushBuffer(); if (forceSync || (fsync && syncOnCommit && currentLsn > lastSyncLsn)) { - syncThread.triggerSync(); + fileSyncRunnable.triggerSync(); lastSyncLsn = currentLsn; } try { @@ -356,13 +360,15 @@ public void checkpoint(final long txnId, final boolean switchLogFiles) throws Jo try { if (switchLogFiles && channel != null && channel.position() > journalSizeMin) { final Path oldFile = getFile(currentFile); - final RemoveThread rt = new RemoveThread(channel, oldFile); + final RemoveRunnable removeRunnable = new RemoveRunnable(channel, oldFile); try { switchFiles(); } catch (final LogException e) { LOG.warn("Failed to create new journal: " + e.getMessage(), e); } - rt.start(); + + final Thread removeThread = newInstanceThread(pool, "remove-journal", removeRunnable); + removeThread.start(); } clearBackupFiles(); } catch (final IOException e) { @@ -429,8 +435,8 @@ public void switchFiles() throws LogException { //RandomAccessFile raf = new RandomAccessFile(file, "rw"); os = new FileOutputStream(file.toFile(), true); channel = os.getChannel(); - - syncThread.setChannel(channel); + + fileSyncRunnable.setChannel(channel); } catch (final FileNotFoundException e) { throw new LogException("Failed to open new journal: " + file.toAbsolutePath().toString(), e); } @@ -523,9 +529,10 @@ public void shutdown(final long txnId, final boolean checkpoint) { flushBuffer(); } fileLock.release(); - syncThread.shutdown(); + fileSyncRunnable.shutdown(); + fileSyncThread.interrupt(); try { - syncThread.join(); + fileSyncThread.join(); } catch (final InterruptedException e) { //Nothing to do } @@ -554,12 +561,11 @@ static String getFileName(final int fileNum) { return hex + '.' 
+ LOG_FILE_SUFFIX; } - private static class RemoveThread extends Thread { - final FileChannel channel; - final Path path; + private static class RemoveRunnable implements Runnable { + private final FileChannel channel; + private final Path path; - RemoveThread(final FileChannel channel, final Path path) { - super("exist-removeJournalThread"); + RemoveRunnable(final FileChannel channel, final Path path) { this.channel = channel; this.path = path; } diff --git a/src/org/exist/storage/lock/DeadlockDetection.java b/src/org/exist/storage/lock/DeadlockDetection.java deleted file mode 100644 index c787d134d8b..00000000000 --- a/src/org/exist/storage/lock/DeadlockDetection.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2001-2015 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * - * $Id$ - */ -package org.exist.storage.lock; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import java.io.IOException; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Deadlock detection for resource and collection locks. 
The static methods in this class - * keep track of all waiting threads, which are currently waiting on a resource or collection - * lock. In some scenarios (e.g. a complex XQuery which modifies resources), a single thread - * may acquire different read/write locks on resources in a collection. The locks can be arbitrarily - * nested. For example, a thread may first acquire a read lock on a collection, then a read lock on - * a resource and later acquires a write lock on the collection to remove the resource. - * - * Since we have locks on both, collections and resources, deadlock situations are sometimes - * unavoidable. For example, imagine the following scenario: - * - *
    - *
  • T1 owns write lock on resource
  • - *
  • T2 owns write lock on collection
  • - *
  • T2 wants to acquire write lock on resource locked by T1
  • - *
  • T1 tries to acquire write lock on collection currently locked by T2
  • - *
  • DEADLOCK
  • - *
- * - * The code should probably be redesigned to avoid this kind of crossed collection-resource - * locking, which easily leads to circular wait conditions. However, this needs to be done with care. In - * the meantime, DeadlockDetection is used to detect deadlock situations as the one described - * above. The lock classes can - * then try to resolve the deadlock by suspending one thread. - */ -public class DeadlockDetection { - - private final static Logger LOG = LogManager.getLogger(DeadlockDetection.class); - - private final static Map waitForResource = new HashMap<>(); - private final static Map waitForCollection = new HashMap<>(); - - /** - * Register a thread as waiting for a resource lock. - * - * @param thread the thread - * @param waiter the WaitingThread object which wraps around the thread - */ - public static void addResourceWaiter(final Thread thread, final WaitingThread waiter) { - synchronized (DeadlockDetection.class) { - waitForResource.put(thread, waiter); - } - } - - /** - * Deregister a waiting thread. - * - * @param thread - * @return lock - */ - public static Lock clearResourceWaiter(final Thread thread) { - synchronized (DeadlockDetection.class) { - final WaitingThread waiter = waitForResource.remove(thread); - if (waiter != null) - {return waiter.getLock();} - return null; - } - } - - public static WaitingThread getResourceWaiter(final Thread thread) { - synchronized (DeadlockDetection.class) { - return waitForResource.get(thread); - } - } - - /** - * Check if there's a risk for a circular wait between threadA and threadB. The method tests if - * threadB is currently waiting for a resource lock (read or write). It then checks - * if threadA holds a lock on this resource. If yes, the {@link org.exist.storage.lock.WaitingThread} - * object for threadB is returned. This object can be used to suspend the waiting thread - * in order to temporarily yield the lock to threadA. 
- * - * @param threadA - * @param threadB - * @return waiting thread - */ - public static WaitingThread deadlockCheckResource(final Thread threadA, final Thread threadB) { - synchronized (DeadlockDetection.class) { - //Check if threadB is waiting for a resource lock - final WaitingThread waitingThread = waitForResource.get(threadB); - //If lock != null, check if thread B waits for a resource lock currently held by thread A - if (waitingThread != null) { - return waitingThread.getLock().hasLock(threadA) ? waitingThread : null; - } - return null; - } - } - - /** - * Check if the second thread is currently waiting for a resource lock and - * is blocked by the first thread. - * - * @param threadA the thread whose lock might be blocking threadB - * @param threadB the thread to check - * @return true if threadB is currently blocked by a lock held by threadA - */ - public static boolean isBlockedBy(final Thread threadA, final Thread threadB) { - synchronized (DeadlockDetection.class) { - //Check if threadB is waiting for a resource lock - final WaitingThread waitingThread = waitForResource.get(threadB); - //If lock != null, check if thread B waits for a resource lock currently held by thread A - if (waitingThread != null) { - return waitingThread.getLock().hasLock(threadA); - } - return false; - } - } - - public static boolean wouldDeadlock(final Thread waiter, final Thread owner, final List waiters) { - synchronized (DeadlockDetection.class) { - final WaitingThread wt = waitForResource.get(owner); - if (wt != null) { - if (waiters.contains(wt)) { - // probably a deadlock, but not directly connected to the current thread - // return to avoid endless loop - return false; - } - waiters.add(wt); - final Lock l = wt.getLock(); - final Thread t = ((MultiReadReentrantLock) l).getWriteLockedThread(); - if (t == owner) { - return false; - } - if (t != null) { - if (t == waiter) - {return true;} - return wouldDeadlock(waiter, t, waiters); - } - return false; - } - final Lock l = 
waitForCollection.get(owner); - if (l != null) { - final Thread t = ((ReentrantReadWriteLock) l).getOwner(); - if (t == owner) { - return false; - } - if (t != null) { - if (t == waiter) - {return true;} - return wouldDeadlock(waiter, t, waiters); - } - } - return false; - } - } - - /** - * Register a thread as waiting for a resource lock. - * - * @param waiter the thread - * @param lock the lock object - */ - public static void addCollectionWaiter(final Thread waiter, final Lock lock) { - synchronized (DeadlockDetection.class) { - waitForCollection.put(waiter, lock); - } - } - - public static Lock clearCollectionWaiter(final Thread waiter) { - synchronized (DeadlockDetection.class) { - return waitForCollection.remove(waiter); - } - } - - public static Lock isWaitingFor(final Thread waiter) { - synchronized (DeadlockDetection.class) { - return waitForCollection.get(waiter); - } - } - - public static Map getWaitingThreads() { - final Map table = new HashMap<>(); - for (final WaitingThread waitingThread : waitForResource.values()) { - table.put(waitingThread.getThread().getName(), waitingThread.getLock().getLockInfo()); - } - for (final Map.Entry entry : waitForCollection.entrySet()) { - table.put(entry.getKey().getName(), entry.getValue().getLockInfo()); - } - return table; - } - - public static void debug(final String name, final LockInfo info) { - try(final StringWriter sout = new StringWriter(); - final PrintWriter writer = new PrintWriter(sout)) { - debug(writer, name, info); - System.out.println(sout.toString()); - } catch(final IOException e) { - LOG.error(e.getMessage(), e); - } - } - - public static void debug(final PrintWriter writer, final String name, final LockInfo info) { - writer.println("Thread: " + name); - if (info != null) { - writer.format("%20s: %s\n", "Lock type", info.getLockType()); - writer.format("%20s: %s\n", "Lock mode", info.getLockMode()); - writer.format("%20s: %s\n", "Lock id", info.getId()); - writer.format("%20s: %s\n", "Held by", 
Arrays.toString(info.getOwners())); - writer.format("%20s: %s\n", "Held by", Arrays.toString(info.getOwners())); - writer.format("%20s: %s\n", "Held by", Arrays.toString(info.getOwners())); - writer.format("%20s: %s\n", "Waiting for read", Arrays.toString(info.getWaitingForRead())); - writer.format("%20s: %s\n\n", "Waiting for write", Arrays.toString(info.getWaitingForWrite())); - } - } - - public static void debug(final PrintWriter writer) { - writer.println("Threads currently waiting for a lock:"); - writer.println("====================================="); - - final Map threads = getWaitingThreads(); - for (final Map.Entry entry : threads.entrySet()) { - debug(writer, entry.getKey(), entry.getValue()); - } - } -} diff --git a/src/org/exist/storage/lock/EnsureContainerLocked.java b/src/org/exist/storage/lock/EnsureContainerLocked.java new file mode 100644 index 00000000000..bdd9caac63f --- /dev/null +++ b/src/org/exist/storage/lock/EnsureContainerLocked.java @@ -0,0 +1,94 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.storage.lock; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * An annotation for indicating that certain locks + * must be held on the containing object before + * a method may be called. + * + * As well as explicitly expressing intention, this annotation can be used + * with {@link EnsureLockingAspect} to compile into the code runtime checks + * which will enforce the locking policy. + * + * Typically this is used on methods within implementations of {@link org.exist.collections.Collection} + * and {@link org.exist.dom.persistent.DocumentImpl}. + * The typical use is to ensure that a container holds appropriate locks (by URI) + * when calling the method accessors on their internal state. + * + *
+ * {@code
+ * public class MyCollectonImpl implements Collection {
+ *     final XmldbURI uri;
+ *     public MyCollectionImpl(@EnsureLocked(mode=LockMode.READ_LOCK, type=LockType.COLLECTION) final XmldbURI uri) {
+ *         this.uri = uri;
+ *     }
+ *
+ *     public XmldbURI getUri() {
+ *         return uri;
+ *     }
+ *
+ *     ...
+ *
+ *     @EnsureContainerLocked(mode=LockMode.READ_LOCK)
+ *     public int countDocuments() {
+ *         return documents.size();
+ *     }
+ * }
+ * }
+ * 
+ * + * @author Adam Retter + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(value = {ElementType.METHOD}) +public @interface EnsureContainerUnlocked { +} diff --git a/src/org/exist/storage/lock/EnsureLocked.java b/src/org/exist/storage/lock/EnsureLocked.java new file mode 100644 index 00000000000..23525d01019 --- /dev/null +++ b/src/org/exist/storage/lock/EnsureLocked.java @@ -0,0 +1,108 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.storage.lock; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.Lock.LockType; + +/** + * An annotation for indicating that certain locks + * must be held on parameters to a method or return types. + * + * As well as explicitly expressing intention, this annotation can be used + * with {@link EnsureLockingAspect} to compile into the code runtime checks + * which will enforce the locking policy. 
+ * + * Typically this is used with parameters of type {@link org.exist.collections.Collection} + * and {@link org.exist.dom.persistent.DocumentImpl}. If this annotation is + * used on an {@link org.exist.xmldb.XmldbURI} then a {@code type} value must + * also be provided to indicate the type of the lock identified by the uri. + * + * For example we may indicate that Collection parameters to methods + * must already be locked appropriately before the method is called: + *
+ * {@code
+ * public Result copyCollection(
+ *         @EnsureLocked(mode=LockMode.READ_LOCK) final Collection srcCollection,
+ *         @EnsureLocked(mode=LockMode.WRITE_LOCK) final Collection destCollection) {
+ *
+ *    ...
+ *
+ * }
+ * }
+ * 
+ * + * We may also indicate that objects returned from a function must have gained an appropriate + * lock for the calling thread: + * + *
+ * {@code
+ * public @EnsureLocked(mode=LockMode.READ_LOCK) Collection openCollection(final XmldbURI uri, final LockMode lockMode) {
+ *
+ *    ...
+ *
+ * }
+ * }
+ * 
+ * + * @author Adam Retter + */ +@Aspect +public class EnsureLockingAspect { + + public static final String PROP_DISABLED = "exist.ensurelocking.disabled"; + public static final String PROP_ENFORCE = "exist.ensurelocking.enforce"; + public static final String PROP_OUTPUT = "exist.ensurelocking.output"; + public static final String PROP_OUTPUT_STACK_DEPTH = "exist.ensurelocking.output.stack.depth"; + public static final String PROP_TRACE = "exist.ensurelocking.trace"; + + private static final boolean DISABLED = Boolean.parseBoolean(System.getProperty(PROP_DISABLED, "false")); + private static final boolean ENFORCE = Boolean.parseBoolean(System.getProperty(PROP_ENFORCE, "false")); + private static final boolean OUTPUT_TO_CONSOLE = System.getProperty(PROP_OUTPUT, "console").equals("console"); + private static final int OUTPUT_STACK_DEPTH = Integer.parseInt(System.getProperty(PROP_OUTPUT_STACK_DEPTH, "0")); + private static final boolean TRACE = Boolean.parseBoolean(System.getProperty(PROP_TRACE, "false")); + + private static final Logger LOG = LogManager.getLogger(EnsureLockingAspect.class); + + + @Pointcut("execution(* *(..,@org.exist.storage.lock.EnsureLocked (*),..))") + public void methodWithEnsureLockedParameters() { + } + + @Pointcut("execution(@org.exist.storage.lock.EnsureLocked (*) *(..))") + public void methodWithEnsureLockedReturnType() { + } + + @Pointcut("execution(@org.exist.storage.lock.EnsureContainerLocked (*) *(..))") + public void methodWithEnsureContainerLocked() { + } + + @Pointcut("execution(* *(..,@org.exist.storage.lock.EnsureUnlocked (*),..))") + public void methodWithEnsureUnlockedParameters() { + } + + @Pointcut("execution(@org.exist.storage.lock.EnsureUnlocked (*) *(..))") + public void methodWithEnsureUnlockedReturnType() { + } + + @Pointcut("execution(@org.exist.storage.lock.EnsureContainerUnlocked (*) *(..))") + public void methodWithEnsureContainerUnlocked() { + } + + /** + * Ensures that the parameters to a method + * annotated by 
{@link EnsureLocked} hold + * the indicated locks. + * + * @throws LockException if the appropriate locks are not held and + * the System property `exist.ensurelocking.enforce=true` is set. + */ + @Before("methodWithEnsureLockedParameters()") + public void enforceEnsureLockedParameters(final JoinPoint joinPoint) throws LockException { + + if(DISABLED) { + return; + } + + final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); + final Method method = ms.getMethod(); + final Object[] args = joinPoint.getArgs(); + + final List> ensureLockedParameters = getAllParameterAnnotations(method, EnsureLocked.class); + for (final AnnotatedParameterConstraint ensureLockedConstraint : ensureLockedParameters) { + final EnsureLockDetail ensureLockDetail = resolveLockDetail(ensureLockedConstraint, args); + traceln(() -> "Checking: method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + "( " + toAnnotationString(EnsureLocked.class, ensureLockDetail) + " " + ensureLockedConstraint.getParameter().getName() + ") ..."); + + // check the lock constraint holds + final LockManager lockManager = getLockManager(); + boolean failed = false; + if (lockManager != null) { + final int idx = ensureLockedConstraint.getParameterIndex(); + final Object arg = args[idx]; + + // if the argument is null, and annotated @Nullable, we can skip the check + if(arg == null && !getAllParameterAnnotations(method, Nullable.class).isEmpty()) { + traceln(() -> "Skipping method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + " for null argument(idx=" + idx + ") with @EnsureLocked @Nullable"); + continue; + } + + switch (ensureLockDetail.type) { + case COLLECTION: + final XmldbURI collectionUri; + if (XmldbURI.class.isAssignableFrom(arg.getClass())) { + collectionUri = (XmldbURI) arg; + } else { + collectionUri = ((Collection) arg).getURI(); + } + + if (!hasCollectionLock(lockManager, collectionUri, ensureLockDetail)) { + report("FAILED: Constraint to require lock mode " + 
ensureLockDetail.mode + " on Collection: " + collectionUri + " FAILED"); + failed = true; + } + break; + + case DOCUMENT: + final XmldbURI documentUri; + if (XmldbURI.class.isAssignableFrom(arg.getClass())) { + documentUri = (XmldbURI) arg; + } else { + documentUri = ((DocumentImpl) arg).getURI(); + } + + if (!hasDocumentLock(lockManager, documentUri, ensureLockDetail)) { + report("FAILED: Constraint to require lock mode " + ensureLockDetail.mode + " on Document: " + documentUri + " FAILED"); + failed = true; + } + break; + + default: + throw new UnsupportedOperationException("Currently only Collection or Document locks are supported"); + } + } + + if(!failed) { + traceln(() -> "PASSED."); + } + } + } + + /** + * Ensures that the object returned by a method + * has an lock taken upon it before it is returned. + * + * @throws LockException if the appropriate locks are not held and + * the System property `exist.ensurelocking.enforce=true` is set. + */ + @AfterReturning(value = "methodWithEnsureLockedReturnType()", returning = "result") + public void enforceEnsureLockedReturnType(final JoinPoint joinPoint, final Object result) throws Throwable { + + if(DISABLED) { + return; + } + + final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); + final Method method = ms.getMethod(); + + final AnnotatedMethodConstraint ensureLockedConstraint = getMethodAnnotation(method, EnsureLocked.class); + final EnsureLockDetail ensureLockDetail = resolveLockDetail(ensureLockedConstraint, joinPoint.getArgs()); + + traceln(() -> "Checking: " + toAnnotationString(EnsureLocked.class, ensureLockDetail) + " method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + " ..."); + + // check the lock constraint holds + boolean failed = false; + if(result != null) { + final LockManager lockManager = getLockManager(); + if (lockManager != null) { + switch (ensureLockDetail.type) { + case COLLECTION: + final XmldbURI collectionUri; + if 
(XmldbURI.class.isAssignableFrom(result.getClass())) { + collectionUri = (XmldbURI) result; + } else { + collectionUri = ((Collection) result).getURI(); + } + + if (!hasCollectionLock(lockManager, collectionUri, ensureLockDetail)) { + report("FAILED: Constraint to require lock mode " + ensureLockDetail.mode + " on Collection: " + collectionUri); + failed = true; + } + break; + + case DOCUMENT: + final XmldbURI documentUri; + if (XmldbURI.class.isAssignableFrom(result.getClass())) { + documentUri = (XmldbURI) result; + } else { + documentUri = ((DocumentImpl) result).getURI(); + } + + if (!hasDocumentLock(lockManager, documentUri, ensureLockDetail)) { + report("FAILED: Constraint to require lock mode " + ensureLockDetail.mode + " on Document: " + documentUri + " FAILED"); + failed = true; + } + break; + + default: + throw new UnsupportedOperationException("Currently only Collection or Document locks are supported"); + } + } + } else { + traceln(() -> "Unable to check return type as value is null!"); + } + + if(!failed) { + traceln(() -> "PASSED."); + } + } + + /** + * Ensures that the appropriate lock is held on the container + * object which houses the method before the method is called. + * + * @throws LockException if the appropriate locks are not held and + * the System property `exist.ensurelocking.enforce=true` is set. 
+ */ + @Before("methodWithEnsureContainerLocked() && target(container)") + public void enforceEnsureLockedContainer(final JoinPoint joinPoint, final Object container) throws LockException { + + if(DISABLED) { + return; + } + + final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); + final Method method = ms.getMethod(); + + final AnnotatedMethodConstraint ensureContainerLockedConstraint = + getMethodAnnotation(method, EnsureContainerLocked.class); + final EnsureLockDetail ensureLockDetail = resolveContainerLockDetail(ensureContainerLockedConstraint, joinPoint.getArgs()); + + traceln(() -> "Checking: " + toAnnotationString(EnsureContainerLocked.class, ensureLockDetail) + " method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + " ..."); + + // check the lock constraint holds + boolean failed = false; + final LockManager lockManager = getLockManager(); + if (lockManager != null) { + switch (ensureLockDetail.type) { + case COLLECTION: + final XmldbURI collectionUri; + if (Collection.class.isAssignableFrom(container.getClass())) { + collectionUri = ((Collection) container).getURI(); + } else { + throw new IllegalArgumentException("Container type was identified as Collection, but the container is not an implementation of Collection"); + } + + if (collectionUri == null) { + LOG.warn("collectionUri is null, unable to validate contract"); + break; + } + + if (!hasCollectionLock(lockManager, collectionUri, ensureLockDetail)) { + report("FAILED: Constraint to require lock mode " + ensureLockDetail.mode + " on Collection: " + collectionUri); + failed = true; + } + break; + + case DOCUMENT: + final XmldbURI documentUri; + if (DocumentImpl.class.isAssignableFrom(container.getClass())) { + documentUri = ((DocumentImpl) container).getURI(); + } else { + throw new IllegalArgumentException("Container type was identified as Document, but the container is not an implementation of DocumentImpl"); + } + + if (documentUri == null) { + LOG.warn("documentUri is 
null, unable to validate contract"); + break; + } + + if (!hasDocumentLock(lockManager, documentUri, ensureLockDetail)) { + report("FAILED: Constraint to require lock mode " + ensureLockDetail.mode + " on Document: " + documentUri + " FAILED"); + failed = true; + } + break; + + default: + throw new UnsupportedOperationException("Currently only Collection or Document container locks are supported"); + } + } + + if(!failed) { + traceln(() -> "PASSED."); + } + } + + /** + * Ensures that the parameters to a method + * annotated by {@link EnsureUnlocked} do not hold + * any locks. + * + * @throws LockException if any locks are held and + * the System property `exist.ensurelocking.enforce=true` is set. + */ + @Before("methodWithEnsureUnlockedParameters()") + public void enforceEnsureUnlockedParameters(final JoinPoint joinPoint) throws LockException { + + if(DISABLED) { + return; + } + + final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); + final Method method = ms.getMethod(); + final Object[] args = joinPoint.getArgs(); + + final List> ensureUnlockedParameters = getAllParameterAnnotations(method, EnsureUnlocked.class); + for (final AnnotatedParameterConstraint ensureUnlockedConstraint : ensureUnlockedParameters) { + final Lock.LockType lockType = resolveLockDetail(ensureUnlockedConstraint); + traceln(() -> "Checking: method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + "( " + toAnnotationString(EnsureUnlocked.class, lockType) + " " + ensureUnlockedConstraint.getParameter().getName() + ") ..."); + + // check the lock constraint holds + final LockManager lockManager = getLockManager(); + boolean failed = false; + if (lockManager != null) { + final int idx = ensureUnlockedConstraint.getParameterIndex(); + final Object arg = args[idx]; + + // if the argument is null, and annotated @Nullable, we can skip the check + if(arg == null && !getAllParameterAnnotations(method, Nullable.class).isEmpty()) { + traceln(() -> "Skipping method=" + 
ms.getDeclaringType().getName() + "#" + ms.getName() + " for null argument(idx=" + idx + ") with @EnsureUnlocked @Nullable"); + continue; + } + + switch (lockType) { + case COLLECTION: + final XmldbURI collectionUri; + if (XmldbURI.class.isAssignableFrom(arg.getClass())) { + collectionUri = (XmldbURI) arg; + } else { + collectionUri = ((Collection) arg).getURI(); + } + + if (!hasNoCollectionLocks(lockManager, collectionUri)) { + report("FAILED: Constraint to require no locks on Collection: " + collectionUri + " FAILED"); + failed = true; + } + break; + + case DOCUMENT: + final XmldbURI documentUri; + if (XmldbURI.class.isAssignableFrom(arg.getClass())) { + documentUri = (XmldbURI) arg; + } else { + documentUri = ((DocumentImpl) arg).getURI(); + } + + if (!hasNoDocumentLocks(lockManager, documentUri)) { + report("FAILED: Constraint to require no locks on Document: " + documentUri + " FAILED"); + failed = true; + } + break; + + default: + throw new UnsupportedOperationException("Currently only Collection or Document locks are supported"); + } + } + + if(!failed) { + traceln(() -> "PASSED."); + } + } + } + + /** + * Ensures that the object returned by a method + * has no lock held upon it before it is returned. + * + * @throws LockException if any locks are held and + * the System property `exist.ensurelocking.enforce=true` is set. 
+ */ + @AfterReturning(value = "methodWithEnsureUnlockedReturnType()", returning = "result") + public void enforceEnsureUnlockedReturnType(final JoinPoint joinPoint, final Object result) throws Throwable { + + if(DISABLED) { + return; + } + + final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); + final Method method = ms.getMethod(); + + final AnnotatedMethodConstraint ensureUnlockedConstraint = getMethodAnnotation(method, EnsureUnlocked.class); + final Lock.LockType lockType = resolveLockDetail(ensureUnlockedConstraint); + + traceln(() -> "Checking: " + toAnnotationString(EnsureUnlocked.class, lockType) + " method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + " ..."); + + // check the lock constraint holds + boolean failed = false; + if(result != null) { + final LockManager lockManager = getLockManager(); + if (lockManager != null) { + switch (lockType) { + case COLLECTION: + final XmldbURI collectionUri; + if (XmldbURI.class.isAssignableFrom(result.getClass())) { + collectionUri = (XmldbURI) result; + } else { + collectionUri = ((Collection) result).getURI(); + } + + if (!hasNoCollectionLocks(lockManager, collectionUri)) { + report("FAILED: Constraint to require no locks on Collection: " + collectionUri + " FAILED"); + failed = true; + } + break; + + case DOCUMENT: + final XmldbURI documentUri; + if (XmldbURI.class.isAssignableFrom(result.getClass())) { + documentUri = (XmldbURI) result; + } else { + documentUri = ((DocumentImpl) result).getURI(); + } + + if (!hasNoDocumentLocks(lockManager, documentUri)) { + report("FAILED: Constraint to require no locks on Document: " + documentUri + " FAILED"); + failed = true; + } + break; + + default: + throw new UnsupportedOperationException("Currently only Collection or Document locks are supported"); + } + } + } else { + traceln(() -> "Unable to check return type as value is null!"); + } + + if(!failed) { + traceln(() -> "PASSED."); + } + } + + /** + * Ensures that the no locks are held on 
the container + * object which houses the method before the method is called. + * + * @throws LockException if any locks are held and + * the System property `exist.ensurelocking.enforce=true` is set. + */ + @Before("methodWithEnsureContainerUnlocked() && target(container)") + public void enforceEnsureUnlockedContainer(final JoinPoint joinPoint, final Object container) throws LockException { + + if(DISABLED) { + return; + } + + final MethodSignature ms = (MethodSignature)joinPoint.getSignature(); + final Method method = ms.getMethod(); + + final AnnotatedMethodConstraint ensureContainerUnlockedConstraint = + getMethodAnnotation(method, EnsureContainerUnlocked.class); + final Lock.LockType lockType = resolveContainerLockDetail(ensureContainerUnlockedConstraint); + + traceln(() -> "Checking: " + toAnnotationString(EnsureContainerUnlocked.class, lockType) + " method=" + ms.getDeclaringType().getName() + "#" + ms.getName() + " ..."); + + // check the lock constraint holds + boolean failed = false; + final LockManager lockManager = getLockManager(); + if (lockManager != null) { + switch (lockType) { + case COLLECTION: + final XmldbURI collectionUri; + if (Collection.class.isAssignableFrom(container.getClass())) { + collectionUri = ((Collection) container).getURI(); + } else { + throw new IllegalArgumentException("Container type was identified as Collection, but the container is not an implementation of Collection"); + } + + if (!hasNoCollectionLocks(lockManager, collectionUri)) { + report("FAILED: Constraint to require no locks on Collection: " + collectionUri); + failed = true; + } + break; + + case DOCUMENT: + final XmldbURI documentUri; + if (DocumentImpl.class.isAssignableFrom(container.getClass())) { + documentUri = ((DocumentImpl) container).getURI(); + } else { + throw new IllegalArgumentException("Container type was identified as Document, but the container is not an implementation of DocumentImpl"); + } + + if (!hasNoDocumentLocks(lockManager, documentUri)) { + 
report("FAILED: Constraint to require no locks on Document: " + documentUri + " FAILED"); + failed = true; + } + break; + + default: + throw new UnsupportedOperationException("Currently only Collection or Document container locks are supported"); + } + } + + if(!failed) { + traceln(() -> "PASSED."); + } + } + + private @Nullable LockManager getLockManager() { + if(BrokerPool.isConfigured()) { + try { + return BrokerPool.getInstance().getLockManager(); + } catch (final EXistException e) { + throw new IllegalStateException(e); + } + } else { + traceln(() -> "Waiting for BrokerPool to become available..."); + return null; + } + } + + private boolean hasDocumentLock(final LockManager lockManager, final XmldbURI documentUri, final EnsureLockDetail ensureLockDetail) { + switch (ensureLockDetail.mode) { + case READ_LOCK: + return lockManager.isDocumentLockedForRead(documentUri) || + lockManager.isDocumentLockedForWrite(documentUri); + + case WRITE_LOCK: + return lockManager.isDocumentLockedForWrite(documentUri); + + case NO_LOCK: + if(ensureLockDetail.modeWasFromParam) { + traceln(() -> "Nothing to trace for NO_LOCK"); // TODO(AR) consider implementation strategies? although it is likely we will obsolete NO_LOCK + return true; + } + //intentional fallthrough + + default: + throw new UnsupportedOperationException("Currently only READ or WRITE lock modes are supported"); + } + } + + + /** + * Checks if a Collection is locked explicitly, or implicitly through a parent lock for the correct mode on the sub-tree. 
+ * + * @true if a collection is locked either explicitly or implicitly + */ + private boolean hasCollectionLock(final LockManager lockManager, final XmldbURI collectionUri, final EnsureLockDetail ensureLockDetail) { + XmldbURI uri = collectionUri; + while(uri.numSegments() > 0) { + + switch (ensureLockDetail.mode) { + case READ_LOCK: + if(lockManager.isCollectionLockedForRead(uri) || + lockManager.isCollectionLockedForWrite(uri)) { + return true; + } + break; + + case WRITE_LOCK: + if(lockManager.isCollectionLockedForWrite(uri)) { + return true; + } + break; + + case NO_LOCK: + if(ensureLockDetail.modeWasFromParam) { + traceln(() -> "Nothing to trace for NO_LOCK"); // TODO(AR) consider implementation strategies? although it is likely we will obsolete NO_LOCK + return true; + } + //intentional fallthrough + + default: + throw new UnsupportedOperationException("Currently only READ or WRITE lock modes are supported"); + } + + // loop round to parent collection + uri = uri.removeLastSegment(); + } + + return false; + } + + private boolean hasNoDocumentLocks(final LockManager lockManager, final XmldbURI documentUri) { + return !(lockManager.isDocumentLockedForRead(documentUri) + && lockManager.isDocumentLockedForWrite(documentUri)); + } + + private boolean hasNoCollectionLocks(final LockManager lockManager, final XmldbURI collectionUri) { + return !(lockManager.isCollectionLockedForRead(collectionUri) + && lockManager.isCollectionLockedForWrite(collectionUri)); + } + + private String toAnnotationString(final Class annotationClass, final EnsureLockDetail ensureLockDetail) { + return "@" + annotationClass.getSimpleName() + "(mode=" + ensureLockDetail.mode + ", type=" + ensureLockDetail.type + ")"; + } + + private String toAnnotationString(final Class annotationClass, final Lock.LockType lockType) { + return "@" + annotationClass.getSimpleName() + "(type=" + lockType + ")"; + } + + private EnsureLockDetail resolveContainerLockDetail(final AnnotatedMethodConstraint 
lockConstraint, final Object args[]) { + final Tuple2 mode = getLockMode(lockConstraint.getAnnotation(), args); + + final Lock.LockType type; + if(Collection.class.isAssignableFrom(lockConstraint.getMethod().getDeclaringClass())) { + type = Lock.LockType.COLLECTION; + } else if(Document.class.isAssignableFrom(lockConstraint.getMethod().getDeclaringClass())) { + type = Lock.LockType.DOCUMENT; + } else { + // error + throw new IllegalArgumentException("@EnsureContainerLocked is specified on a method whose container object is neither a Collection nor a Document"); + } + + return new EnsureLockDetail(mode._1, mode._2, type); + } + + private EnsureLockDetail resolveLockDetail(final AnnotatedMethodConstraint lockConstraint, final Object args[]) { + final Tuple2 mode = getLockMode(lockConstraint.getAnnotation(), args); + + final Lock.LockType type; + if (lockConstraint.getAnnotation().type() != Lock.LockType.UNKNOWN) { + type = lockConstraint.getAnnotation().type(); + } else if (Collection.class.isAssignableFrom(lockConstraint.getMethod().getReturnType())) { + type = Lock.LockType.COLLECTION; + } else if (Document.class.isAssignableFrom(lockConstraint.getMethod().getReturnType())) { + type = Lock.LockType.DOCUMENT; + } else { + // error + throw new IllegalArgumentException("@EnsureLocked is specified on a method that returns neither a Collection nor a Document"); + } + + return new EnsureLockDetail(mode._1, mode._2, type); + } + + private EnsureLockDetail resolveLockDetail(final AnnotatedParameterConstraint lockConstraint, final Object args[]) { + final Tuple2 mode = getLockMode(lockConstraint.getAnnotation(), args); + + final Lock.LockType type; + if (lockConstraint.getAnnotation().type() != Lock.LockType.UNKNOWN) { + type = lockConstraint.getAnnotation().type(); + } else if (Collection.class.isAssignableFrom(lockConstraint.getParameter().getType())) { + type = Lock.LockType.COLLECTION; + } else if 
(Document.class.isAssignableFrom(lockConstraint.getParameter().getType())) { + type = Lock.LockType.DOCUMENT; + } else if (XmldbURI.class.isAssignableFrom(lockConstraint.getParameter().getType())) { + throw new IllegalArgumentException("@EnsureLocked is specified on an XmldbURI method parameter, but is missing the `lockType` value"); + } else { + // error + throw new IllegalArgumentException("@EnsureLocked is specified on a method parameter that is neither a Collection, Document, nor an XmldbURI"); + } + + return new EnsureLockDetail(mode._1, mode._2, type); + } + + private Lock.LockType resolveContainerLockDetail(final AnnotatedMethodConstraint lockConstraint) { + final Lock.LockType type; + if(Collection.class.isAssignableFrom(lockConstraint.getMethod().getDeclaringClass())) { + type = Lock.LockType.COLLECTION; + } else if(Document.class.isAssignableFrom(lockConstraint.getMethod().getDeclaringClass())) { + type = Lock.LockType.DOCUMENT; + } else { + // error + throw new IllegalArgumentException("@EnsureContainerUnlocked is specified on a method whose container object is neither a Collection nor a Document"); + } + + return type; + } + + private Lock.LockType resolveLockDetail(final AnnotatedMethodConstraint lockConstraint) { + final Lock.LockType type; + if (lockConstraint.getAnnotation().type() != Lock.LockType.UNKNOWN) { + type = lockConstraint.getAnnotation().type(); + } else if (Collection.class.isAssignableFrom(lockConstraint.getMethod().getReturnType())) { + type = Lock.LockType.COLLECTION; + } else if (Document.class.isAssignableFrom(lockConstraint.getMethod().getReturnType())) { + type = Lock.LockType.DOCUMENT; + } else { + // error + throw new IllegalArgumentException("@EnsureUnlocked is specified on a method that returns neither a Collection nor a Document"); + } + + return type; + } + + private Lock.LockType resolveLockDetail(final AnnotatedParameterConstraint lockConstraint) { + final Lock.LockType type; + if (lockConstraint.getAnnotation().type() != 
Lock.LockType.UNKNOWN) { + type = lockConstraint.getAnnotation().type(); + } else if (Collection.class.isAssignableFrom(lockConstraint.getParameter().getType())) { + type = Lock.LockType.COLLECTION; + } else if (Document.class.isAssignableFrom(lockConstraint.getParameter().getType())) { + type = Lock.LockType.DOCUMENT; + } else if (XmldbURI.class.isAssignableFrom(lockConstraint.getParameter().getType())) { + throw new IllegalArgumentException("@EnsureUnlocked is specified on an XmldbURI method parameter, but is missing the `lockType` value"); + } else { + // error + throw new IllegalArgumentException("@EnsureUnlocked is specified on a method parameter that is neither a Collection, Document, nor an XmldbURI"); + } + + return type; + } + + + private Tuple2 getLockMode(final EnsureLocked ensureLocked, final Object args[]) { + return getLockMode(ensureLocked.mode(), ensureLocked.modeParam(), args); + } + + private Tuple2 getLockMode(final EnsureContainerLocked ensureContainerLocked, final Object args[]) { + return getLockMode(ensureContainerLocked.mode(), ensureContainerLocked.modeParam(), args); + } + + /** + * @return A tuple, whose first value is the lock mode, + * and whose second value is true if the mode was resolved from args. 
+ */ + private Tuple2 getLockMode(final Lock.LockMode specifiedLockMode, final short specifiedLockModeParam, final Object args[]) { + final Tuple2 mode; + if(specifiedLockMode != Lock.LockMode.NO_LOCK) { + mode = new Tuple2<>(specifiedLockMode, false); + } else if (specifiedLockModeParam != EnsureLocked.NO_MODE_PARAM) { + final short idx = specifiedLockModeParam; + if(idx < args.length) { + final Object arg = args[idx]; + if(arg instanceof Lock.LockMode) { + mode = new Tuple2<>((Lock.LockMode)arg, true); + } else { + throw new IllegalArgumentException("modeParam was specified on @EnsureLocked but its index was not a Lock.LockMode parameter, found: " + arg.getClass().getName()); + } + } else { + throw new IllegalArgumentException("modeParam was specified on @EnsureLocked but its index was out-of-bounds"); + } + } else { + final List lockModeArgs = getLockModeArgs(args); + if(lockModeArgs.size() == 1) { + mode = new Tuple2<>(lockModeArgs.get(0), true); + } else if(lockModeArgs.isEmpty()) { + throw new IllegalArgumentException("No mode or modeParam was specified on @EnsureLocked and no LockMode parameter was found"); + } else { + throw new IllegalArgumentException("No mode or modeParam was specified on @EnsureLocked and more than one LockMode parameter was found"); + } + } + + return mode; + } + + private List getLockModeArgs(final Object[] args) { + final List lockModeArgs = new ArrayList<>(); + for(final Object arg : args) { + if(arg instanceof Lock.LockMode) { + lockModeArgs.add((Lock.LockMode)arg); + } + } + return lockModeArgs; + } + + private @Nullable AnnotatedMethodConstraint getMethodAnnotation(final Method method, final Class annotationClass) { + final T methodAnnotation = method.getDeclaredAnnotation(annotationClass); + if(methodAnnotation != null) { + return new AnnotatedMethodConstraint<>(methodAnnotation, method); + } + + final Class declaringClazz = method.getDeclaringClass(); + + final Class superClazz = declaringClazz.getSuperclass(); + if(superClazz 
!= null && !superClazz.equals(Object.class)) { + final Method superMethod = findMethodOnOtherClass(method, superClazz); + if (superMethod != null) { + final AnnotatedMethodConstraint superMethodAnnotation = + getMethodAnnotation(superMethod, annotationClass); + if(superMethodAnnotation != null) { + return superMethodAnnotation; + } + } + } + + for (final Class interfaceClazz : declaringClazz.getInterfaces()) { + final Method interfaceMethod = findMethodOnOtherClass(method, interfaceClazz); + if (interfaceMethod != null) { + final AnnotatedMethodConstraint interfaceMethodAnnotation = + getMethodAnnotation(interfaceMethod, annotationClass); + if(interfaceMethodAnnotation != null) { + return interfaceMethodAnnotation; + } + } + } + + return null; + } + + private List> getAllParameterAnnotations(final Method method, + final Class annotationClass) { + final List> annotatedParameters = new ArrayList<>(); + getAllParameterAnnotations(method, annotationClass, annotatedParameters); + return annotatedParameters; + } + + private void getAllParameterAnnotations(final Method method, final Class annotationClass, + final List> results) { + final Parameter[] parameters = method.getParameters(); + for(int i = 0; i < parameters.length; i++) { + final Parameter parameter = parameters[i]; + final T parameterAnnotation = parameter.getDeclaredAnnotation(annotationClass); + if(parameterAnnotation != null) { + results.add(new AnnotatedParameterConstraint<>(parameterAnnotation, parameter, i)); + } + } + + final Class declaringClazz = method.getDeclaringClass(); + + final Class superClazz = declaringClazz.getSuperclass(); + if(superClazz != null && !superClazz.equals(Object.class)) { + final Method superMethod = findMethodOnOtherClass(method, superClazz); + if (superMethod != null) { + getAllParameterAnnotations(superMethod, annotationClass, results); + } + } + + for (final Class interfaceClazz : declaringClazz.getInterfaces()) { + final Method interfaceMethod = 
findMethodOnOtherClass(method, interfaceClazz); + if (interfaceMethod != null) { + getAllParameterAnnotations(interfaceMethod, annotationClass, results); + } + } + } + + private @Nullable Method findMethodOnOtherClass(final Method method, final Class otherClazz) { + try { + return otherClazz.getDeclaredMethod(method.getName(), method.getParameterTypes()); + } catch (final NoSuchMethodException e) { + // nothing to do + return null; + } + } + + private static void report(final String message) throws LockException { + final String reportMessage; + if(OUTPUT_STACK_DEPTH > 0) { + reportMessage = message + ": " + Stacktrace.asString(Stacktrace.substack(Thread.currentThread().getStackTrace(), 2, OUTPUT_STACK_DEPTH)); + } else { + reportMessage = message; + } + + if(OUTPUT_TO_CONSOLE) { + System.err.println(reportMessage); + } else { + LOG.error(reportMessage); + } + + if(ENFORCE) { + throw new LockException(message); + } + } + + private static void traceln(final Supplier messageFn) { + if(TRACE) { + if(OUTPUT_TO_CONSOLE) { + System.out.println(messageFn.get()); + } else { + LOG.trace(messageFn.get()); + } + } + } + + private static class AnnotatedParameterConstraint extends AnnotatedConstraint { + private final int parameterIndex; + public AnnotatedParameterConstraint(final T annotation, final Parameter parameter, final int parameterIndex) { + super(annotation, parameter); + this.parameterIndex = parameterIndex; + } + + public Parameter getParameter() { + return annotationTarget; + } + + public int getParameterIndex() { + return parameterIndex; + } + } + + private static class AnnotatedMethodConstraint extends AnnotatedConstraint { + public AnnotatedMethodConstraint(final T annotation, final Method method) { + super(annotation, method); + } + + public Method getMethod() { + return annotationTarget; + } + } + + private static abstract class AnnotatedConstraint { + private final T annotation; + protected final U annotationTarget; + + public AnnotatedConstraint(final T 
annotation, final U annotationTarget) { + this.annotation = annotation; + this.annotationTarget = annotationTarget; + } + + public T getAnnotation() { + return annotation; + } + } + + private static class EnsureLockDetail { + private final Lock.LockMode mode; + private final boolean modeWasFromParam; + private final Lock.LockType type; + + public EnsureLockDetail(final Lock.LockMode mode, final boolean modeWasFromParam, final Lock.LockType type) { + this.mode = mode; + this.modeWasFromParam = modeWasFromParam; + this.type = type; + } + } +} diff --git a/src/org/exist/storage/lock/EnsureUnlocked.java b/src/org/exist/storage/lock/EnsureUnlocked.java new file mode 100644 index 00000000000..919a71a1ab2 --- /dev/null +++ b/src/org/exist/storage/lock/EnsureUnlocked.java @@ -0,0 +1,69 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.storage.lock; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * An annotation for indicating that no locks + * must be held on parameters to a method, or on return types. 
+ * + * As well as explicitly expressing intention, this annotation can be used + * with {@link EnsureLockingAspect} to compile into the code runtime checks + * which will enforce the locking policy. + * + * Typically this is used with parameters of type {@link org.exist.collections.Collection} + * and {@link org.exist.dom.persistent.DocumentImpl}. + * + * If this annotation is + * used on an {@link org.exist.xmldb.XmldbURI} then a {@code type} value must + * also be provided to indicate the type of the lock identified by the uri. + * + * For example we may indicate that Collection parameters to methods + * should not be locked: + *
+ * {@code
+ * public LockedCollection lockCollection(@EnsureUnlocked final Collection collection) {
+ *
+ *    ...
+ *
+ * }
+ * }
+ * 
+ + * + * @author + */ +@NotThreadSafe +public class LockEventLogListener implements LockTable.LockEventListener { + private final Logger log; + private final Level level; + + /** + * @param log The Log4j log + * @param level The level at which to to log the lock events to Log4j + */ + public LockEventLogListener(final Logger log, final Level level) { + this.log = log; + this.level = level; + } + + @Override + public void accept(final LockTable.LockAction lockAction) { + if(log.isEnabled(level)) { + log.log(level, lockAction); + } + } +} diff --git a/src/org/exist/storage/lock/LockEventXmlListener.java b/src/org/exist/storage/lock/LockEventXmlListener.java new file mode 100644 index 00000000000..a5555abcee7 --- /dev/null +++ b/src/org/exist/storage/lock/LockEventXmlListener.java @@ -0,0 +1,174 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
package org.exist.storage.lock;

import net.jcip.annotations.NotThreadSafe;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import javax.annotation.Nullable;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * A lock event listener which formats events as XML and writes them to a file.
 *
 * The output file is opened (appending, created if absent) when the listener
 * is {@link #registered()} and closed when it is {@link #unregistered()};
 * events received outside that window are silently dropped.
 *
 * @author Adam Retter
 */
@NotThreadSafe
public class LockEventXmlListener implements LockTable.LockEventListener {

    private final static Logger LOG = LogManager.getLogger(LockEventXmlListener.class);

    // guards accept(): events arriving before registration or after
    // de-registration are discarded
    private volatile boolean registered = false;

    private final Path xmlFile;
    // NOTE(review): prettyPrint is stored but never consulted below —
    // output is always un-indented; confirm whether indentation support
    // was intended to be wired into the XMLStreamWriter.
    private final boolean prettyPrint;

    private OutputStream os = null;
    private XMLStreamWriter xmlStreamWriter = null;


    public LockEventXmlListener(final Path xmlFile) {
        this(xmlFile, false);
    }

    public LockEventXmlListener(final Path xmlFile, final boolean prettyPrint) {
        this.xmlFile = xmlFile;
        this.prettyPrint = prettyPrint;
    }

    /**
     * Opens the output file and writes the XML document prolog
     * and the root {@code <lockEvents>} element.
     *
     * Errors are logged, not propagated; in that case the writer stays
     * {@code null} and subsequent events are ignored.
     */
    @Override
    public void registered() {
        this.registered = true;
        try {
            this.os = Files.newOutputStream(xmlFile,
                    StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE);
            final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newInstance();
            this.xmlStreamWriter = xmlOutputFactory.createXMLStreamWriter(os, UTF_8.name());

            this.xmlStreamWriter.writeStartDocument(UTF_8.name(), "1.0");
            this.xmlStreamWriter.writeStartElement("lockEvents");
        } catch (final IOException | XMLStreamException e) {
            LOG.error(e);
        }
    }

    /**
     * Closes the root element, the document, the stream writer and the
     * underlying file, then marks the listener as unregistered.
     *
     * Errors are logged, not propagated.
     */
    @Override
    public void unregistered() {
        try {
            if (xmlStreamWriter != null) {
                this.xmlStreamWriter.writeEndElement();
                this.xmlStreamWriter.writeEndDocument();
                this.xmlStreamWriter.close();
                this.xmlStreamWriter = null;
            }
        } catch (final XMLStreamException e) {
            LOG.error(e);
        }

        try {
            if (os != null) {
                this.os.close();
                this.os = null;
            }
        } catch (final IOException e) {
            LOG.error(e);
        }

        this.registered = false;
    }

    public boolean isRegistered() {
        return registered;
    }

    /**
     * Serializes one lock event as a {@code <lockEvent>} element.
     *
     * @param lockAction the lock event to record
     */
    @Override
    public void accept(final LockTable.LockAction lockAction) {
        if (!registered) {
            return;
        }

        if (xmlStreamWriter != null) {
            try {
                xmlStreamWriter.writeStartElement("lockEvent");

                writeLongElement("timestamp", lockAction.timestamp);
                writeStringElement("action", lockAction.action.name());
                writeLongElement("groupId", lockAction.groupId);
                writeStringElement("id", lockAction.id);
                writeStringElement("thread", lockAction.threadName);
                stackTraceToXml(lockAction.stackTrace);

                xmlStreamWriter.writeStartElement("lock");
                writeStringElement("type", lockAction.lockType.name());
                writeStringElement("mode", lockAction.mode.name());
                writeIntElement("holdCount", lockAction.count);
                xmlStreamWriter.writeEndElement();

                xmlStreamWriter.writeEndElement();
            } catch (final XMLStreamException e) {
                LOG.error(e);
            }
        }
    }

    /** Writes {@code <name>value</name>}. */
    private void writeStringElement(final String name, final String value) throws XMLStreamException {
        xmlStreamWriter.writeStartElement(name);
        xmlStreamWriter.writeCharacters(value);
        xmlStreamWriter.writeEndElement();
    }

    /** Writes {@code <name>value</name>} for a long value. */
    private void writeLongElement(final String name, final long value) throws XMLStreamException {
        xmlStreamWriter.writeStartElement(name);
        xmlStreamWriter.writeCharacters(Long.toString(value));
        xmlStreamWriter.writeEndElement();
    }

    /** Writes {@code <name>value</name>} for an int value. */
    private void writeIntElement(final String name, final int value) throws XMLStreamException {
        xmlStreamWriter.writeStartElement(name);
        xmlStreamWriter.writeCharacters(Integer.toString(value));
        xmlStreamWriter.writeEndElement();
    }

    /**
     * Writes the stack trace (if any) as a {@code <trace>} element with
     * one {@code <frame>} child per stack frame.
     *
     * Renamed from {@code stackTraceToJson}: the method emits XML, the
     * old name was a copy/paste left-over from a JSON listener.
     *
     * @param stackTrace the frames to serialize, may be null
     */
    private void stackTraceToXml(@Nullable final StackTraceElement[] stackTrace) throws XMLStreamException {
        xmlStreamWriter.writeStartElement("trace");

        if (stackTrace != null) {
            for (final StackTraceElement stackTraceElement : stackTrace) {
                xmlStreamWriter.writeStartElement("frame");
                xmlStreamWriter.writeAttribute("methodName", stackTraceElement.getMethodName());
                xmlStreamWriter.writeAttribute("className", stackTraceElement.getClassName());
                xmlStreamWriter.writeAttribute("lineNumber", Integer.toString(stackTraceElement.getLineNumber()));
                xmlStreamWriter.writeEndElement();
            }
        }

        xmlStreamWriter.writeEndElement();
    }
}
package org.exist.storage.lock;

import com.evolvedbinary.j8fu.tuple.Tuple3;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.exist.storage.lock.Lock.LockType;
import org.exist.util.LockException;
import org.exist.util.WeakLazyStripes;
import org.exist.xmldb.XmldbURI;
import uk.ac.ic.doc.slurp.multilock.MultiLock;

import java.util.Arrays;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;

/**
 * A Lock Manager for Locks that are used across database instance functions.
 *
 * There is a unique lock for each ID, and calls with the same ID will always
 * return the same lock. Different IDs will always receive different locks.
 *
 * The locking protocol for Collection locks is taken from the paper:
 * Granularity of Locks in a Shared Data Base - Gray, Lorie and Putzolu 1975
 * {@see https://pdfs.semanticscholar.org/5acd/43c51fa5e677b0c242b065a64f5948af022c.pdf}
 * specifically we have adopted the acquisition algorithm from Section 3.2 of the paper.
 *
 * Our adaptions enable us to specify either a multi-writer/multi-reader approach
 * between Collection sub-trees or a single-writer/multi-reader approach on the
 * entire Collection tree. Locking a Collection implicitly implies locking all
 * descendant Collections with the same mode, which reduces the number of locks
 * required for manipulating Collection sub-trees.
 *
 * The locking protocol for Documents is entirely flat, and is unrelated to
 * Collection locking; deadlocks can still occur between Collections and Documents.
 *
 * @author Adam Retter
 */
public class LockManager {

    public final static String PROP_ENABLE_COLLECTIONS_MULTI_WRITER = "exist.lockmanager.collections.multiwriter";
    public final static String PROP_UPGRADE_CHECK = "exist.lockmanager.upgrade.check";
    public final static String PROP_WARN_WAIT_ON_READ_FOR_WRITE = "exist.lockmanager.warn.waitonreadforwrite";
    public final static String PROP_USE_MULTILOCK_FOR_DOCUMENTS = "exist.lockmanager.documents.multilock";

    private static final Logger LOG = LogManager.getLogger(LockManager.class);
    // NOTE(review): declared but not passed to `new ReentrantReadWriteLock()` in
    // createDocumentLock below — confirm whether fair scheduling was intended
    // to be enabled there (the comment suggests FIFO ordering is wanted).
    private static final boolean USE_FAIR_SCHEDULER = true; //Java's ReentrantReadWriteLock must use the Fair Scheduler to get FIFO like ordering
    private static final boolean USE_MULTILOCK_FOR_DOCUMENTS = Boolean.getBoolean(PROP_USE_MULTILOCK_FOR_DOCUMENTS);

    /**
     * Set to true to enable Multi-Writer/Multi-Reader semantics for
     * the Collection Hierarchy as opposed to the default Single-Writer/Multi-Reader
     */
    private volatile boolean collectionsMultiWriter = Boolean.getBoolean(PROP_ENABLE_COLLECTIONS_MULTI_WRITER);

    /**
     * Set to true to enable checking for lock upgrading within the same
     * thread, i.e. READ_LOCK -&gt; WRITE_LOCK
     */
    private volatile boolean upgradeCheck = Boolean.getBoolean(PROP_UPGRADE_CHECK);

    /**
     * Set to true to enable warning when a thread wants to acquire the WRITE_LOCK
     * but another thread holds the READ_LOCK
     */
    private volatile boolean warnWaitOnReadForWrite = Boolean.getBoolean(PROP_WARN_WAIT_ON_READ_FOR_WRITE);


    private final LockTable lockTable;
    private final WeakLazyStripes<String, MultiLock> collectionLocks;
    private final WeakLazyStripes<String, DocumentLock> documentLocks;
    private final WeakLazyStripes<String, ReentrantLock> btreeLocks;


    public LockManager(final String brokerPoolId, final ThreadGroup threadGroup, final int concurrencyLevel) {
        this.lockTable = new LockTable(brokerPoolId, threadGroup);
        this.collectionLocks = new WeakLazyStripes<>(concurrencyLevel, LockManager::createCollectionLock);
        this.documentLocks = new WeakLazyStripes<>(concurrencyLevel, LockManager::createDocumentLock);
        this.btreeLocks = new WeakLazyStripes<>(concurrencyLevel, LockManager::createBtreeLock);
        LOG.info("Configured LockManager with concurrencyLevel={}", concurrencyLevel);
    }

    /**
     * Get the lock table.
     *
     * @return the lock table.
     */
    public LockTable getLockTable() {
        return lockTable;
    }

    /**
     * Creates a new lock for a Collection
     * will be Striped by the collectionPath
     */
    private static MultiLock createCollectionLock(final String collectionPath) {
        return new MultiLock();
    }

    /**
     * Creates a new lock for a Document
     * will be Striped by the documentPath
     */
    private static DocumentLock createDocumentLock(final String documentPath) {
        if (USE_MULTILOCK_FOR_DOCUMENTS) {
            return new MultiLockDocumentLockAdapter(new MultiLock());
        } else {
            return new ReentrantReadWriteLockDocumentLockAdapter(new ReentrantReadWriteLock());
        }
    }

    /**
     * Creates a new lock for a {@link org.exist.storage.btree.BTree}
     * will be Striped by the btreeFileName
     */
    private static ReentrantLock createBtreeLock(final String btreeFileName) {
        return new ReentrantLock();
    }

    /**
     * Retrieves a lock for a Collection
     *
     * This function is concerned with just the lock object
     * and has no knowledge of the state of the lock. The only
     * guarantee is that if this lock has not been requested before
     * then it will be provided in the unlocked state
     *
     * @param collectionPath The path of the Collection for which a lock is requested
     *
     * @return A lock for the Collection
     */
    MultiLock getCollectionLock(final String collectionPath) {
        return collectionLocks.get(collectionPath);
    }

    /**
     * Acquires a READ_LOCK on a Collection (and implicitly all descendant Collections).
     *
     * Locks each ancestor with INTENTION_READ and the leaf with READ_LOCK,
     * root-to-leaf, per the Gray et al. hierarchical locking protocol.
     *
     * @param collectionPath The path of the Collection for which a lock is requested.
     *
     * @return A READ_LOCK on the Collection.
     *
     * @throws LockException if a lock could not be acquired
     */
    public ManagedCollectionLock acquireCollectionReadLock(final XmldbURI collectionPath) throws LockException {
        final XmldbURI[] segments = collectionPath.getPathSegments();

        // groupId correlates all lock table events for this hierarchical acquisition
        final long groupId = System.nanoTime();

        String path = "";
        @SuppressWarnings("unchecked")
        final Tuple3<MultiLock, Lock.LockMode, String>[] locked = new Tuple3[segments.length];
        for (int i = 0; i < segments.length; i++) {
            path += '/' + segments[i].toString();

            final Lock.LockMode lockMode;
            if (i + 1 == segments.length) {
                lockMode = Lock.LockMode.READ_LOCK;         //leaf
            } else {
                lockMode = Lock.LockMode.INTENTION_READ;    //ancestor
            }
            final MultiLock lock = getCollectionLock(path);

            lockTable.attempt(groupId, path, LockType.COLLECTION, lockMode);
            if (lock(lock, lockMode)) {
                locked[i] = new Tuple3<>(lock, lockMode, path);
                lockTable.acquired(groupId, path, LockType.COLLECTION, lockMode);
            } else {
                // NOTE: unreachable while lock() always returns true, kept for
                // when interruptible locking is implemented
                lockTable.attemptFailed(groupId, path, LockType.COLLECTION, lockMode);

                unlockAll(locked, l -> lockTable.released(groupId, l._3, LockType.COLLECTION, l._2));

                throw new LockException("Unable to acquire " + lockMode + " for: " + path);
            }
        }

        return new ManagedCollectionLock(
                collectionPath,
                Arrays.stream(locked).map(Tuple3::get_1).toArray(MultiLock[]::new),
                () -> unlockAll(locked, l -> lockTable.released(groupId, l._3, LockType.COLLECTION, l._2))
        );
    }

    /**
     * Locks a lock object.
     *
     * @param lock the lock object to lock.
     * @param lockMode the mode of the {@code lock} to acquire.
     *
     * @return true, if we were able to lock with the mode.
     */
    private boolean lock(final MultiLock lock, final Lock.LockMode lockMode) {
        switch (lockMode) {
            case INTENTION_READ:
                lock.intentionReadLock();
                break;

            case INTENTION_WRITE:
                lock.intentionWriteLock();
                break;

            case READ_LOCK:
                lock.readLock();
                break;

            case WRITE_LOCK:
                lock.writeLock();
                break;

            default:
                throw new UnsupportedOperationException(); // TODO(AR) implement the other modes
        }

        return true; //TODO(AR) switch to lock interruptibly above!
    }

    /**
     * Releases an array of locked locks for the modes with which they were locked
     *
     * Locks are released in the opposite to their acquisition order
     *
     * @param locked An array of locks in acquisition order
     * @param unlockListener called after each lock is released, e.g. for lock table accounting
     */
    private void unlockAll(final Tuple3<MultiLock, Lock.LockMode, String>[] locked,
            final Consumer<Tuple3<MultiLock, Lock.LockMode, String>> unlockListener) {
        for (int i = locked.length - 1; i >= 0; i--) {
            final Tuple3<MultiLock, Lock.LockMode, String> lock = locked[i];
            unlock(lock._1, lock._2);
            unlockListener.accept(lock);
        }
    }

    /**
     * Unlocks a lock object.
     *
     * @param lock The lock object to unlock.
     * @param lockMode The mode of the {@code lock} to release.
     */
    private void unlock(final MultiLock lock, final Lock.LockMode lockMode) {
        switch (lockMode) {
            case INTENTION_READ:
                lock.unlockIntentionRead();
                break;

            case INTENTION_WRITE:
                lock.unlockIntentionWrite();
                break;

            case READ_LOCK:
                lock.unlockRead();
                break;

            case WRITE_LOCK:
                lock.unlockWrite();
                break;

            default:
                throw new UnsupportedOperationException(); // TODO(AR) implement the other modes
        }
    }

    /**
     * Acquires a WRITE_LOCK on a Collection (and implicitly all descendant Collections).
     *
     * @param collectionPath The path of the Collection for which a lock is requested.
     *
     * @return A WRITE_LOCK on the Collection.
     *
     * @throws LockException if a lock could not be acquired
     */
    public ManagedCollectionLock acquireCollectionWriteLock(final XmldbURI collectionPath) throws LockException {
        return acquireCollectionWriteLock(collectionPath, false);
    }

    /**
     * Acquires a WRITE_LOCK on a Collection (and implicitly all descendant Collections).
     *
     * @param collectionPath The path of the Collection for which a lock is requested.
     * @param lockParent true if we should also explicitly write lock the parent Collection.
     *
     * @return A WRITE_LOCK on the Collection.
     *
     * @throws LockException if a lock could not be acquired
     */
    ManagedCollectionLock acquireCollectionWriteLock(final XmldbURI collectionPath, final boolean lockParent) throws LockException {
        final XmldbURI[] segments = collectionPath.getPathSegments();

        final long groupId = System.nanoTime();

        String path = "";
        @SuppressWarnings("unchecked")
        final Tuple3<MultiLock, Lock.LockMode, String>[] locked = new Tuple3[segments.length];
        for (int i = 0; i < segments.length; i++) {
            path += '/' + segments[i].toString();

            final Lock.LockMode lockMode;
            if (lockParent && i + 2 == segments.length) {
                lockMode = Lock.LockMode.WRITE_LOCK;    // parent
            } else if (i + 1 == segments.length) {
                lockMode = Lock.LockMode.WRITE_LOCK;    // leaf
            } else {
                // ancestor

                if (!collectionsMultiWriter) {
                    // single-writer/multi-reader
                    lockMode = Lock.LockMode.WRITE_LOCK;
                } else {
                    // multi-writer/multi-reader
                    lockMode = Lock.LockMode.INTENTION_WRITE;
                }
            }
            final MultiLock lock = getCollectionLock(path);

            // optional diagnostic: detect READ -> WRITE upgrade by the same thread,
            // which would self-deadlock
            if (upgradeCheck && lockMode == Lock.LockMode.WRITE_LOCK
                    && (lock.getIntentionReadHoldCount() > 0 || lock.getReadHoldCount() > 0)) {
                throw new LockException("Lock upgrading would lead to a self-deadlock: " + path);
            }

            if (warnWaitOnReadForWrite && lockMode == Lock.LockMode.WRITE_LOCK) {
                if (lock.getIntentionReadLockCount() > 0) {
                    LOG.warn("About to acquire WRITE_LOCK for: {}, but INTENTION_READ_LOCK held by other thread(s): ", path);
                } else if (lock.getReadLockCount() > 0) {
                    LOG.warn("About to acquire WRITE_LOCK for: {}, but READ_LOCK held by other thread(s): ", path);
                }
            }

            lockTable.attempt(groupId, path, LockType.COLLECTION, lockMode);
            if (lock(lock, lockMode)) {
                locked[i] = new Tuple3<>(lock, lockMode, path);
                lockTable.acquired(groupId, path, LockType.COLLECTION, lockMode);
            } else {
                lockTable.attemptFailed(groupId, path, LockType.COLLECTION, lockMode);

                unlockAll(locked, l -> lockTable.released(groupId, l._3, LockType.COLLECTION, l._2));

                throw new LockException("Unable to acquire " + lockMode + " for: " + path);
            }
        }

        return new ManagedCollectionLock(
                collectionPath,
                Arrays.stream(locked).map(Tuple3::get_1).toArray(MultiLock[]::new),
                () -> unlockAll(locked, l -> lockTable.released(groupId, l._3, LockType.COLLECTION, l._2))
        );
    }

    /**
     * Returns true if a WRITE_LOCK is held for a Collection
     *
     * @param collectionPath The URI of the Collection within the database
     *
     * @return true if a WRITE_LOCK is held
     */
    public boolean isCollectionLockedForWrite(final XmldbURI collectionPath) {
        final MultiLock existingLock = getCollectionLock(collectionPath.toString());
        return existingLock.getWriteLockCount() > 0;
    }

    /**
     * Returns true if a READ_LOCK is held for a Collection
     *
     * @param collectionPath The URI of the Collection within the database
     *
     * @return true if a READ_LOCK is held
     */
    public boolean isCollectionLockedForRead(final XmldbURI collectionPath) {
        final MultiLock existingLock = getCollectionLock(collectionPath.toString());
        return existingLock.getReadLockCount() > 0;
    }

    /**
     * Retrieves a lock for a Document
     *
     * This function is concerned with just the lock object
     * and has no knowledge of the state of the lock. The only
     * guarantee is that if this lock has not been requested before
     * then it will be provided in the unlocked state
     *
     * @param documentPath The path of the Document for which a lock is requested
     *
     * @return A lock for the Document
     */
    DocumentLock getDocumentLock(final String documentPath) {
        return documentLocks.get(documentPath);
    }

    /**
     * Acquire a READ_LOCK on a Document
     *
     * @param documentPath The URI of the Document within the database
     *
     * @return the lock for the Document
     *
     * @throws LockException if the lock could not be acquired
     */
    public ManagedDocumentLock acquireDocumentReadLock(final XmldbURI documentPath) throws LockException {
        final long groupId = System.nanoTime();
        final String path = documentPath.toString();

        final DocumentLock lock = getDocumentLock(path);
        try {
            lockTable.attempt(groupId, path, LockType.DOCUMENT, Lock.LockMode.READ_LOCK);

            lock.readLock().lockInterruptibly();

            lockTable.acquired(groupId, path, LockType.DOCUMENT, Lock.LockMode.READ_LOCK);
        } catch (final InterruptedException e) {
            lockTable.attemptFailed(groupId, path, LockType.DOCUMENT, Lock.LockMode.READ_LOCK);
            throw new LockException("Unable to acquire READ_LOCK for: " + path, e);
        }

        return new ManagedDocumentLock(documentPath, lock.readLock(), () -> {
            lock.readLock().unlock();
            lockTable.released(groupId, path, LockType.DOCUMENT, Lock.LockMode.READ_LOCK);
        });
    }

    /**
     * Acquire a WRITE_LOCK on a Document
     *
     * @param documentPath The URI of the Document within the database
     *
     * @return the lock for the Document
     *
     * @throws LockException if the lock could not be acquired
     */
    public ManagedDocumentLock acquireDocumentWriteLock(final XmldbURI documentPath) throws LockException {
        final long groupId = System.nanoTime();
        final String path = documentPath.toString();

        final DocumentLock lock = getDocumentLock(path);
        try {
            lockTable.attempt(groupId, path, LockType.DOCUMENT, Lock.LockMode.WRITE_LOCK);

            lock.writeLock().lockInterruptibly();

            lockTable.acquired(groupId, path, LockType.DOCUMENT, Lock.LockMode.WRITE_LOCK);
        } catch (final InterruptedException e) {
            lockTable.attemptFailed(groupId, path, LockType.DOCUMENT, Lock.LockMode.WRITE_LOCK);
            throw new LockException("Unable to acquire WRITE_LOCK for: " + path, e);
        }

        return new ManagedDocumentLock(documentPath, lock.writeLock(), () -> {
            lock.writeLock().unlock();
            lockTable.released(groupId, path, LockType.DOCUMENT, Lock.LockMode.WRITE_LOCK);
        });
    }

    /**
     * Returns true if a WRITE_LOCK is held for a Document
     *
     * @param documentPath The URI of the Document within the database
     *
     * @return true if a WRITE_LOCK is held
     */
    public boolean isDocumentLockedForWrite(final XmldbURI documentPath) {
        final DocumentLock existingLock = getDocumentLock(documentPath.toString());
        return existingLock.isWriteLocked();
    }

    /**
     * Returns true if a READ_LOCK is held for a Document
     *
     * @param documentPath The URI of the Document within the database
     *
     * @return true if a READ_LOCK is held
     */
    public boolean isDocumentLockedForRead(final XmldbURI documentPath) {
        final DocumentLock existingLock = getDocumentLock(documentPath.toString());
        return existingLock.getReadLockCount() > 0;
    }

    /**
     * Retrieves a lock for a {@link org.exist.storage.btree.BTree} file
     *
     * This function is concerned with just the lock object
     * and has no knowledge of the state of the lock. The only
     * guarantee is that if this lock has not been requested before
     * then it will be provided in the unlocked state
     *
     * @param btreeFileName The file name of the BTree for which a lock is requested
     *
     * @return A lock for the BTree
     */
    ReentrantLock getBTreeLock(final String btreeFileName) {
        return btreeLocks.get(btreeFileName);
    }

    /**
     * Acquire a READ_LOCK on a {@link org.exist.storage.btree.BTree}
     *
     * NOTE: the underlying lock is a plain (exclusive) {@link ReentrantLock},
     * so readers currently exclude each other as well — see {@link #isBtreeLockedForWrite(String)}.
     *
     * @param btreeFileName the filename of the BTree
     *
     * @return the lock for the BTree
     *
     * @throws LockException if the lock could not be acquired
     */
    public ManagedLock<ReentrantLock> acquireBtreeReadLock(final String btreeFileName) throws LockException {
        final long groupId = System.nanoTime();

        final ReentrantLock lock = getBTreeLock(btreeFileName);
        try {
            lockTable.attempt(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.READ_LOCK);

            lock.lockInterruptibly();

            lockTable.acquired(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.READ_LOCK);
        } catch (final InterruptedException e) {
            lockTable.attemptFailed(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.READ_LOCK);
            throw new LockException("Unable to acquire READ_LOCK for: " + btreeFileName, e);
        }

        return new ManagedLock<>(lock, () -> {
            lock.unlock();
            lockTable.released(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.READ_LOCK);
        });
    }

    /**
     * Acquire a WRITE_LOCK on a {@link org.exist.storage.btree.BTree}
     *
     * @param btreeFileName the filename of the BTree
     *
     * @return the lock for the BTree
     *
     * @throws LockException if the lock could not be acquired
     */
    public ManagedLock<ReentrantLock> acquireBtreeWriteLock(final String btreeFileName) throws LockException {
        final long groupId = System.nanoTime();

        final ReentrantLock lock = getBTreeLock(btreeFileName);
        try {
            lockTable.attempt(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.WRITE_LOCK);

            lock.lockInterruptibly();

            lockTable.acquired(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.WRITE_LOCK);
        } catch (final InterruptedException e) {
            lockTable.attemptFailed(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.WRITE_LOCK);
            throw new LockException("Unable to acquire WRITE_LOCK for: " + btreeFileName, e);
        }

        return new ManagedLock<>(lock, () -> {
            lock.unlock();
            lockTable.released(groupId, btreeFileName, LockType.BTREE, Lock.LockMode.WRITE_LOCK);
        });
    }

    /**
     * Returns true if the BTree for the file name is locked.
     *
     * @param btreeFileName The name of the .dbx file.
     *
     * @return true if the Btree is locked.
     */
    public boolean isBtreeLocked(final String btreeFileName) {
        final ReentrantLock lock = getBTreeLock(btreeFileName);
        return lock.isLocked();
    }

    /**
     * Returns true if the BTree for the file name is locked for writes.
     *
     * @param btreeFileName The name of the .dbx file.
     *
     * @return true if the Btree is locked for writes.
     *
     * @deprecated Just a place holder until we can make the BTree reader/writer safe
     */
    @Deprecated
    public boolean isBtreeLockedForWrite(final String btreeFileName) {
        return isBtreeLocked(btreeFileName);
    }

    /**
     * Simple interface which describes
     * the minimum methods needed by LockManager
     * on a Document Lock.
     */
    interface DocumentLock {
        java.util.concurrent.locks.Lock readLock();
        java.util.concurrent.locks.Lock writeLock();
        boolean isWriteLocked();
        int getReadLockCount();
        boolean hasQueuedThreads();
    }

    /**
     * Adapts {@link MultiLock} to a {@link DocumentLock}.
     */
    private static class MultiLockDocumentLockAdapter implements DocumentLock {
        private final MultiLock multiLock;

        public MultiLockDocumentLockAdapter(final MultiLock multiLock) {
            this.multiLock = multiLock;
        }

        @Override
        public java.util.concurrent.locks.Lock readLock() {
            return multiLock.asReadLock();
        }

        @Override
        public java.util.concurrent.locks.Lock writeLock() {
            return multiLock.asWriteLock();
        }

        @Override
        public boolean isWriteLocked() {
            return multiLock.getWriteLockCount() > 0;
        }

        @Override
        public int getReadLockCount() {
            return multiLock.getReadLockCount();
        }

        @Override
        public boolean hasQueuedThreads() {
            return multiLock.hasQueuedThreads();
        }
    }

    /**
     * Adapts {@link ReentrantReadWriteLock} to a {@link DocumentLock}.
     */
    private static class ReentrantReadWriteLockDocumentLockAdapter implements DocumentLock {
        private final ReentrantReadWriteLock reentrantReadWriteLock;

        private ReentrantReadWriteLockDocumentLockAdapter(final ReentrantReadWriteLock reentrantReadWriteLock) {
            this.reentrantReadWriteLock = reentrantReadWriteLock;
        }

        @Override
        public java.util.concurrent.locks.Lock readLock() {
            return reentrantReadWriteLock.readLock();
        }

        @Override
        public java.util.concurrent.locks.Lock writeLock() {
            return reentrantReadWriteLock.writeLock();
        }

        @Override
        public boolean isWriteLocked() {
            return reentrantReadWriteLock.isWriteLocked();
        }

        @Override
        public int getReadLockCount() {
            return reentrantReadWriteLock.getReadLockCount();
        }

        @Override
        public boolean hasQueuedThreads() {
            return reentrantReadWriteLock.hasQueuedThreads();
        }
    }
}
Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - * - * $Id$ - */ -package org.exist.storage.lock; - -/** - * Used to track acquired locks, mainly for debugging. - */ -public class LockOwner { - - /** - * Global flag: set to true to receive debugging output, in particular, - * to see where a lock was acquired. Note: it adds some considerable - * processing overhead. 
- */ - public static boolean DEBUG = false; - - private final Thread owner; - private Throwable stack = null; - - public LockOwner(Thread owner) { - this.owner = owner; - if (DEBUG) - {this.stack = new Throwable().fillInStackTrace();} - } - - public final Thread getOwner() { - return owner; - } - - public final Throwable getStack() { - return stack; - } -} \ No newline at end of file diff --git a/src/org/exist/storage/lock/LockTable.java b/src/org/exist/storage/lock/LockTable.java new file mode 100644 index 00000000000..cda11ca2ae4 --- /dev/null +++ b/src/org/exist/storage/lock/LockTable.java @@ -0,0 +1,707 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.storage.lock; + +import com.evolvedbinary.j8fu.Either; +import com.evolvedbinary.j8fu.tuple.Tuple2; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.exist.storage.NativeBroker; +import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.Lock.LockType; +import org.exist.storage.txn.Txn; + +import javax.annotation.Nullable; +import java.util.*; +import java.util.concurrent.*; +import java.util.stream.Collectors; + +import static org.exist.storage.lock.LockTable.LockAction.Action.*; +import static org.exist.util.ThreadUtils.newInstanceThread; + +/** + * The Lock Table holds the details of + * threads awaiting to acquire a Lock + * and threads that have acquired a lock. + * + * It is arranged by the id of the lock + * which is typically an indicator of the + * lock subject. 
+ * + * @author Adam Retter + */ +public class LockTable { + + public static final String PROP_DISABLE = "exist.locktable.disable"; + public static final String PROP_SANITY_CHECK = "exist.locktable.sanity.check"; + public static final String PROP_TRACE_STACK_DEPTH = "exist.locktable.trace.stack.depth"; + + private static final Logger LOG = LogManager.getLogger(LockTable.class); + private static final String THIS_CLASS_NAME = LockTable.class.getName(); + + /** + * Set to false to disable all events + */ + private volatile boolean disableEvents = Boolean.getBoolean(PROP_DISABLE); + + /** + * Set to true to enable sanity checking of lock leases + */ + private volatile boolean sanityCheck = Boolean.getBoolean(PROP_SANITY_CHECK); + + /** + * Whether we should try and trace the stack for the lock event, -1 means all stack, + * 0 means no stack, n means n stack frames, 5 is a reasonable value + */ + private volatile int traceStackDepth = Optional.ofNullable(Integer.getInteger(PROP_TRACE_STACK_DEPTH)) + .orElse(0); + + /** + * List of threads attempting to acquire a lock + * + * Map>> + */ + private final ConcurrentMap>> attempting = new ConcurrentHashMap<>(); + + /** + * Reference count of acquired locks by id and type + * + * Map>>> + */ + private final ConcurrentMap>>> acquired = new ConcurrentHashMap<>(); + + /** + * The {@link #queue} holds lock events and lock listener events + * and is processed by the single thread {@link #queueConsumer} which uses + * {@link QueueConsumer} to ensure serializability of locking events and monitoring + */ + private final TransferQueue> queue = new LinkedTransferQueue<>(); + private final ExecutorService executorService; + private final Future queueConsumer; + + /** + * Holds a count of READ and WRITE locks by {@link LockAction#id} + * Only used for debugging, see {@link #sanityCheckLockLifecycles(LockAction)}. 
+ */ + private final Map> lockCounts = new HashMap<>(); + + LockTable(final String brokerPoolId, final ThreadGroup threadGroup) { + this.executorService = Executors.newSingleThreadExecutor(runnable -> newInstanceThread(threadGroup, brokerPoolId, "lock-table.processor", runnable)); + this.queueConsumer = executorService.submit(new QueueConsumer(queue, attempting, acquired)); + + // add a log listener if trace level logging is enabled + if(LOG.isTraceEnabled()) { + registerListener(new LockEventLogListener(LOG, Level.TRACE)); + } + } + + /** + * Shuts down the lock table processor. + * + * After calling this, no further lock + * events will be reported. + */ + public void shutdown() { + if (!executorService.isShutdown()) { + executorService.shutdownNow(); + } + } + + /** + * Set the depth at which we should trace lock events through the stack + * + * @param traceStackDepth -1 traces the whole stack, 0 means no stack traces, n means n stack frames + */ + public void setTraceStackDepth(final int traceStackDepth) { + this.traceStackDepth = traceStackDepth; + } + + public void attempt(final long groupId, final String id, final LockType lockType, final LockMode mode) { + event(Attempt, groupId, id, lockType, mode); + } + + public void attemptFailed(final long groupId, final String id, final LockType lockType, final LockMode mode) { + event(AttemptFailed, groupId, id, lockType, mode); + } + + public void acquired(final long groupId, final String id, final LockType lockType, final LockMode mode) { + event(Acquired, groupId, id, lockType, mode); + } + + public void released(final long groupId, final String id, final LockType lockType, final LockMode mode) { + event(Released, groupId, id, lockType, mode); + } + + @Deprecated + public void released(final long groupId, final String id, final LockType lockType, final LockMode mode, final int count) { + event(Released, groupId, id, lockType, mode, count); + } + + private void event(final LockAction.Action action, final long 
groupId, final String id, final LockType lockType, final LockMode mode) { + event(action, groupId, id, lockType, mode, 1); + } + + private void event(final LockAction.Action action, final long groupId, final String id, final LockType lockType, final LockMode mode, final int count) { + if(disableEvents) { + return; + } + + final long timestamp = System.nanoTime(); + final Thread currentThread = Thread.currentThread(); + final String threadName = currentThread.getName(); + @Nullable final StackTraceElement[] stackTrace = getStackTrace(currentThread); + + if(ignoreEvent(threadName, id)) { + return; + } + + final LockAction lockAction = new LockAction(action, groupId, id, lockType, mode, threadName, count, timestamp, stackTrace); + + /** + * Very useful for debugging Lock life cycles + */ + if(sanityCheck) { + sanityCheckLockLifecycles(lockAction); + } + + queue.add(Either.Right(lockAction)); + } + + /** + * Simple filtering to ignore events that are not of interest + * + * @param threadName The name of the thread that triggered the event + * @param id The id of the lock + * + * @return true if the event should be ignored + */ + private boolean ignoreEvent(final String threadName, final String id) { + return false; + + // useful for debugging specific log events +// return threadName.startsWith("DefaultQuartzScheduler_") +// || id.equals("dom.dbx") +// || id.equals("collections.dbx") +// || id.equals("collections.dbx") +// || id.equals("structure.dbx") +// || id.equals("values.dbx") +// || id.equals("CollectionCache"); + } + + @Nullable + private StackTraceElement[] getStackTrace(final Thread thread) { + if(traceStackDepth == 0) { + return null; + } else { + final StackTraceElement[] stackTrace = thread.getStackTrace(); + final int lastStackTraceElementIdx = stackTrace.length - 1; + + final int from = findFirstExternalFrame(stackTrace); + final int to; + if (traceStackDepth == -1) { + to = lastStackTraceElementIdx; + } else { + final int calcTo = from + 
traceStackDepth; + if (calcTo > lastStackTraceElementIdx) { + to = lastStackTraceElementIdx; + } else { + to = calcTo; + } + } + + return Arrays.copyOfRange(stackTrace, from, to); + } + } + + private int findFirstExternalFrame(final StackTraceElement[] stackTrace) { + // we start with i = 1 to avoid Thread#getStackTrace() frame + for(int i = 1; i < stackTrace.length; i++) { + if(!THIS_CLASS_NAME.equals(stackTrace[i].getClassName())) { + return i; + } + } + return 0; + } + + public void registerListener(final LockEventListener lockEventListener) { + final ListenerAction listenerAction = new ListenerAction(ListenerAction.Action.Register, lockEventListener); + queue.add(Either.Left(listenerAction)); + } + + public void deregisterListener(final LockEventListener lockEventListener) { + final ListenerAction listenerAction = new ListenerAction(ListenerAction.Action.Deregister, lockEventListener); + queue.add(Either.Left(listenerAction)); + } + + public boolean hasPendingEvents() { + return !queue.isEmpty(); + } + + /** + * Get's a copy of the current lock attempt information + * + * @return lock attempt information + */ + public Map>> getAttempting() { + return new HashMap<>(attempting); + } + + /** + * Get's a copy of the current acquired lock information + * + * @return acquired lock information + */ + public Map>>> getAcquired() { + return new HashMap<>(acquired); + } + + public static class LockModeOwner { + final LockMode lockMode; + final String ownerThread; + + public LockModeOwner(final LockMode lockMode, final String ownerThread) { + this.lockMode = lockMode; + this.ownerThread = ownerThread; + } + + public LockMode getLockMode() { + return lockMode; + } + + public String getOwnerThread() { + return ownerThread; + } + } + + private static class QueueConsumer implements Runnable { + private final TransferQueue> queue; + private final ConcurrentMap>> attempting; + private final ConcurrentMap>>> acquired; + private final List listeners = new ArrayList<>(); + + 
QueueConsumer(final TransferQueue> queue, + final ConcurrentMap>> attempting, + final ConcurrentMap>>> acquired) { + this.queue = queue; + this.attempting = attempting; + this.acquired = acquired; + } + + @Override + public void run() { + try { + while (true) { + final Either event = queue.take(); + + if (event.isLeft()) { + processListenerAction(event.left().get()); + } else { + processLockAction(event.right().get()); + } + } + } catch (final InterruptedException e) { + LOG.warn("LockTable.QueueConsumer was interrupted. LockTable will no longer report lock events!"); + // Restore the interrupted status + Thread.currentThread().interrupt(); + } + } + + private void processListenerAction(final ListenerAction listenerAction) { + if(listenerAction.action == ListenerAction.Action.Register) { + listeners.add(listenerAction.lockEventListener); + listenerAction.lockEventListener.registered(); + } else if(listenerAction.action == ListenerAction.Action.Deregister) { + listeners.remove(listenerAction.lockEventListener); + listenerAction.lockEventListener.unregistered(); + } + } + + private void processLockAction(final LockAction lockAction) { + if (lockAction.action == Attempt) { + notifyListenersOfAttempt(lockAction); + addToAttempting(lockAction); + + } else if (lockAction.action == AttemptFailed) { + removeFromAttempting(lockAction); + notifyListenersOfAttemptFailed(lockAction); + + } else if (lockAction.action == Acquired) { + removeFromAttempting(lockAction); + incrementAcquired(lockAction); + + } else if (lockAction.action == Released) { + decrementAcquired(lockAction); + } + } + + private void notifyListenersOfAttempt(final LockAction lockAction) { + for(final LockEventListener listener : listeners) { + try { + listener.accept(lockAction); + } catch (final Exception e) { + LOG.error("Listener '{}' error: ", listener.getClass().getName(), e); + } + } + } + + private void notifyListenersOfAttemptFailed(final LockAction lockAction) { + for(final LockEventListener 
listener : listeners) { + try { + listener.accept(lockAction); + } catch (final Exception e) { + LOG.error("Listener '{}' error: ", listener.getClass().getName(), e); + } + } + } + + private void notifyListenersOfAcquire(final LockAction lockAction, final int newReferenceCount) { + final LockAction newLockAction = lockAction.withCount(newReferenceCount); + for(final LockEventListener listener : listeners) { + try { + listener.accept(newLockAction); + } catch (final Exception e) { + LOG.error("Listener '{}' error: ", listener.getClass().getName(), e); + } + } + } + + private void notifyListenersOfRelease(final LockAction lockAction, final int newReferenceCount) { + final LockAction newLockAction = lockAction.withCount(newReferenceCount); + for(final LockEventListener listener : listeners) { + try { + listener.accept(newLockAction); + } catch (final Exception e) { + LOG.error("Listener '{}' error: ", listener.getClass().getName(), e); + } + } + } + + private void addToAttempting(final LockAction lockAction) { + attempting.compute(lockAction.id, (id, attempts) -> { + if (attempts == null) { + attempts = new HashMap<>(); + } + + attempts.compute(lockAction.lockType, (lockType, v) -> { + if (v == null) { + v = new ArrayList<>(); + } + + v.add(new LockModeOwner(lockAction.mode, lockAction.threadName)); + return v; + }); + + return attempts; + }); + } + + private void removeFromAttempting(final LockAction lockAction) { + attempting.compute(lockAction.id, (id, attempts) -> { + if (attempts == null) { + return null; + } else { + attempts.compute(lockAction.lockType, (lockType, v) -> { + if (v == null) { + return null; + } + + v.removeIf(val -> val.getLockMode() == lockAction.mode && val.getOwnerThread().equals(lockAction.threadName)); + if (v.isEmpty()) { + return null; + } else { + return v; + } + }); + + if (attempts.isEmpty()) { + return null; + } else { + return attempts; + } + } + }); + } + + private void incrementAcquired(final LockAction lockAction) { + 
acquired.compute(lockAction.id, (id, acqu) -> { + if (acqu == null) { + acqu = new HashMap<>(); + } + + acqu.compute(lockAction.lockType, (lockType, v) -> { + if (v == null) { + v = new HashMap<>(); + } + + v.compute(lockAction.mode, (mode, ownerHolds) -> { + if (ownerHolds == null) { + ownerHolds = new HashMap<>(); + } + + ownerHolds.compute(lockAction.threadName, (threadName, holdCount) -> { + if(holdCount == null) { + holdCount = 0; + } + return ++holdCount; + }); + + final int lockModeHolds = ownerHolds.values().stream().collect(Collectors.summingInt(Integer::intValue)); + notifyListenersOfAcquire(lockAction, lockModeHolds); + + return ownerHolds; + }); + + return v; + }); + + return acqu; + }); + } + + private void decrementAcquired(final LockAction lockAction) { + acquired.compute(lockAction.id, (id, acqu) -> { + if (acqu == null) { + LOG.error("No entry found when trying to decrementAcquired for: id={}" + lockAction.id); + return null; + } + + acqu.compute(lockAction.lockType, (lockType, v) -> { + if (v == null) { + LOG.error("No entry found when trying to decrementAcquired for: id={}, lockType={}", lockAction.id, lockAction.lockType); + return null; + } + + v.compute(lockAction.mode, (mode, ownerHolds) -> { + if (ownerHolds == null) { + LOG.error("No entry found when trying to decrementAcquired for: id={}, lockType={}, lockMode={}", lockAction.id, lockAction.lockType, lockAction.mode); + return null; + } else { + ownerHolds.compute(lockAction.threadName, (threadName, holdCount) -> { + if(holdCount == null) { + LOG.error("No entry found when trying to decrementAcquired for: id={}, lockType={}, lockMode={}, threadName={}", lockAction.id, lockAction.lockType, lockAction.mode, lockAction.threadName); + return null; + } else if(holdCount == 0) { + LOG.error("Negative release when trying to decrementAcquired for: id={}, lockType={}, lockMode={}, threadName={}", lockAction.id, lockAction.lockType, lockAction.mode, lockAction.threadName); + return null; + } else 
if(holdCount == 1) { + return null; + } else { + return --holdCount; + } + }); + + final int lockModeHolds = ownerHolds.values().stream().collect(Collectors.summingInt(Integer::intValue)); + + notifyListenersOfRelease(lockAction, lockModeHolds); + + if (ownerHolds.isEmpty()) { + return null; + } else { + return ownerHolds; + } + } + }); + + if (v.isEmpty()) { + return null; + } else { + return v; + } + }); + + if (acqu.isEmpty()) { + return null; + } else { + return acqu; + } + }); + } + } + + public interface LockEventListener { + default void registered() {} + void accept(final LockAction lockAction); + default void unregistered() {} + } + + private static class ListenerAction { + enum Action { + Register, + Deregister + } + + private final Action action; + private final LockEventListener lockEventListener; + + public ListenerAction(final Action action, final LockEventListener lockEventListener) { + this.action = action; + this.lockEventListener = lockEventListener; + } + + @Override + public String toString() { + return action.name() + " " + lockEventListener.getClass().getName(); + } + } + + public static class LockAction { + private static final String NATIVE_BROKER_CLASS_NAME = NativeBroker.class.getName(); + private static final String COLLECTION_STORE_CLASS_NAME = NativeBroker.class.getName(); + private static final String TXN_CLASS_NAME = Txn.class.getName(); + + public enum Action { + Attempt, + AttemptFailed, + Acquired, + Released + } + + public final Action action; + public final long groupId; + public final String id; + public final LockType lockType; + public final LockMode mode; + public final String threadName; + public final int count; + /** + * System#nanoTime() + */ + public final long timestamp; + @Nullable public final StackTraceElement[] stackTrace; + + LockAction(final Action action, final long groupId, final String id, final LockType lockType, final LockMode mode, final String threadName, final int count, final long timestamp, @Nullable 
final StackTraceElement[] stackTrace) { + this.action = action; + this.groupId = groupId; + this.id = id; + this.lockType = lockType; + this.mode = mode; + this.threadName = threadName; + this.count = count; + this.timestamp = timestamp; + this.stackTrace = stackTrace; + } + + public LockAction withCount(final int count) { + return new LockAction(action, groupId, id, lockType, mode, threadName, count, timestamp, stackTrace); + } + + @Override + public String toString() { + final StringBuilder builder = new StringBuilder() + .append(action.toString()) + .append(' ') + .append(lockType.name()); + + if(groupId > -1) { + builder + .append("#") + .append(groupId); + } + + builder.append('(') + .append(mode.toString()) + .append(") of ") + .append(id); + + if(stackTrace != null) { + final String reason = getSimpleStackReason(); + if(reason != null) { + builder + .append(" for #") + .append(reason); + } + } + + builder + .append(" by ") + .append(threadName) + .append(" at ") + .append(timestamp); + + if (action == Acquired || action == Released) { + builder + .append(". 
count=") + .append(Integer.toString(count)); + } + + return builder.toString(); + } + + @Nullable + public String getSimpleStackReason() { + for (final StackTraceElement stackTraceElement : stackTrace) { + final String className = stackTraceElement.getClassName(); + + if (className.equals(NATIVE_BROKER_CLASS_NAME) || className.equals(COLLECTION_STORE_CLASS_NAME) || className.equals(TXN_CLASS_NAME)) { + if (!(stackTraceElement.getMethodName().endsWith("LockCollection") || stackTraceElement.getMethodName().equals("lockCollectionCache"))) { + return stackTraceElement.getMethodName() + '(' + stackTraceElement.getLineNumber() + ')'; + } + } + } + + return null; + } + } + + /** debugging tools below **/ + + /** + * Checks that there are not more releases that there are acquires + */ + private void sanityCheckLockLifecycles(final LockAction lockAction) { + synchronized(lockCounts) { + long read = 0; + long write = 0; + + final Tuple2 lockCount = lockCounts.get(lockAction.id); + if(lockCount != null) { + read = lockCount._1; + write = lockCount._2; + } + + if(lockAction.action == LockAction.Action.Acquired) { + if(lockAction.mode == LockMode.READ_LOCK) { + read++; + } else if(lockAction.mode == LockMode.WRITE_LOCK) { + write++; + } + } else if(lockAction.action == LockAction.Action.Released) { + if(lockAction.mode == LockMode.READ_LOCK) { + if(read == 0) { + LOG.error("Negative READ_LOCKs", new IllegalStateException()); + } + read--; + } else if(lockAction.mode == LockMode.WRITE_LOCK) { + if(write == 0) { + LOG.error("Negative WRITE_LOCKs", new IllegalStateException()); + } + write--; + } + } + + if(LOG.isTraceEnabled()) { + LOG.trace("QUEUE: {} (read={} write={})", lockAction.toString(), read, write); + } + + lockCounts.put(lockAction.id, new Tuple2<>(read, write)); + } + } +} diff --git a/src/org/exist/storage/lock/LockTableUtils.java b/src/org/exist/storage/lock/LockTableUtils.java new file mode 100644 index 00000000000..0a2dc5aafb7 --- /dev/null +++ 
b/src/org/exist/storage/lock/LockTableUtils.java @@ -0,0 +1,89 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.storage.lock; + +import java.util.List; +import java.util.Map; + +/** + * Utilities for working with the Lock Table + * + * @author Adam Retter + */ +public class LockTableUtils { + + private static final String EOL = System.getProperty("line.separator"); + + public static String stateToString(final LockTable lockTable) { + final Map>> attempting = lockTable.getAttempting(); + final Map>>> acquired = lockTable.getAcquired(); + + final StringBuilder builder = new StringBuilder(); + + builder + .append(EOL) + .append("Acquired Locks").append(EOL) + .append("------------------------------------").append(EOL); + + for(final Map.Entry>>> acquire : acquired.entrySet()) { + builder.append(acquire.getKey()).append(EOL); + for(final Map.Entry>> type : acquire.getValue().entrySet()) { + builder.append('\t').append(type.getKey()).append(EOL); + for(final Map.Entry> lockModeOwners : type.getValue().entrySet()) { + builder + .append("\t\t").append(lockModeOwners.getKey()) + .append('\t'); + + boolean firstOwner = true; + for(final Map.Entry 
ownerHoldCount : lockModeOwners.getValue().entrySet()) { + if(!firstOwner) { + builder.append(", "); + } else { + firstOwner = false; + } + builder.append(ownerHoldCount.getKey()).append(" (count=").append(ownerHoldCount.getValue()).append(")"); + } + builder.append(EOL); + } + } + } + + builder.append(EOL).append(EOL); + + builder + .append("Attempting Locks").append(EOL) + .append("------------------------------------").append(EOL); + + for(final Map.Entry>> attempt : attempting.entrySet()) { + builder.append(attempt.getKey()).append(EOL); + for(final Map.Entry> type : attempt.getValue().entrySet()) { + builder.append('\t').append(type.getKey()).append(EOL); + for(final LockTable.LockModeOwner lockModeOwner : type.getValue()) { + builder + .append("\t\t").append(lockModeOwner.getLockMode()) + .append('\t').append(lockModeOwner.getOwnerThread()) + .append(EOL); + } + } + } + + return builder.toString(); + } +} diff --git a/src/org/exist/storage/lock/Locked.java b/src/org/exist/storage/lock/Locked.java deleted file mode 100644 index 621d4fbc6a1..00000000000 --- a/src/org/exist/storage/lock/Locked.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2001-2013 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - */ -package org.exist.storage.lock; - -import java.util.concurrent.Callable; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; -import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; - -/** - * @author Dmitriy Shabanov - * - */ -public class Locked { - - private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); - private final ReadLock readLock = lock.readLock(); - private final WriteLock writeLock = lock.writeLock(); - - public final R read(final Callable readOp) { - readLock.lock(); - try { - return readOp.call(); - } catch (Exception e) { - //can't be ignore - return (R)e; - } finally { - readLock.unlock(); - } - } - - public final R write(final Callable writeOp) { - writeLock.lock(); - try { - return writeOp.call(); - } catch (Exception e) { - //can't be ignore - return (R)e; - } finally { - writeLock.unlock(); - } - } - - public final R writeE(final Callable writeOp) throws Exception { - writeLock.lock(); - try { - return writeOp.call(); - } finally { - writeLock.unlock(); - } - } -} diff --git a/src/org/exist/storage/lock/LockedDocumentMap.java b/src/org/exist/storage/lock/LockedDocumentMap.java index 015d8f45ac9..be8a6553ac0 100644 --- a/src/org/exist/storage/lock/LockedDocumentMap.java +++ b/src/org/exist/storage/lock/LockedDocumentMap.java @@ -23,99 +23,105 @@ import net.jcip.annotations.NotThreadSafe; import org.exist.collections.Collection; -import org.exist.dom.persistent.DefaultDocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.DocumentSet; -import org.exist.dom.persistent.MutableDocumentSet; -import org.exist.storage.lock.Lock.LockMode; -import 
org.exist.util.hashtable.Int2ObjectHashMap; +import org.exist.dom.persistent.*; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; /** * This map is used by the XQuery engine to track how many read locks were * acquired for a document during query execution. */ @NotThreadSafe -public class LockedDocumentMap extends Int2ObjectHashMap { +public class LockedDocumentMap { private final static int DEFAULT_SIZE = 29; - private final static double DEFAULT_GROWTH = 1.75; + private final static float DEFAULT_GROWTH = 1.75f; - public LockedDocumentMap() { - super(DEFAULT_SIZE, DEFAULT_GROWTH); - } + private final Map> map = new LinkedHashMap<>(DEFAULT_SIZE, DEFAULT_GROWTH); - public void add(final DocumentImpl document) { - LockedDocument entry = (LockedDocument) get(document.getDocId()); - if (entry == null) { - entry = new LockedDocument(document); - put(document.getDocId(), entry); - } - entry.locksAcquired++; + public void add(final LockedDocument lockedDocument) { + map.compute(lockedDocument.getDocument().getDocId(), (k, v) -> { + if(v == null) { + v = new ArrayList<>(); + } + + v.add(lockedDocument); + + return v; + }); } public MutableDocumentSet toDocumentSet() { - final MutableDocumentSet docs = new DefaultDocumentSet(size()); - for (int idx = 0; idx < tabSize; idx++) { - if (values[idx] == null || values[idx] == REMOVED) { - continue; - } - final LockedDocument lockedDocument = (LockedDocument) values[idx]; - docs.add(lockedDocument.document); + final MutableDocumentSet docs = new DefaultDocumentSet(map.size()); + for(final List documentLocks : map.values()) { + docs.add(documentLocks.get(0).getDocument()); } return docs; } public DocumentSet getDocsByCollection(final Collection collection, MutableDocumentSet targetSet) { if (targetSet == null) { - targetSet = new DefaultDocumentSet(size()); + targetSet = new DefaultDocumentSet(map.size()); } - for (int idx = 0; idx < tabSize; idx++) { - if (values[idx] 
== null || values[idx] == REMOVED) { - continue; - } - final LockedDocument lockedDocument = (LockedDocument) values[idx]; - if (lockedDocument.document.getCollection().getURI().startsWith(collection.getURI())) { - targetSet.add(lockedDocument.document); + + for(final List documentLocks : map.values()) { + final DocumentImpl doc = documentLocks.get(0).getDocument(); + if(doc.getCollection().getURI().startsWith(collection.getURI())) { + targetSet.add(doc); } } return targetSet; } public void unlock() { - for (int idx = 0; idx < tabSize; idx++) { - if (values[idx] == null || values[idx] == REMOVED) { - continue; + // NOTE: locks should be released in the reverse order that they were acquired + final List> documentsLockedDocuments = new ArrayList<>(map.values()); + for(int i = documentsLockedDocuments.size() - 1; i >= 0; i--) { + final List documentLocks = documentsLockedDocuments.get(i); + + for(int j = documentLocks.size() - 1; j >= 0; j--) { + final LockedDocument documentLock = documentLocks.get(j); + documentLock.close(); } - final LockedDocument lockedDocument = (LockedDocument) values[idx]; - unlockDocument(lockedDocument); } } public LockedDocumentMap unlockSome(final DocumentSet keep) { - for (int idx = 0; idx < tabSize; idx++) { - if (values[idx] == null || values[idx] == REMOVED) { - continue; - } - final LockedDocument lockedDocument = (LockedDocument) values[idx]; - if (!keep.contains(lockedDocument.document.getDocId())) { - values[idx] = REMOVED; - unlockDocument(lockedDocument); + final int[] docIdsToRemove = new int[map.size() - keep.getDocumentCount()]; + + // NOTE: locks should be released in the reverse order that they were acquired + final List> documentsLockedDocuments = new ArrayList<>(map.values()); + final int len = documentsLockedDocuments.size(); + for(int i = len - 1; i >= 0; i--) { + final List documentLocks = documentsLockedDocuments.get(i); + + final int docId = documentLocks.get(0).getDocument().getDocId(); + if(!keep.contains(docId)) { 
+ for (int j = documentLocks.size() - 1; j >= 0; j--) { + final LockedDocument documentLock = documentLocks.get(j); + documentLock.close(); + } + + docIdsToRemove[len - 1 - i] = docId; } } + + // cleanup + for(final int docIdToRemove : docIdsToRemove) { + map.remove(docIdToRemove); + } + return this; } - private void unlockDocument(final LockedDocument lockedDocument) { - final Lock documentLock = lockedDocument.document.getUpdateLock(); - documentLock.release(LockMode.WRITE_LOCK, lockedDocument.locksAcquired); + public boolean containsKey(final int docId) { + return map.containsKey(docId); } - private static class LockedDocument { - private final DocumentImpl document; - private int locksAcquired = 0; - - LockedDocument(final DocumentImpl document) { - this.document = document; - } + public int size() { + return map.size(); } } diff --git a/src/org/exist/storage/lock/ManagedCollectionLock.java b/src/org/exist/storage/lock/ManagedCollectionLock.java new file mode 100644 index 00000000000..3b2b345f02f --- /dev/null +++ b/src/org/exist/storage/lock/ManagedCollectionLock.java @@ -0,0 +1,25 @@ +package org.exist.storage.lock; + +import org.exist.xmldb.XmldbURI; +import uk.ac.ic.doc.slurp.multilock.MultiLock; + +/** + * @author Adam Retter + */ +public class ManagedCollectionLock extends ManagedLock { + + private final XmldbURI collectionUri; + + public ManagedCollectionLock(final XmldbURI collectionUri, final MultiLock[] locks, final Runnable closer) { + super(locks, closer); + this.collectionUri = collectionUri; + } + + public XmldbURI getPath() { + return collectionUri; + } + + public static ManagedCollectionLock notLocked(final XmldbURI collectionUri) { + return new ManagedCollectionLock(collectionUri, null, () -> {}); + } +} diff --git a/src/org/exist/storage/lock/ManagedDocumentLock.java b/src/org/exist/storage/lock/ManagedDocumentLock.java new file mode 100644 index 00000000000..abeaf6b1fdc --- /dev/null +++ b/src/org/exist/storage/lock/ManagedDocumentLock.java 
@@ -0,0 +1,44 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.storage.lock; + +import org.exist.xmldb.XmldbURI; + +/** + * @author Adam Retter + */ +public class ManagedDocumentLock extends ManagedLock { + + private final XmldbURI documentUri; + + public ManagedDocumentLock(final XmldbURI documentUri, final java.util.concurrent.locks.Lock lock, final Runnable closer) { + super(lock, closer); + this.documentUri = documentUri; + } + + public XmldbURI getPath() { + return documentUri; + } + + public static ManagedDocumentLock notLocked(final XmldbURI documentUri) { + return new ManagedDocumentLock(documentUri, null, () -> {}); + } +} diff --git a/src/org/exist/storage/lock/ManagedLock.java b/src/org/exist/storage/lock/ManagedLock.java new file mode 100644 index 00000000000..1bc86ce5db8 --- /dev/null +++ b/src/org/exist/storage/lock/ManagedLock.java @@ -0,0 +1,190 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2016 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by 
the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.storage.lock; + +import org.exist.util.LockException; + +import java.util.concurrent.locks.ReentrantLock; + +/** + * Provides a simple wrapper around a Lock + * so that it may be used in a try-with-resources + * statement + * + * @author Adam Retter + */ +public class ManagedLock implements AutoCloseable { + protected final T lock; + private final Runnable closer; + protected volatile boolean closed = false; + + ManagedLock(final T lock, final Runnable closer) { + this.lock = lock; + this.closer = closer; + } + + /** + * Acquires and manages a lock with a specific mode + * + * @param lock The lock to call {@link Lock#acquire(Lock.LockMode)} on + * @param mode the mode of the lock + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock acquire(final Lock lock, final Lock.LockMode mode) throws LockException { + if(!lock.acquire(mode)) { + throw new LockException("Unable to acquire lock"); + } + return new ManagedLock<>(lock, () -> lock.release(mode)); + } + + /** + * Acquires and manages a lock with a specific mode + * + * @param lock The lock to call {@link Lock#acquire(Lock.LockMode)} on + * @param mode the mode of the lock + * @param type the type of the lock + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock acquire(final Lock lock, final Lock.LockMode 
mode, final Lock.LockType type) throws LockException { + if(!lock.acquire(mode)) { + throw new LockException("Unable to acquire lock: " + type); + } + return new ManagedLock<>(lock, () -> lock.release(mode)); + } + + /** + * Attempts to acquire and manage a lock with a specific mode + * + * @param lock The lock to call {@link Lock#attempt(Lock.LockMode)} on + * @param mode the mode of the lock + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock attempt(final Lock lock, final Lock.LockMode mode) throws LockException { + if(!lock.attempt(mode)) { + throw new LockException("Unable to attempt to acquire lock"); + } + return new ManagedLock<>(lock, () -> lock.release(mode)); + } + + /** + * Acquires and manages a lock with a specific mode + * + * @param lock The lock to call {@link java.util.concurrent.locks.Lock#lock()} on + * @param mode the mode of the lock + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock acquire(final java.util.concurrent.locks.ReadWriteLock lock, final Lock.LockMode mode) { + final java.util.concurrent.locks.Lock modeLock; + switch(mode) { + case READ_LOCK: + modeLock = lock.readLock(); + break; + + case WRITE_LOCK: + modeLock = lock.writeLock(); + break; + + default: + throw new IllegalArgumentException(); + } + + modeLock.lock(); + return new ManagedLock<>(lock, modeLock::unlock); + } + + /** + * Attempts to acquire and manage a lock with a specific mode + * + * @param lock The lock to call {@link java.util.concurrent.locks.Lock#tryLock()} on + * @param mode the mode of the lock + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock attempt(final java.util.concurrent.locks.ReadWriteLock lock, final Lock.LockMode mode) throws LockException { + final java.util.concurrent.locks.Lock modeLock; + switch(mode) { + case READ_LOCK: + modeLock = lock.readLock(); + break; + + case 
WRITE_LOCK: + modeLock = lock.writeLock(); + break; + + default: + throw new IllegalArgumentException(); + } + + if(!modeLock.tryLock()) { + throw new LockException("Unable to attempt to acquire lock"); + } + return new ManagedLock<>(lock, modeLock::unlock); + } + + /** + * Acquires and manages a lock + * + * @param lock The lock to call {@link java.util.concurrent.locks.Lock#lock()} on + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock acquire(final ReentrantLock lock) { + lock.lock(); + return new ManagedLock<>(lock, lock::unlock); + } + + /** + * Attempts to acquire and manage a lock + * + * @param lock The lock to call {@link java.util.concurrent.locks.Lock#tryLock()} on + * + * @return A managed lock which will be released with {@link #close()} + */ + public static ManagedLock attempt(final ReentrantLock lock) throws LockException { + if(!lock.tryLock()) { + throw new LockException("Unable to attempt to acquire lock"); + } + return new ManagedLock<>(lock, lock::unlock); + } + + /** + * Determines if the lock has already been released + * + * @return true if the lock has already been released + */ + boolean isReleased() { + return closed; + } + + /** + * Releases the lock + */ + @Override + public void close() { + if(!closed) { + closer.run(); + } + this.closed = true; + } +} diff --git a/src/org/exist/storage/lock/MultiReadReentrantLock.java b/src/org/exist/storage/lock/MultiReadReentrantLock.java deleted file mode 100644 index ee4ca0a6046..00000000000 --- a/src/org/exist/storage/lock/MultiReadReentrantLock.java +++ /dev/null @@ -1,587 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2005-2007 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) 
any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * - * Original code is - * - * Copyright 2001-2004 The Apache Software Foundation. - * - * Licensed under the Apache License, Version 2.0 (the "License") - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * $Id$ - */ -package org.exist.storage.lock; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.exist.util.DeadlockException; -import org.exist.util.LockException; - -import java.io.PrintStream; -import java.util.ArrayList; -import java.util.List; - -/** - * A reentrant read/write lock, which allows multiple readers to acquire a lock. - * Waiting writers are preferred. - *

- * This is an adapted and bug-fixed version of code taken from Apache's Turbine - * JCS. - */ -public class MultiReadReentrantLock implements Lock { - - private final static Logger LOG = LogManager.getLogger(MultiReadReentrantLock.class); - - private final Object id; - - /** - * Number of threads waiting to read. - */ - private int waitingForReadLock = 0; - - /** - * Number of threads reading. - */ - private final List outstandingReadLocks = new ArrayList<>(4); - - /** - * The thread that has the write lock or null. - */ - private Thread writeLockedThread; - - /** - * The number of (nested) write locks that have been requested from - * writeLockedThread. - */ - private int outstandingWriteLocks = 0; - - /** - * Threads waiting to get a write lock are tracked in this ArrayList to - * ensure that write locks are issued in the same order they are requested. - */ - private final List waitingForWriteLock = new ArrayList<>(3); - - /** - * Default constructor. - */ - public MultiReadReentrantLock(final Object id) { - this.id = id; - } - - @Override - public String getId() { - return id.toString(); - } - - /** - * @deprecated Use {@link #acquire(LockMode)} - */ - @Override - public boolean acquire() throws LockException { - return acquire(LockMode.READ_LOCK); - } - - @Override - public boolean acquire(final LockMode mode) throws LockException { - switch (mode) { - case NO_LOCK: - LOG.warn("Acquired with LockMode.NO_LOCK!"); - return true; - - case READ_LOCK: - return readLock(true); - - case WRITE_LOCK: - return writeLock(true); - - default: - throw new IllegalStateException(); - } - } - - @Override - public boolean attempt(final LockMode mode) { - try { - switch (mode) { - case NO_LOCK: - LOG.warn("Attempted acquire with LockMode.NO_LOCK!"); - return true; - - case READ_LOCK: - return readLock(false); - - case WRITE_LOCK: - return writeLock(false); - - default: - throw new IllegalStateException(); - } - } catch (final LockException e) { - return false; - } - } - - /** - 
* Issue a read lock if there is no outstanding write lock or threads - * waiting to get a write lock. Caller of this method must be careful to - * avoid synchronizing the calling code so as to avoid deadlock. - * @param waitIfNecessary whether to wait if the lock is not available right away - */ - private synchronized boolean readLock(boolean waitIfNecessary) throws LockException { - final Thread thisThread = Thread.currentThread(); - if (writeLockedThread == thisThread) { - // add acquired lock to the current list of read locks - outstandingReadLocks.add(new LockOwner(thisThread)); - //LOG.debug("Thread already holds a write lock"); - return true; - } - deadlockCheck(); - waitingForReadLock++; - if (writeLockedThread != null) { - if (!waitIfNecessary) {return false;} - final WaitingThread waiter = new WaitingThread(thisThread, this, this, LockMode.READ_LOCK); - DeadlockDetection.addResourceWaiter(thisThread, waiter); - while (writeLockedThread != null) { - //LOG.debug("readLock wait by " + thisThread.getName() + " for " + getId()); - waiter.doWait(); - //LOG.debug("wake up from readLock wait"); - } - DeadlockDetection.clearResourceWaiter(thisThread); - } waitingForReadLock--; - //Add acquired lock to the current list of read locks - outstandingReadLocks.add(new LockOwner(thisThread)); - return true; - } - - /** - * Issue a write lock if there are no outstanding read or write locks. - * Caller of this method must be careful to avoid synchronizing the calling - * code so as to avoid deadlock. 
- * @param waitIfNecessary whether to wait if the lock is not available right away - */ - private boolean writeLock(boolean waitIfNecessary) throws LockException { - Thread thisThread = Thread.currentThread(); - WaitingThread waiter; - synchronized (this) { - if (writeLockedThread == thisThread) { - outstandingWriteLocks++; - return true; - } - if (writeLockedThread == null && grantWriteLock()) { - writeLockedThread = thisThread; - outstandingWriteLocks++; - return true; - } - if (!waitIfNecessary) { - return false; - } - deadlockCheck(); - waiter = new WaitingThread(thisThread, thisThread, this, LockMode.WRITE_LOCK); - addWaitingWrite(waiter); - DeadlockDetection.addResourceWaiter(thisThread, waiter); - } - List deadlockedThreads = null; - LockException exceptionCaught = null; - synchronized (thisThread) { - if (thisThread != writeLockedThread) { - while (thisThread != writeLockedThread && deadlockedThreads == null) { - if (LockOwner.DEBUG) { - final StringBuilder buf = new StringBuilder("Waiting for write: "); - for (int i = 0; i < waitingForWriteLock.size(); i++) { - buf.append(' '); - buf.append((waitingForWriteLock.get(i)).getThread().getName()); - } - LOG.debug(buf.toString()); - debugReadLocks("WAIT"); - } - deadlockedThreads = checkForDeadlock(thisThread); - if (deadlockedThreads == null) { - try { - waiter.doWait(); - } catch (LockException e) { - //Don't throw the exception now, leave the synchronized block and clean up first - exceptionCaught = e; - break; - } - } - } - } - if (deadlockedThreads == null && exceptionCaught == null) { - outstandingWriteLocks++; - } - } - synchronized (this) { - DeadlockDetection.clearResourceWaiter(thisThread); - removeWaitingWrite(waiter); - } - if (exceptionCaught != null) - {throw exceptionCaught;} - if (deadlockedThreads != null) { - for (final WaitingThread wt : deadlockedThreads) { - wt.signalDeadlock(); - } - throw new DeadlockException(); - } - return true; - } - - private void addWaitingWrite(WaitingThread waiter) 
{ - waitingForWriteLock.add(waiter); - } - - private void removeWaitingWrite(WaitingThread waiter) { - for (int i = 0; i < waitingForWriteLock.size(); i++) { - final WaitingThread next = waitingForWriteLock.get(i); - if (next.getThread() == waiter.getThread()) { - waitingForWriteLock.remove(i); - break; - } - } - } - - /** - * @deprecated Use {@link #release(LockMode)} - */ - public void release() { - release(LockMode.READ_LOCK); - } - - @Override - public void release(final LockMode mode) { - switch (mode) { - case NO_LOCK: - LOG.warn("Released with LockMode.NO_LOCK!"); - break; - - case READ_LOCK: - releaseRead(1); - break; - - case WRITE_LOCK: - releaseWrite(1); - break; - - default: - throw new IllegalStateException(); - } - } - - @Override - public void release(final LockMode mode, final int count) { - switch (mode) { - case NO_LOCK: - LOG.warn("Released with LockMode.NO_LOCK and count=" + count + "!"); - break; - - case READ_LOCK: - releaseRead(count); - break; - - case WRITE_LOCK: - releaseWrite(count); - break; - - default: - throw new IllegalStateException(); - } - } - - private synchronized void releaseWrite(int count) { - if (Thread.currentThread() == writeLockedThread) { - if (outstandingWriteLocks > 0) - {outstandingWriteLocks -= count;} - if (outstandingWriteLocks > 0) { - return; - } - //If another thread is waiting for a write lock, we immediately - //pass control to it. No further checks should be required here. - if (grantWriteLockAfterRead()) { - final WaitingThread waiter = waitingForWriteLock.get(0); - removeWaitingWrite(waiter); - DeadlockDetection.clearResourceWaiter(waiter.getThread()); - writeLockedThread = waiter.getThread(); - synchronized (writeLockedThread) { - writeLockedThread.notifyAll(); - } - } else { - writeLockedThread = null; - if (waitingForReadLock > 0) { - //Wake up pending read locks - notifyAll(); - } - } - } else { - LOG.warn("Possible lock problem: a thread released a write lock it didn't hold. 
Either the " + - "thread was interrupted or it never acquired the lock.", new Throwable()); - //TODO : throw exception ? -pb - } - } - - /** - * Threads call this method to relinquish a lock that they previously got - * from this object. - * - * @throws IllegalStateException if called when there are no outstanding locks or there is a - * write lock issued to a different thread. - */ - private synchronized void releaseRead(int count) { - if (!outstandingReadLocks.isEmpty()) { - removeReadLock(count); - if (writeLockedThread == null && grantWriteLockAfterRead()) { - final WaitingThread waiter = waitingForWriteLock.get(0); - removeWaitingWrite(waiter); - DeadlockDetection.clearResourceWaiter(waiter.getThread()); - writeLockedThread = waiter.getThread(); - synchronized (writeLockedThread) { - writeLockedThread.notifyAll(); - } - } - return; - } else { - LOG.warn("Possible lock problem: thread " + Thread.currentThread().getName() + - " released a read lock it didn't hold. Either the " + - "thread was interrupted or it never acquired the lock. " + - "Write lock: " + (writeLockedThread != null ? writeLockedThread.getName() : "null"), - new Throwable()); - if (LockOwner.DEBUG) { - debugReadLocks("ILLEGAL RELEASE"); - } - //TODO : throw exception ? 
-pb - } - } - - @Override - public synchronized boolean isLockedForWrite() { - return writeLockedThread != null || (waitingForWriteLock != null && waitingForWriteLock.size() > 0); - } - - @Override - public synchronized boolean hasLock() { - return !outstandingReadLocks.isEmpty() || isLockedForWrite(); - } - - @Override - public synchronized boolean isLockedForRead(Thread owner) { - for (int i = outstandingReadLocks.size() - 1; i > -1; i--) { - if ((outstandingReadLocks.get(i)).getOwner() == owner) { - return true; - } - } - return false; - } - - private void removeReadLock(int count) { - final Object owner = Thread.currentThread(); - for (int i = outstandingReadLocks.size() - 1; i > -1 && count > 0; i--) { - final LockOwner current = outstandingReadLocks.get(i); - if (current.getOwner() == owner) { - outstandingReadLocks.remove(i); - --count; - } - } - } - - private void deadlockCheck() throws DeadlockException { - for (final LockOwner next : outstandingReadLocks) { - final Lock lock = DeadlockDetection.isWaitingFor(next.getOwner()); - if (lock != null) { - lock.wakeUp(); - } - } - } - - /** - * Detect circular wait on different resources: thread A has a write lock on - * resource R1; thread B has a write lock on resource R2; thread A tries to - * acquire lock on R2; thread B now tries to acquire lock on R1. Solution: - * suspend existing write lock of thread A and grant it to B. - * - * @return true if the write lock should be granted to the current thread - */ - private List checkForDeadlock(Thread waiter) { - final ArrayList waiters = new ArrayList(10); - if (DeadlockDetection.wouldDeadlock(waiter, writeLockedThread, waiters)) { - LOG.warn("Potential deadlock detected on lock " + getId() + "; killing threads: " + waiters.size()); - return waiters.size() > 0 ? 
waiters : null; - } - return null; - } - - /** - * Check if a write lock can be granted, either because there are no - * read locks, the read lock belongs to the current thread and can be - * upgraded or the thread which holds the lock is blocked by another - * lock held by the current thread. - * - * @return true if the write lock can be granted - */ - private boolean grantWriteLock() { - if (outstandingReadLocks.isEmpty()) { - return true; - } - final Thread waiter = Thread.currentThread(); - //Walk through outstanding read locks - for (final LockOwner next : outstandingReadLocks) { - //If the read lock is owned by the current thread, all is OK and we continue - if (next.getOwner() != waiter) { - //Otherwise, check if the lock belongs to a thread which is currently blocked - //by a lock owned by the current thread. if yes, it will be safe to grant the - //write lock: the other thread will be blocked anyway. - if (!DeadlockDetection.isBlockedBy(waiter, next.getOwner())) { - return false; - } - } - } - return true; - } - - /** - * Check if a write lock can be granted, either because there are no - * read locks or the read lock belongs to the current thread and can be - * upgraded. This method is called whenever a lock is released. - * - * @return true if the write lock can be granted - */ - private boolean grantWriteLockAfterRead() { - //Waiting write locks? - if (waitingForWriteLock != null && waitingForWriteLock.size() > 0) { - //Yes, check read locks - final int size = outstandingReadLocks.size(); - if (size > 0) { - //Grant lock if all read locks are held by the write thread - final WaitingThread waiter = waitingForWriteLock.get(0); - return isCompatible(waiter.getThread()); - } - return true; - } - return false; - } - - /** - * Check if the specified thread has a read lock on the resource. 
- * - * @param owner the thread - * @return true if owner has a read lock - */ - private boolean hasReadLock(final Thread owner) { - for (final LockOwner next : outstandingReadLocks) { - if (next.getOwner() == owner) { - return true; - } - } - return false; - } - - public Thread getWriteLockedThread() { - return writeLockedThread; - } - - /** - * Check if the specified thread holds either a write or a read lock - * on the resource. - * - * @param owner the thread - * @return true if owner has a lock - */ - @Override - public boolean hasLock(final Thread owner) { - if (writeLockedThread == owner) { - return true; - } - return hasReadLock(owner); - } - - @Override - public void wakeUp() { - //Nothing to do - } - - /** - * Check if the pending request for a write lock is compatible - * with existing read locks and other write requests. A lock request is - * compatible with another lock request if: (a) it belongs to the same thread, - * (b) it belongs to a different thread, but this thread is also waiting for a write lock. - * - * @param waiting - * @return true if the lock request is compatible with all other requests and the - * lock can be granted. - */ - private boolean isCompatible(final Thread waiting) { - for (final LockOwner next : outstandingReadLocks) { - //If the read lock is owned by the current thread, all is OK and we continue - if (next.getOwner() != waiting) { - //Otherwise, check if the lock belongs to a thread which is currently blocked - //by a lock owned by the current thread. if yes, it will be safe to grant the - //write lock: the other thread will be blocked anyway. 
- if (!DeadlockDetection.isBlockedBy(waiting, next.getOwner())) { - return false; - } - } - } - return true; - } - - @Override - public synchronized LockInfo getLockInfo() { - LockInfo info; - String[] readers = new String[0]; - if (outstandingReadLocks != null) { - readers = new String[outstandingReadLocks.size()]; - for (int i = 0; i < outstandingReadLocks.size(); i++) { - final LockOwner owner = outstandingReadLocks.get(i); - readers[i] = owner.getOwner().getName(); - } - } - if (writeLockedThread != null) { - info = new LockInfo(LockInfo.RESOURCE_LOCK, LockInfo.WRITE_LOCK, getId(), - new String[] {writeLockedThread.getName()}); - info.setReadLocks(readers); - } else { - info = new LockInfo(LockInfo.RESOURCE_LOCK, LockInfo.READ_LOCK, getId(), readers); - } - if (waitingForWriteLock != null) { - final String waitingForWrite[] = new String[waitingForWriteLock.size()]; - for (int i = 0; i < waitingForWriteLock.size(); i++) { - waitingForWrite[i] = waitingForWriteLock.get(i).getThread().getName(); - } - info.setWaitingForWrite(waitingForWrite); - } - return info; - } - - private void debugReadLocks(String msg) { - for (final LockOwner owner : outstandingReadLocks) { - LOG.debug(msg + ": " + owner.getOwner(), owner.getStack()); - } - } - - @Override - public void debug(final PrintStream out) { - getLockInfo().debug(out); - } -} \ No newline at end of file diff --git a/src/org/exist/storage/lock/ReentrantReadWriteLock.java b/src/org/exist/storage/lock/ReentrantReadWriteLock.java deleted file mode 100644 index 9be6e928331..00000000000 --- a/src/org/exist/storage/lock/ReentrantReadWriteLock.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2005-2007 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or 
(at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * - * - * File: ReentrantLock.java - * - * Originally written by Doug Lea and released into the public domain. - * This may be used for any purposes whatsoever without acknowledgment. - * Thanks for the assistance and support of Sun Microsystems Labs, - * and everyone contributing, testing, and using this code. - * - * $Id$ - * -*/ -package org.exist.storage.lock; - -import java.io.PrintStream; -import java.util.ArrayDeque; -import java.util.Deque; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.exist.util.LockException; - -/** - * A lock with the same semantics as builtin - * Java synchronized locks: Once a thread has a lock, it - * can re-obtain it any number of times without blocking. - * The lock is made available to other threads when - * as many releases as acquires have occurred. - * - * The lock has a timeout: a read lock will be released if the - * timeout is reached. 
-*/ - -public class ReentrantReadWriteLock implements Lock { - - private static final int WAIT_CHECK_PERIOD = 200; - - private static class SuspendedWaiter { - final Thread thread; - final LockMode lockMode; - final int lockCount; - - public SuspendedWaiter(final Thread thread, final LockMode lockMode, final int lockCount) { - this.thread = thread; - this.lockMode = lockMode; - this.lockCount = lockCount; - } - } - - private final static Logger LOG = LogManager.getLogger(ReentrantReadWriteLock.class); - - private final Object id_; - private Thread owner_ = null; - private final Deque suspendedThreads = new ArrayDeque<>(); - - private int holds_ = 0; - private LockMode mode_ = LockMode.NO_LOCK; - private final Deque modeStack = new ArrayDeque<>(); - private int writeLocks = 0; - private LockListener listener = null; - - private final boolean DEBUG = false; - private final Deque seStack; - - public ReentrantReadWriteLock(final Object id) { - this.id_ = id; - if (DEBUG) { - seStack = new ArrayDeque<>(); - } else { - seStack = null; - } - } - - @Override - public String getId() { - return id_.toString(); - } - - @Override - public boolean acquire() throws LockException { - return acquire(LockMode.READ_LOCK); - } - - @Override - public boolean acquire(final LockMode mode) throws LockException { - if (mode == LockMode.NO_LOCK) { - LOG.warn("Acquired with LockMode.NO_LOCK!"); - return true; - } - - if (Thread.interrupted()) { - throw new LockException(); - } - - final Thread caller = Thread.currentThread(); - synchronized (this) { - WaitingThread waitingOnResource; - if (caller == owner_) { - ++holds_; - modeStack.push(mode); - if (mode == LockMode.WRITE_LOCK) { - writeLocks++; - } - if (DEBUG) { - final Throwable t = new Throwable(); - seStack.push(t.getStackTrace()); - } - mode_ = mode; - return true; - } else if (owner_ == null) { - owner_ = caller; - holds_ = 1; - modeStack.push(mode); - if (mode== LockMode.WRITE_LOCK) { - writeLocks++; - } - if (DEBUG) { - final 
Throwable t = new Throwable(); - seStack.push(t.getStackTrace()); - } - mode_ = mode; - return true; - } else if ((waitingOnResource = - DeadlockDetection.deadlockCheckResource(caller, owner_)) != null) { - waitingOnResource.suspendWaiting(); - final SuspendedWaiter suspended = new SuspendedWaiter(owner_, mode_, holds_); - suspendedThreads.push(suspended); - owner_ = caller; - holds_ = 1; - modeStack.push(mode); - if (mode== LockMode.WRITE_LOCK) { - writeLocks++; - } - mode_ = mode; - listener = waitingOnResource; - return true; - } else { - DeadlockDetection.addCollectionWaiter(caller, this); - try { - for (;;) { - wait(WAIT_CHECK_PERIOD); - if ((waitingOnResource = DeadlockDetection.deadlockCheckResource(caller, owner_)) != null) { - waitingOnResource.suspendWaiting(); - final SuspendedWaiter suspended = new SuspendedWaiter(owner_, mode_, holds_); - suspendedThreads.push(suspended); - owner_ = caller; - holds_ = 1; - modeStack.push(mode); - if (mode== LockMode.WRITE_LOCK) { - writeLocks++; - } - mode_ = mode; - listener = waitingOnResource; - DeadlockDetection.clearCollectionWaiter(owner_); - return true; - } else if (caller == owner_) { - ++holds_; - modeStack.push(mode); - if (mode == LockMode.WRITE_LOCK) { - writeLocks++; - } - if (DEBUG) { - final Throwable t = new Throwable(); - seStack.push(t.getStackTrace()); - } - mode_ = mode; - DeadlockDetection.clearCollectionWaiter(owner_); - return true; - } else if (owner_ == null) { - owner_ = caller; - holds_ = 1; - modeStack.push(mode); - if (mode == LockMode.WRITE_LOCK) { - writeLocks++; - } - if (DEBUG) { - final Throwable t = new Throwable(); - seStack.push(t.getStackTrace()); - } - mode_ = mode; - DeadlockDetection.clearCollectionWaiter(owner_); - return true; - } - } - } catch (final InterruptedException ex) { - notify(); - throw new LockException("Interrupted while waiting for lock"); - } - } - } - } - - @Override - public synchronized void wakeUp() { - notifyAll(); - } - - @Override - public boolean 
attempt(final LockMode mode) { - if (mode == LockMode.NO_LOCK) { - LOG.warn("Attempted acquire with LockMode.NO_LOCK!"); - return true; - } - - final Thread caller = Thread.currentThread(); - synchronized (this) { - if (caller == owner_) { - ++holds_; - modeStack.push(mode); - if (mode == LockMode.WRITE_LOCK) { - writeLocks++; - } - if (DEBUG) { - final Throwable t = new Throwable(); - seStack.push(t.getStackTrace()); - } - mode_ = mode; - return true; - } else if (owner_ == null) { - owner_ = caller; - holds_ = 1; - modeStack.push(mode); - if (mode == LockMode.WRITE_LOCK) { - writeLocks++; - } - if (DEBUG) { - final Throwable t = new Throwable(); - seStack.push(t.getStackTrace()); - } - mode_ = mode; - return true; - } else { - return false; - } - } - } - - @Override - public synchronized boolean isLockedForWrite() { - return writeLocks > 0; - } - - @Override - public boolean isLockedForRead(final Thread owner) { - // always returns false for this lock - return false; - } - - @Override - public synchronized boolean hasLock() { - return holds_ > 0; - } - - @Override - public boolean hasLock(final Thread owner) { - return this.owner_ == owner; - } - - public Thread getOwner() { - return this.owner_; - } - - @Override - public synchronized void release(final LockMode mode) { - if(mode == LockMode.NO_LOCK) { - LOG.warn("Released with LockMode.NO_LOCK!"); - return; - } - - if (Thread.currentThread() != owner_) { - - if(LOG.isDebugEnabled()){ - LOG.warn("Possible lock problem: thread " + Thread.currentThread() + - " Released a lock on " + getId() + " it didn't hold." + - " Either the thread was interrupted or it never acquired the lock." 
+ - " The lock was owned by: " + owner_); - } - - if (DEBUG) { - LOG.debug("Lock was acquired by :"); - while (!seStack.isEmpty()) { - StackTraceElement[] se = seStack.pop(); - LOG.debug(se); - se = null; - } - } - return; - } - LockMode top = modeStack.pop(); - mode_ = top; - top = null; - if (mode_ != mode) { - LOG.warn("Released lock of different type. Expected " + mode_ + - " got " + mode, new Throwable()); - } - if (mode_ == LockMode.WRITE_LOCK) { - writeLocks--; - } - if (DEBUG) { - seStack.pop(); - } - if (--holds_ == 0) { - if (!suspendedThreads.isEmpty()) { - final SuspendedWaiter suspended = suspendedThreads.pop(); - owner_ = suspended.thread; - mode_ = suspended.lockMode; - holds_ = suspended.lockCount; - } else { - owner_ = null; - mode_ = LockMode.NO_LOCK; - notify(); - } - } - if (listener != null) { - listener.lockReleased(); - listener = null; - } - } - - @Override - public void release(final LockMode mode, final int count) { - throw new UnsupportedOperationException(getClass().getName() + - " does not support releasing multiple locks"); - } - - /** - * Return the number of unreleased acquires performed - * by the current thread. - * Returns zero if current thread does not hold lock. - **/ - public synchronized long holds() { - if (Thread.currentThread() != owner_) { - return 0; - } - return holds_; - } - - @Override - public synchronized LockInfo getLockInfo() { - final String lockType = mode_ == LockMode.WRITE_LOCK ? 
LockInfo.WRITE_LOCK : LockInfo.READ_LOCK; - return new LockInfo(LockInfo.COLLECTION_LOCK, lockType, getId(), - new String[] { (owner_==null)?"":owner_.getName() }); - } - - @Override - public void debug(final PrintStream out) { - getLockInfo().debug(out); - } -} diff --git a/src/org/exist/storage/lock/WaitingThread.java b/src/org/exist/storage/lock/WaitingThread.java deleted file mode 100644 index 85027344bb0..00000000000 --- a/src/org/exist/storage/lock/WaitingThread.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * eXist Open Source Native XML Database - * Copyright (C) 2007 The eXist Project - * http://exist-db.org - * - * This program is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public License - * as published by the Free Software Foundation; either version 2 - * of the License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software Foundation - * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * - * $Id$ - */ -package org.exist.storage.lock; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.exist.storage.lock.Lock.LockMode; -import org.exist.util.LockException; -import org.exist.util.DeadlockException; - -/** - * Wraps around a thread in order to be able to suspend it completely while it is waiting - * for a lock. 
- */ -public class WaitingThread implements LockListener { - private static final Logger LOG = LogManager.getLogger(WaitingThread.class); - - private final Thread thread; - private final Object monitor; - private final MultiReadReentrantLock lock; - private final LockMode lockMode; - - private boolean suspended = false; - private boolean deadlocked = false; - - public WaitingThread(final Thread thread, final Object monitor, final MultiReadReentrantLock lock, final LockMode lockMode) { - this.monitor = monitor; - this.lock = lock; - this.thread = thread; - this.lockMode = lockMode; - } - - /** - * Start waiting on the monitor object. Continue waiting if the thread wakes up - * and suspended is set to true. Only stop waiting if suspended is false. - * - * @throws LockException - */ - public void doWait() throws LockException { - do { - synchronized (monitor) { - try { - monitor.wait(500); - } catch (final InterruptedException e) { - throw new LockException("Interrupted while waiting for read lock"); - } - } - if (deadlocked) { - LOG.warn("Deadlock detected: cancelling wait..."); - throw new DeadlockException(); - } - } while (suspended); - } - - public void signalDeadlock() { - deadlocked = true; - synchronized (monitor) { - monitor.notify(); - } - } - - /** - * Put the thread into suspended mode, i.e. keep it asleep even if - * a notify causes it to wake up temporarily. - */ - public void suspendWaiting() { - suspended = true; - } - - /** - * Wake the thread from suspended mode. 
- */ - @Override - public void lockReleased() { -// LOG.debug("Reactivate suspended lock: " + thread.getName()); - suspended = false; - synchronized (monitor) { - monitor.notify(); - } - } - - public boolean isSuspended() { - return suspended; - } - - public Thread getThread() { - return thread; - } - - public Lock getLock() { - return lock; - } - - public LockMode getLockMode() { - return lockMode; - } - - @Override - public boolean equals(final Object obj) { - if(obj == null || !(obj instanceof WaitingThread)) { - return false; - } - - return thread == ((WaitingThread)obj).getThread(); - } -} diff --git a/src/org/exist/storage/md/MetaData.java b/src/org/exist/storage/md/MetaData.java index 29d5900f6e6..487be4f509e 100644 --- a/src/org/exist/storage/md/MetaData.java +++ b/src/org/exist/storage/md/MetaData.java @@ -31,6 +31,8 @@ import org.exist.xmldb.XmldbURI; import org.w3c.dom.Document; +import javax.annotation.Nullable; + /** * @author Dmitriy Shabanov * @@ -45,8 +47,8 @@ public static MetaData get() { protected final static Logger LOG = LogManager.getLogger(MetaData.class); - public abstract DocumentImpl getDocument(String uuid) throws EXistException, PermissionDeniedException; - public abstract Collection getCollection(String uuid) throws EXistException, PermissionDeniedException; + @Nullable public abstract DocumentImpl getDocument(String uuid) throws EXistException, PermissionDeniedException; + @Nullable public abstract Collection getCollection(String uuid) throws EXistException, PermissionDeniedException; public abstract List matchDocuments(String key, String value) throws EXistException, PermissionDeniedException; public abstract List matchDocumentsByKey(String key) throws EXistException, PermissionDeniedException; diff --git a/src/org/exist/storage/report/XMLStatistics.java b/src/org/exist/storage/report/XMLStatistics.java index df0ad788129..ca7cfdd3f5c 100644 --- a/src/org/exist/storage/report/XMLStatistics.java +++ 
b/src/org/exist/storage/report/XMLStatistics.java @@ -20,12 +20,11 @@ package org.exist.storage.report; import java.nio.file.Path; -import java.util.Iterator; import java.util.Optional; +import org.exist.collections.CollectionCache; import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; -import org.exist.storage.CollectionCacheManager; import org.exist.storage.NativeValueIndex; import org.exist.storage.dom.DOMFile; import org.exist.storage.index.BFile; @@ -77,7 +76,7 @@ private void getInstanceStatus(final AttributesImpl atts, final BrokerPool insta addValue("data-directory", ((Path)instance.getConfiguration().getProperty(BrokerPool.PROPERTY_DATA_DIR)).toAbsolutePath().toString()); addValue("cache-size", String.valueOf(instance.getConfiguration().getInteger("db-connection.cache-size"))); addValue("page-size", String.valueOf(instance.getConfiguration().getInteger("db-connection.page-size"))); - addValue("collection-cache-mem", String.valueOf(instance.getConfiguration().getInteger(CollectionCacheManager.PROPERTY_CACHE_SIZE_BYTES))); + addValue("collection-cache-mem", String.valueOf(instance.getConfiguration().getInteger(CollectionCache.PROPERTY_CACHE_SIZE_BYTES))); this.contentHandler.startElement(NAMESPACE, "pool", PREFIX + ":pool", atts); addValue("max", String.valueOf(instance.getMax())); addValue("active", String.valueOf(instance.countActiveBrokers())); diff --git a/src/org/exist/storage/serializers/AbstractChainOfReceivers.java b/src/org/exist/storage/serializers/AbstractChainOfReceivers.java index cc984c55a01..3fcc06476f5 100644 --- a/src/org/exist/storage/serializers/AbstractChainOfReceivers.java +++ b/src/org/exist/storage/serializers/AbstractChainOfReceivers.java @@ -49,7 +49,8 @@ public Receiver getNextInChain() { @Override public Receiver getLastInChain() { - Receiver last = this, next = getNextInChain(); + Receiver last = this; + Receiver next = getNextInChain(); while (next != null) { last = next; next = 
((ChainOfReceivers)next).getNextInChain(); diff --git a/src/org/exist/storage/serializers/Serializer.java b/src/org/exist/storage/serializers/Serializer.java index 80fc021f5d8..31b2c4214bb 100644 --- a/src/org/exist/storage/serializers/Serializer.java +++ b/src/org/exist/storage/serializers/Serializer.java @@ -192,36 +192,9 @@ public abstract class Serializer implements XMLReader { protected LexicalHandler lexicalHandler = null; protected Subject user = null; - protected HttpContext httpContext = null; - - public static class HttpContext - { - private RequestWrapper request = null; - private ResponseWrapper response = null; - private SessionWrapper session = null; - - public RequestWrapper getRequest() { - return request; - } - public void setRequest(RequestWrapper request) { - this.request = request; - } - public ResponseWrapper getResponse() { - return response; - } - public void setResponse(ResponseWrapper response) { - this.response = response; - } - public SessionWrapper getSession() { - return session; - } - public void setSession(SessionWrapper session) { - this.session = session; - } - } + protected XQueryContext.HttpContext httpContext = null; - public void setHttpContext(HttpContext httpContext) - { + public void setHttpContext(final XQueryContext.HttpContext httpContext) { this.httpContext = httpContext; } diff --git a/src/org/exist/storage/serializers/XIncludeFilter.java b/src/org/exist/storage/serializers/XIncludeFilter.java index f15d44b80a4..76b3816cb28 100644 --- a/src/org/exist/storage/serializers/XIncludeFilter.java +++ b/src/org/exist/storage/serializers/XIncludeFilter.java @@ -404,6 +404,7 @@ protected Optional processXInclude(final String href, String xpoi CompiledXQuery compiled = pool.borrowCompiledXQuery(serializer.broker, source); if (compiled != null) { context = compiled.getContext(); + context.prepareForReuse(); } else { context = new XQueryContext(serializer.broker.getBrokerPool()); } @@ -412,17 +413,7 @@ protected Optional 
processXInclude(final String href, String xpoi //setup the http context if known if (serializer.httpContext != null) { - if (serializer.httpContext.getRequest() != null) { - context.declareVariable(RequestModule.PREFIX + ":request", serializer.httpContext.getRequest()); - } - - if (serializer.httpContext.getResponse() != null) { - context.declareVariable(ResponseModule.PREFIX + ":response", serializer.httpContext.getResponse()); - } - - if (serializer.httpContext.getSession() != null) { - context.declareVariable(SessionModule.PREFIX + ":session", serializer.httpContext.getSession()); - } + context.setHttpContext(serializer.httpContext); } //TODO: change these to putting the XmldbURI in, but we need to warn users! diff --git a/src/org/exist/storage/statistics/DataGuide.java b/src/org/exist/storage/statistics/DataGuide.java index 4219190c9ee..b32559e0845 100644 --- a/src/org/exist/storage/statistics/DataGuide.java +++ b/src/org/exist/storage/statistics/DataGuide.java @@ -54,9 +54,6 @@ public class DataGuide { // the (virtual) root of the tree whose name will always be null. 
private NodeStats root = new NodeStatsRoot(); - public DataGuide() { - } - public int getSize() { return root.getSize(); } diff --git a/src/org/exist/storage/statistics/IndexStatistics.java b/src/org/exist/storage/statistics/IndexStatistics.java index 1acb920f125..7ee0b473511 100644 --- a/src/org/exist/storage/statistics/IndexStatistics.java +++ b/src/org/exist/storage/statistics/IndexStatistics.java @@ -59,9 +59,6 @@ public class IndexStatistics extends AbstractIndex implements RawBackupSupport { private Path dataFile; private DataGuide dataGuide = new DataGuide(); - - public IndexStatistics() { - } public String getIndexId() { return ID; diff --git a/src/org/exist/storage/statistics/IndexStatisticsWorker.java b/src/org/exist/storage/statistics/IndexStatisticsWorker.java index 29cc9956313..e37c651ea4b 100644 --- a/src/org/exist/storage/statistics/IndexStatisticsWorker.java +++ b/src/org/exist/storage/statistics/IndexStatisticsWorker.java @@ -1,6 +1,6 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2001-2015 The eXist Project + * Copyright (C) 2001-2018 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or @@ -33,6 +33,7 @@ import org.exist.indexing.MatchListener; import org.exist.indexing.StreamListener; import org.exist.indexing.StreamListener.ReindexMode; +import org.exist.numbering.NodeId; import org.exist.stax.ExtendedXMLStreamReader; import org.exist.storage.DBBroker; import org.exist.storage.NativeBroker; @@ -42,7 +43,6 @@ import org.exist.storage.index.CollectionStore; import org.exist.storage.io.VariableByteInput; import org.exist.storage.txn.Txn; -import org.exist.util.DatabaseConfigurationException; import org.exist.util.Occurrences; import org.exist.xquery.QueryRewriter; import org.exist.xquery.TerminatedException; @@ -52,61 +52,64 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import java.io.IOException; +import java.util.ArrayDeque; +import 
java.util.Deque; import java.util.Map; -import java.util.Stack; /** */ public class IndexStatisticsWorker implements IndexWorker { - - private IndexStatistics index; + private final IndexStatistics index; + private final StatisticsListener listener = new StatisticsListener(); private DataGuide perDocGuide = null; - private ReindexMode mode = ReindexMode.STORE; private DocumentImpl currentDoc = null; - private StatisticsListener listener = new StatisticsListener(); - - public IndexStatisticsWorker(IndexStatistics index) { + public IndexStatisticsWorker(final IndexStatistics index) { this.index = index; } + @Override public String getIndexId() { return index.getIndexId(); } + @Override public String getIndexName() { return index.getIndexName(); } @Override - public QueryRewriter getQueryRewriter(XQueryContext context) { + public QueryRewriter getQueryRewriter(final XQueryContext context) { return null; } - public Object configure(IndexController controller, NodeList configNodes, Map namespaces) throws DatabaseConfigurationException { + @Override + public Object configure(final IndexController controller, final NodeList configNodes, + final Map namespaces) { return null; } @Override - public void setDocument(DocumentImpl doc) { + public void setDocument(final DocumentImpl doc) { setDocument(doc, ReindexMode.UNKNOWN); } @Override - public void setDocument(DocumentImpl doc, ReindexMode mode) { - perDocGuide = new DataGuide(); + public void setDocument(final DocumentImpl doc, final ReindexMode mode) { + this.perDocGuide = new DataGuide(); this.currentDoc = doc; this.mode = mode; } @Override - public void setMode(ReindexMode mode) { + public void setMode(final ReindexMode mode) { perDocGuide = new DataGuide(); this.mode = mode; } + @Override public DocumentImpl getDocument() { return currentDoc; } @@ -116,21 +119,26 @@ public ReindexMode getMode() { return mode; } - public IStoredNode getReindexRoot(IStoredNode node, NodePath path, boolean insert, boolean includeSelf) { + 
@Override + public IStoredNode getReindexRoot(final IStoredNode node, final NodePath path, + final boolean insert, final boolean includeSelf) { return null; } @Override public StreamListener getListener() { - if (mode == ReindexMode.STORE) - {return listener;} + if (mode == ReindexMode.STORE) { + return listener; + } return null; } - public MatchListener getMatchListener(DBBroker broker, NodeProxy proxy) { + @Override + public MatchListener getMatchListener(final DBBroker broker, final NodeProxy proxy) { return null; } + @Override public void flush() { if (perDocGuide != null) { index.mergeStats(perDocGuide); @@ -139,7 +147,7 @@ public void flush() { perDocGuide = new DataGuide(); } - public void updateIndex(DBBroker broker) { + public void updateIndex(final DBBroker broker) { perDocGuide = new DataGuide(); final DocumentCallback cb = new DocumentCallback(broker); try { @@ -150,60 +158,71 @@ public void updateIndex(DBBroker broker) { index.updateStats(perDocGuide); } - private void updateDocument(DBBroker broker, DocumentImpl doc) { + private void updateDocument(final DBBroker broker, final DocumentImpl doc) { final ElementImpl root = (ElementImpl) doc.getDocumentElement(); + final int rootLevel = root.getNodeId().getTreeLevel(); try { final NodePath path = new NodePath(); - final Stack stack = new Stack(); - QName qname; + final Deque stack = new ArrayDeque<>(); final ExtendedXMLStreamReader reader = broker.getXMLStreamReader(root, false); while (reader.hasNext()) { final int status = reader.next(); + switch (status) { + case XMLStreamReader.START_ELEMENT: - for (int i = 0; i < stack.size(); i++) { - final NodeStats next = stack.elementAt(i); + for (final NodeStats next : stack) { next.incDepth(); } - qname = reader.getQName(); + final QName qname = reader.getQName(); path.addComponent(qname); final NodeStats nodeStats = perDocGuide.add(path); stack.push(nodeStats); break; + case XMLStreamReader.END_ELEMENT: path.removeLastComponent(); final NodeStats stats = 
stack.pop(); stats.updateMaxDepth(); + + final NodeId otherId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int otherLevel = otherId.getTreeLevel(); + if (otherLevel == rootLevel) { + // finished `root element... + break; // exit-while + } break; } } - } catch (final IOException e) { - e.printStackTrace(); - } catch (final XMLStreamException e) { + } catch (final IOException | XMLStreamException e) { e.printStackTrace(); } } - public void removeCollection(Collection collection, DBBroker broker, boolean reindex) { + @Override + public void removeCollection(final Collection collection, final DBBroker broker, final boolean reindex) { + //no-op } - public boolean checkIndex(DBBroker broker) { + @Override + public boolean checkIndex(final DBBroker broker) { return false; } - public Occurrences[] scanIndex(XQueryContext context, DocumentSet docs, NodeSet contextSet, Map hints) { + @Override + public Occurrences[] scanIndex(final XQueryContext context, final DocumentSet docs, final NodeSet contextSet, + final Map hints) { return new Occurrences[0]; } private class StatisticsListener extends AbstractStreamListener { + private final Deque stack = new ArrayDeque<>(); - private Stack stack = new Stack(); - - public void startElement(Txn transaction, ElementImpl element, NodePath path) { + @Override + public void startElement(final Txn transaction, final ElementImpl element, final NodePath path) { super.startElement(transaction, element, path); if (perDocGuide != null) { - for (int i = 0; i < stack.size(); i++) { - final NodeStats next = stack.elementAt(i); + for (final NodeStats next : stack) { next.incDepth(); } final NodeStats nodeStats = perDocGuide.add(path); @@ -211,35 +230,36 @@ public void startElement(Txn transaction, ElementImpl element, NodePath path) { } } - public void endElement(Txn transaction, ElementImpl element, NodePath path) { + @Override + public void endElement(final Txn transaction, final ElementImpl element, final 
NodePath path) { super.endElement(transaction, element, path); if (perDocGuide != null) { - final NodeStats stats = (NodeStats) stack.pop(); + final NodeStats stats = stack.pop(); stats.updateMaxDepth(); } } + @Override public IndexWorker getWorker() { return IndexStatisticsWorker.this; } } private class DocumentCallback implements BTreeCallback { + private final DBBroker broker; - private DBBroker broker; - - private DocumentCallback(DBBroker broker) { + private DocumentCallback(final DBBroker broker) { this.broker = broker; } - public boolean indexInfo(Value key, long pointer) throws TerminatedException { - final CollectionStore store = (CollectionStore) ((NativeBroker)broker).getStorage(NativeBroker.COLLECTIONS_DBX_ID); + @Override + public boolean indexInfo(final Value key, final long pointer) throws TerminatedException { + final CollectionStore store = (CollectionStore) ((NativeBroker) broker).getStorage(NativeBroker.COLLECTIONS_DBX_ID); try { final byte type = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE]; final VariableByteInput istream = store.getAsStream(pointer); - DocumentImpl doc = null; if (type == DocumentImpl.XML_FILE) { - doc = new DocumentImpl(broker.getBrokerPool()); + final DocumentImpl doc = new DocumentImpl(broker.getBrokerPool()); doc.read(istream); updateDocument(broker, doc); } diff --git a/src/org/exist/storage/structural/NativeStructuralIndex.java b/src/org/exist/storage/structural/NativeStructuralIndex.java index 1c5e2adbc50..587824cd9c5 100644 --- a/src/org/exist/storage/structural/NativeStructuralIndex.java +++ b/src/org/exist/storage/structural/NativeStructuralIndex.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.file.Path; +import java.util.concurrent.locks.ReentrantLock; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -34,8 +35,8 @@ import org.exist.storage.DBBroker; import 
org.exist.storage.btree.DBException; import org.exist.storage.index.BTreeStore; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedLock; import org.exist.util.DatabaseConfigurationException; import org.exist.util.FileUtils; import org.exist.util.LockException; @@ -53,6 +54,7 @@ public class NativeStructuralIndex extends AbstractIndex implements RawBackupSup /** The datastore for this node index */ protected BTreeStore btree; + protected LockManager lockManager; protected SymbolTable symbols; public NativeStructuralIndex() { @@ -62,6 +64,7 @@ public NativeStructuralIndex() { @Override public void configure(BrokerPool pool, Path dataDir, Element config) throws DatabaseConfigurationException { super.configure(pool, dataDir, config); + lockManager = pool.getLockManager(); symbols = pool.getSymbols(); } @@ -86,11 +89,10 @@ public void close() throws DBException { @Override public void sync() throws DBException { - if (btree == null) - {return;} - final Lock lock = btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + if (btree == null) { + return; + } + try(final ManagedLock bfileLock = lockManager.acquireBtreeWriteLock(btree.getLockName())) { btree.flush(); } catch (final LockException e) { LOG.warn("Failed to acquire lock for '" + FileUtils.fileName(btree.getFile()) + "'", e); @@ -98,8 +100,6 @@ public void sync() throws DBException { } catch (final DBException e) { LOG.error(e.getMessage(), e); //TODO : throw an exception ? 
-pb - } finally { - lock.release(LockMode.WRITE_LOCK); } } diff --git a/src/org/exist/storage/structural/NativeStructuralIndexWorker.java b/src/org/exist/storage/structural/NativeStructuralIndexWorker.java index b0f6fc0caa8..c47f1044a65 100644 --- a/src/org/exist/storage/structural/NativeStructuralIndexWorker.java +++ b/src/org/exist/storage/structural/NativeStructuralIndexWorker.java @@ -42,8 +42,8 @@ import org.exist.storage.btree.BTreeCallback; import org.exist.storage.btree.IndexQuery; import org.exist.storage.btree.Value; -import org.exist.storage.lock.Lock; -import org.exist.storage.lock.Lock.LockMode; + +import org.exist.storage.lock.ManagedLock; import org.exist.storage.txn.Txn; import org.exist.util.ByteConversion; import org.exist.util.DatabaseConfigurationException; @@ -55,6 +55,8 @@ import org.w3c.dom.NodeList; import java.util.*; +import java.util.concurrent.locks.ReentrantLock; + import org.exist.security.PermissionDeniedException; /** @@ -105,7 +107,6 @@ public NodeSet findElementsByTagName(byte type, DocumentSet docs, QName qname, N } public NodeSet findElementsByTagName(byte type, DocumentSet docs, QName qname, NodeSelector selector, Expression parent) { - final Lock lock = index.btree.getLock(); final NewArrayNodeSet result = new NewArrayNodeSet(); final FindElementsCallback callback = new FindElementsCallback(type, qname, result, docs, selector, parent); @@ -114,8 +115,8 @@ public NodeSet findElementsByTagName(byte type, DocumentSet docs, QName qname, N final byte[] fromKey = computeKey(type, qname, range.start); final byte[] toKey = computeKey(type, qname, range.end + 1); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); - try { - lock.acquire(LockMode.READ_LOCK); + + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeReadLock(index.btree.getLockName())) { index.btree.query(query, callback); } catch (final LockException e) { NativeStructuralIndex.LOG.warn("Lock problem while searching 
structural index: " + e.getMessage(), e); @@ -123,8 +124,6 @@ public NodeSet findElementsByTagName(byte type, DocumentSet docs, QName qname, N NativeStructuralIndex.LOG.warn("Query was terminated while searching structural index: " + e.getMessage(), e); } catch (final Exception e) { NativeStructuralIndex.LOG.error("Error while searching structural index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } return result; @@ -183,16 +182,15 @@ public NodeSet findDescendantsByTagName(byte type, QName qname, int axis, Docume } public NodeSet findDescendantsByTagName(byte type, QName qname, int axis, DocumentSet docs, NodeSet contextSet, int contextId, Expression parent) { - final Lock lock = index.btree.getLock(); final NewArrayNodeSet result = new NewArrayNodeSet(); final FindDescendantsCallback callback = new FindDescendantsCallback(type, axis, qname, contextId, result, parent); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeReadLock(index.btree.getLockName())) { for (final NodeProxy ancestor : contextSet) { final DocumentImpl doc = ancestor.getOwnerDocument(); final NodeId ancestorId = ancestor.getNodeId(); callback.setAncestor(doc, ancestor); - byte[] fromKey, toKey; + final byte[] fromKey; + final byte[] toKey; if (ancestorId == NodeId.DOCUMENT_NODE) { fromKey = computeKey(type, qname, doc.getDocId()); toKey = computeKey(type, qname, doc.getDocId() + 1); @@ -209,8 +207,6 @@ public NodeSet findDescendantsByTagName(byte type, QName qname, int axis, Docume } } catch (final LockException e) { NativeStructuralIndex.LOG.warn("Lock problem while searching structural index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } result.updateNoSort(); return result; @@ -218,10 +214,8 @@ public NodeSet findDescendantsByTagName(byte type, QName qname, int axis, Docume public NodeSet findAncestorsByTagName(byte type, QName qname, int axis, DocumentSet docs, NodeSet 
contextSet, int contextId) { - final Lock lock = index.btree.getLock(); final NewArrayNodeSet result = new NewArrayNodeSet(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeReadLock(index.btree.getLockName())) { for (final NodeProxy descendant : contextSet) { NodeId parentId; if (axis == Constants.ANCESTOR_SELF_AXIS || axis == Constants.SELF_AXIS) @@ -254,8 +248,6 @@ public NodeSet findAncestorsByTagName(byte type, QName qname, int axis, Document NativeStructuralIndex.LOG.warn("Lock problem while searching structural index: " + e.getMessage(), e); } catch (final Exception e) { NativeStructuralIndex.LOG.error("Error while searching structural index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } result.sort(true); return result; @@ -263,15 +255,13 @@ public NodeSet findAncestorsByTagName(byte type, QName qname, int axis, Document public NodeSet scanByType(byte type, int axis, NodeTest test, boolean useSelfAsContext, DocumentSet docs, NodeSet contextSet, int contextId) { - final Lock lock = index.btree.getLock(); final NewArrayNodeSet result = new NewArrayNodeSet(); final FindDescendantsCallback callback = new FindDescendantsCallback(type, axis, null, contextId, useSelfAsContext, result, null); for (final NodeProxy ancestor : contextSet) { final DocumentImpl doc = ancestor.getOwnerDocument(); final NodeId ancestorId = ancestor.getNodeId(); final List qnames = getQNamesForDoc(doc); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeReadLock(index.btree.getLockName())) { for (final QName qname : qnames) { if (test.getName() == null || test.matches(qname)) { callback.setAncestor(doc, ancestor); @@ -293,8 +283,6 @@ public NodeSet scanByType(byte type, int axis, NodeTest test, boolean useSelfAsC } } catch (final LockException e) { NativeStructuralIndex.LOG.warn("Lock problem while searching structural index: " + 
e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } // result.updateNoSort(); @@ -487,15 +475,14 @@ public void flush() { } protected void removeSome() { - if (pending.size() == 0) - {return;} + if (pending.size() == 0) { + return; + } try { - final Lock lock = index.btree.getLock(); for (final Map.Entry> entry: pending.entrySet()) { final QName qname = entry.getKey(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { final List nodes = entry.getValue(); for (final NodeProxy proxy : nodes) { final NodeId nodeId = proxy.getNodeId(); @@ -506,8 +493,6 @@ protected void removeSome() { NativeStructuralIndex.LOG.warn("Failed to lock structural index: " + e.getMessage(), e); } catch (final Exception e) { NativeStructuralIndex.LOG.warn("Exception caught while writing to structural index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } } finally { @@ -523,17 +508,13 @@ protected void removeDocument(DocumentImpl docToRemove) { final byte[] fromKey = computeKey(qname.getNameType(), qname, docToRemove.getDocId()); final byte[] toKey = computeKey(qname.getNameType(), qname, docToRemove.getDocId() + 1); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { index.btree.remove(query, null); } catch (final LockException e) { NativeStructuralIndex.LOG.warn("Failed to lock structural index: " + e.getMessage(), e); } catch (final Exception e) { NativeStructuralIndex.LOG.warn("Exception caught while removing structural index for document " + docToRemove.getURI() + ": " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } removeQNamesForDoc(docToRemove); @@ -543,17 +524,13 @@ 
protected void removeQNamesForDoc(DocumentImpl doc) { final byte[] fromKey = computeDocKey(doc.getDocId()); final byte[] toKey = computeDocKey(doc.getDocId() + 1); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { index.btree.remove(query, null); } catch (final LockException e) { NativeStructuralIndex.LOG.warn("Failed to lock structural index: " + e.getMessage(), e); } catch (final Exception e) { NativeStructuralIndex.LOG.warn("Exception caught while reading structural index for document " + doc.getURI() + ": " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } @@ -564,9 +541,7 @@ protected List getQNamesForDoc(DocumentImpl doc) { final byte[] fromKey = computeDocKey(doc.getDocId()); final byte[] toKey = computeDocKey(doc.getDocId() + 1); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { index.btree.query(query, new BTreeCallback() { public boolean indexInfo(Value value, long pointer) throws TerminatedException { final QName qname = readQName(value.getData()); @@ -579,8 +554,6 @@ public boolean indexInfo(Value value, long pointer) throws TerminatedException { } catch (final Exception e) { NativeStructuralIndex.LOG.warn("Exception caught while reading structural index for document " + doc.getURI() + ": " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } return qnames; } @@ -627,9 +600,7 @@ public Occurrences[] scanIndex(XQueryContext context, DocumentSet docs, NodeSet final byte[] toKey = computeKey(qname.getNameType(), qname, doc.getDocId() + 
1); final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)); - final Lock lock = index.btree.getLock(); - try { - lock.acquire(LockMode.READ_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeReadLock(index.btree.getLockName())) { index.btree.query(query, new BTreeCallback() { public boolean indexInfo(Value value, long pointer) throws TerminatedException { Occurrences oc = occurrences.get(name); @@ -650,8 +621,6 @@ public boolean indexInfo(Value value, long pointer) throws TerminatedException { } catch (final Exception e) { NativeStructuralIndex.LOG.warn("Exception caught while reading structural index for document " + doc.getURI() + ": " + e.getMessage(), e); - } finally { - lock.release(LockMode.READ_LOCK); } } } @@ -696,11 +665,9 @@ private void processPending() { {return;} try { - final Lock lock = index.btree.getLock(); for (final Map.Entry> entry: pending.entrySet()) { final QName qname = entry.getKey(); - try { - lock.acquire(LockMode.WRITE_LOCK); + try(final ManagedLock btreeLock = index.lockManager.acquireBtreeWriteLock(index.btree.getLockName())) { final List nodes = entry.getValue(); for (final NodeProxy proxy : nodes) { final NodeId nodeId = proxy.getNodeId(); @@ -717,8 +684,6 @@ private void processPending() { // NativeStructuralIndex.LOG.warn("Read-only error: " + e.getMessage(), e); } catch (final Exception e) { NativeStructuralIndex.LOG.warn("Exception caught while writing to structural index: " + e.getMessage(), e); - } finally { - lock.release(LockMode.WRITE_LOCK); } } } finally { diff --git a/src/org/exist/storage/txn/TransactionManager.java b/src/org/exist/storage/txn/TransactionManager.java index 0268f338ef6..798b244d658 100644 --- a/src/org/exist/storage/txn/TransactionManager.java +++ b/src/org/exist/storage/txn/TransactionManager.java @@ -29,6 +29,7 @@ import org.exist.storage.*; import org.exist.storage.journal.JournalException; import org.exist.storage.journal.JournalManager; 
+import org.exist.util.LockException; import org.exist.xmldb.XmldbURI; import java.io.IOException; @@ -101,11 +102,12 @@ public Txn beginTransaction() { journalManager.get().journal(new TxnStart(txnId)); } catch(final JournalException e) { LOG.error("Failed to create transaction. Error writing to log file.", e); - } + } } final Txn txn = new Txn(TransactionManager.this, txnId); transactions.put(txn.getId(), new TxnCounter()); + broker.setCurrentTransaction(txn); return txn; }); } @@ -118,6 +120,12 @@ public Txn beginTransaction() { */ public void commit(final Txn txn) throws TransactionException { + if(txn instanceof Txn.ReusableTxn) { + txn.commit(); + return; + //throw new IllegalStateException("Commit should be called on the transaction and not via the TransactionManager"); //TODO(AR) remove later when API is cleaned up? + } + //we can only commit something which is in the STARTED state if (txn.getState() != Txn.State.STARTED) { return; @@ -179,13 +187,20 @@ public void close(final Txn txn) { if (txn.getState() == Txn.State.CLOSED) { return; } - try { //if the transaction is started, then we should auto-abort the uncommitted transaction if (txn.getState() == Txn.State.STARTED) { LOG.warn("Transaction was not committed or aborted, auto aborting!"); abort(txn); } + + try(final DBBroker broker = pool.getBroker()) { + broker.setCurrentTransaction(null); + } catch(final EXistException ee) { + LOG.fatal(ee.getMessage(), ee); + throw new RuntimeException(ee); + } + } finally { txn.setState(Txn.State.CLOSED); //transaction is now closed! 
} @@ -238,9 +253,10 @@ public void checkpoint(final boolean switchFiles) throws TransactionException { @Deprecated public void reindex(final DBBroker broker) throws IOException { broker.pushSubject(broker.getBrokerPool().getSecurityManager().getSystemSubject()); - try { - broker.reindexCollection(XmldbURI.ROOT_COLLECTION_URI); - } catch (final PermissionDeniedException e) { + try(final Txn transaction = beginTransaction()) { + broker.reindexCollection(transaction, XmldbURI.ROOT_COLLECTION_URI); + commit(transaction); + } catch (final PermissionDeniedException | LockException | TransactionException e) { LOG.error("Exception during reindex: " + e.getMessage(), e); } finally { broker.popSubject(); diff --git a/src/org/exist/storage/txn/Txn.java b/src/org/exist/storage/txn/Txn.java index 6b20a23980d..97f8b0a7b0e 100644 --- a/src/org/exist/storage/txn/Txn.java +++ b/src/org/exist/storage/txn/Txn.java @@ -21,11 +21,21 @@ import java.util.ArrayList; import java.util.List; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Supplier; +import com.evolvedbinary.j8fu.function.SupplierE; +import com.evolvedbinary.j8fu.tuple.Tuple2; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.exist.Transaction; import org.exist.storage.lock.Lock; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; +import org.exist.storage.lock.ManagedCollectionLock; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.LockException; +import org.exist.xmldb.XmldbURI; /** * @author wolf @@ -34,21 +44,31 @@ public class Txn implements Transaction { public enum State { STARTED, ABORTED, COMMITTED, CLOSED } - + private final TransactionManager tm; private final long id; + private final List locksHeld; + private final List listeners; private State state; private String originId; - private List locksHeld = new ArrayList<>(); - private List listeners = new ArrayList<>(); - 
public Txn(final TransactionManager tm, final long transactionId) { + public Txn(TransactionManager tm, long transactionId) { this.tm = tm; this.id = transactionId; + this.locksHeld = new ArrayList<>(); + this.listeners = new ArrayList<>(); this.state = State.STARTED; } + protected Txn(final Txn txn) { + this.tm = txn.tm; + this.id = txn.id; + this.locksHeld = txn.locksHeld; + this.listeners = txn.listeners; + this.state = txn.state; + } + public State getState() { return state; } @@ -67,18 +87,28 @@ public long getId() { */ @Deprecated public void registerLock(final Lock lock, final LockMode lockMode) { - locksHeld.add(new LockInfo(lock, lockMode)); + locksHeld.add(new LockInfo(new Tuple2<>(lock, lockMode), () -> lock.release(lockMode))); } public void acquireLock(final Lock lock, final LockMode lockMode) throws LockException { lock.acquire(lockMode); - locksHeld.add(new LockInfo(lock, lockMode)); + locksHeld.add(new LockInfo(new Tuple2<>(lock, lockMode), () -> lock.release(lockMode))); + } + + public void acquireCollectionLock(final SupplierE fnLockAcquire) throws LockException { + final ManagedCollectionLock lock = fnLockAcquire.get(); + locksHeld.add(new LockInfo(lock, lock::close)); + } + + public void acquireDocumentLock(final SupplierE fnLockAcquire) throws LockException { + final ManagedDocumentLock lock = fnLockAcquire.get(); + locksHeld.add(new LockInfo(lock, lock::close)); } - + public void releaseAll() { for (int i = locksHeld.size() - 1; i >= 0; i--) { final LockInfo info = locksHeld.get(i); - info.lock.release(info.lockMode); + info.closer.run(); } locksHeld.clear(); } @@ -101,16 +131,16 @@ protected void signalCommit() { } } - private static class LockInfo { - final Lock lock; - final LockMode lockMode; + private static class LockInfo { + final T lock; + final Runnable closer; - public LockInfo(final Lock lock, final LockMode lockMode) { + public LockInfo(final T lock, final Runnable closer) { this.lock = lock; - this.lockMode = lockMode; + 
this.closer = closer; } } - + @Override public void success() throws TransactionException { commit(); @@ -156,5 +186,73 @@ public void setOriginId(String id) { originId = id; } -} + /** + * A Txn that wraps an underlying transaction + * so that it can be reused as though it was + * a standard transaction. + */ + public static class ReusableTxn extends Txn { + private final static Logger LOG = LogManager.getLogger(ReusableTxn.class); + + private State reusableState = State.STARTED; + private final Txn underlyingTransaction; + + public ReusableTxn(final Txn txn) { + super(txn); + this.underlyingTransaction = txn; + if(txn.state != State.STARTED) { + throw new IllegalStateException("Underlying transaction must be in STARTED state, but is in: " + txn.state + " state."); + } + } + + @Override + public void abort() { + this.reusableState = State.ABORTED; + if(underlyingTransaction.state != State.ABORTED) { + super.abort(); + this.underlyingTransaction.setState(State.ABORTED); + } + } + + @Override + public void commit() throws TransactionException { + this.reusableState = State.COMMITTED; + } + + @Override + public void close() { + if (reusableState == State.STARTED) { + if (LOG.isDebugEnabled()) { + LOG.debug("Transaction was not committed or aborted, auto aborting!"); + } + this.reusableState = State.ABORTED; + if(underlyingTransaction.state != State.CLOSED) { + super.close(); + this.underlyingTransaction.setState(State.CLOSED); + } + this.reusableState = State.CLOSED; + } else if(reusableState == State.ABORTED) { + this.reusableState = State.CLOSED; + if(underlyingTransaction.state != State.CLOSED) { + super.close(); + this.underlyingTransaction.setState(State.CLOSED); + } + } else { + LOG.debug("Resetting transaction state for next use."); + this.reusableState = State.STARTED; //reset state for next commit/abort + } + } + + @Override + public void releaseAll() { + if(reusableState == State.ABORTED) { + super.releaseAll(); + } else { + //do nothing as when 
super#releaseAll is called + //then the locks acquired on the real transaction are released + throw new IllegalStateException("You must only call releaseAll on the real underlying transaction"); + } + } + } +} diff --git a/src/org/exist/util/Collations.java b/src/org/exist/util/Collations.java index afc4a441b1c..8c87fd4a41e 100644 --- a/src/org/exist/util/Collations.java +++ b/src/org/exist/util/Collations.java @@ -72,6 +72,11 @@ public class Collations { */ public final static String HTML_ASCII_CASE_INSENSITIVE_COLLATION_URI = "http://www.w3.org/2005/xpath-functions/collation/html-ascii-case-insensitive"; + /** + * The XQTS ASCII Case-blind Collation as defined by the XQTS 3.1. + */ + public final static String XQTS_ASCII_CASE_BLIND_COLLATION_URI = "http://www.w3.org/2010/09/qt-fots-catalog/collation/caseblind"; + /** * The URI used to select collations in eXist. */ @@ -82,6 +87,11 @@ public class Collations { */ private final static AtomicReference htmlAsciiCaseInsensitiveCollator = new AtomicReference<>(); + /** + * Lazy-initialized singleton XQTS Case Blind Collator + */ + private final static AtomicReference xqtsAsciiCaseBlindCollator = new AtomicReference<>(); + /** * Lazy-initialized singleton Samisk Collator */ @@ -205,7 +215,13 @@ public class Collations { try { return getHtmlAsciiCaseInsensitiveCollator(); } catch (final Exception e) { - throw new XPathException("Unable to instantiate HTML ASCII Case Insensitive Collator: "+ e.getMessage(), e); + throw new XPathException("Unable to instantiate HTML ASCII Case Insensitive Collator: " + e.getMessage(), e); + } + } else if(XQTS_ASCII_CASE_BLIND_COLLATION_URI.equals(uri)) { + try { + return getXqtsAsciiCaseBlindCollator(); + } catch (final Exception e) { + throw new XPathException("Unable to instantiate XQTS ASCII Case Blind Collator: " + e.getMessage(), e); } } else if (uri.startsWith("java:")) { // java class specified: this should be a subclass of @@ -692,4 +708,19 @@ private static Collator 
getHtmlAsciiCaseInsensitiveCollator() throws Exception { return collator; } + + private static Collator getXqtsAsciiCaseBlindCollator() throws Exception { + Collator collator = xqtsAsciiCaseBlindCollator.get(); + if (collator == null) { + collator = new RuleBasedCollator("&a=A &b=B &c=C &d=D &e=E &f=F &g=G &h=H " + + "&i=I &j=J &k=K &l=L &m=M &n=N &o=O &p=P &q=Q &r=R &s=S &t=T " + + "&u=U &v=V &w=W &x=X &y=Y &z=Z"); + collator.setStrength(Collator.PRIMARY); + xqtsAsciiCaseBlindCollator.compareAndSet(null, + collator.freeze()); + collator = xqtsAsciiCaseBlindCollator.get(); + } + + return collator; + } } diff --git a/src/org/exist/util/Compressor.java b/src/org/exist/util/Compressor.java index 5612d7890fe..f1c30d42e23 100644 --- a/src/org/exist/util/Compressor.java +++ b/src/org/exist/util/Compressor.java @@ -1,30 +1,29 @@ /* - * eXist Open Source Native XML Database Copyright (C) 2001-2005, Wolfgang M. - * Meier (meier@ifs.tu-darmstadt.de) - * - * This library is free software; you can redistribute it and/or modify it under - * the terms of the GNU Library General Public License as published by the Free - * Software Foundation; either version 2 of the License, or (at your option) any - * later version. - * - * This library is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS - * FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for more - * details. - * - * You should have received a copy of the GNU Library General Public License - * along with this program; if not, write to the Free Software Foundation, Inc., - * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
- * - * $Id$ + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.exist.util; import java.io.IOException; +import java.io.InputStream; import java.io.OutputStream; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import java.util.zip.ZipOutputStream; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; import org.exist.util.io.FastByteArrayInputStream; import org.exist.util.io.FastByteArrayOutputStream; @@ -32,62 +31,85 @@ public class Compressor { /** - * The method compress + * Compress the byte array using GZip compression. + * + * GZip compression has some overhead for headers etc, + * so it does not make sense to use this with perfectly + * compressible buffers smaller than 23 bytes. + * In reality buffers are unlikely to be perfectly compressible, + * so you likely want to only use it with large buffers. + * + * @param buf the data to compress. + * + * @return the compressed data. 
* - * @param whatToCompress a byte[] value - * @return a byte[] value * @exception IOException if an error occurs */ - public static byte[] compress(byte[] whatToCompress) throws IOException { - return compress(whatToCompress, whatToCompress.length); + public static byte[] compress(final byte[] buf) throws IOException { + return compress(buf, buf.length); } /** - * The method compress + * Compress the byte array using GZip compression. + * + * GZip compression has some overhead for headers etc, + * so it does not make sense to use this with perfectly + * compressible buffers smaller than 23 bytes. + * In reality buffers are unlikely to be perfectly compressible, + * so you likely want to only use it with large buffers. + * + * @param buf the data to compress. + * @param len the number of bytes from buf to compress. + * + * @return the compressed data. * - * @param whatToCompress a byte[] value - * @param length an int value - * @return a byte[] value * @exception IOException if an error occurs */ - public static byte[] compress(byte[] whatToCompress, int length) throws IOException { - try (final FastByteArrayOutputStream baos = new FastByteArrayOutputStream(length); - final ZipOutputStream gzos = new ZipOutputStream(baos)) { - gzos.setMethod(ZipOutputStream.DEFLATED); - gzos.putNextEntry(new ZipEntry(length + "")); - gzos.write(whatToCompress, 0, length); - gzos.closeEntry(); + public static byte[] compress(final byte[] buf, final int len) throws IOException { + try (final FastByteArrayOutputStream baos = new FastByteArrayOutputStream(len); + final GZIPOutputStream gzos = new GZIPOutputStream(baos)) { + gzos.write(buf, 0, len); gzos.finish(); return baos.toByteArray(); } } - + /** - * The method uncompress + * Uncompress the byte array using GZip compression. + * + * @param buf the data to uncompress. + * + * @return the uncompressed data. 
* - * @param whatToUncompress a byte[] value - * @return a byte[] value * @exception IOException if an error occurs */ - public static byte[] uncompress(byte[] whatToUncompress) - throws IOException { + public static byte[] uncompress(final byte[] buf) throws IOException { try (final FastByteArrayOutputStream baos = new FastByteArrayOutputStream()) { - uncompress(whatToUncompress, baos); + uncompress(buf, baos); return baos.toByteArray(); } } - - public static void uncompress(byte[] whatToUncompress, OutputStream os) - throws IOException { - try (final FastByteArrayInputStream bais = new FastByteArrayInputStream(whatToUncompress); - final ZipInputStream gzis = new ZipInputStream(bais)) { - gzis.getNextEntry(); // move to the first entry in the zip stream! - final byte[] buf = new byte[512]; - int bread; - while ((bread = gzis.read(buf)) != -1) - os.write(buf, 0, bread); - gzis.closeEntry(); + + /** + * Uncompress the byte array using GZip compression. + * + * @param buf the data to uncompress. 
+ * @param os the destination for the uncompressed data; + * + * + * @exception IOException if an error occurs + */ + public static int uncompress(final byte[] buf, final OutputStream os) throws IOException { + int written = 0; + try (final FastByteArrayInputStream bais = new FastByteArrayInputStream(buf); + final InputStream gzis = new GZIPInputStream(bais)) { + final byte[] tmp = new byte[4096]; + int read; + while ((read = gzis.read(tmp)) != -1) { + os.write(tmp, 0, read); + written += read; + } } + return written; } } - diff --git a/src/org/exist/util/ConcurrentValueWrapper.java b/src/org/exist/util/ConcurrentValueWrapper.java new file mode 100644 index 00000000000..e6103f1b6c7 --- /dev/null +++ b/src/org/exist/util/ConcurrentValueWrapper.java @@ -0,0 +1,119 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.util; + +import com.evolvedbinary.j8fu.function.Consumer2E; +import com.evolvedbinary.j8fu.function.ConsumerE; +import net.jcip.annotations.ThreadSafe; +import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedLock; + +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Consumer; +import java.util.function.Function; + +/** + * A wrapper which allows read or modify operations + * in a concurrent and thread-safe manner + * to an underlying value. + * + * @param The type of the underlying value + * + * @author Adam Retter + */ +@ThreadSafe +public class ConcurrentValueWrapper { + private final ReadWriteLock lock = new ReentrantReadWriteLock(); + private final T value; + + protected ConcurrentValueWrapper(final T value) { + this.value = value; + } + + /** + * Read from the value. + * + * @param readFn A function which reads the value + * and returns a result. + */ + public U read(final Function readFn) { + try (final ManagedLock readLock = ManagedLock.acquire(lock, LockMode.READ_LOCK)) { + return readFn.apply(value); + } + } + + /** + * Write to the value. + * + * @param writeFn A function which writes to the value. + */ + public void write(final Consumer writeFn) { + try (final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + writeFn.accept(value); + } + } + + /** + * Write to the value and return a result. + * + * @param writeFn A function which writes to the value + * and returns a result. + * + * @return the result of the write function. 
+ */ + public U writeAndReturn(final Function writeFn) { + try (final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + return writeFn.apply(value); + } + } + + /** + * Write to the value. + * + * @param writeFn A function which writes to the value. + * + * @param An exception which may be thrown by the {@code writeFn}. + * + * @throws E if an exception is thrown by the {@code writeFn}. + */ + public final void writeE(final ConsumerE writeFn) throws E { + try (final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + writeFn.accept(value); + } + } + + /** + * Write to the value. + * + * @param writeFn A function which writes to the value. + * + * @param An exception which may be thrown by the {@code writeFn}. + * @param An exception which may be thrown by the {@code writeFn}. + * + * @throws E1 if an exception is thrown by the {@code writeFn}. + * @throws E2 if an exception is thrown by the {@code writeFn}. + */ + public final void write2E(final Consumer2E writeFn) throws E1, E2 { + try (final ManagedLock writeLock = ManagedLock.acquire(lock, LockMode.WRITE_LOCK)) { + writeFn.accept(value); + } + } +} diff --git a/src/org/exist/util/Configuration.java b/src/org/exist/util/Configuration.java index 9a3cc9f8ebf..fd16a9cb7ce 100644 --- a/src/org/exist/util/Configuration.java +++ b/src/org/exist/util/Configuration.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger; import org.exist.backup.SystemExport; +import org.exist.collections.CollectionCache; import org.exist.repo.Deployment; import org.w3c.dom.Document; @@ -44,7 +45,6 @@ import org.exist.security.internal.RealmImpl; import org.exist.storage.BrokerFactory; import org.exist.storage.BrokerPool; -import org.exist.storage.CollectionCacheManager; import org.exist.storage.DBBroker; import org.exist.storage.DefaultCacheManager; import org.exist.storage.IndexSpec; @@ -54,7 +54,6 @@ import org.exist.storage.journal.Journal; import 
org.exist.storage.serializers.CustomMatchListenerFactory; import org.exist.storage.serializers.Serializer; -import org.exist.storage.txn.TransactionManager; import org.exist.validation.GrammarPool; import org.exist.validation.resolver.eXistXMLCatalogResolver; import org.exist.xmldb.DatabaseImpl; @@ -538,6 +537,28 @@ private void configureTransformer( Element transformer ) } private void configureParser(final Element parser) { + configureXmlParser(parser); + configureHtmlToXmlParser(parser); + } + + private void configureXmlParser(final Element parser) { + final NodeList nlXml = parser.getElementsByTagName(XMLReaderPool.XmlParser.XML_PARSER_ELEMENT); + if(nlXml.getLength() > 0) { + final Element xml = (Element)nlXml.item(0); + + final NodeList nlFeatures = xml.getElementsByTagName(XMLReaderPool.XmlParser.XML_PARSER_FEATURES_ELEMENT); + if(nlFeatures.getLength() > 0) { + final Properties pFeatures = ParametersExtractor.parseFeatures(nlFeatures.item(0)); + if(pFeatures != null) { + final Map features = new HashMap<>(); + pFeatures.forEach((k,v) -> features.put(k.toString(), Boolean.valueOf(v.toString()))); + config.put(XMLReaderPool.XmlParser.XML_PARSER_FEATURES_PROPERTY, features); + } + } + } + } + + private void configureHtmlToXmlParser(final Element parser) { final NodeList nlHtmlToXml = parser.getElementsByTagName(HtmlToXmlParser.HTML_TO_XML_PARSER_ELEMENT); if(nlHtmlToXml.getLength() > 0) { final Element htmlToXml = (Element)nlHtmlToXml.item(0); @@ -832,7 +853,7 @@ private void configureBackend( final Optional dbHome, Element con ) throws LOG.warn("Cannot convert " + DefaultCacheManager.SHRINK_THRESHOLD_PROPERTY + " value to integer: " + cacheShrinkThreshold, nfe); } - String collectionCache = getConfigAttributeValue(con, CollectionCacheManager.CACHE_SIZE_ATTRIBUTE); + String collectionCache = getConfigAttributeValue(con, CollectionCache.CACHE_SIZE_ATTRIBUTE); if(collectionCache != null) { collectionCache = collectionCache.toLowerCase(); @@ -854,14 +875,14 @@ 
private void configureBackend( final Optional dbHome, Element con ) throws collectionCacheBytes = Integer.valueOf(collectionCache); } - config.put(CollectionCacheManager.PROPERTY_CACHE_SIZE_BYTES, collectionCacheBytes); + config.put(CollectionCache.PROPERTY_CACHE_SIZE_BYTES, collectionCacheBytes); if(LOG.isDebugEnabled()) { - LOG.debug("Set config {} = {}", CollectionCacheManager.PROPERTY_CACHE_SIZE_BYTES, config.get(CollectionCacheManager.PROPERTY_CACHE_SIZE_BYTES)); + LOG.debug("Set config {} = {}", CollectionCache.PROPERTY_CACHE_SIZE_BYTES, config.get(CollectionCache.PROPERTY_CACHE_SIZE_BYTES)); } } catch( final NumberFormatException nfe ) { - LOG.warn("Cannot convert " + CollectionCacheManager.PROPERTY_CACHE_SIZE_BYTES + " value to integer: " + collectionCache, nfe); + LOG.warn("Cannot convert " + CollectionCache.PROPERTY_CACHE_SIZE_BYTES + " value to integer: " + collectionCache, nfe); } } @@ -987,6 +1008,34 @@ private void configureBackend( final Optional dbHome, Element con ) throws } } + final String posixChownRestrictedStr = getConfigAttributeValue(con, DBBroker.POSIX_CHOWN_RESTRICTED_ATTRIBUTE); + final boolean posixChownRestricted; + if(posixChownRestrictedStr == null) { + posixChownRestricted = true; // default + } else { + if(Boolean.valueOf(posixChownRestrictedStr)) { + posixChownRestricted = true; + } else { + // configuration explicitly specifies that posix chown should NOT be restricted + posixChownRestricted = false; + } + } + config.put(DBBroker.POSIX_CHOWN_RESTRICTED_PROPERTY, posixChownRestricted); + + final String preserveOnCopyStr = getConfigAttributeValue(con, DBBroker.PRESERVE_ON_COPY_ATTRIBUTE); + final DBBroker.PreserveType preserveOnCopy; + if(preserveOnCopyStr == null) { + preserveOnCopy = DBBroker.PreserveType.NO_PRESERVE; // default + } else { + if(Boolean.valueOf(preserveOnCopyStr)) { + // configuration explicitly specifies that attributes should be preserved on copy + preserveOnCopy = DBBroker.PreserveType.PRESERVE; + } else { + 
preserveOnCopy = DBBroker.PreserveType.NO_PRESERVE; + } + } + config.put(DBBroker.PRESERVE_ON_COPY_PROPERTY, preserveOnCopy); + final NodeList securityConf = con.getElementsByTagName( BrokerPool.CONFIGURATION_SECURITY_ELEMENT_NAME ); String securityManagerClassName = BrokerPool.DEFAULT_SECURITY_CLASS; diff --git a/src/org/exist/util/ConfigurationHelper.java b/src/org/exist/util/ConfigurationHelper.java index 27e4a57017e..0e31587d752 100644 --- a/src/org/exist/util/ConfigurationHelper.java +++ b/src/org/exist/util/ConfigurationHelper.java @@ -1,5 +1,6 @@ package org.exist.util; +import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; @@ -96,12 +97,18 @@ public static Optional getExistHome(final String config) { final URL configUrl = ConfigurationHelper.class.getClassLoader().getResource(config); if (configUrl != null) { try { - final Path existHome = Paths.get(configUrl.toURI()).getParent(); - LOG.debug("Got eXist home from classpath: {}", existHome.toAbsolutePath().toString()); + final Path existHome; + if ("jar".equals(configUrl.getProtocol())) { + existHome = Paths.get(new URI(configUrl.getPath())).getParent().getParent(); + LOG.warn(config + " file was found on the classpath, but inside a Jar file! 
Derived EXIST_HOME from Jar's parent folder: {}", existHome); + } else { + existHome = Paths.get(configUrl.toURI()).getParent(); + LOG.debug("Got EXIST_HOME from classpath: {}", existHome.toAbsolutePath().toString()); + } return Optional.of(existHome); + } catch (final URISyntaxException e) { // Catch all potential problems - LOG.error("Could not derive EXIST_HOME from classpath:: {}", e.getMessage(), e); + LOG.error("Could not derive EXIST_HOME from classpath: {}", e.getMessage(), e); + } } diff --git a/src/org/exist/util/EXistInputSource.java b/src/org/exist/util/EXistInputSource.java index f7df3d932f0..7ddfe42e13d 100644 --- a/src/org/exist/util/EXistInputSource.java +++ b/src/org/exist/util/EXistInputSource.java @@ -10,12 +10,8 @@ public abstract class EXistInputSource extends InputSource implements Closeable private boolean closed = false; - public EXistInputSource() { - super(); - } - /** - * @Deprecated Should be avoided, trying to get the length of a stream will ultimately involve buffering + * @deprecated Should be avoided, trying to get the length of a stream may involve buffering */ @Deprecated public abstract long getByteStreamLength(); diff --git a/src/org/exist/util/GlobToRegex.java b/src/org/exist/util/GlobToRegex.java index 5e2c1e1e79f..78b413d3cbc 100644 --- a/src/org/exist/util/GlobToRegex.java +++ b/src/org/exist/util/GlobToRegex.java @@ -129,8 +129,9 @@ private static boolean __isGlobMetaCharacter(char ch) { * pattern. 
*/ public static String globToRegexp(CharSequence pattern) { - @SuppressWarnings("unused") - boolean inCharSet, starCannotMatchNull = false, questionMatchesZero; + boolean inCharSet; +// boolean starCannotMatchNull = false; +// boolean questionMatchesZero; int ch; StringBuilder buffer; diff --git a/src/org/exist/util/HSort.java b/src/org/exist/util/HSort.java index 30161a44c50..a0e303296d4 100644 --- a/src/org/exist/util/HSort.java +++ b/src/org/exist/util/HSort.java @@ -278,13 +278,11 @@ public static void sortByNodeId(NodeProxy[] a, int lo, int hi) private static > void siftdown(C[] a, int n, int vacant, C missing, int drop) { final int memo=vacant; - int child, parent; - int count, next_peek; - count=0; - next_peek=(drop+1)/2; + int count = 0; + int next_peek = (drop+1)/2; - child=2*(vacant+1); + int child = 2*(vacant+1); while(child> void siftdown(C[] a, int n, int vacant=n-1; } - parent=(vacant-1)/2; + int parent=(vacant-1)/2; while(vacant>memo) { if(a[parent].compareTo(missing)<0) { a[vacant]=a[parent]; diff --git a/src/org/exist/util/LockException.java b/src/org/exist/util/LockException.java index d672cb9d021..f6e3cbdc7be 100644 --- a/src/org/exist/util/LockException.java +++ b/src/org/exist/util/LockException.java @@ -1,3 +1,22 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ package org.exist.util; /** @@ -12,19 +31,15 @@ public class LockException extends Exception { private static final long serialVersionUID = -6273549212242606084L; - /** - * Constructor for LockException. - */ public LockException() { super(); } - /** - * Constructor for LockException. - * @param s - */ - public LockException(String s) { - super(s); + public LockException(final String message) { + super(message); } + public LockException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/src/org/exist/util/Lockable.java b/src/org/exist/util/Lockable.java index efd052d4519..8d641807e71 100644 --- a/src/org/exist/util/Lockable.java +++ b/src/org/exist/util/Lockable.java @@ -1,7 +1,5 @@ package org.exist.util; -import org.exist.storage.lock.Lock; - /** * @author wolf * @@ -12,6 +10,6 @@ */ public interface Lockable { - public Lock getLock(); + String getLockName(); } diff --git a/src/org/exist/util/LongLinkedList.java b/src/org/exist/util/LongLinkedList.java index 1d8ec93ba87..7e51bb108d4 100644 --- a/src/org/exist/util/LongLinkedList.java +++ b/src/org/exist/util/LongLinkedList.java @@ -12,7 +12,7 @@ public static class ListItem implements Comparable { public ListItem next = null; public ListItem prev = null; - + public ListItem() { } @@ -47,9 +47,6 @@ else if(l < ol) protected ListItem last = null; protected int count = 0; - public LongLinkedList() { - } - public void add( long l ) { if(first == null) { first = createListItem( l ); diff --git a/src/org/exist/util/NamedThreadFactory.java b/src/org/exist/util/NamedThreadFactory.java index fa861aee969..e799ef998dc 100644 --- a/src/org/exist/util/NamedThreadFactory.java +++ b/src/org/exist/util/NamedThreadFactory.java @@ -20,11 +20,16 @@ package org.exist.util; +import 
org.exist.Database; + import javax.annotation.Nullable; import java.util.Objects; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicLong; +import static org.exist.util.ThreadUtils.nameGlobalThread; +import static org.exist.util.ThreadUtils.nameInstanceThread; + /** * A simple thread factory that provides a standard naming convention * for threads. @@ -33,33 +38,64 @@ */ public class NamedThreadFactory implements ThreadFactory { - private static final String DEFAULT_THREAD_NAME_PREFIX = "exist"; - + private final ThreadGroup threadGroup; + @Nullable private final String instanceId; + private final String nameBase; private final AtomicLong threadId = new AtomicLong(); - private final String threadNamePrefix; /** - * A factory who will produce threads names "exist-${nameBase}-${id}". + * A factory who will produce threads named like either: + * "instance.${instanceId}.${nameBase}-${id}". * + * @param instanceId the id of the database instance * @param nameBase The name base for the thread name + * + * @deprecated use {@link #NamedThreadFactory(Database, String)}. */ - public NamedThreadFactory(final String nameBase) { - this(DEFAULT_THREAD_NAME_PREFIX, nameBase); + @Deprecated + public NamedThreadFactory(final String instanceId, final String nameBase) { + this(null, instanceId, nameBase); } /** - * A factory who will produce threads names "${prefix}-${nameBase}-${id}". + * A factory who will produce threads named like either: + * "instance.${instanceId}.${nameBase}-${id}". * - * @param prefix A common prefix for the thread names + * @param database the database instance which the threads are created for * @param nameBase The name base for the thread name + * + * @deprecated use {@link #NamedThreadFactory(Database, String)}. 
+ */ + public NamedThreadFactory(final Database database, final String nameBase) { + this(database.getThreadGroup(), database.getId(), nameBase); + } + + /** + * A factory who will produce threads named like either: + * + * 1. "instance.${instanceId}.${nameBase}-${id}". + * 2. "global.${nameBase}-${id}". + * + * @param threadGroup The thread group for the created threads, or null + * to use the same group as the calling thread. + * @param instanceId the id of the database instance, or null if the + * thread is a global thread i.e. shared between instances. + * @param nameBase The name base for the thread name. */ - public NamedThreadFactory(@Nullable final String prefix, final String nameBase) { + public NamedThreadFactory(@Nullable final ThreadGroup threadGroup, @Nullable final String instanceId, final String nameBase) { Objects.requireNonNull(nameBase); - this.threadNamePrefix = (prefix == null ? "" : prefix + "-") + nameBase + "-"; + this.threadGroup = threadGroup; + this.instanceId = instanceId; + this.nameBase = nameBase; } @Override public Thread newThread(final Runnable runnable) { - return new Thread(runnable, threadNamePrefix + threadId.getAndIncrement()); + final String localName = nameBase + "-" + threadId.getAndIncrement(); + if (instanceId == null) { + return new Thread(threadGroup, runnable, nameGlobalThread(localName)); + } else { + return new Thread(threadGroup, runnable, nameInstanceThread(instanceId, localName)); + } } } diff --git a/src/org/exist/util/NamedThreadGroupFactory.java b/src/org/exist/util/NamedThreadGroupFactory.java new file mode 100644 index 00000000000..4c88497dba9 --- /dev/null +++ b/src/org/exist/util/NamedThreadGroupFactory.java @@ -0,0 +1,62 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free 
Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.util; + +import javax.annotation.Nullable; +import java.util.concurrent.atomic.AtomicLong; + +/** + * A simple factory for thread groups, where you + * may want multiple groups with similar names. + * + * @author Adam Retter + */ +public class NamedThreadGroupFactory { + + private final String threadGroupNameBase; + private final AtomicLong threadGroupId = new AtomicLong(); + + /** + * @param threadGroupNameBase the base name for the thread group. + */ + public NamedThreadGroupFactory(final String threadGroupNameBase) { + this.threadGroupNameBase = threadGroupNameBase; + } + + /** + * Produces a thread group named like: + * "${threadGroupNameBase}-${id}" + * + * Where id is a global monotonically increasing identifier. + * + * @param parent the parent thread group, or null to use the current thread's thread group. 
+ * + * @return the new thread group + */ + public ThreadGroup newThreadGroup(@Nullable final ThreadGroup parent) { + final String threadGroupName = threadGroupNameBase + "-" + threadGroupId.getAndIncrement(); + if (parent != null) { + return new ThreadGroup(parent, threadGroupName); + } else { + return new ThreadGroup(threadGroupName); + } + } +} diff --git a/src/org/exist/util/OrderedLinkedList.java b/src/org/exist/util/OrderedLinkedList.java index 330056442cd..31ba9c0a65b 100644 --- a/src/org/exist/util/OrderedLinkedList.java +++ b/src/org/exist/util/OrderedLinkedList.java @@ -34,9 +34,6 @@ public abstract static class Node { Node next = null; Node prev = null; - public Node() { - } - public Node getNextNode() { return next; } public Node getPrevNode() { return prev; } @@ -70,8 +67,6 @@ public Comparable getData() { private int size = 0; - public OrderedLinkedList() { - } public Node add(Node newNode) { newNode.next = null; @@ -233,25 +228,4 @@ public void remove() { throw new RuntimeException("not implemented"); } } - - public static void main(String args[]) { - final OrderedLinkedList list = new OrderedLinkedList(); - list.add(new SimpleNode("Adam")); - list.add(new SimpleNode("Sabine")); - list.add(new SimpleNode("Georg")); - list.add(new SimpleNode("Henry")); - list.add(new SimpleNode("Achim")); - list.add(new SimpleNode("Franz")); - list.add(new SimpleNode("Doris")); - list.add(new SimpleNode("Rudi")); - list.add(new SimpleNode("Hermann")); - list.add(new SimpleNode("Lisa")); - list.add(new SimpleNode("Xaver")); - list.add(new SimpleNode("Reinhard")); - list.add(new SimpleNode("Ludwig")); - list.remove(new SimpleNode("Lisa")); - list.remove(new SimpleNode("Henry")); - for(int i = 0; i < list.size(); i++) - System.out.println(((SimpleNode)list.get(i)).data); - } } diff --git a/src/org/exist/util/OrderedLongLinkedList.java b/src/org/exist/util/OrderedLongLinkedList.java index 6c403e6fd02..ada3ee82e2f 100644 --- a/src/org/exist/util/OrderedLongLinkedList.java 
+++ b/src/org/exist/util/OrderedLongLinkedList.java @@ -9,17 +9,7 @@ */ public class OrderedLongLinkedList extends LongLinkedList { - - /** - * Constructor for OrderedLongLinkedList. - */ - public OrderedLongLinkedList() { - super(); - } - - /** - * @see org.exist.util.LongLinkedList#add(long) - */ + @Override public void add(long l) { if (first == null) { first = createListItem(l); @@ -52,21 +42,4 @@ public void add(long l) { ++count; } } - - public static void main(String[] args) { - final OrderedLongLinkedList list = new OrderedLongLinkedList(); - list.add(7); - list.add(44); - list.add(4); - list.add(-43); - list.add(60); - list.add(-122); - list.add(1); - System.out.println("size: " + list.getSize()); - for(final Iterator i = list.iterator(); i.hasNext(); ) { - final OrderedLongLinkedList.ListItem item = - (OrderedLongLinkedList.ListItem)i.next(); - System.out.println(item.l); - } - } } diff --git a/src/org/exist/util/ProgressIndicator.java b/src/org/exist/util/ProgressIndicator.java index e8bda3615dc..20ecdb06242 100644 --- a/src/org/exist/util/ProgressIndicator.java +++ b/src/org/exist/util/ProgressIndicator.java @@ -65,9 +65,7 @@ public int getPercentage() { } public boolean changed() { - if(value_ % step_ == 0) - {return true;} - return false; + return value_ % step_ == 0; } /** diff --git a/src/org/exist/util/ThreadUtils.java b/src/org/exist/util/ThreadUtils.java new file mode 100644 index 00000000000..4c7f7648ddd --- /dev/null +++ b/src/org/exist/util/ThreadUtils.java @@ -0,0 +1,66 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ +package org.exist.util; + +import org.exist.Database; + +/** + * Simple utility functions for creating named threads + * + * @author Adam Retter + */ +public class ThreadUtils { + + public static String nameInstanceThreadGroup(final String instanceId) { + return "exist.db." + instanceId; + } + + public static ThreadGroup newInstanceSubThreadGroup(final Database database, final String subThreadGroupName) { + return new ThreadGroup(database.getThreadGroup(), subThreadGroupName); + } + + public static String nameInstanceThread(final Database database, final String threadName) { + return "db." + database.getId() + "." + threadName; + } + + public static String nameInstanceThread(final String instanceId, final String threadName) { + return "db." + instanceId + "." + threadName; + } + + public static String nameInstanceSchedulerThread(final Database database, final String threadName) { + return "db." + database.getId() + ".scheduler." 
+ threadName; + } + + public static Thread newInstanceThread(final Database database, final String threadName, final Runnable runnable) { + return new Thread(database.getThreadGroup(), runnable, nameInstanceThread(database, threadName)); + } + + public static Thread newInstanceThread(final ThreadGroup threadGroup, final String instanceId, final String threadName, final Runnable runnable) { + return new Thread(threadGroup, runnable, nameInstanceThread(instanceId, threadName)); + } + + public static String nameGlobalThread(final String threadName) { + return "global." + threadName; + } + + public static Thread newGlobalThread(final String threadName, final Runnable runnable) { + return new Thread(runnable, nameGlobalThread(threadName)); + } +} diff --git a/src/org/exist/util/WeakLazyStripes.java b/src/org/exist/util/WeakLazyStripes.java new file mode 100644 index 00000000000..5fd176a3925 --- /dev/null +++ b/src/org/exist/util/WeakLazyStripes.java @@ -0,0 +1,143 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2017 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.util; + +import net.jcip.annotations.ThreadSafe; + +import java.lang.ref.Reference; +import java.lang.ref.ReferenceQueue; +import java.lang.ref.WeakReference; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; + +/** + * Inspired by Guava's com.google.common.util.concurrent.Striped#lazyWeakReadWriteLock(int) + * implementation. + * See https://google.github.io/guava/releases/21.0/api/docs/com/google/common/util/concurrent/Striped.html#lazyWeakReadWriteLock-int- + * + * However this is much simpler, and there is no hashing; we + * will always return the same object (stripe) for the same key. + * + * This class basically couples Weak References with a + * ConcurrentHashMap and manages draining expired Weak + * References from the HashMap. + * + * @param The type of the key for the stripe. + * @param The type of the stripe. + * + * @author Adam Retter + */ +@ThreadSafe +public class WeakLazyStripes { + private static final int INITIAL_CAPACITY = 1000; + private static final float LOAD_FACTOR = 0.75f; + private static final int MAX_EXPIRED_REFERENCE_READ_COUNT = 1000; + + private final ReferenceQueue referenceQueue; + private final ConcurrentMap> stripes; + private final AtomicInteger expiredReferenceReadCount = new AtomicInteger(); + + private final Function creator; + + /** + * Constructs a WeakLazyStripes where the concurrencyLevel + * is the lower of either {@link ConcurrentHashMap#DEFAULT_CONCURRENCY_LEVEL} + * or {@code Runtime.getRuntime().availableProcessors() * 2}. 
+ * + * @param creator A factory for creating new Stripes when needed + */ + public WeakLazyStripes(final Function creator) { + this(Math.min(16, Runtime.getRuntime().availableProcessors() * 2), creator); // 16 == ConcurrentHashMap#DEFAULT_CONCURRENCY_LEVEL + } + + /** + * Constructs a WeakLazyStripes. + * + * @param concurrencyLevel The concurrency level for the underlying + * {@link ConcurrentHashMap#ConcurrentHashMap(int, float, int)} + * @param creator A factory for creating new Stripes when needed + */ + public WeakLazyStripes(final int concurrencyLevel, final Function creator) { + this.stripes = new ConcurrentHashMap<>(INITIAL_CAPACITY, LOAD_FACTOR, concurrencyLevel); + this.referenceQueue = new ReferenceQueue<>(); + this.creator = creator; + } + + /** + * Get the stripe for the given key + * + * If the stripe does not exist, it will be created by + * calling {@link Function#apply(Object)} on {@link this#creator} + * + * @param key the key for the stripe + * @return the stripe + */ + public S get(final K key) { + final WeakReference stripeRef = stripes.compute(key, (k, valueRef) -> { + if(valueRef == null) { + return new WeakReference<>(creator.apply(k), referenceQueue); + } else if(valueRef.get() == null) { + expiredReferenceReadCount.incrementAndGet(); + return new WeakReference<>(creator.apply(k), referenceQueue); + } else { + return valueRef; + } + }); + + // have we reached the threshold where we should clear + // out any cleared WeakReferences from the stripes map + final int count = expiredReferenceReadCount.get(); + if(count > MAX_EXPIRED_REFERENCE_READ_COUNT + && expiredReferenceReadCount.compareAndSet(count, 0)) { + drainClearedReferences(); + } + + // check the weak reference before returning! 
+ final S stripe = stripeRef.get(); + if(stripe != null) { + return stripe; + } + + // weak reference has expired in the mean time, regenerate + return get(key); + } + + /** + * Removes any cleared WeakReferences + * from the stripes map + */ + private void drainClearedReferences() { + Reference ref; + while ((ref = referenceQueue.poll()) != null) { + @SuppressWarnings("unchecked") + final WeakReference stripeRef = (WeakReference)ref; + + /* + If this is too slow, we could store Map>> + to sacrifice a small amount of memory for remove performance by then calling Map#remove(key) + instead of calling the iterative function Map#values()#remove(value) + */ + stripes.values().remove(stripeRef); + } + } +} diff --git a/src/org/exist/util/XMLChar.java b/src/org/exist/util/XMLChar.java index 5b0d2f20294..f0ae6fbf7e2 100644 --- a/src/org/exist/util/XMLChar.java +++ b/src/org/exist/util/XMLChar.java @@ -561,15 +561,17 @@ public static boolean isPubid(int c) { * @param name string to check * @return true if name is a valid Name */ - public static boolean isValidName(String name) { - if (name.length() == 0) - {return false;} + public static boolean isValidName(final String name) { + if (name.length() == 0) { + return false; + } char ch = name.charAt(0); - if( isNameStart(ch) == false) - {return false;} + if(!isNameStart(ch)) { + return false; + } for (int i = 1; i < name.length(); i++ ) { ch = name.charAt(i); - if( isName( ch ) == false ){ + if (!isName(ch)) { return false; } } @@ -588,15 +590,17 @@ public static boolean isValidName(String name) { * @param ncName string to check * @return true if name is a valid NCName */ - public static boolean isValidNCName(String ncName) { - if (ncName.length() == 0) - {return false;} + public static boolean isValidNCName(final String ncName) { + if (ncName.isEmpty()) { + return false; + } char ch = ncName.charAt(0); - if( isNCNameStart(ch) == false) - {return false;} + if (!isNCNameStart(ch)) { + return false; + } for (int i = 1; i < 
ncName.length(); i++ ) { ch = ncName.charAt(i); - if( isNCName( ch ) == false ){ + if (!isNCName(ch)){ return false; } } diff --git a/src/org/exist/util/XMLReaderObjectFactory.java b/src/org/exist/util/XMLReaderObjectFactory.java index db9a51d1d47..76564bc314b 100644 --- a/src/org/exist/util/XMLReaderObjectFactory.java +++ b/src/org/exist/util/XMLReaderObjectFactory.java @@ -51,7 +51,7 @@ public class XMLReaderObjectFactory extends BasePoolableObjectFactory implements private final static Logger LOG = LogManager.getLogger(XMLReaderObjectFactory.class); - public static enum VALIDATION_SETTING { + public enum VALIDATION_SETTING { UNKNOWN, ENABLED, AUTO, DISABLED } diff --git a/src/org/exist/util/XMLReaderPool.java b/src/org/exist/util/XMLReaderPool.java index 9d6a519d8a6..128eb09bef0 100644 --- a/src/org/exist/util/XMLReaderPool.java +++ b/src/org/exist/util/XMLReaderPool.java @@ -27,12 +27,16 @@ import org.exist.Namespaces; import org.exist.storage.BrokerPool; import org.exist.storage.BrokerPoolService; +import org.exist.storage.BrokerPoolServiceException; import org.exist.validation.GrammarPool; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.XMLReader; import org.xml.sax.ext.DefaultHandler2; +import javax.xml.parsers.ParserConfigurationException; +import java.util.Map; + /** * Maintains a pool of XMLReader objects. The pool is available through * {@link BrokerPool#getParserPool()}. 
@@ -45,6 +49,8 @@ public class XMLReaderPool extends StackObjectPool implements BrokerP private final static DefaultHandler2 DUMMY_HANDLER = new DefaultHandler2(); + private Configuration configuration = null; + /** * * @@ -52,18 +58,37 @@ public class XMLReaderPool extends StackObjectPool implements BrokerP * @param maxIdle * @param initIdleCapacity */ - public XMLReaderPool(PoolableObjectFactory factory, int maxIdle, int initIdleCapacity) { + public XMLReaderPool(final PoolableObjectFactory factory, final int maxIdle, final int initIdleCapacity) { super(factory, maxIdle, initIdleCapacity); } + @Override + public void configure(final Configuration configuration) throws BrokerPoolServiceException { + this.configuration = configuration; + } + public synchronized XMLReader borrowXMLReader() { try { - return super.borrowObject(); + final XMLReader reader = super.borrowObject(); + setParserConfigFeatures(reader); + return reader; } catch (final Exception e) { throw new IllegalStateException("error while returning XMLReader: " + e.getMessage(), e ); } } + /** + * Sets any features for the parser which were defined in conf.xml + */ + private void setParserConfigFeatures(final XMLReader xmlReader) throws ParserConfigurationException, SAXNotRecognizedException, SAXNotSupportedException { + final Map parserFeatures = (Map)configuration.getProperty(XmlParser.XML_PARSER_FEATURES_PROPERTY); + if(parserFeatures != null) { + for(final Map.Entry feature : parserFeatures.entrySet()) { + xmlReader.setFeature(feature.getKey(), feature.getValue()); + } + } + } + @Override public synchronized XMLReader borrowObject() throws Exception { return borrowXMLReader(); @@ -115,6 +140,12 @@ private Object getReaderProperty(XMLReader xmlReader, String propertyName){ return object; } - + + // just used for config properties + public interface XmlParser { + String XML_PARSER_ELEMENT = "xml"; + String XML_PARSER_FEATURES_ELEMENT = "features"; + String XML_PARSER_FEATURES_PROPERTY = 
"parser.xml-parser.features"; + } } diff --git a/src/org/exist/util/io/Resource.java b/src/org/exist/util/io/Resource.java index 9dff5a1453f..cbe72537298 100644 --- a/src/org/exist/util/io/Resource.java +++ b/src/org/exist/util/io/Resource.java @@ -28,11 +28,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.security.SecureRandom; -import java.util.ArrayList; -import java.util.Date; -import java.util.Iterator; -import java.util.List; -import java.util.Properties; +import java.util.*; import javax.xml.transform.OutputKeys; @@ -43,16 +39,15 @@ import org.exist.collections.Collection.CollectionEntry; import org.exist.collections.IndexInfo; import org.exist.collections.triggers.TriggerException; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.DocumentMetadata; -import org.exist.dom.persistent.LockToken; +import org.exist.dom.persistent.*; import org.exist.security.Permission; import org.exist.security.PermissionDeniedException; +import org.exist.security.PermissionFactory; import org.exist.security.Subject; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedCollectionLock; import org.exist.storage.serializers.EXistOutputKeys; import org.exist.storage.serializers.Serializer; import org.exist.storage.txn.TransactionManager; @@ -88,9 +83,6 @@ public class Resource extends File { XML_OUTPUT_PROPERTIES.setProperty(EXistOutputKeys.PROCESS_XSL_PI, "no"); } - public final static int DEFAULT_COLLECTION_PERM = 0777; - public final static int DEFAULT_RESOURCE_PERM = 0644; - private static final SecureRandom random = new SecureRandom(); @@ -293,12 +285,8 @@ public boolean _renameTo(File dest) { return false; } - org.exist.collections.Collection destination = null; - org.exist.collections.Collection source = null; - XmldbURI newName; - - try (final DBBroker broker = 
db.getBroker()) { - source = broker.openCollection(uri.removeLastSegment(), LockMode.WRITE_LOCK); + try (final DBBroker broker = db.getBroker(); + final Collection source = broker.openCollection(uri.removeLastSegment(), LockMode.WRITE_LOCK)) { if (source == null) { return false; } @@ -306,29 +294,23 @@ public boolean _renameTo(File dest) { if (doc == null) { return false; } - destination = broker.openCollection(destinationPath.removeLastSegment(), LockMode.WRITE_LOCK); - if (destination == null) { - return false; - } + try (final Collection destination = broker.openCollection(destinationPath.removeLastSegment(), LockMode.WRITE_LOCK)) { + if (destination == null) { + return false; + } - newName = destinationPath.lastSegment(); + final XmldbURI newName = destinationPath.lastSegment(); - try (final Txn transaction = tm.beginTransaction()) { - broker.moveResource(transaction, doc, destination, newName); - tm.commit(transaction); - } - return true; + try (final Txn transaction = tm.beginTransaction()) { + broker.moveResource(transaction, doc, destination, newName); + tm.commit(transaction); + } + return true; + } } catch (final Exception e) { e.printStackTrace(); return false; - } finally { - if (source != null) { - source.release(LockMode.WRITE_LOCK); - } - if (destination != null) { - destination.release(LockMode.WRITE_LOCK); - } } } @@ -345,27 +327,23 @@ public boolean renameTo(File dest) { return false; } - try (final DBBroker broker = db.getBroker()) { + try (final DBBroker broker = db.getBroker(); + final Collection source = broker.openCollection(uri.removeLastSegment(), LockMode.WRITE_LOCK)) { - org.exist.collections.Collection destination = null; - org.exist.collections.Collection source = null; - XmldbURI newName; + if (source == null) { + return false; + } + final DocumentImpl doc = source.getDocument(broker, uri.lastSegment()); + if (doc == null) { + return false; + } - try { - source = broker.openCollection(uri.removeLastSegment(), LockMode.WRITE_LOCK); - if 
(source == null) { - return false; - } - final DocumentImpl doc = source.getDocument(broker, uri.lastSegment()); - if (doc == null) { - return false; - } - destination = broker.openCollection(destinationPath.removeLastSegment(), LockMode.WRITE_LOCK); + try(final Collection destination = broker.openCollection(destinationPath.removeLastSegment(), LockMode.WRITE_LOCK)) { if (destination == null) { return false; } - newName = destinationPath.lastSegment(); + final XmldbURI newName = destinationPath.lastSegment(); final TransactionManager tm = db.getTransactionManager(); try (final Txn transaction = tm.beginTransaction()) { @@ -379,19 +357,9 @@ public boolean renameTo(File dest) { tm.commit(transaction); return true; } - - } catch (final Exception e) { - e.printStackTrace(); - return false; - } finally { - if (source != null) { - source.release(LockMode.WRITE_LOCK); - } - if (destination != null) { - destination.release(LockMode.WRITE_LOCK); - } } - } catch (final EXistException e) { + } catch (final Exception e) { + e.printStackTrace(); return false; } } @@ -558,9 +526,8 @@ public boolean delete() { return false; } - try (final DBBroker broker = db.getBroker()) { - - collection = broker.openCollection(uri.removeLastSegment(), LockMode.WRITE_LOCK); + try (final DBBroker broker = db.getBroker(); + final Collection collection = broker.openCollection(uri.removeLastSegment(), LockMode.WRITE_LOCK)) { if (collection == null) { return false; } @@ -586,10 +553,6 @@ public boolean delete() { } catch (final EXistException | IOException | PermissionDeniedException | LockException | TriggerException e) { LOG.error(e); return false; - } finally { - if(collection != null) { - collection.release(LockMode.WRITE_LOCK); - } } } @@ -603,10 +566,6 @@ public boolean createNewFile() throws IOException { } try (final DBBroker broker = db.getBroker()) { - -// if (!uri.startsWith("/db")) -// uri = XmldbURI.DB.append(uri); -// try { if (uri.endsWith("/")) { throw new IOException("It collection, 
but should be resource: " + uri); @@ -623,19 +582,6 @@ public boolean createNewFile() throws IOException { final XmldbURI fileName = uri.lastSegment(); -// try { -// resource = broker.getXMLResource(uri, LockMode.READ_LOCK); -// } catch (final PermissionDeniedException e1) { -// } finally { -// if (resource != null) { -// resource.getUpdateLock().release(LockMode.READ_LOCK); -// collection = resource.getCollection(); -// initialized = true; -// -// return false; -// } -// } -// try { resource = broker.getResource(uri, Permission.READ); } catch (final PermissionDeniedException e1) { @@ -662,14 +608,12 @@ public boolean createNewFile() throws IOException { final String str = ""; final IndexInfo info = collection.validateXMLResource(transaction, broker, fileName, str); info.getDocument().getMetadata().setMimeType(mimeType.getName()); - info.getDocument().getPermissions().setMode(DEFAULT_RESOURCE_PERM); collection.store(transaction, broker, info, str); } else { // store as binary resource try (final InputStream is = new FastByteArrayInputStream(new byte[0])) { final BinaryDocument blob = new BinaryDocument(db, collection, fileName); - blob.getPermissions().setMode(DEFAULT_RESOURCE_PERM); collection.addBinaryResource(transaction, broker, blob, is, mimeType.getName(), 0L, new Date(), new Date()); } @@ -709,8 +653,8 @@ private synchronized void init() throws IOException { //resource } else { - try { - resource = broker.getXMLResource(uri, LockMode.READ_LOCK); + try(final LockedDocument lockedResource = broker.getXMLResource(uri, LockMode.READ_LOCK)) { + resource = lockedResource == null ? null : lockedResource.getDocument(); if (resource == null) { //may be, it's collection ... checking ... 
collection = broker.getCollection(uri); @@ -720,10 +664,6 @@ private synchronized void init() throws IOException { } else { collection = resource.getCollection(); } - } finally { - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); - } } } } @@ -865,7 +805,7 @@ public String[] list() { return list.toArray(new String[list.size()]); } - } catch (final LockException | PermissionDeniedException | EXistException e) { + } catch (final LockException | PermissionDeniedException | IOException | EXistException e) { LOG.error(e); return new String[0]; } @@ -889,22 +829,22 @@ public File[] listFiles() { try { final BrokerPool db = BrokerPool.getInstance(); - try (final DBBroker broker = db.getBroker()) { - collection.getLock().acquire(LockMode.READ_LOCK); + try (final DBBroker broker = db.getBroker(); + final ManagedCollectionLock collectionLock = db.getLockManager().acquireCollectionReadLock(collection.getURI())) { final File[] children = new File[collection.getChildCollectionCount(broker) + collection.getDocumentCount(broker)]; //collections int j = 0; - for (final Iterator i = collection.collectionIterator(broker); i.hasNext(); j++) + for (final Iterator i = collection.collectionIterator(broker); i.hasNext(); j++) { children[j] = new Resource(collection.getURI().append(i.next())); + } //collections - final List allresources = new ArrayList(); - DocumentImpl doc = null; + final List allresources = new ArrayList<>(); for (final Iterator i = collection.iterator(broker); i.hasNext(); ) { - doc = i.next(); + final DocumentImpl doc = i.next(); // Include only when (1) locktoken is present or (2) // locktoken indicates that it is not a null resource @@ -920,15 +860,8 @@ public File[] listFiles() { } return children; - } catch (final LockException e) { - //throw new IOException("Failed to acquire lock on collection '" + uri + "'"); - return null; - } catch (final Exception e) { return null; - - } finally { - collection.release(LockMode.READ_LOCK); } } 
catch (final Exception e) { @@ -1027,20 +960,20 @@ public boolean setReadOnly() { modifyMetadata(new ModifyMetadata() { @Override - public void modify(DocumentImpl resource) throws IOException { - Permission perm = resource.getPermissions(); + public void modify(final DBBroker broker, final DocumentImpl resource) throws IOException { + final Permission perm = resource.getPermissions(); try { - perm.setMode(perm.getMode() | (READ << 6) & ~(WRITE << 6)); + PermissionFactory.chmod(broker, perm, Optional.of(perm.getMode() | (READ << 6) & ~(WRITE << 6)), Optional.empty()); } catch (PermissionDeniedException e) { throw new IOException(e); } } @Override - public void modify(Collection collection) throws IOException { - Permission perm = collection.getPermissionsNoLock(); + public void modify(final DBBroker broker, final Collection collection) throws IOException { + final Permission perm = collection.getPermissionsNoLock(); try { - perm.setMode(perm.getMode() | (READ << 6) & ~(WRITE << 6)); + PermissionFactory.chmod(broker, perm, Optional.of(perm.getMode() | (READ << 6) & ~(WRITE << 6)), Optional.empty()); } catch (PermissionDeniedException e) { throw new IOException(e); } @@ -1059,20 +992,20 @@ public boolean setExecutable(boolean executable, boolean ownerOnly) { modifyMetadata(new ModifyMetadata() { @Override - public void modify(DocumentImpl resource) throws IOException { - Permission perm = resource.getPermissions(); + public void modify(final DBBroker broker, final DocumentImpl resource) throws IOException { + final Permission perm = resource.getPermissions(); try { - perm.setMode(perm.getMode() | (EXECUTE << 6)); + PermissionFactory.chmod(broker, perm, Optional.of(perm.getMode() | (EXECUTE << 6)), Optional.empty()); } catch (PermissionDeniedException e) { throw new IOException(e); } } @Override - public void modify(Collection collection) throws IOException { - Permission perm = collection.getPermissionsNoLock(); + public void modify(final DBBroker broker, final 
Collection collection) throws IOException { + final Permission perm = collection.getPermissionsNoLock(); try { - perm.setMode(perm.getMode() | (EXECUTE << 6)); + PermissionFactory.chmod(broker, perm, Optional.of(perm.getMode() | (EXECUTE << 6)), Optional.empty()); } catch (PermissionDeniedException e) { throw new IOException(e); } @@ -1111,12 +1044,12 @@ public boolean setLastModified(final long time) { modifyMetadata(new ModifyMetadata() { @Override - public void modify(DocumentImpl resource) throws IOException { + public void modify(final DBBroker broker, DocumentImpl resource) throws IOException { resource.getMetadata().setLastModified(time); } @Override - public void modify(Collection collection) throws IOException { + public void modify(final DBBroker broker, Collection collection) throws IOException { throw new IOException("LastModified can't be set for collection."); } @@ -1147,9 +1080,8 @@ public long lastModified() { } interface ModifyMetadata { - public void modify(DocumentImpl resource) throws IOException; - - public void modify(Collection collection) throws IOException; + void modify(DBBroker broker, DocumentImpl resource) throws IOException; + void modify(DBBroker broker, Collection collection) throws IOException; } private void modifyMetadata(ModifyMetadata method) throws IOException { @@ -1177,39 +1109,37 @@ private void modifyMetadata(ModifyMetadata method) throws IOException { //resource } else { - resource = broker.getXMLResource(uri, LockMode.READ_LOCK); - if (resource == null) { - //may be, it's collection ... checking ... - collection = broker.getCollection(uri); - if (collection == null) { - throw new IOException("Resource not found: " + uri); - } + try(final LockedDocument lockedResource = broker.getXMLResource(uri, LockMode.READ_LOCK)) { + resource = lockedResource == null ? null : lockedResource.getDocument(); + if (resource == null) { + //may be, it's collection ... checking ... 
+ collection = broker.getCollection(uri); + if (collection == null) { + throw new IOException("Resource not found: " + uri); + } - try (final Txn txn = tm.beginTransaction()) { - method.modify(collection); - broker.saveCollection(txn, collection); + try (final Txn txn = tm.beginTransaction()) { + method.modify(broker, collection); + broker.saveCollection(txn, collection); - tm.commit(txn); - } + tm.commit(txn); + } - } else { - collection = resource.getCollection(); + } else { + collection = resource.getCollection(); - try (final Txn txn = tm.beginTransaction()) { - method.modify(resource); - broker.storeMetadata(txn, resource); + try (final Txn txn = tm.beginTransaction()) { + method.modify(broker, resource); + broker.storeMetadata(txn, resource); - tm.commit(txn); + tm.commit(txn); + } } } } } catch (final Exception e) { LOG.error(e); throw new IOException(e); - } finally { - if (resource != null) { - resource.getUpdateLock().release(LockMode.READ_LOCK); - } } } catch (final EXistException e) { LOG.error(e); diff --git a/src/org/exist/util/sanity/SanityCheck.java b/src/org/exist/util/sanity/SanityCheck.java index 738a0178be6..ba663333f29 100644 --- a/src/org/exist/util/sanity/SanityCheck.java +++ b/src/org/exist/util/sanity/SanityCheck.java @@ -2,95 +2,105 @@ * eXist Open Source Native XML Database * Copyright (C) 2001-04 The eXist Project * http://exist-db.org - * + * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. 
- * + * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - * + * * $Id$ */ package org.exist.util.sanity; -import java.io.PrintWriter; -import java.io.StringWriter; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import javax.annotation.Nullable; + /** - * Utility class for sanity checks. Provides static methods ASSERT, THROW_ASSERT - * which can be used in the code to react to unexpected conditions. {@link #ASSERT(boolean)} - * logs a stack trace to the log4j log output. {@link #THROW_ASSERT(boolean)} - * throws an additional runtime exception. - * + * Utility class for sanity checks. Provides static methods which can be used in the + * code to react to unexpected conditions. + * + * {@link #ASSERT(boolean)} and {@link #ASSERT(boolean, String)} log a stack trace + * to the Log4J log output at the {@link org.apache.logging.log4j.Level#ERROR} level. + * + * {@link #THROW_ASSERT(boolean)} and {@link #THROW_ASSERT(boolean, String)} log a stack trace + * to the Log4J log output at the {@link org.apache.logging.log4j.Level#ERROR} level, + * and throws an additional runtime exception of {@link AssertFailure}. + * + * {@link #PRINT_STACK(int)} and {@link #TRACE(String)} log a stack trace to the Log4J + * log output at the {@link org.apache.logging.log4j.Level#TRACE} level.
+ * * @author wolf */ public class SanityCheck { + private static final Logger LOG = LogManager.getLogger(SanityCheck.class); + private static final String EOL = System.getProperty("line.separator"); - private final static Logger LOG = LogManager.getLogger(SanityCheck.class); - - public final static void ASSERT(boolean mustBeTrue) { - if (!mustBeTrue) { - final AssertFailure failure = new AssertFailure("ASSERT FAILED"); - showTrace(failure); - } + public static void ASSERT(final boolean mustBeTrue) { + ASSERT(mustBeTrue, null); } - public final static void ASSERT(boolean mustBeTrue, String failureMsg) { - if (!mustBeTrue) { - final AssertFailure failure = new AssertFailure("ASSERT FAILED: " + failureMsg); - showTrace(failure); + public static void ASSERT(final boolean mustBeTrue, @Nullable final String failureMsg) { + if (!mustBeTrue && LOG.isErrorEnabled()) { + final AssertFailure failure; + if (failureMsg == null) { + failure = new AssertFailure("ASSERT FAILED"); + } else { + failure = new AssertFailure("ASSERT FAILED: " + failureMsg); + } + + LOG.error(failure); } } - - public final static void THROW_ASSERT(boolean mustBeTrue) { - if (!mustBeTrue) { - final AssertFailure failure = new AssertFailure("ASSERT FAILED"); - showTrace(failure); - throw failure; - } + + public static void THROW_ASSERT(final boolean mustBeTrue) { + THROW_ASSERT(mustBeTrue, null); } - - public final static void THROW_ASSERT(boolean mustBeTrue, String failureMsg) { + + public static void THROW_ASSERT(final boolean mustBeTrue, @Nullable final String failureMsg) { if (!mustBeTrue) { - final AssertFailure failure = new AssertFailure("ASSERT FAILED: " + failureMsg); - showTrace(failure); + final AssertFailure failure; + if (failureMsg == null) { + failure = new AssertFailure("ASSERT FAILED"); + } else { + failure = new AssertFailure("ASSERT FAILED: " + failureMsg); + } + LOG.error(failure); throw failure; } } - - public final static void TRACE(String msg) { - final AssertFailure failure = new 
AssertFailure("TRACE: " + msg); - showTrace(failure); - } - - public final static void PRINT_STACK(int level) { - final StackTraceElement elements[] = new Exception("Trace").getStackTrace(); - if (level > elements.length) - {level = elements.length;} - final StringBuilder buf = new StringBuilder(); - for (int i = 1; i < level; i++) { - buf.append('\n'); - buf.append(elements[i].toString()); + + public static void TRACE(final String msg) { + if (LOG.isTraceEnabled()) { + final AssertFailure failure = new AssertFailure("TRACE: " + msg); + LOG.trace(failure); } - LOG.debug(buf.toString()); } - - private final static void showTrace(AssertFailure failure) { - final StringWriter sout = new StringWriter(); - final PrintWriter out = new PrintWriter(sout); - out.println("Stacktrace:"); - failure.printStackTrace(out); - LOG.warn(sout.toString()); + + public static void PRINT_STACK(final int level) { + if (LOG.isTraceEnabled()) { + + final StackTraceElement elements[] = new Exception("Trace").getStackTrace(); + final StringBuilder buf = new StringBuilder(); + final int depth = Math.min(level, elements.length); + for (int i = 1; i < depth; i++) { + buf.append(elements[i].toString()); + if (i < depth - 1) { + buf.append(EOL); + } + } + + LOG.trace(buf.toString()); + } } -} \ No newline at end of file +} diff --git a/src/org/exist/util/serializer/AttrList.java b/src/org/exist/util/serializer/AttrList.java index b0e58461b4b..08a221b3810 100644 --- a/src/org/exist/util/serializer/AttrList.java +++ b/src/org/exist/util/serializer/AttrList.java @@ -40,13 +40,6 @@ public class AttrList { protected String values[] = new String[4]; protected int type[] = new int[4]; protected int size = 0; - - /** - * - */ - public AttrList() { - super(); - } public void addAttribute(QName name, String value) { addAttribute(name, value, AttrImpl.CDATA); diff --git a/src/org/exist/util/serializer/DOMStreamer.java b/src/org/exist/util/serializer/DOMStreamer.java index 4e5edbec729..b2ee1334ebe 100644 --- 
a/src/org/exist/util/serializer/DOMStreamer.java +++ b/src/org/exist/util/serializer/DOMStreamer.java @@ -211,7 +211,8 @@ protected void startNode(final Node node) throws SAXException { stack.push(info); // output attributes final AttributesImpl saxAttrs = new AttributesImpl(); - String attrNS, attrLocalName; + String attrNS; + String attrLocalName; for (int i = 0; i < attrs.getLength(); i++) { nextAttr = (Attr) attrs.item(i); attrNS = nextAttr.getNamespaceURI(); diff --git a/src/org/exist/util/serializer/HTML5Writer.java b/src/org/exist/util/serializer/HTML5Writer.java index a3dac76512d..12601023900 100644 --- a/src/org/exist/util/serializer/HTML5Writer.java +++ b/src/org/exist/util/serializer/HTML5Writer.java @@ -35,6 +35,88 @@ */ public class HTML5Writer extends XHTML5Writer { + /** + * Holds the names of the attributes that are considered boolean + * according to http://www.w3.org/TR/html51/single-page.html. + * + * The value of these attributes are written if they equal the + * name of the attribute. For example: checked="checked" will be + * written as checked. + * + * See https://github.com/eXist-db/exist/issues/777 for details. 
+ */ + private final static ObjectHashSet BOOLEAN_ATTRIBUTE_NAMES = new ObjectHashSet(68); + static { + BOOLEAN_ATTRIBUTE_NAMES.add("allowFullscreen"); + BOOLEAN_ATTRIBUTE_NAMES.add("async"); + BOOLEAN_ATTRIBUTE_NAMES.add("autofocus"); + BOOLEAN_ATTRIBUTE_NAMES.add("autoplay"); + BOOLEAN_ATTRIBUTE_NAMES.add("badInput"); + BOOLEAN_ATTRIBUTE_NAMES.add("checked"); + BOOLEAN_ATTRIBUTE_NAMES.add("closed"); + BOOLEAN_ATTRIBUTE_NAMES.add("commandChecked"); + BOOLEAN_ATTRIBUTE_NAMES.add("commandDisabled"); + BOOLEAN_ATTRIBUTE_NAMES.add("commandHidden"); + BOOLEAN_ATTRIBUTE_NAMES.add("compact"); + BOOLEAN_ATTRIBUTE_NAMES.add("complete"); + BOOLEAN_ATTRIBUTE_NAMES.add("controls"); + BOOLEAN_ATTRIBUTE_NAMES.add("cookieEnabled"); + BOOLEAN_ATTRIBUTE_NAMES.add("customError"); + BOOLEAN_ATTRIBUTE_NAMES.add("declare"); + BOOLEAN_ATTRIBUTE_NAMES.add("default"); + BOOLEAN_ATTRIBUTE_NAMES.add("defaultChecked"); + BOOLEAN_ATTRIBUTE_NAMES.add("defaultMuted"); + BOOLEAN_ATTRIBUTE_NAMES.add("defaultSelected"); + BOOLEAN_ATTRIBUTE_NAMES.add("defer"); + BOOLEAN_ATTRIBUTE_NAMES.add("disabled"); + BOOLEAN_ATTRIBUTE_NAMES.add("draggable"); + BOOLEAN_ATTRIBUTE_NAMES.add("enabled"); + BOOLEAN_ATTRIBUTE_NAMES.add("ended"); + BOOLEAN_ATTRIBUTE_NAMES.add("formNoValidate"); + BOOLEAN_ATTRIBUTE_NAMES.add("hidden"); + BOOLEAN_ATTRIBUTE_NAMES.add("indeterminate"); + BOOLEAN_ATTRIBUTE_NAMES.add("isContentEditable"); + BOOLEAN_ATTRIBUTE_NAMES.add("isMap"); + BOOLEAN_ATTRIBUTE_NAMES.add("itemScope"); + BOOLEAN_ATTRIBUTE_NAMES.add("javaEnabled"); + BOOLEAN_ATTRIBUTE_NAMES.add("loop"); + BOOLEAN_ATTRIBUTE_NAMES.add("multiple"); + BOOLEAN_ATTRIBUTE_NAMES.add("muted"); + BOOLEAN_ATTRIBUTE_NAMES.add("noHref"); + BOOLEAN_ATTRIBUTE_NAMES.add("noResize"); + BOOLEAN_ATTRIBUTE_NAMES.add("noShade"); + BOOLEAN_ATTRIBUTE_NAMES.add("noValidate"); + BOOLEAN_ATTRIBUTE_NAMES.add("noWrap"); + BOOLEAN_ATTRIBUTE_NAMES.add("onLine"); + BOOLEAN_ATTRIBUTE_NAMES.add("open"); + BOOLEAN_ATTRIBUTE_NAMES.add("patternMismatch"); + 
BOOLEAN_ATTRIBUTE_NAMES.add("pauseOnExit"); + BOOLEAN_ATTRIBUTE_NAMES.add("paused"); + BOOLEAN_ATTRIBUTE_NAMES.add("persisted"); + BOOLEAN_ATTRIBUTE_NAMES.add("rangeOverflow"); + BOOLEAN_ATTRIBUTE_NAMES.add("rangeUnderflow"); + BOOLEAN_ATTRIBUTE_NAMES.add("readOnly"); + BOOLEAN_ATTRIBUTE_NAMES.add("required"); + BOOLEAN_ATTRIBUTE_NAMES.add("reversed"); + BOOLEAN_ATTRIBUTE_NAMES.add("scoped"); + BOOLEAN_ATTRIBUTE_NAMES.add("seamless"); + BOOLEAN_ATTRIBUTE_NAMES.add("seeking"); + BOOLEAN_ATTRIBUTE_NAMES.add("selected"); + BOOLEAN_ATTRIBUTE_NAMES.add("sortable"); + BOOLEAN_ATTRIBUTE_NAMES.add("spellcheck"); + BOOLEAN_ATTRIBUTE_NAMES.add("stepMismatch"); + BOOLEAN_ATTRIBUTE_NAMES.add("tooLong"); + BOOLEAN_ATTRIBUTE_NAMES.add("tooShort"); + BOOLEAN_ATTRIBUTE_NAMES.add("translate"); + BOOLEAN_ATTRIBUTE_NAMES.add("trueSpeed"); + BOOLEAN_ATTRIBUTE_NAMES.add("typeMismatch"); + BOOLEAN_ATTRIBUTE_NAMES.add("typeMustMatch"); + BOOLEAN_ATTRIBUTE_NAMES.add("valid"); + BOOLEAN_ATTRIBUTE_NAMES.add("valueMissing"); + BOOLEAN_ATTRIBUTE_NAMES.add("visible"); + BOOLEAN_ATTRIBUTE_NAMES.add("willValidate"); + } + private final static ObjectHashSet EMPTY_TAGS = new ObjectHashSet(31); static { EMPTY_TAGS.add("area"); @@ -100,7 +182,7 @@ public void attribute(String qname, String value) throws TransformerException { final Writer writer = getWriter(); writer.write(' '); writer.write(qname); - if (!qname.equals(value)) { + if (!(BOOLEAN_ATTRIBUTE_NAMES.contains(qname) && qname.equals(value))) { writer.write("=\""); writeChars(value, true); writer.write('"'); @@ -125,8 +207,9 @@ public void attribute(QName qname, String value) throws TransformerException { writer.write(qname.getPrefix()); writer.write(':'); } - if (!qname.getLocalPart().equals(value)) { - writer.write(qname.getLocalPart()); + final String localPart = qname.getLocalPart(); + writer.write(localPart); + if (!(BOOLEAN_ATTRIBUTE_NAMES.contains(localPart) && localPart.equals(value))) { writer.write("=\""); writeChars(value, true); 
writer.write('"'); diff --git a/src/org/exist/util/serializer/SerializerObjectFactory.java b/src/org/exist/util/serializer/SerializerObjectFactory.java index 17afcd7eae8..2e67003319b 100644 --- a/src/org/exist/util/serializer/SerializerObjectFactory.java +++ b/src/org/exist/util/serializer/SerializerObjectFactory.java @@ -29,13 +29,6 @@ */ public class SerializerObjectFactory extends BaseKeyedPoolableObjectFactory { - /** - * - */ - public SerializerObjectFactory() { - super(); - } - public Object makeObject(Object key) throws Exception { if (key == SAXSerializer.class) {return new SAXSerializer();} diff --git a/src/org/exist/validation/internal/node/NodeInputStream.java b/src/org/exist/validation/internal/node/NodeInputStream.java index a89a7c59e79..b93404f7187 100644 --- a/src/org/exist/validation/internal/node/NodeInputStream.java +++ b/src/org/exist/validation/internal/node/NodeInputStream.java @@ -24,63 +24,64 @@ import java.io.IOException; import java.io.InputStream; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import java.util.concurrent.atomic.AtomicLong; + +import org.exist.Database; import org.exist.storage.io.BlockingInputStream; -import org.exist.storage.io.BlockingOutputStream; import org.exist.storage.serializers.Serializer; import org.exist.xquery.value.NodeValue; +import static org.exist.util.ThreadUtils.newInstanceThread; + /** * @author Dannes Wessels (dizzzz@exist-db.org) */ -public class NodeInputStream extends InputStream{ - - private final static Logger logger = LogManager.getLogger(NodeInputStream.class); - - private final BlockingInputStream bis; +public class NodeInputStream extends InputStream { + private final BlockingInputStream bis; + private static final AtomicLong nodeSerializerThreadId = new AtomicLong(); - /** Creates a new instance of NodeInputStream */ - public NodeInputStream(Serializer serializer, NodeValue node) { - logger.debug("Initializing NodeInputStream"); - - bis = new 
BlockingInputStream(); - BlockingOutputStream bos = bis.getOutputStream(); - - NodeSerializerThread rt = new NodeSerializerThread(serializer, node, bos); - - rt.start(); - - logger.debug("Initializing NodeInputStream done"); + /** + * Creates a new instance of NodeInputStream + */ + public NodeInputStream(final Database database, final Serializer serializer, final NodeValue node) { + this.bis = new BlockingInputStream(); + final Thread thread = newInstanceThread(database, "node-input-stream-serializer-" + nodeSerializerThreadId.getAndIncrement(), new NodeSerializerRunnable(serializer, node, bis.getOutputStream())); + thread.start(); } + @Override public int read(byte[] b, int off, int len) throws IOException { return bis.read(b, off, len); } - + + @Override public int read(byte[] b) throws IOException { return bis.read(b, 0, b.length); } - + + @Override public long skip(long n) throws IOException { return bis.skip(n); } - + + @Override public void reset() throws IOException { bis.reset(); } - + + @Override public int read() throws IOException { return bis.read(); } + @Override public void close() throws IOException { bis.close(); } + @Override public int available() throws IOException { return bis.available(); } - } diff --git a/src/org/exist/validation/internal/node/NodeSerializerThread.java b/src/org/exist/validation/internal/node/NodeSerializerRunnable.java similarity index 82% rename from src/org/exist/validation/internal/node/NodeSerializerThread.java rename to src/org/exist/validation/internal/node/NodeSerializerRunnable.java index 658155bb520..d95a3145dcc 100644 --- a/src/org/exist/validation/internal/node/NodeSerializerThread.java +++ b/src/org/exist/validation/internal/node/NodeSerializerRunnable.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.Properties; -import java.util.concurrent.atomic.AtomicLong; import javax.xml.transform.OutputKeys; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -35,21 
+34,18 @@ /** * @author Dannes Wessels (dizzzz@exist-db.org) */ -public class NodeSerializerThread extends Thread { +public class NodeSerializerRunnable implements Runnable { - private final static Logger logger = LogManager.getLogger(NodeSerializerThread.class); + private final static Logger logger = LogManager.getLogger(NodeSerializerRunnable.class); private final Serializer serializer; private final NodeValue node; private final BlockingOutputStream bos; - - private static final AtomicLong nodeSerializerThreadId = new AtomicLong(); /** - * Creates a new instance of NodeSerializerThread + * Creates a new instance of NodeSerializerRunnable */ - public NodeSerializerThread(final Serializer serializer, final NodeValue node,final BlockingOutputStream bos) { - super("exist-nodeSerializerThread-" + nodeSerializerThreadId.getAndIncrement()); + public NodeSerializerRunnable(final Serializer serializer, final NodeValue node, final BlockingOutputStream bos) { this.serializer = serializer; this.node = node; this.bos = bos; @@ -77,10 +73,8 @@ public void run() { try { // NEEDED! 
bos.close(exception); } catch (final IOException ex) { - logger.debug(ex); + logger.warn(ex); } } } - - } diff --git a/src/org/exist/validation/resolver/AnyUriResolver.java b/src/org/exist/validation/resolver/AnyUriResolver.java index 9c0508499c4..ce2d138d5c1 100644 --- a/src/org/exist/validation/resolver/AnyUriResolver.java +++ b/src/org/exist/validation/resolver/AnyUriResolver.java @@ -43,7 +43,8 @@ * @author Dannes Wessels (dizzzz@exist-db.org) */ public class AnyUriResolver implements XMLEntityResolver { - private final static Logger LOG = LogManager.getLogger(AnyUriResolver.class); + private static final Logger LOG = LogManager.getLogger(AnyUriResolver.class); + private static final ThreadGroup threadGroup = new ThreadGroup("exist.xml-entity-resolver"); private String docPath; private final String parentURI; @@ -110,7 +111,7 @@ public XMLInputSource resolveEntity(XMLResourceIdentifier xri) throws XNIExcepti is = new EmbeddedInputStream(xmldbURL); } else { - is = new XmlrpcInputStream(xmldbURL); + is = new XmlrpcInputStream(threadGroup, xmldbURL); } } else { diff --git a/src/org/exist/validation/resolver/unstable/eXistLSResourceResolver.java b/src/org/exist/validation/resolver/unstable/eXistLSResourceResolver.java index a0a07d3f16c..75d14831e0c 100644 --- a/src/org/exist/validation/resolver/unstable/eXistLSResourceResolver.java +++ b/src/org/exist/validation/resolver/unstable/eXistLSResourceResolver.java @@ -42,8 +42,8 @@ * @author Dizzzz (dizzzz@exist-db.org) */ public class eXistLSResourceResolver implements LSResourceResolver { - - private final static Logger LOG = LogManager.getLogger(eXistLSResourceResolver.class); + private static final Logger LOG = LogManager.getLogger(eXistLSResourceResolver.class); + private static final ThreadGroup threadGroup = new ThreadGroup("exist.ls-resolver"); public LSInput resolveResource(String type, String namespaceURI, String publicId, String systemId, String baseURI) { @@ -79,7 +79,7 @@ private InputStream 
getInputStream(String resourcePath) throws MalformedURLExcep is = new EmbeddedInputStream(xmldbURL); } else { - is = new XmlrpcInputStream(xmldbURL); + is = new XmlrpcInputStream(threadGroup, xmldbURL); } } else { diff --git a/src/org/exist/xmldb/AbstractEXistResource.java b/src/org/exist/xmldb/AbstractEXistResource.java index f972b70421b..1e904c24236 100644 --- a/src/org/exist/xmldb/AbstractEXistResource.java +++ b/src/org/exist/xmldb/AbstractEXistResource.java @@ -20,6 +20,7 @@ package org.exist.xmldb; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Permission; import org.exist.security.Subject; import org.exist.storage.BrokerPool; @@ -27,6 +28,7 @@ import org.exist.storage.lock.Lock.LockMode; import org.exist.storage.txn.Txn; import com.evolvedbinary.j8fu.function.FunctionE; +import com.evolvedbinary.j8fu.function.SupplierE; import org.exist.xmldb.function.LocalXmldbDocumentFunction; import org.xmldb.api.base.Collection; import org.xmldb.api.base.ErrorCodes; @@ -59,10 +61,23 @@ public void setMimeType(final String mime) { @Override public String getMimeType() throws XMLDBException { + return getMimeType(() -> read((document, broker, transaction) -> document.getMetadata().getMimeType())); + } + + /** + * Similar to {@link org.exist.xmldb.EXistResource#getMimeType()} + * but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + String getMimeType(final DBBroker broker, final Txn transaction) throws XMLDBException { + return getMimeType(() -> this.read(broker, transaction).apply((document, broker1, transaction1) -> document.getMetadata().getMimeType())); + } + + private String getMimeType(final SupplierE mimeTypeRead) throws XMLDBException { if (isNewResource) { return mimeType; } else { - return read((document, broker, transaction) -> document.getMetadata().getMimeType()); + return mimeTypeRead.get(); } } @@ -176,17 +191,15 @@ public 
FunctionE, R, XMLDBException> modify(fi private FunctionE, R, XMLDBException> with(final LockMode lockMode, final DBBroker broker, final Txn transaction) throws XMLDBException { return documentOp -> collection.with(lockMode, broker, transaction).apply((collection, broker1, transaction1) -> { - DocumentImpl doc = null; - try { - doc = collection.getDocumentWithLock(broker1, docId, lockMode); - if(doc == null) { + try(final LockedDocument lockedDoc = collection.getDocumentWithLock(broker1, docId, lockMode)) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + if(lockedDoc == null) { throw new XMLDBException(ErrorCodes.INVALID_RESOURCE); } - return documentOp.apply(doc, broker1, transaction1); - } finally { - if(doc != null) { - doc.getUpdateLock().release(lockMode); - } + return documentOp.apply(lockedDoc.getDocument(), broker1, transaction1); } }); } diff --git a/src/org/exist/xmldb/AbstractLocal.java b/src/org/exist/xmldb/AbstractLocal.java index 17c8cbb849f..545808e55b4 100644 --- a/src/org/exist/xmldb/AbstractLocal.java +++ b/src/org/exist/xmldb/AbstractLocal.java @@ -29,10 +29,12 @@ import com.evolvedbinary.j8fu.function.FunctionE; import org.exist.xmldb.function.LocalXmldbCollectionFunction; import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.Collection; import org.xmldb.api.base.ErrorCodes; import org.xmldb.api.base.XMLDBException; import java.util.Optional; +import java.util.function.Function; /** * Base class for Local XMLDB classes @@ -40,6 +42,9 @@ * @author Adam Retter */ public abstract class AbstractLocal { + + public final static String PROP_JOIN_TRANSACTION_IF_PRESENT = "exist.api.xmldb.local.join-transaction-if-present"; + protected final BrokerPool brokerPool; protected final Subject user; protected LocalCollection collection; @@ -58,6 +63,17 @@ protected XmldbURI resolve(final XmldbURI name) { } } + + protected XmldbURI getCollectionUri(final DBBroker broker, 
final Txn transaction, final Collection collection) throws XMLDBException { + final String name; + if(collection instanceof LocalCollection) { + name = ((LocalCollection)collection).getName(broker, transaction); + } else { + name = collection.getName(); + } + return XmldbURI.create(name); + } + /** * Higher-order-function for performing read-only operations against a database collection * @@ -162,9 +178,7 @@ protected FunctionE, R, XMLDBException> with */ protected FunctionE, R, XMLDBException> with(final LockMode lockMode, final DBBroker broker, final Txn transaction, final XmldbURI collectionUri, final int errorCode) throws XMLDBException { return collectionOp -> { - org.exist.collections.Collection coll = null; - try { - coll = broker.openCollection(collectionUri, lockMode); + try(org.exist.collections.Collection coll = broker.openCollection(collectionUri, lockMode)) { if (coll == null) { throw new XMLDBException(errorCode, "Collection " + collectionUri.toString() + " not found"); } @@ -173,14 +187,34 @@ protected FunctionE, R, XMLDBException> with return result; } catch (final PermissionDeniedException e) { throw new XMLDBException(ErrorCodes.PERMISSION_DENIED, e.getMessage(), e); - } finally { - if (coll != null) { - coll.release(lockMode); - } } }; } + /** + * Either begins a new transaction (default) or + * attempts to join an existing transaction. + * + * If there is no existing transaction, a new transaction + * will begin. 
+ * + * Controlled by the System Property {@link AbstractLocal#PROP_JOIN_TRANSACTION_IF_PRESENT } + * + * @return A transaction + * + * @deprecated This function will be removed when {@link DBBroker#continueOrBeginTransaction()} is removed + */ + @Deprecated + private static Function transaction() { + final boolean joinTransactionIfPresent = System.getProperty(PROP_JOIN_TRANSACTION_IF_PRESENT, "true") + .equalsIgnoreCase("true"); + if(joinTransactionIfPresent) { + return (broker) -> broker.continueOrBeginTransaction(); + } else { + return (broker) -> broker.getBrokerPool().getTransactionManager().beginTransaction(); + } + } + /** * Higher-order-function for performing an XMLDB operation on * the database @@ -189,9 +223,9 @@ protected FunctionE, R, XMLDBException> with * @param The return type of the operation * @throws org.xmldb.api.base.XMLDBException */ - R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { try (final DBBroker broker = brokerPool.get(Optional.ofNullable(user)); - final Txn transaction = brokerPool.getTransactionManager().beginTransaction()) { + final Txn transaction = transaction().apply(broker)) { try { final R result = dbOperation.apply(broker, transaction); transaction.commit(); diff --git a/src/org/exist/xmldb/AbstractRemoteResource.java b/src/org/exist/xmldb/AbstractRemoteResource.java index 388eda91ef6..6180307a0aa 100644 --- a/src/org/exist/xmldb/AbstractRemoteResource.java +++ b/src/org/exist/xmldb/AbstractRemoteResource.java @@ -34,6 +34,7 @@ import org.exist.util.EXistInputSource; import org.exist.util.FileUtils; import org.exist.util.Leasable; +import org.exist.util.ZipEntryInputSource; import org.exist.util.io.FastByteArrayInputStream; import org.exist.util.io.FastByteArrayOutputStream; import org.exist.util.io.TemporaryFileManager; @@ -57,7 +58,7 @@ public abstract class AbstractRemoteResource extends AbstractRemote protected Path 
file = null; private Path contentFile = null; protected InputSource inputSource = null; - private long contentLen = 0L; + private long contentLen = -1L; private Permission permissions = null; private boolean closed; @@ -196,6 +197,9 @@ protected boolean setContentInternal(final Object value) wasSet = true; } else if (value instanceof InputSource) { inputSource = (InputSource) value; + if (inputSource instanceof EXistInputSource) { + setExtendendContentLength(((EXistInputSource)inputSource).getByteStreamLength()); + } wasSet = true; } else if (value instanceof byte[]) { file = TemporaryFileManager.getInstance().getTemporaryFile(); diff --git a/src/org/exist/xmldb/DatabaseImpl.java b/src/org/exist/xmldb/DatabaseImpl.java index db6d7abaa0e..5ca8231c7d8 100644 --- a/src/org/exist/xmldb/DatabaseImpl.java +++ b/src/org/exist/xmldb/DatabaseImpl.java @@ -29,6 +29,7 @@ import org.exist.security.SecurityManager; import org.exist.security.Subject; import org.exist.storage.BrokerPool; +import org.exist.storage.journal.Journal; import org.exist.util.Configuration; import org.exist.util.Leasable; import org.exist.util.SSLHelper; @@ -42,6 +43,7 @@ import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; +import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -84,6 +86,8 @@ public class DatabaseImpl implements Database { private boolean autoCreate = false; private String configuration = null; + private String dataDir = null; + private String journalDir = null; private String currentInstanceName = null; private final Map> rpcClients = new HashMap<>(); @@ -109,6 +113,13 @@ public DatabaseImpl() { private void configure(final String instanceName) throws XMLDBException { try { final Configuration config = new Configuration(configuration, Optional.empty()); + if (dataDir != null) { + config.setProperty(BrokerPool.PROPERTY_DATA_DIR, Paths.get(dataDir)); + } + if (journalDir != null) { + 
config.setProperty(Journal.PROPERTY_RECOVERY_JOURNAL_DIR, Paths.get(journalDir)); + } + BrokerPool.configure(instanceName, 1, 5, config); if (shutdown != null) { BrokerPool.getInstance(instanceName).registerShutdownListener(shutdown); @@ -365,6 +376,8 @@ public String[] getNames() throws XMLDBException { public final static String CREATE_DATABASE = "create-database"; public final static String DATABASE_ID = "database-id"; public final static String CONFIGURATION = "configuration"; + public final static String DATA_DIR = "data-dir"; + public final static String JOURNAL_DIR = "journal-dir"; public final static String SSL_ENABLE = "ssl-enable"; public final static String SSL_ALLOW_SELF_SIGNED = "ssl-allow-self-signed"; public final static String SSL_VERIFY_HOSTNAME = "ssl-verify-hostname"; @@ -385,6 +398,14 @@ public String getProperty(final String property) throws XMLDBException { value = configuration; break; + case DATA_DIR: + value = dataDir; + break; + + case JOURNAL_DIR: + value = journalDir; + break; + case SSL_ENABLE: value = ssl_enable.toString(); break; @@ -418,6 +439,14 @@ public void setProperty(final String property, final String value) throws XMLDBE this.configuration = value; break; + case DATA_DIR: + this.dataDir = value; + break; + + case JOURNAL_DIR: + this.journalDir = value; + break; + case SSL_ENABLE: this.ssl_enable = Boolean.valueOf(value); break; diff --git a/src/org/exist/xmldb/EXistCollectionManagementService.java b/src/org/exist/xmldb/EXistCollectionManagementService.java index 8095f67fe48..e3c6205b667 100644 --- a/src/org/exist/xmldb/EXistCollectionManagementService.java +++ b/src/org/exist/xmldb/EXistCollectionManagementService.java @@ -65,10 +65,34 @@ public interface EXistCollectionManagementService extends CollectionManagementSe void moveResource(XmldbURI resourcePath, XmldbURI destinationPath, XmldbURI newName) throws XMLDBException; + /** + * @deprecated Use {@link #copyResource(XmldbURI, XmldbURI, XmldbURI, String)} + */ + 
@Deprecated void copyResource(XmldbURI resourcePath, XmldbURI destinationPath, XmldbURI newName) throws XMLDBException; + /** + * @param resourcePath The source document + * @param destinationPath The destination collection + * @param newName The new name of the copied source in the destination collection + * @param preserveType one of either "DEFAULT", "NO_PRESERVE", "PRESERVE" + */ + void copyResource(XmldbURI resourcePath, XmldbURI destinationPath, XmldbURI newName, String preserveType) throws XMLDBException; + + /** + * @deprecated Use {@link #copy(XmldbURI, XmldbURI, XmldbURI, String)} + */ + @Deprecated void copy(XmldbURI collection, XmldbURI destination, XmldbURI newName) throws XMLDBException; + /** + * @param collection The source collection + * @param destination The destination collection + * @param newName The new name of the copied source in the destination collection + * @param preserveType one of either "DEFAULT", "NO_PRESERVE", "PRESERVE" + */ + void copy(XmldbURI collection, XmldbURI destination, XmldbURI newName, String preserveType) throws XMLDBException; + Collection createCollection(XmldbURI collName, Date created) throws XMLDBException; /** diff --git a/src/org/exist/xmldb/EXistResource.java b/src/org/exist/xmldb/EXistResource.java index a55a096d657..cfda10b7ec5 100644 --- a/src/org/exist/xmldb/EXistResource.java +++ b/src/org/exist/xmldb/EXistResource.java @@ -44,6 +44,11 @@ public interface EXistResource extends Resource, AutoCloseable { Permission getPermissions() throws XMLDBException; + /** + * The content length if known. + * + * @return The content length, or -1 if not known. 
+ */ long getContentLength() throws XMLDBException; void setLexicalHandler(LexicalHandler handler); diff --git a/src/org/exist/xmldb/EXistXPathQueryService.java b/src/org/exist/xmldb/EXistXPathQueryService.java index afb7ab4e694..6d5ad2214d3 100644 --- a/src/org/exist/xmldb/EXistXPathQueryService.java +++ b/src/org/exist/xmldb/EXistXPathQueryService.java @@ -32,6 +32,15 @@ */ public interface EXistXPathQueryService extends XPathQueryService { + //TODO(AR) this should likely be configurable + /** + * The maximum amount of times we should retry + * acquiring all locks in {@link #beginProtected()} + * + * Default: -1 which means infinite + */ + int BEGIN_PROTECTED_MAX_LOCKING_RETRIES = -1; + /** * Process an XPath query based on the result of a previous query. * The XMLResource contains the result received from a previous diff --git a/src/org/exist/xmldb/ExtendedResource.java b/src/org/exist/xmldb/ExtendedResource.java index d955b16b9ec..4df700ee92a 100644 --- a/src/org/exist/xmldb/ExtendedResource.java +++ b/src/org/exist/xmldb/ExtendedResource.java @@ -49,7 +49,9 @@ public interface ExtendedResource { InputStream getStreamContent() throws XMLDBException; /** - * It returns the length of the content, whichever it is its origin + * It returns the length of the content, if known. + * + * @return The content length, or -1 if not known. 
*/ long getStreamLength() throws XMLDBException; diff --git a/src/org/exist/xmldb/LocalBinaryResource.java b/src/org/exist/xmldb/LocalBinaryResource.java index bd0c7be791e..991841c22d9 100644 --- a/src/org/exist/xmldb/LocalBinaryResource.java +++ b/src/org/exist/xmldb/LocalBinaryResource.java @@ -45,6 +45,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Properties; +import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; +import com.evolvedbinary.j8fu.function.SupplierE; public class LocalBinaryResource extends AbstractEXistResource implements ExtendedResource, BinaryResource, EXistResource { @@ -65,6 +68,19 @@ public String getResourceType() throws XMLDBException { @Override public Object getExtendedContent() throws XMLDBException { + return getExtendedContent(() -> read((document, broker, transaction) -> broker.getBinaryResource(((BinaryDocument) document)))); + } + + /** + * Similar to {@link org.exist.xmldb.ExtendedResource#getExtendedContent()} + * but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + Object getExtendedContent(final DBBroker broker, final Txn transaction) throws XMLDBException { + return getExtendedContent(() -> read(broker, transaction).apply((document, broker1, transaction1) -> broker1.getBinaryResource(((BinaryDocument) document)))); + } + + private Object getExtendedContent(final SupplierE binaryResourceRead) throws XMLDBException { if (file != null) { return file; } @@ -77,13 +93,26 @@ public Object getExtendedContent() throws XMLDBException { if(binaryValue != null) { return binaryValue; } - - return read((document, broker, transaction) -> broker.getBinaryResource(((BinaryDocument) document))); + return binaryResourceRead.get(); } @Override public Object getContent() throws XMLDBException { final Object res = getExtendedContent(); + return getContent(res); + } + + /** + * Similar to {@link org.exist.xmldb.LocalBinaryResource#getContent()} + 
* but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + Object getContent(final DBBroker broker, final Txn transaction) throws XMLDBException { + final Object res = getExtendedContent(broker, transaction); + return getContent(res); + } + + private Object getContent(final Object res) throws XMLDBException { if(res != null) { if(res instanceof Path) { return readFile((Path)res); @@ -108,7 +137,6 @@ public Object getContent() throws XMLDBException { } } } - return res; } @@ -135,6 +163,19 @@ public void setContent(final Object value) throws XMLDBException { @Override public InputStream getStreamContent() throws XMLDBException { + return getStreamContent(() -> read((document, broker, transaction) -> broker.getBinaryResource(((BinaryDocument) document)))); + } + + /** + * Similar to {@link org.exist.xmldb.LocalBinaryResource#getStreamContent()} + * but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + InputStream getStreamContent(final DBBroker broker, final Txn transaction) throws XMLDBException { + return getStreamContent(() -> this.read(broker, transaction).apply((document, broker1, transaction1) -> broker.getBinaryResource(((BinaryDocument) document)))); + } + + private InputStream getStreamContent(final SupplierE streamContentRead) throws XMLDBException { final InputStream is; if(file != null) { try { @@ -150,7 +191,7 @@ public InputStream getStreamContent() throws XMLDBException { } else if(binaryValue != null) { is = binaryValue.getInputStream(); } else { - is = read((document, broker, transaction) -> broker.getBinaryResource(((BinaryDocument) document))); + is = streamContentRead.get(); } return is; diff --git a/src/org/exist/xmldb/LocalCollection.java b/src/org/exist/xmldb/LocalCollection.java index b8f4c6de813..f85586924da 100644 --- a/src/org/exist/xmldb/LocalCollection.java +++ b/src/org/exist/xmldb/LocalCollection.java @@ -30,12 +30,14 @@ import 
org.exist.collections.IndexInfo; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.persistent.LockToken; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Account; import org.exist.security.Permission; import org.exist.security.Subject; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.serializers.EXistOutputKeys; import org.exist.storage.sync.Sync; import org.exist.storage.txn.Txn; @@ -86,6 +88,7 @@ public class LocalCollection extends AbstractLocal implements EXistCollection { } private final XmldbURI path; + private final Random random = new Random(); private Properties properties = new Properties(defaultProperties); private boolean needsSync = false; private XMLReader userReader = null; @@ -120,11 +123,24 @@ public LocalCollection(Subject user, final BrokerPool brokerPool, final LocalCol this.path = name.toCollectionPathURI(); } - read(ErrorCodes.NO_SUCH_COLLECTION).apply((collection, broker, transaction) -> { - /* no-op, used to make sure the current user can open the collection! - will throw an XMLDBException if they cannot */ - return null; + /* + no-op, used to make sure the current user can open the collection! + will throw an XMLDBException if they cannot + we are careful to throw the exception outside of the transaction operation + so that it does not immediately close the current transaction and unwind the stack, + this is because not being able to open a collection is a valid operation e.g. 
xmldb:collection-available + */ + final Optional openException = withDb((broker, transaction) -> { + try { + return this.>read(broker, transaction, ErrorCodes.NO_SUCH_COLLECTION).apply((collection, broker1, transaction1) -> Optional.empty()); + } catch(final XMLDBException e) { + return Optional.of(e); + } }); + + if(openException.isPresent()) { + throw openException.get(); + } } protected boolean checkOwner(final Collection collection, final Account account) throws XMLDBException { @@ -161,13 +177,12 @@ public void close() throws XMLDBException { */ @Override public String createId() throws XMLDBException { - return this.read().apply((collection, broker, transaction) ->{ + return this.read().apply((collection, broker, transaction) -> { XmldbURI id; - final Random rand = new Random(); boolean ok; do { ok = true; - id = XmldbURI.create(Integer.toHexString(rand.nextInt()) + ".xml"); + id = XmldbURI.create(Integer.toHexString(random.nextInt()) + ".xml"); // check if this ID does already exist if (collection.hasDocument(broker, id)) { ok = false; @@ -250,20 +265,31 @@ public int getChildCollectionCount() throws XMLDBException { @Override public String getName() throws XMLDBException { - return this.read().apply((collection, broker, transaction) -> collection.getURI().toString()); + return withDb((broker, transaction) -> getName(broker, transaction)); + } + + /** + * Similar to {@link org.exist.xmldb.LocalCollection#getName()} + * but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + String getName(final DBBroker broker, final Txn transaction) throws XMLDBException { + return this.read(broker, transaction).apply((collection, broker1, transaction1) -> collection.getURI().toString()); } @Override public org.xmldb.api.base.Collection getParentCollection() throws XMLDBException { - if(getName().equals(XmldbURI.ROOT_COLLECTION)) { - return null; - } - - if(collection == null) { - final XmldbURI parentUri = 
this.read().apply((collection, broker, transaction) -> collection.getParentURI()); - this.collection = new LocalCollection(user, brokerPool, null, parentUri); - } - return collection; + return withDb((broker, transaction) -> { + if (getName(broker, transaction).equals(XmldbURI.ROOT_COLLECTION)) { + return null; + } + + if (collection == null) { + final XmldbURI parentUri = this.read(broker, transaction).apply((collection, broker1, transaction1) -> collection.getParentURI()); + this.collection = new LocalCollection(user, brokerPool, null, parentUri); + } + return collection; + }); } public String getPath() throws XMLDBException { @@ -284,28 +310,54 @@ public Resource getResource(final String id) throws XMLDBException { throw new XMLDBException(ErrorCodes.INVALID_URI, e); } - return this.read().apply((collection, broker, transaction) -> { - final DocumentImpl document = collection.getDocument(broker, idURI); - if(document == null) { - LOG.warn("Resource " + idURI + " not found"); - return null; - } + return withDb((broker, transaction) -> getResource(broker, transaction, idURI)); + } + + /** + * Similar to {@link org.exist.xmldb.LocalCollection#getResource(String)} + * but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + Resource getResource(final DBBroker broker, final Txn transaction, final String id) throws XMLDBException { + final XmldbURI idURI; + try { + idURI = XmldbURI.xmldbUriFor(id); + } catch(final URISyntaxException e) { + throw new XMLDBException(ErrorCodes.INVALID_URI, e); + } - final Resource r; - switch(document.getResourceType()) { - case DocumentImpl.XML_FILE: - r = new LocalXMLResource(user, brokerPool, this, idURI); - break; + return getResource(broker, transaction, idURI); + } - case DocumentImpl.BINARY_FILE: - r = new LocalBinaryResource(user, brokerPool, this, idURI); - break; + Resource getResource(final DBBroker broker, final Txn transaction, final XmldbURI idURI) throws XMLDBException 
{ + return this.read(broker, transaction).apply((collection, broker1, transaction1) -> { + try(final LockedDocument lockedDocument = collection.getDocumentWithLock(broker1, idURI, LockMode.READ_LOCK)) { - default: - throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "Unknown resource type"); + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + final DocumentImpl document = lockedDocument == null ? null : lockedDocument.getDocument(); + if (document == null) { + LOG.warn("Resource " + idURI + " not found"); + return null; + } + + final Resource r; + switch (document.getResourceType()) { + case DocumentImpl.XML_FILE: + r = new LocalXMLResource(user, brokerPool, this, idURI); + break; + + case DocumentImpl.BINARY_FILE: + r = new LocalBinaryResource(user, brokerPool, this, idURI); + break; + + default: + throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "Unknown resource type"); + } + ((AbstractEXistResource) r).setMimeType(document.getMetadata().getMimeType()); + return r; } - ((AbstractEXistResource)r).setMimeType(document.getMetadata().getMimeType()); - return r; }); } @@ -402,25 +454,26 @@ public String[] getChildCollections() throws XMLDBException { */ @Override public String[] listResources() throws XMLDBException { - return this.read().apply((collection, broker, transaction) -> { - final List allresources = new ArrayList<>(); + final List resources = this.>read().apply((collection, broker, transaction) -> { + final List allresources = new ArrayList<>(); for(final Iterator i = collection.iterator(broker); i.hasNext(); ) { final DocumentImpl doc = i.next(); - // Include only when (1) lockToken is present or (2) - // lockToken indicates that it is not a null resource - final LockToken lock = doc.getMetadata().getLockToken(); - if(lock == null || (!lock.isNullResource())){ - allresources.add(doc.getFileURI()); + + try(final ManagedDocumentLock documentLock = 
broker.getBrokerPool().getLockManager().acquireDocumentReadLock(doc.getURI())) { + + // Include only when (1) lockToken is present or (2) + // lockToken indicates that it is not a null resource + final LockToken lock = doc.getMetadata().getLockToken(); + if (lock == null || (!lock.isNullResource())) { + allresources.add(doc.getFileURI().toString()); + } } } - // Copy content of list into String array. - int j = 0; - final String[] resources = new String[allresources.size()]; - for(final Iterator i = allresources.iterator(); i.hasNext(); j++){ - resources[j] = i.next().toString(); - } - return resources; + return allresources; }); + + // Copy content of list into String array. + return resources.toArray(new String[resources.size()]); } @Override @@ -447,15 +500,23 @@ public void removeResource(final Resource res) throws XMLDBException { modify().apply((collection, broker, transaction) -> { //Check that the document exists - final DocumentImpl doc = collection.getDocument(broker, resURI); - if (doc == null) { - throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "Resource " + resURI + " not found"); - } + try(final LockedDocument lockedDocument = collection.getDocumentWithLock(broker, resURI, LockMode.WRITE_LOCK)) { + if (lockedDocument == null) { - if ("XMLResource".equals(res.getResourceType())) { - collection.removeXMLResource(transaction, broker, resURI); - } else { - collection.removeBinaryResource(transaction, broker, resURI); + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "Resource " + resURI + " not found"); + } + + if (XMLResource.RESOURCE_TYPE.equals(res.getResourceType())) { + collection.removeXMLResource(transaction, broker, resURI); + } else { + collection.removeBinaryResource(transaction, broker, resURI); + } + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); } return null; @@ 
-528,11 +589,11 @@ private void storeBinaryResource(final LocalBinaryResource res) throws XMLDBExce try { final long conLength = res.getStreamLength(); if (conLength != -1) { - try (InputStream is = res.getStreamContent()) { - collection.addBinaryResource(transaction, broker, resURI, is, res.getMimeType(), conLength, res.datecreated, res.datemodified); + try (InputStream is = res.getStreamContent(broker, transaction)) { + collection.addBinaryResource(transaction, broker, resURI, is, res.getMimeType(broker, transaction), conLength, res.datecreated, res.datemodified); } } else { - collection.addBinaryResource(transaction, broker, resURI, (byte[]) res.getContent(), res.getMimeType(), res.datecreated, res.datemodified); + collection.addBinaryResource(transaction, broker, resURI, (byte[]) res.getContent(broker, transaction), res.getMimeType(broker, transaction), res.datecreated, res.datemodified); } } catch(final EXistException e) { throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); @@ -559,10 +620,10 @@ private void storeXMLResource(final LocalXMLResource res) throws XMLDBException // collection.addObserver(observer); // } - try { + try(final ManagedDocumentLock documentLock = broker.getBrokerPool().getLockManager().acquireDocumentWriteLock(collection.getURI().append(resURI))) { final IndexInfo info; if (uri != null || res.inputSource != null) { - setupParser(collection, res); + setupParser(broker, transaction, collection, res); info = collection.validateXMLResource(transaction, broker, resURI, (uri != null) ? 
new InputSource(uri) : res.inputSource); } else if (res.root != null) { info = collection.validateXMLResource(transaction, broker, resURI, res.root); @@ -571,7 +632,7 @@ private void storeXMLResource(final LocalXMLResource res) throws XMLDBException } //Notice : the document should now have a LockMode.WRITE_LOCK update lock //TODO : check that no exception occurs in order to allow it to be released - info.getDocument().getMetadata().setMimeType(res.getMimeType()); + info.getDocument().getMetadata().setMimeType(res.getMimeType(broker, transaction)); if (res.datecreated != null) { info.getDocument().getMetadata().setCreated(res.datecreated.getTime()); } @@ -587,19 +648,26 @@ private void storeXMLResource(final LocalXMLResource res) throws XMLDBException collection.store(transaction, broker, info, res.content); } + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + return null; // collection.deleteObservers(); } catch(final EXistException | SAXException e) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); } }); } - private void setupParser(final Collection collection, final LocalXMLResource res) throws XMLDBException { + private void setupParser(final DBBroker broker, final Txn transaction, final Collection collection, final LocalXMLResource res) throws XMLDBException { final String normalize = properties.getProperty(NORMALIZE_HTML, "no"); if((normalize.equalsIgnoreCase("yes") || normalize.equalsIgnoreCase("true")) && - ("text/html".equals(res.getMimeType()) || res.getId().endsWith(".htm") || + ("text/html".equals(res.getMimeType(broker, transaction)) || res.getId().endsWith(".htm") || res.getId().endsWith(".html"))) { final Optional> maybeReaderInst = HtmlToXmlParser.getHtmlToXmlParser(brokerPool.getConfiguration()); @@ -676,7 +744,7 @@ public void setReader(final XMLReader reader){ 
* * @return A function to receive a read-only operation to perform against the collection */ - private FunctionE, R, XMLDBException> read() throws XMLDBException { + protected FunctionE, R, XMLDBException> read() throws XMLDBException { return readOp -> this.read(path).apply((collection, broker, transaction) -> { collection.setReader(userReader); return readOp.apply(collection, broker, transaction); @@ -716,6 +784,25 @@ private FunctionE, R, XMLDBException> read(f }); } + /** + * Higher-order-function for performing read-only operations against this collection + * + * NOTE this read will occur using the database user set on the collection + * + * @param broker The broker to use for the operation + * @param transaction The transaction to use for the operation + * @param errorCode The error code to use in the XMLDBException if the collection does not exist, see {@link ErrorCodes} + * @return A function to receive a read-only operation to perform against the collection + * + * @throws XMLDBException if the collection could not be read + */ + private FunctionE, R, XMLDBException> read(final DBBroker broker, final Txn transaction, final int errorCode) throws XMLDBException { + return readOp -> this.read(broker, transaction, path, errorCode).apply((collection, broker1, transaction1) -> { + collection.setReader(userReader); + return readOp.apply(collection, broker1, transaction1); + }); + } + /** * Higher-order-function for performing read/write operations against this collection * diff --git a/src/org/exist/xmldb/LocalCollectionManagementService.java b/src/org/exist/xmldb/LocalCollectionManagementService.java index 2b2b4275b29..5badc678992 100644 --- a/src/org/exist/xmldb/LocalCollectionManagementService.java +++ b/src/org/exist/xmldb/LocalCollectionManagementService.java @@ -21,12 +21,20 @@ import java.net.URISyntaxException; import java.util.Date; +import java.util.Optional; +import com.evolvedbinary.j8fu.tuple.Tuple2; import org.exist.EXistException; import 
org.exist.collections.triggers.TriggerException; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Subject; import org.exist.storage.BrokerPool; +import org.exist.storage.DBBroker.PreserveType; +import org.exist.storage.lock.Lock; +import org.exist.storage.lock.ManagedCollectionLock; +import org.exist.storage.lock.ManagedDocumentLock; +import org.exist.util.LockException; import org.xmldb.api.base.Collection; import org.xmldb.api.base.ErrorCodes; import org.xmldb.api.base.XMLDBException; @@ -72,13 +80,12 @@ public Collection createCollection(final XmldbURI name, final Date created) thro withDb((broker, transaction) -> { try { - final org.exist.collections.Collection coll = broker.getOrCreateCollection(transaction, collName); - if (created != null) { - coll.setCreationTime(created.getTime()); + final org.exist.collections.Collection coll = broker.getOrCreateCollection(transaction, collName, Optional.ofNullable(created).map(c -> new Tuple2<>(null, c.getTime()))); + try(final ManagedCollectionLock collectionLock = broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(collName)) { + broker.saveCollection(transaction, coll); } - broker.saveCollection(transaction, coll); return null; - } catch (final TriggerException e) { + } catch (final LockException | TriggerException e) { throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); } }); @@ -158,15 +165,29 @@ public void moveResource(final XmldbURI src, final XmldbURI dest, final XmldbURI withDb((broker, transaction) -> modify(broker, transaction, srcPath.removeLastSegment()).apply((sourceCol, b1, t1) -> { - final DocumentImpl source = sourceCol.getDocument(b1, srcPath.lastSegment()); - if(source == null) { - throw new XMLDBException(ErrorCodes.NO_SUCH_RESOURCE, "Resource " + srcPath + " not found"); - } + try(final LockedDocument lockedSource = sourceCol.getDocumentWithLock(b1, srcPath.lastSegment(), 
Lock.LockMode.WRITE_LOCK)) { + final DocumentImpl source = lockedSource == null ? null : lockedSource.getDocument(); + if (source == null) { - return modify(b1, t1, destPath).apply((destinationCol, b2, t2) -> { - b2.moveResource(t2, source, destinationCol, newName); - return null; - }); + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + sourceCol.close(); + + throw new XMLDBException(ErrorCodes.NO_SUCH_RESOURCE, "Resource " + srcPath + " not found"); + } + + return modify(b1, t1, destPath).apply((destinationCol, b2, t2) -> { + try(final ManagedDocumentLock lockedDestination = b2.getBrokerPool().getLockManager().acquireDocumentWriteLock(destinationCol.getURI().append(newName))) { + + b2.moveResource(t2, source, destinationCol, newName); + + // NOTE: early release of Collection locks inline with Asymmetrical Locking scheme + destinationCol.close(); + sourceCol.close(); + } + + return null; + }); + } }) ); } @@ -174,7 +195,7 @@ public void moveResource(final XmldbURI src, final XmldbURI dest, final XmldbURI @Override public void copy(final String collectionPath, final String destinationPath, final String newName) throws XMLDBException { try{ - copy(XmldbURI.xmldbUriFor(collectionPath), XmldbURI.xmldbUriFor(destinationPath),XmldbURI.xmldbUriFor(newName)); + copy(XmldbURI.xmldbUriFor(collectionPath), XmldbURI.xmldbUriFor(destinationPath),XmldbURI.xmldbUriFor(newName), PreserveType.DEFAULT); } catch(final URISyntaxException e) { throw new XMLDBException(ErrorCodes.INVALID_URI,e); } @@ -182,6 +203,15 @@ public void copy(final String collectionPath, final String destinationPath, fina @Override public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name) throws XMLDBException { + copy(src, dest, name, PreserveType.DEFAULT); + } + + @Override + public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name, final String preserveType) throws XMLDBException { + copy(src, dest, name, 
PreserveType.valueOf(preserveType)); + } + + private void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name, final PreserveType preserve) throws XMLDBException { final XmldbURI srcPath = resolve(src); final XmldbURI destPath = dest == null ? srcPath.removeLastSegment() : resolve(dest); final XmldbURI newName; @@ -195,7 +225,7 @@ public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name) t read(broker, transaction, srcPath).apply((source, b1, t1) -> modify(b1, t1, destPath).apply((destination, b2, t2) -> { try { - b2.copyCollection(t2, source, destination, newName); + b2.copyCollection(t2, source, destination, newName, preserve); return null; } catch (final EXistException e) { throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "failed to move collection " + srcPath, e); @@ -208,7 +238,7 @@ public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name) t @Override public void copyResource(final String resourcePath, final String destinationPath, final String newName) throws XMLDBException { try{ - copyResource(XmldbURI.xmldbUriFor(resourcePath), XmldbURI.xmldbUriFor(destinationPath),XmldbURI.xmldbUriFor(newName)); + copyResource(XmldbURI.xmldbUriFor(resourcePath), XmldbURI.xmldbUriFor(destinationPath),XmldbURI.xmldbUriFor(newName), PreserveType.DEFAULT); } catch(final URISyntaxException e) { throw new XMLDBException(ErrorCodes.INVALID_URI,e); } @@ -216,6 +246,15 @@ public void copyResource(final String resourcePath, final String destinationPath @Override public void copyResource(final XmldbURI src, final XmldbURI dest, final XmldbURI name) throws XMLDBException { + copyResource(src, dest, name, PreserveType.DEFAULT); + } + + @Override + public void copyResource(final XmldbURI src, final XmldbURI dest, final XmldbURI name, final String preserveType) throws XMLDBException { + copyResource(src, dest, name, PreserveType.valueOf(preserveType)); + } + + private void copyResource(final XmldbURI src, final XmldbURI dest, 
final XmldbURI name, final PreserveType preserve) throws XMLDBException { final XmldbURI srcPath = resolve(src); final XmldbURI destPath = dest == null ? srcPath.removeLastSegment() : resolve(dest); final XmldbURI newName; @@ -227,19 +266,32 @@ public void copyResource(final XmldbURI src, final XmldbURI dest, final XmldbURI withDb((broker, transaction) -> read(broker, transaction, srcPath.removeLastSegment()).apply((sourceCol, b1, t1) -> { - final DocumentImpl source = sourceCol.getDocument(b1, srcPath.lastSegment()); - if(source == null) { - throw new XMLDBException(ErrorCodes.NO_SUCH_RESOURCE, "Resource " + srcPath + " not found"); - } + try(final LockedDocument lockedSource = sourceCol.getDocumentWithLock(b1, srcPath.lastSegment(), Lock.LockMode.READ_LOCK)) { + final DocumentImpl source = lockedSource == null ? null : lockedSource.getDocument(); + if (source == null) { - return modify(b1, t1, destPath).apply((destinationCol, b2, t2) -> { - try { - b2.copyResource(t2, source, destinationCol, newName); - return null; - } catch (final EXistException e) { - throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "failed to copy resource " + srcPath, e); + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + sourceCol.close(); + + throw new XMLDBException(ErrorCodes.NO_SUCH_RESOURCE, "Resource " + srcPath + " not found"); } - }); + + return modify(b1, t1, destPath).apply((destinationCol, b2, t2) -> { + try(final ManagedDocumentLock lockedDestination = b2.getBrokerPool().getLockManager().acquireDocumentWriteLock(destinationCol.getURI().append(newName))) { + try { + b2.copyResource(t2, source, destinationCol, newName, preserve); + + // NOTE: early release of Collection locks inline with Asymmetrical Locking scheme + destinationCol.close(); + sourceCol.close(); + + return null; + } catch (final EXistException e) { + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "failed to copy resource " + srcPath, e); + } + } + }); + } }) ); } diff --git 
a/src/org/exist/xmldb/LocalIndexQueryService.java b/src/org/exist/xmldb/LocalIndexQueryService.java index 0fcebba5ac7..9c8556c1309 100644 --- a/src/org/exist/xmldb/LocalIndexQueryService.java +++ b/src/org/exist/xmldb/LocalIndexQueryService.java @@ -22,11 +22,13 @@ import org.exist.collections.CollectionConfigurationException; import org.exist.collections.CollectionConfigurationManager; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.security.Subject; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock.LockMode; import org.exist.storage.sync.Sync; +import org.exist.util.LockException; import org.exist.util.Occurrences; import org.xmldb.api.base.ErrorCodes; import org.xmldb.api.base.XMLDBException; @@ -66,10 +68,14 @@ public void reindexCollection(final String collectionPath) throws XMLDBException @Override public void reindexCollection(final XmldbURI col) throws XMLDBException { final XmldbURI collectionPath = resolve(col); - withDb((broker, transaction) -> { - broker.reindexCollection(collectionPath); - broker.sync(Sync.MAJOR); - return null; + read(collectionPath).apply((collection, broker, transaction) -> { + try { + broker.reindexCollection(transaction, collectionPath); + broker.sync(Sync.MAJOR); + return null; + } catch(final LockException e) { + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e); + } }); } @@ -81,16 +87,12 @@ public void reindexDocument(final String name) throws XMLDBException { private void reindexDocument(final XmldbURI col, final String docName) throws XMLDBException { final XmldbURI collectionPath = resolve(col); withDb((broker, transaction) -> { - DocumentImpl doc = null; - try { - doc = broker.getXMLResource(collectionPath.append(docName), LockMode.READ_LOCK); - broker.reindexXMLResource(transaction, doc, DBBroker.IndexMode.STORE); - broker.sync(Sync.MAJOR); - return null; - } finally { - if(doc != null) { - 
doc.getUpdateLock().release(LockMode.READ_LOCK); + try(final LockedDocument lockedDoc = broker.getXMLResource(collectionPath.append(docName), LockMode.READ_LOCK)) { + if(lockedDoc != null) { + broker.reindexXMLResource(transaction, lockedDoc.getDocument(), DBBroker.IndexMode.STORE); + broker.sync(Sync.MAJOR); } + return null; } }); } diff --git a/src/org/exist/xmldb/LocalUserManagementService.java b/src/org/exist/xmldb/LocalUserManagementService.java index 327ffaffdb8..1d35a4cda58 100644 --- a/src/org/exist/xmldb/LocalUserManagementService.java +++ b/src/org/exist/xmldb/LocalUserManagementService.java @@ -19,23 +19,18 @@ */ package org.exist.xmldb; -import java.util.Date; -import java.util.Iterator; -import java.util.List; +import java.util.*; import org.exist.dom.persistent.DocumentImpl; -import org.exist.security.ACLPermission; -import org.exist.security.Group; -import org.exist.security.Permission; -import org.exist.security.PermissionDeniedException; -import org.exist.security.Subject; -import org.exist.security.Account; -import org.exist.security.User; +import org.exist.security.*; import org.exist.security.SecurityManager; import org.exist.security.internal.aider.ACEAider; import org.exist.security.internal.aider.UserAider; import org.exist.storage.BrokerPool; +import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; import com.evolvedbinary.j8fu.function.FunctionE; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.xmldb.function.LocalXmldbCollectionFunction; import org.exist.xmldb.function.LocalXmldbDocumentFunction; import org.exist.xmldb.function.LocalXmldbFunction; @@ -44,6 +39,8 @@ import org.xmldb.api.base.Resource; import org.xmldb.api.base.XMLDBException; +import javax.annotation.Nullable; + /** * Local Implementation (i.e. 
embedded) of an eXist-specific service * which provides methods to manage users and @@ -110,53 +107,59 @@ public void setUserPrimaryGroup(final String username, final String groupName) t @Override public void setPermissions(final Resource resource, final Permission perm) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - document.setPermissions(perm); + PermissionFactory.chown(broker, document, Optional.of(perm.getOwner().getName()), Optional.of(perm.getGroup().getName())); + PermissionFactory.chmod(broker, document, Optional.of(perm.getMode()), getAces(perm)); return null; }); } @Override public void setPermissions(final Collection child, final Permission perm) throws XMLDBException { - final XmldbURI childUri = XmldbURI.create(child.getName()); - updateCollection(childUri).apply((collection, broker, transaction) -> { - collection.setPermissions(perm); + withDb((broker, transaction) -> { + final XmldbURI childUri = getCollectionUri(broker, transaction, child); + updateCollection(broker, transaction, childUri).apply((collection, broker1, transaction1) -> { + PermissionFactory.chown(broker, collection, Optional.of(perm.getOwner().getName()), Optional.of(perm.getGroup().getName())); + PermissionFactory.chmod(broker, collection, Optional.of(perm.getMode()), getAces(perm)); + return null; + }); return null; }); } @Override public void setPermissions(final Collection child, final String owner, final String group, final int mode, final List aces) throws XMLDBException { - final XmldbURI childUri = XmldbURI.create(child.getName()); - updateCollection(childUri).apply((collection, broker, transaction) -> { - final Permission permission = collection.getPermissionsNoLock(); - permission.setOwner(owner); - permission.setGroup(group); - permission.setMode(mode); - if (permission instanceof ACLPermission) { - final ACLPermission aclPermission = (ACLPermission) permission; - aclPermission.clear(); - for (final ACEAider ace : aces) { - 
aclPermission.addACE(ace.getAccessType(), ace.getTarget(), ace.getWho(), ace.getMode()); - } - } + withDb((broker, transaction) -> { + final XmldbURI childUri = getCollectionUri(broker, transaction, child); + updateCollection(broker, transaction, childUri).apply((collection, broker1, transaction1) -> { + final Permission permission = collection.getPermissionsNoLock(); + PermissionFactory.chown(broker, collection, Optional.ofNullable(owner), Optional.ofNullable(group)); + PermissionFactory.chmod(broker, collection, Optional.of(mode), Optional.ofNullable(aces)); + return null; + }); return null; }); } - + + private Optional> getAces(@Nullable final Permission permission) { + final Optional> maybeAces; + if (permission != null && permission instanceof ACLPermission) { + final ACLPermission aclPerm = (ACLPermission)permission; + final List aces = new ArrayList<>(aclPerm.getACECount()); + for (int i = 0; i < aclPerm.getACECount(); i++) { + aces.add(new ACEAider(aclPerm.getACEAccessType(i), aclPerm.getACETarget(i), aclPerm.getACEWho(i), aclPerm.getACEMode(i))); + } + maybeAces = Optional.of(aces); + } else { + maybeAces = Optional.empty(); + } + return maybeAces; + } + @Override public void setPermissions(final Resource resource, final String owner, final String group, final int mode, final List aces) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - final Permission permission = document.getPermissions(); - permission.setOwner(owner); - permission.setGroup(group); - permission.setMode(mode); - if (permission instanceof ACLPermission) { - final ACLPermission aclPermission = (ACLPermission) permission; - aclPermission.clear(); - for (final ACEAider ace : aces) { - aclPermission.addACE(ace.getAccessType(), ace.getTarget(), ace.getWho(), ace.getMode()); - } - } + PermissionFactory.chown(broker, document, Optional.ofNullable(owner), Optional.ofNullable(group)); + PermissionFactory.chmod(broker, document, Optional.of(mode), 
Optional.ofNullable(aces)); return null; }); } @@ -165,7 +168,7 @@ public void setPermissions(final Resource resource, final String owner, final St public void chmod(final String modeStr) throws XMLDBException { final XmldbURI collUri = collection.getPathURI(); updateCollection(collUri).apply((collection, broker, transaction) -> { - collection.setPermissions(modeStr); + PermissionFactory.chmod_str(broker, collection, Optional.ofNullable(modeStr), Optional.empty()); return null; }); } @@ -173,7 +176,7 @@ public void chmod(final String modeStr) throws XMLDBException { @Override public void chmod(final Resource resource, final int mode) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - document.getPermissions().setMode(mode); + PermissionFactory.chmod(broker, document, Optional.of(mode), Optional.empty()); return null; }); } @@ -182,7 +185,7 @@ public void chmod(final Resource resource, final int mode) throws XMLDBException public void chmod(final int mode) throws XMLDBException { final XmldbURI collUri = collection.getPathURI(); updateCollection(collUri).apply((collection, broker, transaction) -> { - collection.setPermissions(mode); + PermissionFactory.chmod(broker, collection, Optional.of(mode), Optional.empty()); return null; }); } @@ -190,7 +193,7 @@ public void chmod(final int mode) throws XMLDBException { @Override public void chmod(final Resource resource, final String modeStr) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - document.getPermissions().setMode(modeStr); + PermissionFactory.chmod_str(broker, document, Optional.ofNullable(modeStr), Optional.empty()); return null; }); } @@ -199,8 +202,7 @@ public void chmod(final Resource resource, final String modeStr) throws XMLDBExc public void chgrp(final String group) throws XMLDBException { final XmldbURI collUri = collection.getPathURI(); updateCollection(collUri).apply((collection, broker, transaction) -> { - final Permission 
permission = collection.getPermissionsNoLock(); - permission.setGroup(group); + PermissionFactory.chown(broker, collection, Optional.empty(), Optional.ofNullable(group)); return null; }); } @@ -209,8 +211,7 @@ public void chgrp(final String group) throws XMLDBException { public void chown(final Account u) throws XMLDBException { final XmldbURI collUri = collection.getPathURI(); updateCollection(collUri).apply((collection, broker, transaction) -> { - final Permission permission = collection.getPermissionsNoLock(); - permission.setOwner(u); + PermissionFactory.chown(broker, collection, Optional.ofNullable(u).map(Account::getName), Optional.empty()); return null; }); } @@ -219,9 +220,7 @@ public void chown(final Account u) throws XMLDBException { public void chown(final Account u, final String group) throws XMLDBException { final XmldbURI collUri = collection.getPathURI(); updateCollection(collUri).apply((collection, broker, transaction) -> { - final Permission permission = collection.getPermissionsNoLock(); - permission.setOwner(u); - permission.setGroup(group); + PermissionFactory.chown(broker, collection, Optional.ofNullable(u).map(Account::getName), Optional.ofNullable(group)); return null; }); } @@ -229,7 +228,7 @@ public void chown(final Account u, final String group) throws XMLDBException { @Override public void chgrp(final Resource resource, final String group) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - document.getPermissions().setGroup(group); + PermissionFactory.chown(broker, document, Optional.empty(), Optional.ofNullable(group)); return null; }); } @@ -237,7 +236,7 @@ public void chgrp(final Resource resource, final String group) throws XMLDBExcep @Override public void chown(final Resource resource, final Account u) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - document.getPermissions().setOwner(u); + PermissionFactory.chown(broker, document, 
Optional.ofNullable(u).map(Account::getName), Optional.empty()); return null; }); } @@ -245,8 +244,7 @@ public void chown(final Resource resource, final Account u) throws XMLDBExceptio @Override public void chown(final Resource resource, final Account u, final String group) throws XMLDBException { modify(resource).apply((document, broker, transaction) -> { - document.getPermissions().setOwner(u); - document.getPermissions().setGroup(group); + PermissionFactory.chown(broker, document, Optional.ofNullable(u).map(Account::getName), Optional.ofNullable(group)); return null; }); } @@ -353,16 +351,18 @@ public Permission getPermissions(final Resource resource) throws XMLDBException public Permission[] listResourcePermissions() throws XMLDBException { final XmldbURI collectionUri = collection.getPathURI(); return this.read(collectionUri).apply((collection, broker, transaction) -> { - if(!collection.getPermissionsNoLock().validate(user, Permission.READ)) { + if (!collection.getPermissionsNoLock().validate(user, Permission.READ)) { return new Permission[0]; } final Permission perms[] = new Permission[collection.getDocumentCount(broker)]; final Iterator itDocument = collection.iterator(broker); int i = 0; - while(itDocument.hasNext()) { + while (itDocument.hasNext()) { final DocumentImpl document = itDocument.next(); - perms[i++] = document.getPermissions(); + try(final ManagedDocumentLock documentLock = broker.getBrokerPool().getLockManager().acquireDocumentReadLock(document.getURI())) { + perms[i++] = document.getPermissions(); + } } return perms; @@ -605,4 +605,19 @@ private FunctionE, R, XMLDBException> update return result; }); } + + /** + * Higher-order-function for updating a collection and its metadata + * + * @param broker + * @param transaction + * @param collectionUri The collection to perform read/write operations on + */ + private FunctionE, R, XMLDBException> updateCollection(final DBBroker broker, final Txn transaction, final XmldbURI collectionUri) throws 
XMLDBException { + return updateOp -> this.modify(broker, transaction, collectionUri).apply((collection, broker1, transaction1) -> { + final R result = updateOp.apply(collection, broker1, transaction1); + broker1.saveCollection(transaction1, collection); + return result; + }); + } } diff --git a/src/org/exist/xmldb/LocalXMLResource.java b/src/org/exist/xmldb/LocalXMLResource.java index 42308521767..818c0ab01aa 100644 --- a/src/org/exist/xmldb/LocalXMLResource.java +++ b/src/org/exist/xmldb/LocalXMLResource.java @@ -20,10 +20,11 @@ package org.exist.xmldb; import com.evolvedbinary.j8fu.function.ConsumerE; -import net.sf.cglib.proxy.Enhancer; -import net.sf.cglib.proxy.MethodInterceptor; -import net.sf.cglib.proxy.MethodProxy; +import com.evolvedbinary.j8fu.tuple.Tuple3; +import net.sf.cglib.proxy.*; +import org.exist.dom.memtree.DocumentImpl; import org.exist.dom.persistent.NodeProxy; +import org.exist.dom.persistent.StoredNode; import org.exist.dom.persistent.XMLUtil; import org.exist.dom.memtree.AttrImpl; import org.exist.dom.memtree.NodeImpl; @@ -32,7 +33,9 @@ import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.serializers.Serializer; +import org.exist.storage.txn.Txn; import org.exist.util.MimeType; +import com.evolvedbinary.j8fu.Either; import org.exist.util.serializer.DOMSerializer; import org.exist.util.serializer.DOMStreamer; import org.exist.util.serializer.SAXSerializer; @@ -61,6 +64,7 @@ import java.util.Properties; import java.util.stream.Stream; +import static com.evolvedbinary.j8fu.tuple.Tuple.Tuple; import static java.nio.charset.StandardCharsets.UTF_8; /** @@ -239,7 +243,7 @@ public Node getContentAsDOM() throws XMLDBException { * Provides a safe export of an internal persistent DOM * node from eXist via the Local XML:DB API. * - * This is done by providing a proxy object that only implements + * This is done by providing a cglib Proxy object that only implements * the appropriate W3C DOM interface. 
This helps prevent the * XML:DB Local API from leaking implementation through * its abstractions. @@ -252,35 +256,16 @@ private Node exportInternalNode(final Node node) { final Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(domClazz.get()); - enhancer.setCallback(new MethodInterceptor() { - @Override - public Object intercept(final Object obj, final Method method, final Object[] args, final MethodProxy proxy) throws Throwable { - - final Object domResult = method.invoke(node, args); - - if(domResult != null && Node.class.isAssignableFrom(method.getReturnType())) { - return exportInternalNode((Node) domResult); //recursively wrap node result - - } else if(domResult != null && method.getReturnType().equals(NodeList.class)) { - final NodeList underlying = (NodeList)domResult; //recursively wrap nodes in nodelist result - return new NodeList() { - @Override - public Node item(final int index) { - return Optional.ofNullable(underlying.item(index)) - .map(n -> exportInternalNode(n)) - .orElse(null); - } - - @Override - public int getLength() { - return underlying.getLength(); - } - }; - } else { - return domResult; - } - } - }); + final Class[] interfaceClasses; + if (node instanceof StoredNode) { + interfaceClasses = new Class[]{domClazz.get(), StoredNodeIdentity.class}; + } else if (node instanceof org.exist.dom.memtree.NodeImpl) { + interfaceClasses = new Class[]{domClazz.get(), MemtreeNodeIdentity.class}; + } else { + interfaceClasses = new Class[] { domClazz.get() }; + } + enhancer.setInterfaces(interfaceClasses); + enhancer.setCallback(new DOMMethodInterceptor(node)); return (Node)enhancer.create(); } @@ -292,6 +277,113 @@ private Optional> getW3cNodeInterface(final Class (Class)c); } + private class DOMMethodInterceptor implements MethodInterceptor { + private final Node node; + + public DOMMethodInterceptor(final Node node) { + this.node = node; + } + + @Override + public Object intercept(final Object obj, final Method method, final Object[] args, 
final MethodProxy proxy) throws Throwable { + /* + NOTE(AR): we have to take special care of eXist-db's + persistent and memtree DOM's node equality. + + For the persistent DOM, we reproduce in the proxy the behaviour taken + by org.exist.dom.persistent.StoredNode#equals(Object), + by overriding equals for StoredNode's and then implementing + a method to retrieve the nodeIds from each side of the equality + comparison. We have to do this as StoredNode attempts instanceof + equality which will fail against the proxied objects. + + For the memtree DOM, we reproduce in the proxy the behaviour taken + by org.exist.dom.memtree.NodeImpl#equals(Object), + by overriding equals for memtree.NodeImpl's and then implementing + a method to retrieve the nodeIds from each side of the equality + comparison. We have to do this as NodeImpl attempts instanceof and + reference equality which will fail against the proxied objects. + */ + Object domResult = null; + if(method.getName().equals("equals") + && obj instanceof StoredNodeIdentity + && args.length == 1 && args[0] instanceof StoredNodeIdentity) { + final StoredNodeIdentity ni1 = ((StoredNodeIdentity) obj); + final StoredNodeIdentity ni2 = ((StoredNodeIdentity) args[0]); + + final Optional niEquals = ni1.getNodeId().flatMap(n1id -> ni2.getNodeId().map(n2id -> n1id.equals(n2id))); + if (niEquals.isPresent()) { + domResult = niEquals.get(); + } + } else if(method.getName().equals("equals") + && obj instanceof MemtreeNodeIdentity + && args.length == 1 && args[0] instanceof MemtreeNodeIdentity) { + final MemtreeNodeIdentity ni1 = ((MemtreeNodeIdentity) obj); + final MemtreeNodeIdentity ni2 = ((MemtreeNodeIdentity) args[0]); + + final Optional niEquals = ni1.getNodeId().flatMap(n1id -> ni2.getNodeId().map(n2id -> n1id._1 == n2id._1 && n1id._2 == n2id._2 && n1id._3 == n2id._3)); + if (niEquals.isPresent()) { + domResult = niEquals.get(); + } + } else if(method.getName().equals("getNodeId")) { + if (obj instanceof StoredNodeIdentity + 
&& args.length == 0 + && node instanceof StoredNode) { + domResult = Optional.of(((StoredNode) node).getNodeId()); + } else if (obj instanceof MemtreeNodeIdentity + && args.length == 0 + && node instanceof org.exist.dom.memtree.NodeImpl) { + final org.exist.dom.memtree.NodeImpl memtreeNode = (org.exist.dom.memtree.NodeImpl) node; + domResult = Optional.of(Tuple(memtreeNode.getOwnerDocument(), memtreeNode.getNodeNumber(), memtreeNode.getNodeType())); + } else { + domResult = Optional.empty(); + } + } + + if (domResult == null) { + domResult = method.invoke(node, args); + } + + if(domResult != null && Node.class.isAssignableFrom(method.getReturnType())) { + return exportInternalNode((Node) domResult); //recursively wrap node result + + } else if(domResult != null && method.getReturnType().equals(NodeList.class)) { + final NodeList underlying = (NodeList)domResult; //recursively wrap nodes in nodelist result + return new NodeList() { + @Override + public Node item(final int index) { + return Optional.ofNullable(underlying.item(index)) + .map(n -> exportInternalNode(n)) + .orElse(null); + } + + @Override + public int getLength() { + return underlying.getLength(); + } + }; + } else { + return domResult; + } + } + } + + /** + * Used by {@link DOMMethodInterceptor} to + * help with equality of persistent DOM nodes. + */ + private interface StoredNodeIdentity { + Optional getNodeId(); + } + + /** + * Used by {@link DOMMethodInterceptor} to + * help with equality of memtree DOM nodes. 
+ */ + private interface MemtreeNodeIdentity { + Optional> getNodeId(); + } + @Override public void getContentAsSAX(final ContentHandler handler) throws XMLDBException { @@ -436,6 +528,19 @@ public NodeProxy getNode() throws XMLDBException { } } + /** + * Similar to {@link org.exist.xmldb.LocalXMLResource#getNode()} + * but useful for operations within the XML:DB Local API + * that are already working within a transaction + */ + public NodeProxy getNode(final DBBroker broker, final Txn transaction) throws XMLDBException { + if(proxy != null) { + return proxy; + } else { + return this.read(broker, transaction).apply((document, broker1, transaction1) -> new NodeProxy(document, NodeId.DOCUMENT_NODE)); + } + } + @Override public DocumentType getDocType() throws XMLDBException { return read((document, broker, transaction) -> document.getDoctype()); diff --git a/src/org/exist/xmldb/LocalXPathQueryService.java b/src/org/exist/xmldb/LocalXPathQueryService.java index b327a5a5d82..0a498cbef65 100644 --- a/src/org/exist/xmldb/LocalXPathQueryService.java +++ b/src/org/exist/xmldb/LocalXPathQueryService.java @@ -118,43 +118,46 @@ public ResourceSet query(final XMLResource res, final String query) throws XMLDB @Override public ResourceSet query(final String query, final String sortBy) throws XMLDBException { - final XmldbURI[] docs = new XmldbURI[] { XmldbURI.create(collection.getName()) }; - return doQuery(query, docs, null, sortBy); + return withDb((broker, transaction) -> { + final XmldbURI[] docs = new XmldbURI[] { XmldbURI.create(collection.getName(broker, transaction)) }; + return doQuery(broker, transaction, query, docs, null, sortBy); + }); } @Override public ResourceSet query(final XMLResource res, final String query, final String sortBy) throws XMLDBException { final Node n = ((LocalXMLResource) res).root; - if (n != null && n instanceof org.exist.dom.memtree.NodeImpl) { - final XmldbURI[] docs = new XmldbURI[] { XmldbURI.create(res.getParentCollection().getName()) }; 
- return doQuery(query, docs, (org.exist.dom.memtree.NodeImpl)n, sortBy); - } - final NodeProxy node = ((LocalXMLResource) res).getNode(); - if (node == null) { - // resource is a document - //TODO : use dedicated function in XmldbURI - final XmldbURI[] docs = new XmldbURI[] { XmldbURI.create(res.getParentCollection().getName()).append(res.getDocumentId()) }; - return doQuery(query, docs, null, sortBy); - } else { - final NodeSet set = new ExtArrayNodeSet(1); - set.add(node); - final XmldbURI[] docs = new XmldbURI[] { node.getOwnerDocument().getURI() }; - return doQuery(query, docs, set, sortBy); - } - } - private ResourceSet doQuery(final String query, final XmldbURI[] docs, final Sequence contextSet, final String sortExpr) throws XMLDBException { return withDb((broker, transaction) -> { - final Either maybeExpr = compileAndCheck(broker, transaction, query); - if(maybeExpr.isLeft()) { - final XPathException e = maybeExpr.left().get(); - throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); + if (n != null && n instanceof org.exist.dom.memtree.NodeImpl) { + final XmldbURI[] docs = new XmldbURI[]{ getCollectionUri(broker, transaction, res.getParentCollection()) }; + return doQuery(broker, transaction, query, docs, (org.exist.dom.memtree.NodeImpl) n, sortBy); + } + final NodeProxy node = ((LocalXMLResource) res).getNode(broker, transaction); + if (node == null) { + // resource is a document + //TODO : use dedicated function in XmldbURI + final XmldbURI[] docs = new XmldbURI[]{ getCollectionUri(broker, transaction, res.getParentCollection()).append(res.getDocumentId()) }; + return doQuery(broker, transaction, query, docs, null, sortBy); } else { - return execute(broker, transaction, docs, contextSet, maybeExpr.right().get(), sortExpr); + final NodeSet set = new ExtArrayNodeSet(1); + set.add(node); + final XmldbURI[] docs = new XmldbURI[]{node.getOwnerDocument().getURI()}; + return doQuery(broker, transaction, query, docs, set, sortBy); } }); } + 
private ResourceSet doQuery(final DBBroker broker, final Txn transaction, final String query, final XmldbURI[] docs, final Sequence contextSet, final String sortExpr) throws XMLDBException { + final Either maybeExpr = compileAndCheck(broker, transaction, query); + if(maybeExpr.isLeft()) { + final XPathException e = maybeExpr.left().get(); + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); + } else { + return execute(broker, transaction, docs, contextSet, maybeExpr.right().get(), sortExpr); + } + } + @Override public ResourceSet execute(final CompiledExpression expression) throws XMLDBException { return withDb((broker, transaction) -> @@ -164,10 +167,10 @@ public ResourceSet execute(final CompiledExpression expression) throws XMLDBExce @Override public ResourceSet execute(final XMLResource res, final CompiledExpression expression) throws XMLDBException { return withDb((broker, transaction) -> { - final NodeProxy node = ((LocalXMLResource) res).getNode(); + final NodeProxy node = ((LocalXMLResource) res).getNode(broker, transaction); if (node == null) { // resource is a document - final XmldbURI[] docs = new XmldbURI[]{XmldbURI.create(res.getParentCollection().getName()).append(res.getDocumentId())}; + final XmldbURI[] docs = new XmldbURI[]{ getCollectionUri(broker, transaction, res.getParentCollection()).append(res.getDocumentId()) }; return execute(broker, transaction, docs, null, expression, null); } else { final NodeSet set = new ExtArrayNodeSet(1); @@ -247,7 +250,7 @@ private ResourceSet execute(final LocalXmldbFunction sourceOp) throws XM final Source source = sourceOp.apply(broker, transaction); - final XmldbURI[] docs = new XmldbURI[]{XmldbURI.create(collection.getName())}; + final XmldbURI[] docs = new XmldbURI[]{XmldbURI.create(collection.getName(broker, transaction))}; final XQuery xquery = brokerPool.getXQueryService(); final XQueryPool pool = brokerPool.getXQueryPool(); @@ -258,6 +261,7 @@ private ResourceSet execute(final 
LocalXmldbFunction sourceOp) throws XM context = new XQueryContext(broker.getBrokerPool()); } else { context = compiled.getContext(); + context.prepareForReuse(); } context.setStaticallyKnownDocuments(docs); @@ -332,12 +336,14 @@ private Either compileAndCheck(final DBBroke @Override public ResourceSet queryResource(final String resource, final String query) throws XMLDBException { - final LocalXMLResource res = (LocalXMLResource) collection.getResource(resource); - if (res == null) { - throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "resource '" + resource + "' not found"); - } - final XmldbURI[] docs = new XmldbURI[] { XmldbURI.create(res.getParentCollection().getName()).append(res.getDocumentId()) }; - return doQuery(query, docs, null, null); + return withDb((broker, transaction) -> { + final LocalXMLResource res = (LocalXMLResource) collection.getResource(broker, transaction, resource); + if (res == null) { + throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "resource '" + resource + "' not found"); + } + final XmldbURI[] docs = new XmldbURI[]{ getCollectionUri(broker, transaction, res.getParentCollection()).append(res.getDocumentId()) }; + return doQuery(broker, transaction, query, docs, null, null); + }); } protected void setupContext(final Source source, final XQueryContext context) throws XMLDBException, XPathException { @@ -382,6 +388,7 @@ protected void setupContext(final Source source, final XQueryContext context) th @Override public void beginProtected() throws XMLDBException { try { + int retries = BEGIN_PROTECTED_MAX_LOCKING_RETRIES == - 1 ? 
-1 : BEGIN_PROTECTED_MAX_LOCKING_RETRIES - 2; boolean deadlockCaught; do { reservedBroker = brokerPool.get(Optional.of(user)); @@ -392,9 +399,9 @@ public void beginProtected() throws XMLDBException { lockedDocuments = new LockedDocumentMap(); docs = new DefaultDocumentSet(); coll.allDocs(reservedBroker, docs, true, lockedDocuments, LockMode.WRITE_LOCK); + return; } catch (final LockException e) { - LOG.debug("Deadlock detected. Starting over again. Docs: " + docs.getDocumentCount() + "; locked: " + - lockedDocuments.size()); + LOG.warn("Deadlock detected. Starting over again. Docs: {}; locked: {}. Cause: {}", docs.getDocumentCount(), lockedDocuments.size(), e.getMessage()); lockedDocuments.unlock(); reservedBroker.close(); deadlockCaught = true; @@ -402,13 +409,16 @@ public void beginProtected() throws XMLDBException { throw new XMLDBException(ErrorCodes.PERMISSION_DENIED, "Permission denied on document"); } - } while (deadlockCaught); + retries--; + } while (deadlockCaught && retries >= -1); } catch (final EXistException e) { if(reservedBroker != null) { reservedBroker.close(); } throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage()); } + + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "Unable to beginProtected after " + BEGIN_PROTECTED_MAX_LOCKING_RETRIES + " retries"); } /** diff --git a/src/org/exist/xmldb/LocalXUpdateQueryService.java b/src/org/exist/xmldb/LocalXUpdateQueryService.java index 52d8d3d07b0..0fc5dae724c 100644 --- a/src/org/exist/xmldb/LocalXUpdateQueryService.java +++ b/src/org/exist/xmldb/LocalXUpdateQueryService.java @@ -24,9 +24,11 @@ import org.exist.dom.persistent.DefaultDocumentSet; import org.exist.dom.persistent.DocumentImpl; +import org.exist.dom.persistent.LockedDocument; import org.exist.dom.persistent.MutableDocumentSet; import org.exist.security.Subject; import org.exist.storage.BrokerPool; +import org.exist.storage.lock.Lock; import org.exist.util.LockException; import org.exist.xupdate.Modification; import 
org.exist.xupdate.XUpdateProcessor; @@ -37,6 +39,7 @@ import org.xmldb.api.modules.XUpdateQueryService; import javax.xml.parsers.ParserConfigurationException; +import java.io.Reader; import java.io.StringReader; import java.net.URISyntaxException; @@ -77,11 +80,17 @@ public long updateResource(final String id, final String commands) throws XMLDBE } else { try { final XmldbURI resourceURI = XmldbURI.xmldbUriFor(id); - final DocumentImpl doc = collection.getDocument(broker1, resourceURI); - if (doc == null) { - throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "Resource not found: " + id); + try(final LockedDocument lockedDocument = collection.getDocumentWithLock(broker1, resourceURI, Lock.LockMode.READ_LOCK)) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + final DocumentImpl doc = lockedDocument == null ? null : lockedDocument.getDocument(); + if (doc == null) { + throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "Resource not found: " + id); + } + d.add(doc); } - d.add(doc); } catch(final URISyntaxException e) { throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(),e); } @@ -89,7 +98,7 @@ public long updateResource(final String id, final String commands) throws XMLDBE return d; }); - try { + try(final Reader reader = new StringReader(commands)) { if (processor == null) { processor = new XUpdateProcessor(broker, docs); } else { @@ -97,7 +106,7 @@ public long updateResource(final String id, final String commands) throws XMLDBE processor.setDocumentSet(docs); } - final Modification modifications[] = processor.parse(new InputSource(new StringReader(commands))); + final Modification modifications[] = processor.parse(new InputSource(reader)); long mods = 0; for (int i = 0; i < modifications.length; i++) { mods += modifications[i].process(transaction); diff --git a/src/org/exist/xmldb/RemoteCollection.java b/src/org/exist/xmldb/RemoteCollection.java index 8f5aad798c6..014b2fe5351 100644 
--- a/src/org/exist/xmldb/RemoteCollection.java +++ b/src/org/exist/xmldb/RemoteCollection.java @@ -44,6 +44,8 @@ import java.util.*; import java.util.stream.Stream; +import static java.nio.charset.StandardCharsets.UTF_8; + /** * A remote implementation of the Collection interface. This implementation * communicates with the server through the XMLRPC protocol. @@ -590,7 +592,8 @@ private void uploadAndStore(final Resource res) throws XMLDBException { descString = ((EXistInputSource) content).getSymbolicPath(); } } else if (content instanceof String) { - is = new FastByteArrayInputStream(((String) content).getBytes()); + // TODO(AR) we really should not allow String to be used here, as we lose the encoding info and default to UTF-8! + is = new FastByteArrayInputStream(((String) content).getBytes(UTF_8)); } else { LOG.error("Unable to get content from {}", content); } @@ -599,44 +602,59 @@ private void uploadAndStore(final Resource res) throws XMLDBException { final byte[] chunk; if (res instanceof ExtendedResource) { if(res instanceof AbstractRemoteResource) { - chunk = new byte[(int)Math.min(((AbstractRemoteResource)res).getContentLength(), MAX_UPLOAD_CHUNK)]; + final long contentLen = ((AbstractRemoteResource)res).getContentLength(); + if (contentLen != -1) { + // content length is known + chunk = new byte[(int)Math.min(contentLen, MAX_UPLOAD_CHUNK)]; + } else { + chunk = new byte[MAX_UPLOAD_CHUNK]; + } } else { - chunk = new byte[(int)Math.min(((ExtendedResource)res).getStreamLength(), MAX_UPLOAD_CHUNK)]; + final long streamLen = ((ExtendedResource)res).getStreamLength(); + if (streamLen != -1) { + // stream length is known + chunk = new byte[(int)Math.min(streamLen, MAX_UPLOAD_CHUNK)]; + } else { + chunk = new byte[MAX_UPLOAD_CHUNK]; + } } } else { chunk = new byte[MAX_UPLOAD_CHUNK]; } try { - int len; + String fileName = null; - while ((len = is.read(chunk)) > -1) { - final List params = new ArrayList<>(); - if (fileName != null) { - params.add(fileName); - } 
+ if (chunk.length > 0) { + int len; + while ((len = is.read(chunk)) > -1) { + final List params = new ArrayList<>(); + if (fileName != null) { + params.add(fileName); + } /* Only compress the chunk if it is larger than 256 bytes, otherwise the compression framing overhead results in a larger chunk */ - if (len < 256) { - params.add(chunk); - params.add(len); - fileName = (String) xmlRpcClientLease.get().execute("upload", params); - } else { - final byte[] compressed = Compressor.compress(chunk, len); - params.add(compressed); - params.add(len); - fileName = (String) xmlRpcClientLease.get().execute("uploadCompressed", params); + if (len < 256) { + params.add(chunk); + params.add(len); + fileName = (String) xmlRpcClientLease.get().execute("upload", params); + } else { + final byte[] compressed = Compressor.compress(chunk, len); + params.add(compressed); + params.add(len); + fileName = (String) xmlRpcClientLease.get().execute("uploadCompressed", params); + } } } - // Zero length stream? Let's get a fileName! + if (fileName == null) { - final byte[] compressed = Compressor.compress(new byte[0], 0); + // Zero length stream? Let's get a fileName! 
final List params = new ArrayList<>(); - params.add(compressed); + params.add(new byte[0]); params.add(0); - fileName = (String) xmlRpcClientLease.get().execute("uploadCompressed", params); + fileName = (String) xmlRpcClientLease.get().execute("upload", params); } final Listparams = new ArrayList<>(); @@ -680,7 +698,7 @@ private void uploadAndStore(final Resource res) throws XMLDBException { } catch (final IOException e) { throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, "failed to read resource from " + descString, e); } catch (final XmlRpcException e) { - throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "networking error", e); + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "API error: " + e.getMessage(), e); } } finally { if(is != null) { @@ -706,7 +724,7 @@ public void setTriggersEnabled(final boolean triggersEnabled) throws XMLDBExcept try { xmlRpcClientLease.get().execute("setTriggersEnabled", params); } catch (final XmlRpcException e) { - throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "networking error", e); + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "API error: " + e.getMessage(), e); } } } diff --git a/src/org/exist/xmldb/RemoteCollectionManagementService.java b/src/org/exist/xmldb/RemoteCollectionManagementService.java index 6ce82dbe72e..e923e1bc4c7 100644 --- a/src/org/exist/xmldb/RemoteCollectionManagementService.java +++ b/src/org/exist/xmldb/RemoteCollectionManagementService.java @@ -234,7 +234,7 @@ public void moveResource(final XmldbURI src, final XmldbURI dest, final XmldbURI public void copy(final String collectionPath, final String destinationPath, final String newName) throws XMLDBException { try { - copy(XmldbURI.xmldbUriFor(collectionPath), XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName)); + copy(XmldbURI.xmldbUriFor(collectionPath), XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), "DEFAULT"); } catch (final URISyntaxException e) { throw new XMLDBException(ErrorCodes.INVALID_URI, 
e); } @@ -242,6 +242,11 @@ public void copy(final String collectionPath, final String destinationPath, @Override public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name) throws XMLDBException { + copy(src, dest, name, "DEFAULT"); + } + + @Override + public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name, final String preserveType) throws XMLDBException { final XmldbURI srcPath = resolve(src); final XmldbURI destPath = dest == null ? srcPath.removeLastSegment() : resolve(dest); final XmldbURI newName; @@ -255,6 +260,7 @@ public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name) t params.add(srcPath.toString()); params.add(destPath.toString()); params.add(newName.toString()); + params.add(preserveType); try { client.execute("copyCollection", params); } catch (final XmlRpcException xre) { @@ -270,9 +276,9 @@ public void copy(final XmldbURI src, final XmldbURI dest, final XmldbURI name) t @Deprecated @Override public void copyResource(final String resourcePath, final String destinationPath, - final String newName) throws XMLDBException { + final String newName) throws XMLDBException { try { - copyResource(XmldbURI.xmldbUriFor(resourcePath), XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName)); + copyResource(XmldbURI.xmldbUriFor(resourcePath), XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), "DEFAULT"); } catch (final URISyntaxException e) { throw new XMLDBException(ErrorCodes.INVALID_URI, e); } @@ -281,6 +287,12 @@ public void copyResource(final String resourcePath, final String destinationPath @Override public void copyResource(final XmldbURI src, final XmldbURI dest, final XmldbURI name) throws XMLDBException { + copyResource(src, dest, name, "DEFAULT"); + } + + @Override + public void copyResource(final XmldbURI src, final XmldbURI dest, final XmldbURI name, final String preserveType) + throws XMLDBException { final XmldbURI srcPath = resolve(src); final XmldbURI 
destPath = dest == null ? srcPath.removeLastSegment() : resolve(dest); final XmldbURI newName; @@ -293,6 +305,7 @@ public void copyResource(final XmldbURI src, final XmldbURI dest, final XmldbURI params.add(srcPath.toString()); params.add(destPath.toString()); params.add(newName.toString()); + params.add(preserveType); try { client.execute("copyResource", params); } catch (final XmlRpcException xre) { diff --git a/src/org/exist/xmldb/RemoteResourceSet.java b/src/org/exist/xmldb/RemoteResourceSet.java index 33553e10f1b..47d64cd9064 100644 --- a/src/org/exist/xmldb/RemoteResourceSet.java +++ b/src/org/exist/xmldb/RemoteResourceSet.java @@ -222,6 +222,7 @@ public Resource getResource(final long pos) throws XMLDBException { private RemoteXMLResource getResourceNode(final int pos, final Map nodeDetail) throws XMLDBException { final String doc = nodeDetail.get("docUri"); final Optional s_id = Optional.ofNullable(nodeDetail.get("nodeId")); + final Optional s_type = Optional.ofNullable(nodeDetail.get("type")); final XmldbURI docUri; try { docUri = XmldbURI.xmldbUriFor(doc); @@ -239,7 +240,8 @@ private RemoteXMLResource getResourceNode(final int pos, final Map id; + private final Optional type; private final int handle; private int pos = -1; private String content = null; @@ -85,9 +86,32 @@ public class RemoteXMLResource private Properties outputProperties = null; private LexicalHandler lexicalHandler = null; + /** + * @deprecated Use {@link #RemoteXMLResource(Leasable.Lease, RemoteCollection, XmldbURI, Optional, Optional)}. 
+ */ + @Deprecated public RemoteXMLResource(final Leasable.Lease xmlRpcClientLease, final RemoteCollection parent, final XmldbURI docId, final Optional id) throws XMLDBException { - this(xmlRpcClientLease, parent, -1, -1, docId, id); + this(xmlRpcClientLease, parent, -1, -1, docId, id, Optional.empty()); + } + + public RemoteXMLResource(final Leasable.Lease xmlRpcClientLease, final RemoteCollection parent, final XmldbURI docId, final Optional id, final Optional type) + throws XMLDBException { + this(xmlRpcClientLease, parent, -1, -1, docId, id, type); + } + + /** + * @deprecated Use {@link #RemoteXMLResource(Leasable.Lease, RemoteCollection, int, int, XmldbURI, Optional, Optional)}. + */ + @Deprecated + public RemoteXMLResource( + final Leasable.Lease xmlRpcClientLease, + final RemoteCollection parent, + final int handle, + final int pos, + final XmldbURI docId, + final Optional id) throws XMLDBException { + this(xmlRpcClientLease, parent, handle, pos, docId, id, Optional.empty()); } public RemoteXMLResource( @@ -96,12 +120,14 @@ public RemoteXMLResource( final int handle, final int pos, final XmldbURI docId, - final Optional id) + final Optional id, + final Optional type) throws XMLDBException { super(xmlRpcClientLease, parent, docId, MimeType.XML_TYPE.getName()); this.handle = handle; this.pos = pos; this.id = id; + this.type = type; } @Override @@ -164,8 +190,13 @@ public Node getContentAsDOM() throws XMLDBException { factory.setValidating(false); final DocumentBuilder builder = factory.newDocumentBuilder(); final Document doc = builder.parse(is); - // return a full DOM doc, with root PI and comments - return doc; + + final boolean isDocumentNode = type.map(t -> t.equals("document-node()")).orElse(true); + if (isDocumentNode) { + return doc; + } else { + return doc.getFirstChild(); + } } catch (final SAXException | IOException | ParserConfigurationException e) { throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); } finally { diff --git 
a/src/org/exist/xmldb/XmldbURI.java b/src/org/exist/xmldb/XmldbURI.java index b19768d1c3c..5e994e0b929 100644 --- a/src/org/exist/xmldb/XmldbURI.java +++ b/src/org/exist/xmldb/XmldbURI.java @@ -607,7 +607,7 @@ public XmldbURI append(final XmldbURI uri) { return uri; } - if (!(prepend.charAt(prepend.length() - 1) == '/') && !(toAppend.charAt(0) == '/')) { + if (prepend.charAt(prepend.length() - 1) != '/' && toAppend.charAt(0) != '/') { return XmldbURI.create(prepend + "/" + toAppend, hadXmldbPrefix); } else { return XmldbURI.create(prepend + toAppend, hadXmldbPrefix); diff --git a/src/org/exist/xmldb/txn/bridge/InTxnLocalCollection.java b/src/org/exist/xmldb/txn/bridge/InTxnLocalCollection.java new file mode 100644 index 00000000000..9f20f98e646 --- /dev/null +++ b/src/org/exist/xmldb/txn/bridge/InTxnLocalCollection.java @@ -0,0 +1,151 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2016 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ */ +package org.exist.xmldb.txn.bridge; + +import org.exist.EXistException; +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; +import org.exist.storage.DBBroker; +import org.exist.storage.txn.Txn; +import org.exist.xmldb.*; +import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.ErrorCodes; +import org.xmldb.api.base.Service; +import org.xmldb.api.base.XMLDBException; + +import java.net.URISyntaxException; +import java.util.Optional; + +/** + * Avoids overlapping transactions on Collections + * when the XML:DB Local API executes XQuery that then + * calls the XMLDB XQuery Module which then tries + * to use the XML:DB Local API + * + * @author Adam Retter + */ +public class InTxnLocalCollection extends LocalCollection { + public InTxnLocalCollection(final Subject user, final BrokerPool brokerPool, final LocalCollection parent, final XmldbURI name) throws XMLDBException { + super(user, brokerPool, parent, name); + } + + @Override + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + return withDb(brokerPool, user, dbOperation); + } + + static R withDb(final BrokerPool brokerPool, final Subject user, final LocalXmldbFunction dbOperation) throws XMLDBException { + try (final DBBroker broker = brokerPool.get(Optional.of(user)); + final Txn transaction = broker.continueOrBeginTransaction()) { + final R result = dbOperation.apply(broker, transaction); + transaction.commit(); + return result; + } catch (final EXistException e) { + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); + } + } + + @Override + public Service getService(final String name, final String version) throws XMLDBException { + final Service service; + switch(name) { + case "XPathQueryService": + case "XQueryService": + service = new InTxnLocalXPathQueryService(user, brokerPool, this); + break; + + case "CollectionManagementService": + case "CollectionManager": + service = new 
InTxnLocalCollectionManagementService(user, brokerPool, this); + break; + + case "UserManagementService": + service = new InTxnLocalUserManagementService(user, brokerPool, this); + break; + + case "DatabaseInstanceManager": + service = new LocalDatabaseInstanceManager(user, brokerPool); + break; + + case "XUpdateQueryService": + service = new InTxnLocalXUpdateQueryService(user, brokerPool, this); + break; + + case "IndexQueryService": + service = new InTxnLocalIndexQueryService(user, brokerPool, this); + break; + + default: + throw new XMLDBException(ErrorCodes.NO_SUCH_SERVICE); + } + return service; + } + + @Override + public Service[] getServices() throws XMLDBException { + final Service[] services = { + new InTxnLocalXPathQueryService(user, brokerPool, this), + new InTxnLocalCollectionManagementService(user, brokerPool, this), + new InTxnLocalUserManagementService(user, brokerPool, this), + new LocalDatabaseInstanceManager(user, brokerPool), + new InTxnLocalXUpdateQueryService(user, brokerPool, this), + new InTxnLocalIndexQueryService(user, brokerPool, this) + }; + return services; + } + + @Override + public org.xmldb.api.base.Collection getParentCollection() throws XMLDBException { + if(getName().equals(XmldbURI.ROOT_COLLECTION)) { + return null; + } + + if(collection == null) { + final XmldbURI parentUri = this.read().apply((collection, broker, transaction) -> collection.getParentURI()); + this.collection = new InTxnLocalCollection(user, brokerPool, null, parentUri); + } + return collection; + } + + @Override + public org.xmldb.api.base.Collection getChildCollection(final String name) throws XMLDBException { + + final XmldbURI childURI; + try { + childURI = XmldbURI.xmldbUriFor(name); + } catch(final URISyntaxException e) { + throw new XMLDBException(ErrorCodes.INVALID_URI,e); + } + + final XmldbURI nameUri = this.read().apply((collection, broker, transaction) -> { + XmldbURI childName = null; + if (collection.hasChildCollection(broker, childURI)) { + 
childName = getPathURI().append(childURI); + } + return childName; + }); + + if(nameUri != null) { + return new InTxnLocalCollection(user, brokerPool, this, nameUri); + } else { + return null; + } + } +} diff --git a/src/org/exist/xmldb/txn/bridge/InTxnLocalCollectionManagementService.java b/src/org/exist/xmldb/txn/bridge/InTxnLocalCollectionManagementService.java new file mode 100644 index 00000000000..4dc3d724318 --- /dev/null +++ b/src/org/exist/xmldb/txn/bridge/InTxnLocalCollectionManagementService.java @@ -0,0 +1,67 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2016 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ */ +package org.exist.xmldb.txn.bridge; + +import org.exist.collections.triggers.TriggerException; +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; +import org.exist.xmldb.LocalCollection; +import org.exist.xmldb.LocalCollectionManagementService; +import org.exist.xmldb.XmldbURI; +import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.Collection; +import org.xmldb.api.base.ErrorCodes; +import org.xmldb.api.base.XMLDBException; + +import java.util.Date; + +/** + * @author Adam Retter + */ +public class InTxnLocalCollectionManagementService extends LocalCollectionManagementService { + public InTxnLocalCollectionManagementService(final Subject user, final BrokerPool pool, final LocalCollection parent) { + super(user, pool, parent); + } + + @Override + public Collection createCollection(final XmldbURI name, final Date created) throws XMLDBException { + final XmldbURI collName = resolve(name); + + withDb((broker, transaction) -> { + try { + final org.exist.collections.Collection coll = broker.getOrCreateCollection(transaction, collName); + if (created != null) { + coll.setCreationTime(created.getTime()); + } + broker.saveCollection(transaction, coll); + return null; + } catch (final TriggerException e) { + throw new XMLDBException(ErrorCodes.VENDOR_ERROR, e.getMessage(), e); + } + }); + + return new InTxnLocalCollection(user, brokerPool, collection, collName); + } + + @Override + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + return InTxnLocalCollection.withDb(brokerPool, user, dbOperation); + } +} diff --git a/src/org/exist/xmldb/txn/bridge/InTxnLocalIndexQueryService.java b/src/org/exist/xmldb/txn/bridge/InTxnLocalIndexQueryService.java new file mode 100644 index 00000000000..64a6f99c398 --- /dev/null +++ b/src/org/exist/xmldb/txn/bridge/InTxnLocalIndexQueryService.java @@ -0,0 +1,41 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2016 The eXist Project + * 
http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.exist.xmldb.txn.bridge; + +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; +import org.exist.xmldb.LocalCollection; +import org.exist.xmldb.LocalIndexQueryService; +import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.XMLDBException; + +/** + * @author Adam Retter + */ +public class InTxnLocalIndexQueryService extends LocalIndexQueryService { + public InTxnLocalIndexQueryService(final Subject user, final BrokerPool pool, final LocalCollection parent) { + super(user, pool, parent); + } + + @Override + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + return InTxnLocalCollection.withDb(brokerPool, user, dbOperation); + } +} diff --git a/src/org/exist/xmldb/txn/bridge/InTxnLocalUserManagementService.java b/src/org/exist/xmldb/txn/bridge/InTxnLocalUserManagementService.java new file mode 100644 index 00000000000..559f03f02f0 --- /dev/null +++ b/src/org/exist/xmldb/txn/bridge/InTxnLocalUserManagementService.java @@ -0,0 +1,41 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2016 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * 
modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.exist.xmldb.txn.bridge; + +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; +import org.exist.xmldb.LocalCollection; +import org.exist.xmldb.LocalUserManagementService; +import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.XMLDBException; + +/** + * @author Adam Retter + */ +public class InTxnLocalUserManagementService extends LocalUserManagementService { + public InTxnLocalUserManagementService(final Subject user, final BrokerPool pool, final LocalCollection collection) { + super(user, pool, collection); + } + + @Override + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + return InTxnLocalCollection.withDb(brokerPool, user, dbOperation); + } +} diff --git a/src/org/exist/xmldb/txn/bridge/InTxnLocalXPathQueryService.java b/src/org/exist/xmldb/txn/bridge/InTxnLocalXPathQueryService.java new file mode 100644 index 00000000000..9ef11736ee6 --- /dev/null +++ b/src/org/exist/xmldb/txn/bridge/InTxnLocalXPathQueryService.java @@ -0,0 +1,41 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2016 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as 
published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.exist.xmldb.txn.bridge; + +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; +import org.exist.xmldb.LocalCollection; +import org.exist.xmldb.LocalXPathQueryService; +import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.XMLDBException; + +/** + * @author Adam Retter + * */ +public class InTxnLocalXPathQueryService extends LocalXPathQueryService { + public InTxnLocalXPathQueryService(final Subject user, final BrokerPool pool, final LocalCollection collection) { + super(user, pool, collection); + } + + @Override + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + return InTxnLocalCollection.withDb(brokerPool, user, dbOperation); + } +} diff --git a/src/org/exist/xmldb/txn/bridge/InTxnLocalXUpdateQueryService.java b/src/org/exist/xmldb/txn/bridge/InTxnLocalXUpdateQueryService.java new file mode 100644 index 00000000000..1514f7fe9c0 --- /dev/null +++ b/src/org/exist/xmldb/txn/bridge/InTxnLocalXUpdateQueryService.java @@ -0,0 +1,41 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2016 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at 
your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software Foundation + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ +package org.exist.xmldb.txn.bridge; + +import org.exist.security.Subject; +import org.exist.storage.BrokerPool; +import org.exist.xmldb.LocalCollection; +import org.exist.xmldb.LocalXUpdateQueryService; +import org.exist.xmldb.function.LocalXmldbFunction; +import org.xmldb.api.base.XMLDBException; + +/** + * @author Adam Retter + */ +public class InTxnLocalXUpdateQueryService extends LocalXUpdateQueryService { + public InTxnLocalXUpdateQueryService(final Subject user, final BrokerPool pool, final LocalCollection parent) { + super(user, pool, parent); + } + + @Override + protected R withDb(final LocalXmldbFunction dbOperation) throws XMLDBException { + return InTxnLocalCollection.withDb(brokerPool, user, dbOperation); + } +} diff --git a/src/org/exist/xmlrpc/RpcAPI.java b/src/org/exist/xmlrpc/RpcAPI.java index 6694b63a671..dd311f620ad 100644 --- a/src/org/exist/xmlrpc/RpcAPI.java +++ b/src/org/exist/xmlrpc/RpcAPI.java @@ -815,24 +815,24 @@ boolean updateAccount(String name, String passwd, String digestPassword, List groups) throws EXistException, PermissionDeniedException; - boolean setPermissions(String resource, String permissions) + boolean setPermissions(String resource, String mode) throws EXistException, PermissionDeniedException, URISyntaxException; - boolean setPermissions(String resource, int permissions) + boolean setPermissions(String resource, int mode) throws EXistException, PermissionDeniedException, URISyntaxException; boolean 
setPermissions( String resource, String owner, - String ownerGroup, - String permissions) + String group, + String mode) throws EXistException, PermissionDeniedException, URISyntaxException; boolean setPermissions( String resource, String owner, - String ownerGroup, - int permissions) + String group, + int mode) throws EXistException, PermissionDeniedException, URISyntaxException; boolean setPermissions( @@ -845,7 +845,7 @@ boolean setPermissions( boolean chgrp( final String resource, - final String ownerGroup) + final String group) throws EXistException, PermissionDeniedException, URISyntaxException; boolean chown( @@ -856,7 +856,7 @@ boolean chown( boolean chown( final String resource, final String owner, - final String ownerGroup) + final String group) throws EXistException, PermissionDeniedException, URISyntaxException; boolean lockResource(String path, String userName) @@ -923,9 +923,15 @@ boolean moveResource(String docPath, String destinationPath, String newName) boolean copyCollection(String collectionPath, String destinationPath, String newName) throws EXistException, PermissionDeniedException, URISyntaxException; + boolean copyCollection(String collectionPath, String destinationPath, String newName, final String preserveType) + throws EXistException, PermissionDeniedException, URISyntaxException; + boolean copyResource(String docPath, String destinationPath, String newName) throws EXistException, PermissionDeniedException, URISyntaxException; + boolean copyResource(String docPath, String destinationPath, String newName, final String preserveType) + throws EXistException, PermissionDeniedException, URISyntaxException; + boolean reindexCollection(String name) throws EXistException, PermissionDeniedException, URISyntaxException; diff --git a/src/org/exist/xmlrpc/RpcConnection.java b/src/org/exist/xmlrpc/RpcConnection.java index d866ac6a040..cbe3d654fe4 100644 --- a/src/org/exist/xmlrpc/RpcConnection.java +++ b/src/org/exist/xmlrpc/RpcConnection.java @@ 
-19,21 +19,10 @@ */ package org.exist.xmlrpc; -import com.evolvedbinary.j8fu.function.ConsumerE; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.dom.QName; -import org.exist.dom.persistent.NodeProxy; -import org.exist.dom.persistent.DocumentMetadata; -import org.exist.dom.persistent.DocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.MutableDocumentSet; -import org.exist.dom.persistent.ExtArrayNodeSet; -import org.exist.dom.persistent.DocumentTypeImpl; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.NodeSet; -import org.exist.dom.persistent.SortedNodeSet; -import org.exist.dom.persistent.DefaultDocumentSet; +import org.exist.dom.persistent.*; import org.exist.EXistException; import org.exist.Namespaces; import org.exist.Version; @@ -67,8 +56,12 @@ import org.exist.source.Source; import org.exist.source.StringSource; import org.exist.storage.*; +import org.exist.storage.DBBroker.PreserveType; import org.exist.storage.lock.Lock.LockMode; +import org.exist.storage.lock.LockManager; import org.exist.storage.lock.LockedDocumentMap; +import org.exist.storage.lock.ManagedCollectionLock; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.storage.serializers.EXistOutputKeys; import org.exist.storage.serializers.Serializer; import org.exist.storage.sync.Sync; @@ -95,9 +88,11 @@ import org.xml.sax.SAXException; import org.xml.sax.helpers.AttributesImpl; +import com.evolvedbinary.j8fu.function.ConsumerE; import com.evolvedbinary.j8fu.function.Function2E; import com.evolvedbinary.j8fu.function.Function3E; import com.evolvedbinary.j8fu.function.SupplierE; +import com.evolvedbinary.j8fu.tuple.Tuple2; import java.io.*; import java.net.URISyntaxException; @@ -112,6 +107,7 @@ import org.xmldb.api.base.*; +import static org.exist.xmldb.EXistXPathQueryService.BEGIN_PROTECTED_MAX_LOCKING_RETRIES; import static 
java.nio.file.StandardOpenOption.*; /** @@ -131,6 +127,7 @@ public class RpcConnection implements RpcAPI { private final XmldbRequestProcessorFactory factory; private final Subject user; + private final Random random = new Random(); public RpcConnection(final XmldbRequestProcessorFactory factory, final Subject user) { super(); @@ -164,18 +161,12 @@ private boolean createCollection(final XmldbURI collUri, final Date created) thr return true; } - current = broker.getOrCreateCollection(transaction, collUri); + current = broker.getOrCreateCollection(transaction, collUri, Optional.ofNullable(created).map(c -> new Tuple2<>(null, c.getTime()))); - //TODO : register a lock (wich one ?) within the transaction ? - if (created != null) { - current.setCreationTime(created.getTime()); - } - if(LOG.isDebugEnabled()) { - LOG.debug("creating collection " + collUri); + try(final ManagedCollectionLock collectionLock = broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(collUri)) { + broker.saveCollection(transaction, current); } - broker.saveCollection(transaction, current); - return null; }); @@ -268,22 +259,23 @@ protected LockedDocumentMap beginProtected(final DBBroker broker, final Map= -1); + + throw new EXistException("Unable to beginProtected after " + BEGIN_PROTECTED_MAX_LOCKING_RETRIES + " retries"); } /** @@ -299,6 +291,7 @@ private CompiledXQuery compile(final DBBroker broker, final Source source, final context = new XQueryContext(broker.getBrokerPool()); } else { context = compiled.getContext(); + context.prepareForReuse(); } final String base = (String) parameters.get(RpcAPI.BASE_URI); if (base != null) { @@ -438,14 +431,8 @@ public boolean existsAndCanOpenCollection(final String collectionUri) throws EXi } return withDb((broker, transaction) -> { - Collection collection = null; - try { - collection = broker.openCollection(uri, LockMode.READ_LOCK); + try(final Collection collection = broker.openCollection(uri, LockMode.READ_LOCK)) { return collection != 
null; - } finally { - if (collection != null) { - collection.release(LockMode.READ_LOCK); - } } }); } @@ -467,15 +454,17 @@ private Map getCollectionDesc(final XmldbURI rootUri) throws EXi if (collection.getPermissionsNoLock().validate(user, Permission.READ)) { for (final Iterator i = collection.iterator(broker); i.hasNext(); ) { final DocumentImpl doc = i.next(); - final Permission perms = doc.getPermissions(); - - final Map hash = new HashMap<>(5); - hash.put("name", doc.getFileURI().toString()); - hash.put("owner", perms.getOwner().getName()); - hash.put("group", perms.getGroup().getName()); - hash.put("permissions", perms.getMode()); - hash.put("type", doc.getResourceType() == DocumentImpl.BINARY_FILE ? "BinaryResource" : "XMLResource"); - docs.add(hash); + try(final ManagedDocumentLock documentLock = broker.getBrokerPool().getLockManager().acquireDocumentReadLock(doc.getURI())) { + final Permission perms = doc.getPermissions(); + + final Map hash = new HashMap<>(5); + hash.put("name", doc.getFileURI().toString()); + hash.put("owner", perms.getOwner().getName()); + hash.put("group", perms.getGroup().getName()); + hash.put("permissions", perms.getMode()); + hash.put("type", doc.getResourceType() == DocumentImpl.BINARY_FILE ? "BinaryResource" : "XMLResource"); + docs.add(hash); + } } for (final Iterator i = collection.collectionIterator(broker); i.hasNext(); ) { collections.add(i.next().toString()); @@ -830,9 +819,9 @@ private int xupdate(final XmldbURI collUri, final String xupdate) throws Permiss final Collection collectionRef = this.readCollection(collUri).apply((collection, broker1, transaction1) -> collection); //TODO : register a lock (which one ?) in the transaction ? 
final DocumentSet docs = collectionRef.allDocs(broker, new DefaultDocumentSet(), true); - try { + try(final Reader reader = new StringReader(xupdate)) { final XUpdateProcessor processor = new XUpdateProcessor(broker, docs); - final Modification modifications[] = processor.parse(new InputSource(new StringReader(xupdate))); + final Modification modifications[] = processor.parse(new InputSource(reader)); long mods = 0; for (final Modification modification : modifications) { mods += modification.process(transaction); @@ -856,13 +845,16 @@ public int xupdateResource(final String resource, final byte[] xupdate, final St private int xupdateResource(final XmldbURI docUri, final String xupdate) throws PermissionDeniedException, EXistException { return withDb((broker, transaction) -> { - final DocumentImpl documentRef = this.readDocument(docUri).apply((document, broker1, transaction1) -> document); - //TODO : register a lock (which one ?) within the transaction ? - final MutableDocumentSet docs = new DefaultDocumentSet(); - docs.add(documentRef); - try { + final MutableDocumentSet docs = this.readDocument(docUri).apply((document, broker1, transaction1) -> { + //TODO : register a lock (which one ?) within the transaction ? 
+ final MutableDocumentSet documentSet = new DefaultDocumentSet(); + documentSet.add(document); + return documentSet; + }); + + try(final Reader reader = new StringReader(xupdate)) { final XUpdateProcessor processor = new XUpdateProcessor(broker, docs); - final Modification modifications[] = processor.parse(new InputSource(new StringReader(xupdate))); + final Modification modifications[] = processor.parse(new InputSource(reader)); long mods = 0; for (final Modification modification : modifications) { mods += modification.process(transaction); @@ -979,11 +971,10 @@ public String createResourceId(final String collectionName) private String createResourceId(final XmldbURI collUri) throws EXistException, PermissionDeniedException { return this.readCollection(collUri).apply((collection, broker, transaction) -> { XmldbURI id; - final Random rand = new Random(); boolean ok; do { ok = true; - id = XmldbURI.create(Integer.toHexString(rand.nextInt()) + ".xml"); + id = XmldbURI.create(Integer.toHexString(random.nextInt()) + ".xml"); // check if this id does already exist if (collection.hasDocument(broker, id)) { ok = false; @@ -1019,39 +1010,18 @@ public Map getPermissions(final String name) private Map getPermissions(final XmldbURI uri) throws EXistException, PermissionDeniedException { return withDb((broker, transaction) -> { - Collection collection = null; - try { - collection = broker.openCollection(uri, LockMode.READ_LOCK); - final Permission perm; + try(final Collection collection = broker.openCollection(uri, LockMode.READ_LOCK)) { if (collection == null) { - DocumentImpl doc = null; - try { - doc = broker.getXMLResource(uri, LockMode.READ_LOCK); - if (doc == null) { + try(final LockedDocument lockedDoc = broker.getXMLResource(uri, LockMode.READ_LOCK)) { + if (lockedDoc == null) { throw new EXistException("document or collection " + uri + " not found"); } - perm = doc.getPermissions(); - } finally { - if (doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); 
- } + final Permission permission = lockedDoc.getDocument().getPermissions(); + return toMap(permission); } } else { - perm = collection.getPermissionsNoLock(); - } - - final Map result = new HashMap<>(); - result.put("owner", perm.getOwner().getName()); - result.put("group", perm.getGroup().getName()); - result.put("permissions", perm.getMode()); - - if (perm instanceof ACLPermission) { - result.put("acl", getACEs(perm)); - } - return result; - } finally { - if (collection != null) { - collection.release(LockMode.READ_LOCK); + final Permission permission = collection.getPermissionsNoLock(); + return toMap(permission); } } }); @@ -1059,35 +1029,14 @@ private Map getPermissions(final XmldbURI uri) throws EXistExcep @Override public Map getSubCollectionPermissions(final String parentPath, final String name) throws EXistException, PermissionDeniedException, URISyntaxException { - final XmldbURI uri = XmldbURI.xmldbUriFor(parentPath); - final Permission perm = this.readCollection(uri).apply((collection, broker, transaction) -> collection.getChildCollectionEntry(broker, name).getPermissions()); - - final Map result = new HashMap<>(); - result.put("owner", perm.getOwner().getName()); - result.put("group", perm.getGroup().getName()); - result.put("permissions", perm.getMode()); - - if (perm instanceof ACLPermission) { - result.put("acl", getACEs(perm)); - } - return result; + return this.>readCollection(uri).apply((collection, broker, transaction) -> toMap(collection.getChildCollectionEntry(broker, name).getPermissions())); } @Override public Map getSubResourcePermissions(final String parentPath, final String name) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(parentPath); - final Permission perm = this.readCollection(uri).apply((collection, broker, transaction) -> collection.getResourceEntry(broker, name).getPermissions()); - - final Map result = new HashMap<>(); - result.put("owner", 
perm.getOwner().getName()); - result.put("group", perm.getGroup().getName()); - result.put("permissions", perm.getMode()); - - if (perm instanceof ACLPermission) { - result.put("acl", getACEs(perm)); - } - return result; + return this.>readCollection(uri).apply((collection, broker, transaction) -> toMap(collection.getResourceEntry(broker, name).getPermissions())); } @Override @@ -1096,6 +1045,19 @@ public long getSubCollectionCreationTime(final String parentPath, final String n return this.readCollection(uri).apply((collection, broker, transaction) -> collection.getChildCollectionEntry(broker, name).getCreated()); } + private Map toMap(final Permission permission) { + final Map result = new HashMap<>(); + result.put("owner", permission.getOwner().getName()); + result.put("group", permission.getGroup().getName()); + result.put("permissions", permission.getMode()); + + if (permission instanceof ACLPermission) { + result.put("acl", getACEs(permission)); + } + + return result; + } + private List getACEs(final Permission perm) { final List aces = new ArrayList<>(); final ACLPermission aclPermission = (ACLPermission) perm; @@ -1116,16 +1078,11 @@ private Map listDocumentPermissions(final XmldbURI collUri) throws final Map result = new HashMap<>(collection.getDocumentCount(broker)); for (final Iterator i = collection.iterator(broker); i.hasNext(); ) { final DocumentImpl doc = i.next(); - final Permission perm = doc.getPermissions(); - final List tmp = new ArrayList(4); - tmp.add(perm.getOwner().getName()); - tmp.add(perm.getGroup().getName()); - tmp.add(perm.getMode()); - if (perm instanceof ACLPermission) { - tmp.add(getACEs(perm)); + try(final ManagedDocumentLock documentLock = broker.getBrokerPool().getLockManager().acquireDocumentReadLock(doc.getURI())) { + final Permission perm = doc.getPermissions(); + result.put(doc.getFileURI().toString(), toList(perm)); } - result.put(doc.getFileURI().toString(), tmp); } return result; }); @@ -1145,20 +1102,24 @@ private Map 
listCollectionPermissions(final XmldbURI collUri) final XmldbURI child = i.next(); final XmldbURI path = collUri.append(child); final Collection childColl = broker.getCollection(path); - final Permission perm = childColl.getPermissionsNoLock(); - final List tmp = new ArrayList(4); - tmp.add(perm.getOwner().getName()); - tmp.add(perm.getGroup().getName()); - tmp.add(perm.getMode()); - if (perm instanceof ACLPermission) { - tmp.add(getACEs(perm)); - } - result.put(child, tmp); + final Permission perm = childColl.getPermissionsNoLock(); // NOTE: we already have a READ lock on childColl implicitly + result.put(child, toList(perm)); } return result; }); } + private List toList(final Permission permission) { + final List result = new ArrayList<>(4); + result.add(permission.getOwner().getName()); + result.add(permission.getGroup().getName()); + result.add(permission.getMode()); + if (permission instanceof ACLPermission) { + result.add(getACEs(permission)); + } + return result; + } + @Override public Date getCreationDate(final String collectionPath) throws PermissionDeniedException, EXistException, URISyntaxException { @@ -1202,58 +1163,47 @@ public boolean setLastModified(final String documentPath, final long lastModifie @Override public Map getAccount(final String name) throws EXistException, PermissionDeniedException { return withDb((broker, transaction) -> { - final Account u = factory.getBrokerPool().getSecurityManager().getAccount(name); - if (u == null) { + final Account user = factory.getBrokerPool().getSecurityManager().getAccount(name); + if (user == null) { throw new EXistException("account '" + name + "' does not exist"); } - final Map tab = new HashMap<>(); - tab.put("uid", user.getId()); - tab.put("name", u.getName()); - tab.put("groups", Arrays.asList(u.getGroups())); - - final Group dg = u.getDefaultGroup(); - if (dg != null) { - tab.put("default-group-id", dg.getId()); - tab.put("default-group-realmId", dg.getRealmId()); - tab.put("default-group-name", 
dg.getName()); - } - - tab.put("enabled", Boolean.toString(u.isEnabled())); - - tab.put("umask", u.getUserMask()); - - final Map metadata = new HashMap<>(); - for (final SchemaType key : u.getMetadataKeys()) { - metadata.put(key.getNamespace(), u.getMetadataValue(key)); - } - tab.put("metadata", metadata); - - return tab; + return toMap(user); }); } @Override public List> getAccounts() throws EXistException, PermissionDeniedException { - final java.util.Collection users = factory.getBrokerPool().getSecurityManager().getUsers(); - final List> r = new ArrayList<>(); + final List> result = new ArrayList<>(); for (final Account user : users) { - final Map tab = new HashMap<>(); - tab.put("uid", user.getId()); - tab.put("name", user.getName()); - tab.put("groups", Arrays.asList(user.getGroups())); - tab.put("enabled", Boolean.toString(user.isEnabled())); - tab.put("umask", user.getUserMask()); + result.add(toMap(user)); + } + return result; + } - final Map metadata = new HashMap<>(); - for (final SchemaType key : user.getMetadataKeys()) { - metadata.put(key.getNamespace(), user.getMetadataValue(key)); - } - tab.put("metadata", metadata); - r.add(tab); + private Map toMap(final Account account) { + final Map result = new HashMap<>(); + result.put("uid", account.getId()); + result.put("name", account.getName()); + result.put("groups", Arrays.asList(account.getGroups())); + + final Group dg = account.getDefaultGroup(); + if (dg != null) { + result.put("default-group-id", dg.getId()); + result.put("default-group-realmId", dg.getRealmId()); + result.put("default-group-name", dg.getName()); + } + + result.put("enabled", Boolean.toString(account.isEnabled())); + result.put("umask", account.getUserMask()); + + final Map metadata = new HashMap<>(); + for (final SchemaType key : account.getMetadataKeys()) { + metadata.put(key.getNamespace(), account.getMetadataValue(key)); } - return r; + result.put("metadata", metadata); + return result; } @Override @@ -1334,37 +1284,46 @@ 
private boolean parse(final byte[] xml, final XmldbURI docUri, final int overwrite, final Date created, final Date modified) throws EXistException, PermissionDeniedException { return this.writeCollection(docUri.removeLastSegment()).apply((collection, broker, transaction) -> { - if (overwrite == 0) { - final DocumentImpl old = collection.getDocument(broker, docUri.lastSegment()); - if (old != null) { - throw new PermissionDeniedException("Document exists and overwrite is not allowed"); + + try(final ManagedDocumentLock lockedDocument = broker.getBrokerPool().getLockManager().acquireDocumentWriteLock(docUri)) { + if (overwrite == 0) { + final DocumentImpl old = collection.getDocument(broker, docUri.lastSegment()); // NOTE: we have the document write lock above + if (old != null) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + throw new PermissionDeniedException("Document exists and overwrite is not allowed"); + } } - } try (final InputStream is = new FastByteArrayInputStream(xml)) { - final InputSource source = new InputSource(is); + final InputSource source = new InputSource(is); - final long startTime = System.currentTimeMillis(); + final long startTime = System.currentTimeMillis(); - final IndexInfo info = collection.validateXMLResource(transaction, broker, docUri.lastSegment(), source); - final MimeType mime = MimeTable.getInstance().getContentTypeFor(docUri.lastSegment()); - if (mime != null && mime.isXMLType()) { - info.getDocument().getMetadata().setMimeType(mime.getName()); - } - if (created != null) { - info.getDocument().getMetadata().setCreated(created.getTime()); - } - if (modified != null) { - info.getDocument().getMetadata().setLastModified(modified.getTime()); - } + final IndexInfo info = collection.validateXMLResource(transaction, broker, docUri.lastSegment(), source); + final MimeType mime = MimeTable.getInstance().getContentTypeFor(docUri.lastSegment()); + if (mime != null && 
mime.isXMLType()) { + info.getDocument().getMetadata().setMimeType(mime.getName()); + } + if (created != null) { + info.getDocument().getMetadata().setCreated(created.getTime()); + } + if (modified != null) { + info.getDocument().getMetadata().setLastModified(modified.getTime()); + } - collection.store(transaction, broker, info, source); + collection.store(transaction, broker, info, source); - if(LOG.isDebugEnabled()) { - LOG.debug("parsing " + docUri + " took " + (System.currentTimeMillis() - startTime) + "ms."); + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + if(LOG.isDebugEnabled()) { + LOG.debug("parsing " + docUri + " took " + (System.currentTimeMillis() - startTime) + "ms."); + } + return true; } - return true; } }); } @@ -1433,39 +1392,49 @@ private boolean parseLocal(final String localFile, final XmldbURI docUri, final throws EXistException, PermissionDeniedException { return this.writeCollection(docUri.removeLastSegment()).apply((collection, broker, transaction) -> { - if (overwrite == 0) { - final DocumentImpl old = collection.getDocument(broker, docUri.lastSegment()); - if (old != null) { - throw new PermissionDeniedException("Old document exists and overwrite is not allowed"); - } - } + try(final ManagedDocumentLock lockedDocument = broker.getBrokerPool().getLockManager().acquireDocumentWriteLock(docUri)) { + if (overwrite == 0) { + final DocumentImpl old = collection.getDocument(broker, docUri.lastSegment()); // NOTE: we have the document write lock above + if (old != null) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); - // get the source for parsing - SupplierE sourceSupplier; - try { - final int handle = Integer.parseInt(localFile); - final SerializedResult sr = factory.resultSets.getSerializedResult(handle); - if (sr == null) { - throw new EXistException("Invalid handle specified"); + throw new PermissionDeniedException("Old 
document exists and overwrite is not allowed"); + } } - sourceSupplier = () -> { - final FileInputSource source = new FileInputSource(sr.result); - sr.result = null; // de-reference the VirtualTempFile in the SerializeResult - factory.resultSets.remove(handle); - return source; - }; - } catch (final NumberFormatException nfe) { + // get the source for parsing + SupplierE sourceSupplier; + try { + final int handle = Integer.parseInt(localFile); + final SerializedResult sr = factory.resultSets.getSerializedResult(handle); + if (sr == null) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); - // As this file can be a non-temporal one, we should not - // blindly erase it! - final Path path = Paths.get(localFile); - if (!Files.isReadable(path)) { - throw new EXistException("unable to read file " + path.toAbsolutePath().toString()); - } + throw new EXistException("Invalid handle specified"); + } - sourceSupplier = () -> new FileInputSource(path); + sourceSupplier = () -> { + final FileInputSource source = new FileInputSource(sr.result); + sr.result = null; // de-reference the temp file in the SerializeResult, so it is not re-claimed before we need it + factory.resultSets.remove(handle); + return source; + }; + } catch (final NumberFormatException nfe) { + + // As this file can be a non-temporal one, we should not + // blindly erase it! 
+ final Path path = Paths.get(localFile); + if (!Files.isReadable(path)) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + throw new EXistException("unable to read file " + path.toAbsolutePath().toString()); + } + + sourceSupplier = () -> new FileInputSource(path); } // parse the source @@ -1473,28 +1442,37 @@ private boolean parseLocal(final String localFile, final XmldbURI docUri, final final MimeType mime = Optional.ofNullable(MimeTable.getInstance().getContentType(mimeType)).orElse(MimeType.BINARY_TYPE); final boolean treatAsXML = (isXML != null && isXML) || (isXML == null && mime.isXMLType()); - if (treatAsXML) { - final IndexInfo info = collection.validateXMLResource(transaction, broker, docUri.lastSegment(), source); - if (created != null) { - info.getDocument().getMetadata().setCreated(created.getTime()); - } - if (modified != null) { - info.getDocument().getMetadata().setLastModified(modified.getTime()); - } - collection.store(transaction, broker, info, source); - } else { - try (final InputStream is = source.getByteStream()) { - final DocumentImpl doc = collection.addBinaryResource(transaction, broker, docUri.lastSegment(), is, mime.getName(), source.getByteStreamLength()); + if (treatAsXML) { + final IndexInfo info = collection.validateXMLResource(transaction, broker, docUri.lastSegment(), source); if (created != null) { - doc.getMetadata().setCreated(created.getTime()); + info.getDocument().getMetadata().setCreated(created.getTime()); } if (modified != null) { - doc.getMetadata().setLastModified(modified.getTime()); + info.getDocument().getMetadata().setLastModified(modified.getTime()); + } + collection.store(transaction, broker, info, source); + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + } else { + try (final InputStream is = source.getByteStream()) { + final DocumentImpl doc = collection.addBinaryResource(transaction, 
broker, docUri.lastSegment(), is, mime.getName(), source.getByteStreamLength()); + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + if (created != null) { + doc.getMetadata().setCreated(created.getTime()); + } + if (modified != null) { + doc.getMetadata().setLastModified(modified.getTime()); + } } } - } - return true; + return true; + } } }); } @@ -1518,27 +1496,46 @@ public boolean storeBinary(final byte[] data, final String documentPath, final S private boolean storeBinary(final byte[] data, final XmldbURI docUri, final String mimeType, final int overwrite, final Date created, final Date modified) throws EXistException, PermissionDeniedException { return this.writeCollection(docUri.removeLastSegment()).apply((collection, broker, transaction) -> { + // keep a write lock in the transaction - transaction.acquireLock(collection.getLock(), LockMode.WRITE_LOCK); - if (overwrite == 0) { - final DocumentImpl old = collection.getDocument(broker, docUri.lastSegment()); - if (old != null) { - throw new PermissionDeniedException("Old document exists and overwrite is not allowed"); + transaction.acquireCollectionLock(() -> broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(collection.getURI())); + + try(final ManagedDocumentLock lockedDocument = broker.getBrokerPool().getLockManager().acquireDocumentWriteLock(docUri)) { + if (overwrite == 0) { + final DocumentImpl old = collection.getDocument(broker, docUri.lastSegment()); // NOTE: we have the document write lock above + + if (old != null) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + throw new PermissionDeniedException("Old document exists and overwrite is not allowed"); + } } - } - if(LOG.isDebugEnabled()) { - LOG.debug("Storing binary resource to collection " + collection.getURI()); - } - final DocumentImpl doc = collection.addBinaryResource(transaction, broker, 
docUri.lastSegment(), data, mimeType); - if (created != null) { - doc.getMetadata().setCreated(created.getTime()); - } - if (modified != null) { - doc.getMetadata().setLastModified(modified.getTime()); - } + if(LOG.isDebugEnabled()) { + LOG.debug("Storing binary resource to collection " + collection.getURI()); + } + + final DocumentImpl doc = collection.addBinaryResource(transaction, broker, docUri.lastSegment(), data, mimeType); + if(doc != null) { + if (created != null) { + doc.getMetadata().setCreated(created.getTime()); + } + if (modified != null) { + doc.getMetadata().setLastModified(modified.getTime()); + } - return doc != null; + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + return true; + } else { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + return false; + } + } }); } @@ -1578,7 +1575,10 @@ public String upload(final byte[] chunk, final int length, @Nullable String file try (final OutputStream os = Files.newOutputStream(tempFile, openOptions)) { if (compressed) { - Compressor.uncompress(chunk, os); + final int uncompressedLen = Compressor.uncompress(chunk, os); + if (uncompressedLen != length) { + throw new IOException("Expected " + length + " bytes of uncompressed data, but actually " + uncompressedLen); + } } else { os.write(chunk, 0, length); } @@ -2046,19 +2046,28 @@ public boolean remove(final String documentPath) throws URISyntaxException, EXis private boolean remove(final XmldbURI docUri) throws EXistException, PermissionDeniedException { return this.writeCollection(docUri.removeLastSegment()).apply((collection, broker, transaction) -> { // keep a write lock in the transaction - transaction.acquireLock(collection.getLock(), LockMode.WRITE_LOCK); + transaction.acquireCollectionLock(() -> broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(collection.getURI())); - final DocumentImpl doc = 
collection.getDocument(broker, docUri.lastSegment()); - if (doc == null) { - throw new EXistException("Document " + docUri + " not found"); - } + try(final LockedDocument lockedDoc = collection.getDocumentWithLock(broker, docUri.lastSegment(), LockMode.WRITE_LOCK)) { + if (lockedDoc == null) { + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); - if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { - collection.removeBinaryResource(transaction, broker, doc); - } else { - collection.removeXMLResource(transaction, broker, docUri.lastSegment()); + throw new EXistException("Document " + docUri + " not found"); + } + + final DocumentImpl doc = lockedDoc.getDocument(); + if (doc.getResourceType() == DocumentImpl.BINARY_FILE) { + collection.removeBinaryResource(transaction, broker, doc); + } else { + collection.removeXMLResource(transaction, broker, docUri.lastSegment()); + } + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + return true; } - return true; }); } @@ -2071,7 +2080,7 @@ private boolean removeCollection(final XmldbURI collURI) throws EXistException, try { return this.writeCollection(collURI).apply((collection, broker, transaction) -> { // keep a write lock in the transaction - transaction.acquireLock(collection.getLock(), LockMode.WRITE_LOCK); + transaction.acquireCollectionLock(() -> broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(collection.getURI())); if(LOG.isDebugEnabled()) { LOG.debug("removing collection " + collURI); } @@ -2426,10 +2435,10 @@ public Map retrieveAllFirstChunk(final int resultId, final Map { - PermissionFactory.updatePermissions(broker, uri, permission -> permission.setGroup(ownerGroup)); + PermissionFactory.chown(broker, transaction, uri, Optional.empty(), Optional.ofNullable(group)); return true; }); } @@ -2438,73 +2447,54 @@ public boolean chgrp(final String resource, final String ownerGroup) throws 
EXis public boolean chown(final String resource, final String owner) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> permission.setOwner(owner)); + PermissionFactory.chown(broker, transaction, uri, Optional.ofNullable(owner), Optional.empty()); return true; }); } @Override - public boolean chown(final String resource, final String owner, final String ownerGroup) throws EXistException, PermissionDeniedException, URISyntaxException { + public boolean chown(final String resource, final String owner, final String group) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> { - permission.setOwner(owner); - permission.setGroup(ownerGroup); - }); + PermissionFactory.chown(broker, transaction, uri, Optional.ofNullable(owner), Optional.ofNullable(group)); return true; }); } @Override - public boolean setPermissions(final String resource, final int permissions) throws EXistException, PermissionDeniedException, URISyntaxException { + public boolean setPermissions(final String resource, final int mode) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> permission.setMode(permissions)); + PermissionFactory.chmod(broker, transaction, uri, Optional.of(mode), Optional.empty()); return true; }); } @Override - public boolean setPermissions(final String resource, final String permissions) throws EXistException, PermissionDeniedException, URISyntaxException { + public boolean setPermissions(final String resource, final String mode) throws EXistException, 
PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> { - try { - permission.setMode(permissions); - } catch (final SyntaxException se) { - throw new PermissionDeniedException("Unrecognised mode syntax: " + se.getMessage(), se); - } - }); + PermissionFactory.chmod_str(broker, transaction, uri, Optional.ofNullable(mode), Optional.empty()); return true; }); } @Override - public boolean setPermissions(final String resource, final String owner, final String ownerGroup, final String permissions) throws EXistException, PermissionDeniedException, URISyntaxException { + public boolean setPermissions(final String resource, final String owner, final String group, final String mode) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> { - permission.setOwner(owner); - permission.setGroup(ownerGroup); - try { - permission.setMode(permissions); - } catch (final SyntaxException se) { - throw new PermissionDeniedException("Unrecognised mode syntax: " + se.getMessage(), se); - } - }); + PermissionFactory.chown(broker, transaction, uri, Optional.ofNullable(owner), Optional.ofNullable(group)); + PermissionFactory.chmod_str(broker, transaction, uri, Optional.ofNullable(mode), Optional.empty()); return true; }); } @Override - public boolean setPermissions(final String resource, final String owner, final String ownerGroup, final int permissions) throws EXistException, PermissionDeniedException, URISyntaxException { + public boolean setPermissions(final String resource, final String owner, final String group, final int mode) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return 
withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> { - permission.setOwner(owner); - permission.setGroup(ownerGroup); - permission.setMode(permissions); - }); + PermissionFactory.chown(broker, transaction, uri, Optional.ofNullable(owner), Optional.ofNullable(group)); + PermissionFactory.chmod(broker, transaction, uri, Optional.of(mode), Optional.empty()); return true; }); } @@ -2513,18 +2503,8 @@ public boolean setPermissions(final String resource, final String owner, final S public boolean setPermissions(final String resource, final String owner, final String group, final int mode, final List aces) throws EXistException, PermissionDeniedException, URISyntaxException { final XmldbURI uri = XmldbURI.xmldbUriFor(resource); return withDb((broker, transaction) -> { - PermissionFactory.updatePermissions(broker, uri, permission -> { - permission.setOwner(owner); - permission.setGroup(group); - permission.setMode(mode); - if (permission instanceof ACLPermission) { - final ACLPermission aclPermission = ((ACLPermission) permission); - aclPermission.clear(); - for (final ACEAider ace : aces) { - aclPermission.addACE(ace.getAccessType(), ace.getTarget(), ace.getWho(), ace.getMode()); - } - } - }); + PermissionFactory.chown(broker, transaction, uri, Optional.ofNullable(owner), Optional.ofNullable(group)); + PermissionFactory.chmod(broker, transaction, uri, Optional.of(mode), Optional.ofNullable(aces)); return true; }); } @@ -2925,63 +2905,12 @@ public Map summary(final String xpath) throws EXistException, Pe if (qr.hasErrors()) { throw qr.getException(); } - final Map map = new HashMap<>(); - final Map doctypes = new HashMap<>(); - NodeProxy p; - String docName; - DocumentType doctype; - NodeCount counter; - DoctypeCount doctypeCounter; - for (final SequenceIterator i = qr.result.iterate(); i.hasNext(); ) { - final Item item = i.nextItem(); - if (Type.subTypeOf(item.getType(), Type.NODE)) { - final NodeValue nv = (NodeValue) 
item; - if (nv.getImplementationType() == NodeValue.PERSISTENT_NODE) { - p = (NodeProxy) nv; - docName = p.getOwnerDocument().getURI().toString(); - doctype = p.getOwnerDocument().getDoctype(); - if (map.containsKey(docName)) { - counter = map.get(docName); - counter.inc(); - } else { - counter = new NodeCount(p.getOwnerDocument()); - map.put(docName, counter); - } - if (doctype == null) { - continue; - } - if (doctypes.containsKey(doctype.getName())) { - doctypeCounter = doctypes.get(doctype.getName()); - doctypeCounter.inc(); - } else { - doctypeCounter = new DoctypeCount(doctype); - doctypes.put(doctype.getName(), doctypeCounter); - } - } - } + if (qr.result == null) { + return summaryToMap(qr.queryTime, null, null, null); } - final Map result = new HashMap<>(); - result.put("queryTime", System.currentTimeMillis() - startTime); - result.put("hits", qr.result.getItemCount()); - final List documents = new ArrayList<>(); - for (final NodeCount nodeCounter : map.values()) { - final List hitsByDoc = new ArrayList<>(); - hitsByDoc.add(nodeCounter.doc.getFileURI().toString()); - hitsByDoc.add(nodeCounter.doc.getDocId()); - hitsByDoc.add(nodeCounter.count); - documents.add(hitsByDoc); - } - result.put("documents", documents); - - final List dtypes = new ArrayList<>(); - for (final DoctypeCount docTemp : doctypes.values()) { - final List hitsByType = new ArrayList<>(); - hitsByType.add(docTemp.doctype.getName()); - hitsByType.add(docTemp.count); - dtypes.add(hitsByType); - } - result.put("doctypes", dtypes); - return result; + + final Tuple2, java.util.Collection> summary = summarise(qr.result); + return summaryToMap(System.currentTimeMillis() - startTime, qr.result, summary._1, summary._2); } catch (final XPathException e) { throw new EXistException(e); @@ -2995,68 +2924,83 @@ public Map summary(final int resultId) throws EXistException, XP throw new EXistException("result set unknown or timed out"); } qr.touch(); - final Map result = new HashMap<>(); - 
result.put("queryTime", qr.queryTime); if (qr.result == null) { - result.put("hits", 0); - return result; + return summaryToMap(qr.queryTime, null, null, null); } - final Map map = new HashMap<>(); - final Map doctypes = new HashMap<>(); - NodeProxy p; - String docName; - DocumentType doctype; + + final Tuple2, java.util.Collection> summary = summarise(qr.result); + return summaryToMap(qr.queryTime, qr.result, summary._1, summary._2); + } + + private Tuple2, java.util.Collection> summarise(final Sequence results) throws XPathException { + final Map nodeCounts = new HashMap<>(); + final Map doctypeCounts = new HashMap<>(); NodeCount counter; DoctypeCount doctypeCounter; - for (final SequenceIterator i = qr.result.iterate(); i.hasNext(); ) { + for (final SequenceIterator i = results.iterate(); i.hasNext(); ) { final Item item = i.nextItem(); if (Type.subTypeOf(item.getType(), Type.NODE)) { final NodeValue nv = (NodeValue) item; if (nv.getImplementationType() == NodeValue.PERSISTENT_NODE) { - p = (NodeProxy) nv; - docName = p.getOwnerDocument().getURI().toString(); - doctype = p.getOwnerDocument().getDoctype(); - if (map.containsKey(docName)) { - counter = map.get(docName); + final NodeProxy p = (NodeProxy) nv; + final String docName = p.getOwnerDocument().getURI().toString(); + final DocumentType doctype = p.getOwnerDocument().getDoctype(); + if (nodeCounts.containsKey(docName)) { + counter = nodeCounts.get(docName); counter.inc(); } else { counter = new NodeCount(p.getOwnerDocument()); - map.put(docName, counter); + nodeCounts.put(docName, counter); } if (doctype == null) { continue; } - if (doctypes.containsKey(doctype.getName())) { - doctypeCounter = (DoctypeCount) doctypes.get(doctype - .getName()); + if (doctypeCounts.containsKey(doctype.getName())) { + doctypeCounter = doctypeCounts.get(doctype.getName()); doctypeCounter.inc(); } else { doctypeCounter = new DoctypeCount(doctype); - doctypes.put(doctype.getName(), doctypeCounter); + 
doctypeCounts.put(doctype.getName(), doctypeCounter); } } } } - result.put("hits", qr.result.getItemCount()); + + return new Tuple2<>(nodeCounts.values(), doctypeCounts.values()); + } + + private Map summaryToMap(final long queryTime, @Nullable final Sequence results, + @Nullable final java.util.Collection nodeCounts, @Nullable final java.util.Collection doctypeCounts) { + final Map result = new HashMap<>(); + + result.put("queryTime", queryTime); + + if (results == null) { + result.put("hits", 0); + return result; + } + + result.put("hits", results.getItemCount()); final List documents = new ArrayList<>(); - for (final NodeCount nodeCounter : map.values()) { + for (final NodeCount nodeCount : nodeCounts) { final List hitsByDoc = new ArrayList<>(); - hitsByDoc.add(nodeCounter.doc.getFileURI().toString()); - hitsByDoc.add(nodeCounter.doc.getDocId()); - hitsByDoc.add(nodeCounter.count); + hitsByDoc.add(nodeCount.doc.getFileURI().toString()); + hitsByDoc.add(nodeCount.doc.getDocId()); + hitsByDoc.add(nodeCount.count); documents.add(hitsByDoc); } result.put("documents", documents); final List dtypes = new ArrayList<>(); - for (final DoctypeCount docTemp : doctypes.values()) { + for (final DoctypeCount docTemp : doctypeCounts) { final List hitsByType = new ArrayList<>(); hitsByType.add(docTemp.doctype.getName()); hitsByType.add(docTemp.count); dtypes.add(hitsByType); } result.put("doctypes", dtypes); + return result; } @@ -3094,23 +3038,9 @@ private Properties toProperties(final Map parameters) { return properties; } - static class CachedQuery { - - final PathExpr expression; - final String queryString; - final long timestamp; - - public CachedQuery(final PathExpr expr, final String query) { - this.expression = expr; - this.queryString = query; - this.timestamp = System.currentTimeMillis(); - } - } - - static class DoctypeCount { - - int count = 1; + private static class DoctypeCount { final DocumentType doctype; + int count = 1; public DoctypeCount(final DocumentType 
doctype) { this.doctype = doctype; @@ -3121,10 +3051,9 @@ public void inc() { } } - static class NodeCount { - - int count = 1; + private static class NodeCount { final DocumentImpl doc; + int count = 1; public NodeCount(final DocumentImpl doc) { this.doc = doc; @@ -3157,15 +3086,24 @@ public byte[] getDocumentChunk(final String name, final int start, final int len return buffer; } + @Deprecated + public boolean moveOrCopyResource(final String documentPath, final String destinationPath, + final String newName, final boolean move) + throws EXistException, PermissionDeniedException, URISyntaxException { + return moveOrCopyResource(XmldbURI.xmldbUriFor(documentPath), + XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), move, PreserveType.DEFAULT); + } + + @Deprecated public boolean moveOrCopyResource(final String documentPath, final String destinationPath, - final String newName, final boolean move) + final String newName, final boolean move, final PreserveType preserve) throws EXistException, PermissionDeniedException, URISyntaxException { return moveOrCopyResource(XmldbURI.xmldbUriFor(documentPath), - XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), move); + XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), move, preserve); } private boolean moveOrCopyResource(final XmldbURI docUri, final XmldbURI destUri, - final XmldbURI newName, final boolean move) + final XmldbURI newName, final boolean move, final PreserveType preserve) throws EXistException, PermissionDeniedException { // use WRITE_LOCK if moving or if src and dest collection are the same @@ -3179,7 +3117,7 @@ private boolean moveOrCopyResource(final XmldbURI docUri, final XmldbURI destUri if (move) { broker3.moveResource(transaction3, document, destination, newName); } else { - broker3.copyResource(transaction3, document, destination, newName); + broker3.copyResource(transaction3, document, destination, newName, preserve); } return true; }) @@ 
-3189,15 +3127,24 @@ private boolean moveOrCopyResource(final XmldbURI docUri, final XmldbURI destUri ); } + @Deprecated + public boolean moveOrCopyCollection(final String collectionName, final String destinationPath, + final String newName, final boolean move) + throws EXistException, PermissionDeniedException, URISyntaxException { + return moveOrCopyCollection(XmldbURI.xmldbUriFor(collectionName), + XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), move, PreserveType.DEFAULT); + } + + @Deprecated public boolean moveOrCopyCollection(final String collectionName, final String destinationPath, - final String newName, final boolean move) + final String newName, final boolean move, final PreserveType preserve) throws EXistException, PermissionDeniedException, URISyntaxException { return moveOrCopyCollection(XmldbURI.xmldbUriFor(collectionName), - XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), move); + XmldbURI.xmldbUriFor(destinationPath), XmldbURI.xmldbUriFor(newName), move, preserve); } private boolean moveOrCopyCollection(final XmldbURI collUri, final XmldbURI destUri, - final XmldbURI newName, final boolean move) + final XmldbURI newName, final boolean move, final PreserveType preserve) throws EXistException, PermissionDeniedException { // use WRITE_LOCK if moving or if src and dest collection are the same @@ -3210,7 +3157,7 @@ private boolean moveOrCopyCollection(final XmldbURI collUri, final XmldbURI dest if (move) { broker2.moveCollection(transaction2, source, destination, newName); } else { - broker2.copyCollection(transaction2, source, destination, newName); + broker2.copyCollection(transaction2, source, destination, newName, preserve); } return true; }) @@ -3220,13 +3167,13 @@ private boolean moveOrCopyCollection(final XmldbURI collUri, final XmldbURI dest @Override public boolean reindexCollection(final String collectionName) throws URISyntaxException, EXistException, PermissionDeniedException { - 
reindexCollection(XmldbURI.xmldbUriFor(collectionName)); + reindexCollection(XmldbURI.xmldbUriFor(collectionName)); return true; } private void reindexCollection(final XmldbURI collUri) throws EXistException, PermissionDeniedException { withDb((broker, transaction) -> { - broker.reindexCollection(collUri); + broker.reindexCollection(transaction, collUri); if(LOG.isDebugEnabled()) { LOG.debug("collection " + collUri + " and sub-collections reindexed"); } @@ -3237,18 +3184,12 @@ private void reindexCollection(final XmldbURI collUri) throws EXistException, Pe @Override public boolean reindexDocument(final String docUri) throws EXistException, PermissionDeniedException { withDb((broker, transaction) -> { - DocumentImpl doc = null; - try { - doc = broker.getXMLResource(XmldbURI.create(docUri), LockMode.READ_LOCK); - broker.reindexXMLResource(transaction, doc, DBBroker.IndexMode.STORE); + try(final LockedDocument lockedDoc = broker.getXMLResource(XmldbURI.create(docUri), LockMode.READ_LOCK)) { + broker.reindexXMLResource(transaction, lockedDoc.getDocument(), DBBroker.IndexMode.STORE); if(LOG.isDebugEnabled()) { LOG.debug("document " + docUri + " reindexed"); } return null; - } finally { - if (doc != null) { - doc.getUpdateLock().release(LockMode.READ_LOCK); - } } }); return true; @@ -3368,22 +3309,32 @@ private boolean setDocType(final XmldbURI docUri, final String doctypename, fina @Override public boolean copyResource(final String docPath, final String destinationPath, final String newName) throws EXistException, PermissionDeniedException, URISyntaxException { - return moveOrCopyResource(docPath, destinationPath, newName, false); + return moveOrCopyResource(docPath, destinationPath, newName, false, PreserveType.DEFAULT); + } + + @Override + public boolean copyResource(final String docPath, final String destinationPath, final String newName, final String preserveType) throws EXistException, PermissionDeniedException, URISyntaxException { + return 
moveOrCopyResource(docPath, destinationPath, newName, false, PreserveType.valueOf(preserveType)); } @Override public boolean copyCollection(final String collectionPath, final String destinationPath, final String newName) throws EXistException, PermissionDeniedException, URISyntaxException { - return moveOrCopyCollection(collectionPath, destinationPath, newName, false); + return moveOrCopyCollection(collectionPath, destinationPath, newName, false, PreserveType.DEFAULT); + } + + @Override + public boolean copyCollection(final String collectionPath, final String destinationPath, final String newName, final String preserveType) throws EXistException, PermissionDeniedException, URISyntaxException { + return moveOrCopyCollection(collectionPath, destinationPath, newName, false, PreserveType.valueOf(preserveType)); } @Override public boolean moveResource(final String docPath, final String destinationPath, final String newName) throws EXistException, PermissionDeniedException, URISyntaxException { - return moveOrCopyResource(docPath, destinationPath, newName, true); + return moveOrCopyResource(docPath, destinationPath, newName, true, PreserveType.DEFAULT); } @Override public boolean moveCollection(final String collectionPath, final String destinationPath, final String newName) throws EXistException, PermissionDeniedException, URISyntaxException { - return moveOrCopyCollection(collectionPath, destinationPath, newName, true); + return moveOrCopyCollection(collectionPath, destinationPath, newName, true, PreserveType.DEFAULT); } @Override @@ -3401,44 +3352,50 @@ public List getDocumentChunk(final String name, final Map parameters = new HashMap<>(); - parameters.put(OutputKeys.INDENT, "no"); - parameters.put(EXistOutputKeys.EXPAND_XINCLUDES, "no"); - parameters.put(OutputKeys.ENCODING, DEFAULT_ENCODING); - - final Map desc = getCollectionDesc(name); - final Object[] collections = (Object[]) desc.get("collections"); - final Object[] documents = (Object[]) desc.get("documents"); + 
createCollection(namedest); - //recurse the collection - for (final Object collection : collections) { - final String nome = collection.toString(); - createCollection(namedest + "/" + nome); - copyCollection(name + "/" + nome, namedest + "/" + nome); - } - - //Copy i file - int p, dsize = documents.length; - for (int i = 0; i < dsize; i++) { - final Map hash = (Map) documents[i]; - String nome = (String) hash.get("name"); - //TODO : use dedicated function in XmldbURI - if ((p = nome.lastIndexOf("/")) != Constants.STRING_NOT_FOUND) { - nome = nome.substring(p + 1); + final Map parameters = new HashMap<>(); + parameters.put(OutputKeys.INDENT, "no"); + parameters.put(EXistOutputKeys.EXPAND_XINCLUDES, "no"); + parameters.put(OutputKeys.ENCODING, DEFAULT_ENCODING); + + final Map desc = getCollectionDesc(name); + final Object[] collections = (Object[]) desc.get("collections"); + final Object[] documents = (Object[]) desc.get("documents"); + + //recurse the collection + for (final Object collection : collections) { + final String nome = collection.toString(); + createCollection(namedest + "/" + nome); + copyCollection(name + "/" + nome, namedest + "/" + nome); + } + + //Copy i file + int p, dsize = documents.length; + for (int i = 0; i < dsize; i++) { + final Map hash = (Map) documents[i]; + String docName = (String) hash.get("name"); + //TODO : use dedicated function in XmldbURI + if ((p = docName.lastIndexOf("/")) != Constants.STRING_NOT_FOUND) { + docName = docName.substring(p + 1); + } + + final String srcDocUri = name + "/" + docName; + final String destDocUri = namedest + "/" + docName; + withDb((broker, transaction) -> { + final LockManager lockManager = broker.getBrokerPool().getLockManager(); + try(final ManagedDocumentLock srcDocLock = lockManager.acquireDocumentReadLock(XmldbURI.create(srcDocUri)); + final ManagedDocumentLock destDocLock = lockManager.acquireDocumentWriteLock(XmldbURI.create(destDocUri))) { + final byte[] xml = getDocument(srcDocUri, 
parameters); + parse(xml, destDocUri); + return null; + } catch (final URISyntaxException e) { + throw new EXistException(e); } - - final byte[] xml = getDocument(name + "/" + nome, parameters); - parse(xml, namedest + "/" + nome); - } - - return true; - - } catch (final URISyntaxException e) { - throw new EXistException(e); + }); } + + return true; } @Override @@ -3590,8 +3547,7 @@ public byte[] getDocument(final String name, final String encoding, final int pr @Override public boolean setTriggersEnabled(final String path, final String value) throws EXistException, PermissionDeniedException { final boolean triggersEnabled = Boolean.parseBoolean(value); - return withDb((broker, transaction) -> { - final Collection collection = broker.getCollection(XmldbURI.create(path)); + return this.writeCollection(XmldbURI.create(path)).apply((collection, broker2, transaction2) -> { if (collection == null) { return false; } @@ -3742,9 +3698,7 @@ private Function2E, R, EXistException, Permissio */ private Function2E, R, EXistException, PermissionDeniedException> withCollection(final LockMode lockMode, final DBBroker broker, final Txn transaction, final XmldbURI uri) throws EXistException, PermissionDeniedException { return readOp -> { - Collection collection = null; - try { - collection = broker.openCollection(uri, lockMode); + try(final Collection collection = broker.openCollection(uri, lockMode)) { if (collection == null) { final String msg = "collection " + uri + " not found!"; if (LOG.isDebugEnabled()) { @@ -3753,10 +3707,6 @@ private Function2E, R, EXistException, Permissio throw new EXistException(msg); } return readOp.apply(collection, broker, transaction); - } finally { - if (collection != null) { - collection.release(lockMode); - } } }; } @@ -3859,23 +3809,21 @@ private Function2E, R, EXistException, PermissionD */ private Function2E, R, EXistException, PermissionDeniedException> withDocument(final LockMode lockMode, final DBBroker broker, final Txn transaction, final 
Collection collection, final XmldbURI uri) throws EXistException, PermissionDeniedException { return readOp -> { - DocumentImpl document = null; - try { - document = collection.getDocumentWithLock(broker, uri.lastSegment(), lockMode); - if (document == null) { + try(final LockedDocument lockedDocument = collection.getDocumentWithLock(broker, uri.lastSegment(), lockMode)) { + + // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme + collection.close(); + + if (lockedDocument == null) { final String msg = "document " + uri + " not found!"; if (LOG.isDebugEnabled()) { LOG.debug(msg); } throw new EXistException(msg); } - return readOp.apply(document, broker, transaction); + return readOp.apply(lockedDocument.getDocument(), broker, transaction); } catch (final LockException e) { throw new EXistException(e); - } finally { - if (document != null) { - collection.releaseDocument(document, lockMode); - } } }; } diff --git a/src/org/exist/xquery/AbstractExpression.java b/src/org/exist/xquery/AbstractExpression.java index 37f2f4cbd65..3e2bed9d50b 100644 --- a/src/org/exist/xquery/AbstractExpression.java +++ b/src/org/exist/xquery/AbstractExpression.java @@ -71,26 +71,12 @@ public Sequence eval(Sequence contextSequence) throws XPathException { return eval(contextSequence, null); } - /* (non-Javadoc) - * @see org.exist.xquery.Expression#eval(org.exist.xquery.value.Sequence, org.exist.xquery.value.Item) - */ - @Override - public abstract Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException; - - /* (non-Javadoc) - * @see org.exist.xquery.Expression#returnsType() - */ - @Override - public abstract int returnsType(); - - /* (non-Javadoc) - * @see org.exist.xquery.Expression#resetState() - */ @Override public void resetState(boolean postOptimization) { contextDocSet = null; } + @Override public boolean needsReset() { // always return true unless a subclass overwrites this return true; @@ -123,9 +109,6 @@ public int 
getPrimaryAxis() { return Constants.UNKNOWN_AXIS; } - /* (non-Javadoc) - * @see org.exist.xquery.Expression#setContextDocSet(org.exist.dom.persistent.DocumentSet) - */ @Override public void setContextDocSet(DocumentSet contextSet) { this.contextDocSet = contextSet; diff --git a/src/org/exist/xquery/AbstractInternalModule.java b/src/org/exist/xquery/AbstractInternalModule.java index 3e0959835b5..8b244417c44 100644 --- a/src/org/exist/xquery/AbstractInternalModule.java +++ b/src/org/exist/xquery/AbstractInternalModule.java @@ -21,15 +21,8 @@ */ package org.exist.xquery; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -//import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import java.util.*; + import org.exist.dom.QName; import org.exist.xquery.value.Sequence; @@ -49,7 +42,7 @@ public abstract class AbstractInternalModule implements InternalModule { public static class FunctionComparator implements Comparator { @Override - public int compare(FunctionDef o1, FunctionDef o2) { + public int compare(final FunctionDef o1, final FunctionDef o2) { return o1.getSignature().getFunctionId().compareTo(o2.getSignature().getFunctionId()); } } @@ -58,23 +51,21 @@ public int compare(FunctionDef o1, FunctionDef o2) { protected final boolean ordered; private final Map> parameters; - protected final TreeMap mGlobalVariables = new TreeMap(); + protected final Map mGlobalVariables = new HashMap<>(); - public AbstractInternalModule(FunctionDef[] functions, - Map> parameters) { + public AbstractInternalModule(final FunctionDef[] functions, + final Map> parameters) { this(functions, parameters, false); } - public AbstractInternalModule(FunctionDef[] functions, Map> parameters, boolean functionsOrdered) { + public AbstractInternalModule(final FunctionDef[] functions, + final Map> parameters, + final boolean functionsOrdered) { this.mFunctions = 
functions; this.ordered = functionsOrdered; this.parameters = parameters; } - /* (non-Javadoc) - * @see org.exist.xquery.Module#isInternalModule() - */ @Override public boolean isInternalModule() { return true; @@ -83,12 +74,12 @@ public boolean isInternalModule() { /** * returns a module parameter */ - protected List getParameter(String paramName) { + protected List getParameter(final String paramName) { return parameters.get(paramName); } @Override - public void setContextItem(Sequence contextItem) { + public void setContextItem(final Sequence contextItem) { // not used for internal modules } @@ -107,8 +98,8 @@ public FunctionSignature[] listFunctions() { } @Override - public Iterator getSignaturesForFunction(QName qname) { - final List signatures = new ArrayList(2); + public Iterator getSignaturesForFunction(final QName qname) { + final List signatures = new ArrayList<>(2); for (int i = 0; i < mFunctions.length; i++) { final FunctionSignature signature = mFunctions[i].getSignature(); if (signature.getName().compareTo(qname) == 0){ @@ -118,9 +109,6 @@ public Iterator getSignaturesForFunction(QName qname) { return signatures.iterator(); } - /* (non-Javadoc) - * @see org.exist.xquery.Module#getClassForFunction(org.exist.dom.QName) - */ @Override public FunctionDef getFunctionDef(QName qname, int arity) { final FunctionId id = new FunctionId(qname, arity); @@ -136,7 +124,7 @@ public FunctionDef getFunctionDef(QName qname, int arity) { return null; } - private FunctionDef binarySearch(FunctionId id) { + private FunctionDef binarySearch(final FunctionId id) { int low = 0; int high = mFunctions.length - 1; @@ -156,8 +144,8 @@ private FunctionDef binarySearch(FunctionId id) { } @Override - public List getFunctionsByName(QName qname) { - final List funcs = new ArrayList(); + public List getFunctionsByName(final QName qname) { + final List funcs = new ArrayList<>(); for (int i = 0; i < mFunctions.length; i++) { final FunctionSignature sig = mFunctions[i].getSignature(); 
if (sig.getName().compareTo(qname) == 0) { @@ -171,8 +159,24 @@ public Iterator getGlobalVariables() { return mGlobalVariables.keySet().iterator(); } + /** + * Declares a variable defined by the module. + * + * NOTE: this should not be called from the constructor of a module + * otherwise when {@link #reset(XQueryContext, boolean)} is called + * with {@code keepGlobals = false}, the variables will be removed + * from the module. Which means they will not be available + * for subsequent re-executions of a cached XQuery. + * Instead, module level variables should be initialised + * in {@link #prepare(XQueryContext)}. + * + * @param qname The name of the variable + * @param value The Java value of the variable, will be converted to an XDM type. + * + * @return the variable + */ @Override - public Variable declareVariable(QName qname, Object value) throws XPathException { + public Variable declareVariable(final QName qname, final Object value) throws XPathException { final Sequence val = XPathUtil.javaObjectToXPath(value, null); Variable var = mGlobalVariables.get(qname); if (var == null){ @@ -183,34 +187,47 @@ public Variable declareVariable(QName qname, Object value) throws XPathException return var; } + /** + * Declares a variable defined by the module. + * + * NOTE: this should not be called from the constructor of a module + * otherwise when {@link #reset(XQueryContext, boolean)} is called + * with {@code keepGlobals = false}, the variables will be removed + * from the module. Which means they will not be available + * for subsequent re-executions of a cached XQuery. + * Instead, module level variables should be initialised + * in {@link #prepare(XQueryContext)}. 
+ * + * @param var The variable + * + * @return the variable + */ @Override - public Variable declareVariable(Variable var) { + public Variable declareVariable(final Variable var) { mGlobalVariables.put(var.getQName(), var); return var; } - /* (non-Javadoc) - * @see org.exist.xquery.Module#resolveVariable(org.exist.dom.QName) - */ @Override - public Variable resolveVariable(QName qname) throws XPathException { + public Variable resolveVariable(final QName qname) throws XPathException { return mGlobalVariables.get(qname); } @Override - public boolean isVarDeclared(QName qname) { + public boolean isVarDeclared(final QName qname) { return mGlobalVariables.get(qname) != null; } @Override - public void reset(XQueryContext context) { + public void reset(final XQueryContext context) { //Nothing to do } @Override - public void reset(XQueryContext xqueryContext, boolean keepGlobals) { + public void reset(final XQueryContext xqueryContext, final boolean keepGlobals) { // call deprecated method for backwards compatibility reset(xqueryContext); + if (!keepGlobals) { mGlobalVariables.clear(); } diff --git a/src/org/exist/xquery/BasicFunction.java b/src/org/exist/xquery/BasicFunction.java index 55188afbec9..4d2dd14de64 100644 --- a/src/org/exist/xquery/BasicFunction.java +++ b/src/org/exist/xquery/BasicFunction.java @@ -39,7 +39,7 @@ public BasicFunction(final XQueryContext context, final FunctionSignature signat } @Override - public Sequence eval(Sequence contextSequence, final Item contextItem) throws XPathException { + public Sequence eval(Sequence contextSequence, final Item contextItem) throws XPathException { if (context.getProfiler().isEnabled()) { context.getProfiler().start(this); context.getProfiler().message(this, Profiler.DEPENDENCIES, "DEPENDENCIES", Dependency.getDependenciesName(this.getDependencies())); diff --git a/src/org/exist/xquery/Cardinality.java b/src/org/exist/xquery/Cardinality.java index 4fba829c411..fbeb8a8cad6 100644 --- 
a/src/org/exist/xquery/Cardinality.java +++ b/src/org/exist/xquery/Cardinality.java @@ -44,15 +44,21 @@ public class Cardinality { public final static String toString(int cardinality) { switch(cardinality) { case EMPTY: - return "empty()"; + return "empty-sequence()"; + case EXACTLY_ONE: return ""; + + case MANY: case ONE_OR_MORE: return "+"; + case ZERO_OR_MORE: return "*"; + case ZERO_OR_ONE: return "?"; + default: // impossible throw new IllegalArgumentException("unknown cardinality: " + cardinality); diff --git a/src/org/exist/xquery/DeferredFunctionCall.java b/src/org/exist/xquery/DeferredFunctionCall.java index 028c76bce26..39f9dc95a56 100644 --- a/src/org/exist/xquery/DeferredFunctionCall.java +++ b/src/org/exist/xquery/DeferredFunctionCall.java @@ -139,10 +139,11 @@ public int getItemType() { } } - public int getItemCount() { + @Override + public long getItemCountLong() { try { realize(); - return sequence.getItemCount(); + return sequence.getItemCountLong(); } catch (XPathException e) { caughtException = e; LOG.error("Exception in deferred function: " + e.getMessage()); diff --git a/src/org/exist/xquery/DynamicVariable.java b/src/org/exist/xquery/DynamicVariable.java new file mode 100644 index 00000000000..6c17d920981 --- /dev/null +++ b/src/org/exist/xquery/DynamicVariable.java @@ -0,0 +1,124 @@ +/* + * eXist Open Source Native XML Database + * Copyright (C) 2001-2018 The eXist Project + * http://exist-db.org + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + */ + +package org.exist.xquery; + +import net.jcip.annotations.Immutable; +import org.exist.dom.QName; +import org.exist.dom.persistent.DocumentSet; +import org.exist.xquery.value.Sequence; +import org.exist.xquery.value.SequenceType; + +import java.util.function.Supplier; + +@Immutable +public class DynamicVariable implements Variable { + + private final QName name; + private final Supplier valueSupplier; + + public DynamicVariable(final QName name, final Supplier valueSupplier) { + this.name = name; + this.valueSupplier = valueSupplier; + } + + @Override + public void setValue(final Sequence val) { + throwImmutable(); + } + + @Override + public Sequence getValue() { + return valueSupplier.get(); + } + + @Override + public QName getQName() { + return name; + } + + @Override + public int getType() { + return valueSupplier.get().getItemType(); + } + + @Override + public void setSequenceType(final SequenceType type) { + throwImmutable(); + } + + @Override + public SequenceType getSequenceType() { + final Sequence value = getValue(); + return new SequenceType(value.getItemType(), value.getCardinality()); + } + + @Override + public void setStaticType(final int type) { + throwImmutable(); + } + + @Override + public int getStaticType() { + return getType(); + } + + @Override + public boolean isInitialized() { + return true; + } + + @Override + public void setIsInitialized(final boolean initialized) { + } + + @Override + public int getDependencies(final XQueryContext context) { + return 0; + } + + @Override + public int getCardinality() { + return getValue().getCardinality(); + } + + @Override + public void setStackPosition(final int position) { + } + + @Override + public DocumentSet getContextDocs() { + return DocumentSet.EMPTY_DOCUMENT_SET; + } + + 
@Override + public void setContextDocs(final DocumentSet docs) { + throwImmutable(); + } + + @Override + public void checkType() { + + } + + private static void throwImmutable() { + throw new UnsupportedOperationException("Changing a dynamic variable is not permitted"); + } +} diff --git a/src/org/exist/xquery/EnclosedExpr.java b/src/org/exist/xquery/EnclosedExpr.java index 94b6120d337..d69133fbda7 100644 --- a/src/org/exist/xquery/EnclosedExpr.java +++ b/src/org/exist/xquery/EnclosedExpr.java @@ -134,7 +134,9 @@ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathExc "An attribute may not appear after another child node."); } try { + receiver.setCheckNS(false); next.copyTo(context.getBroker(), receiver); + receiver.setCheckNS(true); } catch (DOMException e) { if (e.code == DOMException.NAMESPACE_ERR) { throw new XPathException(this, ErrorCodes.XQDY0102, e.getMessage()); diff --git a/src/org/exist/xquery/ErrorCodes.java b/src/org/exist/xquery/ErrorCodes.java index 893f5352136..6b52986d1cd 100644 --- a/src/org/exist/xquery/ErrorCodes.java +++ b/src/org/exist/xquery/ErrorCodes.java @@ -131,8 +131,9 @@ public class ErrorCodes { "Bind the prefix xmlns to any namespace URI.\n" + "Bind a prefix to the namespace URI http://www.w3.org/2000/xmlns/.\n" + "Bind any prefix (including the empty prefix) to a zero-length namespace URI."); - public static final ErrorCode XQDY0138 = new W3CErrorCode("XQDY0138", "Position n does not exist in this array"); public static final ErrorCode XQDY0102 = new W3CErrorCode("XQDY0102", "If the name of an element in an element constructor is in no namespace, creating a default namespace for that element using a computed namespace constructor is an error."); + public static final ErrorCode XQDY0137 = new W3CErrorCode("XQDY0137", "No two keys in a map may have the same key value"); + public static final ErrorCode XQDY0138 = new W3CErrorCode("XQDY0138", "Position n does not exist in this array"); /* XQuery 1.0 and XPath 2.0 
Functions and Operators http://www.w3.org/TR/xpath-functions/#error-summary */ public static final ErrorCode FOER0000 = new W3CErrorCode("FOER0000", "Unidentified error."); diff --git a/src/org/exist/xquery/Function.java b/src/org/exist/xquery/Function.java index 43f55d4fc04..5037a1a3125 100644 --- a/src/org/exist/xquery/Function.java +++ b/src/org/exist/xquery/Function.java @@ -307,7 +307,7 @@ protected Expression checkArgument(Expression expr, final SequenceType type, fin if (!Type.subTypeOf(returnType, Type.ATOMIC)) { expr = new Atomize(context, expr); } - if (!(type.getPrimaryType() == Type.ATOMIC)) { + if (type.getPrimaryType() != Type.ATOMIC) { expr = new UntypedValueCheck(context, type.getPrimaryType(), expr, new Error(Error.FUNC_PARAM_TYPE, String.valueOf(argPosition), mySignature)); } @@ -360,9 +360,6 @@ public void analyze(final AnalyzeContextInfo contextInfo) throws XPathException } } - public abstract Sequence eval(final Sequence contextSequence, final Item contextItem) - throws XPathException; - public Sequence[] getArguments(Sequence contextSequence, final Item contextItem) throws XPathException { if (contextItem != null) { diff --git a/src/org/exist/xquery/FunctionCall.java b/src/org/exist/xquery/FunctionCall.java index 04544dd1a9b..a8244f54b1e 100644 --- a/src/org/exist/xquery/FunctionCall.java +++ b/src/org/exist/xquery/FunctionCall.java @@ -232,7 +232,7 @@ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathExc getSignature().getReturnType().checkType(result.getItemType()); } } catch(final XPathException e) { - throw new XPathException(this, ErrorCodes.XPTY0004, "err:XPTY0004: return type of function '" + getSignature().getName() + "'. " + e.getMessage(), Sequence.EMPTY_SEQUENCE, e); + throw new XPathException(this, ErrorCodes.XPTY0004, "Return type of function '" + getSignature().getName() + "'. 
" + e.getMessage(), Sequence.EMPTY_SEQUENCE, e); } diff --git a/src/org/exist/xquery/InternalModule.java b/src/org/exist/xquery/InternalModule.java index 63042445677..1c2b7de9abf 100644 --- a/src/org/exist/xquery/InternalModule.java +++ b/src/org/exist/xquery/InternalModule.java @@ -36,6 +36,15 @@ */ public interface InternalModule extends Module { + /** + * Prepare the module for use. + * + * @param context The XQuery Context. + */ + default void prepare(final XQueryContext context) throws XPathException { + // no-op + } + /** * Returns the implementing class for the function identified * by qname or null if it is not defined. Called by @@ -44,7 +53,7 @@ public interface InternalModule extends Module { * @param qname * @return implementing class for the function */ - public FunctionDef getFunctionDef(QName qname, int argCount); + FunctionDef getFunctionDef(QName qname, int argCount); /** * Returns all functions defined in this module matching the @@ -53,5 +62,5 @@ public interface InternalModule extends Module { * @param qname * @return all functions defined in this module */ - public List getFunctionsByName(QName qname); + List getFunctionsByName(QName qname); } \ No newline at end of file diff --git a/src/org/exist/xquery/LocationStep.java b/src/org/exist/xquery/LocationStep.java index 1c94716906d..232ca128a32 100644 --- a/src/org/exist/xquery/LocationStep.java +++ b/src/org/exist/xquery/LocationStep.java @@ -7,16 +7,16 @@ * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. 
- * + * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * + * * $Id$ */ package org.exist.xquery; @@ -32,10 +32,7 @@ import org.exist.dom.memtree.InMemoryNodeSet; import org.exist.dom.memtree.NodeImpl; import org.exist.numbering.NodeId; -import org.exist.stax.EmbeddedXMLStreamReader; -import org.exist.stax.ExtendedXMLStreamReader; -import org.exist.stax.IEmbeddedXMLStreamReader; -import org.exist.stax.StaXUtil; +import org.exist.stax.*; import org.exist.storage.ElementValue; import org.exist.storage.UpdateListener; import org.exist.xquery.value.*; @@ -47,114 +44,100 @@ import javax.xml.stream.XMLStreamReader; import java.io.IOException; import java.util.Iterator; + import org.exist.dom.persistent.NodeHandle; /** * Processes all location path steps (like descendant::*, ancestor::XXX). - * + *

* The results of the first evaluation of the expression are cached for the * lifetime of the object and only reloaded if the context sequence (as passed * to the {@link #eval(Sequence, Item)} method) has changed. - * + * * @author wolf */ public class LocationStep extends Step { private static final int INDEX_SCAN_THRESHOLD = 10000; - protected NodeSet currentSet = null; - - protected DocumentSet currentDocs = null; - - protected UpdateListener listener = null; - - protected Expression parent = null; - - // Fields for caching the last result - protected CachedResult cached = null; - - protected int parentDeps = Dependency.UNKNOWN_DEPENDENCY; - - protected boolean preloadedData = false; - - protected boolean optimized = false; - - protected boolean inUpdate = false; - - protected boolean useDirectChildSelect = false; - - protected boolean applyPredicate = true; - - // Cache for the current NodeTest type - private Integer nodeTestType = null; - - /** - * Creates a new LocationStep instance. - * - * @param context - * a XQueryContext value - * @param axis - * an int value - */ - public LocationStep(XQueryContext context, int axis) { - super(context, axis); - } - - /** - * Creates a new LocationStep instance. 
- * - * @param context - * a XQueryContext value - * @param axis - * an int value - * @param test - * a NodeTest value - */ - public LocationStep(XQueryContext context, int axis, NodeTest test) { - super(context, axis, test); - } - - /* - * (non-Javadoc) - * - * @see org.exist.xquery.AbstractExpression#getDependencies() - */ - public int getDependencies() { - int deps = Dependency.CONTEXT_SET; - - // self axis has an obvious dependency on the context item - // likewise we depend on the context item if this is a single path step (outside a predicate) - if (!this.inPredicate && (this.axis == Constants.SELF_AXIS || - (parent != null && parent.getSubExpressionCount() > 0 && parent.getSubExpression(0) == this))) - {deps = deps | Dependency.CONTEXT_ITEM;} - - // TODO : normally, we should call this one... - // int deps = super.getDependencies(); ??? - for (final Predicate pred : predicates) { - deps |= pred.getDependencies(); - } - - // TODO : should we remove the CONTEXT_ITEM dependency returned by the - // predicates ? See the comment above. - // consider nested predicates however... - - return deps; - } - - /** - * If the current path expression depends on local variables from a for - * expression, we can optimize by preloading entire element or attribute - * sets. - * - * @return Whether or not we can optimize - */ - protected boolean hasPreloadedData() { - // TODO : log elsewhere ? 
- if (preloadedData) { - context.getProfiler().message(this, Profiler.OPTIMIZATIONS, null, - "Preloaded NodeSets"); - return true; - } + private NodeSet currentSet = null; + private DocumentSet currentDocs = null; + protected UpdateListener listener = null; + protected Expression parent = null; + + // Fields for caching the last result + protected CachedResult cached = null; + + //private int parentDeps = Dependency.UNKNOWN_DEPENDENCY; + private boolean preloadedData = false; + protected boolean optimized = false; +// private boolean inUpdate = false; + private boolean useDirectChildSelect = false; + private boolean applyPredicate = true; + + // Cache for the current NodeTest type + private Integer nodeTestType = null; + + /** + * Creates a new instance. + * + * @param context the XQuery context + * @param axis the axis of the location step + */ + public LocationStep(final XQueryContext context, final int axis) { + super(context, axis); + } + + /** + * Creates a new instance. + * + * @param context the XQuery context + * @param axis the axis of the location step + * @param test a node test on the axis + */ + public LocationStep(final XQueryContext context, final int axis, final NodeTest test) { + super(context, axis, test); + } + + @Override + public int getDependencies() { + int deps = Dependency.CONTEXT_SET; + + // self axis has an obvious dependency on the context item + // likewise we depend on the context item if this is a single path step (outside a predicate) + if (!this.inPredicate && + (this.axis == Constants.SELF_AXIS || + (parent != null && parent.getSubExpressionCount() > 0 && parent.getSubExpression(0) == this))) { + deps = deps | Dependency.CONTEXT_ITEM; + } + + // TODO : normally, we should call this one... + // int deps = super.getDependencies(); ??? + for (final Predicate pred : predicates) { + deps |= pred.getDependencies(); + } + + // TODO : should we remove the CONTEXT_ITEM dependency returned by the + // predicates ? See the comment above. 
+ // consider nested predicates however... + + return deps; + } + + /** + * If the current path expression depends on local variables from a for + * expression, we can optimize by preloading entire element or attribute + * sets. + * + * @return Whether or not we can optimize + */ + private boolean hasPreloadedData() { + // TODO : log elsewhere ? + if (preloadedData) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, null, + "Preloaded NodeSets"); + return true; + } // if (inUpdate) // return false; // if ((parentDeps & Dependency.LOCAL_VARS) == Dependency.LOCAL_VARS) { @@ -162,246 +145,238 @@ protected boolean hasPreloadedData() { // "Preloaded NodeSets"); // return true; // } - return false; - } - - /** - * The method setPreloadedData - * - * @param docs - * a DocumentSet value - * @param nodes - * a NodeSet value - */ - public void setPreloadedData(DocumentSet docs, NodeSet nodes) { - this.preloadedData = true; - this.currentDocs = docs; - this.currentSet = nodes; - this.optimized = true; - } - - /** - * The method applyPredicate - * - * @param outerSequence - * a Sequence value - * @param contextSequence - * a Sequence value - * @return a Sequence value - * @exception XPathException - * if an error occurs - */ - protected Sequence applyPredicate(Sequence outerSequence, - Sequence contextSequence) throws XPathException { - if (contextSequence == null) - {return Sequence.EMPTY_SEQUENCE;} - if (predicates.size() == 0 - || !applyPredicate - || (!(contextSequence instanceof VirtualNodeSet) && contextSequence - .isEmpty())) - // Nothing to apply - {return contextSequence;} - Sequence result; - final Predicate pred = (Predicate) predicates.get(0); - // If the current step is an // abbreviated step, we have to treat the - // predicate - // specially to get the context position right. //a[1] translates to - // /descendant-or-self::node()/a[1], - // so we need to return the 1st a from any parent of a. 
- // - // If the predicate is known to return a node set, no special treatment - // is required. - if (abbreviatedStep - && (pred.getExecutionMode() != Predicate.NODE || !contextSequence - .isPersistentSet())) { - result = new ValueSequence(); - ((ValueSequence)result).keepUnOrdered(unordered); - if (contextSequence.isPersistentSet()) { - final NodeSet contextSet = contextSequence.toNodeSet(); - outerSequence = contextSet.getParents(-1); - for (final SequenceIterator i = outerSequence.iterate(); i.hasNext();) { - final NodeValue node = (NodeValue) i.nextItem(); - final Sequence newContextSeq = contextSet.selectParentChild( - (NodeSet) node, NodeSet.DESCENDANT, - getExpressionId()); - final Sequence temp = processPredicate(outerSequence, - newContextSeq); - result.addAll(temp); - } - } else { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - outerSequence = nodes.getParents(new AnyNodeTest()); - for (final SequenceIterator i = outerSequence.iterate(); i.hasNext();) { - final NodeValue node = (NodeValue) i.nextItem(); - final InMemoryNodeSet newSet = new InMemoryNodeSet(); - ((NodeImpl) node).selectChildren(test, newSet); - final Sequence temp = processPredicate(outerSequence, newSet); - result.addAll(temp); - } - } - } else - {result = processPredicate(outerSequence, contextSequence);} - return result; - } - - private Sequence processPredicate(Sequence outerSequence, - Sequence contextSequence) throws XPathException { - Predicate pred; - Sequence result = contextSequence; - for (final Iterator i = predicates.iterator(); i.hasNext() - && (result instanceof VirtualNodeSet || !result.isEmpty());) { - // TODO : log and/or profile ? 
- pred = i.next(); - pred.setContextDocSet(getContextDocSet()); - result = pred.evalPredicate(outerSequence, result, axis); - // subsequent predicates operate on the result of the previous one - outerSequence = null; + return false; + } + + /** + * The method setPreloadedData + * + * @param docs a DocumentSet value + * @param nodes a NodeSet value + */ + public void setPreloadedData(final DocumentSet docs, final NodeSet nodes) { + this.preloadedData = true; + this.currentDocs = docs; + this.currentSet = nodes; + this.optimized = true; + } + + /** + * The method applyPredicate + * + * @param outerSequence a Sequence value + * @param contextSequence a Sequence value + * @return a Sequence value + * @throws XPathException if an error occurs + */ + private Sequence applyPredicate(Sequence outerSequence, final Sequence contextSequence) throws XPathException { + if (contextSequence == null) { + return Sequence.EMPTY_SEQUENCE; + } + if (predicates.size() == 0 + || !applyPredicate + || (!(contextSequence instanceof VirtualNodeSet) && contextSequence + .isEmpty())) + // Nothing to apply + { + return contextSequence; + } + Sequence result; + final Predicate pred = predicates.get(0); + // If the current step is an // abbreviated step, we have to treat the + // predicate + // specially to get the context position right. //a[1] translates to + // /descendant-or-self::node()/a[1], + // so we need to return the 1st a from any parent of a. + // + // If the predicate is known to return a node set, no special treatment + // is required. 
+ if (abbreviatedStep + && (pred.getExecutionMode() != Predicate.NODE || !contextSequence + .isPersistentSet())) { + result = new ValueSequence(); + ((ValueSequence) result).keepUnOrdered(unordered); + if (contextSequence.isPersistentSet()) { + final NodeSet contextSet = contextSequence.toNodeSet(); + outerSequence = contextSet.getParents(-1); + for (final SequenceIterator i = outerSequence.iterate(); i.hasNext(); ) { + final NodeValue node = (NodeValue) i.nextItem(); + final Sequence newContextSeq = contextSet.selectParentChild( + (NodeSet) node, NodeSet.DESCENDANT, + getExpressionId()); + final Sequence temp = processPredicate(outerSequence, + newContextSeq); + result.addAll(temp); + } + } else { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + outerSequence = nodes.getParents(new AnyNodeTest()); + for (final SequenceIterator i = outerSequence.iterate(); i.hasNext(); ) { + final NodeValue node = (NodeValue) i.nextItem(); + final InMemoryNodeSet newSet = new InMemoryNodeSet(); + ((NodeImpl) node).selectChildren(test, newSet); + final Sequence temp = processPredicate(outerSequence, newSet); + result.addAll(temp); + } + } + } else { + result = processPredicate(outerSequence, contextSequence); + } + return result; + } + + private Sequence processPredicate(Sequence outerSequence, final Sequence contextSequence) throws XPathException { + Sequence result = contextSequence; + for (final Iterator i = predicates.iterator(); i.hasNext() + && (result instanceof VirtualNodeSet || !result.isEmpty()); ) { + // TODO : log and/or profile ? 
+ final Predicate pred = i.next(); + pred.setContextDocSet(getContextDocSet()); + result = pred.evalPredicate(outerSequence, result, axis); + // subsequent predicates operate on the result of the previous one + outerSequence = null; context.setContextSequencePosition(-1, null); - } - return result; - } - - /* - * (non-Javadoc) - * - * @see org.exist.xquery.Step#analyze(org.exist.xquery.Expression) - */ - public void analyze(AnalyzeContextInfo contextInfo) throws XPathException { - this.parent = contextInfo.getParent(); + } + return result; + } + + @Override + public void analyze(final AnalyzeContextInfo contextInfo) throws XPathException { + this.parent = contextInfo.getParent(); unordered = (contextInfo.getFlags() & UNORDERED) > 0; - parentDeps = parent.getDependencies(); - if ((contextInfo.getFlags() & IN_UPDATE) > 0) - {inUpdate = true;} +// parentDeps = parent.getDependencies(); +// if ((contextInfo.getFlags() & IN_UPDATE) > 0) { +// inUpdate = true; +// } // if ((contextInfo.getFlags() & SINGLE_STEP_EXECUTION) > 0) { // preloadedData = true; // } - if ((contextInfo.getFlags() & USE_TREE_TRAVERSAL) > 0) { - useDirectChildSelect = true; - } - // Mark ".", which is expanded as self::node() by the parser - // even though it may *also* be relevant with atomic sequences - if (this.axis == Constants.SELF_AXIS - && this.test.getType() == Type.NODE) - {contextInfo.addFlag(DOT_TEST);} - - //Change axis from descendant-or-self to descendant for '//' - if (this.axis == Constants.DESCENDANT_SELF_AXIS && isAbbreviated()) { - this.axis = Constants.DESCENDANT_AXIS; - } - - // static analysis for empty-sequence - Expression contextStep; - switch (axis) { - case Constants.SELF_AXIS: - if (getTest().getType() != Type.NODE) { - - contextStep = contextInfo.getContextStep(); - if (contextStep instanceof LocationStep) { - final LocationStep cStep = (LocationStep) contextStep; - - // WM: the following checks will only work on simple filters like //a[self::b], so we - // have to make 
sure they are not applied to more complex expression types - if (parent.getSubExpressionCount() == 1 && !Type.subTypeOf(getTest().getType(), cStep.getTest().getType())) - {throw new XPathException(this, - ErrorCodes.XPST0005, "Got nothing from self::"+getTest()+", because parent node kind "+Type.getTypeName(cStep.getTest().getType()));} - - if (parent.getSubExpressionCount() == 1 && !(cStep.getTest().isWildcardTest() || getTest().isWildcardTest()) && !cStep.getTest().equals(getTest())) - {throw new XPathException(this, - ErrorCodes.XPST0005, "Self::"+getTest()+" called on set of nodes which do not contain any nodes of this name.");} - } - } - break; + if ((contextInfo.getFlags() & USE_TREE_TRAVERSAL) > 0) { + useDirectChildSelect = true; + } + // Mark ".", which is expanded as self::node() by the parser + // even though it may *also* be relevant with atomic sequences + if (this.axis == Constants.SELF_AXIS + && this.test.getType() == Type.NODE) { + contextInfo.addFlag(DOT_TEST); + } + + //Change axis from descendant-or-self to descendant for '//' + if (this.axis == Constants.DESCENDANT_SELF_AXIS && isAbbreviated()) { + this.axis = Constants.DESCENDANT_AXIS; + } + + // static analysis for empty-sequence + switch (axis) { + case Constants.SELF_AXIS: + if (getTest().getType() != Type.NODE) { + final Expression contextStep = contextInfo.getContextStep(); + if (contextStep instanceof LocationStep) { + final LocationStep cStep = (LocationStep) contextStep; + + // WM: the following checks will only work on simple filters like //a[self::b], so we + // have to make sure they are not applied to more complex expression types + if (parent.getSubExpressionCount() == 1 && !Type.subTypeOf(getTest().getType(), cStep.getTest().getType())) { + throw new XPathException(this, + ErrorCodes.XPST0005, "Got nothing from self::" + getTest() + ", because parent node kind " + Type.getTypeName(cStep.getTest().getType())); + } + + if (parent.getSubExpressionCount() == 1 && 
!(cStep.getTest().isWildcardTest() || getTest().isWildcardTest()) && !cStep.getTest().equals(getTest())) { + throw new XPathException(this, + ErrorCodes.XPST0005, "Self::" + getTest() + " called on set of nodes which do not contain any nodes of this name."); + } + } + } + break; // case Constants.DESCENDANT_AXIS: - case Constants.DESCENDANT_SELF_AXIS: - contextStep = contextInfo.getContextStep(); - if (contextStep instanceof LocationStep) { - final LocationStep cStep = (LocationStep) contextStep; - - if (( - cStep.getTest().getType() == Type.ATTRIBUTE || - cStep.getTest().getType() == Type.TEXT - ) - && cStep.getTest() != getTest()) - {throw new XPathException(this, - ErrorCodes.XPST0005, "Descendant-or-self::"+getTest()+" from an attribute gets nothing.");} - } - break; + case Constants.DESCENDANT_SELF_AXIS: + final Expression contextStep = contextInfo.getContextStep(); + if (contextStep instanceof LocationStep) { + final LocationStep cStep = (LocationStep) contextStep; + + if (( + cStep.getTest().getType() == Type.ATTRIBUTE || + cStep.getTest().getType() == Type.TEXT + ) + && cStep.getTest() != getTest()) { + throw new XPathException(this, + ErrorCodes.XPST0005, "Descendant-or-self::" + getTest() + " from an attribute gets nothing."); + } + } + break; // case Constants.PARENT_AXIS: // case Constants.ATTRIBUTE_AXIS: - default: - } - - // TODO : log somewhere ? 
- super.analyze(contextInfo); - } - - /** - * The method eval - * - * @param contextSequence - * a Sequence value - * @param contextItem - * an Item value - * @return a Sequence value - * @exception XPathException - * if an error occurs - */ - public Sequence eval(Sequence contextSequence, Item contextItem) - throws XPathException { - if (context.getProfiler().isEnabled()) { - context.getProfiler().start(this); - context.getProfiler().message(this, Profiler.DEPENDENCIES, - "DEPENDENCIES", - Dependency.getDependenciesName(this.getDependencies())); - if (contextSequence != null) - {context.getProfiler().message(this, Profiler.START_SEQUENCES, - "CONTEXT SEQUENCE", contextSequence);} - if (contextItem != null) - {context.getProfiler().message(this, Profiler.START_SEQUENCES, - "CONTEXT ITEM", contextItem.toSequence());} - } - - Sequence result; - if (contextItem != null) { - contextSequence = contextItem.toSequence(); - } - /* - * if(contextSequence == null) //Commented because this the high level - * result nodeset is *really* null result = NodeSet.EMPTY_SET; //Try to - * return cached results else - */ - // TODO: disabled cache for now as it may cause concurrency issues - // better use compile-time inspection and maybe a pragma to mark those - // sections in the query that can be safely cached - // if (cached != null && cached.isValid(contextSequence, contextItem)) { - // - // // WARNING : commented since predicates are *also* applied below ! 
- // // -pb - // /* - // * if (predicates.size() > 0) { applyPredicate(contextSequence, - // * cached.getResult()); } else { - // */ - // result = cached.getResult(); - // if (context.getProfiler().isEnabled()) { - // LOG.debug("Using cached results"); - // } - // context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - // "Using cached results", result); - // - // // } - if (needsComputation()) { - if (contextSequence == null) - {throw new XPathException(this, - ErrorCodes.XPDY0002, "Undefined context sequence for '" - + this.toString() + "'");} + default: + } + + // TODO : log somewhere ? + super.analyze(contextInfo); + } + + @Override + public Sequence eval(Sequence contextSequence, final Item contextItem) + throws XPathException { + if (context.getProfiler().isEnabled()) { + context.getProfiler().start(this); + context.getProfiler().message(this, Profiler.DEPENDENCIES, + "DEPENDENCIES", + Dependency.getDependenciesName(this.getDependencies())); + if (contextSequence != null) { + context.getProfiler().message(this, Profiler.START_SEQUENCES, + "CONTEXT SEQUENCE", contextSequence); + } + if (contextItem != null) { + context.getProfiler().message(this, Profiler.START_SEQUENCES, + "CONTEXT ITEM", contextItem.toSequence()); + } + } + + if (contextItem != null) { + contextSequence = contextItem.toSequence(); + } + /* + * if(contextSequence == null) //Commented because this the high level + * result nodeset is *really* null result = NodeSet.EMPTY_SET; //Try to + * return cached results else + */ + // TODO: disabled cache for now as it may cause concurrency issues + // better use compile-time inspection and maybe a pragma to mark those + // sections in the query that can be safely cached + // if (cached != null && cached.isValid(contextSequence, contextItem)) { + // + // // WARNING : commented since predicates are *also* applied below ! 
+ // // -pb + // /* + // * if (predicates.size() > 0) { applyPredicate(contextSequence, + // * cached.getResult()); } else { + // */ + // result = cached.getResult(); + // if (context.getProfiler().isEnabled()) { + // LOG.debug("Using cached results"); + // } + // context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + // "Using cached results", result); + // + // // } + + Sequence result; + if (needsComputation()) { + if (contextSequence == null) { + throw new XPathException(this, + ErrorCodes.XPDY0002, "Undefined context sequence for '" + + this.toString() + "'"); + } + try { switch (axis) { + case Constants.DESCENDANT_AXIS: case Constants.DESCENDANT_SELF_AXIS: result = getDescendants(context, contextSequence); break; + case Constants.CHILD_AXIS: // VirtualNodeSets may have modified the axis ; checking the // type @@ -413,13 +388,16 @@ public Sequence eval(Sequence contextSequence, Item contextItem) result = getChildren(context, contextSequence); // } break; + case Constants.ANCESTOR_SELF_AXIS: case Constants.ANCESTOR_AXIS: result = getAncestors(context, contextSequence); break; + case Constants.PARENT_AXIS: result = getParents(context, contextSequence); break; + case Constants.SELF_AXIS: if (!(contextSequence instanceof VirtualNodeSet) && Type.subTypeOf(contextSequence.getItemType(), @@ -435,1210 +413,1105 @@ public Sequence eval(Sequence contextSequence, Item contextItem) result = getSelf(context, contextSequence); } break; + case Constants.ATTRIBUTE_AXIS: case Constants.DESCENDANT_ATTRIBUTE_AXIS: result = getAttributes(context, contextSequence); break; + case Constants.PRECEDING_AXIS: - result = getPreceding(context, contextSequence); - break; case Constants.FOLLOWING_AXIS: - result = getFollowing(context, contextSequence); + result = getPrecedingOrFollowing(context, contextSequence); break; + case Constants.PRECEDING_SIBLING_AXIS: case Constants.FOLLOWING_SIBLING_AXIS: result = getSiblings(context, contextSequence); break; + default: throw new 
IllegalArgumentException("Unsupported axis specified"); } - } catch (XPathException e) { + } catch (final XPathException e) { if (e.getLine() <= 0) { e.setLocation(getLine(), getColumn(), getSource()); } throw e; } - } else { - result = NodeSet.EMPTY_SET; - } - // Caches the result - if (axis != Constants.SELF_AXIS && contextSequence != null - && contextSequence.isCacheable()) { - // TODO : cache *after* removing duplicates ? -pb - cached = new CachedResult(contextSequence, contextItem, result); - registerUpdateListener(); - } - // Remove duplicate nodes - result.removeDuplicates(); - // Apply the predicate - result = applyPredicate(contextSequence, result); - - if (context.getProfiler().isEnabled()) - {context.getProfiler().end(this, "", result);} - // actualReturnType = result.getItemType(); - - return result; - } - - // Avoid unnecessary tests (these should be detected by the parser) - private boolean needsComputation() { - // TODO : log this ? - switch (axis) { - // Certainly not exhaustive - case Constants.ANCESTOR_SELF_AXIS: - case Constants.PARENT_AXIS: - // case Constants.SELF_AXIS: - if (nodeTestType == null) - {nodeTestType = Integer.valueOf(test.getType());} - if (nodeTestType.intValue() != Type.NODE - && nodeTestType.intValue() != Type.ELEMENT - && nodeTestType.intValue() != Type.PROCESSING_INSTRUCTION) { - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", "avoid useless computations");} - return false; - } - - } - return true; - } - - /** - * The method getSelf - * - * @param context - * a XQueryContext value - * @param contextSequence - * a NodeSet value - * @return a Sequence value - */ - protected Sequence getSelf(XQueryContext context, Sequence contextSequence) - throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.getSelf(test); - } - if (hasPreloadedData() && !test.isWildcardTest()) { - 
NodeSet ns = null; - if (contextSequence instanceof NodeSet) { - ns = (NodeSet)contextSequence; - } - NodeProxy np = null; - - for (final Iterator i = currentSet.iterator(); i.hasNext(); ) { - final NodeProxy p = i.next(); - p.addContextNode(contextId, p); - - if (ns != null) { - np = ns.get(p); - - if (np != null && np.getMatches() != null) - {p.addMatch( np.getMatches() );} - } - } - return currentSet; - } - final NodeSet contextSet = contextSequence.toNodeSet(); - if (test.getType() == Type.PROCESSING_INSTRUCTION) { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } - - if (test.isWildcardTest()) { - if (nodeTestType == null) { - nodeTestType = Integer.valueOf(test.getType()); - } - if (Type.subTypeOf(nodeTestType.intValue(), Type.NODE)) { - if (Expression.NO_CONTEXT_ID != contextId) { - if (contextSet instanceof VirtualNodeSet) { - ((VirtualNodeSet) contextSet).setInPredicate(true); - ((VirtualNodeSet) contextSet).setContextId(contextId); - ((VirtualNodeSet) contextSet).setSelfIsContext(); - } else if (Type.subTypeOf(contextSet.getItemType(), - Type.NODE)) { - NodeProxy p; - for (final Iterator i = contextSet.iterator(); i.hasNext();) { - p = i.next(); - if (test.matches(p)) - {p.addContextNode(contextId, p);} - } - } - } - return contextSet; - } else { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), - axis, test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} - final NodeSelector selector = new SelfSelector(contextSet, contextId); - 
return index.findElementsByTagName(ElementValue.ELEMENT, docs, test - .getName(), selector, this); - } - } - - /** - * The method getAttributes - * - * @param context - * a XQueryContext value - * @param contextSequence - * a NodeSet value - * @return a NodeSet value - */ - protected Sequence getAttributes(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - if (axis == Constants.DESCENDANT_ATTRIBUTE_AXIS) - {return nodes.getDescendantAttributes(test);} - else - {return nodes.getAttributes(test);} - } - final NodeSet contextSet = contextSequence.toNodeSet(); - if (!hasPreloadedData() && test.isWildcardTest()) { - final NodeSet result = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - ((VirtualNodeSet) result) - .setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return result; - // if there's just a single known node in the context, it is faster - // do directly search for the attribute in the parent node. - } - if (hasPreloadedData()) { - DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null - || currentDocs == null - || (!optimized && !(docs == currentDocs || docs - .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - // TODO : why a null selector here ? We have one below ! 
- currentSet = index.findElementsByTagName(ElementValue.ATTRIBUTE, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - switch (axis) { - case Constants.ATTRIBUTE_AXIS: - return currentSet.selectParentChild(contextSet, - NodeSet.DESCENDANT, contextId); - case Constants.DESCENDANT_ATTRIBUTE_AXIS: - return currentSet.selectAncestorDescendant(contextSet, - NodeSet.DESCENDANT, false, contextId, true); - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} - if (!contextSet.getProcessInReverseOrder()) { - return index.findDescendantsByTagName(ElementValue.ATTRIBUTE, - test.getName(), axis, docs, contextSet, - contextId, this); - } else { - NodeSelector selector; - switch (axis) { - case Constants.ATTRIBUTE_AXIS: - selector = new ChildSelector(contextSet, contextId); - break; - case Constants.DESCENDANT_ATTRIBUTE_AXIS: - selector = new DescendantSelector(contextSet, contextId); - break; - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - return index.findElementsByTagName(ElementValue.ATTRIBUTE, docs, test.getName(), selector, this); - } - } - } - - /** - * The method getChildren - * - * @param context - * a XQueryContext value - * @param contextSequence - * the context sequence - * @return a NodeSet value - */ - protected Sequence getChildren(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.getChildren(test); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - // TODO : understand this. 
I guess comments should be treated in a - // similar way ? -pb - if ((!hasPreloadedData() && test.isWildcardTest()) - || test.getType() == Type.PROCESSING_INSTRUCTION) { - // test is one out of *, text(), node() including - // processing-instruction(targetname) - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } - - // IndexStatistics stats = (IndexStatistics) - // context.getBroker().getBrokerPool(). - // getIndexManager().getIndexById(IndexStatistics.ID); - // int parentDepth = stats.getMaxParentDepth(test.getName()); - // LOG.debug("parentDepth for " + test.getName() + ": " + parentDepth); - - if (useDirectChildSelect) { - final NewArrayNodeSet result = new NewArrayNodeSet(); - for (final NodeProxy p : contextSet) { - result.addAll(p.directSelectChild(test.getName(), contextId)); - } - return result; - } else if (hasPreloadedData()) { - DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - // TODO : understand why this one is different from the other - // ones - if (currentSet == null - || currentDocs == null - || (!optimized && !(docs == currentDocs || docs - .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - return currentSet.selectParentChild(contextSet, - NodeSet.DESCENDANT, contextId); - } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, 
Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} - if (!contextSet.getProcessInReverseOrder() && !(contextSet instanceof VirtualNodeSet) && - contextSet.getLength() < INDEX_SCAN_THRESHOLD) { - return index.findDescendantsByTagName(ElementValue.ELEMENT, - test.getName(), axis, docs, contextSet, - contextId, parent); - } else { - // if (contextSet instanceof VirtualNodeSet) - // ((VirtualNodeSet)contextSet).realize(); - final NodeSelector selector = new ChildSelector(contextSet, contextId); - return index.findElementsByTagName(ElementValue.ELEMENT, docs, - test.getName(), selector, this); - } - } - } - - /** - * The method getDescendants - * - * @param context - * a XQueryContext value - * @param contextSequence - * the context sequence - * @return a NodeSet value - */ - protected Sequence getDescendants(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.getDescendants(axis == Constants.DESCENDANT_SELF_AXIS, - test); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - // TODO : understand this. I guess comments should be treated in a - // similar way ? 
-pb - if ((!hasPreloadedData() && test.isWildcardTest()) - || test.getType() == Type.PROCESSING_INSTRUCTION) { - // test is one out of *, text(), node() including - // processing-instruction(targetname) - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } else if (hasPreloadedData()) { - DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - // TODO : understand why this one is different from the other - // ones - if (currentSet == null - || currentDocs == null - || (!optimized && !(docs == currentDocs || docs - .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - switch (axis) { - case Constants.DESCENDANT_SELF_AXIS: - final NodeSet tempSet = currentSet.selectAncestorDescendant( - contextSet, NodeSet.DESCENDANT, true, contextId, - true); - return tempSet; - case Constants.DESCENDANT_AXIS: - return currentSet.selectAncestorDescendant(contextSet, - NodeSet.DESCENDANT, false, contextId, true); - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - } - } else { - final DocumentSet docs = contextSet.getDocumentSet(); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) { - context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'"); - } - if (!contextSet.getProcessInReverseOrder() && (contextSet instanceof VirtualNodeSet || contextSet.getLength() < 
INDEX_SCAN_THRESHOLD)) { - return index.findDescendantsByTagName(ElementValue.ELEMENT, - test.getName(), axis, docs, contextSet, - contextId, this); - } else { - NodeSelector selector; - switch (axis) { - case Constants.DESCENDANT_SELF_AXIS: - selector = new DescendantOrSelfSelector(contextSet, - contextId); - break; - case Constants.DESCENDANT_AXIS: - selector = new DescendantSelector(contextSet, contextId); - break; - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - return index.findElementsByTagName(ElementValue.ELEMENT, docs, - test.getName(), selector, this); - } - - } - } - - /** - * The method getSiblings - * - * @param context - * a XQueryContext value - * @param contextSequence - * a NodeSet value - * @return a NodeSet value - */ - protected Sequence getSiblings(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - if (axis == Constants.PRECEDING_SIBLING_AXIS) - {return nodes.getPrecedingSiblings(test);} - else - {return nodes.getFollowingSiblings(test);} - } - final NodeSet contextSet = contextSequence.toNodeSet(); - // TODO : understand this. I guess comments should be treated in a - // similar way ? 
-pb - if (test.getType() == Type.PROCESSING_INSTRUCTION) { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } - if (test.isWildcardTest()) { - final NewArrayNodeSet result = new NewArrayNodeSet(); - try { - for (final NodeProxy current : contextSet) { - //ignore document elements to avoid NPE at getXMLStreamReader - if (NodeId.ROOT_NODE.equals(current.getNodeId())) - {continue;} - - final NodeProxy parent = new NodeProxy(current.getOwnerDocument(), - current.getNodeId().getParentId()); - StreamFilter filter; - if (axis == Constants.PRECEDING_SIBLING_AXIS) - {filter = new PrecedingSiblingFilter(test, current, - result, contextId);} - else - {filter = new FollowingSiblingFilter(test, current, - result, contextId);} - final IEmbeddedXMLStreamReader reader = context.getBroker() - .getXMLStreamReader(parent, false); - reader.filter(filter); - } - } catch (final IOException e) { - throw new XPathException(this, e); - } catch (final XMLStreamException e) { - throw new XPathException(this, e); - } - return result; - } else { - // TODO : no test on preloaded data ? 
- DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null || currentDocs == null - || !(docs.equalDocs(currentDocs))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - switch (axis) { - case Constants.PRECEDING_SIBLING_AXIS: - return currentSet.selectPrecedingSiblings(contextSet, - contextId); - case Constants.FOLLOWING_SIBLING_AXIS: - return currentSet.selectFollowingSiblings(contextSet, - contextId); - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - } - } - } - - /** - * The method getPreceding - * - * @param context - * a XQueryContext value - * @param contextSequence - * a Sequence value - * @return a NodeSet value - * @exception XPathException - * if an error occurs - */ - protected Sequence getPreceding(XQueryContext context, - Sequence contextSequence) throws XPathException { - int position = -1; - if (hasPositionalPredicate) { - final Predicate pred = (Predicate) predicates.get(0); - final Sequence seq = pred.preprocess(); - - final NumericValue v = (NumericValue) seq.itemAt(0); - // Non integers return... nothing, not even an error ! - if (!v.hasFractionalPart() && !v.isZero()) { - position = v.getInt(); - } - } - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - if (hasPositionalPredicate && position > -1) - {applyPredicate = false;} - return nodes.getPreceding(test, position); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - // TODO : understand this. I guess comments should be treated in a - // similar way ? 
-pb - if (test.getType() == Type.PROCESSING_INSTRUCTION) { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } - if (test.isWildcardTest()) { - try { - final NodeSet result = new NewArrayNodeSet(); - for (final NodeProxy next : contextSet) { - final NodeList cl = next.getOwnerDocument().getChildNodes(); - for (int j = 0; j < cl.getLength(); j++) { - final NodeHandle node = (NodeHandle) cl.item(j); - final NodeProxy root = new NodeProxy(node); - final PrecedingFilter filter = new PrecedingFilter(test, - next, result, contextId); - final IEmbeddedXMLStreamReader reader = context.getBroker() - .getXMLStreamReader(root, false); - reader.filter(filter); - } - } - return result; - } catch (final XMLStreamException e) { - throw new XPathException(this, e); - } catch (final IOException e) { - throw new XPathException(this, e); - } - } else { - // TODO : no test on preloaded data ? 
- DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null || currentDocs == null - || !(docs.equalDocs(currentDocs))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - if (hasPositionalPredicate) { - try { - applyPredicate = false; - return currentSet.selectPreceding(contextSet, position, - contextId); - } catch (final UnsupportedOperationException e) { - return currentSet - .selectPreceding(contextSet, contextId); - } - } else - {return currentSet.selectPreceding(contextSet, contextId);} - } - } - } - - /** - * The method getFollowing - * - * @param context - * a XQueryContext value - * @param contextSequence - * a Sequence value - * @return a NodeSet value - * @exception XPathException - * if an error occurs - */ - protected Sequence getFollowing(XQueryContext context, - Sequence contextSequence) throws XPathException { - int position = -1; - if (hasPositionalPredicate) { - final Predicate pred = (Predicate) predicates.get(0); - final Sequence seq = pred.preprocess(); - - final NumericValue v = (NumericValue) seq.itemAt(0); - // Non integers return... nothing, not even an error ! - if (!v.hasFractionalPart() && !v.isZero()) { - position = v.getInt(); - } - } - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - if (hasPositionalPredicate && position > -1) - {applyPredicate = false;} - return nodes.getFollowing(test, position); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - // TODO : understand this. I guess comments should be treated in a - // similar way ? 
-pb - if (test.getType() == Type.PROCESSING_INSTRUCTION) { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return vset; - } - if (test.isWildcardTest() - && test.getType() != Type.PROCESSING_INSTRUCTION) { - // handle wildcard steps like following::node() - try { - final NodeSet result = new NewArrayNodeSet(); - for (final NodeProxy next : contextSet) { - final NodeList cl = next.getOwnerDocument().getChildNodes(); - for (int j = 0; j < cl.getLength(); j++) { - final NodeHandle node = (NodeHandle) cl.item(j); - final NodeProxy root = new NodeProxy(node); - final FollowingFilter filter = new FollowingFilter(test, - next, result, contextId); - final IEmbeddedXMLStreamReader reader = context.getBroker() - .getXMLStreamReader(root, false); - reader.filter(filter); - } - } - return result; - } catch (final XMLStreamException e) { - throw new XPathException(this, e); - } catch (final IOException e) { - throw new XPathException(this, e); - } - } else { - // TODO : no test on preloaded data ? 
- DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null || currentDocs == null - || !(docs.equalDocs(currentDocs))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - if (hasPositionalPredicate) { - try { - applyPredicate = false; - return currentSet.selectFollowing(contextSet, position, - contextId); - } catch (final UnsupportedOperationException e) { - return currentSet - .selectFollowing(contextSet, contextId); - } - } else - {return currentSet.selectFollowing(contextSet, contextId);} - } - } - } - - /** - * The method getAncestors - * - * @param context - * a XQueryContext value - * @param contextSequence - * a Sequence value - * @return a NodeSet value - */ - protected Sequence getAncestors(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.getAncestors(axis == Constants.ANCESTOR_SELF_AXIS, - test); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - if (test.isWildcardTest()) { - final NodeSet result = new NewArrayNodeSet(); - result.setProcessInReverseOrder(true); - for (final NodeProxy current : contextSet) { - NodeProxy ancestor; - if (axis == Constants.ANCESTOR_SELF_AXIS - && test.matches(current)) { + } else { + result = NodeSet.EMPTY_SET; + } + // Caches the result + if (axis != Constants.SELF_AXIS && contextSequence != null + && contextSequence.isCacheable()) { + // TODO : cache *after* removing duplicates ? 
-pb + cached = new CachedResult(contextSequence, contextItem, result); + registerUpdateListener(); + } + // Remove duplicate nodes + result.removeDuplicates(); + // Apply the predicate + result = applyPredicate(contextSequence, result); + + if (context.getProfiler().isEnabled()) { + context.getProfiler().end(this, "", result); + } + // actualReturnType = result.getItemType(); + + return result; + } + + // Avoid unnecessary tests (these should be detected by the parser) + private boolean needsComputation() { + // TODO : log this ? + switch (axis) { + // Certainly not exhaustive + case Constants.ANCESTOR_SELF_AXIS: + case Constants.PARENT_AXIS: + // case Constants.SELF_AXIS: + if (nodeTestType == null) { + nodeTestType = test.getType(); + } + if (nodeTestType != Type.NODE + && nodeTestType != Type.ELEMENT + && nodeTestType != Type.PROCESSING_INSTRUCTION) { + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", "avoid useless computations"); + } + return false; + } + + } + return true; + } + + private Sequence getSelf(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.getSelf(test); + } + if (hasPreloadedData() && !test.isWildcardTest()) { + final NodeSet ns; + if (contextSequence instanceof NodeSet) { + ns = (NodeSet) contextSequence; + } else { + ns = null; + } + + for (final NodeProxy p : currentSet) { + p.addContextNode(contextId, p); + + if (ns != null) { + final NodeProxy np = ns.get(p); + + if (np != null && np.getMatches() != null) { + p.addMatch(np.getMatches()); + } + } + } + return currentSet; + } + + final NodeSet contextSet = contextSequence.toNodeSet(); +// if (test.getType() == Type.PROCESSING_INSTRUCTION) { +// final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); +// 
vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); +// return vset; +// } + + if (test.isWildcardTest()) { + if (nodeTestType == null) { + nodeTestType = test.getType(); + } + + if (Type.subTypeOf(nodeTestType, Type.NODE)) { + if (Expression.NO_CONTEXT_ID != contextId) { + if (contextSet instanceof VirtualNodeSet) { + ((VirtualNodeSet) contextSet).setInPredicate(true); + ((VirtualNodeSet) contextSet).setContextId(contextId); + ((VirtualNodeSet) contextSet).setSelfIsContext(); + } else if (Type.subTypeOf(contextSet.getItemType(), Type.NODE)) { + for (final NodeProxy p : contextSet) { + if (test.matches(p)) { + p.addContextNode(contextId, p); + } + } + } + return contextSet; + } else { + final NewArrayNodeSet results = new NewArrayNodeSet(); + for (final NodeProxy p : contextSet) { + if(test.matches(p)) { + results.add(p); + } + } + return results; + } + } else { + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return vset; + } + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + final NodeSelector selector = new SelfSelector(contextSet, contextId); + return index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), selector, this); + } + } + + protected Sequence getAttributes(final XQueryContext context, final Sequence contextSequence) + throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + if (axis == Constants.DESCENDANT_ATTRIBUTE_AXIS) { + return nodes.getDescendantAttributes(test); + } else { + return nodes.getAttributes(test); + } + } + final NodeSet contextSet = 
contextSequence.toNodeSet(); + if (!hasPreloadedData() && test.isWildcardTest()) { + final NodeSet result = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + ((VirtualNodeSet) result).setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return result; + // if there's just a single known node in the context, it is faster + // do directly search for the attribute in the parent node. + } + if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + if (currentSet == null + || currentDocs == null + || (!optimized && !(docs == currentDocs || docs + .equalDocs(currentDocs)))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + // TODO : why a null selector here ? We have one below ! + currentSet = index.findElementsByTagName(ElementValue.ATTRIBUTE, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + switch (axis) { + case Constants.ATTRIBUTE_AXIS: + return currentSet.selectParentChild(contextSet, NodeSet.DESCENDANT, contextId); + case Constants.DESCENDANT_ATTRIBUTE_AXIS: + return currentSet.selectAncestorDescendant(contextSet, NodeSet.DESCENDANT, false, contextId, + true); + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + } + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + if (!contextSet.getProcessInReverseOrder()) { + return index.findDescendantsByTagName(ElementValue.ATTRIBUTE, test.getName(), axis, docs, contextSet, + contextId, 
this); + } else { + final NodeSelector selector; + switch (axis) { + case Constants.ATTRIBUTE_AXIS: + selector = new ChildSelector(contextSet, contextId); + break; + case Constants.DESCENDANT_ATTRIBUTE_AXIS: + selector = new DescendantSelector(contextSet, contextId); + break; + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + return index.findElementsByTagName(ElementValue.ATTRIBUTE, docs, test.getName(), selector, this); + } + } + } + + private Sequence getChildren(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.getChildren(test); + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + // TODO : understand this. I guess comments should be treated in a + // similar way ? -pb + if ((!hasPreloadedData() && test.isWildcardTest()) || test.getType() == Type.PROCESSING_INSTRUCTION) { + // test is one out of *, text(), node() including + // processing-instruction(targetname) + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return vset; + } + + // IndexStatistics stats = (IndexStatistics) + // context.getBroker().getBrokerPool(). 
+ // getIndexManager().getIndexById(IndexStatistics.ID); + // int parentDepth = stats.getMaxParentDepth(test.getName()); + // LOG.debug("parentDepth for " + test.getName() + ": " + parentDepth); + + if (useDirectChildSelect) { + final NewArrayNodeSet result = new NewArrayNodeSet(); + for (final NodeProxy p : contextSet) { + result.addAll(p.directSelectChild(test.getName(), contextId)); + } + return result; + } else if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + // TODO : understand why this one is different from the other + // ones + if (currentSet == null + || currentDocs == null + || (!optimized && !(docs == currentDocs || docs + .equalDocs(currentDocs)))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + return currentSet.selectParentChild(contextSet, NodeSet.DESCENDANT, contextId); + } + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + if (!contextSet.getProcessInReverseOrder() && !(contextSet instanceof VirtualNodeSet) && + contextSet.getLength() < INDEX_SCAN_THRESHOLD) { + return index.findDescendantsByTagName(ElementValue.ELEMENT, + test.getName(), axis, docs, contextSet, + contextId, parent); + } else { + // if (contextSet instanceof VirtualNodeSet) + // ((VirtualNodeSet)contextSet).realize(); + final NodeSelector selector = new ChildSelector(contextSet, 
contextId); + return index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), selector, this); + } + } + } + + private Sequence getDescendants(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.getDescendants(axis == Constants.DESCENDANT_SELF_AXIS, + test); + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + // TODO : understand this. I guess comments should be treated in a + // similar way ? -pb + if ((!hasPreloadedData() && test.isWildcardTest()) + || test.getType() == Type.PROCESSING_INSTRUCTION) { + // test is one out of *, text(), node() including + // processing-instruction(targetname) + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return vset; + } else if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + // TODO : understand why this one is different from the other + // ones + if (currentSet == null + || currentDocs == null + || (!optimized && !(docs == currentDocs || docs + .equalDocs(currentDocs)))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + + switch (axis) { + case Constants.DESCENDANT_SELF_AXIS: + return currentSet.selectAncestorDescendant(contextSet, NodeSet.DESCENDANT, true, contextId, + true); + case Constants.DESCENDANT_AXIS: + return currentSet.selectAncestorDescendant(contextSet, NodeSet.DESCENDANT, false, contextId, + 
true); + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + } + } else { + final DocumentSet docs = contextSet.getDocumentSet(); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + if (!contextSet.getProcessInReverseOrder() + && (contextSet instanceof VirtualNodeSet || contextSet.getLength() < INDEX_SCAN_THRESHOLD)) { + return index.findDescendantsByTagName(ElementValue.ELEMENT, test.getName(), axis, docs, contextSet, + contextId, this); + } else { + final NodeSelector selector; + switch (axis) { + case Constants.DESCENDANT_SELF_AXIS: + selector = new DescendantOrSelfSelector(contextSet, contextId); + break; + case Constants.DESCENDANT_AXIS: + selector = new DescendantSelector(contextSet, contextId); + break; + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + return index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), selector, this); + } + + } + } + + /** + * Get's the sibling nodes of the context set + * + * @param context a XQueryContext value + * @param contextSequence a NodeSet value + * @return a NodeSet value + */ + protected Sequence getSiblings(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + if (axis == Constants.PRECEDING_SIBLING_AXIS) { + return nodes.getPrecedingSiblings(test); + } else { + return nodes.getFollowingSiblings(test); + } + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + // TODO : understand this. I guess comments should be treated in a + // similar way ? 
-pb + if (test.getType() == Type.PROCESSING_INSTRUCTION) { + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, + test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return vset; + } + + if (test.isWildcardTest()) { + final NewArrayNodeSet result = new NewArrayNodeSet(); + try { + for (final NodeProxy current : contextSet) { + // document-node() does not have any preceding or following elements + if (NodeId.DOCUMENT_NODE.equals(current.getNodeId())) { + continue; + } + + final IEmbeddedXMLStreamReader reader; + final StreamFilter filter; + if (axis == Constants.PRECEDING_SIBLING_AXIS) { + final NodeId startNodeId; + if (NodeId.DOCUMENT_NODE.equals(current.getNodeId().getParentId())) { + // parent would be document-node(), start from document-node()/node()[1] + startNodeId = NodeId.ROOT_NODE; + } else { + startNodeId = current.getNodeId().getParentId().getChild(1); + } + final NodeProxy startNode = new NodeProxy(current.getOwnerDocument(), startNodeId); + + reader = context.getBroker().getXMLStreamReader(startNode, false); + filter = new PrecedingSiblingFilter(test, startNode, current, result, contextId); + } else { + reader = context.getBroker().getXMLStreamReader(current, false); + filter = new FollowingSiblingFilter(test, current, result, contextId); + } + + reader.filter(filter); + } + } catch (final IOException | XMLStreamException e) { + throw new XPathException(this, e); + } + + return result; + } else { + // TODO : no test on preloaded data ? 
+ final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + if (currentSet == null || currentDocs == null || !(docs.equalDocs(currentDocs))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + switch (axis) { + case Constants.PRECEDING_SIBLING_AXIS: + return currentSet.selectPrecedingSiblings(contextSet, contextId); + case Constants.FOLLOWING_SIBLING_AXIS: + return currentSet.selectFollowingSiblings(contextSet, contextId); + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + } + } + } + + /** + * Get the preceding or following axis nodes + * + * @param context the xquery context + * @param contextSequence the context sequence + * + * @return the nodes from the preceding or following axis + * + * @throws XPathException if an error occurs + */ + private Sequence getPrecedingOrFollowing(final XQueryContext context, final Sequence contextSequence) + throws XPathException { + int position = -1; + if (hasPositionalPredicate) { + final Predicate pred = predicates.get(0); + final Sequence seq = pred.preprocess(); + + final NumericValue v = (NumericValue) seq.itemAt(0); + // Non integers return... nothing, not even an error ! 
+ if (!v.hasFractionalPart() && !v.isZero()) { + position = v.getInt(); + } + } + + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + if (hasPositionalPredicate && position > -1) { + applyPredicate = false; + } + + if (axis == Constants.PRECEDING_AXIS) { + return nodes.getPreceding(test, position); + } else { + return nodes.getFollowing(test, position); + } + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + // TODO : understand this. I guess comments should be treated in a + // similar way ? -pb + if (test.getType() == Type.PROCESSING_INSTRUCTION) { + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return vset; + } + + if (test.isWildcardTest()) { + try { + final NodeSet result = new NewArrayNodeSet(); + for (final NodeProxy next : contextSet) { + final NodeList cl = next.getOwnerDocument().getChildNodes(); + for (int j = 0; j < cl.getLength(); j++) { + final NodeHandle node = (NodeHandle) cl.item(j); + final NodeProxy root = new NodeProxy(node); + final StreamFilter filter; + if (axis == Constants.PRECEDING_AXIS) { + filter = new PrecedingFilter(test, root, next, result, contextId); + } else { + filter = new FollowingFilter(test, root, next, result, contextId); + } + final IEmbeddedXMLStreamReader reader = context.getBroker().getXMLStreamReader(root, false); + reader.filter(filter); + } + } + return result; + } catch (final XMLStreamException | IOException e) { + throw new XPathException(this, e); + } + } else { + // TODO : no test on preloaded data ? 
+ final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + if (currentSet == null || currentDocs == null || !(docs.equalDocs(currentDocs))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + + if (hasPositionalPredicate) { + try { + applyPredicate = false; + if (axis == Constants.PRECEDING_AXIS) { + return currentSet.selectPreceding(contextSet, position, contextId); + } else { + return currentSet.selectFollowing(contextSet, position, contextId); + } + } catch (final UnsupportedOperationException e) { + if (axis == Constants.PRECEDING_AXIS) { + return currentSet.selectPreceding(contextSet, contextId); + } else { + return currentSet.selectFollowing(contextSet, contextId); + } + } + } else { + if (axis == Constants.PRECEDING_AXIS) { + return currentSet.selectPreceding(contextSet, contextId); + } else { + return currentSet.selectFollowing(contextSet, contextId); + } + } + } + } + } + + /** + * Get the ancestor axis nodes + * + * @param context the xquery context + * @param contextSequence the context sequence + * + * @return the ancestor nodes + * + * @throws XPathException if an error occurs + */ + protected Sequence getAncestors(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.getAncestors(axis == Constants.ANCESTOR_SELF_AXIS, test); + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + if (test.isWildcardTest()) { + final NodeSet result = new NewArrayNodeSet(); + result.setProcessInReverseOrder(true); + for (final 
NodeProxy current : contextSet) { + NodeProxy ancestor; + if (axis == Constants.ANCESTOR_SELF_AXIS && test.matches(current)) { ancestor = new NodeProxy(current); ancestor.setNodeType(Node.ELEMENT_NODE); - final NodeProxy t = result.get(ancestor); - if (t == null) { - if (Expression.NO_CONTEXT_ID != contextId) - {ancestor.addContextNode(contextId, current);} - else - {ancestor.copyContext(current);} - ancestor.addMatches(current); - result.add(ancestor); - } else { - t.addContextNode(contextId, current); - t.addMatches(current); - } - } - NodeId parentID = current.getNodeId().getParentId(); - while (parentID != null) { - ancestor = new NodeProxy(current.getOwnerDocument(), parentID, - Node.ELEMENT_NODE); - // Filter out the temporary nodes wrapper element - if (parentID != NodeId.DOCUMENT_NODE - && !(parentID.getTreeLevel() == 1 && current - .getOwnerDocument().getCollection() - .isTempCollection())) { - if (test.matches(ancestor)) { - final NodeProxy t = result.get(ancestor); - if (t == null) { - if (Expression.NO_CONTEXT_ID != contextId) - {ancestor.addContextNode(contextId, current);} - else - {ancestor.copyContext(current);} - ancestor.addMatches(current); - result.add(ancestor); - } else { - t.addContextNode(contextId, current); - t.addMatches(current); - } - } - } - parentID = parentID.getParentId(); - } - } - return result; - } else if (hasPreloadedData()) { - DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null - || currentDocs == null - || (!optimized && !(docs == currentDocs || docs - .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - 
registerUpdateListener(); - } - switch (axis) { - case Constants.ANCESTOR_SELF_AXIS: - return currentSet.selectAncestors(contextSet, true, - contextId); - case Constants.ANCESTOR_AXIS: - return currentSet.selectAncestors(contextSet, false, - contextId); - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} + final NodeProxy t = result.get(ancestor); + if (t == null) { + if (Expression.NO_CONTEXT_ID != contextId) { + ancestor.addContextNode(contextId, current); + } else { + ancestor.copyContext(current); + } + ancestor.addMatches(current); + result.add(ancestor); + } else { + t.addContextNode(contextId, current); + t.addMatches(current); + } + } + + NodeId parentID = current.getNodeId().getParentId(); + while (parentID != null) { + ancestor = new NodeProxy(current.getOwnerDocument(), parentID, Node.ELEMENT_NODE); + // Filter out the temporary nodes wrapper element + if (parentID != NodeId.DOCUMENT_NODE + && !(parentID.getTreeLevel() == 1 && current.getOwnerDocument().getCollection().isTempCollection())) { + if (test.matches(ancestor)) { + final NodeProxy t = result.get(ancestor); + if (t == null) { + if (Expression.NO_CONTEXT_ID != contextId) { + ancestor.addContextNode(contextId, current); + } else { + ancestor.copyContext(current); + } + ancestor.addMatches(current); + result.add(ancestor); + } else { + t.addContextNode(contextId, current); + t.addMatches(current); + } + } + } + parentID = parentID.getParentId(); + } + } + return result; + } else if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + if (currentSet == null || currentDocs == null || (!optimized 
&& !(docs == currentDocs || docs.equalDocs(currentDocs)))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + switch (axis) { + case Constants.ANCESTOR_SELF_AXIS: + return currentSet.selectAncestors(contextSet, true, contextId); + case Constants.ANCESTOR_AXIS: + return currentSet.selectAncestors(contextSet, false, contextId); + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + } + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } return index.findAncestorsByTagName(ElementValue.ELEMENT, test.getName(), axis, docs, contextSet, contextId); - } - } - - /** - * The method getParents - * - * @param context - * a XQueryContext value - * @param contextSequence - * a Sequence value - * @return a NodeSet value - */ - protected Sequence getParents(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.getParents(test); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - if (test.isWildcardTest()) { - final NodeSet temp = contextSet.getParents(contextId); - final NodeSet result = new NewArrayNodeSet(); - NodeProxy p; - for (final Iterator i = temp.iterator(); i.hasNext();) { - p = i.next(); - - if (test.matches(p)) { - result.add(p); - } - } - return result; - } 
else if (hasPreloadedData()) { - DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null - || currentDocs == null - || (!optimized && !(docs == currentDocs || docs - .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - currentSet = index.findElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - return contextSet.selectParentChild(currentSet, - NodeSet.ANCESTOR); - } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} + } + } + + /** + * Get the parent axis nodes + * + * @param context the xquery context + * @param contextSequence the context sequence + * + * @return the parent nodes + * + * @throws XPathException if an error occurs + */ + protected Sequence getParents(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.getParents(test); + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + if (test.isWildcardTest()) { + final NodeSet temp = contextSet.getParents(contextId); + final NodeSet result = new NewArrayNodeSet(); + for (final NodeProxy p : temp) { + if (test.matches(p)) { + result.add(p); + } + } + return result; + } else if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + if (currentSet == null || currentDocs == null || 
(!optimized && !(docs == currentDocs || docs.equalDocs(currentDocs)))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + currentSet = index.findElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + return contextSet.selectParentChild(currentSet, NodeSet.ANCESTOR); + } + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } return index.findAncestorsByTagName(ElementValue.ELEMENT, test.getName(), Constants.PARENT_AXIS, docs, contextSet, contextId); - } - } - - /** - * The method getDocumentSet - * - * @param contextSet - * a NodeSet value - * @return a DocumentSet value - */ - protected DocumentSet getDocumentSet(NodeSet contextSet) { - DocumentSet ds = getContextDocSet(); - if (ds == null) - {ds = contextSet.getDocumentSet();} - return ds; - } - - /** - * The method getParent - * - * @return an Expression value - */ - public Expression getParentExpression() { - return this.parent; - } - - /** - * The method registerUpdateListener - * - */ - protected void registerUpdateListener() { - if (listener == null) { - listener = new UpdateListener() { - @Override - public void documentUpdated(DocumentImpl document, int event) { - cached = null; - if (document == null || event == UpdateListener.ADD - || event == UpdateListener.REMOVE) { - // clear all - currentDocs = null; - currentSet = null; - } else { - if (currentDocs != null - && currentDocs - .contains(document.getDocId())) { - currentDocs = null; - currentSet = null; 
- } - } - } - - @Override - public void nodeMoved(NodeId oldNodeId, NodeHandle newNode) { - } - - @Override - public void unsubscribe() { - LocationStep.this.listener = null; - } - - @Override - public void debug() { - LOG.debug("UpdateListener: Line: " - + LocationStep.this.toString() + "; id: " - + LocationStep.this.getExpressionId()); - } - }; - context.registerUpdateListener(listener); - } - } - - /** - * The method accept - * - * @param visitor - * an ExpressionVisitor value - */ - public void accept(ExpressionVisitor visitor) { - visitor.visitLocationStep(this); - } - - public void setParent(Expression parent) { - this.parent = parent; - } - - /* - * (non-Javadoc) - * - * @see org.exist.xquery.Step#resetState() - */ - public void resetState(boolean postOptimization) { - super.resetState(postOptimization); - if (!postOptimization) { - // TODO : preloadedData = false ? - // No : introduces a regression in testMatchCount - // TODO : Investigate... - currentSet = null; - currentDocs = null; - optimized = false; - cached = null; - listener = null; - } - } - - private static class FollowingSiblingFilter implements StreamFilter { - - private NodeTest test; - private NodeProxy referenceNode; - private NodeSet result; - private int contextId; - private boolean isAfter = false; - - private FollowingSiblingFilter(NodeTest test, NodeProxy referenceNode, - NodeSet result, int contextId) { - this.test = test; - this.referenceNode = referenceNode; - this.result = result; - this.contextId = contextId; - } - - public boolean accept(XMLStreamReader reader) { - if (reader.getEventType() == XMLStreamReader.END_ELEMENT) { - return true; - } - final NodeId refId = referenceNode.getNodeId(); - final NodeId currentId = (NodeId) reader - .getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); - if (!isAfter) { - isAfter = currentId.equals(refId); - } else if (currentId.getTreeLevel() == refId.getTreeLevel() - && test.matches(reader)) { - NodeProxy sibling = 
result.get(referenceNode.getOwnerDocument(), - currentId); - if (sibling == null) { - sibling = new NodeProxy(referenceNode.getOwnerDocument(), - currentId, StaXUtil.streamType2DOM(reader - .getEventType()), - ((EmbeddedXMLStreamReader) reader) - .getCurrentPosition()); - - if (Expression.IGNORE_CONTEXT != contextId) { - if (Expression.NO_CONTEXT_ID == contextId) { - sibling.copyContext(referenceNode); - } else { - sibling.addContextNode(contextId, referenceNode); - } - } - result.add(sibling); - } else if (Expression.NO_CONTEXT_ID != contextId) - {sibling.addContextNode(contextId, referenceNode);} - } - return true; - } - } - - private static class PrecedingSiblingFilter implements StreamFilter { - - private NodeTest test; - private NodeProxy referenceNode; - private NodeSet result; - private int contextId; - - private PrecedingSiblingFilter(NodeTest test, NodeProxy referenceNode, - NodeSet result, int contextId) { - this.test = test; - this.referenceNode = referenceNode; - this.result = result; - this.contextId = contextId; - } - - public boolean accept(XMLStreamReader reader) { - if (reader.getEventType() == XMLStreamReader.END_ELEMENT) { - return true; - } - final NodeId refId = referenceNode.getNodeId(); - final NodeId currentId = (NodeId) reader - .getProperty(EmbeddedXMLStreamReader.PROPERTY_NODE_ID); - if (currentId.equals(refId)) { - return false; - } else if (currentId.getTreeLevel() == refId.getTreeLevel() - && test.matches(reader)) { - NodeProxy sibling = result.get(referenceNode.getOwnerDocument(), - currentId); - if (sibling == null) { - sibling = new NodeProxy(referenceNode.getOwnerDocument(), - currentId, StaXUtil.streamType2DOM(reader - .getEventType()), - ((EmbeddedXMLStreamReader) reader) - .getCurrentPosition()); - if (Expression.IGNORE_CONTEXT != contextId) { - if (Expression.NO_CONTEXT_ID == contextId) { - sibling.copyContext(referenceNode); - } else { - sibling.addContextNode(contextId, referenceNode); - } - } - result.add(sibling); - } else 
if (Expression.NO_CONTEXT_ID != contextId) - {sibling.addContextNode(contextId, referenceNode);} - - } - return true; - } - } - - private static class FollowingFilter implements StreamFilter { - - private NodeTest test; - private NodeProxy referenceNode; - private NodeSet result; - private int contextId; - private boolean isAfter = false; - - private FollowingFilter(NodeTest test, NodeProxy referenceNode, - NodeSet result, int contextId) { - this.test = test; - this.referenceNode = referenceNode; - this.result = result; - this.contextId = contextId; - } - - public boolean accept(XMLStreamReader reader) { - if (reader.getEventType() == XMLStreamReader.END_ELEMENT) - {return true;} - final NodeId refId = referenceNode.getNodeId(); - final NodeId currentId = (NodeId) reader - .getProperty(EmbeddedXMLStreamReader.PROPERTY_NODE_ID); - if (!isAfter) - {isAfter = currentId.compareTo(refId) > 0 - && !currentId.isDescendantOf(refId);} - if (isAfter && !refId.isDescendantOf(currentId) - && test.matches(reader)) { - final NodeProxy proxy = new NodeProxy(referenceNode.getOwnerDocument(), - currentId, StaXUtil.streamType2DOM(reader - .getEventType()), - ((EmbeddedXMLStreamReader) reader).getCurrentPosition()); - if (Expression.IGNORE_CONTEXT != contextId) { - if (Expression.NO_CONTEXT_ID == contextId) { - proxy.copyContext(referenceNode); - } else { - proxy.addContextNode(contextId, referenceNode); - } - } - result.add(proxy); - } - return true; - } - } - - private static class PrecedingFilter implements StreamFilter { - - private NodeTest test; - private NodeProxy referenceNode; - private NodeSet result; - private int contextId; - - private PrecedingFilter(NodeTest test, NodeProxy referenceNode, - NodeSet result, int contextId) { - this.test = test; - this.referenceNode = referenceNode; - this.result = result; - this.contextId = contextId; - } - - public boolean accept(XMLStreamReader reader) { - if (reader.getEventType() == XMLStreamReader.END_ELEMENT) - {return true;} - 
final NodeId refId = referenceNode.getNodeId(); - final NodeId currentId = (NodeId) reader - .getProperty(EmbeddedXMLStreamReader.PROPERTY_NODE_ID); - if (currentId.compareTo(refId) >= 0) - {return false;} - if (!refId.isDescendantOf(currentId) && test.matches(reader)) { - final NodeProxy proxy = new NodeProxy(referenceNode.getOwnerDocument(), - currentId, StaXUtil.streamType2DOM(reader - .getEventType()), - ((EmbeddedXMLStreamReader) reader).getCurrentPosition()); - if (Expression.IGNORE_CONTEXT != contextId) { - if (Expression.NO_CONTEXT_ID == contextId) { - proxy.copyContext(referenceNode); - } else { - proxy.addContextNode(contextId, referenceNode); - } - } - result.add(proxy); - } - return true; - } - } - - public Boolean match(Sequence contextSequence, Item contextItem) - throws XPathException { - if (context.getProfiler().isEnabled()) { - context.getProfiler().start(this); - context.getProfiler().message(this, Profiler.DEPENDENCIES, - "DEPENDENCIES", - Dependency.getDependenciesName(this.getDependencies())); - if (contextSequence != null) - {context.getProfiler().message(this, Profiler.START_SEQUENCES, - "CONTEXT SEQUENCE", contextSequence);} - if (contextItem != null) - {context.getProfiler().message(this, Profiler.START_SEQUENCES, - "CONTEXT ITEM", contextItem.toSequence());} - } - - Boolean result; - if (needsComputation()) { - if (contextSequence == null) - {throw new XPathException(this, - ErrorCodes.XPDY0002, "Undefined context sequence for '" - + this.toString() + "'");} - switch (axis) { - case Constants.DESCENDANT_AXIS: - case Constants.DESCENDANT_SELF_AXIS: - result = null;//getDescendants(context, contextSequence); - break; - case Constants.CHILD_AXIS: - // VirtualNodeSets may have modified the axis ; checking the - // type - // TODO : further checks ? 
- if (this.test.getType() == Type.ATTRIBUTE) { - this.axis = Constants.ATTRIBUTE_AXIS; - result = matchAttributes(context, contextSequence); - } else { - result = matchChildren(context, contextItem.toSequence());//matchChildren(context, contextSequence); - } - break; - case Constants.ANCESTOR_SELF_AXIS: - case Constants.ANCESTOR_AXIS: - result = null;//getAncestors(context, contextSequence); - break; - case Constants.PARENT_AXIS: - result = null;//getParents(context, contextSequence); - break; - case Constants.SELF_AXIS: + } + } + + /** + * Get the document set + * + * @param contextSet the context set + * @return the document set + */ + protected DocumentSet getDocumentSet(final NodeSet contextSet) { + DocumentSet ds = getContextDocSet(); + if (ds == null) { + ds = contextSet.getDocumentSet(); + } + return ds; + } + + /** + * Get the parent expression + * + * @return the parent expression + */ + public Expression getParentExpression() { + return this.parent; + } + + /** + * Register the update listener + */ + protected void registerUpdateListener() { + if (listener == null) { + listener = new UpdateListener() { + @Override + public void documentUpdated(final DocumentImpl document, final int event) { + cached = null; + if (document == null || event == UpdateListener.ADD || event == UpdateListener.REMOVE) { + // clear all + currentDocs = null; + currentSet = null; + } else { + if (currentDocs != null && currentDocs.contains(document.getDocId())) { + currentDocs = null; + currentSet = null; + } + } + } + + @Override + public void nodeMoved(final NodeId oldNodeId, final NodeHandle newNode) { + //no-op + } + + @Override + public void unsubscribe() { + LocationStep.this.listener = null; + } + + @Override + public void debug() { + if (LOG.isDebugEnabled()) { + LOG.debug("UpdateListener: Line: " + + LocationStep.this.toString() + "; id: " + + LocationStep.this.getExpressionId()); + } + } + }; + context.registerUpdateListener(listener); + } + } + + @Override + public void 
accept(final ExpressionVisitor visitor) { + visitor.visitLocationStep(this); + } + + /** + * Set the parent expression + * + * @param parent the parent expression + */ + public void setParent(final Expression parent) { + this.parent = parent; + } + + @Override + public void resetState(final boolean postOptimization) { + super.resetState(postOptimization); + if (!postOptimization) { + // TODO : preloadedData = false ? + // No : introduces a regression in testMatchCount + // TODO : Investigate... + currentSet = null; + currentDocs = null; + optimized = false; + cached = null; + listener = null; + } + } + + private static class FollowingSiblingFilter implements StreamFilter { + private final NodeTest test; + private final NodeProxy start; + private final int level; + private final NodeSet result; + private final int contextId; + private boolean sibling = false; + + private FollowingSiblingFilter(final NodeTest test, final NodeProxy start, final NodeSet result, + final int contextId) { + this.test = test; + this.start = start; + this.level = start.getNodeId().getTreeLevel(); + this.result = result; + this.contextId = contextId; + } + + @Override + public boolean accept(final XMLStreamReader reader) { + final NodeId currentId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + final int currentLevel = currentId.getTreeLevel(); + + if (!sibling) { + // skip over the start node to the first sibling + sibling = currentId.equals(start.getNodeId()); + + } else if (currentLevel == level && !reader.isEndElement() && test.matches(reader)) { + // sibling which matches the test + NodeProxy sibling = result.get(start.getOwnerDocument(), currentId); + if (sibling == null) { + sibling = new NodeProxy(start.getOwnerDocument(), currentId, + StaXUtil.streamType2DOM(reader.getEventType()), ((EmbeddedXMLStreamReader) reader).getCurrentPosition()); + + if (Expression.IGNORE_CONTEXT != contextId) { + if (Expression.NO_CONTEXT_ID == contextId) { + 
sibling.copyContext(start); + } else { + sibling.addContextNode(contextId, start); + } + } + result.add(sibling); + } else if (Expression.NO_CONTEXT_ID != contextId) { + sibling.addContextNode(contextId, start); + } + + } else if (currentLevel < level) { + // exited the parent node, so stop filtering + return false; + } + + return true; + } + } + + private static class PrecedingSiblingFilter implements StreamFilter { + private final NodeTest test; + private final int level; + private final NodeProxy referenceNode; + private final NodeSet result; + private final int contextId; + + private PrecedingSiblingFilter(final NodeTest test, final NodeProxy start, final NodeProxy referenceNode, + final NodeSet result, final int contextId) { + this.test = test; + this.level = start.getNodeId().getTreeLevel(); + this.referenceNode = referenceNode; + this.result = result; + this.contextId = contextId; + } + + @Override + public boolean accept(final XMLStreamReader reader) { + final NodeId currentId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + + final NodeId refId = referenceNode.getNodeId(); + if (currentId.equals(refId)) { + // reached the reference node + return false; + } + + if (currentId.getTreeLevel() == level && !reader.isEndElement() && test.matches(reader)) { + // sibling which matches the test + NodeProxy sibling = result.get(referenceNode.getOwnerDocument(), currentId); + if (sibling == null) { + sibling = new NodeProxy(referenceNode.getOwnerDocument(), currentId, + StaXUtil.streamType2DOM(reader.getEventType()), ((EmbeddedXMLStreamReader) reader).getCurrentPosition()); + if (Expression.IGNORE_CONTEXT != contextId) { + if (Expression.NO_CONTEXT_ID == contextId) { + sibling.copyContext(referenceNode); + } else { + sibling.addContextNode(contextId, referenceNode); + } + } + result.add(sibling); + } else if (Expression.NO_CONTEXT_ID != contextId) { + sibling.addContextNode(contextId, referenceNode); + } + } + + return true; + } + } + + 
private static class FollowingFilter implements StreamFilter { + private final NodeTest test; + private final NodeProxy root; + private final NodeProxy referenceNode; + private final NodeSet result; + private final int contextId; + private boolean isAfter = false; + + private FollowingFilter(final NodeTest test, final NodeProxy root, final NodeProxy referenceNode, final NodeSet result, + final int contextId) { + this.test = test; + this.root = root; + this.referenceNode = referenceNode; + this.result = result; + this.contextId = contextId; + } + + @Override + public boolean accept(final XMLStreamReader reader) { + final NodeId currentId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + if (reader.getEventType() == XMLStreamReader.END_ELEMENT) { + if (currentId.getTreeLevel() == root.getNodeId().getTreeLevel()) { + // exited the root element, so stop filtering + return false; + } + + return true; + } + + final NodeId refId = referenceNode.getNodeId(); + + if (!isAfter) { + isAfter = currentId.compareTo(refId) > 0 && !currentId.isDescendantOf(refId); + } + + if (isAfter && !refId.isDescendantOf(currentId) && test.matches(reader)) { + final NodeProxy proxy = new NodeProxy(referenceNode.getOwnerDocument(), currentId, + StaXUtil.streamType2DOM(reader.getEventType()), ((EmbeddedXMLStreamReader) reader).getCurrentPosition()); + if (Expression.IGNORE_CONTEXT != contextId) { + if (Expression.NO_CONTEXT_ID == contextId) { + proxy.copyContext(referenceNode); + } else { + proxy.addContextNode(contextId, referenceNode); + } + } + result.add(proxy); + } + return true; + } + } + + private static class PrecedingFilter implements StreamFilter { + private final NodeTest test; + private final NodeProxy root; + private final NodeProxy referenceNode; + private final NodeSet result; + private final int contextId; + + private PrecedingFilter(final NodeTest test, final NodeProxy root, final NodeProxy referenceNode, final NodeSet result, + final int contextId) { + 
this.test = test; + this.root = root; + this.referenceNode = referenceNode; + this.result = result; + this.contextId = contextId; + } + + @Override + public boolean accept(final XMLStreamReader reader) { + final NodeId currentId = (NodeId) reader.getProperty(ExtendedXMLStreamReader.PROPERTY_NODE_ID); + + if (reader.getEventType() == XMLStreamReader.END_ELEMENT) { + if (currentId.getTreeLevel() == root.getNodeId().getTreeLevel()) { + // exited the root element, so stop filtering + return false; + } + + return true; + } + + final NodeId refId = referenceNode.getNodeId(); + if (currentId.compareTo(refId) >= 0) { + return false; + } + + if (!refId.isDescendantOf(currentId) && test.matches(reader)) { + final NodeProxy proxy = new NodeProxy(referenceNode.getOwnerDocument(), currentId, + StaXUtil.streamType2DOM(reader.getEventType()), ((EmbeddedXMLStreamReader) reader).getCurrentPosition()); + if (Expression.IGNORE_CONTEXT != contextId) { + if (Expression.NO_CONTEXT_ID == contextId) { + proxy.copyContext(referenceNode); + } else { + proxy.addContextNode(contextId, referenceNode); + } + } + result.add(proxy); + } + return true; + } + } + + @Override + public Boolean match(final Sequence contextSequence, final Item contextItem) throws XPathException { + if (context.getProfiler().isEnabled()) { + context.getProfiler().start(this); + context.getProfiler().message(this, Profiler.DEPENDENCIES, + "DEPENDENCIES", + Dependency.getDependenciesName(this.getDependencies())); + if (contextSequence != null) { + context.getProfiler().message(this, Profiler.START_SEQUENCES, + "CONTEXT SEQUENCE", contextSequence); + } + if (contextItem != null) { + context.getProfiler().message(this, Profiler.START_SEQUENCES, + "CONTEXT ITEM", contextItem.toSequence()); + } + } + + Boolean result; + if (needsComputation()) { + if (contextSequence == null) { + throw new XPathException(this, ErrorCodes.XPDY0002, "Undefined context sequence for '" + + this.toString() + "'"); + } + switch (axis) { + case 
Constants.DESCENDANT_AXIS: + case Constants.DESCENDANT_SELF_AXIS: + result = null;//getDescendants(context, contextSequence); + break; + + case Constants.CHILD_AXIS: + // VirtualNodeSets may have modified the axis ; checking the + // type + // TODO : further checks ? + if (this.test.getType() == Type.ATTRIBUTE) { + this.axis = Constants.ATTRIBUTE_AXIS; + result = matchAttributes(context, contextSequence); + } else { + result = matchChildren(context, contextItem.toSequence());//matchChildren(context, contextSequence); + } + break; + + case Constants.ANCESTOR_SELF_AXIS: + case Constants.ANCESTOR_AXIS: + result = null;//getAncestors(context, contextSequence); + break; + + case Constants.PARENT_AXIS: + result = null;//getParents(context, contextSequence); + break; + + case Constants.SELF_AXIS: // if (!(contextSequence instanceof VirtualNodeSet) // && Type.subTypeOf(contextSequence.getItemType(), // Type.ATOMIC)) { @@ -1649,284 +1522,280 @@ public Boolean match(Sequence contextSequence, Item contextItem) // + " cannot be applied to an atomic value."); // result = null;//contextSequence; // } else { - result = matchSelf(context, contextItem.toSequence()); + result = matchSelf(context, contextItem.toSequence()); // } - break; - case Constants.ATTRIBUTE_AXIS: - case Constants.DESCENDANT_ATTRIBUTE_AXIS: - result = null;//getAttributes(context, contextSequence); - break; - case Constants.PRECEDING_AXIS: - result = null;//getPreceding(context, contextSequence); - break; - case Constants.FOLLOWING_AXIS: - result = null;//getFollowing(context, contextSequence); - break; - case Constants.PRECEDING_SIBLING_AXIS: - case Constants.FOLLOWING_SIBLING_AXIS: - result = null;//getSiblings(context, contextSequence); - break; - default: - throw new IllegalArgumentException("Unsupported axis specified"); - } - } else { - result = null;//NodeSet.EMPTY_SET; - } - - result = matchPredicate(contextSequence, (Node)contextItem, result); - - if (context.getProfiler().isEnabled()) - 
{context.getProfiler().end(this, "", null);} - // actualReturnType = result.getItemType(); - - return result; - } - - private Boolean matchPredicate(Sequence contextSequence, Node contextItem, - Boolean result) throws XPathException { - - if (result == null) {return false;} - - if (!result) - {return result;} - - if (contextSequence == null) - {return false;} - - if (predicates.size() == 0) - {return result;} - - Predicate pred; - - for (final Iterator i = predicates.iterator(); i.hasNext();) { + break; + + case Constants.ATTRIBUTE_AXIS: + case Constants.DESCENDANT_ATTRIBUTE_AXIS: + result = null;//getAttributes(context, contextSequence); + break; + + case Constants.PRECEDING_AXIS: + result = null;//getPreceding(context, contextSequence); + break; + case Constants.FOLLOWING_AXIS: + result = null;//getFollowing(context, contextSequence); + break; + + case Constants.PRECEDING_SIBLING_AXIS: + case Constants.FOLLOWING_SIBLING_AXIS: + result = null;//getSiblings(context, contextSequence); + break; + + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + } else { + result = null;//NodeSet.EMPTY_SET; + } + + result = matchPredicate(contextSequence, (Node) contextItem, result); + + if (context.getProfiler().isEnabled()) { + context.getProfiler().end(this, "", null); + } + // actualReturnType = result.getItemType(); + + return result; + } + + private Boolean matchPredicate(final Sequence contextSequence, final Node contextItem, Boolean result) + throws XPathException { + + if (result == null) { + return false; + } + + if (!result) { + return result; + } + + if (contextSequence == null) { + return false; + } + + if (predicates.size() == 0) { + return result; + } + + for (final Predicate pred : predicates) { // && (result instanceof VirtualNodeSet || !result.isEmpty());) { - // TODO : log and/or profile ? - pred = i.next(); - pred.setContextDocSet(getContextDocSet()); + // TODO : log and/or profile ? 
+ pred.setContextDocSet(getContextDocSet()); + + //result = pred.evalPredicate(outerSequence, result, axis); + result = pred.matchPredicate(contextSequence, (Item) contextItem, axis); - //result = pred.evalPredicate(outerSequence, result, axis); - result = pred.matchPredicate(contextSequence, (Item)contextItem, axis); + if (!result) { + return false; + } - if (!result) - {return false;} - - // subsequent predicates operate on the result of the previous one + // subsequent predicates operate on the result of the previous one // outerSequence = null; - } - return result; - } - - private Boolean matchSelf(XQueryContext context, Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.matchSelf(test); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - if (test.getType() == Type.PROCESSING_INSTRUCTION) { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return !vset.isEmpty(); - } - - if (test.isWildcardTest()) { - if (nodeTestType == null) { - nodeTestType = Integer.valueOf(test.getType()); - } - if (Type.subTypeOf(nodeTestType.intValue(), Type.NODE)) { - if (Expression.NO_CONTEXT_ID != contextId) { - if (contextSet instanceof VirtualNodeSet) { - ((VirtualNodeSet) contextSet).setInPredicate(true); - ((VirtualNodeSet) contextSet).setContextId(contextId); - ((VirtualNodeSet) contextSet).setSelfIsContext(); - } else if (Type.subTypeOf(contextSet.getItemType(), - Type.NODE)) { - - for (final NodeProxy p : contextSet) { - if (test.matches(p)) - {return true;} - } - } - } - return false; - } else { - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), - axis, test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return !vset.isEmpty(); - } - } else { - final DocumentSet docs 
= getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} - final NodeSelector selector = new SelfSelector(contextSet, contextId); - return index.matchElementsByTagName(ElementValue.ELEMENT, docs, test - .getName(), selector); - } - } - - protected Boolean matchChildren(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - return nodes.matchChildren(test); - } - final NodeSet contextSet = contextSequence.toNodeSet(); - // TODO : understand this. I guess comments should be treated in a - // similar way ? -pb - if (test.isWildcardTest() - || test.getType() == Type.PROCESSING_INSTRUCTION) { - // test is one out of *, text(), node() including - // processing-instruction(targetname) - final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return !vset.isEmpty(); - } - - // IndexStatistics stats = (IndexStatistics) - // context.getBroker().getBrokerPool(). 
- // getIndexManager().getIndexById(IndexStatistics.ID); - // int parentDepth = stats.getMaxParentDepth(test.getName()); - // LOG.debug("parentDepth for " + test.getName() + ": " + parentDepth); - - if (useDirectChildSelect) { - //NewArrayNodeSet result = new NewArrayNodeSet(); - for (final NodeProxy p : contextSet) { - if (p.directMatchChild(test.getName(), contextId)) - {return true;} - } - return false; - } else if (hasPreloadedData()) { - final DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - // TODO : understand why this one is different from the other - // ones + } + return result; + } + + private Boolean matchSelf(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.matchSelf(test); + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + if (test.getType() == Type.PROCESSING_INSTRUCTION) { + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return !vset.isEmpty(); + } + + if (test.isWildcardTest()) { + if (nodeTestType == null) { + nodeTestType = test.getType(); + } + + if (Type.subTypeOf(nodeTestType, Type.NODE)) { + if (Expression.NO_CONTEXT_ID != contextId) { + if (contextSet instanceof VirtualNodeSet) { + ((VirtualNodeSet) contextSet).setInPredicate(true); + ((VirtualNodeSet) contextSet).setContextId(contextId); + ((VirtualNodeSet) contextSet).setSelfIsContext(); + } else if (Type.subTypeOf(contextSet.getItemType(), Type.NODE)) { + for (final NodeProxy p : contextSet) { + if (test.matches(p)) { + return true; + } + } + } + } + return false; + } else { + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return !vset.isEmpty(); + } + } 
else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + final NodeSelector selector = new SelfSelector(contextSet, contextId); + return index.matchElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), selector); + } + } + + private Boolean matchChildren(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + return nodes.matchChildren(test); + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + // TODO : understand this. I guess comments should be treated in a + // similar way ? -pb + if (test.isWildcardTest() || test.getType() == Type.PROCESSING_INSTRUCTION) { + // test is one out of *, text(), node() including + // processing-instruction(targetname) + final VirtualNodeSet vset = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + vset.setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return !vset.isEmpty(); + } + + // IndexStatistics stats = (IndexStatistics) + // context.getBroker().getBrokerPool(). 
+ // getIndexManager().getIndexById(IndexStatistics.ID); + // int parentDepth = stats.getMaxParentDepth(test.getName()); + // LOG.debug("parentDepth for " + test.getName() + ": " + parentDepth); + + if (useDirectChildSelect) { + //NewArrayNodeSet result = new NewArrayNodeSet(); + for (final NodeProxy p : contextSet) { + if (p.directMatchChild(test.getName(), contextId)) { + return true; + } + } + return false; + } else if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + // TODO : understand why this one is different from the other + // ones // if (currentSet == null // || currentDocs == null // || (!optimized && !(docs == currentDocs || docs // .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - return index.matchElementsByTagName( - ElementValue.ELEMENT, docs, test.getName(), null); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + return index.matchElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), null); // currentDocs = docs; //UNDERSTAND: TODO: registerUpdateListener(); - } + } // return currentSet.selectParentChild(contextSet, // NodeSet.DESCENDANT, contextId); // } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} - if (contextSet instanceof ExtNodeSet - && 
!contextSet.getProcessInReverseOrder()) { - return index.matchDescendantsByTagName(ElementValue.ELEMENT, - test.getName(), axis, docs, (ExtNodeSet) contextSet, - contextId); - } else { - // if (contextSet instanceof VirtualNodeSet) - // ((VirtualNodeSet)contextSet).realize(); - final NodeSelector selector = new ChildSelector(contextSet, contextId); - return index.matchElementsByTagName(ElementValue.ELEMENT, docs, - test.getName(), selector); - } - } - } - - protected boolean matchAttributes(XQueryContext context, - Sequence contextSequence) throws XPathException { - if (!contextSequence.isPersistentSet()) { - final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); - if (axis == Constants.DESCENDANT_ATTRIBUTE_AXIS) - {return nodes.matchDescendantAttributes(test);} - else - {return nodes.matchAttributes(test);} - } - final NodeSet contextSet = contextSequence.toNodeSet(); - if (test.isWildcardTest()) { - final NodeSet result = new VirtualNodeSet(context.getBroker(), axis, - test, contextId, contextSet); - ((VirtualNodeSet) result) - .setInPredicate(Expression.NO_CONTEXT_ID != contextId); - return !result.isEmpty(); - // if there's just a single known node in the context, it is faster - // do directly search for the attribute in the parent node. - } - if (hasPreloadedData()) { - DocumentSet docs = getDocumentSet(contextSet); - synchronized (context) { - if (currentSet == null - || currentDocs == null - || (!optimized && !(docs == currentDocs || docs - .equalDocs(currentDocs)))) { - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message( - this, - Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() - + "'");} - // TODO : why a null selector here ? We have one below ! 
- currentSet = index.findElementsByTagName( - ElementValue.ATTRIBUTE, docs, test.getName(), null, this); - currentDocs = docs; - registerUpdateListener(); - } - switch (axis) { - case Constants.ATTRIBUTE_AXIS: - return currentSet.matchParentChild(contextSet, - NodeSet.DESCENDANT, contextId); - case Constants.DESCENDANT_ATTRIBUTE_AXIS: - return currentSet.matchAncestorDescendant(contextSet, - NodeSet.DESCENDANT, false, contextId, true); - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - } - } else { - final DocumentSet docs = getDocumentSet(contextSet); - final StructuralIndex index = context.getBroker().getStructuralIndex(); - if (context.getProfiler().isEnabled()) - {context.getProfiler().message(this, Profiler.OPTIMIZATIONS, - "OPTIMIZATION", - "Using structural index '" + index.toString() + "'");} - if (contextSet instanceof ExtNodeSet - && !contextSet.getProcessInReverseOrder()) { - return index.matchDescendantsByTagName(ElementValue.ATTRIBUTE, - test.getName(), axis, docs, (ExtNodeSet) contextSet, - contextId); - } else { - NodeSelector selector; - switch (axis) { - case Constants.ATTRIBUTE_AXIS: - selector = new ChildSelector(contextSet, contextId); - break; - case Constants.DESCENDANT_ATTRIBUTE_AXIS: - selector = new DescendantSelector(contextSet, contextId); - break; - default: - throw new IllegalArgumentException( - "Unsupported axis specified"); - } - return index.matchElementsByTagName(ElementValue.ATTRIBUTE, - docs, test.getName(), selector); - } - } - } -} \ No newline at end of file + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + if (contextSet instanceof ExtNodeSet && !contextSet.getProcessInReverseOrder()) { + return 
index.matchDescendantsByTagName(ElementValue.ELEMENT, test.getName(), axis, docs, + (ExtNodeSet) contextSet, contextId); + } else { + // if (contextSet instanceof VirtualNodeSet) + // ((VirtualNodeSet)contextSet).realize(); + final NodeSelector selector = new ChildSelector(contextSet, contextId); + return index.matchElementsByTagName(ElementValue.ELEMENT, docs, test.getName(), selector); + } + } + } + + private boolean matchAttributes(final XQueryContext context, final Sequence contextSequence) throws XPathException { + if (!contextSequence.isPersistentSet()) { + final MemoryNodeSet nodes = contextSequence.toMemNodeSet(); + if (axis == Constants.DESCENDANT_ATTRIBUTE_AXIS) { + return nodes.matchDescendantAttributes(test); + } else { + return nodes.matchAttributes(test); + } + } + + final NodeSet contextSet = contextSequence.toNodeSet(); + if (test.isWildcardTest()) { + final NodeSet result = new VirtualNodeSet(context.getBroker(), axis, test, contextId, contextSet); + ((VirtualNodeSet) result).setInPredicate(Expression.NO_CONTEXT_ID != contextId); + return !result.isEmpty(); + // if there's just a single known node in the context, it is faster + // do directly search for the attribute in the parent node. + } + + if (hasPreloadedData()) { + final DocumentSet docs = getDocumentSet(contextSet); + synchronized (context) { + if (currentSet == null || currentDocs == null || (!optimized && !(docs == currentDocs || docs.equalDocs(currentDocs)))) { + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message( + this, + Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + + "'"); + } + // TODO : why a null selector here ? We have one below ! 
+ currentSet = index.findElementsByTagName(ElementValue.ATTRIBUTE, docs, test.getName(), null, this); + currentDocs = docs; + registerUpdateListener(); + } + + switch (axis) { + case Constants.ATTRIBUTE_AXIS: + return currentSet.matchParentChild(contextSet, NodeSet.DESCENDANT, contextId); + case Constants.DESCENDANT_ATTRIBUTE_AXIS: + return currentSet.matchAncestorDescendant(contextSet, NodeSet.DESCENDANT, false, contextId, true); + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + } + } else { + final DocumentSet docs = getDocumentSet(contextSet); + final StructuralIndex index = context.getBroker().getStructuralIndex(); + if (context.getProfiler().isEnabled()) { + context.getProfiler().message(this, Profiler.OPTIMIZATIONS, + "OPTIMIZATION", + "Using structural index '" + index.toString() + "'"); + } + if (contextSet instanceof ExtNodeSet && !contextSet.getProcessInReverseOrder()) { + return index.matchDescendantsByTagName(ElementValue.ATTRIBUTE, test.getName(), axis, docs, + (ExtNodeSet) contextSet, contextId); + } else { + final NodeSelector selector; + switch (axis) { + case Constants.ATTRIBUTE_AXIS: + selector = new ChildSelector(contextSet, contextId); + break; + case Constants.DESCENDANT_ATTRIBUTE_AXIS: + selector = new DescendantSelector(contextSet, contextId); + break; + default: + throw new IllegalArgumentException("Unsupported axis specified"); + } + return index.matchElementsByTagName(ElementValue.ATTRIBUTE, docs, test.getName(), selector); + } + } + } +} diff --git a/src/org/exist/xquery/ModuleContext.java b/src/org/exist/xquery/ModuleContext.java index b5de3070bee..a1fd4c7e9ac 100644 --- a/src/org/exist/xquery/ModuleContext.java +++ b/src/org/exist/xquery/ModuleContext.java @@ -1,26 +1,26 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2004-2007 The eXist Project + * Copyright (C) 2004-2018 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or * modify 
it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. - * + * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. - * + * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - * - * $Id$ */ package org.exist.xquery; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.exist.debuggee.DebuggeeJoint; import org.exist.dom.persistent.DocumentSet; import org.exist.dom.QName; @@ -29,18 +29,18 @@ import org.exist.storage.UpdateListener; import org.exist.util.FileUtils; import org.exist.xmldb.XmldbURI; -import org.exist.xquery.functions.response.ResponseModule; import org.exist.xquery.value.AnyURIValue; import org.exist.xquery.value.BinaryValue; import org.exist.xquery.value.Sequence; +import javax.annotation.Nullable; import javax.xml.datatype.XMLGregorianCalendar; import java.net.URISyntaxException; import java.util.Iterator; -import java.util.HashMap; import java.util.List; import java.util.Map; + import org.exist.xquery.value.StringValue; import org.exist.xquery.value.ValueSequence; @@ -48,27 +48,28 @@ /** * Subclass of {@link org.exist.xquery.XQueryContext} for * imported modules. 
- * + * * @author wolf */ public class ModuleContext extends XQueryContext { - private XQueryContext parentContext; + private static final Logger LOG = LogManager.getLogger(ModuleContext.class); + + private XQueryContext parentContext; private String modulePrefix; private String moduleNamespace; private final String location; - /** - * @param parentContext - */ - public ModuleContext(XQueryContext parentContext, String modulePrefix, String moduleNamespace, String location) { - super(); - this.moduleNamespace = moduleNamespace; + public ModuleContext(final XQueryContext parentContext, final String modulePrefix, final String moduleNamespace, + final String location) { + super(); this.modulePrefix = modulePrefix; + this.moduleNamespace = moduleNamespace; this.location = location; setParentContext(parentContext); - loadDefaults(getBroker().getConfiguration()); - this.profiler = new Profiler( getBroker().getBrokerPool() ); + + loadDefaults(getBroker().getConfiguration()); + this.profiler = new Profiler(getBroker().getBrokerPool()); } @Override @@ -78,73 +79,75 @@ public Subject getRealUser() { } String getLocation() { - return location; - } - - String getModuleNamespace() { - return moduleNamespace; - } - - public void setModuleNamespace(String prefix, String namespaceURI) { + return location; + } + + String getModuleNamespace() { + return moduleNamespace; + } + + public void setModuleNamespace(final String prefix, final String namespaceURI) { this.modulePrefix = prefix; this.moduleNamespace = namespaceURI; } - void setModulesChanged() { - parentContext.setModulesChanged(); - } - - private void setParentContext(XQueryContext parentContext) { + @Override + protected void setModulesChanged() { + parentContext.setModulesChanged(); + } + + private void setParentContext(final XQueryContext parentContext) { this.parentContext = parentContext; //XXX: raise error on null! 
if (parentContext != null) { - this.db = parentContext.db; - baseURI = parentContext.baseURI; - try { - if (location.startsWith(XmldbURI.XMLDB_URI_PREFIX) || - (location.indexOf(':') < 0 && - parentContext.getModuleLoadPath().startsWith(XmldbURI.XMLDB_URI_PREFIX))) { - // use XmldbURI resolution - unfortunately these are not interpretable as URIs - // because the scheme xmldb:exist: is not a valid URI scheme + this.db = parentContext.db; + this.baseURI = parentContext.baseURI; + try { + if (location.startsWith(XmldbURI.XMLDB_URI_PREFIX) || + (location.indexOf(':') < 0 && + parentContext.getModuleLoadPath().startsWith(XmldbURI.XMLDB_URI_PREFIX))) { + // use XmldbURI resolution - unfortunately these are not interpretable as URIs + // because the scheme xmldb:exist: is not a valid URI scheme final XmldbURI locationUri = XmldbURI.xmldbUriFor(FileUtils.dirname(location)); - if (".".equals(parentContext.getModuleLoadPath())) - {setModuleLoadPath(locationUri.toString());} - else { - try { - final XmldbURI parentLoadUri = XmldbURI.xmldbUriFor(parentContext.getModuleLoadPath()); - final XmldbURI moduleLoadUri = parentLoadUri.resolveCollectionPath(locationUri); - setModuleLoadPath(moduleLoadUri.toString()); - } catch (final URISyntaxException e) { + if (".".equals(parentContext.getModuleLoadPath())) { + setModuleLoadPath(locationUri.toString()); + } else { + try { + final XmldbURI parentLoadUri = XmldbURI.xmldbUriFor(parentContext.getModuleLoadPath()); + final XmldbURI moduleLoadUri = parentLoadUri.resolveCollectionPath(locationUri); + setModuleLoadPath(moduleLoadUri.toString()); + } catch (final URISyntaxException e) { setModuleLoadPath(locationUri.toString()); - } + } } - } else { - String dir = FileUtils.dirname(location); + } else { + final String dir = FileUtils.dirname(location); if (dir.matches("^[a-z]+:.*")) { moduleLoadPath = dir; } else if (".".equals(parentContext.moduleLoadPath)) { - if (! ".".equals(dir)) { - if (dir.startsWith("/")) { - setModuleLoadPath("." 
+ dir); - } else { - setModuleLoadPath("./" + dir); - } - } - } else { - if (dir.startsWith("/")) { - setModuleLoadPath(dir); - } else { - setModuleLoadPath(FileUtils.addPaths(parentContext.getModuleLoadPath(), dir)); - } - } - } - } catch (final URISyntaxException e) { + if (!".".equals(dir)) { + if (dir.startsWith("/")) { + setModuleLoadPath("." + dir); + } else { + setModuleLoadPath("./" + dir); + } + } + } else { + if (dir.startsWith("/")) { + setModuleLoadPath(dir); + } else { + setModuleLoadPath(FileUtils.addPaths(parentContext.getModuleLoadPath(), dir)); + } + } + } + } catch (final URISyntaxException e) { e.printStackTrace(); } } - } + } - public void setModule(String namespaceURI, Module module) { + @Override + public void setModule(final String namespaceURI, final Module module) { if (module == null) { modules.remove(namespaceURI); // unbind the module } else { @@ -153,105 +156,92 @@ public void setModule(String namespaceURI, Module module) { setRootModule(namespaceURI, module); } - XQueryContext getParentContext() { - return parentContext; - } + private XQueryContext getParentContext() { + return parentContext; + } + @Override public boolean hasParent() { return true; } + @Override public XQueryContext getRootContext() { return parentContext.getRootContext(); } - public void updateContext(XQueryContext from) { + @Override + public void updateContext(final XQueryContext from) { if (from.hasParent()) { // TODO: shouldn't this call setParentContext ? 
- sokolov - this.parentContext = ((ModuleContext)from).parentContext; + this.parentContext = ((ModuleContext) from).parentContext; } - - //workaround for shared context issue, remove after fix - try { - final Variable var = from.getRootContext().resolveVariable(ResponseModule.PREFIX + ":response"); - if (var != null) - {declareVariable( - ResponseModule.PREFIX + ":response", - var.getValue() - );} - } catch (final Exception e) { - //ignore if not set - } - - setModule( - ResponseModule.NAMESPACE_URI, - from.getRootContext().getModule(ResponseModule.NAMESPACE_URI) - ); } + @Override public XQueryContext copyContext() { final ModuleContext ctx = new ModuleContext(parentContext, modulePrefix, moduleNamespace, location); copyFields(ctx); try { ctx.declareNamespace(modulePrefix, moduleNamespace); } catch (final XPathException e) { - e.printStackTrace(); + LOG.error(e); } return ctx; } - public void addDynamicOption(String qnameString, String contents) throws XPathException - { - parentContext.addDynamicOption(qnameString, contents); - } - - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#getStaticallyKnownDocuments() - */ - public DocumentSet getStaticallyKnownDocuments() throws XPathException { - return parentContext.getStaticallyKnownDocuments(); - } - - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#getModule(java.lang.String) - */ - public Module getModule(String namespaceURI) { - Module module = super.getModule(namespaceURI); - // TODO: I don't think modules should be able to access their parent context's modules, - // since that breaks lexical scoping. However, it seems that some eXist modules rely on - // this so let's leave it for now. 
(pkaminsk2) - if(module == null) - {module = parentContext.getModule(namespaceURI);} - return module; - } - - protected void setRootModule(String namespaceURI, Module module) { - parentContext.setRootModule(namespaceURI, module); - } - - public Iterator getRootModules() { - return parentContext.getRootModules(); - } - - public Iterator getAllModules() { - return parentContext.getAllModules(); - } - - public Module getRootModule(String namespaceURI) { - return parentContext.getRootModule(namespaceURI); - } - + @Override + public void addDynamicOption(final String name, final String value) throws XPathException { + parentContext.addDynamicOption(name, value); + } + + @Override + public DocumentSet getStaticallyKnownDocuments() throws XPathException { + return parentContext.getStaticallyKnownDocuments(); + } + + @Override + public Module getModule(final String namespaceURI) { + Module module = super.getModule(namespaceURI); + // TODO: I don't think modules should be able to access their parent context's modules, + // since that breaks lexical scoping. However, it seems that some eXist modules rely on + // this so let's leave it for now. 
(pkaminsk2) + if (module == null) { + module = parentContext.getModule(namespaceURI); + } + return module; + } + + @Override + protected void setRootModule(final String namespaceURI, final Module module) { + parentContext.setRootModule(namespaceURI, module); + } + + @Override + public Iterator getRootModules() { + return parentContext.getRootModules(); + } + + @Override + public Iterator getAllModules() { + return parentContext.getAllModules(); + } + + @Override + public Module getRootModule(final String namespaceURI) { + return parentContext.getRootModule(namespaceURI); + } + @Override final protected XPathException moduleLoadException(final String message, final String moduleLocation) throws XPathException { - return moduleLoadException(message, moduleLocation, null); + return moduleLoadException(message, moduleLocation, null); } - + @Override final protected XPathException moduleLoadException(final String message, final String moduleLocation, final Exception e) throws XPathException { - //final String dependantModule = XmldbURI.create(moduleLoadPath).append(location).toString(); String dependantModule; try { - if(location != null && location.startsWith(XmldbURI.LOCAL_DB)) { + if (location != null && location.startsWith(XmldbURI.LOCAL_DB)) { dependantModule = location; } else { dependantModule = XmldbURI.create(getParentContext().getModuleLoadPath(), false).append(location).toString(); @@ -259,121 +249,94 @@ final protected XPathException moduleLoadException(final String message, final S } catch (final Exception ex) { dependantModule = location; } - - if(e == null) { + + if (e == null) { return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(new StringValue(moduleLocation), new StringValue(dependantModule))); } else { return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(new StringValue(moduleLocation), new StringValue(dependantModule)), e); } } - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#getWatchDog() - */ - 
public XQueryWatchDog getWatchDog() { - return parentContext.getWatchDog(); - } + @Override + public XQueryWatchDog getWatchDog() { + return parentContext.getWatchDog(); + } + @Override public Profiler getProfiler() { return parentContext.getProfiler(); } - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#getCalendar() - */ - public XMLGregorianCalendar getCalendar(){ + @Override + public XMLGregorianCalendar getCalendar() { return parentContext.getCalendar(); } - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#getBaseURI() - */ - public AnyURIValue getBaseURI() throws XPathException { - return parentContext.getBaseURI(); - } - - public void setBaseURI(AnyURIValue uri) { + @Override + public AnyURIValue getBaseURI() throws XPathException { + return parentContext.getBaseURI(); + } + + @Override + public void setBaseURI(final AnyURIValue uri) { parentContext.setBaseURI(uri); } - - /** - * Delegate to parent context - * - * @see org.exist.xquery.XQueryContext#setXQueryContextVar(String, Object) - */ - public void setXQueryContextVar(String name, Object XQvar) - { - parentContext.setXQueryContextVar(name, XQvar); + + @Override + public void setXQueryContextVar(final String name, final Object xqVar) { + parentContext.setXQueryContextVar(name, xqVar); } - /** - * Delegate to parent context - * - * @see org.exist.xquery.XQueryContext#getXQueryContextVar(String) - */ - public Object getXQueryContextVar(String name) - { - return(parentContext.getXQueryContextVar(name)); - } - -// /* (non-Javadoc) -// * @see org.exist.xquery.XQueryContext#getBroker() -// */ -// public DBBroker getBroker() { -// return parentContext.getBroker(); -// } - - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#getDocumentBuilder() - */ - public MemTreeBuilder getDocumentBuilder() { - return parentContext.getDocumentBuilder(); - } - - public MemTreeBuilder getDocumentBuilder(boolean explicitCreation) { + @Override + public Object getXQueryContextVar(final String name) { + 
return (parentContext.getXQueryContextVar(name)); + } + + @Override + public MemTreeBuilder getDocumentBuilder() { + return parentContext.getDocumentBuilder(); + } + + @Override + public MemTreeBuilder getDocumentBuilder(final boolean explicitCreation) { return parentContext.getDocumentBuilder(explicitCreation); } - - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#pushDocumentContext() - */ - public void pushDocumentContext() { - parentContext.pushDocumentContext(); - } - public LocalVariable markLocalVariables(boolean newContext) { + @Override + public void pushDocumentContext() { + parentContext.pushDocumentContext(); + } + + @Override + public LocalVariable markLocalVariables(final boolean newContext) { return parentContext.markLocalVariables(newContext); } - public void popLocalVariables(LocalVariable var) { + @Override + public void popLocalVariables(final LocalVariable var) { parentContext.popLocalVariables(var); } - public void popLocalVariables(LocalVariable var, Sequence resultSequence) { + @Override + public void popLocalVariables(final LocalVariable var, final Sequence resultSequence) { parentContext.popLocalVariables(var, resultSequence); } - public LocalVariable declareVariableBinding(LocalVariable var) throws XPathException { + @Override + public LocalVariable declareVariableBinding(final LocalVariable var) throws XPathException { return parentContext.declareVariableBinding(var); } - protected Variable resolveLocalVariable(QName qname) throws XPathException { + @Override + protected Variable resolveLocalVariable(final QName qname) throws XPathException { return parentContext.resolveLocalVariable(qname); } - /** - * Try to resolve a variable. 
- * - * @param qname the qualified name of the variable - * @return the declared Variable object - * @throws XPathException if the variable is unknown - */ - public Variable resolveVariable(QName qname) throws XPathException { - Variable var; - + @Override + public Variable resolveVariable(final QName qname) throws XPathException { // check if the variable is declared local - var = resolveLocalVariable(qname); + Variable var = resolveLocalVariable(qname); // check if the variable is declared in a module if (var == null) { @@ -383,49 +346,65 @@ public Variable resolveVariable(QName qname) throws XPathException { } else { module = getModule(qname.getNamespaceURI()); } - if(module != null) { + if (module != null) { var = module.resolveVariable(qname); } } // check if the variable is declared global - if (var == null) - {var = globalVariables.get(qname);} + if (var == null) { + var = globalVariables.get(qname); + } //if (var == null) // throw new XPathException("variable $" + qname + " is not bound"); return var; } + @Override public Map getVariables() { - return parentContext.getVariables(); + return parentContext.getVariables(); } + @Override public Map getLocalVariables() { - return parentContext.getLocalVariables(); + return parentContext.getLocalVariables(); } + @Override public List getLocalStack() { - return parentContext.getLocalStack(); + return parentContext.getLocalStack(); } - + + @Override public Map getGlobalVariables() { - return parentContext.getGlobalVariables(); + return parentContext.getGlobalVariables(); + } + + @Nullable + @Override + public HttpContext getHttpContext() { + return parentContext.getHttpContext(); } - public void restoreStack(List stack) throws XPathException { - parentContext.restoreStack(stack); + @Override + public void setHttpContext(final HttpContext httpContext) { + parentContext.setHttpContext(httpContext); } - + + @Override + public void restoreStack(final List stack) throws XPathException { + 
parentContext.restoreStack(stack); + } + + @Override public int getCurrentStackSize() { return parentContext.getCurrentStackSize(); } - /* (non-Javadoc) - * @see org.exist.xquery.XQueryContext#popDocumentContext() - */ - public void popDocumentContext() { - parentContext.popDocumentContext(); - } + @Override + public void popDocumentContext() { + parentContext.popDocumentContext(); + } /** * First checks the parent context for in-scope namespaces, @@ -434,14 +413,17 @@ public void popDocumentContext() { * @param prefix the prefix to look up * @return the namespace currently mapped to that prefix */ - public String getURIForPrefix(String prefix) { + @Override + public String getURIForPrefix(final String prefix) { String uri = getInScopeNamespace(prefix); - if (uri != null) - {return uri;} + if (uri != null) { + return uri; + } //TODO : test NS inheritance uri = getInheritedNamespace(prefix); - if (uri != null) - {return uri;} + if (uri != null) { + return uri; + } // Check global declarations return staticNamespaces.get(prefix); } @@ -453,39 +435,47 @@ public String getURIForPrefix(String prefix) { * @param uri the URI to look up * @return a prefix for the URI */ - public String getPrefixForURI(String uri) { + @Override + public String getPrefixForURI(final String uri) { String prefix = getInScopePrefix(uri); - if (prefix != null) - {return prefix;} - //TODO : test the NS inheritance + if (prefix != null) { + return prefix; + } + //TODO : test the NS inheritance prefix = getInheritedPrefix(uri); - if (prefix != null) - {return prefix;} - return staticPrefixes.get(uri); + if (prefix != null) { + return prefix; + } + return staticPrefixes.get(uri); } - public String getInScopeNamespace(String prefix) { + @Override + public String getInScopeNamespace(final String prefix) { return parentContext.getInScopeNamespace(prefix); } - public String getInScopePrefix(String uri) { + @Override + public String getInScopePrefix(final String uri) { return 
parentContext.getInScopePrefix(uri); } - public String getInheritedNamespace(String prefix) { + @Override + public String getInheritedNamespace(final String prefix) { return parentContext.getInheritedNamespace(prefix); } - public String getInheritedPrefix(String uri) { + @Override + public String getInheritedPrefix(final String uri) { return parentContext.getInheritedPrefix(uri); } - public void declareInScopeNamespace(String prefix, String uri) { + @Override + public void declareInScopeNamespace(final String prefix, final String uri) { parentContext.declareInScopeNamespace(prefix, uri); } @Override - public void pushInScopeNamespaces(boolean inherit) { + public void pushInScopeNamespaces(final boolean inherit) { parentContext.pushInScopeNamespaces(inherit); } @@ -499,40 +489,48 @@ public void popInScopeNamespaces() { parentContext.popInScopeNamespaces(); } - public void registerUpdateListener(UpdateListener listener) { - parentContext.registerUpdateListener(listener); - } - - protected void clearUpdateListeners() { - // will be cleared by the parent context - } + @Override + public void registerUpdateListener(final UpdateListener listener) { + parentContext.registerUpdateListener(listener); + } - public DebuggeeJoint getDebuggeeJoint() { - return parentContext.getDebuggeeJoint(); - } - - public boolean isDebugMode() { - return parentContext.isDebugMode(); - } + @Override + protected void clearUpdateListeners() { + // will be cleared by the parent context + } - public void expressionStart(Expression expr) throws TerminatedException { - parentContext.expressionStart(expr); + @Override + public DebuggeeJoint getDebuggeeJoint() { + return parentContext.getDebuggeeJoint(); } - public void expressionEnd(Expression expr) { - parentContext.expressionEnd(expr); + @Override + public boolean isDebugMode() { + return parentContext.isDebugMode(); } - public void stackEnter(Expression expr) throws TerminatedException { - parentContext.stackEnter(expr); + @Override + public void 
expressionStart(final Expression expr) throws TerminatedException { + parentContext.expressionStart(expr); } - public void stackLeave(Expression expr) { - parentContext.stackLeave(expr); + @Override + public void expressionEnd(final Expression expr) { + parentContext.expressionEnd(expr); + } + + @Override + public void stackEnter(final Expression expr) throws TerminatedException { + parentContext.stackEnter(expr); + } + + @Override + public void stackLeave(final Expression expr) { + parentContext.stackLeave(expr); } @Override - public void registerBinaryValueInstance(BinaryValue binaryValue) { + public void registerBinaryValueInstance(final BinaryValue binaryValue) { parentContext.registerBinaryValueInstance(binaryValue); } diff --git a/src/org/exist/xquery/OrderByClause.java b/src/org/exist/xquery/OrderByClause.java index eecdb62ffc5..6d7a3de5c58 100644 --- a/src/org/exist/xquery/OrderByClause.java +++ b/src/org/exist/xquery/OrderByClause.java @@ -99,6 +99,7 @@ public void accept(ExpressionVisitor visitor) { @Override public void resetState(boolean postOptimization) { super.resetState(postOptimization); + returnExpr.resetState(postOptimization); stack.clear(); } } diff --git a/src/org/exist/xquery/PathExpr.java b/src/org/exist/xquery/PathExpr.java index 4b143d28616..c295bc5dda8 100644 --- a/src/org/exist/xquery/PathExpr.java +++ b/src/org/exist/xquery/PathExpr.java @@ -323,7 +323,10 @@ public DocumentSet getDocumentSet() { return null; } - //TODO: @Deprecated //use getSubExpression + /** + * @deprecated use getSubExpression + */ + @Deprecated public Expression getExpression(final int pos) { return steps.isEmpty() ? 
null : steps.get(pos); } @@ -538,4 +541,4 @@ public Expression simplify() { } return this; } -} \ No newline at end of file +} diff --git a/src/org/exist/xquery/Profiler.java b/src/org/exist/xquery/Profiler.java index 06bfc83c7b1..143f65a724c 100644 --- a/src/org/exist/xquery/Profiler.java +++ b/src/org/exist/xquery/Profiler.java @@ -21,7 +21,8 @@ */ package org.exist.xquery; -import java.util.Stack; +import java.util.ArrayDeque; +import java.util.Deque; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -65,7 +66,7 @@ public class Profiler { */ private Logger log = LogManager.getLogger("xquery.profiling"); - private Stack stack = new Stack(); + private Deque stack = new ArrayDeque<>(); private final StringBuilder buf = new StringBuilder(64); @@ -222,13 +223,14 @@ public final void start(Expression expr, String message) { if (!enabled) {return;} - if (stack.size() == 0) { + if (stack.isEmpty()) { log.debug("QUERY START"); } buf.setLength(0); - for (int i = 0; i < stack.size(); i++) - buf.append('\t'); + for (int i = 0; i < stack.size(); i++) { + buf.append('\t'); + } final ProfiledExpr e = new ProfiledExpr(expr); stack.push(e); @@ -240,8 +242,9 @@ public final void start(Expression expr, String message) { if (message != null && !"".equals(message)) { buf.setLength(0); - for (int i = 0; i < stack.size(); i++) - buf.append('\t'); + for (int i = 0; i < stack.size(); i++) { + buf.append('\t'); + } buf.append("MSG\t"); buf.append(message); buf.append("\t"); @@ -275,8 +278,9 @@ public final void end(Expression expr, String message, Sequence result) { if (message != null && !"".equals(message)) { buf.setLength(0); - for (int i = 0; i < stack.size(); i++) - buf.append('\t'); + for (int i = 0; i < stack.size(); i++) { + buf.append('\t'); + } buf.append("MSG\t"); buf.append(message); buf.append("\t"); @@ -287,8 +291,9 @@ public final void end(Expression expr, String message, Sequence result) { if (verbosity > START_SEQUENCES) { 
buf.setLength(0); - for (int i = 0; i < stack.size(); i++) - buf.append('\t'); + for (int i = 0; i < stack.size(); i++) { + buf.append('\t'); + } buf.append("RESULT\t"); /* if (verbosity >= SEQUENCE_DUMP) buf.append(result.toString()); @@ -305,8 +310,9 @@ else if (verbosity >= SEQUENCE_PREVIEW) if (verbosity >= TIME) { buf.setLength(0); - for (int i = 0; i < stack.size(); i++) - buf.append('\t'); + for (int i = 0; i < stack.size(); i++) { + buf.append('\t'); + } buf.append("TIME\t"); buf.append(elapsed).append(" ms"); buf.append("\t"); @@ -316,8 +322,9 @@ else if (verbosity >= SEQUENCE_PREVIEW) } buf.setLength(0); - for (int i = 0; i < stack.size(); i++) - buf.append('\t'); + for (int i = 0; i < stack.size(); i++) { + buf.append('\t'); + } buf.append("END\t"); printPosition(e.expr); buf.append(expr.toString()); @@ -348,8 +355,9 @@ public final void message(Expression expr, int level, String title, Sequence seq {return;} buf.setLength(0); - for (int i = 0; i < stack.size() - 1; i++) - buf.append('\t'); + for (int i = 0; i < stack.size() - 1; i++) { + buf.append('\t'); + } if (title != null && !"".equals(title)) {buf.append(title);} else @@ -374,8 +382,9 @@ public final void message(Expression expr, int level, String title, String messa {return;} buf.setLength(0); - for (int i = 0; i < stack.size() - 1; i++) - buf.append('\t'); + for (int i = 0; i < stack.size() - 1; i++) { + buf.append('\t'); + } if (title != null && !"".equals(title)) {buf.append(title);} else @@ -391,8 +400,9 @@ public final void message(Expression expr, int level, String title, String messa } public void reset() { - if (stack.size() > 0) - {log.debug("QUERY RESET");} + if (!stack.isEmpty()) { + log.debug("QUERY RESET"); + } stack.clear(); if (stats.isEnabled() && stats.hasData()) { save(); diff --git a/src/org/exist/xquery/RangeSequence.java b/src/org/exist/xquery/RangeSequence.java index 77aa8921eb6..87451af7d73 100644 --- a/src/org/exist/xquery/RangeSequence.java +++ 
b/src/org/exist/xquery/RangeSequence.java @@ -13,73 +13,90 @@ public class RangeSequence extends AbstractSequence { - private final static Logger LOG = LogManager.getLogger(AbstractSequence.class); - - private IntegerValue start; - private IntegerValue end; - - public RangeSequence(IntegerValue start, IntegerValue end) { - this.start = start; - this.end = end; - } + private final static Logger LOG = LogManager.getLogger(AbstractSequence.class); - public void add(Item item) throws XPathException { - throw new XPathException("Internal error: adding to an immutable sequence"); - } + private final IntegerValue start; + private final IntegerValue end; - public void addAll(Sequence other) throws XPathException { - throw new XPathException("Internal error: adding to an immutable sequence"); - } + public RangeSequence(final IntegerValue start, final IntegerValue end) { + this.start = start; + this.end = end; + } - public int getItemType() { - return Type.INTEGER; - } + @Override + public void add(final Item item) throws XPathException { + throw new XPathException("Internal error: adding to an immutable sequence"); + } @Override - public SequenceIterator iterate() throws XPathException { - return new RangeSequenceIterator(start.getLong(), end.getLong()); - } + public void addAll(final Sequence other) throws XPathException { + throw new XPathException("Internal error: adding to an immutable sequence"); + } + + public int getItemType() { + return Type.INTEGER; + } @Override - public SequenceIterator unorderedIterator() throws XPathException { - return new RangeSequenceIterator(start.getLong(), end.getLong()); - } + public SequenceIterator iterate() { + return new RangeSequenceIterator(start.getLong(), end.getLong()); + } - public SequenceIterator iterateInReverse() throws XPathException { + @Override + public SequenceIterator unorderedIterator() { + return new RangeSequenceIterator(start.getLong(), end.getLong()); + } + + public SequenceIterator iterateInReverse() { return 
new ReverseRangeSequenceIterator(start.getLong(), end.getLong()); } - private static class RangeSequenceIterator implements SequenceIterator { - private long current; + private static class RangeSequenceIterator implements SequenceIterator { + private long current; private final long end; - public RangeSequenceIterator(final long start, final long end) { - this.current = start; + private RangeSequenceIterator(final long start, final long end) { + this.current = start; this.end = end; - } + } - public Item nextItem() { + @Override + public Item nextItem() { if (current <= end) { return new IntegerValue(current++); } else { return null; } - } + } - public boolean hasNext() { + @Override + public boolean hasNext() { return current <= end; - } - } + } + + @Override + public long skippable() { + return end - current + 1; + } + + @Override + public long skip(final long n) { + final long skip = Math.min(n, end - current + 1); + current += skip; + return skip; + } + } private static class ReverseRangeSequenceIterator implements SequenceIterator { private final long start; private long current; - public ReverseRangeSequenceIterator(final long start, final long end) { + private ReverseRangeSequenceIterator(final long start, final long end) { this.start = start; this.current = end; } + @Override public Item nextItem() { if (current >= start) { return new IntegerValue(current--); @@ -88,54 +105,100 @@ public Item nextItem() { } } + @Override public boolean hasNext() { return current >= start; } + + @Override + public long skippable() { + return current - start + 1; + } + + @Override + public long skip(final long n) { + final long skip = Math.min(n, current - start + 1); + current -= skip; + return skip; + } } - - public int getItemCount() { - if (start.compareTo(end) > 0) - {return 0;} - try { - return ((IntegerValue) end.minus(start)).getInt() + 1; - } catch (final XPathException e) { - LOG.warn("Unexpected exception when processing result of range expression: " + 
e.getMessage(), e); - return 0; - } - } - public boolean isEmpty() { - return getItemCount() == 0; - } + @Override + public long getItemCountLong() { + if (start.compareTo(end) > 0) { + return 0; + } + try { + return ((IntegerValue) end.minus(start)).getLong() + 1; + } catch (final XPathException e) { + LOG.warn("Unexpected exception when processing result of range expression: " + e.getMessage(), e); + return 0; + } + } - public boolean hasOne() { - return getItemCount() == 1; - } + @Override + public boolean isEmpty() { + return getItemCountLong() == 0; + } - public boolean hasMany() { - return getItemCount() > 1; - } + @Override + public boolean hasOne() { + return getItemCountLong() == 1; + } - public Item itemAt(int pos) { - if (pos <= getItemCount()) - try { - return new IntegerValue(start.getLong() + pos); - } catch (final XPathException e) { - LOG.warn("Unexpected exception when processing result of range expression: " + e.getMessage(), e); - } - return null; - } + @Override + public boolean hasMany() { + return getItemCountLong() > 1; + } - public NodeSet toNodeSet() throws XPathException { - throw new XPathException("Type error: the sequence cannot be converted into" + - " a node set. Item type is xs:integer"); - } + @Override + public int getCardinality() { + final long itemCount = getItemCountLong(); + if (itemCount <= 0) { + return Cardinality.EMPTY; + } + if (itemCount == 1) { + return Cardinality.EXACTLY_ONE; + } + return Cardinality.MANY; + } - public MemoryNodeSet toMemNodeSet() throws XPathException { - throw new XPathException("Type error: the sequence cannot be converted into" + - " a node set. Item type is xs:integer"); - } + @Override + public Item itemAt(final int pos) { + if (pos < getItemCountLong()) { + return new IntegerValue(start.getLong() + pos); + } + return null; + } + + @Override + public NodeSet toNodeSet() throws XPathException { + throw new XPathException("Type error: the sequence cannot be converted into" + + " a node set. 
Item type is xs:integer"); + } + + @Override + public MemoryNodeSet toMemNodeSet() throws XPathException { + throw new XPathException("Type error: the sequence cannot be converted into" + + " a memory node set. Item type is xs:integer"); + } + + @Override + public void removeDuplicates() { + } - public void removeDuplicates() { + /** + * Generates a string representation of the Range Sequence. + * + * Range sequences can potentially be + * very large, so we generate a summary here + * rather than evaluating to generate a (possibly) + * huge sequence of objects. + * + * @return a string representation of the range sequence. + */ + @Override + public String toString() { + return "Range(" + start + " to " + end + ")"; } } diff --git a/src/org/exist/xquery/RootNode.java b/src/org/exist/xquery/RootNode.java index 58d1260afbc..1e5ae2bbf9b 100644 --- a/src/org/exist/xquery/RootNode.java +++ b/src/org/exist/xquery/RootNode.java @@ -20,6 +20,7 @@ */ package org.exist.xquery; +import org.exist.collections.ManagedLocks; import org.exist.dom.persistent.DocumentImpl; import org.exist.dom.persistent.DocumentSet; import org.exist.dom.persistent.NewArrayNodeSet; @@ -28,11 +29,10 @@ import org.exist.dom.persistent.NodeSet; import org.exist.numbering.NodeId; import org.exist.storage.UpdateListener; +import org.exist.storage.lock.ManagedDocumentLock; import org.exist.util.LockException; import org.exist.xquery.util.ExpressionDumper; -import org.exist.xquery.value.Item; -import org.exist.xquery.value.Sequence; -import org.exist.xquery.value.Type; +import org.exist.xquery.value.*; import java.util.Iterator; @@ -66,7 +66,31 @@ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathExc if (contextItem != null) {context.getProfiler().message(this, Profiler.START_SEQUENCES, "CONTEXT ITEM", contextItem.toSequence());} } - + + // first check if a context item is declared + final ContextItemDeclaration decl = context.getContextItemDeclartion(); + if (decl != null) { + 
final Sequence seq = decl.eval(null, null); + if (!seq.isEmpty()) { + final Item item = seq.itemAt(0); + // context item must be a node + if (!Type.subTypeOf(item.getType(), Type.NODE)) { + throw new XPathException(this, ErrorCodes.XPTY0020, "Context item is not a node"); + } + final NodeValue node = (NodeValue)item; + // return fn:root(self::node()) treat as document-node() + if (node.getImplementationType() == NodeValue.PERSISTENT_NODE) { + return new NodeProxy(((NodeProxy)item).getOwnerDocument()); + } else { + if (node.getType() == Type.DOCUMENT) { + return node; + } + return (org.exist.dom.memtree.DocumentImpl) node.getOwnerDocument(); + } + } + return Sequence.EMPTY_SEQUENCE; + } + // get statically known documents from the context DocumentSet ds = context.getStaticallyKnownDocuments(); if (ds == null || ds.getDocumentCount() == 0) {return Sequence.EMPTY_SEQUENCE;} @@ -80,10 +104,13 @@ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathExc // check if the loaded documents should remain locked NewArrayNodeSet result = new NewArrayNodeSet(); + ManagedLocks docLocks = null; try { // wait for pending updates - if (!context.inProtectedMode()) - {ds.lock(context.getBroker(), false);} + if (!context.inProtectedMode()) { + docLocks = ds.lock(context.getBroker(), false); + } + DocumentImpl doc; for (final Iterator i = ds.getDocumentIterator(); i.hasNext();) { doc = i.next(); @@ -99,8 +126,9 @@ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathExc throw new XPathException(this, "Failed to acquire lock on the context document set"); } finally { // release all locks - if (!context.inProtectedMode()) - {ds.unlock();} + if (!context.inProtectedMode() && docLocks != null) { + docLocks.close(); + } } // result.updateNoSort(); if (context.getProfiler().isEnabled()) diff --git a/src/org/exist/xquery/Step.java b/src/org/exist/xquery/Step.java index 72b67b4e728..cc505036227 100644 --- a/src/org/exist/xquery/Step.java +++ 
b/src/org/exist/xquery/Step.java @@ -86,7 +86,7 @@ public List getPredicates() { public void analyze(AnalyzeContextInfo contextInfo) throws XPathException { if (test != null && test.getName() != null && test.getName().getPrefix() != null && - !"".equals(test.getName().getPrefix()) && context.inScopePrefixes != null && + (!test.getName().getPrefix().isEmpty()) && context.getInScopePrefixes() != null && context.getURIForPrefix(test.getName().getPrefix()) == null) {throw new XPathException(this, ErrorCodes.XPST0081, "undeclared prefix '" + test.getName().getPrefix() + "'");} diff --git a/src/org/exist/xquery/ValueComparison.java b/src/org/exist/xquery/ValueComparison.java index a29daf15e0d..8e5009a6059 100644 --- a/src/org/exist/xquery/ValueComparison.java +++ b/src/org/exist/xquery/ValueComparison.java @@ -69,9 +69,8 @@ protected Sequence genericCompare(Sequence contextSequence, Item contextItem) th if(ls.isEmpty() || rs.isEmpty()) {return Sequence.EMPTY_SEQUENCE;} if (ls.hasOne() && rs.hasOne()) { - AtomicValue lv, rv; - lv = ls.itemAt(0).atomize(); - rv = rs.itemAt(0).atomize(); + final AtomicValue lv = ls.itemAt(0).atomize(); + final AtomicValue rv = rs.itemAt(0).atomize(); final Collator collator = getCollator(contextSequence); return BooleanValue.valueOf(compareAtomic(collator, lv, rv, StringTruncationOperator.NONE, relation)); } diff --git a/src/org/exist/xquery/VariableDeclaration.java b/src/org/exist/xquery/VariableDeclaration.java index badd3a543ee..96826ecf4ad 100644 --- a/src/org/exist/xquery/VariableDeclaration.java +++ b/src/org/exist/xquery/VariableDeclaration.java @@ -83,13 +83,13 @@ public void analyze(AnalyzeContextInfo contextInfo) throws XPathException { if(myModule != null) { // WM: duplicate var declaration is now caught in the XQuery tree parser // if (myModule.isVarDeclared(qn)) -// throw new XPathException(this, "err:XQST0049: It is a static error if more than one " + +// throw new XPathException(this, ErrorCodes.XQST0049, "It is a static 
error if more than one " + // "variable declared or imported by a module has the same expanded QName. Variable: " + qn); myModule.declareVariable(var); } else { // WM: duplicate var declaration is now caught in the XQuery tree parser // if(context.isVarDeclared(qn)) { -// throw new XPathException(this, "err:XQST0049: It is a static error if more than one " + +// throw new XPathException(this, ErrorCodes.XQST0049, "It is a static error if more than one " + // "variable declared or imported by a module has the same expanded QName. Variable: " + qn); // } context.declareGlobalVariable(var); diff --git a/src/org/exist/xquery/XQueryContext.java b/src/org/exist/xquery/XQueryContext.java index 697ccc6fdd9..3c814aa0439 100644 --- a/src/org/exist/xquery/XQueryContext.java +++ b/src/org/exist/xquery/XQueryContext.java @@ -1,6 +1,6 @@ /* * eXist Open Source Native XML Database - * Copyright (C) 2001-2015 The eXist Project + * Copyright (C) 2001-2018 The eXist Project * http://exist-db.org * * This program is free software; you can redistribute it and/or @@ -16,7 +16,6 @@ * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - * */ package org.exist.xquery; @@ -28,16 +27,32 @@ import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; +import java.nio.charset.Charset; import java.nio.file.Path; import java.util.*; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; import javax.xml.datatype.Duration; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.stream.XMLStreamException; 
+import antlr.RecognitionException; +import antlr.TokenStreamException; +import antlr.collections.AST; +import com.evolvedbinary.j8fu.Either; +import com.evolvedbinary.j8fu.function.TriFunctionE; +import com.evolvedbinary.j8fu.function.QuadFunctionE; +import com.evolvedbinary.j8fu.tuple.Tuple2; import com.ibm.icu.text.Collator; +import net.jcip.annotations.Immutable; +import net.jcip.annotations.ThreadSafe; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.exist.Database; @@ -46,15 +61,9 @@ import org.exist.collections.Collection; import org.exist.debuggee.Debuggee; import org.exist.debuggee.DebuggeeJoint; -import org.exist.dom.persistent.BinaryDocument; -import org.exist.dom.persistent.DefaultDocumentSet; -import org.exist.dom.persistent.DocumentImpl; -import org.exist.dom.persistent.DocumentSet; -import org.exist.dom.persistent.MutableDocumentSet; -import org.exist.dom.persistent.NodeHandle; -import org.exist.dom.persistent.NodeProxy; +import org.exist.dom.persistent.*; import org.exist.dom.QName; -import org.exist.http.servlets.RequestWrapper; +import org.exist.http.servlets.*; import org.exist.interpreter.Context; import org.exist.dom.memtree.InMemoryXMLStreamReader; import org.exist.dom.memtree.MemTreeBuilder; @@ -71,252 +80,307 @@ import org.exist.storage.UpdateListener; import org.exist.storage.lock.Lock.LockMode; import org.exist.storage.lock.LockedDocumentMap; +import org.exist.storage.txn.Txn; import org.exist.util.Collations; import org.exist.util.Configuration; import org.exist.util.LockException; import org.exist.util.hashtable.NamePool; import org.exist.xmldb.XmldbURI; -import org.exist.xquery.functions.request.RequestModule; import org.exist.xquery.parser.*; import org.exist.xquery.pragmas.*; import org.exist.xquery.update.Modification; import org.exist.xquery.value.*; - -import antlr.RecognitionException; -import antlr.TokenStreamException; -import antlr.collections.AST; import org.w3c.dom.Node; -import 
java.util.function.Predicate; - +import static com.evolvedbinary.j8fu.tuple.Tuple.Tuple; import static java.lang.invoke.MethodType.methodType; +import static javax.xml.XMLConstants.XMLNS_ATTRIBUTE; +import static javax.xml.XMLConstants.XML_NS_PREFIX; +import static org.exist.Namespaces.XML_NS; /** * The current XQuery execution context. Contains the static as well as the dynamic * XQuery context components. * - * @author Wolfgang Meier (wolfgang@exist-db.org) + * @author Wolfgang Meier (wolfgang@exist-db.org) */ -public class XQueryContext implements BinaryValueManager, Context -{ - public static final String ENABLE_QUERY_REWRITING_ATTRIBUTE = "enable-query-rewriting"; - public static final String XQUERY_BACKWARD_COMPATIBLE_ATTRIBUTE = "backwardCompatible"; - public static final String XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_ATTRIBUTE = "raise-error-on-failed-retrieval"; - public static final String ENFORCE_INDEX_USE_ATTRIBUTE = "enforce-index-use"; +public class XQueryContext implements BinaryValueManager, Context { + + private static final Logger LOG = LogManager.getLogger(XQueryContext.class); + + public static final String ENABLE_QUERY_REWRITING_ATTRIBUTE = "enable-query-rewriting"; + public static final String XQUERY_BACKWARD_COMPATIBLE_ATTRIBUTE = "backwardCompatible"; + public static final String XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_ATTRIBUTE = "raise-error-on-failed-retrieval"; + public static final String ENFORCE_INDEX_USE_ATTRIBUTE = "enforce-index-use"; //TODO : move elsewhere ? 
- public static final String BUILT_IN_MODULE_URI_ATTRIBUTE = "uri"; - public static final String BUILT_IN_MODULE_CLASS_ATTRIBUTE = "class"; - public static final String BUILT_IN_MODULE_SOURCE_ATTRIBUTE = "src"; + public static final String BUILT_IN_MODULE_URI_ATTRIBUTE = "uri"; + public static final String BUILT_IN_MODULE_CLASS_ATTRIBUTE = "class"; + public static final String BUILT_IN_MODULE_SOURCE_ATTRIBUTE = "src"; - public static final String PROPERTY_XQUERY_BACKWARD_COMPATIBLE = "xquery.backwardCompatible"; - public static final String PROPERTY_ENABLE_QUERY_REWRITING = "xquery.enable-query-rewriting"; - public static final String PROPERTY_XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL = "xquery.raise-error-on-failed-retrieval"; - public static final boolean XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT = false; - public static final String PROPERTY_ENFORCE_INDEX_USE = "xquery.enforce-index-use"; + public static final String PROPERTY_XQUERY_BACKWARD_COMPATIBLE = "xquery.backwardCompatible"; + public static final String PROPERTY_ENABLE_QUERY_REWRITING = "xquery.enable-query-rewriting"; + public static final String PROPERTY_XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL = "xquery.raise-error-on-failed-retrieval"; + public static final boolean XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT = false; + public static final String PROPERTY_ENFORCE_INDEX_USE = "xquery.enforce-index-use"; //TODO : move elsewhere ? 
- public static final String PROPERTY_BUILT_IN_MODULES = "xquery.modules"; - public static final String PROPERTY_STATIC_MODULE_MAP = "xquery.modules.static"; - public static final String PROPERTY_MODULE_PARAMETERS = "xquery.modules.parameters"; + public static final String PROPERTY_BUILT_IN_MODULES = "xquery.modules"; + public static final String PROPERTY_STATIC_MODULE_MAP = "xquery.modules.static"; + public static final String PROPERTY_MODULE_PARAMETERS = "xquery.modules.parameters"; - public static final String JAVA_URI_START = "java:"; + public static final String JAVA_URI_START = "java:"; //private static final String XMLDB_URI_START = "xmldb:exist://"; - protected final static Logger LOG = LogManager.getLogger( XQueryContext.class ); + private static final String TEMP_STORE_ERROR = "Error occurred while storing temporary data"; + public static final String XQUERY_CONTEXTVAR_XQUERY_UPDATE_ERROR = "_eXist_xquery_update_error"; + public static final String HTTP_SESSIONVAR_XMLDB_USER = "_eXist_xmldb_user"; - private static final String TEMP_STORE_ERROR = "Error occurred while storing temporary data"; - public static final String XQUERY_CONTEXTVAR_XQUERY_UPDATE_ERROR = "_eXist_xquery_update_error"; - public static final String HTTP_SESSIONVAR_XMLDB_USER = "_eXist_xmldb_user"; - public static final String HTTP_REQ_ATTR_USER = "xquery.user"; - public static final String HTTP_REQ_ATTR_PASS = "xquery.password"; + public static final String HTTP_REQ_ATTR_USER = "xquery.user"; + public static final String HTTP_REQ_ATTR_PASS = "xquery.password"; // Static namespace/prefix mappings - protected HashMap staticNamespaces = new HashMap(); + protected Map staticNamespaces = new HashMap<>(); // Static prefix/namespace mappings - protected HashMap staticPrefixes = new HashMap(); + protected Map staticPrefixes = new HashMap<>(); // Local in-scope namespace/prefix mappings in the current context - protected HashMap inScopeNamespaces = new HashMap(); + Map inScopeNamespaces = new 
HashMap<>(); // Local prefix/namespace mappings in the current context - protected HashMap inScopePrefixes = new HashMap(); + private Map inScopePrefixes = new HashMap<>(); // Inherited in-scope namespace/prefix mappings in the current context - protected HashMap inheritedInScopeNamespaces = new HashMap(); + private Map inheritedInScopeNamespaces = new HashMap<>(); // Inherited prefix/namespace mappings in the current context - protected HashMap inheritedInScopePrefixes = new HashMap(); + private Map inheritedInScopePrefixes = new HashMap<>(); - protected HashMap mappedModules = new HashMap(); + private Map mappedModules = new HashMap<>(); - private boolean preserveNamespaces = true; + private boolean preserveNamespaces = true; - private boolean inheritNamespaces = true; + private boolean inheritNamespaces = true; // Local namespace stack - protected Stack> namespaceStack = new Stack>(); + private Deque> namespaceStack = new ArrayDeque<>(); // Known user defined functions in the local module - protected TreeMap declaredFunctions = new TreeMap(); + private TreeMap declaredFunctions = new TreeMap<>(); // Globally declared variables - protected Map globalVariables = new TreeMap(); + protected Map globalVariables = new TreeMap<>(); // The last element in the linked list of local in-scope variables - protected LocalVariable lastVar = null; + private LocalVariable lastVar = null; - protected Stack contextStack = new Stack(); + private Deque contextStack = new ArrayDeque<>(); - protected Stack callStack = new Stack(); + private Deque callStack = new ArrayDeque<>(); // The current size of the variable stack - protected int variableStackSize = 0; + private int variableStackSize = 0; // Unresolved references to user defined functions - protected Deque forwardReferences = new ArrayDeque<>(); + private Deque forwardReferences = new ArrayDeque<>(); // Inline functions using closures need to be cleared after execution - protected Deque closures = new ArrayDeque<>(); + private 
Deque closures = new ArrayDeque<>(); // List of options declared for this query at compile time - i.e. declare option - protected List

+ * {@see https://www.w3.org/TR/xpath-31/#dt-available-docs}. + */ + private Map, XPathException>> dynamicDocuments = null; - /** The actual set of statically known documents. This will be generated on demand from staticDocumentPaths. */ - protected DocumentSet staticDocuments = null; + /** + * The available test resources of the dynamic context. + *

+ * {@see https://www.w3.org/TR/xpath-31/#dt-available-text-resources}. + */ + private Map, QuadFunctionE> dynamicTextResources = null; - /** The set of statically known documents specified as an array of paths to documents and collections. */ - protected XmldbURI[] staticCollections = null; + /** + * The available collections of the dynamic context. + *

+ * {@see https://www.w3.org/TR/xpath-31/#dt-available-collections}. + */ + private Map> dynamicCollections = null; /** * A set of documents which were modified during the query, usually through an XQuery update extension. The documents will be checked after the * query completed to see if a defragmentation run is needed. */ - protected MutableDocumentSet modifiedDocuments = null; + protected MutableDocumentSet modifiedDocuments = null; - /** A general-purpose map to set attributes in the current query context. */ - protected Map attributes = new HashMap(); + /** + * A general-purpose map to set attributes in the current query context. + */ + protected Map attributes = new HashMap<>(); - protected AnyURIValue baseURI = AnyURIValue.EMPTY_URI; + protected AnyURIValue baseURI = AnyURIValue.EMPTY_URI; - protected boolean baseURISetInProlog = false; + private boolean baseURISetInProlog = false; - protected String moduleLoadPath = "."; + protected String moduleLoadPath = "."; - protected String defaultFunctionNamespace = Function.BUILTIN_FUNCTION_NS; - protected AnyURIValue defaultElementNamespace = AnyURIValue.EMPTY_URI; - protected AnyURIValue defaultElementNamespaceSchema = AnyURIValue.EMPTY_URI; + private String defaultFunctionNamespace = Function.BUILTIN_FUNCTION_NS; + private AnyURIValue defaultElementNamespace = AnyURIValue.EMPTY_URI; + private AnyURIValue defaultElementNamespaceSchema = AnyURIValue.EMPTY_URI; - /** The default collation URI. */ - private String defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI; + /** + * The default collation URI. + */ + private String defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI; - /** Default Collator. Will be null for the default unicode codepoint collation. */ - private Collator defaultCollator = null; + /** + * Default Collator. Will be null for the default unicode codepoint collation. + */ + private Collator defaultCollator = null; - /** Set to true to enable XPath 1.0 backwards compatibility. 
*/ - private boolean backwardsCompatible = false; + /** + * Set to true to enable XPath 1.0 backwards compatibility. + */ + private boolean backwardsCompatible = false; - /** Should whitespace inside node constructors be stripped? */ - private boolean stripWhitespace = true; + /** + * Should whitespace inside node constructors be stripped? + */ + private boolean stripWhitespace = true; - /** Should empty order greatest or least? */ - private boolean orderEmptyGreatest = true; + /** + * Should empty order greatest or least? + */ + private boolean orderEmptyGreatest = true; /** * XQuery 3.0 - declare context item := */ - private ContextItemDeclaration contextItemDeclaration = null; + private ContextItemDeclaration contextItemDeclaration = null; - /** The context item set in the query prolog or externally */ - private Sequence contextItem = Sequence.EMPTY_SEQUENCE; + /** + * The context item set in the query prolog or externally + */ + private Sequence contextItem = Sequence.EMPTY_SEQUENCE; /** * The position of the currently processed item in the context sequence. This field has to be set on demand, for example, before calling the * fn:position() function. */ - private int contextPosition = 0; - private Sequence contextSequence = null; + private int contextPosition = 0; + private Sequence contextSequence = null; - /** Shared name pool used by all in-memory documents constructed in this query context. */ - private NamePool sharedNamePool = null; - - /** Stack for temporary document fragments. */ - private Stack fragmentStack = new Stack(); + /** + * Shared name pool used by all in-memory documents constructed in this query context. + */ + private NamePool sharedNamePool = null; - /** The root of the expression tree. */ - private Expression rootExpression; + /** + * Stack for temporary document fragments. + */ + private Deque fragmentStack = new ArrayDeque<>(); - /** An incremental counter to count the expressions in the current XQuery. 
Used during compilation to assign a unique ID to every expression. */ - private int expressionCounter = 0; + /** + * The root of the expression tree. + */ + private Expression rootExpression; /** - * Should all documents loaded by the query be locked? If set to true, it is the responsibility of the calling client code to unlock documents - * after the query has completed. + * An incremental counter to count the expressions in the current XQuery. Used during compilation to assign a unique ID to every expression. */ + private int expressionCounter = 0; + +// /** +// * Should all documents loaded by the query be locked? If set to true, it is the responsibility of the calling client code to unlock documents +// * after the query has completed. +// */ // private boolean lockDocumentsOnLoad = false; - /** Documents locked during the query. */ +// /** +// * Documents locked during the query. +// */ // private LockedDocumentMap lockedDocuments = null; - private LockedDocumentMap protectedDocuments = null; + private LockedDocumentMap protectedDocuments = null; - /** The profiler instance used by this context. */ - protected Profiler profiler; + /** + * The profiler instance used by this context. 
+ */ + protected Profiler profiler; //For holding XQuery Context variables for general storage in the XQuery Context - HashMap XQueryContextVars = new HashMap(); - + private Map XQueryContextVars = new HashMap<>(); + //For holding the environment variables - Map envs; + private Map envs; - private ContextUpdateListener updateListener = null; + private ContextUpdateListener updateListener = null; - private boolean enableOptimizer = true; + private boolean enableOptimizer = true; - private boolean raiseErrorOnFailedRetrieval = XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT; + private boolean raiseErrorOnFailedRetrieval = XQUERY_RAISE_ERROR_ON_FAILED_RETRIEVAL_DEFAULT; - private boolean isShared = false; + private boolean isShared = false; private Source source = null; - - private DebuggeeJoint debuggeeJoint = null; - private int xqueryVersion = 31; - + private DebuggeeJoint debuggeeJoint = null; + + private int xqueryVersion = 31; + protected Database db; private boolean analyzed = false; - + /** * The Subject of the User that requested the execution of the XQuery * attached by this Context. This is not the same as the Effective User @@ -332,14 +396,72 @@ public class XQueryContext implements BinaryValueManager, Context */ private boolean pushedUserFromHttpSession = false; - public synchronized Optional getRepository() - throws XPathException { + /** + * The HTTP context within which the XQuery + * is executing, or null if there is no + * HTTP context. 
+ */ + @Nullable + private HttpContext httpContext = null; + + public XQueryContext() { + profiler = new Profiler(null); + } + + public XQueryContext(final Database db) { + this(); + this.db = db; + loadDefaults(db.getConfiguration()); + this.profiler = new Profiler(db); + } + + public XQueryContext(final XQueryContext copyFrom) { + this(); + this.db = copyFrom.db; + loadDefaultNS(); + + for (final String prefix : copyFrom.staticNamespaces.keySet()) { + if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) { + continue; + } + + try { + declareNamespace(prefix, copyFrom.staticNamespaces.get(prefix)); + } catch (final XPathException ex) { + ex.printStackTrace(); + } + } + this.profiler = copyFrom.profiler; + } + + + /** + * Get the HTTP context of the XQuery. + * + * @return the HTTP context, or null if the query + * is not being executed within an HTTP context. + */ + public @Nullable + HttpContext getHttpContext() { + return httpContext; + } + + /** + * Set the HTTP context of the XQuery. + * + * @param httpContext the HTTP context within which the XQuery + * is being executed. 
+ */ + public void setHttpContext(final HttpContext httpContext) { + this.httpContext = httpContext; + } + + public Optional getRepository() { return getBroker().getBrokerPool().getExpathRepo(); } - private Module resolveInEXPathRepository(String namespace, String prefix) - throws XPathException - { + private Module resolveInEXPathRepository(final String namespace, final String prefix) + throws XPathException { // the repo and its eXist handler final Optional repo = getRepository(); // try an internal module @@ -354,7 +476,7 @@ private Module resolveInEXPathRepository(String namespace, String prefix) if (repo.isPresent()) { resolved = repo.get().resolveXQueryModule(namespace); // use the resolved file or return null - if ( resolved == null ) { + if (resolved == null) { return null; } } @@ -362,178 +484,140 @@ private Module resolveInEXPathRepository(String namespace, String prefix) final Source src = new FileSource(resolved, false); return compileOrBorrowModule(prefix, namespace, "", src); } - // TODO: end of expath repo manager, may change - - - public XQueryContext( ) - { - profiler = new Profiler( null ); - } - - - public XQueryContext( Database db ) - { - this( ); - this.db = db; - loadDefaults( db.getConfiguration() ); - this.profiler = new Profiler( db ); - } - - - public XQueryContext( XQueryContext copyFrom ) - { - this( ); - this.db = copyFrom.db; - loadDefaultNS(); - final Iterator prefixes = copyFrom.staticNamespaces.keySet().iterator(); - - while( prefixes.hasNext() ) { - final String prefix = prefixes.next(); - - if( "xml".equals(prefix) || "xmlns".equals(prefix) ) { - continue; - } - try { - declareNamespace( prefix, copyFrom.staticNamespaces.get( prefix ) ); - } - catch( final XPathException ex ) { - ex.printStackTrace(); + /** + * Prepares the XQuery Context for use. + *

+ * Should be called before compilation to prepare the query context, + * or before re-execution if the query was cached. + */ + public void prepareForReuse() throws XPathException { + // prepare the variables of the internal modules (which were previously reset) + try (final Stream internalModules = allModules + .values() + .stream() + .filter(module -> module instanceof InternalModule) + .map(module -> (InternalModule) module)) { + for (final InternalModule internalModule : internalModules.collect(Collectors.toList())) { + internalModule.prepare(this); } } - this.profiler = copyFrom.profiler; } - /** - * Returns true if this context has a parent context (means it is a module context). - * - * @return False. - */ - public boolean hasParent() - { - return( false ); + @Override + public boolean hasParent() { + return false; } - - public XQueryContext getRootContext() - { - return( this ); + @Override + public XQueryContext getRootContext() { + return this; } - - public XQueryContext copyContext() - { - final XQueryContext ctx = new XQueryContext( this ); - copyFields( ctx ); - return( ctx ); + @Override + public XQueryContext copyContext() { + final XQueryContext ctx = new XQueryContext(this); + copyFields(ctx); + return ctx; } - - /** - * Update the current dynamic context using the properties of another context. This is needed by {@link org.exist.xquery.functions.util.Eval}. 
- * - * @param from - */ - public void updateContext( XQueryContext from ) - { - this.watchdog = from.watchdog; - this.lastVar = from.lastVar; - this.variableStackSize = from.getCurrentStackSize(); - this.contextStack = from.contextStack; - this.inScopeNamespaces = from.inScopeNamespaces; - this.inScopePrefixes = from.inScopePrefixes; + @Override + public void updateContext(final XQueryContext from) { + this.watchdog = from.watchdog; + this.lastVar = from.lastVar; + this.variableStackSize = from.getCurrentStackSize(); + this.contextStack = from.contextStack; + this.inScopeNamespaces = from.inScopeNamespaces; + this.inScopePrefixes = from.inScopePrefixes; this.inheritedInScopeNamespaces = from.inheritedInScopeNamespaces; - this.inheritedInScopePrefixes = from.inheritedInScopePrefixes; - this.variableStackSize = from.variableStackSize; - this.attributes = from.attributes; - this.updateListener = from.updateListener; - this.modules = from.modules; - this.allModules = from.allModules; - this.mappedModules = from.mappedModules; - this.dynamicOptions = from.dynamicOptions; - this.staticOptions = from.staticOptions; - this.db = from.db; - } - - - protected void copyFields( XQueryContext ctx ) - { - ctx.calendar = this.calendar; - ctx.implicitTimeZone = this.implicitTimeZone; - ctx.baseURI = this.baseURI; - ctx.baseURISetInProlog = this.baseURISetInProlog; - ctx.staticDocumentPaths = this.staticDocumentPaths; - ctx.staticDocuments = this.staticDocuments; - ctx.moduleLoadPath = this.moduleLoadPath; + this.inheritedInScopePrefixes = from.inheritedInScopePrefixes; + this.variableStackSize = from.variableStackSize; + this.attributes = from.attributes; + this.updateListener = from.updateListener; + this.modules = from.modules; + this.allModules = from.allModules; + this.mappedModules = from.mappedModules; + this.dynamicOptions = from.dynamicOptions; + this.staticOptions = from.staticOptions; + this.db = from.db; + this.httpContext = from.httpContext; + } + + protected void 
copyFields(final XQueryContext ctx) { + ctx.calendar = this.calendar; + ctx.implicitTimeZone = this.implicitTimeZone; + ctx.baseURI = this.baseURI; + ctx.baseURISetInProlog = this.baseURISetInProlog; + ctx.staticDocumentPaths = this.staticDocumentPaths; + ctx.staticDocuments = this.staticDocuments; + ctx.dynamicDocuments = this.dynamicDocuments; + ctx.dynamicTextResources = this.dynamicTextResources; + ctx.dynamicCollections = this.dynamicCollections; + ctx.moduleLoadPath = this.moduleLoadPath; ctx.defaultFunctionNamespace = this.defaultFunctionNamespace; - ctx.defaultElementNamespace = this.defaultElementNamespace; - ctx.defaultCollation = this.defaultCollation; - ctx.defaultCollator = this.defaultCollator; - ctx.backwardsCompatible = this.backwardsCompatible; - ctx.enableOptimizer = this.enableOptimizer; - ctx.stripWhitespace = this.stripWhitespace; - ctx.preserveNamespaces = this.preserveNamespaces; - ctx.inheritNamespaces = this.inheritNamespaces; - ctx.orderEmptyGreatest = this.orderEmptyGreatest; - - ctx.declaredFunctions = new TreeMap( this.declaredFunctions ); - ctx.globalVariables = new TreeMap( this.globalVariables ); - ctx.attributes = new HashMap( this.attributes ); + ctx.defaultElementNamespace = this.defaultElementNamespace; + ctx.defaultCollation = this.defaultCollation; + ctx.defaultCollator = this.defaultCollator; + ctx.backwardsCompatible = this.backwardsCompatible; + ctx.enableOptimizer = this.enableOptimizer; + ctx.stripWhitespace = this.stripWhitespace; + ctx.preserveNamespaces = this.preserveNamespaces; + ctx.inheritNamespaces = this.inheritNamespaces; + ctx.orderEmptyGreatest = this.orderEmptyGreatest; + + ctx.declaredFunctions = new TreeMap<>(this.declaredFunctions); + ctx.globalVariables = new TreeMap<>(this.globalVariables); + ctx.attributes = new HashMap<>(this.attributes); // make imported modules available in the new context - ctx.modules = new HashMap(); + ctx.modules = new HashMap<>(); - for( final Module module : 
this.modules.values() ) { + for (final Module module : this.modules.values()) { try { - ctx.modules.put( module.getNamespaceURI(), module ); - final String prefix = this.staticPrefixes.get( module.getNamespaceURI() ); - ctx.declareNamespace( prefix, module.getNamespaceURI() ); - } - catch( final XPathException e ) { + ctx.modules.put(module.getNamespaceURI(), module); + final String prefix = this.staticPrefixes.get(module.getNamespaceURI()); + ctx.declareNamespace(prefix, module.getNamespaceURI()); + } catch (final XPathException e) { // ignore } } - ctx.allModules = new HashMap(); + ctx.allModules = new HashMap<>(); - for( final Module module : this.allModules.values() ) { + for (final Module module : this.allModules.values()) { - if( module != null ) { //UNDERSTAND: why is it possible? -shabanovd - ctx.allModules.put( module.getNamespaceURI(), module ); + if (module != null) { //UNDERSTAND: why is it possible? -shabanovd + ctx.allModules.put(module.getNamespaceURI(), module); } } - ctx.watchdog = this.watchdog; - ctx.profiler = getProfiler(); - ctx.lastVar = this.lastVar; + ctx.watchdog = this.watchdog; + ctx.profiler = getProfiler(); + ctx.lastVar = this.lastVar; ctx.variableStackSize = getCurrentStackSize(); - ctx.contextStack = this.contextStack; - ctx.mappedModules = new HashMap( this.mappedModules ); - ctx.staticNamespaces = new HashMap( this.staticNamespaces ); - ctx.staticPrefixes = new HashMap( this.staticPrefixes ); - - if (this.dynamicOptions != null){ - ctx.dynamicOptions = new ArrayList

eXist internally keeps a table containing all prefix/namespace mappings it found in documents, which have been previously stored into the - * database. These default mappings need not to be declared explicitely.

- * - * @param prefix - * @param uri - * - * @throws XPathException - */ - public void declareNamespace( String prefix, String uri ) throws XPathException - { - if( prefix == null ) { + @Override + public void declareNamespace(String prefix, String uri) throws XPathException { + if (prefix == null) { prefix = ""; } - if( uri == null ) { + if (uri == null) { uri = ""; } - if( "xml".equals(prefix) || "xmlns".equals(prefix) ) { - throw( new XPathException( ErrorCodes.XQST0070, "Namespace predefined prefix '" + prefix + "' can not be bound" ) ); + if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) { + throw new XPathException(ErrorCodes.XQST0070, "Namespace predefined prefix '" + prefix + "' can not be bound"); } - if( uri.equals( Namespaces.XML_NS ) ) { - throw( new XPathException( ErrorCodes.XQST0070, "Namespace URI '" + uri + "' must be bound to the 'xml' prefix" ) ); + if (uri.equals(XML_NS)) { + throw new XPathException(ErrorCodes.XQST0070, "Namespace URI '" + uri + "' must be bound to the 'xml' prefix"); } - - final String prevURI = staticNamespaces.get( prefix ); + + final String prevURI = staticNamespaces.get(prefix); //This prefix was not bound - if( prevURI == null ) { - - if( uri.length() > 0 ) { - //Bind it - staticNamespaces.put( prefix, uri ); - staticPrefixes.put( uri, prefix ); - return; - - } else { + if (prevURI == null) { + + if (uri.isEmpty()) { //Nothing to bind //TODO : check the specs : unbinding an NS which is not already bound may be disallowed. 
- LOG.warn( "Unbinding unbound prefix '" + prefix + "'" ); + LOG.warn("Unbinding unbound prefix '" + prefix + "'"); + } else { + //Bind it + staticNamespaces.put(prefix, uri); + staticPrefixes.put(uri, prefix); } - + } else { //This prefix was bound //Unbind it - if( uri.length() == 0 ) { - + if (uri.isEmpty()) { // if an empty namespace is specified, // remove any existing mapping for this namespace //TODO : improve, since XML_NS can't be unbound - staticPrefixes.remove( uri ); - staticNamespaces.remove( prefix ); + staticPrefixes.remove(uri); + staticNamespaces.remove(prefix); return; } //those prefixes can be rebound to different URIs - if( ( "xs".equals(prefix) && Namespaces.SCHEMA_NS.equals( prevURI ) ) - || ( "xsi".equals(prefix) && Namespaces.SCHEMA_INSTANCE_NS.equals( prevURI ) ) - || ( "xdt".equals(prefix) && Namespaces.XPATH_DATATYPES_NS.equals( prevURI ) ) - || ( "fn".equals(prefix) && Namespaces.XPATH_FUNCTIONS_NS.equals( prevURI ) ) - || ( "math".equals(prefix)) && Namespaces.XPATH_FUNCTIONS_MATH_NS.equals( prevURI ) - || ( "local".equals(prefix) && Namespaces.XQUERY_LOCAL_NS.equals( prevURI ) ) ) { - - staticPrefixes.remove( prevURI ); - staticNamespaces.remove( prefix ); - - if( uri.length() > 0 ) { - staticNamespaces.put( prefix, uri ); - staticPrefixes.put( uri, prefix ); - return; - - } else { - //Nothing to bind (not sure if it should raise an error though) + if (("xs".equals(prefix) && Namespaces.SCHEMA_NS.equals(prevURI)) + || ("xsi".equals(prefix) && Namespaces.SCHEMA_INSTANCE_NS.equals(prevURI)) + || ("xdt".equals(prefix) && Namespaces.XPATH_DATATYPES_NS.equals(prevURI)) + || ("fn".equals(prefix) && Namespaces.XPATH_FUNCTIONS_NS.equals(prevURI)) + || ("math".equals(prefix)) && Namespaces.XPATH_FUNCTIONS_MATH_NS.equals(prevURI) + || ("local".equals(prefix) && Namespaces.XQUERY_LOCAL_NS.equals(prevURI))) { + + staticPrefixes.remove(prevURI); + staticNamespaces.remove(prefix); + + staticNamespaces.put(prefix, uri); + staticPrefixes.put(uri, 
prefix); - //TODO : check the specs : unbinding an NS which is not already bound may be disallowed. - LOG.warn( "Unbinding unbound prefix '" + prefix + "'" ); - } - } else { //Forbids rebinding the *same* prefix in a *different* namespace in this *same* context - if( !uri.equals( prevURI ) ) { + if (!uri.equals(prevURI)) { throw new XPathException(ErrorCodes.XQST0033, "Cannot bind prefix '" + prefix + "' to '" + uri + "' it is already bound to '" + prevURI + "'"); } } } } + @Override + public void declareNamespaces(final Map namespaceMap) { + for (final Map.Entry entry : namespaceMap.entrySet()) { + String prefix = entry.getKey(); + String uri = entry.getValue(); - public void declareNamespaces( Map namespaceMap ) - { - String prefix; - String uri; - - for( final Map.Entry entry : namespaceMap.entrySet() ) { - prefix = entry.getKey(); - uri = entry.getValue(); - - if( prefix == null ) { + if (prefix == null) { prefix = ""; } - if( uri == null ) { + if (uri == null) { uri = ""; } - staticNamespaces.put( prefix, uri ); - staticPrefixes.put( uri, prefix ); + staticNamespaces.put(prefix, uri); + staticPrefixes.put(uri, prefix); } } + @Override + public void removeNamespace(final String uri) { + staticPrefixes.remove(uri); - /** - * Removes the namespace URI from the prefix/namespace mappings table. 
- * - * @param uri - */ - public void removeNamespace( String uri ) - { - staticPrefixes.remove( uri ); - - for( final Iterator i = staticNamespaces.values().iterator(); i.hasNext(); ) { - - if( i.next().equals( uri ) ) { + for (final Iterator i = staticNamespaces.values().iterator(); i.hasNext(); ) { + if (i.next().equals(uri)) { i.remove(); return; } } - inScopePrefixes.remove( uri ); - - if( inScopeNamespaces != null ) { + inScopePrefixes.remove(uri); - for( final Iterator i = inScopeNamespaces.values().iterator(); i.hasNext(); ) { - - if( i.next().equals( uri ) ) { + if (inScopeNamespaces != null) { + for (final Iterator i = inScopeNamespaces.values().iterator(); i.hasNext(); ) { + if (i.next().equals(uri)) { i.remove(); return; } } } + inheritedInScopePrefixes.remove(uri); - //TODO : is this relevant ? - inheritedInScopePrefixes.remove( uri ); - - if( inheritedInScopeNamespaces != null ) { - - for( final Iterator i = inheritedInScopeNamespaces.values().iterator(); i.hasNext(); ) { - - if( i.next().equals( uri ) ) { + if (inheritedInScopeNamespaces != null) { + for (final Iterator i = inheritedInScopeNamespaces.values().iterator(); i.hasNext(); ) { + if (i.next().equals(uri)) { i.remove(); return; } @@ -794,84 +806,64 @@ public void removeNamespace( String uri ) } } - - /** - * Declare an in-scope namespace. This is called during query execution. 
- * - * @param prefix - * @param uri - */ - public void declareInScopeNamespace( String prefix, String uri ) - { - if( ( prefix == null ) || ( uri == null ) ) { - throw( new IllegalArgumentException( "null argument passed to declareNamespace" ) ); + @Override + public void declareInScopeNamespace(final String prefix, final String uri) { + if (prefix == null || uri == null) { + throw new IllegalArgumentException("null argument passed to declareNamespace"); } //Activate the namespace by removing it from the inherited namespaces - if( inheritedInScopePrefixes.get( getURIForPrefix( prefix ) ) != null ) { - inheritedInScopePrefixes.remove( uri ); - } - - if( inheritedInScopeNamespaces.get( prefix ) != null ) { - inheritedInScopeNamespaces.remove( prefix ); + if (inheritedInScopePrefixes.containsKey(getURIForPrefix(prefix))) { + inheritedInScopePrefixes.remove(uri); } - inScopePrefixes.put( uri, prefix ); - inScopeNamespaces.put( prefix, uri ); - } + inheritedInScopeNamespaces.remove(prefix); - public String getInScopeNamespace( String prefix ) - { - return( ( inScopeNamespaces == null ) ? null : inScopeNamespaces.get( prefix ) ); + inScopePrefixes.put(uri, prefix); + inScopeNamespaces.put(prefix, uri); } - - public String getInScopePrefix( String uri ) - { - return( ( inScopePrefixes == null ) ? null : inScopePrefixes.get( uri ) ); + @Override + public String getInScopeNamespace(final String prefix) { + return inScopeNamespaces == null ? null : inScopeNamespaces.get(prefix); } - public Map getInScopePrefixes( ) - { - return( ( inScopePrefixes == null ) ? null : inScopePrefixes ); + @Override + public String getInScopePrefix(final String uri) { + return inScopePrefixes == null ? null : inScopePrefixes.get(uri); } - public String getInheritedNamespace( String prefix ) - { - return( ( inheritedInScopeNamespaces == null ) ? 
null : inheritedInScopeNamespaces.get( prefix ) ); + public Map getInScopePrefixes() { + return inScopePrefixes; } - - public String getInheritedPrefix( String uri ) - { - return( ( inheritedInScopePrefixes == null ) ? null : inheritedInScopePrefixes.get( uri ) ); + @Override + public String getInheritedNamespace(final String prefix) { + return inheritedInScopeNamespaces == null ? null : inheritedInScopeNamespaces.get(prefix); } + @Override + public String getInheritedPrefix(final String uri) { + return inheritedInScopePrefixes == null ? null : inheritedInScopePrefixes.get(uri); + } - /** - * Return the namespace URI mapped to the registered prefix or null if the prefix is not registered. - * - * @param prefix - * - * @return namespace - */ - public String getURIForPrefix( String prefix ) - { + @Override + public String getURIForPrefix(final String prefix) { // try in-scope namespace declarations - String uri = ( inScopeNamespaces == null ) ? null : inScopeNamespaces.get( prefix ); + String uri = (inScopeNamespaces == null) ? null : inScopeNamespaces.get(prefix); - if( uri != null ) { - return( uri ); + if (uri != null) { + return uri; } - if( inheritNamespaces ) { - uri = ( inheritedInScopeNamespaces == null ) ? null : inheritedInScopeNamespaces.get( prefix ); + if (inheritNamespaces) { + uri = (inheritedInScopeNamespaces == null) ? null : inheritedInScopeNamespaces.get(prefix); - if( uri != null ) { - return( uri ); + if (uri != null) { + return uri; } } - return( staticNamespaces.get( prefix ) ); + return staticNamespaces.get(prefix); /* old code checked namespaces first String ns = (String) namespaces.get(prefix); if (ns == null) @@ -884,379 +876,335 @@ public String getURIForPrefix( String prefix ) */ } + @Override + public String getPrefixForURI(final String uri) { + String prefix = (inScopePrefixes == null) ? 
null : inScopePrefixes.get(uri); - /** - * Get URI Prefix - * - * @param uri - * - * @return the prefix mapped to the registered URI or null if the URI is not registered. - */ - public String getPrefixForURI( String uri ) - { - String prefix = ( inScopePrefixes == null ) ? null : inScopePrefixes.get( uri ); - - if( prefix != null ) { - return( prefix ); + if (prefix != null) { + return prefix; } - if( inheritNamespaces ) { - prefix = ( inheritedInScopePrefixes == null ) ? null : inheritedInScopePrefixes.get( uri ); + if (inheritNamespaces) { + prefix = (inheritedInScopePrefixes == null) ? null : inheritedInScopePrefixes.get(uri); - if( prefix != null ) { - return( prefix ); + if (prefix != null) { + return prefix; } } - return( staticPrefixes.get( uri ) ); + return staticPrefixes.get(uri); } - - /** - * Clear all user-defined prefix/namespace mappings. - * - * @return - */ - // TODO: remove since never used? -// public void clearNamespaces() { -// staticNamespaces.clear(); -// staticPrefixes.clear(); -// if (inScopeNamespaces != null) { -// inScopeNamespaces.clear(); -// inScopePrefixes.clear(); -// } -// //TODO : it this relevant ? -// if (inheritedInScopeNamespaces != null) { -// inheritedInScopeNamespaces.clear(); -// inheritedInScopePrefixes.clear(); -// } -// loadDefaults(broker.getConfiguration()); -// } - - /** - * Returns the current default function namespace. - * - * @return current default function namespace - */ - public String getDefaultFunctionNamespace() - { - return( defaultFunctionNamespace ); + @Override + public String getDefaultFunctionNamespace() { + return defaultFunctionNamespace; } - - /** - * Set the default function namespace. By default, this points to the namespace for XPath built-in functions. - * - * @param uri - * - * @throws XPathException - */ - public void setDefaultFunctionNamespace( String uri ) throws XPathException - { - //Not sure for the 2nd clause : eXist forces the function NS as default. 
- if( ( defaultFunctionNamespace != null ) && !defaultFunctionNamespace.equals( Function.BUILTIN_FUNCTION_NS ) && !defaultFunctionNamespace.equals( uri ) ) { - throw( new XPathException( "err:XQST0066: default function namespace is already set to: '" + defaultFunctionNamespace + "'" ) ); + @Override + public void setDefaultFunctionNamespace(final String uri) throws XPathException { + //Not sure for the 2nd clause : eXist-db forces the function NS as default. + if ((defaultFunctionNamespace != null) && !defaultFunctionNamespace.equals(Function.BUILTIN_FUNCTION_NS) && !defaultFunctionNamespace.equals(uri)) { + throw new XPathException(ErrorCodes.XQST0066, "Default function namespace is already set to: '" + defaultFunctionNamespace + "'"); } defaultFunctionNamespace = uri; } - - /** - * Returns the current default element namespace. - * - * @return current default element namespace schema - * - * @throws XPathException - */ - public String getDefaultElementNamespaceSchema() throws XPathException - { - return( defaultElementNamespaceSchema.getStringValue() ); + @Override + public String getDefaultElementNamespaceSchema() throws XPathException { + return defaultElementNamespaceSchema.getStringValue(); } - - /** - * Set the default element namespace. By default, this points to the empty uri. - * - * @param uri - * - * @throws XPathException - */ - public void setDefaultElementNamespaceSchema( String uri ) throws XPathException - { + @Override + public void setDefaultElementNamespaceSchema(final String uri) throws XPathException { // eXist forces the empty element NS as default. 
- if( !defaultElementNamespaceSchema.equals( AnyURIValue.EMPTY_URI ) ) { - throw( new XPathException( "err:XQST0066: default function namespace schema is already set to: '" + defaultElementNamespaceSchema.getStringValue() + "'" ) ); + if (!defaultElementNamespaceSchema.equals(AnyURIValue.EMPTY_URI)) { + throw new XPathException(ErrorCodes.XQST0066, "Default function namespace schema is already set to: '" + defaultElementNamespaceSchema.getStringValue() + "'"); } - defaultElementNamespaceSchema = new AnyURIValue( uri ); + defaultElementNamespaceSchema = new AnyURIValue(uri); } + @Override + public String getDefaultElementNamespace() throws XPathException { + return defaultElementNamespace.getStringValue(); + } - /** - * Returns the current default element namespace. - * - * @return current default element namespace - * - * @throws XPathException - */ - public String getDefaultElementNamespace() throws XPathException - { - return( defaultElementNamespace.getStringValue() ); - } - - - /** - * Set the default element namespace. By default, this points to the empty uri. - * - * @param uri a String value - * @param schema a String value - * - * @exception XPathException if an error occurs - */ - public void setDefaultElementNamespace( String uri, String schema ) throws XPathException - { + @Override + public void setDefaultElementNamespace(final String uri, @Nullable final String schema) throws XPathException { // eXist forces the empty element NS as default. 
- if( !defaultElementNamespace.equals( AnyURIValue.EMPTY_URI ) ) { - throw( new XPathException( "err:XQST0066: default element namespace is already set to: '" + defaultElementNamespace.getStringValue() + "'" ) ); + if (!defaultElementNamespace.equals(AnyURIValue.EMPTY_URI)) { + throw new XPathException(ErrorCodes.XQST0066, + "Default element namespace is already set to: '" + defaultElementNamespace.getStringValue() + "'"); } - defaultElementNamespace = new AnyURIValue( uri ); + defaultElementNamespace = new AnyURIValue(uri); - if( schema != null ) { - defaultElementNamespaceSchema = new AnyURIValue( schema ); + if (schema != null) { + defaultElementNamespaceSchema = new AnyURIValue(schema); } } - - /** - * Set the default collation to be used by all operators and functions on strings. Throws an exception if the collation is unknown or cannot be - * instantiated. - * - * @param uri - * - * @throws XPathException - */ - public void setDefaultCollation( String uri ) throws XPathException - { - if( uri.equals( Collations.UNICODE_CODEPOINT_COLLATION_URI) || uri.equals( Collations.CODEPOINT_SHORT ) ) { + @Override + public void setDefaultCollation(final String uri) throws XPathException { + if (uri.equals(Collations.UNICODE_CODEPOINT_COLLATION_URI) || uri.equals(Collations.CODEPOINT_SHORT)) { defaultCollation = Collations.UNICODE_CODEPOINT_COLLATION_URI; - defaultCollator = null; + defaultCollator = null; } - URI uriTest; - + final URI uriTest; try { - uriTest = new URI( uri ); - } - catch( final URISyntaxException e ) { - throw( new XPathException( "err:XQST0038: Unknown collation : '" + uri + "'" ) ); + uriTest = new URI(uri); + } catch (final URISyntaxException e) { + throw new XPathException(ErrorCodes.XQST0038, "Unknown collation : '" + uri + "'"); } - if( uri.startsWith( Collations.EXIST_COLLATION_URI ) || uri.startsWith( "?" 
) || uriTest.isAbsolute() ) { - defaultCollator = Collations.getCollationFromURI(uri); + if (uri.startsWith(Collations.EXIST_COLLATION_URI) || uri.charAt(0) == '?' || uriTest.isAbsolute()) { + defaultCollator = Collations.getCollationFromURI(uri); defaultCollation = uri; } else { String absUri = getBaseURI().getStringValue() + uri; - defaultCollator = Collations.getCollationFromURI(absUri); + defaultCollator = Collations.getCollationFromURI(absUri); defaultCollation = absUri; } } - - public String getDefaultCollation() - { - return( defaultCollation ); + @Override + public String getDefaultCollation() { + return defaultCollation; } - - public Collator getCollator( String uri ) throws XPathException - { - if( uri == null ) { - return( defaultCollator ); + @Override + public Collator getCollator(final String uri) throws XPathException { + if (uri == null) { + return defaultCollator; } - return( Collations.getCollationFromURI( uri ) ); + return Collations.getCollationFromURI(uri); } - - public Collator getDefaultCollator() - { - return( defaultCollator ); + @Override + public Collator getDefaultCollator() { + return defaultCollator; } - - /** - * Set the set of statically known documents for the current execution context. These documents will be processed if no explicit document set has - * been set for the current expression with fn:doc() or fn:collection(). 
- * - * @param docs - */ - public void setStaticallyKnownDocuments( XmldbURI[] docs ) - { + @Override + public void setStaticallyKnownDocuments(final XmldbURI[] docs) { staticDocumentPaths = docs; } - - public void setStaticallyKnownDocuments( DocumentSet set ) - { + @Override + public void setStaticallyKnownDocuments(final DocumentSet set) { staticDocuments = set; } + public void addDynamicallyAvailableDocument(final String uri, + final TriFunctionE, XPathException> supplier) { + if (dynamicDocuments == null) { + dynamicDocuments = new HashMap<>(); + } + dynamicDocuments.put(uri, supplier); + } - //TODO : not sure how these 2 options might/have to be related - public void setCalendar( XMLGregorianCalendar newCalendar ) - { - this.calendar = (XMLGregorianCalendar)newCalendar.clone(); + public void addDynamicallyAvailableTextResource(final String uri, final Charset encoding, + final QuadFunctionE supplier) { + if (dynamicTextResources == null) { + dynamicTextResources = new HashMap<>(); + } + dynamicTextResources.put(Tuple(uri, encoding), supplier); } + public void addDynamicallyAvailableCollection(final String uri, + final TriFunctionE supplier) { + if (dynamicCollections == null) { + dynamicCollections = new HashMap<>(); + } + dynamicCollections.put(uri, supplier); + } - public void setTimeZone( TimeZone newTimeZone ) - { - this.implicitTimeZone = newTimeZone; + @Override + public void setCalendar(final XMLGregorianCalendar newCalendar) { + this.calendar = (XMLGregorianCalendar) newCalendar.clone(); } + @Override + public void setTimeZone(final TimeZone newTimeZone) { + this.implicitTimeZone = newTimeZone; + } - public XMLGregorianCalendar getCalendar() - { + @Override + public XMLGregorianCalendar getCalendar() { //TODO : we might prefer to return null - if( calendar == null ) { - + if (calendar == null) { try { - //Initialize to current dateTime - calendar = DatatypeFactory.newInstance().newXMLGregorianCalendar( new GregorianCalendar() ); - } - catch( final 
DatatypeConfigurationException e ) { - LOG.error( e.getMessage(), e ); + calendar = DatatypeFactory.newInstance().newXMLGregorianCalendar(new GregorianCalendar()); + } catch (final DatatypeConfigurationException e) { + LOG.error(e.getMessage(), e); } } //That's how we ensure stability of that static context function - return( calendar ); + return calendar; } - - public TimeZone getImplicitTimeZone() - { - if( implicitTimeZone == null ) { + @Override + public TimeZone getImplicitTimeZone() { + if (implicitTimeZone == null) { implicitTimeZone = TimeZone.getDefault(); - if( implicitTimeZone.inDaylightTime( new Date() ) ) { - implicitTimeZone.setRawOffset( implicitTimeZone.getRawOffset() + implicitTimeZone.getDSTSavings() ); + if (implicitTimeZone.inDaylightTime(new Date())) { + implicitTimeZone.setRawOffset(implicitTimeZone.getRawOffset() + implicitTimeZone.getDSTSavings()); } } //That's how we ensure stability of that static context function - return( this.implicitTimeZone ); + return this.implicitTimeZone; } - - /** - * Get statically known documents - * - * @return set of statically known documents. 
- * - * @throws XPathException - */ - public DocumentSet getStaticallyKnownDocuments() throws XPathException - { - if( staticDocuments != null ) { + @Override + public DocumentSet getStaticallyKnownDocuments() throws XPathException { + if (staticDocuments != null) { // the document set has already been built, return it - return( staticDocuments ); + return staticDocuments; } - if( protectedDocuments != null ) { + if (protectedDocuments != null) { staticDocuments = protectedDocuments.toDocumentSet(); - return( staticDocuments ); + return staticDocuments; } - MutableDocumentSet ndocs = new DefaultDocumentSet( 1031 ); + final MutableDocumentSet ndocs = new DefaultDocumentSet(40); - if( staticDocumentPaths == null ) { + if (staticDocumentPaths == null) { // no path defined: return all documents in the db try { - getBroker().getAllXMLResources( ndocs ); - } catch(final PermissionDeniedException pde) { - LOG.warn("Permission denied to read resource all resources" + pde.getMessage(), pde); - throw new XPathException("Permission denied to read resource all resources" + pde.getMessage(), pde); + getBroker().getAllXMLResources(ndocs); + } catch (final PermissionDeniedException | LockException e) { + LOG.warn(e); + throw new XPathException("Permission denied to read resource all resources: " + e.getMessage(), e); } } else { - DocumentImpl doc; - Collection collection; - - for( int i = 0; i < staticDocumentPaths.length; i++ ) { + for (final XmldbURI staticDocumentPath : staticDocumentPaths) { try { - collection = getBroker().getCollection( staticDocumentPaths[i] ); + final Collection collection = getBroker().getCollection(staticDocumentPath); - if( collection != null ) { - collection.allDocs( getBroker(), ndocs, true); + if (collection != null) { + collection.allDocs(getBroker(), ndocs, true); } else { - doc = getBroker().getXMLResource( staticDocumentPaths[i], LockMode.READ_LOCK ); + try (final LockedDocument lockedDocument = getBroker().getXMLResource(staticDocumentPath, 
LockMode.READ_LOCK)) { - if( doc != null ) { + final DocumentImpl doc = lockedDocument == null ? null : lockedDocument.getDocument(); + if (doc != null) { - if( doc.getPermissions().validate( - getBroker().getCurrentSubject(), Permission.READ ) ) { - - ndocs.add( doc ); + if (doc.getPermissions().validate( + getBroker().getCurrentSubject(), Permission.READ)) { + + ndocs.add(doc); + } } - doc.getUpdateLock().release( LockMode.READ_LOCK ); } } - } - catch( final PermissionDeniedException e ) { - LOG.warn( "Permission denied to read resource " + staticDocumentPaths[i] + ". Skipping it." ); + } catch (final PermissionDeniedException | LockException e) { + LOG.warn("Permission denied to read resource " + staticDocumentPath + ". Skipping it."); } } } staticDocuments = ndocs; - return( staticDocuments ); + return staticDocuments; } public DocumentSet getStaticDocs() { return staticDocuments; } - public ExtendedXMLStreamReader getXMLStreamReader( NodeValue nv ) throws XMLStreamException, IOException - { - ExtendedXMLStreamReader reader; + /** + * Gets a document from the "Available documents" of the + * dynamic context. + */ + public @Nullable + Sequence getDynamicallyAvailableDocument(final String uri) throws XPathException { + if (dynamicDocuments == null) { + return null; + } - if( nv.getImplementationType() == NodeValue.IN_MEMORY_NODE ) { - final NodeImpl node = (NodeImpl)nv; - final org.exist.dom.memtree.DocumentImpl ownerDoc = node.getNodeType() == Node.DOCUMENT_NODE ?
(org.exist.dom.memtree.DocumentImpl)node : node.getOwnerDocument(); - reader = new InMemoryXMLStreamReader( ownerDoc, ownerDoc ); - } else { - final NodeProxy proxy = (NodeProxy)nv; - reader = getBroker().newXMLStreamReader( new NodeProxy( proxy.getOwnerDocument(), NodeId.DOCUMENT_NODE, proxy.getOwnerDocument().getFirstChildAddress() ), false ); + final TriFunctionE, XPathException> docSupplier + = dynamicDocuments.get(uri); + if (docSupplier == null) { + return null; } - return( reader ); + + return docSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri).fold(md -> md, pd -> (Sequence) pd); } + /** + * Gets a text resource from the "Available text resources" of the + * dynamic context. + */ + public @Nullable + Reader getDynamicallyAvailableTextResource(final String uri, final Charset charset) + throws XPathException { + if (dynamicTextResources == null) { + return null; + } - public void setProtectedDocs( LockedDocumentMap map ) - { - this.protectedDocuments = map; + final QuadFunctionE textResourceSupplier + = dynamicTextResources.get(Tuple(uri, charset)); + if (textResourceSupplier == null) { + return null; + } + + return textResourceSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri, charset); } + /** + * Gets a collection from the "Available collections" of the + * dynamic context.
+ */ + public @Nullable + Sequence getDynamicallyAvailableCollection(final String uri) throws XPathException { + if (dynamicCollections == null) { + return null; + } - public LockedDocumentMap getProtectedDocs() - { - return( this.protectedDocuments ); + final TriFunctionE collectionSupplier + = dynamicCollections.get(uri); + if (collectionSupplier == null) { + return null; + } + + return collectionSupplier.apply(getBroker(), getBroker().getCurrentTransaction(), uri); } + @Override + public ExtendedXMLStreamReader getXMLStreamReader(final NodeValue nv) throws XMLStreamException, IOException { + final ExtendedXMLStreamReader reader; + if (nv.getImplementationType() == NodeValue.IN_MEMORY_NODE) { + final NodeImpl node = (NodeImpl) nv; + final org.exist.dom.memtree.DocumentImpl ownerDoc = node.getNodeType() == Node.DOCUMENT_NODE ? (org.exist.dom.memtree.DocumentImpl) node : node.getOwnerDocument(); + reader = new InMemoryXMLStreamReader(ownerDoc, ownerDoc); + } else { + final NodeProxy proxy = (NodeProxy) nv; + reader = getBroker().newXMLStreamReader(new NodeProxy(proxy.getOwnerDocument(), NodeId.DOCUMENT_NODE, proxy.getOwnerDocument().getFirstChildAddress()), false); + } + return reader; + } - public boolean inProtectedMode() - { - return( protectedDocuments != null ); + @Override + public void setProtectedDocs(final LockedDocumentMap map) { + this.protectedDocuments = map; } + @Override + public LockedDocumentMap getProtectedDocs() { + return this.protectedDocuments; + } - /** - * Should loaded documents be locked? - * - *

see #setLockDocumentsOnLoad(boolean)

- */ - public boolean lockDocumentsOnLoad() - { - return( false ); + @Override + public boolean inProtectedMode() { + return protectedDocuments != null; + } + + @Override + public boolean lockDocumentsOnLoad() { + return false; } @@ -1279,8 +1227,8 @@ public boolean lockDocumentsOnLoad() // } - public void addLockedDocument( DocumentImpl doc ) - { + @Override + public void addLockedDocument(final DocumentImpl doc) { // if (lockedDocuments != null) // lockedDocuments.add(doc); } @@ -1330,43 +1278,34 @@ public void addLockedDocument( DocumentImpl doc ) // return remaining; // } - public void setShared( boolean shared ) - { + @Override + public void setShared(final boolean shared) { isShared = shared; } - - public boolean isShared() - { - return( isShared ); + @Override + public boolean isShared() { + return isShared; } - - public void addModifiedDoc( DocumentImpl document ) - { - if( modifiedDocuments == null ) { + @Override + public void addModifiedDoc(final DocumentImpl document) { + if (modifiedDocuments == null) { modifiedDocuments = new DefaultDocumentSet(); } - modifiedDocuments.add( document ); + modifiedDocuments.add(document); } - - public void reset() - { - reset( false ); + @Override + public void reset() { + reset(false); } - - /** - * Prepare this XQueryContext to be reused. This should be called when adding an XQuery to the cache. 
- * - * @param keepGlobals - */ @Override public void reset(final boolean keepGlobals) { setRealUser(null); - if(this.pushedUserFromHttpSession) { + if (this.pushedUserFromHttpSession) { try { getBroker().popSubject(); } finally { @@ -1374,32 +1313,33 @@ public void reset(final boolean keepGlobals) { } } - if( modifiedDocuments != null ) { - + if (modifiedDocuments != null) { try { - Modification.checkFragmentation( this, modifiedDocuments ); - } - catch( final EXistException e ) { - LOG.warn( "Error while checking modified documents: " + e.getMessage(), e ); + Modification.checkFragmentation(this, modifiedDocuments); + } catch (final LockException | EXistException e) { + LOG.warn("Error while checking modified documents: " + e.getMessage(), e); } modifiedDocuments = null; } - calendar = null; + + calendar = null; implicitTimeZone = null; - + resetDocumentBuilder(); contextSequence = null; contextItem = Sequence.EMPTY_SEQUENCE; - if( !keepGlobals ) { - + if (!keepGlobals) { // do not reset the statically known documents staticDocumentPaths = null; - staticDocuments = null; + staticDocuments = null; + dynamicDocuments = null; + dynamicTextResources = null; + dynamicCollections = null; } - if( !isShared ) { + if (!isShared) { lastVar = null; } @@ -1407,299 +1347,231 @@ public void reset(final boolean keepGlobals) { closures.forEach(func -> func.setClosureVariables(null)); closures.clear(); - fragmentStack = new Stack(); + fragmentStack = new ArrayDeque<>(); callStack.clear(); protectedDocuments = null; - if( !keepGlobals ) { + if (!keepGlobals) { globalVariables.clear(); } - if( dynamicOptions != null ) { + if (dynamicOptions != null) { dynamicOptions.clear(); //clear any dynamic options } - if( !isShared ) { + if (!isShared) { watchdog.reset(); } - for( final Module module : allModules.values() ) { - module.reset( this, keepGlobals ); + for (final Module module : allModules.values()) { + module.reset(this, keepGlobals); } - if( !keepGlobals ) { + if (!keepGlobals) { 
mappedModules.clear(); } savedState.restore(); - + //remove the context-vars, subsequent execution of the query //may generate different values for the vars based on the //content of the db - XQueryContextVars.clear(); - + if (!keepGlobals) { + XQueryContextVars.clear(); + } + attributes.clear(); clearUpdateListeners(); profiler.reset(); - + + if (!keepGlobals) { + httpContext = null; + } + analyzed = false; } - /** - * Returns true if whitespace between constructed element nodes should be stripped by default. - */ - public boolean stripWhitespace() - { - return( stripWhitespace ); + @Override + public boolean stripWhitespace() { + return stripWhitespace; } - - public void setStripWhitespace( boolean strip ) - { + @Override + public void setStripWhitespace(final boolean strip) { this.stripWhitespace = strip; } - - /** - * Returns true if namespaces for constructed element and document nodes should be preserved on copy by default. - */ - public boolean preserveNamespaces() - { - return( preserveNamespaces ); + @Override + public boolean preserveNamespaces() { + return preserveNamespaces; } - - /** - * The method setPreserveNamespaces. - * - * @param preserve a boolean value - */ - public void setPreserveNamespaces( final boolean preserve ) - { + @Override + public void setPreserveNamespaces(final boolean preserve) { this.preserveNamespaces = preserve; } - - /** - * Returns true if namespaces for constructed element and document nodes - * should be inherited on copy by default. - */ - public boolean inheritNamespaces() - { - return( inheritNamespaces ); + @Override + public boolean inheritNamespaces() { + return inheritNamespaces; } - - /** - * The method setInheritNamespaces. 
- * - * @param inherit a boolean value - */ - public void setInheritNamespaces( final boolean inherit ) - { + @Override + public void setInheritNamespaces(final boolean inherit) { this.inheritNamespaces = inherit; } - - /** - * Returns true if order empty is set to greatest, otherwise false for order empty is least. - */ - public boolean orderEmptyGreatest() - { - return( orderEmptyGreatest ); + @Override + public boolean orderEmptyGreatest() { + return orderEmptyGreatest; } - - /** - * The method setOrderEmptyGreatest. - * - * @param order a boolean value - */ - public void setOrderEmptyGreatest( final boolean order ) - { + @Override + public void setOrderEmptyGreatest(final boolean order) { this.orderEmptyGreatest = order; } - - /** - * Get modules - * - * @return iterator over all modules imported into this context - */ - public Iterator getModules() - { - return( modules.values().iterator() ); + @Override + public Iterator getModules() { + return modules.values().iterator(); } - - /** - * Get root modules - * - * @return iterator over all modules registered in the entire context tree - */ - public Iterator getRootModules() - { - return( getAllModules() ); + @Override + public Iterator getRootModules() { + return getAllModules(); } - - public Iterator getAllModules() - { - return( allModules.values().iterator() ); + @Override + public Iterator getAllModules() { + return allModules.values().iterator(); } - - /** - * Get the built-in module registered for the given namespace URI. 
- * - * @param namespaceURI - * - * @return built-in module - */ - public Module getModule( String namespaceURI ) - { - return( modules.get( namespaceURI ) ); + @Override + @Nullable + public Module getModule(final String namespaceURI) { + return modules.get(namespaceURI); } - - public Module getRootModule( String namespaceURI ) - { - return( allModules.get( namespaceURI ) ); + @Override + public Module getRootModule(final String namespaceURI) { + return allModules.get(namespaceURI); } - - public void setModule( String namespaceURI, Module module ) - { - if( module == null ) { - modules.remove( namespaceURI ); // unbind the module + @Override + public void setModule(final String namespaceURI, final Module module) { + if (module == null) { + modules.remove(namespaceURI); // unbind the module } else { - modules.put( namespaceURI, module ); + modules.put(namespaceURI, module); } - setRootModule( namespaceURI, module ); + setRootModule(namespaceURI, module); } - - protected void setRootModule( String namespaceURI, Module module ) - { - if( module == null ) { - allModules.remove( namespaceURI ); // unbind the module + protected void setRootModule(final String namespaceURI, final Module module) { + if (module == null) { + allModules.remove(namespaceURI); // unbind the module return; } - if( allModules.get( namespaceURI ) != module ) { + if (allModules.get(namespaceURI) != module) { setModulesChanged(); } - allModules.put( namespaceURI, module ); + allModules.put(namespaceURI, module); } - - void setModulesChanged() - { + protected void setModulesChanged() { this.modulesChanged = true; } - - /** - * For compiled expressions: check if the source of any module imported by the current - * query has changed since compilation. 
- */ - public boolean checkModulesValid() - { - for (final Module module : allModules.values() ) { - if( !module.isInternalModule() ) { - if( !( (ExternalModule)module ).moduleIsValid( getBroker() ) ) { - LOG.debug( "Module with URI " + module.getNamespaceURI() + " has changed and needs to be reloaded" ); - return( false ); + @Override + public boolean checkModulesValid() { + for (final Module module : allModules.values()) { + if (!module.isInternalModule()) { + if (!((ExternalModule) module).moduleIsValid(getBroker())) { + if (LOG.isDebugEnabled()) { + LOG.debug("Module with URI " + module.getNamespaceURI() + " has changed and needs to be reloaded"); + } + return false; } - } - } - return( true ); + } + } + return true; } - - public void analyzeAndOptimizeIfModulesChanged( Expression expr ) throws XPathException - { - if (analyzed) - {return;} - analyzed = true; - for (final Module module : expr.getContext().modules.values()) { - if( !module.isInternalModule() ) { - final Expression root = ((ExternalModule)module).getRootExpression(); - ((ExternalModule)module).getContext().analyzeAndOptimizeIfModulesChanged(root); + @Override + public void analyzeAndOptimizeIfModulesChanged(final Expression expr) throws XPathException { + if (analyzed) { + return; + } + analyzed = true; + for (final Module module : expr.getContext().modules.values()) { + if (!module.isInternalModule()) { + final Expression root = ((ExternalModule) module).getRootExpression(); + ((ExternalModule) module).getContext().analyzeAndOptimizeIfModulesChanged(root); } - } - expr.analyze( new AnalyzeContextInfo() ); + } + expr.analyze(new AnalyzeContextInfo()); - if( optimizationsEnabled() ) { - final Optimizer optimizer = new Optimizer( this ); - expr.accept( optimizer ); + if (optimizationsEnabled()) { + final Optimizer optimizer = new Optimizer(this); + expr.accept(optimizer); - if( optimizer.hasOptimized() ) { - reset( true ); - expr.resetState( true ); - expr.analyze( new AnalyzeContextInfo() ); + if 
(optimizer.hasOptimized()) { + reset(true); + expr.resetState(true); + expr.analyze(new AnalyzeContextInfo()); } } modulesChanged = false; } - - /** - * Load a built-in module from the given class name and assign it to the namespace URI. The specified class should be a subclass of {@link - * Module}. The method will try to instantiate the class. If the class is not found or an exception is thrown, the method will silently fail. The - * namespace URI has to be equal to the namespace URI declared by the module class. Otherwise, the module is not loaded. - * - * @param namespaceURI - * @param moduleClass - * - * @return Module - */ - public Module loadBuiltInModule( String namespaceURI, String moduleClass ) - { + @Override + @Nullable + public Module loadBuiltInModule(final String namespaceURI, final String moduleClass) { Module module = null; - if (namespaceURI != null) - {module = getModule( namespaceURI );} + if (namespaceURI != null) { + module = getModule(namespaceURI); + } - if( module != null ) { -// LOG.debug("module " + namespaceURI + " is already present"); - return( module ); + if (module != null) { + if (LOG.isDebugEnabled()) { + LOG.debug("module " + namespaceURI + " is already present"); + } + return module; } - return( initBuiltInModule( namespaceURI, moduleClass ) ); + return initBuiltInModule(namespaceURI, moduleClass); } - - @SuppressWarnings( "unchecked" ) - protected Module initBuiltInModule( String namespaceURI, String moduleClass ) - { + @SuppressWarnings("unchecked") + Module initBuiltInModule(final String namespaceURI, final String moduleClass) { Module module = null; - try { - // lookup the class final ClassLoader existClassLoader = getBroker().getBrokerPool().getClassLoader(); final Class mClass = Class.forName(moduleClass, false, existClassLoader); - if( !( Module.class.isAssignableFrom( mClass ) ) ) { - LOG.info( "failed to load module. " + moduleClass + " is not an instance of org.exist.xquery.Module." 
); - return( null ); + if (!(Module.class.isAssignableFrom(mClass))) { + LOG.info("failed to load module. " + moduleClass + " is not an instance of org.exist.xquery.Module."); + return null; } //instantiateModule( namespaceURI, (Class)mClass ); // INOTE: expathrepo - module = instantiateModule( namespaceURI, (Class)mClass, (Map>>) getBroker().getConfiguration().getProperty(PROPERTY_MODULE_PARAMETERS)); - //LOG.debug("module " + module.getNamespaceURI() + " loaded successfully."); - } - catch( final ClassNotFoundException e ) { - LOG.warn( "module class " + moduleClass + " not found. Skipping..." ); + module = instantiateModule(namespaceURI, (Class) mClass, (Map>>) getBroker().getConfiguration().getProperty(PROPERTY_MODULE_PARAMETERS)); + if (LOG.isDebugEnabled()) { + LOG.debug("module " + module.getNamespaceURI() + " loaded successfully."); + } + } catch (final ClassNotFoundException e) { + LOG.warn("module class " + moduleClass + " not found. Skipping..."); } - return( module ); + return module; } - - protected Module instantiateModule( String namespaceURI, Class mClazz, Map>> moduleParameters) { + @SuppressWarnings("unchecked") + private Module instantiateModule(final String namespaceURI, final Class mClazz, + final Map>> moduleParameters) { Module module = null; - try { final MethodHandles.Lookup lookup = MethodHandles.lookup(); final MethodHandle methodHandle = lookup.findConstructor(mClazz, methodType(void.class, Map.class)); @@ -1709,18 +1581,22 @@ protected Module instantiateModule( String namespaceURI, Class mClazz, M methodHandle.type().erase(), methodHandle, methodHandle.type()).getTarget().invokeExact(); module = ctor.apply(moduleParameters.get(namespaceURI)); - if(namespaceURI != null && !module.getNamespaceURI().equals(namespaceURI)) { - LOG.warn( "the module declares a different namespace URI. 
Expected: " + namespaceURI + " found: " + module.getNamespaceURI() ); - return( null ); + if (namespaceURI != null && !module.getNamespaceURI().equals(namespaceURI)) { + LOG.warn("the module declares a different namespace URI. Expected: " + namespaceURI + " found: " + module.getNamespaceURI()); + return null; } - if((getPrefixForURI( module.getNamespaceURI() ) == null) && (module.getDefaultPrefix().length() > 0)) { - declareNamespace( module.getDefaultPrefix(), module.getNamespaceURI() ); + if (getPrefixForURI(module.getNamespaceURI()) == null && !module.getDefaultPrefix().isEmpty()) { + declareNamespace(module.getDefaultPrefix(), module.getNamespaceURI()); } modules.put(module.getNamespaceURI(), module); allModules.put(module.getNamespaceURI(), module); - } catch(final Throwable e) { + + if (module instanceof InternalModule) { + ((InternalModule) module).prepare(this); + } + } catch (final Throwable e) { if (e instanceof InterruptedException) { // NOTE: must set interrupted flag Thread.currentThread().interrupt(); @@ -1728,145 +1604,92 @@ protected Module instantiateModule( String namespaceURI, Class mClazz, M LOG.warn("error while instantiating module class " + mClazz.getName(), e); } - + return module; } - /** - * Declare a user-defined function. All user-defined functions are kept in a single hash map. - * - * @param function - * - * @throws XPathException - */ - public void declareFunction( UserDefinedFunction function ) throws XPathException - { + @Override + public void declareFunction(final UserDefinedFunction function) throws XPathException { // TODO: redeclaring functions should be forbidden. however, throwing an // exception will currently break util:eval. 
- - final QName name = function.getSignature().getName(); - - if(Namespaces.XML_NS.equals(name.getNamespaceURI())) { - throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + Namespaces.XML_NS + "'" ); + + final QName name = function.getSignature().getName(); + + if (XML_NS.equals(name.getNamespaceURI())) { + throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + XML_NS + "'"); } - if(Namespaces.SCHEMA_NS.equals(name.getNamespaceURI())) { + if (Namespaces.SCHEMA_NS.equals(name.getNamespaceURI())) { throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + Namespaces.SCHEMA_NS + "'"); } - if(Namespaces.SCHEMA_INSTANCE_NS.equals(name.getNamespaceURI())) { + if (Namespaces.SCHEMA_INSTANCE_NS.equals(name.getNamespaceURI())) { throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + Namespaces.SCHEMA_INSTANCE_NS + "'"); } - if(Namespaces.XPATH_FUNCTIONS_NS.equals(name.getNamespaceURI())) { + if (Namespaces.XPATH_FUNCTIONS_NS.equals(name.getNamespaceURI())) { throw new XPathException(function, ErrorCodes.XQST0045, "Function '" + name + "' is in the forbidden namespace '" + Namespaces.XPATH_FUNCTIONS_NS + "'"); } - if("".equals( name.getNamespaceURI())) { + if (name.getNamespaceURI().isEmpty()) { throw new XPathException(function, ErrorCodes.XQST0060, "Every declared function name must have a non-null namespace URI, but function '" + name + "' does not meet this requirement."); } - declaredFunctions.put( function.getSignature().getFunctionId(), function ); + declaredFunctions.put(function.getSignature().getFunctionId(), function); // if (declaredFunctions.get(function.getSignature().getFunctionId()) == null) // declaredFunctions.put(function.getSignature().getFunctionId(), function); // else // throw new XPathException("XQST0034: function 
" + function.getName() + " is already defined with the same arity"); } - - /** - * Resolve a user-defined function. - * - * @param name - * @param argCount - * - * @return user-defined function - * - * @throws XPathException - */ - public UserDefinedFunction resolveFunction( QName name, int argCount ) throws XPathException - { - final FunctionId id = new FunctionId( name, argCount ); - final UserDefinedFunction func = declaredFunctions.get( id ); - return( func ); + @Override + @Nullable + public UserDefinedFunction resolveFunction(final QName name, final int argCount) throws XPathException { + final FunctionId id = new FunctionId(name, argCount); + return declaredFunctions.get(id); } + @Override + public Iterator getSignaturesForFunction(final QName name) { + final ArrayList signatures = new ArrayList<>(2); - public Iterator getSignaturesForFunction( QName name ) - { - final ArrayList signatures = new ArrayList( 2 ); - - for( final UserDefinedFunction func : declaredFunctions.values() ) { - - if( func.getName().equals( name ) ) { - signatures.add( func.getSignature() ); + for (final UserDefinedFunction func : declaredFunctions.values()) { + if (func.getName().equals(name)) { + signatures.add(func.getSignature()); } } - return( signatures.iterator() ); + return signatures.iterator(); } - - public Iterator localFunctions() - { - return( declaredFunctions.values().iterator() ); + @Override + public Iterator localFunctions() { + return declaredFunctions.values().iterator(); } - - /** - * Declare a local variable. This is called by variable binding expressions like "let" and "for". 
- * - * @param var - * - * @return LocalVariable - * - * @throws XPathException - */ - public LocalVariable declareVariableBinding( LocalVariable var ) throws XPathException - { - if( lastVar == null ) { + @Override + public LocalVariable declareVariableBinding(final LocalVariable var) throws XPathException { + if (lastVar == null) { lastVar = var; } else { - lastVar.addAfter( var ); + lastVar.addAfter(var); lastVar = var; } - var.setStackPosition( getCurrentStackSize() ); - return( var ); + var.setStackPosition(getCurrentStackSize()); + return var; } - - /** - * Declare a global variable as by "declare variable". - * - * @param var - * - * @return Variable - * - * @throws XPathException - */ - public Variable declareGlobalVariable( Variable var ) throws XPathException - { - globalVariables.put( var.getQName(), var ); - var.setStackPosition( getCurrentStackSize() ); - return( var ); + @Override + public Variable declareGlobalVariable(final Variable var) { + globalVariables.put(var.getQName(), var); + var.setStackPosition(getCurrentStackSize()); + return var; } - public void undeclareGlobalVariable( QName name ) { + @Override + public void undeclareGlobalVariable(final QName name) { globalVariables.remove(name); } - /** - * Declare a user-defined variable. - * - *

The value argument is converted into an XPath value (@see XPathUtil#javaObjectToXPath(Object)).

- * - * @param qname the qualified name of the new variable. Any namespaces should have been declared before. - * @param value a Java object, representing the fixed value of the variable - * - * @return the created Variable object - * - * @throws XPathException if the value cannot be converted into a known XPath value or the variable QName references an unknown - * namespace-prefix. - */ @Override public Variable declareVariable(final String qname, final Object value) throws XPathException { try { @@ -1876,73 +1699,61 @@ public Variable declareVariable(final String qname, final Object value) throws X } } - - public Variable declareVariable( QName qn, Object value ) throws XPathException - { + @Override + public Variable declareVariable(final QName qn, final Object value) throws XPathException { Variable var; - final Module module = getModule( qn.getNamespaceURI() ); + final Module module = getModule(qn.getNamespaceURI()); - if( module != null ) { - var = module.declareVariable( qn, value ); - return( var ); + if (module != null) { + var = module.declareVariable(qn, value); + return var; } - final Sequence val = XPathUtil.javaObjectToXPath( value, this ); - var = globalVariables.get( qn ); + final Sequence val = XPathUtil.javaObjectToXPath(value, this); + var = globalVariables.get(qn); - if( var == null ) { - var = new VariableImpl( qn ); - globalVariables.put( qn, var ); + if (var == null) { + var = new VariableImpl(qn); + globalVariables.put(qn, var); } - if( var.getSequenceType() != null ) { + if (var.getSequenceType() != null) { int actualCardinality; - if( val.isEmpty() ) { + if (val.isEmpty()) { actualCardinality = Cardinality.EMPTY; - } else if( val.hasMany() ) { + } else if (val.hasMany()) { actualCardinality = Cardinality.MANY; } else { actualCardinality = Cardinality.ONE; } //Type.EMPTY is *not* a subtype of other types ; checking cardinality first - if( !Cardinality.checkCardinality( var.getSequenceType().getCardinality(), actualCardinality ) ) { - throw( 
new XPathException( "XPTY0004: Invalid cardinality for variable $" + var.getQName() + ". Expected " + Cardinality.getDescription( var.getSequenceType().getCardinality() ) + ", got " + Cardinality.getDescription( actualCardinality ) ) ); + if (!Cardinality.checkCardinality(var.getSequenceType().getCardinality(), actualCardinality)) { + throw new XPathException("XPTY0004: Invalid cardinality for variable $" + var.getQName() + ". Expected " + Cardinality.getDescription(var.getSequenceType().getCardinality()) + ", got " + Cardinality.getDescription(actualCardinality)); } //TODO : ignore nodes right now ; they are returned as xs:untypedAtomicType - if( !Type.subTypeOf( var.getSequenceType().getPrimaryType(), Type.NODE ) ) { - - if( !val.isEmpty() && !Type.subTypeOf( val.getItemType(), var.getSequenceType().getPrimaryType() ) ) { - throw( new XPathException( "XPTY0004: Invalid type for variable $" + var.getQName() + ". Expected " + Type.getTypeName( var.getSequenceType().getPrimaryType() ) + ", got " + Type.getTypeName( val.getItemType() ) ) ); + if (!Type.subTypeOf(var.getSequenceType().getPrimaryType(), Type.NODE)) { + if (!val.isEmpty() && !Type.subTypeOf(val.getItemType(), var.getSequenceType().getPrimaryType())) { + throw new XPathException("XPTY0004: Invalid type for variable $" + var.getQName() + ". Expected " + Type.getTypeName(var.getSequenceType().getPrimaryType()) + ", got " + Type.getTypeName(val.getItemType())); } //Here is an attempt to process the nodes correctly } else { //Same as above : we probably may factorize - if( !val.isEmpty() && !Type.subTypeOf( val.getItemType(), var.getSequenceType().getPrimaryType() ) ) { - throw( new XPathException( "XPTY0004: Invalid type for variable $" + var.getQName() + ". 
Expected " + Type.getTypeName( var.getSequenceType().getPrimaryType() ) + ", got " + Type.getTypeName( val.getItemType() ) ) ); + if (!val.isEmpty() && !Type.subTypeOf(val.getItemType(), var.getSequenceType().getPrimaryType())) { + throw new XPathException("XPTY0004: Invalid type for variable $" + var.getQName() + ". Expected " + Type.getTypeName(var.getSequenceType().getPrimaryType()) + ", got " + Type.getTypeName(val.getItemType())); } } } //TODO : should we allow global variable *re*declaration ? - var.setValue( val ); - return( var ); + var.setValue(val); + return var; } - - /** - * Try to resolve a variable. - * - * @param name the qualified name of the variable as string - * - * @return the declared Variable object - * - * @throws XPathException if the variable is unknown - */ @Override public Variable resolveVariable(final String name) throws XPathException { try { @@ -1953,288 +1764,205 @@ public Variable resolveVariable(final String name) throws XPathException { } } - /** - * Try to resolve a variable. 
- * - * @param qname the qualified name of the variable - * - * @return the declared Variable object - * - * @throws XPathException if the variable is unknown - */ - public Variable resolveVariable( QName qname ) throws XPathException - { - Variable var; - + @Override + public Variable resolveVariable(final QName qname) throws XPathException { // check if the variable is declared local - var = resolveLocalVariable( qname ); + Variable var = resolveLocalVariable(qname); // check if the variable is declared in a module - if( var == null ) { - final Module module = getModule( qname.getNamespaceURI() ); + if (var == null) { + final Module module = getModule(qname.getNamespaceURI()); - if( module != null ) { - var = module.resolveVariable( qname ); + if (module != null) { + var = module.resolveVariable(qname); } } // check if the variable is declared global - if( var == null ) { - var = globalVariables.get( qname ); + if (var == null) { + var = globalVariables.get(qname); } //if (var == null) // throw new XPathException("variable $" + qname + " is not bound"); - return( var ); + return var; } - protected Variable resolveGlobalVariable(QName qname) { + Variable resolveGlobalVariable(final QName qname) { return globalVariables.get(qname); } - protected Variable resolveLocalVariable( QName qname ) throws XPathException - { - final LocalVariable end = contextStack.isEmpty() ? 
null : contextStack.peek(); - - for( LocalVariable var = lastVar; var != null; var = var.before ) { - - if( var == end ) { - return( null ); + protected Variable resolveLocalVariable(final QName qname) throws XPathException { + final LocalVariable end = contextStack.peek(); + for (LocalVariable var = lastVar; var != null; var = var.before) { + if (var == end) { + return null; } - - if( qname.equals( var.getQName() ) ) { - return( var ); + if (qname.equals(var.getQName())) { + return var; } } - return( null ); + return null; } - public boolean isVarDeclared( QName qname ) - { - final Module module = getModule( qname.getNamespaceURI() ); - - if( module != null ) { - - if( module.isVarDeclared( qname ) ) { - return( true ); + @Override + public boolean isVarDeclared(final QName qname) { + final Module module = getModule(qname.getNamespaceURI()); + if (module != null) { + if (module.isVarDeclared(qname)) { + return true; } } - return( globalVariables.get( qname ) != null ); + return globalVariables.get(qname) != null; } - - public Map getVariables() - { - final Map variables = new HashMap(); - - variables.putAll( globalVariables ); - - final LocalVariable end = contextStack.isEmpty() ? null : (LocalVariable)contextStack.peek(); - - for( LocalVariable var = lastVar; var != null; var = var.before ) { - - if( var == end ) { + @Override + public Map getVariables() { + final Map variables = new HashMap<>(globalVariables); + LocalVariable end = contextStack.peek(); + for (LocalVariable var = lastVar; var != null; var = var.before) { + if (var == end) { break; } - - variables.put( var.getQName(), var ); + variables.put(var.getQName(), var); } - - return( variables ); + return variables; } + @Override public Map getLocalVariables() { - final Map variables = new HashMap(); - - final LocalVariable end = contextStack.isEmpty() ? 
null : (LocalVariable)contextStack.peek(); - - for ( LocalVariable var = lastVar; var != null; var = var.before ) { - - if ( var == end ) { + final Map variables = new HashMap<>(); + LocalVariable end = contextStack.peek(); + for (LocalVariable var = lastVar; var != null; var = var.before) { + if (var == end) { break; } - - variables.put( var.getQName(), var ); + variables.put(var.getQName(), var); } - - return ( variables ); + return variables; } /** * Return a copy of all currently visible local variables. * Used by {@link InlineFunction} to implement closures. - * + * * @return currently visible local variables as a stack */ public List getLocalStack() { - List closure = null; + final LocalVariable end = contextStack.peek(); + for (LocalVariable var = lastVar; var != null; var = var.before) { - final LocalVariable end = contextStack.isEmpty() ? null : contextStack.peek(); - - for ( LocalVariable var = lastVar; var != null; var = var.before ) { - - if ( var == end ) { + if (var == end) { break; } if (closure == null) { closure = new ArrayList<>(6); } - closure.add( new ClosureVariable(var) ); + closure.add(new ClosureVariable(var)); } - return ( closure ); + return closure; } - - public Map getGlobalVariables() { - final Map variables = new HashMap(); - variables.putAll( globalVariables ); - - return( variables ); + @Override + public Map getGlobalVariables() { + return new HashMap<>(globalVariables); } - + /** * Restore a saved stack of local variables. Used to implement closures. - * - * @param stack - * @throws XPathException + * + * @param stack the stack of local variables + * @throws XPathException if the stack cannot be restored */ - public void restoreStack(List stack) throws XPathException { + public void restoreStack(final List stack) throws XPathException { for (int i = stack.size() - 1; i > -1; i--) { declareVariableBinding(new ClosureVariable(stack.get(i))); } } - - /** - * Turn on/off XPath 1.0 backwards compatibility. - * - *

If turned on, comparison expressions will behave like in XPath 1.0, i.e. if any one of the operands is a number, the other operand will be - * cast to a double.

- * - * @param backwardsCompatible - */ - public void setBackwardsCompatibility( boolean backwardsCompatible ) - { + + @Override + public void setBackwardsCompatibility(boolean backwardsCompatible) { this.backwardsCompatible = backwardsCompatible; } - - /** - * XPath 1.0 backwards compatibility turned on? - * - *

In XPath 1.0 compatible mode, additional conversions will be applied to values if a numeric value is expected.

- */ - public boolean isBackwardsCompatible() - { - return( this.backwardsCompatible ); + @Override + public boolean isBackwardsCompatible() { + return this.backwardsCompatible; } - - public boolean isRaiseErrorOnFailedRetrieval() - { - return( raiseErrorOnFailedRetrieval ); + @Override + public boolean isRaiseErrorOnFailedRetrieval() { + return raiseErrorOnFailedRetrieval; } - public Database getDatabase() { - return db; + return db; } - /** - * Get the DBBroker instance used for the current query. - * - *

The DBBroker is the main database access object, providing access to all internal database functions.

- * - * @return DBBroker instance - */ + @Override public DBBroker getBroker() { - return db.getActiveBroker(); + return db.getActiveBroker(); } /** * Get the user which executes the current query. * - * @return user - * @deprecated use getCurrentSubject + * @return user + * @deprecated Use {@link #getSubject()}. */ + @Deprecated public Subject getUser() { return getSubject(); } - /** - * Get the subject which executes the current query. - * - * @return subject - */ + @Override public Subject getSubject() { return getBroker().getCurrentSubject(); } - /** * If there is a HTTP Session, and a User has been stored in the session then this will return the user object from the session. * - * @return The user or null if there is no session or no user + * @return The user or null if there is no session or no user */ - public Subject getUserFromHttpSession() - { - final RequestModule myModule = (RequestModule)getModule( RequestModule.NAMESPACE_URI ); - - //Sanity check : one may *not* want to bind the module ! - if( myModule == null ) { - return( null ); - } - - Variable var = null; - - try { - var = myModule.resolveVariable( RequestModule.REQUEST_VAR ); - } - catch( final XPathException xpe ) { - return( null ); - } + Subject getUserFromHttpSession() { + final Optional maybeRequest = Optional.ofNullable(getHttpContext()) + .map(HttpContext::getRequest); - if( ( var != null ) && ( var.getValue() != null ) ) { - - if( var.getValue().getItemType() == Type.JAVA_OBJECT ) { - final JavaObjectValue reqValue = (JavaObjectValue)var.getValue().itemAt( 0 ); - - if( reqValue.getObject() instanceof RequestWrapper) { - final RequestWrapper req = (RequestWrapper) reqValue.getObject(); - final Object user = req.getAttribute(HTTP_REQ_ATTR_USER); - final Object passAttr = req.getAttribute(HTTP_REQ_ATTR_PASS); - if (user != null) { - final String password = passAttr == null ? 
null : passAttr.toString(); - try { - return getBroker().getBrokerPool().getSecurityManager().authenticate(user.toString(), password); - } catch (final AuthenticationException e) { - LOG.error("User can not be authenticated: " + user.toString()); - } - } else { - if (req.getSession() != null) { - return (Subject) req.getSession().getAttribute(HTTP_SESSIONVAR_XMLDB_USER); - } - } + if (maybeRequest.isPresent()) { + final RequestWrapper request = maybeRequest.get(); + final Object user = request.getAttribute(HTTP_REQ_ATTR_USER); + final Object passAttr = request.getAttribute(HTTP_REQ_ATTR_PASS); + if (user != null) { + final String password = passAttr == null ? null : passAttr.toString(); + try { + return getBroker().getBrokerPool().getSecurityManager().authenticate(user.toString(), password); + } catch (final AuthenticationException e) { + LOG.error("User can not be authenticated: " + user.toString()); + } + } else { + final Optional maybeSession = Optional.ofNullable(getHttpContext()) + .map(HttpContext::getSession); + if (maybeSession.isPresent()) { + return (Subject) maybeSession.get().getAttribute(HTTP_SESSIONVAR_XMLDB_USER); } } } - return( null ); + return null; } - /** The builder used for creating in-memory document fragments. */ - private MemTreeBuilder documentBuilder = null; - /** - * Get the document builder currently used for creating temporary document fragments. A new document builder will be created on demand. - * - * @return document builder + * The builder used for creating in-memory document fragments. 
*/ + private MemTreeBuilder documentBuilder = null; + @Override public MemTreeBuilder getDocumentBuilder() { - if(documentBuilder == null) { + if (documentBuilder == null) { documentBuilder = new MemTreeBuilder(this); documentBuilder.startDocument(); } @@ -2242,214 +1970,143 @@ public MemTreeBuilder getDocumentBuilder() { } @Override - public MemTreeBuilder getDocumentBuilder(boolean explicitCreation) { - if(documentBuilder == null) { + public MemTreeBuilder getDocumentBuilder(final boolean explicitCreation) { + if (documentBuilder == null) { documentBuilder = new MemTreeBuilder(this); documentBuilder.startDocument(explicitCreation); } return documentBuilder; } - + private void resetDocumentBuilder() { setDocumentBuilder(null); } - - private void setDocumentBuilder(MemTreeBuilder documentBuilder) { + + private void setDocumentBuilder(final MemTreeBuilder documentBuilder) { this.documentBuilder = documentBuilder; } - - - - /** - * Returns the shared name pool used by all in-memory documents which are created within this query context. Create a name pool for every document - * would be a waste of memory, especially since it is likely that the documents contain elements or attributes with similar names. 
- * - * @return the shared name pool - */ - public NamePool getSharedNamePool() - { - if( sharedNamePool == null ) { + @Override + public NamePool getSharedNamePool() { + if (sharedNamePool == null) { sharedNamePool = new NamePool(); } - return( sharedNamePool ); + return sharedNamePool; } - - /* DebuggeeJoint methods */ - - public XQueryContext getContext() - { - return( null ); + @Override + public XQueryContext getContext() { + return null; } - public void prologEnter(Expression expr) { + @Override + public void prologEnter(final Expression expr) { if (debuggeeJoint != null) { debuggeeJoint.prologEnter(expr); } } - public void expressionStart( Expression expr ) throws TerminatedException - { - if( debuggeeJoint != null ) { - debuggeeJoint.expressionStart( expr ); + @Override + public void expressionStart(final Expression expr) throws TerminatedException { + if (debuggeeJoint != null) { + debuggeeJoint.expressionStart(expr); } } - - public void expressionEnd( Expression expr ) - { - if( debuggeeJoint != null ) { - debuggeeJoint.expressionEnd( expr ); + @Override + public void expressionEnd(final Expression expr) { + if (debuggeeJoint != null) { + debuggeeJoint.expressionEnd(expr); } } - - public void stackEnter( Expression expr ) throws TerminatedException - { - if( debuggeeJoint != null ) { - debuggeeJoint.stackEnter( expr ); + @Override + public void stackEnter(final Expression expr) throws TerminatedException { + if (debuggeeJoint != null) { + debuggeeJoint.stackEnter(expr); } } - - public void stackLeave( Expression expr ) - { - if( debuggeeJoint != null ) { - debuggeeJoint.stackLeave( expr ); + @Override + public void stackLeave(final Expression expr) { + if (debuggeeJoint != null) { + debuggeeJoint.stackLeave(expr); } } - - /* Methods delegated to the watchdog */ - - public void proceed() throws TerminatedException - { - getWatchDog().proceed( null ); + @Override + public void proceed() throws TerminatedException { + getWatchDog().proceed(null); } - - 
public void proceed( Expression expr ) throws TerminatedException - { - getWatchDog().proceed( expr ); + @Override + public void proceed(final Expression expr) throws TerminatedException { + getWatchDog().proceed(expr); } - - public void proceed( Expression expr, MemTreeBuilder builder ) throws TerminatedException - { - getWatchDog().proceed( expr, builder ); + @Override + public void proceed(final Expression expr, final MemTreeBuilder builder) throws TerminatedException { + getWatchDog().proceed(expr, builder); } - - public void setWatchDog( XQueryWatchDog watchdog ) - { + @Override + public void setWatchDog(final XQueryWatchDog watchdog) { this.watchdog = watchdog; } - - public XQueryWatchDog getWatchDog() - { - return( watchdog ); + @Override + public XQueryWatchDog getWatchDog() { + return watchdog; } - - /** - * Push any document fragment created within the current execution context on the stack. - */ - public void pushDocumentContext() - { + @Override + public void pushDocumentContext() { fragmentStack.push(getDocumentBuilder()); resetDocumentBuilder(); } - - public void popDocumentContext() - { - if( !fragmentStack.isEmpty() ) { + @Override + public void popDocumentContext() { + if (!fragmentStack.isEmpty()) { setDocumentBuilder(fragmentStack.pop()); } } - - /** - * Set the base URI for the evaluation context. - * - *

This is the URI returned by the fn:base-uri() function.

- * - * @param uri - */ - public void setBaseURI( AnyURIValue uri ) - { - setBaseURI( uri, false ); + @Override + public void setBaseURI(final AnyURIValue uri) { + setBaseURI(uri, false); } - - /** - * Set the base URI for the evaluation context. - * - *

A base URI specified via the base-uri directive in the XQuery prolog overwrites any other setting.

- * - * @param uri - * @param setInProlog - */ - public void setBaseURI( AnyURIValue uri, boolean setInProlog ) - { - if( baseURISetInProlog ) { + @Override + public void setBaseURI(final AnyURIValue uri, final boolean setInProlog) { + if (baseURISetInProlog) { return; } - if( uri == null ) { + if (uri == null) { baseURI = AnyURIValue.EMPTY_URI; } - baseURI = uri; + baseURI = uri; baseURISetInProlog = setInProlog; } - - /** - * Set the path to a base directory where modules should be loaded from. Relative module paths will be resolved against this directory. The - * property is usually set by the XQueryServlet or XQueryGenerator, but can also be specified manually. - * - * @param path - */ @Override - public void setModuleLoadPath(String path) { + public void setModuleLoadPath(final String path) { this.moduleLoadPath = path; } - @Override public String getModuleLoadPath() { return moduleLoadPath; } - - /** - * The method isBaseURIDeclared. - * - * @return a boolean value - */ - public boolean isBaseURIDeclared() - { - if( ( baseURI == null ) || baseURI.equals( AnyURIValue.EMPTY_URI ) ) { - return( false ); - } else { - return( true ); - } + @Override + public boolean isBaseURIDeclared() { + return baseURI != null && !baseURI.equals(AnyURIValue.EMPTY_URI); } - - /** - * Get the base URI of the evaluation context. - * - *

This is the URI returned by the fn:base-uri() function.

- * - * @return base URI of the evaluation context - * - * @exception XPathException if an error occurs - */ - public AnyURIValue getBaseURI() throws XPathException - { + @Override + public AnyURIValue getBaseURI() throws XPathException { // the base URI in the static context is established according to the // principles outlined in [RFC3986] Section 5.1—that is, it defaults // first to the base URI of the encapsulating entity, then to the URI @@ -2462,148 +2119,110 @@ public AnyURIValue getBaseURI() throws XPathException // an absolute base URI; however, the base URI in the static context // is then undefined, and any attempt to use its value may result in // an error [err:XPST0001]. - if( ( baseURI == null ) || baseURI.equals( AnyURIValue.EMPTY_URI ) ) { - //throw new XPathException("err:XPST0001: base URI of the static context has not been assigned a value."); - // We catch and resolve this to the XmlDbURI.ROOT_COLLECTION_URI - // at least in DocumentImpl so maybe we should do it here./ljo - } - return( baseURI ); +// if ((baseURI == null) || baseURI.equals(AnyURIValue.EMPTY_URI)) { +// //throw new XPathException(ErrorCodes.XPST0001, "Base URI of the static context has not been assigned a value."); +// // We catch and resolve this to the XmlDbURI.ROOT_COLLECTION_URI +// // at least in DocumentImpl so maybe we should do it here./ljo +// } + return baseURI; } - - /** - * Set the current context position, i.e. the position of the currently processed item in the context sequence. This value is required by some - * expressions, e.g. fn:position(). - * - * @param pos - * @param sequence - */ - public void setContextSequencePosition( int pos, Sequence sequence ) - { + @Override + public void setContextSequencePosition(final int pos, final Sequence sequence) { contextPosition = pos; contextSequence = sequence; } - - /** - * Get the current context position, i.e. the position of the currently processed item in the context sequence. 
- * - * @return current context position - */ - public int getContextPosition() - { - return( contextPosition ); - } - - - public Sequence getContextSequence() - { - return( contextSequence ); + @Override + public int getContextPosition() { + return contextPosition; } - - public void pushInScopeNamespaces() - { - pushInScopeNamespaces( true ); + @Override + public Sequence getContextSequence() { + return contextSequence; } - - /** - * Push all in-scope namespace declarations onto the stack. - * - * @param inherit - */ - @SuppressWarnings( "unchecked" ) - public void pushInScopeNamespaces( boolean inherit ) - { + @Override + public void pushInScopeNamespaces() { + pushInScopeNamespaces(true); + } + + @Override + public void pushInScopeNamespaces(final boolean inherit) { //TODO : push into an inheritedInScopeNamespaces HashMap... and return an empty HashMap - final HashMap m = (HashMap)inScopeNamespaces.clone(); - final HashMap p = (HashMap)inScopePrefixes.clone(); - namespaceStack.push( inheritedInScopeNamespaces ); - namespaceStack.push( inheritedInScopePrefixes ); - namespaceStack.push( inScopeNamespaces ); - namespaceStack.push( inScopePrefixes ); + final Map m = new HashMap<>(inScopeNamespaces); + final Map p = new HashMap<>(inScopePrefixes); + namespaceStack.push(inheritedInScopeNamespaces); + namespaceStack.push(inheritedInScopePrefixes); + namespaceStack.push(inScopeNamespaces); + namespaceStack.push(inScopePrefixes); //Current namespaces now become inherited just like the previous inherited ones - if( inherit ) { - inheritedInScopeNamespaces = (HashMap)inheritedInScopeNamespaces.clone(); - inheritedInScopeNamespaces.putAll( m ); - inheritedInScopePrefixes = (HashMap)inheritedInScopePrefixes.clone(); - inheritedInScopePrefixes.putAll( p ); + if (inherit) { + inheritedInScopeNamespaces = new HashMap<>(inheritedInScopeNamespaces); + inheritedInScopeNamespaces.putAll(m); + inheritedInScopePrefixes = new HashMap<>(inheritedInScopePrefixes); + 
inheritedInScopePrefixes.putAll(p); } else { - inheritedInScopeNamespaces = new HashMap(); - inheritedInScopePrefixes = new HashMap(); + inheritedInScopeNamespaces = new HashMap<>(); + inheritedInScopePrefixes = new HashMap<>(); } //TODO : consider dynamic instanciation - inScopeNamespaces = new HashMap(); - inScopePrefixes = new HashMap(); + inScopeNamespaces = new HashMap<>(); + inScopePrefixes = new HashMap<>(); } - - public void popInScopeNamespaces() - { - inScopePrefixes = namespaceStack.pop(); - inScopeNamespaces = namespaceStack.pop(); - inheritedInScopePrefixes = namespaceStack.pop(); + @Override + public void popInScopeNamespaces() { + inScopePrefixes = namespaceStack.pop(); + inScopeNamespaces = namespaceStack.pop(); + inheritedInScopePrefixes = namespaceStack.pop(); inheritedInScopeNamespaces = namespaceStack.pop(); } - - @SuppressWarnings( "unchecked" ) - public void pushNamespaceContext() - { - HashMap m = (HashMap)staticNamespaces.clone(); - HashMap p = (HashMap)staticPrefixes.clone(); - namespaceStack.push( staticNamespaces ); - namespaceStack.push( staticPrefixes ); + @Override + public void pushNamespaceContext() { + final Map m = new HashMap<>(staticNamespaces); + final Map p = new HashMap<>(staticPrefixes); + namespaceStack.push(staticNamespaces); + namespaceStack.push(staticPrefixes); staticNamespaces = m; - staticPrefixes = p; + staticPrefixes = p; } - - public void popNamespaceContext() - { - staticPrefixes = namespaceStack.pop(); + @Override + public void popNamespaceContext() { + staticPrefixes = namespaceStack.pop(); staticNamespaces = namespaceStack.pop(); } - - /** - * Returns the last variable on the local variable stack. The current variable context can be restored by passing the return value to {@link - * #popLocalVariables(LocalVariable)}. 
- * - * @param newContext - * - * @return last variable on the local variable stack - */ - public LocalVariable markLocalVariables( boolean newContext ) - { - if( newContext ) { - - if( lastVar == null ) { - lastVar = new LocalVariable( QName.EMPTY_QNAME ); + @Override + public LocalVariable markLocalVariables(final boolean newContext) { + if (newContext) { + if (lastVar == null) { + lastVar = new LocalVariable(QName.EMPTY_QNAME); } - contextStack.push( lastVar ); + contextStack.push(lastVar); } variableStackSize++; - return( lastVar ); + return lastVar; } - - public void popLocalVariables(LocalVariable var) { + @Override + public void popLocalVariables(@Nullable final LocalVariable var) { popLocalVariables(var, null); } /** * Restore the local variable stack to the position marked by variable var. * - * @param var - * + * @param var only clear variables after this variable, or null + * @param resultSeq the result sequence */ - public void popLocalVariables(LocalVariable var, Sequence resultSeq) - { - if( var != null ) { + public void popLocalVariables(@Nullable final LocalVariable var, final Sequence resultSeq) { + if (var != null) { // clear all variables registered after var. they should be out of scope. LocalVariable outOfScope = var.after; while (outOfScope != null) { @@ -2615,7 +2234,7 @@ public void popLocalVariables(LocalVariable var, Sequence resultSeq) // reset the stack var.after = null; - if( !contextStack.isEmpty() && ( var == contextStack.peek() ) ) { + if (!contextStack.isEmpty() && (var == contextStack.peek())) { contextStack.pop(); } } @@ -2629,165 +2248,127 @@ public void popLocalVariables(LocalVariable var, Sequence resultSeq) * * @param func an inline function definition using closure variables */ - public void pushClosure(final UserDefinedFunction func) { + void pushClosure(final UserDefinedFunction func) { closures.add(func); } - /** - * Returns the current size of the stack. This is used to determine where a variable has been declared. 
- * - * @return current size of the stack - */ - public int getCurrentStackSize() - { - return( variableStackSize ); + @Override + public int getCurrentStackSize() { + return variableStackSize; } - /* ----------------- Function call stack ------------------------ */ - - - /** - * Report the start of a function execution. Adds the reported function signature to the function call stack. - * - * @param signature - */ - public void functionStart( FunctionSignature signature ) - { - callStack.push( signature ); + @Override + public void functionStart(final FunctionSignature signature) { + callStack.push(signature); } - - /** - * Report the end of the currently executed function. Pops the last function signature from the function call stack. - */ - public void functionEnd() - { - if( callStack.isEmpty() ) { - LOG.warn( "Function call stack is empty, but XQueryContext.functionEnd() was called. This " + "could indicate a concurrency issue (shared XQueryContext?)" ); + @Override + public void functionEnd() { + if (callStack.isEmpty()) { + LOG.warn("Function call stack is empty, but XQueryContext.functionEnd() was called. This " + + "could indicate a concurrency issue (shared XQueryContext?)"); } else { callStack.pop(); } } - - /** - * Check if the specified function signature is found in the current function called stack. If yes, the function might be tail recursive and needs - * to be optimized. 
- * - * @param signature - */ - public boolean tailRecursiveCall( FunctionSignature signature ) - { - return( callStack.contains( signature ) ); + @Override + public boolean tailRecursiveCall(final FunctionSignature signature) { + return callStack.contains(signature); } + @Override + public void mapModule(final String namespace, final XmldbURI uri) { + mappedModules.put(namespace, uri); + } - /* ----------------- Module imports ------------------------ */ + @Override + public Module importModule(String namespaceURI, String prefix, String location) + throws XPathException { - public void mapModule( String namespace, XmldbURI uri ) - { - mappedModules.put( namespace, uri ); - } - - /** - * Import a module and make it available in this context. The prefix and location parameters are optional. If prefix is null, the default prefix - * specified by the module is used. If location is null, the module will be read from the namespace URI. - * - * @param namespaceURI - * @param prefix - * @param location - * - * @throws XPathException - */ - public Module importModule( String namespaceURI, String prefix, String location ) throws XPathException { - - if(prefix != null && ("xml".equals(prefix) || "xmlns".equals(prefix))) { + if (XML_NS_PREFIX.equals(prefix) || XMLNS_ATTRIBUTE.equals(prefix)) { throw new XPathException(ErrorCodes.XQST0070, "The prefix declared for a module import must not be 'xml' or 'xmlns'."); } - - if(namespaceURI != null && namespaceURI.isEmpty()) { + + if (namespaceURI != null && namespaceURI.isEmpty()) { throw new XPathException(ErrorCodes.XQST0088, "The first URILiteral in a module import must be of nonzero length."); } - - Module module = null; - if (namespaceURI != null) - {module = getRootModule( namespaceURI );} + Module module = null; - if( module != null ) { - LOG.debug( "Module " + namespaceURI + " already present." 
); + if (namespaceURI != null) { + module = getRootModule(namespaceURI); + } + if (module != null) { + if (LOG.isDebugEnabled()) { + LOG.debug("Module " + namespaceURI + " already present."); + } // Set locally to remember the dependency in case it was inherited. - setModule( namespaceURI, module ); + setModule(namespaceURI, module); + } else { // if location is not specified, try to resolve in expath repo if (location == null && namespaceURI != null) { module = resolveInEXPathRepository(namespaceURI, prefix); } - if ( module == null ) { - - if( location == null && namespaceURI != null) { + if (module == null) { + if (location == null && namespaceURI != null) { // check if there's a static mapping in the configuration - location = getModuleLocation( namespaceURI ); + location = getModuleLocation(namespaceURI); - if( location == null ) { + if (location == null) { location = namespaceURI; } } //Is the module's namespace mapped to a URL ? - if( mappedModules.containsKey( location ) ) { - location = mappedModules.get( location ).toString(); + if (mappedModules.containsKey(location)) { + location = mappedModules.get(location).toString(); } // is it a Java module? - if( location.startsWith( JAVA_URI_START ) ) { - location = location.substring( JAVA_URI_START.length() ); - module = loadBuiltInModule( namespaceURI, location ); + if (location.startsWith(JAVA_URI_START)) { + location = location.substring(JAVA_URI_START.length()); + module = loadBuiltInModule(namespaceURI, location); } else { Source moduleSource; - if( location.startsWith( XmldbURI.XMLDB_URI_PREFIX ) - || ( ( location.indexOf( ':' ) == -1 ) && moduleLoadPath.startsWith( XmldbURI.XMLDB_URI_PREFIX ) ) ) { + if (location.startsWith(XmldbURI.XMLDB_URI_PREFIX) + || ((location.indexOf(':') == -1) && moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX))) { // Is the module source stored in the database? 
try { - XmldbURI locationUri = XmldbURI.xmldbUriFor( location ); + XmldbURI locationUri = XmldbURI.xmldbUriFor(location); - if( moduleLoadPath.startsWith( XmldbURI.XMLDB_URI_PREFIX ) ) { - final XmldbURI moduleLoadPathUri = XmldbURI.xmldbUriFor( moduleLoadPath ); - locationUri = moduleLoadPathUri.resolveCollectionPath( locationUri ); + if (moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX)) { + final XmldbURI moduleLoadPathUri = XmldbURI.xmldbUriFor(moduleLoadPath); + locationUri = moduleLoadPathUri.resolveCollectionPath(locationUri); } - DocumentImpl sourceDoc = null; - - try { - sourceDoc = getBroker().getXMLResource( locationUri.toCollectionPathURI(), LockMode.READ_LOCK ); + try (final LockedDocument lockedSourceDoc = getBroker().getXMLResource(locationUri.toCollectionPathURI(), LockMode.READ_LOCK)) { - if(sourceDoc == null) { + final DocumentImpl sourceDoc = lockedSourceDoc == null ? null : lockedSourceDoc.getDocument(); + if (sourceDoc == null) { throw moduleLoadException("Module location hint URI '" + location + "' does not refer to anything.", location); } - if(( sourceDoc.getResourceType() != DocumentImpl.BINARY_FILE ) || !"application/xquery".equals(sourceDoc.getMetadata().getMimeType())) { + if ((sourceDoc.getResourceType() != DocumentImpl.BINARY_FILE) || !"application/xquery".equals(sourceDoc.getMetadata().getMimeType())) { throw moduleLoadException("Module location hint URI '" + location + "' does not refer to an XQuery.", location); } - moduleSource = new DBSource( getBroker(), (BinaryDocument)sourceDoc, true ); + moduleSource = new DBSource(getBroker(), (BinaryDocument) sourceDoc, true); // we don't know if the module will get returned, oh well - module = compileOrBorrowModule( prefix, namespaceURI, location, moduleSource ); + module = compileOrBorrowModule(prefix, namespaceURI, location, moduleSource); - } catch(final PermissionDeniedException e) { + } catch (final PermissionDeniedException e) { throw moduleLoadException("Permission denied to read 
module source from location hint URI '" + location + ".", location, e); - } finally { - if(sourceDoc != null) { - sourceDoc.getUpdateLock().release(LockMode.READ_LOCK); - } } - } catch(final URISyntaxException e) { + } catch (final URISyntaxException e) { throw moduleLoadException("Invalid module location hint URI '" + location + "'.", location, e); } @@ -2795,15 +2376,17 @@ public Module importModule( String namespaceURI, String prefix, String location // No. Load from file or URL try { - //TODO: use URIs to ensure proper resolution of relative locations - moduleSource = SourceFactory.getSource( getBroker(), moduleLoadPath, location, true ); + moduleSource = SourceFactory.getSource(getBroker(), moduleLoadPath, location, true); + if (moduleSource == null) { + throw moduleLoadException("Source for module '" + namespaceURI + "' not found module location hint URI '" + location + "'.", location); + } - } catch(final MalformedURLException e) { + } catch (final MalformedURLException e) { throw moduleLoadException("Invalid module location hint URI '" + location + "'.", location, e); - } catch(final IOException e) { - throw moduleLoadException("Source for module '" + namespaceURI + "' not found module location hint URI '" + location + "'.", location, e); - } catch(final PermissionDeniedException e) { + } catch (final IOException e) { + throw moduleLoadException("Source for module '" + namespaceURI + "' could not be read, module location hint URI '" + location + "'.", location, e); + } catch (final PermissionDeniedException e) { throw moduleLoadException("Permission denied to read module source from location hint URI '" + location + ".", location, e); } @@ -2814,231 +2397,182 @@ public Module importModule( String namespaceURI, String prefix, String location } // NOTE: expathrepo related, closes the EXPath else (if module != null) } - if(module != null) { + if (module != null) { if (namespaceURI == null) { namespaceURI = module.getNamespaceURI(); } if (prefix == null) { 
prefix = module.getDefaultPrefix(); } - declareNamespace(prefix, namespaceURI); } return module; } - - protected XPathException moduleLoadException(final String message, final String moduleLocation) throws XPathException { + protected XPathException moduleLoadException(final String message, final String moduleLocation) + throws XPathException { return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(new StringValue(moduleLocation))); } - - protected XPathException moduleLoadException(final String message, final String moduleLocation, final Exception e) throws XPathException { + + protected XPathException moduleLoadException(final String message, final String moduleLocation, final Exception e) + throws XPathException { return new XPathException(ErrorCodes.XQST0059, message, new ValueSequence(new StringValue(moduleLocation)), e); } - /** - * Returns the static location mapped to an XQuery source module, if known. - * - * @param namespaceURI the URI of the module - * - * @return the location string - */ - @SuppressWarnings( "unchecked" ) - public String getModuleLocation( String namespaceURI ) - { - final Map moduleMap = (Map)getBroker().getConfiguration().getProperty( PROPERTY_STATIC_MODULE_MAP ); - return( moduleMap.get( namespaceURI ) ); + @SuppressWarnings("unchecked") + @Override + public String getModuleLocation(final String namespaceURI) { + final Map moduleMap = + (Map) getBroker().getConfiguration().getProperty(PROPERTY_STATIC_MODULE_MAP); + return moduleMap.get(namespaceURI); } - - /** - * Returns an iterator over all module namespace URIs which are statically mapped to a known location. 
- * - * @return an iterator - */ - @SuppressWarnings( "unchecked" ) - public Iterator getMappedModuleURIs() - { - final Map moduleMap = (Map)getBroker().getConfiguration().getProperty( PROPERTY_STATIC_MODULE_MAP ); - return( moduleMap.keySet().iterator() ); + @SuppressWarnings("unchecked") + @Override + public Iterator getMappedModuleURIs() { + final Map moduleMap = + (Map) getBroker().getConfiguration().getProperty(PROPERTY_STATIC_MODULE_MAP); + return moduleMap.keySet().iterator(); } - - private ExternalModule compileOrBorrowModule( String prefix, String namespaceURI, String location, Source source ) throws XPathException - { - final ExternalModule module = compileModule( prefix, namespaceURI, location, source ); - - if(module != null) { + private ExternalModule compileOrBorrowModule(final String prefix, final String namespaceURI, final String location, + final Source source) throws XPathException { + final ExternalModule module = compileModule(prefix, namespaceURI, location, source); + if (module != null) { setModule(module.getNamespaceURI(), module); declareModuleVars(module); } - return module; } - /** - * Compile Module - * - * @param prefix - * @param namespaceURI - * @param location - * @param source + * Compile an XQuery Module * - * @return The compiled module. - * - * @throws XPathException + * @param prefix the namespace prefix of the module. + * @param namespaceURI the namespace URI of the module. + * @param location the location of the module + * @param source the source of the module. 
+ * @return The compiled module, or null if the source is not a module + * @throws XPathException if the module could not be loaded or compiled */ - public ExternalModule compileModule( String prefix, String namespaceURI, - String location, Source source ) throws XPathException - { - LOG.debug( "Loading module from " + location ); - - Reader reader; - - try { - reader = source.getReader(); + private @Nullable + ExternalModule compileModule(final String prefix, String namespaceURI, final String location, + final Source source) throws XPathException { + if (LOG.isDebugEnabled()) { + LOG.debug("Loading module from " + location); + } - if( reader == null ) { - throw(moduleLoadException("failed to load module: '" + namespaceURI + "' from: " + - "'" + source + "', location: '" + location + "'. Source not found. ", location)); + try (final Reader reader = source.getReader()) { + if (reader == null) { + throw moduleLoadException("failed to load module: '" + namespaceURI + "' from: " + + "'" + source + "', location: '" + location + "'. Source not found. 
", location); } if (namespaceURI == null) { final QName qname = source.isModule(); - if (qname == null) - {return null;} + if (qname == null) { + return null; + } namespaceURI = qname.getNamespaceURI(); } - } - catch( final IOException e ) { - throw(moduleLoadException("IO exception while loading module '" + namespaceURI + "'" + - " from '" + source + "'", location, e)); - } - final ExternalModuleImpl modExternal = new ExternalModuleImpl(namespaceURI, prefix); - setModule(namespaceURI, modExternal); - final XQueryContext modContext = new ModuleContext( this, prefix, namespaceURI, location ); - modExternal.setContext( modContext ); - final XQueryLexer lexer = new XQueryLexer( modContext, reader ); - final XQueryParser parser = new XQueryParser( lexer ); - final XQueryTreeParser astParser = new XQueryTreeParser( modContext, modExternal ); - try { - parser.xpath(); + final ExternalModuleImpl modExternal = new ExternalModuleImpl(namespaceURI, prefix); + setModule(namespaceURI, modExternal); + final XQueryContext modContext = new ModuleContext(this, prefix, namespaceURI, location); + modExternal.setContext(modContext); + final XQueryLexer lexer = new XQueryLexer(modContext, reader); + final XQueryParser parser = new XQueryParser(lexer); + final XQueryTreeParser astParser = new XQueryTreeParser(modContext, modExternal); - if( parser.foundErrors() ) { - LOG.debug( parser.getErrorMessage() ); - throw( new XPathException( "error found while loading module from " + location + - ": " + parser.getErrorMessage() ) ); - } - final AST ast = parser.getAST(); + try { + parser.xpath(); - final PathExpr path = new PathExpr( modContext ); - astParser.xpath( ast, path ); + if (parser.foundErrors()) { + if (LOG.isDebugEnabled()) { + LOG.debug(parser.getErrorMessage()); + } + throw new XPathException("error found while loading module from " + location + ": " + parser.getErrorMessage()); + } + final AST ast = parser.getAST(); - if( astParser.foundErrors() ) { - throw( new XPathException( 
"error found while loading module from " + location + ": " + astParser.getErrorMessage(), astParser.getLastException() ) ); - } - - modExternal.setRootExpression(path); + final PathExpr path = new PathExpr(modContext); + astParser.xpath(ast, path); - if(namespaceURI != null && !modExternal.getNamespaceURI().equals(namespaceURI)) { - throw( new XPathException( "namespace URI declared by module (" + modExternal.getNamespaceURI() + ") does not match namespace URI in import statement, which was: " + namespaceURI ) ); - } + if (astParser.foundErrors()) { + throw new XPathException("error found while loading module from " + location + ": " + astParser.getErrorMessage(), astParser.getLastException()); + } + + modExternal.setRootExpression(path); + + if (namespaceURI != null && !modExternal.getNamespaceURI().equals(namespaceURI)) { + throw new XPathException("namespace URI declared by module (" + modExternal.getNamespaceURI() + ") does not match namespace URI in import statement, which was: " + namespaceURI); + } - // Set source information on module context + // Set source information on module context // String sourceClassName = source.getClass().getName(); // modContext.setSourceKey(source.getKey().toString()); - // Extract the source type from the classname by removing the package prefix and the "Source" suffix + // Extract the source type from the classname by removing the package prefix and the "Source" suffix // modContext.setSourceType( sourceClassName.substring( 17, sourceClassName.length() - 6 ) ); - modExternal.setSource( source ); - modContext.setSource(source); - modExternal.setIsReady(true); - return( modExternal ); - } - catch( final RecognitionException e ) { - throw( new XPathException( e.getLine(), e.getColumn(), "error found while loading module from " + location + ": " + e.getMessage() ) ); - } - catch( final TokenStreamException e ) { - throw( new XPathException( "error found while loading module from " + location + ": " + e.getMessage(), e ) ); - } - 
catch( final XPathException e ) { - e.prependMessage( "Error while loading module " + location + ": " ); - throw( e ); - } - catch( final Exception e ) { - e.printStackTrace(); - throw( new XPathException( "Internal error while loading module: " + location, e ) ); - } - finally { - - try { - - if( reader != null ) { - reader.close(); - } - } - catch( final IOException e ) { - LOG.warn( "Error while closing module source: " + e.getMessage(), e ); + modExternal.setSource(source); + modContext.setSource(source); + modExternal.setIsReady(true); + return modExternal; + } catch (final RecognitionException e) { + throw new XPathException(e.getLine(), e.getColumn(), "error found while loading module from " + location + ": " + e.getMessage()); + } catch (final TokenStreamException e) { + throw new XPathException("error found while loading module from " + location + ": " + e.getMessage(), e); + } catch (final XPathException e) { + e.prependMessage("Error while loading module " + location + ": "); + throw e; } + } catch (final IOException e) { + throw moduleLoadException("IO exception while loading module '" + namespaceURI + "'" + + " from '" + source + "'", location, e); } } - - private void declareModuleVars( Module module ) - { + private void declareModuleVars(final Module module) { final String moduleNS = module.getNamespaceURI(); - for( final Iterator i = globalVariables.values().iterator(); i.hasNext(); ) { + for (final Iterator i = globalVariables.values().iterator(); i.hasNext(); ) { final Variable var = i.next(); - if( moduleNS.equals( var.getQName().getNamespaceURI() ) ) { - module.declareVariable( var ); + if (moduleNS.equals(var.getQName().getNamespaceURI())) { + module.declareVariable(var); i.remove(); } } } - - /** - * Add a forward reference to an undeclared function. Forward references will be resolved later. 
- * - * @param call - */ - public void addForwardReference( FunctionCall call ) - { - forwardReferences.add( call ); + @Override + public void addForwardReference(final FunctionCall call) { + forwardReferences.add(call); } - /** - * Resolve all forward references to previously undeclared functions. - * - * @throws XPathException - */ - public void resolveForwardReferences() throws XPathException - { - while( !forwardReferences.isEmpty() ) { - final FunctionCall call = forwardReferences.pop(); - final UserDefinedFunction func = call.getContext().resolveFunction( call.getQName(), call.getArgumentCount() ); - - if( func == null ) { - throw (new XPathException(call, ErrorCodes.XPST0017, "Call to undeclared function: " + call.getQName().getStringValue())); + @Override + public void resolveForwardReferences() throws XPathException { + while (!forwardReferences.isEmpty()) { + final FunctionCall call = forwardReferences.pop(); + final UserDefinedFunction func = call.getContext().resolveFunction(call.getQName(), call.getArgumentCount()); + + if (func == null) { + throw new XPathException(call, ErrorCodes.XPST0017, "Call to undeclared function: " + call.getQName().getStringValue()); } else { call.resolveForwardReference(func); } } } - + /** - * Get environment variables. The variables shall not change + * Get environment variables. The variables shall not change * during execution of query. - * + * * @return Map of environment variables */ - public Map getEnvironmentVariables(){ - if(envs==null){ + public Map getEnvironmentVariables() { + if (envs == null) { envs = System.getenv(); } return envs; @@ -3047,204 +2581,166 @@ public Map getEnvironmentVariables(){ /** * Gets the Effective user * i.e. the user that the query is executing as - * + * * @return The Effective User */ public Subject getEffectiveUser() { return getBroker().getCurrentSubject(); } - + /** * Gets the Real User * i.e. 
the user that initiated execution of the query * Note this is not necessarily the same as the user that the * query is executing as - * @see org.exist.xquery.XQueryContext#getEffectiveUser() - * + * * @return The Real User + * @see org.exist.xquery.XQueryContext#getEffectiveUser() */ public Subject getRealUser() { return realUser; } - - protected void setRealUser(final Subject realUser) { + + private void setRealUser(final Subject realUser) { this.realUser = realUser; } - /* ----------------- Save state ------------------------ */ - + /** + * Save state + */ private class SavedState { - - private HashMap modulesSaved = null; - private HashMap allModulesSaved = null; - private HashMap staticNamespacesSaved = null; - private HashMap staticPrefixesSaved = null; - - @SuppressWarnings("unchecked") - void save() { - if (modulesSaved == null) { - modulesSaved = (HashMap) modules.clone(); - allModulesSaved = (HashMap) allModules.clone(); - staticNamespacesSaved = (HashMap) staticNamespaces.clone(); - staticPrefixesSaved = (HashMap) staticPrefixes.clone(); - } - } - - void restore() { - if (modulesSaved != null) { - modules = modulesSaved; - modulesSaved = null; - allModules = allModulesSaved; - allModulesSaved = null; - staticNamespaces = staticNamespacesSaved; - staticNamespacesSaved = null; - staticPrefixes = staticPrefixesSaved; - staticPrefixesSaved = null; - } - } - } - + private Map modulesSaved = null; + private Map allModulesSaved = null; + private Map staticNamespacesSaved = null; + private Map staticPrefixesSaved = null; + + @SuppressWarnings("unchecked") + void save() { + if (modulesSaved == null) { + modulesSaved = new HashMap<>(modules); + allModulesSaved = new HashMap(allModules); + staticNamespacesSaved = new HashMap(staticNamespaces); + staticPrefixesSaved = new HashMap(staticPrefixes); + } + } + + void restore() { + if (modulesSaved != null) { + modules = modulesSaved; + modulesSaved = null; + allModules = allModulesSaved; + allModulesSaved = null; + 
staticNamespaces = staticNamespacesSaved; + staticNamespacesSaved = null; + staticPrefixes = staticPrefixesSaved; + staticPrefixesSaved = null; + } + } + } + /** * Before a dynamic import, make sure relevant parts of the current context a saved * to the stack. This is important for util:import-module. The context will be restored * during {@link #reset()}. */ public void saveState() { - savedState.save(); - } - - public boolean optimizationsEnabled() - { - return( enableOptimizer ); + savedState.save(); } + @Override + public boolean optimizationsEnabled() { + return enableOptimizer; + } - /** - * for static compile-time options i.e. declare option - * - * @param qnameString - * @param contents - * - * @throws XPathException - */ - public void addOption( String qnameString, String contents ) throws XPathException - { - if( staticOptions == null ) { - staticOptions = new ArrayList