diff --git a/.cargo/config.toml b/.cargo/config.toml index 11b9a10d7..e80d97e65 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,4 +1,4 @@ [target.x86_64-unknown-linux-gnu] -rustflags = "-L deps/readies/wd40/linux-x64" +rustflags = ["-L", "deps/readies/wd40/linux-x64"] diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 7e57a69bf..000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,702 +0,0 @@ -version: 2.1 - -parameters: - run_default_flow: - default: true - type: boolean - run_benchmark_flow_label: - default: false - type: boolean - -commands: - early-returns: - steps: - - run: - name: Early return if this is a docs build - command: | - if [[ $CIRCLE_BRANCH == *docs ]]; then - echo "Identifies as documents PR, no testing required." - circleci step halt - fi - - run: - name: Early return if this branch should ignore CI - command: | - if [[ $CIRCLE_BRANCH == *noci ]]; then - echo "Identifies as actively ignoring CI, no testing required." - circleci step halt - fi - - early-return-for-forked-pull-requests: - description: >- - If this build is from a fork, stop executing the current job and return success. - This is useful to avoid steps that will fail due to missing credentials. 
- steps: - - run: - name: Early return if this build is from a forked PR - command: | - if [[ -n "$CIRCLE_PR_NUMBER" ]]; then - echo "Nothing to do for forked PRs, so marking this step successful" - circleci step halt - fi - - setup-executor: - steps: - - run: - name: Setup executor - command: | - apt-get -qq update - apt-get -q install -y git openssh-client curl ca-certificates make tar gzip - bash <(curl -fsSL https://raw.githubusercontent.com/docker/docker-install/master/install.sh) - - setup_remote_docker: - version: 20.10.14 - docker_layer_caching: true - - checkout-all: - steps: - - checkout - - run: - name: Checkout submodules - command: git submodule update --init --recursive - - setup-automation: - steps: - - run: - name: Setup automation - command: | - git submodule update --init deps/readies - if [[ $(uname -s) == Darwin ]]; then rm -f /usr/local/bin/python3; fi - ./deps/readies/bin/getpy3 - - run: - name: Setup automation (part 2) - shell: /bin/bash -l -eo pipefail - command: | - export HOMEBREW_NO_AUTO_UPDATE=1 - ./deps/readies/bin/getaws - ls -l /usr/local/bin/python* || true - echo "python3: $(command -v python3)" - python3 --version - python3 -m pip list - - install-prerequisites: - parameters: - redis_version: - type: string - default: "7" - getredis_params: - type: string - default: "" - system_setup_params: - type: string - default: "" - steps: - - setup-automation - - run: - name: System setup - shell: /bin/bash -l -eo pipefail - command: | - ./sbin/system-setup.py <> - - run: - name: Install Redis - shell: /bin/bash -l -eo pipefail - command: | - export HOMEBREW_NO_AUTO_UPDATE=1 - ./deps/readies/bin/getredis -v '<>' --force <> - - run: - name: System report - shell: /bin/bash -l -eo pipefail - command: | - source $HOME/.cargo/env - make info - - save-tests-logs: - steps: - - run: - name: Cleanup test log dir - command: | - rm -f tests/pytest/logs/*.{aof,rdb} - when: always - - store_artifacts: - path: tests/pytest/logs - - persist-artifacts: - 
steps: - - early-return-for-forked-pull-requests - - run: - name: List artifacts - command: | - cd bin/artifacts - du -ah --apparent-size * - - persist_to_workspace: - root: bin/ - paths: - - artifacts/*.zip - - artifacts/*.tgz - - artifacts/*.tar - - build-steps: - parameters: - build_params: - type: string - default: "" - test_params: - type: string - default: "" - redis_version: - type: string - default: "7" - getredis_params: - type: string - default: "" - steps: - - early-returns - - checkout-all - - install-prerequisites: - redis_version: <> - getredis_params: <> - - restore_cache: - keys: - - v3-dependencies-{{ arch }}-{{ checksum "Cargo.lock" }} - - run: - name: Check formatting - shell: /bin/bash -l -eo pipefail - command: make lint - - run: - name: Build debug - shell: /bin/bash -l -eo pipefail - command: make build DEBUG=1 <> - - run: - name: Build release - shell: /bin/bash -l -eo pipefail - command: make build pack <> - - save_cache: - key: v3-dependencies-{{ arch }}-{{ checksum "Cargo.lock" }} - paths: - - "~/.cargo" - - "./target" - - test-steps: - steps: - - run: - name: Run tests - shell: /bin/bash -l -eo pipefail - command: | - python3 -m RLTest --version - make test - timeout: 30m - no_output_timeout: 30m - - save_cache: - key: v2-dependencies-{{ arch }}-{{ checksum "Cargo.lock" }} - paths: - - "~/.cargo" - - "./target" - - save-tests-logs - - build-platforms-steps: - parameters: - platform: - type: string - steps: - - early-returns - - setup-executor - - checkout-all - - setup-automation - - run: - name: Build for platform - shell: /bin/bash -l -eo pipefail - command: | - ROOT=$PWD - cd build/docker - make build OSNICK=<> VERSION=$CIRCLE_TAG BRANCH=$CIRCLE_BRANCH TEST=1 OFFICIAL=1 SHOW=1 - cd $ROOT - mkdir -p tests/pytest/logs - tar -C tests/pytest/logs -xzf bin/artifacts/pytest-logs*.tgz - timeout: 60m - no_output_timeout: 30m - - save-tests-logs - - early-return-for-forked-pull-requests - - run: - name: Upload artifacts to S3 - shell: 
/bin/bash -l -eo pipefail - command: | - if [[ -n $CIRCLE_BRANCH ]]; then - make upload-artifacts OSNICK=<> SHOW=1 - fi - - run: - name: Publish container - shell: /bin/bash -l -eo pipefail - command: | - docker login -u redisfab -p $DOCKER_REDISFAB_PWD - cd build/docker - make publish OSNICK=<> VERSION=$CIRCLE_TAG BRANCH=$CIRCLE_BRANCH OFFICIAL=1 SHOW=1 - - persist-artifacts - - vm-build-platforms-steps: - parameters: - platform: - type: string - steps: - - early-returns - - checkout - - setup-automation - - run: - name: Install Docker - shell: /bin/bash -l -eo pipefail - command: ./deps/readies/bin/getdocker - - run: - name: Build for platform - command: | - ROOT=$PWD - cd build/docker - make build OSNICK=<> VERSION=$CIRCLE_TAG BRANCH=$CIRCLE_BRANCH TEST=1 OFFICIAL=1 SHOW=1 - cd $ROOT - mkdir -p tests/pytest/logs - tar -C tests/pytest/logs -xzf bin/artifacts/pytest-logs*.tgz - no_output_timeout: 30m - - save-tests-logs - - early-return-for-forked-pull-requests - - run: - name: Upload artifacts to S3 - command: | - if [[ -n $CIRCLE_BRANCH ]]; then - make upload-artifacts OSNICK=<> SHOW=1 - fi - - run: - name: Publish container - command: | - docker login -u redisfab -p $DOCKER_REDISFAB_PWD - cd build/docker - make publish OSNICK=<> VERSION=$CIRCLE_TAG BRANCH=$CIRCLE_BRANCH OFFICIAL=1 SHOW=1 - - persist-artifacts - - benchmark-steps: - parameters: - github_actor: - type: string - default: $CIRCLE_USERNAME - module_path: - type: string - default: bin/linux-x64-release/rejson.so - profile_env: - type: string - default: "0" - benchmark_glob: - type: string - default: "*.yml" - triggering_env: - type: string - default: "circleci" - allowed_envs: - type: string - default: "oss-standalone" - steps: - - run: - name: Prepare automation - command: | - ./deps/readies/bin/getpy3 - python3 -m pip install -r tests/benchmarks/requirements.txt - VERSION=0.14.8 ./deps/readies/bin/getterraform - - run: - name: Run CI benchmarks on aws - timeout: 60m - no_output_timeout: 30m - 
command: | - ROOT="$PWD" - cd tests/benchmarks - export AWS_ACCESS_KEY_ID=$PERFORMANCE_EC2_ACCESS_KEY - export AWS_SECRET_ACCESS_KEY=$PERFORMANCE_EC2_SECRET_KEY - export AWS_DEFAULT_REGION=$PERFORMANCE_EC2_REGION - export EC2_PRIVATE_PEM=$PERFORMANCE_EC2_PRIVATE_PEM - export PROFILE=<< parameters.profile_env >> - export BENCHMARK_GLOB=<< parameters.benchmark_glob >> - export PERF_CALLGRAPH_MODE="dwarf" - redisbench-admin run-remote \ - --required-module ReJSON \ - --module_path "$ROOT/<< parameters.module_path >>" \ - --github_actor "<< parameters.github_actor >>" \ - --github_repo $CIRCLE_PROJECT_REPONAME \ - --github_org $CIRCLE_PROJECT_USERNAME \ - --github_sha $CIRCLE_SHA1 \ - --github_branch $CIRCLE_BRANCH \ - --upload_results_s3 \ - --fail_fast \ - --triggering_env << parameters.triggering_env >> \ - --push_results_redistimeseries \ - --allowed-envs << parameters.allowed_envs >> || true - - run: - name: Generate Pull Request Performance info - command: | - if [[ -n ${CIRCLE_PULL_REQUEST##*/} ]]; then - redisbench-admin compare \ - --defaults_filename ./tests/benchmarks/defaults.yml \ - --comparison-branch $CIRCLE_BRANCH \ - --auto-approve \ - --pull-request ${CIRCLE_PULL_REQUEST##*/} - fi -#---------------------------------------------------------------------------------------------------------------------------------- - -jobs: - build-linux-debian: - docker: - - image: redisfab/rmbuilder:6.2.7-x64-bullseye - parameters: - redis_version: - type: string - default: "7" - persist: - type: string - default: "yes" - steps: - - build-steps: - redis_version: <> - - test-steps - - run: - name: Persist artifacts? 
- command: | - if [[ "<>" != "yes" ]]; then - circleci step halt - fi - - persist-artifacts - - build-platforms: - parameters: - platform: - type: string - # docker: - # - image: debian:bullseye - machine: - enabled: true - image: ubuntu-2004:202010-01 - resource_class: large - steps: - - vm-build-platforms-steps: - platform: <> - - build-arm-platforms: - parameters: - platform: - type: string - machine: - image: ubuntu-2004:202101-01 - resource_class: arm.medium - steps: - - vm-build-platforms-steps: - platform: <> - - build-macos-x64: - macos: - xcode: 12.5.1 - resource_class: macos.x86.medium.gen2 - parameters: - upload: - type: string - default: "yes" - steps: - - early-returns - - build-steps - - test-steps - - run: - name: Upload artifacts to S3 - command: | - if [[ -n $CIRCLE_BRANCH && "<>" == "yes" ]]; then - make upload-artifacts SHOW=1 - fi - - persist-artifacts - - build-macos-m1: - macos: - xcode: 14.2.0 - resource_class: macos.m1.large.gen1 - parameters: - upload: - type: string - default: "yes" - steps: - - early-returns - - build-steps - - test-steps - - run: - name: Upload artifacts to S3 - command: | - if [[ -n $CIRCLE_BRANCH && "<>" == "yes" ]]; then - make upload-artifacts SHOW=1 VERBOSE=1 - fi - - persist-artifacts - - coverage: - docker: - - image: redisfab/rmbuilder:6.2.7-x64-focal - steps: - - early-returns - - checkout-all - - install-prerequisites - - run: - name: Build & Test - shell: /bin/bash -l -eo pipefail - command: | - make coverage SHOW=1 - make upload-cov SHOW=1 - no_output_timeout: 30m - - save-tests-logs - - sanitize: - docker: - - image: redisfab/clang:16-x64-focal - parameters: - san-type: - type: string - steps: - - early-returns - - checkout-all - - install-prerequisites - - run: - name: Build & test - shell: /bin/bash -l -eo pipefail - command: make SAN=<> build test SHOW=1 - no_output_timeout: 30m - - save-tests-logs - - upload-artifacts: - parameters: - staging-lab: - type: string - default: "0" - docker: - - image: 
redisfab/rmbuilder:6.2.7-x64-bullseye - steps: - - early-returns - - early-return-for-forked-pull-requests - - checkout - - setup-automation - - attach_workspace: - at: ~/workspace - - run: - name: Upload artifacts to S3 - command: | - mkdir -p bin - ln -s ~/workspace/artifacts bin/artifacts - if [[ -n $CIRCLE_TAG && "<>" != 1 ]]; then - make upload-release SHOW=1 - else - make upload-release SHOW=1 STAGING=1 - fi - - release-qa-tests: - docker: - - image: redisfab/rmbuilder:6.2.7-x64-bullseye - steps: - - early-returns - - early-return-for-forked-pull-requests - - checkout - - setup-automation - - run: - name: Run QA Automation - command: ./tests/qa/qatests -m "$CIRCLE_TAG" - - benchmark-json-oss-standalone: - docker: - - image: redisfab/rmbuilder:6.2.7-x64-focal - steps: - - build-steps - - benchmark-steps - - benchmark-json-oss-standalone-profiler: - docker: - - image: redisfab/rmbuilder:6.2.7-x64-focal - steps: - - build-steps - - benchmark-steps: - profile_env: "1" - triggering_env: "circleci.profilers" # results generated with profilers attached are not mixed with the ones without it - -#---------------------------------------------------------------------------------------------------------------------------------- - -on-any-branch: &on-any-branch - filters: - branches: - only: /.*/ - tags: - only: /.*/ - -always: &always - filters: - branches: - only: /.*/ - tags: - only: /.*/ - -never: &never - filters: - branches: - ignore: /.*/ - tags: - ignore: /.*/ - -on-master: &on-master - filters: - branches: - only: master - tags: - ignore: /.*/ - -on-integ-branch: &on-integ-branch - filters: - branches: - only: - - master - - /^\d+\.\d+.*$/ - - /^feature.*$/ - tags: - ignore: /.*/ - -on-integ-branch-cron: &on-integ-branch-cron - filters: - branches: - only: - - master - - /^\d+\.\d+.*$/ - - /^feature.*$/ - -not-on-integ-branch: ¬-on-integ-branch - filters: - branches: - ignore: - - master - - /^\d+\.\d+.*$/ - - /^feature.*$/ - tags: - ignore: /.*/ - 
-on-version-tags: &on-version-tags - filters: - branches: - ignore: /.*/ - tags: - only: /^v[0-9].*/ - -on-integ-and-version-tags: &on-integ-and-version-tags - filters: - branches: - only: - - master - - /^\d+\.\d+.*$/ - - /^feature.*$/ - tags: - only: /^v[0-9].*/ - -#---------------------------------------------------------------------------------------------------------------------------------- - -workflows: - version: 2 - default-flow: - when: - << pipeline.parameters.run_default_flow >> - jobs: - - build-linux-debian: - name: build - <<: *not-on-integ-branch - - build-platforms: - <<: *on-integ-and-version-tags - context: common - matrix: - parameters: - platform: [jammy, focal, bionic, centos7, rocky8, bullseye, amzn2] - - build-arm-platforms: - <<: *on-integ-and-version-tags - context: common - matrix: - parameters: - platform: [jammy, focal, bionic] - - build-macos-x64: - <<: *on-version-tags - context: common - - build-macos-m1: - context: common - <<: *on-version-tags - - coverage: - <<: *always - - sanitize: - name: sanitize-<< matrix.san-type >> - <<: *always - matrix: - parameters: - san-type: [address] - - upload-artifacts: - name: upload-artifacts-to-staging-lab - <<: *on-integ-branch - staging-lab: "1" - context: common - requires: - - build-platforms - - build-arm-platforms - - build-macos-x64 - - build-macos-m1 - - upload-artifacts: - name: upload-release-artifacts - <<: *on-version-tags - context: common - requires: - - build-platforms - - build-arm-platforms - - build-macos-x64 - - build-macos-m1 - - release-qa-tests: - <<: *on-version-tags - context: common - requires: - - upload-release-artifacts - - benchmark-json-oss-standalone: - <<: *on-integ-and-version-tags - context: common - - benchmark-json-oss-standalone-profiler: - <<: *on-integ-and-version-tags - context: common - - benchmark_flow_label: - when: - << pipeline.parameters.run_benchmark_flow_label >> - jobs: - - benchmark-json-oss-standalone: - <<: *on-any-branch - context: common - - 
benchmark-json-oss-standalone-profiler: - <<: *on-any-branch - context: common - - nightly: - triggers: - - schedule: - cron: "07 20 * * *" - <<: *on-integ-branch-cron - jobs: - - build-linux-debian: - name: build-with-redis-<> - matrix: - parameters: - redis_version: ["7", "unstable"] - - nightly-perf-once-a-week: - triggers: - - schedule: - # “At 07:00 on Mondays.” - cron: "00 07 * * 1" - filters: - branches: - only: master - jobs: - - benchmark-json-oss-standalone: - context: common - - benchmark-json-oss-standalone-profiler: - context: common - - nightly-twice-a-week: - triggers: - - schedule: - cron: "20 17 * * 0,3" - <<: *on-integ-branch-cron - jobs: - - build-macos-x64: - context: common - upload: "yes" - - build-macos-m1: - context: common - upload: "yes" diff --git a/.github/actions/build-json-module-and-redis-with-cargo/action.yml b/.github/actions/build-json-module-and-redis-with-cargo/action.yml new file mode 100644 index 000000000..18da16fdd --- /dev/null +++ b/.github/actions/build-json-module-and-redis-with-cargo/action.yml @@ -0,0 +1,25 @@ +name: Build JSON module and Redis Server +description: | + Build JSON module and Redis Server + +inputs: + redis-ref: + description: 'Redis version to build' + required: true + default: '7.4' + +runs: + using: composite + steps: + - name: Build Redis + shell: bash + working-directory: redis + run: | + make install -j `nproc` + - name: Build module + shell: bash + run: | + . 
$HOME/.cargo/env + echo "source $HOME/.cargo/env" >> $HOME/.bash_profile + cargo build --release + cp $(realpath ./target/release)/librejson.so $(realpath ./target/release)/rejson.so diff --git a/.github/actions/build-json-module-and-redis/action.yml b/.github/actions/build-json-module-and-redis/action.yml new file mode 100644 index 000000000..f317c9c97 --- /dev/null +++ b/.github/actions/build-json-module-and-redis/action.yml @@ -0,0 +1,27 @@ +name: Build JSON module and Redis Server +description: | + Build JSON module and Redis Server + +inputs: + sanitizer: + type: string + +runs: + using: composite + steps: + - name: Build Redis + shell: bash + working-directory: redis + run: | + echo ::group::Build Redis + make install -j `nproc` + echo ::endgroup:: + - name: Build module + shell: bash + run: | + echo ::group::Build RedisJSON + if [[ -f "$HOME/.cargo/env" ]]; then + . "$HOME/.cargo/env" + fi + make build SAN=${{ inputs.sanitizer }} -j `nproc` + echo ::endgroup:: diff --git a/.github/actions/make-pack/action.yml b/.github/actions/make-pack/action.yml new file mode 100644 index 000000000..4b541eae0 --- /dev/null +++ b/.github/actions/make-pack/action.yml @@ -0,0 +1,9 @@ +name: Run make pack module script + +runs: + using: composite + steps: + - name: Pack module + shell: bash + run: | + BRANCH=$TAG_OR_BRANCH make pack diff --git a/.github/actions/node20-supported/action.yml b/.github/actions/node20-supported/action.yml new file mode 100644 index 000000000..65db7653f --- /dev/null +++ b/.github/actions/node20-supported/action.yml @@ -0,0 +1,30 @@ +name: Check if node20 is supported +description: Check if node20 is supported + +inputs: + container: + type: string + +outputs: + supported: + value: ${{ steps.node20.outputs.supported }} + +runs: + using: composite + steps: + - name: Check if node20 is Supported + id: node20 + shell: bash + run: | + for platform in ubuntu:bionic centos:7 amazonlinux:2 alpine:3; do + if [[ "${{ inputs.container }}" == "$platform" ]]; 
then + echo "supported=false" >> $GITHUB_OUTPUT + # https://github.com/actions/checkout/issues/1809 + echo "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION=node16" >> $GITHUB_ENV + echo "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION=node16" >> $GITHUB_ENV + # https://github.blog/changelog/2024-03-07-github-actions-all-actions-will-run-on-node20-instead-of-node16-by-default/ + echo "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION=true" >> $GITHUB_ENV + exit 0 + fi + done + echo "supported=true" >> $GITHUB_OUTPUT diff --git a/.github/actions/pack-module/action.yml b/.github/actions/pack-module/action.yml new file mode 100644 index 000000000..3b958449a --- /dev/null +++ b/.github/actions/pack-module/action.yml @@ -0,0 +1,16 @@ +name: Run pack module script + +runs: + using: composite + steps: + - name: Pack module + shell: bash + run: | + if command -v scl_source &> /dev/null + then + . scl_source enable devtoolset-11 || true + fi + . venv/bin/activate + git config --global --add safe.directory $GITHUB_WORKSPACE + export PATH="$GITHUB_WORKSPACE/redis/src:$PATH" + BRANCH=$TAG_OR_BRANCH SHOW=1 OSNICK=${{ matrix.docker.nick }} ./sbin/pack.sh $(realpath ./target/release/rejson.so) diff --git a/.github/actions/run-tests/action.yml b/.github/actions/run-tests/action.yml new file mode 100644 index 000000000..3cd82ea15 --- /dev/null +++ b/.github/actions/run-tests/action.yml @@ -0,0 +1,29 @@ +name: Run module tests + +inputs: + run_valgrind: + description: 'Run valgrind on the tests' + type: string + default: 0 + +runs: + using: composite + steps: + - name: Run tests + shell: bash + run: | + echo ::group::Activate virtual environment + python3 -m venv venv + echo "source $PWD/venv/bin/activate" >> ~/.bash_profile + . venv/bin/activate + echo ::endgroup:: + echo ::group::Install python dependencies + ./.install/common_installations.sh + echo ::endgroup:: + echo ::group::Unit tests + . 
$HOME/.cargo/env + cargo test + echo ::endgroup:: + echo ::group::Flow tests + MODULE=$(realpath ./target/release/rejson.so) RLTEST_ARGS='--no-progress' ./tests/pytest/tests.sh VG=${{inputs.run_valgrind}} + echo ::endgroup:: diff --git a/.github/actions/san-run-tests/action.yml b/.github/actions/san-run-tests/action.yml new file mode 100644 index 000000000..4673fbce9 --- /dev/null +++ b/.github/actions/san-run-tests/action.yml @@ -0,0 +1,126 @@ +name: Common Flow for Tests + +# Documentation: https://redislabs.atlassian.net/wiki/spaces/DX/pages/3967844669/RediSearch+CI+refactor + +inputs: + env: + default: "ubuntu-22.04" + type: string + container: + type: string + sanitizer: + type: string + test-config: + description: 'Test configuration environment variable (e.g. "CONFIG=tls" or "QUICK=1")' + required: true + type: string + redis-ref: + description: 'Redis ref to checkout' + type: string + required: true + +runs: + using: composite + steps: + - name: Get Installation Mode + shell: bash + id: mode + run: | + [[ -z "${{ inputs.container }}" ]] && echo "mode=sudo" >> $GITHUB_OUTPUT || echo "mode=" >> $GITHUB_OUTPUT + - name: Check if node20 is Supported + id: node20 + uses: ./.github/actions/node20-supported + with: + container: ${{ inputs.container }} + - name: Install git + shell: bash + run: | + # TODO: must be changed to run a script based on the input env + echo ::group::Install git + ${{ steps.mode.outputs.mode }} apt-get update && apt-get install -y git + echo ::endgroup:: + - name: Setup specific + shell: bash + working-directory: .install + run: | + echo ::group::OS-Specific Setup + ./install_script.sh ${{ steps.mode.outputs.mode }} + echo ::endgroup:: + echo ::group::Get Rust + ./getrust.sh ${{ steps.mode.outputs.mode }} + echo ::endgroup:: + + - name: Full checkout (node20 supported) + if: steps.node20.outputs.supported == 'true' + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Full checkout (node20 unsupported) + if: 
steps.node20.outputs.supported == 'false' + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Get Redis + uses: actions/checkout@v4 + with: + repository: redis/redis + ref: ${{ inputs.redis-ref }} + path: redis + submodules: 'recursive' + - name: Build + uses: ./.github/actions/build-json-module-and-redis + with: + sanitizer: ${{ inputs.san }} + + - name: Set Artifact Names + shell: bash + # Artifact names have to be unique, so we base them on the environment. + # We also remove invalid characters from the name. + id: artifact-names + run: | + # Invalid characters include: Double quote ", Colon :, Less than <, Greater than >, Vertical bar |, Asterisk *, Question mark ? + echo "name=$(echo "${{ inputs.container || inputs.env }} ${{ runner.arch }}, Redis ${{ inputs.redis-ref }}" | \ + sed -e 's/[":\/\\<>\|*?]/_/g' -e 's/__*/_/g' -e 's/^_//' -e 's/_$//')" >> $GITHUB_OUTPUT + + - name: Run tests + shell: bash + id: test + run: | + echo ::group::Activate virtual environment + python3 -m venv venv + echo "source $PWD/venv/bin/activate" >> ~/.bash_profile + source venv/bin/activate + echo ::endgroup:: + echo ::group::Install python dependencies + ./.install/common_installations.sh + echo ::endgroup:: + echo ::group::Unit tests + . 
"$HOME/.cargo/env" + make cargo_test LOG=1 CLEAR_LOGS=0 SAN=${{ inputs.san }} + echo ::endgroup:: + echo ::group::Flow tests + make pytest LOG=1 CLEAR_LOGS=0 SAN=${{ inputs.san }} ${{ inputs.test-config }} + echo ::endgroup:: + env: + PIP_BREAK_SYSTEM_PACKAGES: 1 + + - name: Upload test artifacts (node20 supported) + if: steps.node20.outputs.supported == 'true' && steps.test.outcome == 'failure' + uses: actions/upload-artifact@v4 + with: + name: Test logs ${{ steps.artifact-names.outputs.name }} + path: tests/**/logs/*.log* + if-no-files-found: ignore + - name: Upload test artifacts (node20 unsupported) + if: steps.node20.outputs.supported == 'false' && steps.test.outcome == 'failure' + uses: actions/upload-artifact@v4 + with: + name: Test logs ${{ steps.artifact-names.outputs.name }} + path: tests/**/logs/*.log* + if-no-files-found: ignore + + - name: Fail flow if tests failed + shell: bash + if: steps.test.outcome == 'failure' + run: exit 1 diff --git a/.github/actions/setup-env/action.yml b/.github/actions/setup-env/action.yml new file mode 100644 index 000000000..dc460200d --- /dev/null +++ b/.github/actions/setup-env/action.yml @@ -0,0 +1,71 @@ +name: Setup environment +description: | + setup environment for the build and output the branch and tag information + +inputs: + github-ref: + description: GitHub ref + required: true + redis-ref: + description: Redis ref + required: false + +outputs: + TAGGED: + description: 'Is this a tagged build, actual value is 1 or 0' + value: ${{ steps.set-tagged.outputs.TAGGED }} + TAG: + description: 'The tag name' + value: ${{ steps.set-git-info.outputs.TAG }} + BRANCH: + description: 'The branch name' + value: ${{ steps.set-git-info.outputs.BRANCH }} + TAG_OR_BRANCH: + description: 'The tag or branch name' + value: ${{ steps.set-git-info.outputs.TAG }}${{ steps.set-git-info.outputs.BRANCH }} + redis-ref: + description: 'The redis ref' + value: ${{ steps.set-redis-ref.outputs.REDIS_REF }} + +runs: + using: composite + steps: 
+ - name: Set the branch and tag outputs + shell: bash + id: set-git-info + run: | + if [[ "${{github.event_name}}" != "pull_request" ]]; then + REF="${{ github.ref }}" + TAG_PATTERN="^refs/tags/(.*)$" + if [[ $REF =~ $TAG_PATTERN ]]; then + TAG=${BASH_REMATCH[1]} + fi + fi + + if [[ -z $TAG ]]; then + BRANCH=${{ github.head_ref || github.ref_name }} + fi + echo "TAG=${TAG}" >> $GITHUB_OUTPUT + echo "BRANCH=${BRANCH}" >> $GITHUB_OUTPUT + echo "TAG=${TAG}, BRANCH=${BRANCH}" + - name: Set the tagged flag + shell: bash + id: set-tagged + run: | + # If this is a version tag, then set to false, meaning this + # is not a production build. + export REF="${{ inputs.github-ref }}" + export PATTERN="refs/tags/v[0-9]+.*" + if [[ $REF =~ $PATTERN ]]; then + echo "This is a tagged build" + echo "TAGGED=1" >> $GITHUB_OUTPUT + else + echo "This is not a tagged build" + echo "TAGGED=0" >> $GITHUB_OUTPUT + fi + - name: Set redis ref + shell: bash + id: set-redis-ref + run: | + export REDIS_REF="${{ inputs.redis-ref || '7.4'}}" + echo "REDIS_REF=${REDIS_REF}" >> $GITHUB_OUTPUT diff --git a/.github/actions/upload-artifacts-to-s3-without-make/action.yml b/.github/actions/upload-artifacts-to-s3-without-make/action.yml new file mode 100644 index 000000000..4e510ed40 --- /dev/null +++ b/.github/actions/upload-artifacts-to-s3-without-make/action.yml @@ -0,0 +1,60 @@ +name: Upload Artifacts to S3 +description: | + Uploads module artifacts to S3 bucket. 
+ +inputs: + aws-access-key-id: + description: 'AWS Access Key ID' + required: true + aws-secret-access-key: + description: 'AWS Secret Access Key' + required: true + osnick: + description: 'OS Nickname' + required: false + default: '' + github-ref: + description: 'github ref' + required: false + default: '' + +runs: + using: composite + steps: + - name: Configure AWS credentials and upload artifcats # todo - use aws role instead + shell: bash + run: | + echo ::group::install aws cli + python3 -m venv .aws-cli-venv && source .aws-cli-venv/bin/activate && + pip3 install --upgrade pip && pip3 install --no-cache-dir awscli && rm -rf /var/cache/apk/* + echo ::endgroup:: + + # Variables from the workflow + export AWS_ACCESS_KEY_ID="${{ inputs.aws-access-key-id }}" + export AWS_SECRET_ACCESS_KEY="${{ inputs.aws-secret-access-key }}" + export AWS_REGION="us-east-1" + # Check if the required environment variables are set + if [ -z "$AWS_ACCESS_KEY_ID" ] || [ -z "$AWS_SECRET_ACCESS_KEY" ] || [ -z "$AWS_REGION" ]; then + echo "Missing AWS credentials or region configuration." + exit 1 + fi + # Configure AWS CLI with provided credentials and region + echo "Configuring AWS CLI with access keys..." 
+ aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID" + aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY" + aws configure set region "$AWS_REGION" + + echo ::group::upload artifacts + SNAPSHOT=1 SHOW=1 VERBOSE=1 ./sbin/upload-artifacts + echo ::endgroup:: + echo ::group::upload staging release + RELEASE=1 SHOW=1 STAGING=1 VERBOSE=1 ./sbin/upload-artifacts + echo ::endgroup:: + + echo ::group::upload production release + REF="${{ inputs.github-ref }}" + PATTERN="refs/tags/v[0-9]+.*" + if [[ $REF =~ $PATTERN ]]; then + RELEASE=1 SHOW=1 VERBOSE=1 ./sbin/upload-artifacts + fi + echo ::endgroup:: diff --git a/.github/actions/upload-artifacts-to-s3/action.yml b/.github/actions/upload-artifacts-to-s3/action.yml new file mode 100644 index 000000000..dfed5d609 --- /dev/null +++ b/.github/actions/upload-artifacts-to-s3/action.yml @@ -0,0 +1,58 @@ +name: Upload Artifacts to S3 +description: | + Uploads module artifacts to S3 bucket. + +inputs: + aws-access-key-id: + description: 'AWS Access Key ID' + required: true + aws-secret-access-key: + description: 'AWS Secret Access Key' + required: true + github-ref: + description: 'GitHub ref' + required: true + osnick: + description: 'OS Nickname' + required: false + default: '' + +runs: + using: composite + steps: + - name: Upload artifacts to S3 - staging + shell: bash + run: | + echo ::group::install aws cli + python3 -m venv .aws-cli-venv && source .aws-cli-venv/bin/activate && + pip3 install --upgrade pip && pip3 install --no-cache-dir awscli && rm -rf /var/cache/apk/* + echo ::endgroup:: + + # Variables from the workflow + export AWS_ACCESS_KEY_ID="${{ inputs.aws-access-key-id }}" + export AWS_SECRET_ACCESS_KEY="${{ inputs.aws-secret-access-key }}" + export AWS_REGION="us-east-1" + # Check if the required environment variables are set + if [ -z "$AWS_ACCESS_KEY_ID" ] || [ -z "$AWS_SECRET_ACCESS_KEY" ] || [ -z "$AWS_REGION" ]; then + echo "Missing AWS credentials or region configuration." 
+ exit 1 + fi + # Configure AWS CLI with provided credentials and region + echo "Configuring AWS CLI with access keys..." + aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID" + aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY" + aws configure set region "$AWS_REGION" + + echo ::group::upload artifacts + make upload-artifacts SHOW=1 VERBOSE=1 + echo ::endgroup:: + echo ::group::upload staging release + make upload-release SHOW=1 STAGING=1 VERBOSE=1 + echo ::endgroup:: + + echo ::group::upload production release + # todo: trigger this manually instead + if [[ "${{ inputs.github-ref}}" != 'refs/heads/master' ]]; then + make upload-release SHOW=1 VERBOSE=1 + fi + echo ::endgroup:: diff --git a/.github/actions/upload-artifacts/action.yml b/.github/actions/upload-artifacts/action.yml new file mode 100644 index 000000000..f3f9ca630 --- /dev/null +++ b/.github/actions/upload-artifacts/action.yml @@ -0,0 +1,26 @@ +name: Pack the module and upload it to S3 +description: Pack the module and upload it to S3 + +inputs: + image: # The Docker image to use for the build + description: 'The Docker image to use for the build' + required: true + +runs: + using: composite + steps: + - name: Set Artifact Names + # Artifact names have to be unique, so we base them on the environment. + # We also remove invalid characters from the name. + id: artifact-names + shell: bash + run: | # Invalid characters include: Double quote ", Colon :, Less than <, Greater than >, Vertical bar |, Asterisk *, Question mark ? 
+ echo "name=$(echo "${{ inputs.image }} x86-64, Redis unstable" | \ + sed -e 's/[":\/\\<>\|*?]/_/g' -e 's/__*/_/g' -e 's/^_//' -e 's/_$//')" >> $GITHUB_OUTPUT + - name: Upload test artifacts + if: inputs.image != 'amazonlinux:2' && inputs.image != 'ubuntu:bionic' + uses: actions/upload-artifact@v4 + with: + name: Test logs ${{ steps.artifact-names.outputs.name }} + path: tests/**/logs/*.log* + if-no-files-found: ignore diff --git a/.github/wordlist.txt b/.github/wordlist.txt index 1cdd9984d..9c545754d 100644 --- a/.github/wordlist.txt +++ b/.github/wordlist.txt @@ -108,3 +108,21 @@ uint vachhanihpavan www zstd +mitigations +backport +md +AGPLv +Affero +RedisXXX +github +https +tri +txt +PRs +semantical +stackoverflow +Enforceability +MERCHANTABILITY +sublicense +licensable +repo diff --git a/.github/workflows/backport_pr.yml b/.github/workflows/backport_pr.yml new file mode 100644 index 000000000..43156c1e4 --- /dev/null +++ b/.github/workflows/backport_pr.yml @@ -0,0 +1,34 @@ +name: Backport merged pull request +on: + pull_request_target: + types: [closed] + issue_comment: + types: [created] +permissions: + contents: write # so it can comment + pull-requests: write # so it can create pull requests +jobs: + backport: + name: Backport pull request + runs-on: ubuntu-latest + + # Only run when pull request is merged + # or when a comment containing `/backport` is created by someone other than the + # https://github.com/backport-action bot user (user id: 97796249). Note that if you use your + # own PAT as `github_token`, that you should replace this id with yours. 
+ if: > + ( + github.event.pull_request.merged + ) || ( + github.event_name == 'issue_comment' && + github.event.issue.pull_request && + github.event.comment.user.id != 97796249 && + contains(github.event.comment.body, '/backport') + ) + steps: + - uses: actions/checkout@v4 + - name: Create backport pull requests + uses: korthout/backport-action@v1 + with: + pull_title: '[${target_branch}] ${pull_title}' + merge_commits: 'skip' diff --git a/.github/workflows/benchmark-flow.yml b/.github/workflows/benchmark-flow.yml new file mode 100644 index 000000000..fcfdd133b --- /dev/null +++ b/.github/workflows/benchmark-flow.yml @@ -0,0 +1,118 @@ +name: Run a Benchmark Flow + +on: + workflow_call: + inputs: + container: + type: string + default: redisfab/rmbuilder:6.2.7-x64-focal + module_path: + type: string + default: bin/linux-x64-release/rejson.so + profile_env: + type: number # for default of 0 + cluster_env: + type: string # for default of "" + benchmark_glob: + type: string + default: "*.yml" + triggering_env: + type: string + default: "circleci" # TODO: change to "github-actions" when ready on grafana + allowed_envs: + type: string + default: "oss-standalone" + allowed_setups: + type: string + benchmark_runner_group_member_id: + type: number + default: 1 + benchmark_runner_group_total: + type: number + default: 1 + +jobs: + benchmark-steps: + runs-on: ubuntu-latest + container: ${{ inputs.container }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - run: | + git init + git config --global --add safe.directory '*' + git submodule update --init --recursive + + - name: System setup + run: ./sbin/system-setup.py + + - name: Get Redis + uses: actions/checkout@v4 + with: + repository: redis/redis + ref: '7.2' + path: redis + + - name: Build Redis + working-directory: redis + run: make install + + - name: Build RedisJSON + run: | + . 
$HOME/.cargo/env + make build + + - name: Prepare automation + run: | + ./deps/readies/bin/getpy3 + python3 -m pip install -r tests/benchmarks/requirements.txt + VERSION=0.14.8 ./deps/readies/bin/getterraform + + - name: Run CI benchmarks on aws for envs ${{ inputs.allowed_envs }} + timeout-minutes: 240 # timeout for the step + working-directory: tests/benchmarks + continue-on-error: true + env: + # Hard-coded + PERF_CALLGRAPH_MODE: dwarf + MAX_PROFILERS: 1 + # Secrets + AWS_ACCESS_KEY_ID: ${{ secrets.PERFORMANCE_EC2_ACCESS_KEY }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.PERFORMANCE_EC2_SECRET_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.PERFORMANCE_EC2_REGION }} + EC2_PRIVATE_PEM: ${{ secrets.PERFORMANCE_EC2_PRIVATE_PEM }} + # Inputs + PROFILE: ${{ inputs.profile_env }} + BENCHMARK_GLOB: ${{ inputs.benchmark_glob }} + BENCHMARK_RUNNER_GROUP_M_ID: ${{ inputs.benchmark_runner_group_member_id }} + BENCHMARK_RUNNER_GROUP_TOTAL: ${{ inputs.benchmark_runner_group_total }} + run: redisbench-admin run-remote + --module_path ../../${{ inputs.module_path }} + --github_actor ${{ github.triggering_actor }} + --github_repo ${{ github.event.repository.name }} + --github_org ${{ github.repository_owner }} + --required-module ReJSON + --github_sha ${{ github.sha }} + --github_branch ${{ github.head_ref || github.ref_name }} + --upload_results_s3 + --triggering_env ${{ inputs.triggering_env }} + --allowed-envs ${{ inputs.allowed_envs }} + --allowed-setups ${{ inputs.allowed_setups }} + --push_results_redistimeseries + --redistimeseries_host ${{ secrets.PERFORMANCE_RTS_HOST }} + --redistimeseries_port ${{ secrets.PERFORMANCE_RTS_PORT }} + --redistimeseries_pass '${{ secrets.PERFORMANCE_RTS_AUTH }}' + + - name: Generate Pull Request Performance info + if: github.event.number + env: + PERFORMANCE_GH_TOKEN: ${{ secrets.PERFORMANCE_GH_TOKEN }} + PERFORMANCE_WH_TOKEN: ${{ secrets.PERFORMANCE_WH_TOKEN }} + run: redisbench-admin compare + --defaults_filename ./tests/benchmarks/defaults.yml + 
--comparison-branch ${{ github.head_ref || github.ref_name }} + --auto-approve + --pull-request ${{ github.event.number }} + --redistimeseries_host ${{ secrets.PERFORMANCE_RTS_HOST }} + --redistimeseries_port ${{ secrets.PERFORMANCE_RTS_PORT }} + --redistimeseries_pass '${{ secrets.PERFORMANCE_RTS_AUTH }}' diff --git a/.github/workflows/benchmark-runner.yml b/.github/workflows/benchmark-runner.yml new file mode 100644 index 000000000..fb1dd6c2b --- /dev/null +++ b/.github/workflows/benchmark-runner.yml @@ -0,0 +1,39 @@ +name: Run RedisJSON Benchmarks + +on: + workflow_dispatch: + inputs: + extended: + type: boolean + description: 'Run extended benchmarks' + default: false + workflow_call: + inputs: + extended: + type: boolean + default: false + +jobs: + benchmark-json-oss-standalone: + strategy: + matrix: + member_id: [1, 2, 3] + uses: ./.github/workflows/benchmark-flow.yml + secrets: inherit + with: + benchmark_runner_group_member_id: ${{ matrix.member_id }} + benchmark_runner_group_total: ${{ strategy.job-total }} + + benchmark-json-oss-standalone-profiler: + strategy: + matrix: + member_id: [1, 2, 3] + uses: ./.github/workflows/benchmark-flow.yml + secrets: inherit + with: + profile_env: 1 + # TODO: change to "github-actions.profilers" when ready on grafana + triggering_env: "circleci.profilers" # results generated with profilers attached are not mixed with the ones without it + benchmark_runner_group_member_id: ${{ matrix.member_id }} + benchmark_runner_group_total: ${{ strategy.job-total }} + diff --git a/.github/workflows/benchmark-trigger.yml b/.github/workflows/benchmark-trigger.yml new file mode 100644 index 000000000..4a9b949cc --- /dev/null +++ b/.github/workflows/benchmark-trigger.yml @@ -0,0 +1,26 @@ +name: Check if needs trigger benchmark + +on: + pull_request: + types: [opened, labeled, unlabeled, synchronize] + +jobs: + haslabel: + name: analyse labels + runs-on: ubuntu-latest + outputs: + benchmark: ${{ steps.haslabel.outputs.labeled-run-benchmark 
}} + steps: + - uses: actions/checkout@v4 + - name: Check if labeled with run-benchmark + id: haslabel + uses: DanielTamkin/HasLabel@v1.0.4 + with: + contains: 'run-benchmark' + + perf-ci: + name: Trigger + needs: haslabel + if: needs.haslabel.outputs.benchmark + uses: ./.github/workflows/benchmark-runner.yml + secrets: inherit diff --git a/.github/workflows/event-ci.yml b/.github/workflows/event-ci.yml new file mode 100644 index 000000000..7db983dab --- /dev/null +++ b/.github/workflows/event-ci.yml @@ -0,0 +1,65 @@ +name: Event CI + +permissions: + id-token: write + contents: read + +on: + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + docs-only: + uses: ./.github/workflows/task-check-docs.yml + prepare-values: + runs-on: ubuntu-latest + outputs: + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + steps: + - name: set env + id: set-env + run: | + echo "redis-ref=unstable" >> $GITHUB_OUTPUT # todo change per version/tag + linux: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + os: jammy rocky9 amazonlinux2 azurelinux3 + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + azurelinux3-arm64: + uses: ./.github/workflows/flow-azurelinux3-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + linux-valgrind: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + os: jammy + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + run_valgrind: true + secrets: inherit + linux-sanitizer: + needs: [prepare-values, docs-only] + if: ${{ needs.docs-only.outputs.only-docs-changed == 'false' && !github.event.pull_request.draft }} + uses: ./.github/workflows/flow-sanitizer.yml + with: + container: ubuntu:jammy + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + linux-coverage: + needs: [prepare-values, docs-only] + if: ${{ 
needs.docs-only.outputs.only-docs-changed == 'false' && !github.event.pull_request.draft }} + uses: ./.github/workflows/flow-coverage.yml + secrets: inherit + spellcheck: + uses: ./.github/workflows/flow-spellcheck.yml + secrets: inherit + linter: + uses: ./.github/workflows/flow-linter.yml + secrets: inherit diff --git a/.github/workflows/event-nightly.yml b/.github/workflows/event-nightly.yml new file mode 100644 index 000000000..909b2e8d0 --- /dev/null +++ b/.github/workflows/event-nightly.yml @@ -0,0 +1,83 @@ +name: Event Nightly + +permissions: + id-token: write + contents: read + +on: + push: + branches: + - main + - master + - '[0-9]+.[0-9]+.[0-9]+' + - '[0-9]+.[0-9]+' + schedule: + - cron: '20 20 * * *' # 20:20 UTC every day + workflow_dispatch: + inputs: + redis-ref: + description: 'Redis ref to checkout' + required: true + default: 'unstable' +jobs: + prepare-values: + runs-on: ubuntu-latest + outputs: + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + steps: + - name: set env + id: set-env + run: | + echo "redis-ref=unstable" >> $GITHUB_OUTPUT # todo change per version/tag + linux: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + os: bionic focal jammy rocky8 rocky9 bullseye amazonlinux2 mariner2 azurelinux3 + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + ubuntu-arm64: + uses: ./.github/workflows/flow-ubuntu-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + azurelinux3-arm64: + uses: ./.github/workflows/flow-azurelinux3-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + alpine: + uses: ./.github/workflows/flow-alpine.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + macos: + uses: ./.github/workflows/flow-macos.yml + needs: [prepare-values] + with: + redis-ref: 
${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + linux-valgrind: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + os: jammy + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + run_valgrind: true + secrets: inherit + linux-sanitizer: + uses: ./.github/workflows/flow-sanitizer.yml + needs: [prepare-values] + with: + container: ubuntu:jammy + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + spellcheck: + uses: ./.github/workflows/flow-spellcheck.yml + secrets: inherit + linter: + uses: ./.github/workflows/flow-linter.yml + secrets: inherit diff --git a/.github/workflows/event-push-to-integ.yml b/.github/workflows/event-push-to-integ.yml new file mode 100644 index 000000000..9002682c0 --- /dev/null +++ b/.github/workflows/event-push-to-integ.yml @@ -0,0 +1,13 @@ +name: Push to Version, Feature or Master Branch + +on: + push: + branches: + - master + - 'feature-*' + - '[0-9]+.[0-9]+' + +jobs: + benchmark: + uses: ./.github/workflows/benchmark-runner.yml + secrets: inherit diff --git a/.github/workflows/event-tag.yml b/.github/workflows/event-tag.yml new file mode 100644 index 000000000..70d046ba8 --- /dev/null +++ b/.github/workflows/event-tag.yml @@ -0,0 +1,64 @@ +name: Event TAG + +permissions: + id-token: write + contents: read + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + workflow_dispatch: + inputs: + redis-ref: + description: 'Redis ref to checkout' + required: true + default: 'unstable' + +jobs: + prepare-values: + runs-on: ubuntu-latest + outputs: + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + steps: + - name: set env + id: set-env + run: | + echo "redis-ref=unstable" >> $GITHUB_OUTPUT # todo change per version/tag + linux: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + os: bionic focal jammy rocky8 rocky9 bullseye amazonlinux2 mariner2 azurelinux3 + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + 
ubuntu-arm64: + uses: ./.github/workflows/flow-ubuntu-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + azurelinux3-arm64: + uses: ./.github/workflows/flow-azurelinux3-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + alpine: + uses: ./.github/workflows/flow-alpine.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + macos: + uses: ./.github/workflows/flow-macos.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + spellcheck: + uses: ./.github/workflows/flow-spellcheck.yml + secrets: inherit + linter: + uses: ./.github/workflows/flow-linter.yml + secrets: inherit diff --git a/.github/workflows/event-weekly.yml b/.github/workflows/event-weekly.yml new file mode 100644 index 000000000..418f677b0 --- /dev/null +++ b/.github/workflows/event-weekly.yml @@ -0,0 +1,79 @@ +name: Event Weekly + +on: + schedule: + - cron: "0 0 * * 0" + +permissions: + id-token: write + contents: read + +jobs: + run-benchmarks: + uses: ./.github/workflows/benchmark-runner.yml + secrets: inherit + with: + extended: true + + prepare-values: + runs-on: ubuntu-latest + outputs: + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + steps: + - name: set env + id: set-env + run: | + echo "redis-ref=unstable" >> $GITHUB_OUTPUT # todo change per version/tag + linux: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + # os: jammy rocky9 amazonlinux2 + os: bionic focal jammy rocky8 rocky9 bullseye amazonlinux2 mariner2 azurelinux3 + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + ubuntu-arm64: + uses: ./.github/workflows/flow-ubuntu-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + azurelinux3-arm64: + uses: 
./.github/workflows/flow-azurelinux3-arm.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + alpine: + uses: ./.github/workflows/flow-alpine.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + macos: + uses: ./.github/workflows/flow-macos.yml + needs: [prepare-values] + with: + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + linux-valgrind: + uses: ./.github/workflows/flow-linux-x86.yml + needs: [prepare-values] + with: + os: jammy + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + run_valgrind: true + secrets: inherit + linux-sanitizer: + uses: ./.github/workflows/flow-sanitizer.yml + needs: [prepare-values] + with: + container: ubuntu:jammy + redis-ref: ${{needs.prepare-values.outputs.redis-ref}} + secrets: inherit + spellcheck: + uses: ./.github/workflows/flow-spellcheck.yml + secrets: inherit + linter: + uses: ./.github/workflows/flow-linter.yml + secrets: inherit diff --git a/.github/workflows/flow-alpine.yml b/.github/workflows/flow-alpine.yml new file mode 100644 index 000000000..5ab4d4de0 --- /dev/null +++ b/.github/workflows/flow-alpine.yml @@ -0,0 +1,113 @@ +name: Flow alpine + +permissions: + id-token: write + contents: read + +on: + workflow_dispatch: # Allows you to run this workflow manually from the Actions tab + inputs: + redis-ref: + description: 'Redis ref to checkout' + required: true + default: 'unstable' + run-test: + type: boolean + default: true + workflow_call: # Allows to run this workflow from another workflow + inputs: + redis-ref: + description: 'Redis ref to checkout' + type: string + required: true + run-test: + type: boolean + default: true + +jobs: + setup-environment: + runs-on: ubuntu-latest + outputs: + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + TAGGED: ${{ steps.set-env.outputs.TAGGED }} + TAG: ${{ steps.set-env.outputs.TAG }} + BRANCH: ${{ 
steps.set-env.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ steps.set-env.outputs.TAG }}${{ steps.set-env.outputs.BRANCH }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: set env + id: set-env + uses: ./.github/actions/setup-env + with: + github-ref: ${{ github.ref }} + redis-ref: ${{ inputs.redis-ref }} + build: + runs-on: ${{matrix.runs_on}} + needs: setup-environment + defaults: + run: + shell: bash + container: + image: alpine:3 + env: + TAGGED: ${{ needs.setup-environment.outputs.TAGGED }} + VERSION: ${{ needs.setup-environment.outputs.TAG }} + BRANCH: ${{ needs.setup-environment.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ needs.setup-environment.outputs.TAG_OR_BRANCH}} + PIP_BREAK_SYSTEM_PACKAGES: 1 + strategy: + matrix: + runs_on: [ubuntu24-arm64-4-16, ubuntu-latest] + steps: + - name: Install prerequisites + shell: sh + run: | + apk add bash make tar cargo python3 python3-dev py3-pip gcc git curl build-base autoconf automake py3-cryptography linux-headers musl-dev libffi-dev openssl-dev openssh py-virtualenv clang18-libclang + - name: git checkout + run: | + # Perform checkout + REPO_URL="https://github.com/${{ github.repository }}.git" + # Clone the repository to the current directory + git clone --recurse-submodules --depth=1 $REPO_URL . 
+ git config --global --add safe.directory /__w/${{ github.repository }} + REF=${{github.sha}} + git fetch origin ${REF} + git checkout ${REF} + git submodule update --init --recursive + - name: Install python dependencies + run: | + pip install -q --upgrade setuptools + pip install -q --upgrade pip + pip install -q -r tests/pytest/requirements.txt + pip install -q -r .install/build_package_requirements.txt + env: + PIP_BREAK_SYSTEM_PACKAGES: 1 + - name: checkout redis + run: | + # Perform checkout + REPO_URL="https://github.com/redis/redis.git" + # Clone the repository to the current directory + git clone --recurse-submodules $REPO_URL redis + cd redis + git fetch origin ${{ needs.setup-environment.outputs.redis-ref }} + git checkout ${{ needs.setup-environment.outputs.redis-ref }} + git submodule update --init --recursive + - name: Build Redis + working-directory: redis + run: make install + - name: Build module + run: | + make build + - name: Run tests + if: ${{inputs.run-test}} + run: | + make test + - name: Pack module + uses: ./.github/actions/make-pack + - name: Upload artifacts to S3 + uses: ./.github/actions/upload-artifacts-to-s3-without-make + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + github-ref: ${{ github.ref }} diff --git a/.github/workflows/flow-azurelinux3-arm.yml b/.github/workflows/flow-azurelinux3-arm.yml new file mode 100644 index 000000000..dedd81aa5 --- /dev/null +++ b/.github/workflows/flow-azurelinux3-arm.yml @@ -0,0 +1,155 @@ +name: Build and Test Azure Linux 3 ARM instances + +on: + workflow_dispatch: # Allows you to run this workflow manually from the Actions tab + inputs: + redis-ref: + description: 'Redis ref to checkout' + required: true + default: 'unstable' + run-test: + type: boolean + default: true + workflow_call: # Allows to run this workflow from another workflow + inputs: + redis-ref: + description: 'Redis ref to checkout' + type: string + 
required: true + run-test: + type: boolean + default: true + +permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + +jobs: + setup-environment: + runs-on: ubuntu-latest + outputs: + TAGGED: ${{ steps.set-env.outputs.TAGGED }} + TAG: ${{ steps.set-env.outputs.TAG }} + BRANCH: ${{ steps.set-env.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ steps.set-env.outputs.TAG }}${{ steps.set-env.outputs.BRANCH }} + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + steps: + - name: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: set env + id: set-env + uses: ./.github/actions/setup-env + with: + github-ref: ${{ github.ref }} + redis-ref: ${{ inputs.redis-ref }} + + azurelinux3-arm64: + runs-on: ubuntu24-arm64-4-16 # ubuntu24-arm64-2-8 + needs: setup-environment + strategy: + matrix: + docker: + - image: "mcr.microsoft.com/azurelinux/base/core:3.0" + nick: "azurelinux3" + install_git: | + tdnf install --noplugins --skipsignature -y ca-certificates git + install_deps: | + tdnf -y update + tdnf install -y \ + git \ + wget \ + gcc \ + clang-devel \ + llvm-devel \ + make \ + cmake \ + libffi-devel \ + openssl-devel \ + build-essential \ + zlib-devel \ + bzip2-devel \ + python3-devel \ + which \ + unzip \ + ca-certificates \ + python3-pip \ + curl \ + rsync + defaults: + run: + shell: bash + env: + TAGGED: ${{ needs.setup-environment.outputs.TAGGED }} + VERSION: ${{ needs.setup-environment.outputs.TAG }} + BRANCH: ${{ needs.setup-environment.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ needs.setup-environment.outputs.TAG_OR_BRANCH}} + container: + image: ${{ matrix.docker.image }} + steps: + - name: Install git + run: | + ${{ matrix.docker.install_git }} + - name: git checkout + run: | + # Perform checkout + REPO_URL="https://github.com/${{ github.repository }}.git" + # Clone the repository to the current directory + git config --global --add safe.directory /__w/${{ github.repository }} + git 
clone --recurse-submodules --depth=1 $REPO_URL . + REF=${{github.sha}} + git fetch origin ${REF} + git checkout ${REF} + git submodule update --init --recursive + - name: Install dependencies + run: | + ${{ matrix.docker.install_deps }} + - name: Get Redis + run: | + # Perform checkout + REPO_URL="https://github.com/redis/redis.git" + # Clone the repository to the current directory + git clone --recurse-submodules $REPO_URL redis + cd redis + git fetch origin ${{ needs.setup-environment.outputs.redis-ref }} + git checkout ${{ needs.setup-environment.outputs.redis-ref }} + git submodule update --init --recursive + - name: Get Rust + run: | + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + source "$HOME/.cargo/env" + rustup update + rustup update nightly + rustup component add rust-src --toolchain nightly + - name: Install python dependencies + run: | + echo ::group::activate venv + python3 -m venv venv + echo "source $PWD/venv/bin/activate" >> ~/.bash_profile + source venv/bin/activate + echo ::endgroup:: + echo ::group::install requirements + pip install -q --upgrade setuptools + # Upgrade pip to latest version to ensure ARM64 wheel support + pip install -q --upgrade "pip>=21.0" + # Install compatible Cython version as fallback for source builds + pip install -q "Cython<3.0" + # Prefer binary wheels to avoid compilation issues on ARM64 + pip install -q --prefer-binary -r tests/pytest/requirements.txt + pip install -q --prefer-binary -r .install/build_package_requirements.txt + echo ::endgroup:: + env: + PIP_BREAK_SYSTEM_PACKAGES: 1 + - name: build + uses: ./.github/actions/build-json-module-and-redis-with-cargo + - name: Run tests + uses: ./.github/actions/run-tests + - name: Pack module + uses: ./.github/actions/pack-module + - name: Upload artifacts to S3 + uses: ./.github/actions/upload-artifacts-to-s3-without-make + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 
+ github-ref: ${{ github.ref }} \ No newline at end of file diff --git a/.github/workflows/flow-coverage.yml b/.github/workflows/flow-coverage.yml new file mode 100644 index 000000000..b7cc5facd --- /dev/null +++ b/.github/workflows/flow-coverage.yml @@ -0,0 +1,43 @@ +name: Coverage analysis + +on: + workflow_call: + +jobs: + coverage: + runs-on: ubuntu-22.04 + defaults: + run: + shell: bash -l -eo pipefail {0} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 # fetch all history for accurate results + - name: Setup specific + working-directory: .install + run: | + ./install_script.sh sudo + - name: Setup common + run: | + ./.install/common_installations.sh sudo + - name: Get Rust + working-directory: .install + run: | + ./getrust.sh sudo + - name: Install clang 18 + working-directory: .install + run: | + ./install_clang.sh sudo + - name: Get Redis + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: ./deps/readies/bin/getredis --with-github-token + - name: Build and test + run: make coverage QUICK=1 SHOW=1 + - name: Upload coverage + uses: codecov/codecov-action@v3 + with: + file: bin/linux-x64-debug-cov/cov.info + fail_ci_if_error: true # Fail on upload errors + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/flow-linter.yml b/.github/workflows/flow-linter.yml new file mode 100644 index 000000000..cbdb0723b --- /dev/null +++ b/.github/workflows/flow-linter.yml @@ -0,0 +1,22 @@ +name: Flow linter +on: + workflow_call: # Allows to run this workflow from another workflow + +jobs: + linter: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: true + - name: Install build dependencies + run: sudo apt-get update && sudo apt-get install -y build-essential + - name: Install Rust + working-directory: .install + run: | + ./install_script.sh sudo + ./getrust.sh sudo + - name: Linter + run: | + source "$HOME/.cargo/env" + make lint diff --git a/.github/workflows/flow-linux-x86.yml 
b/.github/workflows/flow-linux-x86.yml new file mode 100644 index 000000000..9ebffc28a --- /dev/null +++ b/.github/workflows/flow-linux-x86.yml @@ -0,0 +1,191 @@ +name: Build all supported linux platforms + +on: + workflow_dispatch: # Allows to run this workflow from another workflow + inputs: + redis-ref: + description: 'Redis ref to checkout' # todo change per version/tag + type: string + required: true + run-test: + type: boolean + default: true + os: + description: 'OS to build on, bash array style' + type: string + required: true + run_valgrind: + description: 'Run valgrind on the tests' + type: boolean + default: false + workflow_call: # Allows to run this workflow from another workflow + inputs: + redis-ref: + description: 'Redis ref to checkout' # todo change per version/tag + type: string + required: true + run-test: + type: boolean + default: true + os: + description: 'OS to build on, bash array style' + type: string + required: true + run_valgrind: + description: 'Run valgrind on the tests' + type: boolean + default: false + +jobs: + setup-environment: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + TAGGED: ${{ steps.set-env.outputs.TAGGED }} + TAG: ${{ steps.set-env.outputs.TAG }} + BRANCH: ${{ steps.set-env.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ steps.set-env.outputs.TAG }}${{ steps.set-env.outputs.BRANCH }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: set env + id: set-env + uses: ./.github/actions/setup-env + with: + github-ref: ${{ github.ref }} + redis-ref: ${{ inputs.redis-ref }} + - name: Set matrix + id: set-matrix + run: | + OS="${{ inputs.os }}" + if [ -z "${OS}" ]; then + OS="bionic focal jammy rocky8 rocky9 bullseye amazonlinux2 mariner2 azurelinux3" + fi + MATRIX="[" + for os in $OS; do + case $os in + bionic) + MATRIX="${MATRIX}{\"image\": \"ubuntu:bionic\", \"pre_req_install_cmd\": \"apt-get update && apt-get install -y 
software-properties-common && add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git\"}," + ;; + focal) + MATRIX="${MATRIX}{\"image\": \"ubuntu:focal\", \"pre_req_install_cmd\": \"apt-get update && apt-get install -y software-properties-common && add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git\"}," + ;; + jammy) + MATRIX="${MATRIX}{\"image\": \"ubuntu:jammy\", \"pre_req_install_cmd\": \"apt-get update && apt-get install -y git\"}," + ;; + rocky8) + MATRIX="${MATRIX}{\"image\": \"rockylinux:8\", \"pre_req_install_cmd\": \"dnf install -y git\"}," + ;; + rocky9) + MATRIX="${MATRIX}{\"image\": \"rockylinux:9\", \"pre_req_install_cmd\": \"dnf install -y git\"}," + ;; + bullseye) + MATRIX="${MATRIX}{\"image\": \"debian:bullseye\", \"pre_req_install_cmd\": \"apt-get update && apt-get install -y git\"}," + ;; + amazonlinux2) + MATRIX="${MATRIX}{\"image\": \"amazonlinux:2\", \"pre_req_install_cmd\": \"yum update -y && yum install -y git\"}," + ;; + mariner2) + MATRIX="${MATRIX}{\"image\": \"mcr.microsoft.com/cbl-mariner/base/core:2.0\", \"pre_req_install_cmd\": \"tdnf install --noplugins --skipsignature -y ca-certificates git\"}," + ;; + azurelinux3) + MATRIX="${MATRIX}{\"image\": \"mcr.microsoft.com/azurelinux/base/core:3.0\", \"pre_req_install_cmd\": \"tdnf install --noplugins --skipsignature -y ca-certificates git\"}," + ;; + *) + echo "Unknown OS: $os" + exit 1 + ;; + esac + done + MATRIX="${MATRIX%?}]" + echo "${MATRIX}" + echo "matrix=${MATRIX}" >> $GITHUB_OUTPUT + + build-linux-matrix: + name: ${{matrix.docker_image.image}}, ${{needs.setup-environment.outputs.redis-ref}} + runs-on: ubuntu-latest + needs: setup-environment + strategy: + fail-fast: false + matrix: + docker_image: ${{fromJson(needs.setup-environment.outputs.matrix)}} + env: + TAGGED: ${{ needs.setup-environment.outputs.TAGGED }} + VERSION: ${{ needs.setup-environment.outputs.TAG }} + BRANCH: ${{ needs.setup-environment.outputs.BRANCH }} + 
TAG_OR_BRANCH: ${{ needs.setup-environment.outputs.TAG_OR_BRANCH}} + PIP_BREAK_SYSTEM_PACKAGES: 1 + container: + image: ${{ matrix.docker_image.image }} + defaults: + run: + shell: bash -l -eo pipefail {0} + steps: + - name: Install git + run: ${{ matrix.docker_image.pre_req_install_cmd }} + - name: Checkout + if: matrix.docker_image.image != 'amazonlinux:2' && matrix.docker_image.image != 'ubuntu:bionic' + uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Checkout Redis + if: matrix.docker_image.image != 'amazonlinux:2' && matrix.docker_image.image != 'ubuntu:bionic' + uses: actions/checkout@v4 + with: + repository: redis/redis + ref: ${{needs.setup-environment.outputs.redis-ref}} + path: redis + submodules: 'recursive' + - name: git checkout + if: matrix.docker_image.image == 'amazonlinux:2' || matrix.docker_image.image == 'ubuntu:bionic' + run: | + # Perform checkout + REPO_URL="https://github.com/${{ github.repository }}.git" + # Clone the repository to the current directory + git config --global --add safe.directory /__w/${{ github.repository }} + git clone --recurse-submodules --depth=1 $REPO_URL . 
+ REF=${{github.sha}} + git fetch origin ${REF} + git checkout ${REF} + git submodule update --init --recursive + + # Perform checkout + REPO_URL="https://github.com/redis/redis.git" + # Clone the repository to the current directory + git clone --recurse-submodules $REPO_URL redis + cd redis + git fetch origin ${{needs.setup-environment.outputs.redis-ref}} + git checkout ${{needs.setup-environment.outputs.redis-ref}} + git submodule update --init --recursive + - name: Setup + working-directory: .install + run: | + echo ::group::Install dependencies + ./install_script.sh + echo ::endgroup:: + echo ::group::Install rust + ./getrust.sh + echo ::endgroup:: + - name: build + uses: ./.github/actions/build-json-module-and-redis-with-cargo + - name: Run tests + if: ${{inputs.run-test}} + uses: ./.github/actions/run-tests + with: + run_valgrind: ${{inputs.run_valgrind && '1' || '0'}} + - name: Upload test artifacts + if: failure() + uses: ./.github/actions/upload-artifacts + with: + image: ${{ matrix.docker_image.image }} + - name: Pack module + if: ${{!inputs.run_valgrind}} + uses: ./.github/actions/pack-module + - name: Upload artifacts to S3 + if: ${{!inputs.run_valgrind}} + uses: ./.github/actions/upload-artifacts-to-s3-without-make + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + github-ref: ${{ github.ref }} diff --git a/.github/workflows/flow-macos.yml b/.github/workflows/flow-macos.yml new file mode 100644 index 000000000..244fd4731 --- /dev/null +++ b/.github/workflows/flow-macos.yml @@ -0,0 +1,167 @@ +name: Build for macos + +on: + workflow_dispatch: # Allows you to run this workflow manually from the Actions tab + inputs: + redis-ref: + description: 'Redis ref to checkout' + required: true + default: 'unstable' + run-test: + type: boolean + default: true + workflow_call: # Allows you to run this workflow manually from the Actions tab + inputs: + redis-ref: + description: 'Redis ref to 
checkout' + type: string + default: 'unstable' + run-test: + type: boolean + default: true + +jobs: + setup-environment: + runs-on: ubuntu-latest + outputs: + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + TAGGED: ${{ steps.set-env.outputs.TAGGED }} + TAG: ${{ steps.set-env.outputs.TAG }} + BRANCH: ${{ steps.set-env.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ steps.set-env.outputs.TAG }}${{ steps.set-env.outputs.BRANCH }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: set env + id: set-env + uses: ./.github/actions/setup-env + with: + github-ref: ${{ github.ref }} + redis-ref: ${{ inputs.redis-ref }} + build-macos-x64: + runs-on: macos-13 + needs: setup-environment + env: + TAGGED: ${{ needs.setup-environment.outputs.TAGGED }} + VERSION: ${{ needs.setup-environment.outputs.TAG }} + BRANCH: ${{ needs.setup-environment.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ needs.setup-environment.outputs.TAG_OR_BRANCH}} + PIP_BREAK_SYSTEM_PACKAGES: 1 + defaults: + run: + shell: bash -l -eo pipefail {0} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Deps checkout + uses: actions/checkout@v4 + with: + path: setup + sparse-checkout-cone-mode: false + sparse-checkout: | + .install + tests/pytest/requirements.* + - name: Setup specific + working-directory: .install + run: ./install_script.sh + - name: Full checkout + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Setup common + run: .install/common_installations.sh + - name: Get Redis + uses: actions/checkout@v4 + with: + repository: redis/redis + ref: ${{ needs.setup-environment.outputs.redis-ref }} + path: redis + - name: Build Redis + working-directory: redis + run: make install + - name: Build module + run: | + make build + - name: Test + if: ${{inputs.run-test}} + run: | + make test + - name: Pack module + run: make pack BRANCH=$TAG_OR_BRANCH + - name: Upload artifacts to S3 + uses: ./.github/actions/upload-artifacts-to-s3-without-make + 
with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + github-ref: ${{ github.ref }} + + + build-macos-m1: + runs-on: macos-latest-xlarge + needs: setup-environment + env: + TAGGED: ${{ needs.setup-environment.outputs.TAGGED }} + VERSION: ${{ needs.setup-environment.outputs.TAG }} + BRANCH: ${{ needs.setup-environment.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ needs.setup-environment.outputs.TAG_OR_BRANCH}} + PIP_BREAK_SYSTEM_PACKAGES: 1 + defaults: + run: + shell: bash -l -eo pipefail {0} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Deps checkout + uses: actions/checkout@v4 + with: + path: setup + sparse-checkout-cone-mode: false + sparse-checkout: | + .install + tests/pytest/requirements.* + - name: Setup specific + working-directory: setup/.install + run: ./install_script.sh + - name: Full checkout + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Setup common + run: | + echo ::group::Activate virtual environment + python3 -m venv venv + echo "source venv/bin/activate" >> ~/.bashrc + echo "source venv/bin/activate" >> ~/.zshrc + . 
venv/bin/activate + echo ::endgroup:: + echo ::group::Install python dependencies + ./.install/common_installations.sh + echo ::endgroup:: + - name: Get Redis + uses: actions/checkout@v4 + with: + repository: redis/redis + ref: ${{ needs.setup-environment.outputs.redis-ref }} + path: redis + - name: Build Redis + working-directory: redis + run: make install + - name: Build module + run: | + make build + - name: Test + if: ${{inputs.run-test}} + run: | + make test + - name: Pack module + run: make pack BRANCH=$TAG_OR_BRANCH + - name: Upload artifacts to S3 + uses: ./.github/actions/upload-artifacts-to-s3-without-make + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + github-ref: ${{ github.ref }} diff --git a/.github/workflows/flow-sanitizer.yml b/.github/workflows/flow-sanitizer.yml new file mode 100644 index 000000000..9c59d6eb0 --- /dev/null +++ b/.github/workflows/flow-sanitizer.yml @@ -0,0 +1,35 @@ +name: Clang Sanitizer + +# Documentation: https://redislabs.atlassian.net/wiki/spaces/DX/pages/3967844669/RediSearch+CI+refactor + +on: + workflow_call: + inputs: + container: + default: "ubuntu:jammy" + type: string + required: true + redis-ref: + description: 'Redis ref to checkout' + type: string + required: true + +jobs: + clang-sanitizer: + runs-on: ubuntu-22.04 + defaults: + run: + shell: bash -l -eo pipefail {0} + container: + image: ${{ inputs.container }} + steps: + - name: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: ./.github/actions/san-run-tests + with: + container: ${{ inputs.container }} + test-config: QUICK=1 + sanitizer: addr + redis-ref: ${{inputs.redis-ref}} diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/flow-spellcheck.yml similarity index 55% rename from .github/workflows/spellcheck.yml rename to .github/workflows/flow-spellcheck.yml index 8b445ccb1..bd8a5ba0b 100644 --- a/.github/workflows/spellcheck.yml +++ 
b/.github/workflows/flow-spellcheck.yml @@ -1,16 +1,14 @@ name: Spellcheck on: - push: - branches: [master] - pull_request: - branches: [master] + workflow_call: # Allows to run this workflow from another workflow + jobs: spellcheck: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Spellcheck - uses: rojopolis/spellcheck-github-actions@0.30.0 + uses: rojopolis/spellcheck-github-actions@0.45.0 with: config_path: .github/spellcheck-settings.yml task_name: Markdown diff --git a/.github/workflows/flow-ubuntu-arm.yml b/.github/workflows/flow-ubuntu-arm.yml new file mode 100644 index 000000000..ac575a6d9 --- /dev/null +++ b/.github/workflows/flow-ubuntu-arm.yml @@ -0,0 +1,186 @@ +name: Build and Test ubuntu ARM instances + +on: + workflow_dispatch: # Allows you to run this workflow manually from the Actions tab + inputs: + redis-ref: + description: 'Redis ref to checkout' + required: true + default: 'unstable' + run-test: + type: boolean + default: true + workflow_call: # Allows to run this workflow from another workflow + inputs: + redis-ref: + description: 'Redis ref to checkout' + type: string + required: true + run-test: + type: boolean + default: true + +permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + + +jobs: + setup-environment: + runs-on: ubuntu-latest + outputs: + TAGGED: ${{ steps.set-env.outputs.TAGGED }} + TAG: ${{ steps.set-env.outputs.TAG }} + BRANCH: ${{ steps.set-env.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ steps.set-env.outputs.TAG }}${{ steps.set-env.outputs.BRANCH }} + redis-ref: ${{ steps.set-env.outputs.redis-ref }} + steps: + - name: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: set env + id: set-env + uses: ./.github/actions/setup-env + with: + github-ref: ${{ github.ref }} + redis-ref: ${{ inputs.redis-ref }} + + ubuntu-arm64: + runs-on: ubuntu24-arm64-4-16 # ubuntu24-arm64-2-8 + needs: 
setup-environment + strategy: + matrix: + docker: + - image: "ubuntu:bionic" + nick: "bionic" + install_git: | + apt-get update && apt-get install -y software-properties-common + add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git + install_deps: | + apt update -qq + apt upgrade -yqq + apt dist-upgrade -yqq + apt install -yqq software-properties-common unzip rsync + add-apt-repository ppa:ubuntu-toolchain-r/test -y + apt update + apt install -yqq build-essential wget curl make gcc-10 g++-10 openssl libssl-dev cargo binfmt-support \ + lsb-core awscli libclang-dev clang curl + update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 60 --slave /usr/bin/g++ g++ /usr/bin/g++-10 + apt -y install python3.8 python3.8-venv python3.8-dev python3-venv python3-dev python3-pip + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 2 + - image: "ubuntu:focal" + nick: focal + install_git: | + apt-get update && apt-get install -y software-properties-common + add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git + install_deps: | + apt update -qq + apt upgrade -yqq + apt install -yqq wget make clang-format gcc python3 python3-venv python3-pip lcov git openssl libssl-dev \ + unzip rsync build-essential gcc-10 g++-10 cargo libclang-dev clang curl + update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 60 --slave /usr/bin/g++ g++ /usr/bin/g++-10 + - image: "ubuntu:jammy" + nick: "jammy" + install_git: | + apt-get update && apt-get install -y git + install_deps: | + apt update -qq + apt upgrade -yqq + apt install -yqq git wget build-essential lcov openssl libssl-dev \ + python3 python3-pip python3-venv python3-dev unzip rsync libclang-dev clang curl + defaults: + run: + shell: bash + env: + TAGGED: ${{ needs.setup-environment.outputs.TAGGED }} + VERSION: ${{ needs.setup-environment.outputs.TAG }} + BRANCH: ${{ needs.setup-environment.outputs.BRANCH }} + TAG_OR_BRANCH: ${{ 
needs.setup-environment.outputs.TAG_OR_BRANCH}} + container: + image: ${{ matrix.docker.image }} + steps: + - name: Install git + run: | + ${{ matrix.docker.install_git }} + - name: git checkout + if: matrix.docker.image == 'ubuntu:bionic' + run: | + # Perform checkout + REPO_URL="https://github.com/${{ github.repository }}.git" + # Clone the repository to the current directory + git config --global --add safe.directory /__w/${{ github.repository }} + git clone --recurse-submodules --depth=1 $REPO_URL . + REF=${{github.sha}} + git fetch origin ${REF} + git checkout ${REF} + git submodule update --init --recursive + - name: Checkout the module + if: matrix.docker.image != 'ubuntu:bionic' + uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Install dependencies + run: | + ${{ matrix.docker.install_deps }} + env: + DEBIAN_FRONTEND: noninteractive + - name: Checkout Redis + if: matrix.docker.image != 'ubuntu:bionic' + uses: actions/checkout@v4 + with: + repository: 'redis/redis' + ref: ${{ needs.setup-environment.outputs.redis-ref }} + path: 'redis' + - name: Get Redis + if: matrix.docker.image == 'ubuntu:bionic' + run: | + # Perform checkout + REPO_URL="https://github.com/redis/redis.git" + # Clone the repository to the current directory + git clone --recurse-submodules $REPO_URL redis + cd redis + git fetch origin ${{ needs.setup-environment.outputs.redis-ref }} + git checkout ${{ needs.setup-environment.outputs.redis-ref }} + git submodule update --init --recursive + - name: Get Rust + run: | + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + source "$HOME/.cargo/env" + rustup update + rustup update nightly + rustup component add rust-src --toolchain nightly + - name: Install python dependencies + run: | + echo ::group::activate venv + python3 -m venv venv + echo "source $PWD/venv/bin/activate" >> ~/.bash_profile + source venv/bin/activate + echo ::endgroup:: + echo ::group::install requirements + pip install -q --upgrade 
setuptools + # Upgrade pip to latest version to ensure ARM64 wheel support + pip install -q --upgrade "pip>=21.0" + # Install compatible Cython version as fallback for source builds + pip install -q "Cython<3.0" + # Prefer binary wheels to avoid compilation issues on ARM64 + pip install -q --prefer-binary -r tests/pytest/requirements.txt + pip install -q --prefer-binary -r .install/build_package_requirements.txt + echo ::endgroup:: + env: + PIP_BREAK_SYSTEM_PACKAGES: 1 + - name: build + uses: ./.github/actions/build-json-module-and-redis-with-cargo + - name: Test + if: ${{inputs.run-test}} + run: | + source venv/bin/activate + MODULE=$(realpath ./target/release/rejson.so) RLTEST_ARGS='--no-progress' ./tests/pytest/tests.sh + - name: Pack module + uses: ./.github/actions/pack-module + - name: Upload artifacts to S3 + uses: ./.github/actions/upload-artifacts-to-s3-without-make + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + github-ref: ${{ github.ref }} diff --git a/.github/workflows/freebsd.yml b/.github/workflows/freebsd.yml index f7f39c72b..276946d95 100644 --- a/.github/workflows/freebsd.yml +++ b/.github/workflows/freebsd.yml @@ -1,20 +1,16 @@ name: freebsd -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] +on: [] jobs: build: - runs-on: macos-12 + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: build - uses: vmactions/freebsd-vm@v0.3.0 + uses: vmactions/freebsd-vm@v1 with: usesh: true sync: rsync diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml deleted file mode 100644 index da24a8143..000000000 --- a/.github/workflows/macos.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: macos - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - -jobs: - build: - - runs-on: macos-latest - - steps: - - uses: actions/checkout@v2 - - name: Build - run: cargo build --all --all-targets 
--verbose - - name: Run tests - run: cargo test --all --all-targets --verbose diff --git a/.github/workflows/task-check-docs.yml b/.github/workflows/task-check-docs.yml new file mode 100644 index 000000000..951dca656 --- /dev/null +++ b/.github/workflows/task-check-docs.yml @@ -0,0 +1,29 @@ +name: Checks if Only Documentation Files were Changed + +# Documentation: https://redislabs.atlassian.net/wiki/spaces/DX/pages/3967844669/RediSearch+CI+refactor + +on: + workflow_call: + outputs: + only-docs-changed: + value: ${{ jobs.check-only-docs-changed.outputs.only-docs-changed }} + +jobs: + check-only-docs-changed: + runs-on: ubuntu-latest + outputs: + only-docs-changed: ${{ steps.check-docs.outputs.only_modified }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 # required for changed-files action to work + - name: Check if only docs were changed + id: check-docs + uses: tj-actions/changed-files@v44 + with: + # List of files we allow to be changed without running the CI. Modify as needed. 
+ files: | + **.md + licenses/** + LICENSE.txt diff --git a/.github/workflows/trigger-benchmark.yml b/.github/workflows/trigger-benchmark.yml deleted file mode 100644 index 980b2aeee..000000000 --- a/.github/workflows/trigger-benchmark.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Check if needs trigger CircleCI benchmark - -on: - pull_request: - types: [opened, labeled, unlabeled, synchronize] - -jobs: - haslabel: - name: analyse labels - runs-on: ubuntu-latest - outputs: - benchmark: ${{ steps.haslabel.outputs.labeled-run-benchmark }} - steps: - - uses: actions/checkout@v2 - - name: Labeled with run-benchmark - id: haslabel - uses: DanielTamkin/HasLabel@v1.0.4 - with: - contains: 'run-benchmark' - perf-ci: - name: Trigger CI benchmarks - needs: haslabel - if: needs.haslabel.outputs.benchmark - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: curl-circle-ci - run: | - curl --request POST \ - --url https://circleci.com/api/v2/project/gh/${{ github.repository }}/pipeline \ - --header 'Circle-Token: ${{ secrets.CIRCLE_CI_SECRET }}' \ - --header 'content-type: application/json' \ - --data '{"branch": "${{ github.event.pull_request.head.ref }}", - "parameters": {"run_default_flow":true, "run_benchmark_flow_label":true}}' diff --git a/.github/workflows/trigger-build.yml b/.github/workflows/trigger-build.yml deleted file mode 100644 index 8c5dc3574..000000000 --- a/.github/workflows/trigger-build.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Trigger website deploy -on: - push: - branches: - - master - - '[0-9]+.[0-9]+' - paths: - - 'docs/**' - - 'commands.json' - -jobs: - trigger: - runs-on: ubuntu-latest - steps: - - run: | - echo "'$DATA'" | xargs \ - curl \ - -X POST https://api.netlify.com/build_hooks/${NETLIFY_BUILD_HOOK_ID} \ - -d - env: - NETLIFY_BUILD_HOOK_ID: ${{ secrets.NETLIFY_BUILD_HOOK_ID }} - DATA: '{"repository":"${{ github.repository }}", "sha":"${{ github.sha }}", "ref":"${{ github.ref }}"}}' diff --git a/.gitignore b/.gitignore index 
6e730bc9b..afc3b3a87 100644 --- a/.gitignore +++ b/.gitignore @@ -33,3 +33,8 @@ .settings/ wordlist.dic + +# RLTest +config.txt + +venv/ diff --git a/.install/activate_venv.sh b/.install/activate_venv.sh new file mode 100755 index 000000000..03e84ff58 --- /dev/null +++ b/.install/activate_venv.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +activate_venv() { + echo "copy activation script to shell config" + if [[ $OS_TYPE == Darwin ]]; then + echo "source venv/bin/activate" >> ~/.bashrc + echo "source venv/bin/activate" >> ~/.zshrc + else + echo "source $PWD/venv/bin/activate" >> ~/.bash_profile + fi +} + +python3 -m venv venv +activate_venv +source venv/bin/activate diff --git a/.install/amazon_linux_2.sh b/.install/amazon_linux_2.sh new file mode 100755 index 000000000..c7f3c8611 --- /dev/null +++ b/.install/amazon_linux_2.sh @@ -0,0 +1,29 @@ +#!/bin/bash +MODE=$1 # whether to install using sudo or not +set -e +export DEBIAN_FRONTEND=noninteractive + +$MODE yum update -y +# Install the RPM package that provides the Software Collections (SCL) required for devtoolset-11 +$MODE yum install -y https://vault.centos.org/centos/7/extras/x86_64/Packages/centos-release-scl-rh-2-3.el7.centos.noarch.rpm + +# http://mirror.centos.org/centos/7/ is deprecated, so we changed the above link to `https://vault.centos.org`, +# and we have to change the baseurl in the repo file to the working mirror (from mirror.centos.org to vault.centos.org) +$MODE sed -i 's/mirrorlist=/#mirrorlist=/g' /etc/yum.repos.d/CentOS-SCLo-scl-rh.repo # Disable mirrorlist +$MODE sed -i 's/#baseurl=http:\/\/mirror/baseurl=http:\/\/vault/g' /etc/yum.repos.d/CentOS-SCLo-scl-rh.repo # Enable a working baseurl + +$MODE yum install -y wget git which devtoolset-11-gcc devtoolset-11-gcc-c++ devtoolset-11-make \ + rsync python3 unzip tar python3-devel python3-pip awscli + +source /opt/rh/devtoolset-11/enable + +cp /opt/rh/devtoolset-11/enable /etc/profile.d/scl-devtoolset-11.sh + +$MODE yum install -y curl +$MODE yum install -y
openssl11 openssl11-devel +$MODE ln -s `which openssl11` /usr/bin/openssl + +# Install clang +$MODE yum install -y clang + +source install_cmake.sh $MODE diff --git a/.install/build_package_requirements.txt b/.install/build_package_requirements.txt new file mode 100644 index 000000000..bd647bb41 --- /dev/null +++ b/.install/build_package_requirements.txt @@ -0,0 +1,4 @@ +addict +toml +jinja2 +ramp-packer==2.5.13 diff --git a/.install/common_base_linux_mariner_2.0.sh b/.install/common_base_linux_mariner_2.0.sh new file mode 100755 index 000000000..8936d807f --- /dev/null +++ b/.install/common_base_linux_mariner_2.0.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +tdnf install -q -y build-essential git wget ca-certificates tar openssl-devel \ + cmake python3 python3-pip rust clang which unzip + +pip install -q --upgrade setuptools +pip install -q --upgrade pip +pip install -q -r tests/pytest/requirements.txt + +# These packages are needed to build the package +pip install -q -r .install/build_package_requirements.txt + +# Install aws-cli for uploading artifacts to s3 +curdir="$PWD" +cd /tmp/ +curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" +unzip awscliv2.zip +./aws/install +cd $curdir diff --git a/.install/common_installations.sh b/.install/common_installations.sh new file mode 100755 index 000000000..dc781d70f --- /dev/null +++ b/.install/common_installations.sh @@ -0,0 +1,16 @@ +#!/bin/bash +set -e +OS_TYPE=$(uname -s) +MODE=$1 # whether to install using sudo or not + +pip3 install --upgrade pip +pip3 install -q --upgrade setuptools +echo "pip version: $(pip3 --version)" +echo "pip path: $(which pip3)" + +pip3 install -q -r tests/pytest/requirements.txt +# These packages are needed to build the package +pip3 install -q -r .install/build_package_requirements.txt + +# List installed packages +pip3 list diff --git a/.install/debian_gnu_linux_11.sh b/.install/debian_gnu_linux_11.sh new file mode 100755 index 000000000..f7db0cc66 --- /dev/null +++ 
b/.install/debian_gnu_linux_11.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -e +export DEBIAN_FRONTEND=noninteractive +MODE=$1 # whether to install using sudo or not + +$MODE apt update -qq +$MODE apt upgrade -yqq +$MODE apt install -yqq git wget curl build-essential lcov openssl libssl-dev python3 python3-venv python3-pip \ + rsync unzip cargo libclang-dev clang + +source install_cmake.sh $MODE diff --git a/.install/getrust.sh b/.install/getrust.sh new file mode 100755 index 000000000..c92280ba3 --- /dev/null +++ b/.install/getrust.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +MODE=$1 # whether to install using sudo or not + +# Download and install rustup +$MODE curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + +# Source the cargo environment script to update the PATH +echo "source $HOME/.cargo/env" >> $HOME/.bashrc +source $HOME/.cargo/env + +# Update rustup +$MODE rustup update + +# Install the toolchain specified in rust-toolchain.toml (if present) +if [ -f "rust-toolchain.toml" ]; then + TOOLCHAIN=$(grep -E '^\s*channel\s*=' rust-toolchain.toml | sed 's/.*=\s*"\([^"]*\)".*/\1/' | tr -d ' ') + if [ -n "$TOOLCHAIN" ]; then + $MODE rustup toolchain install "$TOOLCHAIN" + else + $MODE rustup update nightly + fi +else + $MODE rustup update nightly +fi + +# Install required components for the active toolchain +$MODE rustup component add rust-src +$MODE rustup component add rustfmt +$MODE rustup component add clippy + +# Verify cargo installation +cargo --version + +rustup show diff --git a/.install/install_aws.sh b/.install/install_aws.sh new file mode 100755 index 000000000..0fcf40036 --- /dev/null +++ b/.install/install_aws.sh @@ -0,0 +1,14 @@ +#!/bin/bash +ARCH=$(uname -m) +OS_TYPE=$(uname -s) +MODE=$1 # whether to install using sudo or not + +if [[ $OS_TYPE = 'Darwin' ]] +then + curl "https://awscli.amazonaws.com/AWSCLIV2.pkg" -o "AWSCLIV2.pkg" + $MODE installer -pkg AWSCLIV2.pkg -target / +else + wget -O awscliv2.zip 
https://awscli.amazonaws.com/awscli-exe-linux-${ARCH}.zip + unzip awscliv2.zip + $MODE ./aws/install +fi diff --git a/.install/install_clang.sh b/.install/install_clang.sh new file mode 100755 index 000000000..18b8e8b1f --- /dev/null +++ b/.install/install_clang.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env sh + +rust_llvm_major_version=$(rustc --version --verbose | grep "LLVM version" | awk '{print $3}' | cut -d. -f1) + +export CWD=$(dirname `which "${0}"`) +export CLANG_VERSION=${rust_llvm_major_version} +export DEBIAN_FRONTEND=noninteractive +MODE=$1 # whether to install using sudo or not + +wget https://apt.llvm.org/llvm.sh -O llvm.sh + +chmod u+x llvm.sh + +$MODE ./llvm.sh $CLANG_VERSION all + +$MODE apt-get install python3-lldb-${rust_llvm_major_version} --yes + +$MODE $CWD/update_clang_alternatives.sh $CLANG_VERSION 1 + +ls /bin/ + +$MODE clang --version +$MODE clang++ --version +$MODE llvm-cov --version +$MODE llvm-profdata --version + diff --git a/.install/install_cmake.sh b/.install/install_cmake.sh new file mode 100755 index 000000000..418a8df22 --- /dev/null +++ b/.install/install_cmake.sh @@ -0,0 +1,22 @@ +#!/bin/bash +version=3.25.1 +processor=$(uname -m) +OS_TYPE=$(uname -s) +MODE=$1 # whether to install using sudo or not + +if [[ $OS_TYPE = 'Darwin' ]] +then + brew install cmake +else + if [[ $processor = 'x86_64' ]] + then + filename=cmake-${version}-linux-x86_64.sh + else + filename=cmake-${version}-linux-aarch64.sh + fi + + wget https://github.com/Kitware/CMake/releases/download/v${version}/$filename + chmod u+x ./$filename + $MODE ./$filename --skip-license --prefix=/usr/local --exclude-subdir + cmake --version +fi diff --git a/.install/install_script.sh b/.install/install_script.sh new file mode 100755 index 000000000..b66f8effd --- /dev/null +++ b/.install/install_script.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +OS_TYPE=$(uname -s) +MODE=$1 # whether to install using sudo or not + +if [[ $OS_TYPE = 'Darwin' ]] +then + OS='macos' +else + VERSION=$(grep
'^VERSION_ID=' /etc/os-release | sed 's/"//g') + VERSION=${VERSION#"VERSION_ID="} + OS_NAME=$(grep '^NAME=' /etc/os-release | sed 's/"//g') + OS_NAME=${OS_NAME#"NAME="} + [[ $OS_NAME == 'Rocky Linux' ]] && VERSION=${VERSION%.*} # remove minor version for Rocky Linux + OS=${OS_NAME,,}_${VERSION} + OS=$(echo $OS | sed 's/[/ ]/_/g') # replace spaces and slashes with underscores +fi +echo $OS + +source ${OS}.sh $MODE + +git config --global --add safe.directory '*' diff --git a/.install/macos.sh b/.install/macos.sh new file mode 100755 index 000000000..01108f543 --- /dev/null +++ b/.install/macos.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +if ! which brew &> /dev/null; then + echo "Brew is not installed. Install from https://brew.sh" + exit 1 +fi + +export HOMEBREW_NO_AUTO_UPDATE=1 + +LLVM_VERSION="18" + +brew update +brew install coreutils +brew install make +brew install openssl +brew install llvm@$LLVM_VERSION + +BREW_PREFIX=$(brew --prefix) +GNUBIN=$BREW_PREFIX/opt/make/libexec/gnubin +LLVM="$BREW_PREFIX/opt/llvm@$LLVM_VERSION/bin" +COREUTILS=$BREW_PREFIX/opt/coreutils/libexec/gnubin + +update_profile() { + local profile_path=$1 + local newpath="export PATH=$COREUTILS:$LLVM:$GNUBIN:\$PATH" + grep -qxF "$newpath" "$profile_path" || echo "$newpath" >> "$profile_path" + source $profile_path +} + +[[ -f ~/.bash_profile ]] && update_profile ~/.bash_profile +[[ -f ~/.zshrc ]] && update_profile ~/.zshrc + +source install_cmake.sh diff --git a/.install/microsoft_azure_linux_3.0.sh b/.install/microsoft_azure_linux_3.0.sh new file mode 100644 index 000000000..1a3279851 --- /dev/null +++ b/.install/microsoft_azure_linux_3.0.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# This script automates the process of setting up a development environment for RedisJSON on a Microsoft Azure Linux virtual machine. 
+ +set -e + +# Update and install dev tools needed for building and testing +tdnf -y update && \ +tdnf install -y \ + git \ + wget \ + gcc \ + clang-devel \ + llvm-devel \ + make \ + cmake \ + libffi-devel \ + openssl-devel \ + build-essential \ + zlib-devel \ + bzip2-devel \ + python3-devel \ + which \ + unzip \ + ca-certificates \ + python3-pip + +# Install AWS CLI for uploading to S3 +pip3 install awscli --upgrade + diff --git a/.install/rocky_linux_8.sh b/.install/rocky_linux_8.sh new file mode 100755 index 000000000..9753f1852 --- /dev/null +++ b/.install/rocky_linux_8.sh @@ -0,0 +1,24 @@ +#!/bin/bash +MODE=$1 # whether to install using sudo or not +set -e + +$MODE dnf update -y + +# Development Tools includes python11 and config-manager +$MODE dnf groupinstall "Development Tools" -yqq +# install pip +$MODE dnf install python3.11-pip -y + +# powertools is needed to install epel +$MODE dnf config-manager --set-enabled powertools + +# get epel to install gcc11 +$MODE dnf install epel-release -yqq + + +$MODE dnf install -y gcc-toolset-11-gcc gcc-toolset-11-gcc-c++ gcc-toolset-11-libatomic-devel make wget git openssl openssl-devel \ + bzip2-devel libffi-devel zlib-devel tar xz which rsync cargo clang curl + +cp /opt/rh/gcc-toolset-11/enable /etc/profile.d/gcc-toolset-11.sh + +source install_cmake.sh $MODE diff --git a/.install/rocky_linux_9.sh b/.install/rocky_linux_9.sh new file mode 100755 index 000000000..4f85741fe --- /dev/null +++ b/.install/rocky_linux_9.sh @@ -0,0 +1,15 @@ +#!/bin/bash +MODE=$1 # whether to install using sudo or not +set -e +export DEBIAN_FRONTEND=noninteractive +$MODE dnf update -y + +$MODE dnf install -y gcc-toolset-13-gcc gcc-toolset-13-gcc-c++ make wget git openssl openssl-devel python3 which \ + rsync unzip cargo clang + +# install pip +$MODE dnf install python3-pip -y + +cp /opt/rh/gcc-toolset-13/enable /etc/profile.d/gcc-toolset-13.sh + +source install_cmake.sh $MODE diff --git a/.install/ubuntu_18.04.sh b/.install/ubuntu_18.04.sh 
new file mode 100755 index 000000000..244e049c9 --- /dev/null +++ b/.install/ubuntu_18.04.sh @@ -0,0 +1,23 @@ +#!/bin/bash +set -e +export DEBIAN_FRONTEND=noninteractive +MODE=$1 # whether to install using sudo or not + +$MODE apt update -qq +$MODE apt upgrade -yqq +$MODE apt dist-upgrade -yqq +$MODE apt install -yqq software-properties-common unzip rsync + +# ppa for modern python and gcc10 +$MODE add-apt-repository ppa:ubuntu-toolchain-r/test -y +$MODE apt update +$MODE apt install -yqq build-essential wget curl make gcc-10 g++-10 openssl libssl-dev cargo binfmt-support lsb-core awscli libclang-dev clang curl +$MODE update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 60 --slave /usr/bin/g++ g++ /usr/bin/g++-10 + +# Install Python 3.8 +$MODE apt -y install python3.8 python3.8-venv python3.8-dev python3-venv python3-dev python3-pip + +# Set python3 to point to python3.8 +$MODE update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 2 + +source install_cmake.sh $MODE diff --git a/.install/ubuntu_20.04.sh b/.install/ubuntu_20.04.sh new file mode 100755 index 000000000..b084abf59 --- /dev/null +++ b/.install/ubuntu_20.04.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -e +export DEBIAN_FRONTEND=noninteractive +MODE=$1 # whether to install using sudo or not + +$MODE apt update -qq +$MODE apt upgrade -yqq + +$MODE apt install -yqq wget make clang-format gcc python3 python3-venv python3-pip lcov git openssl libssl-dev \ + unzip rsync build-essential gcc-10 g++-10 cargo libclang-dev clang curl + +$MODE update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 60 --slave /usr/bin/g++ g++ /usr/bin/g++-10 + +source install_cmake.sh $MODE diff --git a/.install/ubuntu_22.04.sh b/.install/ubuntu_22.04.sh new file mode 100755 index 000000000..3fb4f2344 --- /dev/null +++ b/.install/ubuntu_22.04.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -e +export DEBIAN_FRONTEND=noninteractive +MODE=$1 # whether to install using sudo or not + +$MODE apt update -qq +$MODE apt 
upgrade -yqq +$MODE apt install -yqq git wget build-essential lcov openssl libssl-dev \ + python3 python3-pip python3-venv python3-dev unzip rsync libclang-dev clang curl +source install_cmake.sh $MODE diff --git a/.install/update_clang_alternatives.sh b/.install/update_clang_alternatives.sh new file mode 100755 index 000000000..c023ae07e --- /dev/null +++ b/.install/update_clang_alternatives.sh @@ -0,0 +1,133 @@ +#!/usr/bin/env bash + +# This script registers a specific version of Clang and LLVM tools using the `update-alternatives` command. +# It creates symlinks for the given version, allowing users to switch between multiple versions of Clang and LLVM. + + +# Function: register_clang_version +# Arguments: +# 1. version - The version of Clang/LLVM to be registered (e.g., 18). +# 2. priority - The priority of the version. A higher priority number indicates a preferred version. +# +# Sets up a primary symlink and several slave symlinks that correspond to related tools for Clang and LLVM. +# +# `update-alternatives --install` creates or updates the alternative for the given tool. +# The first argument after `--install` is the path to the main symlink (e.g., /usr/bin/clang). +# The second argument is the name of the alternative (e.g., clang), and the third argument is the +# path to the actual binary for the specified version (e.g., /usr/bin/clang-18). +# The `--slave` options are used to link other related binaries (like clang-format, llvm-nm, etc.) +# so that all tools are switched consistently when the main tool (e.g., clang) is switched. 
+ +# Function to register alternatives as slave first and fall back to master if it fails +register_alternative() { + local tool=$1 + local tool_with_version=$2 + local version=$3 + local priority=$4 + + # Try registering as slave first + update-alternatives --remove-all "${tool}" + update-alternatives --verbose --install "/usr/bin/${tool}" "${tool}" "/usr/bin/${tool_with_version}" "${priority}" + + # Check if the previous command resulted in an error indicating that the tool is a master alternative + #if [ $? -ne 0 ]; then + # echo "Error: Failed to set up ${tool} as an alternative." +# + # # Force reinstallation in case of broken symlink group + # echo "Forcing reinstallation of ${tool}." + # update-alternatives --remove "${tool}" "/usr/bin/${tool}-${version}" + # update-alternatives --install "/usr/bin/${tool}" "${tool}" "/usr/bin/${tool_with_version}" ${priority} + #fi +} + +# Function to register Clang tools +register_clang_version() { + local version=$1 + local priority=$2 + + # List of all Clang and LLVM tools and their binary names + declare -a tools=( + # Clang tools + "clang" "clang-${version}" + "clang-cpp" "clang-cpp-${version}" + "clang-cl" "clang-cl-${version}" + "clangd" "clangd-${version}" + "clang-check" "clang-check-${version}" + "clang-query" "clang-query-${version}" + "asan_symbolize" "asan_symbolize-${version}" + "bugpoint" "bugpoint-${version}" + "dsymutil" "dsymutil-${version}" + "lld" "lld-${version}" + "ld.lld" "ld.lld-${version}" + "lld-link" "lld-link-${version}" + "llc" "llc-${version}" + "lli" "lli-${version}" + "opt" "opt-${version}" + "sanstats" "sanstats-${version}" + "verify-uselistorder" "verify-uselistorder-${version}" + "wasm-ld" "wasm-ld-${version}" + "yaml2obj" "yaml2obj-${version}" + "clang++" "clang++-${version}" + "clang-tidy" "clang-tidy-${version}" + "clang-format" "clang-format-${version}" + + # LLVM tools + "llvm-config" "llvm-config-${version}" + "llvm-ar" "llvm-ar-${version}" + "llvm-as" "llvm-as-${version}" + 
"llvm-bcanalyzer" "llvm-bcanalyzer-${version}" + "llvm-c-test" "llvm-c-test-${version}" + "llvm-cat" "llvm-cat-${version}" + "llvm-cfi-verify" "llvm-cfi-verify-${version}" + "llvm-cov" "llvm-cov-${version}" + "llvm-cvtres" "llvm-cvtres-${version}" + "llvm-cxxdump" "llvm-cxxdump-${version}" + "llvm-cxxfilt" "llvm-cxxfilt-${version}" + "llvm-diff" "llvm-diff-${version}" + "llvm-dis" "llvm-dis-${version}" + "llvm-dlltool" "llvm-dlltool-${version}" + "llvm-dwarfdump" "llvm-dwarfdump-${version}" + "llvm-dwp" "llvm-dwp-${version}" + "llvm-exegesis" "llvm-exegesis-${version}" + "llvm-extract" "llvm-extract-${version}" + "llvm-lib" "llvm-lib-${version}" + "llvm-link" "llvm-link-${version}" + "llvm-lto" "llvm-lto-${version}" + "llvm-lto2" "llvm-lto2-${version}" + "llvm-mc" "llvm-mc-${version}" + "llvm-mca" "llvm-mca-${version}" + "llvm-modextract" "llvm-modextract-${version}" + "llvm-mt" "llvm-mt-${version}" + "llvm-nm" "llvm-nm-${version}" + "llvm-objcopy" "llvm-objcopy-${version}" + "llvm-objdump" "llvm-objdump-${version}" + "llvm-opt-report" "llvm-opt-report-${version}" + "llvm-pdbutil" "llvm-pdbutil-${version}" + "llvm-PerfectShuffle" "llvm-PerfectShuffle-${version}" + "llvm-profdata" "llvm-profdata-${version}" + "llvm-ranlib" "llvm-ranlib-${version}" + "llvm-rc" "llvm-rc-${version}" + "llvm-readelf" "llvm-readelf-${version}" + "llvm-readobj" "llvm-readobj-${version}" + "llvm-rtdyld" "llvm-rtdyld-${version}" + "llvm-size" "llvm-size-${version}" + "llvm-split" "llvm-split-${version}" + "llvm-stress" "llvm-stress-${version}" + "llvm-strings" "llvm-strings-${version}" + "llvm-strip" "llvm-strip-${version}" + "llvm-symbolizer" "llvm-symbolizer-${version}" + "llvm-tblgen" "llvm-tblgen-${version}" + "llvm-undname" "llvm-undname-${version}" + "llvm-xray" "llvm-xray-${version}" + ) + + # Loop through the tools list and register them + for ((i=0; i<${#tools[@]}; i+=2)); do + tool="${tools[$i]}" + tool_bin="${tools[$i+1]}" + register_alternative "$tool" "$tool_bin" "$version" 
"$priority" + done +} + +# Call the function to register clang version (replace with actual version and priority) +register_clang_version $1 $2 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..d15023ae1 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,59 @@ +By contributing code to the Redis project in any form you agree to the Redis Software Grant and Contributor License Agreement attached below. Only contributions made under the Redis Software Grant and Contributor License Agreement may be accepted by Redis, and any contribution is subject to the terms of the Redis tri-license under RSALv2/SSPLv1/AGPLv3 as described in the LICENSE.txt file included in the Redis source distribution. + +REDIS SOFTWARE GRANT AND CONTRIBUTOR LICENSE AGREEMENT +To specify the intellectual property license granted in any Contribution, Redis Ltd., ("Redis") requires a Software Grant and Contributor License Agreement ("Agreement"). This Agreement is for your protection as a contributor as well as the protection of Redis and its users; it does not change your rights to use your own Contribution for any other purpose permitted by this Agreement. + +By making any Contribution, You accept and agree to the following terms and conditions for the Contribution. Except for the license granted in this Agreement to Redis and the recipients of the software distributed by Redis, You reserve all right, title, and interest in and to Your Contribution. + +Definitions + +1.1. "You" (or "Your") means the copyright owner or legal entity authorized by the copyright owner that is entering into this Agreement with Redis. For legal entities, the entity making a Contribution and all other entities that Control, are Controlled by, or are under common Control with that entity are considered to be a single contributor. 
For the purposes of this definition, "Control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +1.2. "Contribution" means the code, documentation, or any original work of authorship, including any modifications or additions to an existing work described above. + +"Work" means any software project stewarded by Redis. + +Grant of Copyright License. Subject to the terms and conditions of this Agreement, You grant to Redis and to the recipients of the software distributed by Redis a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contribution and such derivative works. + +Grant of Patent License. Subject to the terms and conditions of this Agreement, You grant to Redis and to the recipients of the software distributed by Redis a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution alone or by a combination of Your Contribution with the Work to which such Contribution was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes a direct or contributory patent infringement, then any patent licenses granted to the claimant entity under this Agreement for that Contribution or Work terminate as of the date such litigation is filed. + +Representations and Warranties. 
You represent and warrant that: (i) You are legally entitled to grant the above licenses; and (ii) if You are an entity, each employee or agent designated by You is authorized to submit the Contribution on behalf of You; and (iii) your Contribution is Your original work, and that it will not infringe on any third party's intellectual property right(s). + +Disclaimer. You are not expected to provide support for Your Contribution, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contribution on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. + +Enforceability. Nothing in this Agreement will be construed as creating any joint venture, employment relationship, or partnership between You and Redis. If any provision of this Agreement is held to be unenforceable, the remaining provisions of this Agreement will not be affected. This represents the entire agreement between You and Redis relating to the Contribution. + +IMPORTANT: HOW TO USE REDIS GITHUB ISSUES +GitHub issues SHOULD ONLY BE USED to report bugs and for DETAILED feature requests. Everything else should be asked on Discord: + +https://discord.com/invite/redis +PLEASE DO NOT POST GENERAL QUESTIONS that are not about bugs or suspected bugs in the GitHub issues system. We'll be delighted to help you and provide all the support on Discord. + +There is also an active community of Redis users at Stack Overflow: + +https://stackoverflow.com/questions/tagged/redis +Issues and pull requests for documentation belong on the redis-doc repo: + +https://github.com/redis/redis-doc +If you are reporting a security bug or vulnerability, see SECURITY.md. 
+ +How to provide a patch for a new feature +If it is a major feature or a semantic change, please don't start coding straight away: if your feature is not a conceptual fit you'll lose a lot of time writing the code without any reason. Start by posting in the mailing list and creating an issue at GitHub with the description of, exactly, what you want to accomplish and why. Use cases are important for features to be accepted. Here you can see if there is consensus about your idea. + +If in step 1 you get an acknowledgment from the project leaders, use the following procedure to submit a patch: + +a. Fork Redis on GitHub ( https://docs.github.com/en/github/getting-started-with-github/fork-a-repo ) + +b. Create a topic branch (git checkout -b my_branch) + +c. Push to your branch (git push origin my_branch) + +d. Initiate a pull request on GitHub ( https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request ) + +e. Done :) + +Keep in mind that we are very overloaded, so issues and PRs sometimes wait for a very long time. However this is not a lack of interest, as the project gets more and more users, we find ourselves in a constant need to prioritize certain issues/PRs over others. If you think your issue/PR is very important, try to popularize it, have other users commenting and sharing their point of view, and so forth. This helps. + +For minor fixes - open a pull request on GitHub. + +Additional information on the RSALv2/SSPLv1/AGPLv3 tri-license is also found in the LICENSE.txt file. diff --git a/Cargo.lock b/Cargo.lock index 3090ee52b..5c4a37af0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,85 +1,78 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", + "getrandom", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.0.4" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - [[package]] name = "autocfg" -version = "1.1.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets", ] +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "bindgen" -version = "0.65.1" +version = "0.66.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" +checksum = "f2b84e06fc203107bfbad243f4aba2af864eb7db3b1cf46ea0a023b0b433d2a7" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "cexpr", "clang-sys", "lazy_static", @@ -88,11 +81,11 @@ dependencies = [ "peeking_take_while", "prettyplease", "proc-macro2", - "quote 1.0.33", + "quote 1.0.37", "regex", "rustc-hash", "shlex", - "syn 2.0.29", + "syn 2.0.87", "which", ] @@ -104,9 +97,21 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "bitvec" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] 
[[package]] name = "block-buffer" @@ -119,41 +124,44 @@ dependencies = [ [[package]] name = "bson" -version = "0.14.1" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c177ed0122f24ce5e0f05bf9b29e79f3ac1a359bc504e0e14c3b34896c71c00" +checksum = "068208f2b6fcfa27a7f1ee37488d2bb8ba2640f68f5475d08e1d9130696aba59" dependencies = [ - "byteorder", - "chrono", + "ahash", + "base64", + "bitvec", "hex", - "libc", - "linked-hash-map", - "md5", + "indexmap", + "js-sys", + "once_cell", "rand", "serde", + "serde_bytes", "serde_json", "time", + "uuid", ] [[package]] name = "bumpalo" -version = "3.13.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" -version = "1.0.83" +version = "1.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70" dependencies = [ - "libc", + "shlex", ] [[package]] @@ -171,43 +179,22 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chrono" -version = "0.4.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", - "num-traits 0.2.16", - 
"time", - "wasm-bindgen", - "winapi", -] - [[package]] name = "clang-sys" -version = "1.6.1" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" dependencies = [ "glob", "libc", "libloading", ] -[[package]] -name = "core-foundation-sys" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" - [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" dependencies = [ "libc", ] @@ -224,17 +211,26 @@ dependencies = [ [[package]] name = "dashmap" -version = "5.5.1" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.14.0", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", ] +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + [[package]] name = "digest" version = "0.10.7" @@ -247,9 +243,9 @@ dependencies = [ [[package]] name = "either" -version = "1.9.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = 
"enum-primitive-derive" @@ -264,9 +260,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" dependencies = [ "humantime", "is-terminal", @@ -283,24 +279,19 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.2" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ - "errno-dragonfly", "libc", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] -name = "errno-dragonfly" -version = "0.1.2" +name = "funty" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "generic-array" @@ -314,20 +305,20 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.1.16" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi 0.9.0+wasi-snapshot-preview1", + "wasi", ] [[package]] name = "gimli" -version = "0.28.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = 
"glob" @@ -346,9 +337,15 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" [[package]] name = "heck" @@ -358,49 +355,35 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" [[package]] name = "hex" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" - -[[package]] -name = "humantime" -version = "2.1.0" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] -name = "iana-time-zone" -version = "0.1.57" +name = "home" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows", + "windows-sys 0.52.0", ] [[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" +name = 
"humantime" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "ijson" version = "0.1.3" -source = "git+https://github.com/RedisJSON/ijson?rev=e0119ac74f6c4ee918718ee122c3948b74ebeba8#e0119ac74f6c4ee918718ee122c3948b74ebeba8" +source = "git+https://github.com/RedisJSON/ijson?rev=d3ec366ea9fc4788ab54642aab709d1e05c31f6f#d3ec366ea9fc4788ab54642aab709d1e05c31f6f" dependencies = [ "dashmap", "hashbrown 0.13.2", @@ -411,45 +394,45 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.15.1", ] [[package]] name = "is-terminal" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" dependencies = [ "hermit-abi", - "rustix", - "windows-sys", + "libc", + "windows-sys 0.52.0", ] [[package]] name = "itertools" -version = "0.10.5" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = 
"49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] @@ -461,6 +444,7 @@ dependencies = [ "bson", "env_logger", "ijson", + "itertools", "log", "pest", "pest_derive", @@ -471,9 +455,9 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lazycell" @@ -483,57 +467,51 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.147" +version = "0.2.161" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" [[package]] name = "libloading" -version = "0.7.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "winapi", + "windows-targets", ] -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - [[package]] name = "linkme" -version = "0.3.15" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9f948366ad5bb46b5514ba7a7a80643726eef08b06632592699676748c8bc33b" +checksum = "70fe496a7af8c406f877635cbf3cd6a9fac9d6f443f58691cd8afe6ce0971af4" dependencies = [ "linkme-impl", ] [[package]] name = "linkme-impl" -version = "0.3.15" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc28438cad73dcc90ff3466fc329a9252b1b8ba668eb0d5668ba97088cf4eef0" +checksum = "b01f197a15988fb5b2ec0a5a9800c97e70771499c456ad757d63b3c5e9b96e75" dependencies = [ "proc-macro2", - "quote 1.0.33", - "syn 2.0.29", + "quote 1.0.37", + "syn 2.0.87", ] [[package]] name = "linux-raw-sys" -version = "0.4.5" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -541,21 +519,15 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" - -[[package]] -name = "md5" -version = "0.6.1" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6bcd6433cff03a4bfc3d9834d504467db1f1cf6d0ea765d37d330249ed629d" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "memchr" -version = "2.5.0" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = 
"78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memoffset" @@ -574,25 +546,24 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "adler", + "adler2", ] [[package]] name = "nix" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", "cfg-if", "libc", "memoffset", "pin-utils", - "static_assertions", ] [[package]] @@ -605,44 +576,50 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-traits" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31" dependencies = [ - "num-traits 0.2.16", + "num-traits 0.2.19", ] [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "object" -version = "0.32.0" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" 
+checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", @@ -659,19 +636,20 @@ checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" [[package]] name = "pest" -version = "2.7.2" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a" +checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ + "memchr", "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.2" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "666d00490d4ac815001da55838c500eafb0320019bbaa44444137c48b443a853" +checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" dependencies = [ "pest", "pest_generator", @@ -679,22 +657,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.2" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ca01446f50dbda87c1786af8770d535423fa8a53aec03b8f4e3d7eb10e0929" +checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" dependencies = [ "pest", "pest_meta", "proc-macro2", - "quote 1.0.33", - "syn 2.0.29", + "quote 1.0.37", + "syn 2.0.87", ] [[package]] name = 
"pest_meta" -version = "2.7.2" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56af0a30af74d0445c0bf6d9d051c979b516a1a5af790d251daee76005420a48" +checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" dependencies = [ "once_cell", "pest", @@ -707,27 +685,36 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "prettyplease" -version = "0.2.12" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.29", + "syn 2.0.87", ] [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" dependencies = [ "unicode-ident", ] @@ -740,31 +727,35 @@ checksum = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" [[package]] name = "quote" -version = "1.0.33" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + [[package]] name = "rand" -version = "0.7.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ - "getrandom", "libc", "rand_chacha", "rand_core", - "rand_hc", ] [[package]] name = "rand_chacha" -version = "0.2.2" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", @@ -772,53 +763,55 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.5.1" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom", ] -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core", -] - [[package]] name = "redis-module" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f44c13151f81bd51dcee24944c1146f52eb481a36d5a194733b227e9c8fd74c1" +version = "2.1.2" +source = 
"git+https://github.com/RedisLabsModules/redismodule-rs?tag=v2.1.2#556f420685b0b9662cdb816e22b1c55bc696a6dc" dependencies = [ "backtrace", "bindgen", - "bitflags 2.4.0", + "bitflags 2.6.0", "cc", "cfg-if", "enum-primitive-derive", "libc", "linkme", + "log", "nix", - "num-traits 0.2.16", + "num-traits 0.2.19", "redis-module-macros-internals", "regex", "serde", "strum_macros", ] +[[package]] +name = "redis-module-macros" +version = "99.99.99" +source = "git+https://github.com/RedisLabsModules/redismodule-rs?tag=v2.1.2#556f420685b0b9662cdb816e22b1c55bc696a6dc" +dependencies = [ + "proc-macro2", + "quote 1.0.37", + "serde", + "serde_syn", + "syn 1.0.109", +] + [[package]] name = "redis-module-macros-internals" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0396c62561c0eda86e7b6992bb89f20d283818c5570cbdf7e0f67c77848932e" +version = "99.99.99" +source = "git+https://github.com/RedisLabsModules/redismodule-rs?tag=v2.1.2#556f420685b0b9662cdb816e22b1c55bc696a6dc" dependencies = [ "lazy_static", "proc-macro2", - "quote 1.0.33", + "quote 1.0.37", "syn 1.0.109", ] @@ -826,32 +819,34 @@ dependencies = [ name = "redis_json" version = "99.99.99" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "bson", - "env_logger", "ijson", "itertools", "json_path", + "lazy_static", "libc", + "linkme", "redis-module", + "redis-module-macros", "serde", "serde_json", ] [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", ] [[package]] name = "regex" -version = "1.9.3" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +checksum = 
"b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -861,9 +856,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.6" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", @@ -872,15 +867,15 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" @@ -890,28 +885,28 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" -version = "0.38.8" +version = "0.38.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" +checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "rustversion" -version = "1.0.14" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "ryu" 
-version = "1.0.15" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "scopeguard" @@ -921,41 +916,63 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.171" +version = "1.0.214" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" +checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" dependencies = [ "serde_derive", ] +[[package]] +name = "serde_bytes" +version = "0.11.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" -version = "1.0.171" +version = "1.0.214" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" +checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" dependencies = [ "proc-macro2", - "quote 1.0.33", - "syn 2.0.29", + "quote 1.0.37", + "syn 2.0.87", ] [[package]] name = "serde_json" -version = "1.0.105" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" dependencies = [ "indexmap", "itoa", + "memchr", "ryu", "serde", ] +[[package]] +name = "serde_syn" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e0ca8adcbd02c69d24859f7f0c54ede988e4509c8767c8a3185ec0eb158281c" +dependencies = [ + "bitflags 1.3.2", + "proc-macro2", + "serde", + "syn 
1.0.109", +] + [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -964,21 +981,15 @@ dependencies = [ [[package]] name = "shlex" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "smallvec" -version = "1.11.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "strum_macros" @@ -988,7 +999,7 @@ checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck", "proc-macro2", - "quote 1.0.33", + "quote 1.0.37", "rustversion", "syn 1.0.109", ] @@ -1011,18 +1022,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", - "quote 1.0.33", + "quote 1.0.37", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.29" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", - "quote 
1.0.33", + "quote 1.0.37", "unicode-ident", ] @@ -1035,63 +1046,89 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "termcolor" -version = "1.2.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] [[package]] name = "thiserror" -version = "1.0.47" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" +checksum = "02dd99dc800bbb97186339685293e1cc5d9df1f8fae2d0aecd9ff1c77efea892" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.47" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" +checksum = "a7c61ec9a6f64d2793d8a45faba21efbe3ced62a886d44c36a009b2b519b4c7e" dependencies = [ "proc-macro2", - "quote 1.0.33", - "syn 2.0.29", + "quote 1.0.37", + "syn 2.0.87", ] [[package]] name = "time" -version = "0.1.45" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", ] [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-xid" @@ -1100,146 +1137,131 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" [[package]] -name = "version_check" -version = "0.9.4" +name = "uuid" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +dependencies = [ + "getrandom", + "serde", +] [[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" +name = "version_check" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", - "quote 1.0.33", - "syn 2.0.29", + "quote 1.0.37", + "syn 2.0.87", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ - "quote 1.0.33", + "quote 1.0.37", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = 
"26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", - "quote 1.0.33", - "syn 2.0.29", + "quote 1.0.37", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "which" -version = "4.4.0" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", - "libc", + "home", "once_cell", + "rustix", ] -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "winapi", + "windows-sys 0.59.0", ] [[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows" -version = "0.48.0" 
+name = "windows-sys" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" -version = "0.48.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", + "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", @@ -1248,42 +1270,78 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" 
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.48.5" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "wyz" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] 
+name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote 1.0.37", + "syn 2.0.87", +] diff --git a/Cargo.toml b/Cargo.toml index 2d0aef012..25c070ecc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,4 +1,5 @@ [workspace] +resolver = "2" members = [ "json_path", @@ -6,20 +7,16 @@ members = [ ] [workspace.dependencies] -ijson = { git="https://github.com/RedisJSON/ijson", rev="e0119ac74f6c4ee918718ee122c3948b74ebeba8", default_features=false} -serde_json = { version="1.0", features = ["unbounded_depth"]} -# DO NOT CHANGE to avoid security issues. This exact version -# specification is required for the second level dependencies -# to use exactly this version too, so that the dependencies of -# this project will not be able to compromise it. -serde = { version = "=1.0.171", features = ["derive"] } -serde_derive = "=1.0.171" -bson = "0.14" +ijson = { git="https://github.com/RedisJSON/ijson", rev="d3ec366ea9fc4788ab54642aab709d1e05c31f6f", default-features=false} +serde_json = { version="1", features = ["unbounded_depth"]} +serde = { version = "1", features = ["derive"] } +serde_derive = "1" +bson = "2.11" [workspace.package] edition = "2021" repository = "https://github.com/RedisJSON/RedisJSON" -license = "Redis Source Available License 2.0 (RSALv2) or the Server Side Public License v1 (SSPLv1)" +license = "Redis Source Available License 2.0 (RSALv2) or the Server Side Public License v1 (SSPLv1) or the GNU Affero General Public License version 3 (AGPLv3)" [profile.release] debug = 1 diff --git a/LICENSE.txt b/LICENSE.txt index 7a5b26f7d..0ad9027ee 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,2 +1 @@ -Except as otherwise specified in the source code headers for specific files, the source code in this repository is made available to you under your choice of -(i) Redis Source Available License 2.0 
(RSALv2) or (ii) the Server Side Public License v1 (SSPLv1) +Except as otherwise specified in the source code headers for specific files, the source code in this repository is made available to you under your choice of the following starting with Redis 8: (i) Redis Source Available License 2.0 (RSALv2); (ii) the Server Side Public License v1 (SSPLv1); or (iii) the GNU Affero General Public License version 3 (AGPLv3). Please review the license folder for the full license terms and conditions. Prior versions remain subject to (i) and (ii). diff --git a/Makefile b/Makefile index 5ba41a1c2..be8c4a0a2 100644 --- a/Makefile +++ b/Makefile @@ -87,7 +87,7 @@ ifeq ($(DEBUG),1) else NIGHTLY=1 CARGO_FLAGS += -Zbuild-std - RUST_FLAGS += -Zsanitizer=$(SAN) + RUST_FLAGS += -Zsanitizer=$(SAN) -C link-args=-znostart-stop-gc ifeq ($(SAN),memory) RUST_FLAGS += -Zsanitizer-memory-track-origins endif @@ -98,19 +98,13 @@ else endif ifeq ($(COV),1) -RUST_FLAGS += -C instrument_coverage +RUST_FLAGS += -C instrument-coverage endif ifeq ($(PROFILE),1) RUST_FLAGS += -g -C force-frame-pointers=yes endif -ifeq ($(OS),macos) -ifeq ($(ARCH),x64) - CARGO_TOOLCHAIN = +1.66.1 -endif -endif - ifeq ($(NIGHTLY),1) TARGET_DIR=$(BINDIR)/target/$(RUST_TARGET)/debug @@ -197,7 +191,7 @@ run: test: cargo_test pytest pytest: - $(SHOW)MODULE=$(abspath $(TARGET)) $(realpath ./tests/pytest/tests.sh) + $(SHOW)MODULE=$(abspath $(TARGET)) RLTEST_ARGS='--no-progress' $(realpath ./tests/pytest/tests.sh) cargo_test: $(SHOW)cargo $(CARGO_TOOLCHAIN) test --all @@ -228,7 +222,7 @@ bench benchmark: $(TARGET) #---------------------------------------------------------------------------------------------- pack: - $(SHOW)MODULE=$(abspath $(TARGET)) ./sbin/pack.sh + $(SHOW) BINDIR=$(BINDIR) ./sbin/pack.sh $(abspath $(TARGET)) upload-release: $(SHOW)RELEASE=1 ./sbin/upload-artifacts @@ -240,6 +234,13 @@ upload-artifacts: #---------------------------------------------------------------------------------------------- 
+clang-install: + ./sbin/install_clang.sh + +.PHONY: clang-install + +#---------------------------------------------------------------------------------------------- + COV_EXCLUDE_DIRS += bin deps tests COV_EXCLUDE.llvm += $(foreach D,$(COV_EXCLUDE_DIRS),'$(realpath $(ROOT))/$(D)/*') diff --git a/README.md b/README.md index acfab2894..c7944ec57 100644 --- a/README.md +++ b/README.md @@ -6,170 +6,37 @@ # RedisJSON -[![Forum](https://img.shields.io/badge/Forum-RedisJSON-blue)](https://forum.redislabs.com/c/modules/redisjson) [![Discord](https://img.shields.io/discord/697882427875393627?style=flat-square)](https://discord.gg/QUkjSsk) logo +> [!NOTE] +> Starting with Redis 8, the JSON data structure is integral to Redis. You don't need to install this module separately. +> +> We no longer release standalone versions of RedisJSON. +> +> See https://github.com/redis/redis + ## Overview -RedisJSON is a [Redis](https://redis.io/) module that implements [ECMA-404 The JSON Data Interchange Standard](https://json.org/) as a native data type. It allows storing, updating and fetching JSON values from Redis keys (documents). +RedisJSON is a [Redis](https://redis.io/) module that implements [ECMA-404 The JSON Data Interchange Standard](https://json.org/) as a native data type. It allows storing, updating, and fetching JSON values from Redis keys (documents). 
## Primary features * Full support of the JSON standard * [JSONPath](https://goessner.net/articles/JsonPath/) syntax for selecting elements inside documents * Documents are stored as binary data in a tree structure, allowing fast access to sub-elements -* Typed atomic operations for all JSON values types -* Secondary index support when combined with [RediSearch](https://redisearch.io) - -## Quick start - -```bash -docker run -p 6379:6379 --name redis-stack redis/redis-stack:latest -``` +* Typed atomic operations for all JSON value types +* Secondary index support when combined with [RediSearch](https://redis.io/docs/latest/develop/interact/search-and-query/) ## Documentation -Read the docs at - -## Build - -Make sure you have Rust installed: - - -Then, build as usual: - -```bash -cargo build --release -``` - -When running the tests, you need to explicitly specify the `test` feature to disable use of the Redis memory allocator when testing: - -```bash -cargo test -``` - -If you forget to do this, you'll see an error mentioning `signal: 4, SIGILL: illegal instruction`. 
- -## Run - -### Linux - -```bash -redis-server --loadmodule ./target/release/librejson.so -``` - -### Mac OS - -```bash -redis-server --loadmodule ./target/release/librejson.dylib -``` - -## Client libraries - -### Official clients - -| [][dotnet-quickstart] | [][java-quickstart] | [][nodejs-quickstart] | [][python-quickstart] | -|---|---|---|---| -| [NRedisStack][dotnet-quickstart] | [Jedis][java-quickstart] | [node-redis][nodejs-quickstart] | [redis-py][python-quickstart] | -| [Redis.OM][dotnet-om] | [Redis OM Spring][java-om] | [redis-om-node][nodejs-om] | [redis-om][python-om] | - -[dotnet-quickstart]: https://redis.io/docs/redis-clients/dotnet/ -[dotnet-om]: https://github.com/redis/redis-om-dotnet - -[java-quickstart]: https://redis.io/docs/redis-clients/java/ -[java-om]: https://github.com/redis/redis-om-spring +Read the docs at -[nodejs-quickstart]: https://redis.io/docs/redis-clients/nodejs/ -[nodejs-om]: https://github.com/redis/redis-om-node - -[python-quickstart]: https://redis.io/docs/redis-clients/python/ -[python-om]: https://github.com/redis/redis-om-python - -### Community supported clients - -| Project | Language | License | Author | Stars | Package | Comment | -| ------- | -------- | ------- | ------ | ----- | ------- | ------- | -| [Redisson][Redisson-url] | Java | Apache-2.0 | [Redisson][Redisson-author] | [![Redisson-stars]][Redisson-url] | [Maven][Redisson-package] | -| [redis-modules-java][redis-modules-java-url] | Java | Apache-2.0 | [Liming Deng @dengliming][redis-modules-java-author] | [![redis-modules-java-stars]][redis-modules-java-url] | [maven][redis-modules-java-package] | -| [ioredis-rejson][ioredis-rejson-url] | Node.js | MIT | [Felipe Schulz @schulzf][ioredis-rejson-author] | [![ioredis-rejson-stars]][ioredis-rejson-url] | [npm][ioredis-rejson-package] | -| [go-rejson][go-rejson-url] | Go | MIT | [Nitish Malhotra @nitishm][go-rejson-author] | [![go-rejson-stars]][go-rejson-url] | | -| [rejonson][rejonson-url] | Go | Apache-2.0 | 
[Daniel Krom @KromDaniel][rejonson-author] | [![rejonson-stars]][rejonson-url] | | -| [rueidis][rueidis-url] | Go | Apache-2.0 | [Rueian @rueian][rueidis-author] | [![rueidis-stars]][rueidis-url] | | -| [NReJSON][NReJSON-url] | .NET | MIT/Apache-2.0 | [Tommy Hanks @tombatron][NReJSON-author] | [![NReJSON-stars]][NReJSON-url] | [nuget][NReJSON-package] | -| [phpredis-json][phpredis-json-url] | PHP | MIT | [Rafa Campoy @averias][phpredis-json-author] | [![phpredis-json-stars]][phpredis-json-url] | [composer][phpredis-json-package] | -| [redislabs-rejson][redislabs-rejson-url] | PHP | MIT | [Mehmet Korkmaz @mkorkmaz][redislabs-rejson-author] | [![redislabs-rejson-stars]][redislabs-rejson-url] | [composer][redislabs-rejson-package] | -| [rejson-rb][rejson-rb-url] | Ruby | MIT | [Pavan Vachhani @vachhanihpavan][rejson-rb-author] | [![rejson-rb-stars]][rejson-rb-url] | [rubygems][rejson-rb-package]| -| [rustis][rustis-url] | Rust | MIT | [Dahomey Technologies][rustis-author] | [![rustis-stars]][rustis-url] | [crate][rustis-package]| [Documentation](https://docs.rs/rustis/latest/rustis/commands/trait.JsonCommands.html) | -| [coredis][coredis-url] | Python | MIT | [Ali-Akber Saifee @alisaifee][coredis-author] | [![coredis-stars]][coredis-url] | [pypi][coredis-package]| [Documentation][coredis-documentation] | - -[Redisson-author]: https://github.com/redisson/ -[Redisson-url]: https://github.com/redisson/redisson -[Redisson-package]: https://search.maven.org/artifact/org.redisson/redisson/ -[Redisson-stars]: https://img.shields.io/github/stars/redisson/redisson.svg?style=social&label=Star&maxAge=2592000 - -[redis-modules-java-author]: https://github.com/dengliming/ -[redis-modules-java-url]: https://github.com/dengliming/redis-modules-java -[redis-modules-java-package]: https://search.maven.org/artifact/io.github.dengliming.redismodule/redis-modules-java/ -[redis-modules-java-stars]: 
https://img.shields.io/github/stars/dengliming/redis-modules-java.svg?style=social&label=Star&maxAge=2592000 - -[ioredis-rejson-author]: https://github.com/schulzf -[ioredis-rejson-url]: https://github.com/schulzf/ioredis-rejson -[ioredis-rejson-package]: https://www.npmjs.com/package/ioredis-rejson -[ioredis-rejson-stars]: https://img.shields.io/github/stars/schulzf/ioredis-rejson.svg?style=social&label=Star&maxAge=2592000 - -[go-rejson-author]: https://github.com/nitishm -[go-rejson-url]: https://github.com/nitishm/go-rejson/ -[go-rejson-stars]: https://img.shields.io/github/stars/nitishm/go-rejson.svg?style=social&label=Star&maxAge=2592000 - -[rueidis-url]: https://github.com/rueian/rueidis -[rueidis-author]: https://github.com/rueian -[rueidis-stars]: https://img.shields.io/github/stars/rueian/rueidis.svg?style=social&label=Star&maxAge=2592000 - -[rejonson-author]: https://github.com/KromDaniel -[rejonson-url]: https://github.com/KromDaniel/rejonson -[rejonson-stars]: https://img.shields.io/github/stars/KromDaniel/rejonson?style=social&label=Star&maxAge=2592000 - -[NReJSON-author]: https://github.com/tombatron -[NReJSON-url]: https://github.com/tombatron/NReJSON -[NReJSON-package]: https://www.nuget.org/packages/NReJSON/ -[NReJSON-stars]: https://img.shields.io/github/stars/tombatron/NReJSON.svg?style=social&label=Star&maxAge=2592000 - -[phpredis-json-author]: https://github.com/averias -[phpredis-json-url]: https://github.com/averias/phpredis-json -[phpredis-json-package]: https://packagist.org/packages/averias/phpredis-json -[phpredis-json-stars]: https://img.shields.io/github/stars/averias/phpredis-json.svg?style=social&label=Star&maxAge=2592000 - -[redislabs-rejson-author]: https://github.com/mkorkmaz -[redislabs-rejson-url]: https://github.com/mkorkmaz/redislabs-rejson -[redislabs-rejson-package]: https://packagist.org/packages/mkorkmaz/redislabs-rejson -[redislabs-rejson-stars]: 
https://img.shields.io/github/stars/mkorkmaz/redislabs-rejson.svg?style=social&label=Star&maxAge=2592000 - -[rejson-rb-author]: https://github.com/vachhanihpavan -[rejson-rb-url]: https://github.com/vachhanihpavan/rejson-rb -[rejson-rb-package]: https://rubygems.org/gems/rejson-rb -[rejson-rb-stars]: https://img.shields.io/github/stars/vachhanihpavan/rejson-rb.svg?style=social&label=Star&maxAge=2592000 - -[rustis-url]: https://github.com/dahomey-technologies/rustis -[rustis-author]: https://github.com/dahomey-technologies -[rustis-stars]: https://img.shields.io/github/stars/dahomey-technologies/rustis.svg?style=social&label=Star&maxAge=2592000 -[rustis-package]: https://crates.io/crates/rustis - -[coredis-author]: https://github.com/alisaifee -[coredis-url]: https://github.com/alisaifee/coredis -[coredis-package]: https://pypi.org/project/coredis/ -[coredis-stars]: https://img.shields.io/github/stars/alisaifee/coredis.svg?style=social&label=Star&maxAge=2592000 -[coredis-documentation]: https://coredis.readthedocs.io/en/stable/handbook/modules.html#redisjson - -## Acknowledgments - -RedisJSON is developed with <3 at [Redis Labs](https://redislabs.com). - -RedisJSON is made possible only because of the existence of this amazing open source project: +## License -* [redis](https://github.com/antirez/redis) +Starting with Redis 8, RedisJSON is licensed under your choice of: (i) Redis Source Available License 2.0 (RSALv2); (ii) the Server Side Public License v1 (SSPLv1); or (iii) the GNU Affero General Public License version 3 (AGPLv3). Please review the license folder for the full license terms and conditions. Prior versions remain subject to (i) and (ii). -## License +## Code contributions -RedisJSON is licensed under the [Redis Source Available License 2.0 (RSALv2)](https://redis.com/legal/rsalv2-agreement) or the [Server Side Public License v1 (SSPLv1)](https://www.mongodb.com/licensing/server-side-public-license). 
+By contributing code to this Redis module in any form, including sending a pull request via GitHub, a code fragment or patch via private email or public discussion groups, you agree to release your code under the terms of the Redis Software Grant and Contributor License Agreement. Please see the CONTRIBUTING.md file in this source distribution for more information. For security bugs and vulnerabilities, please see SECURITY.md. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..67c9a2740 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,43 @@ +# Security Policy + +## Supported Versions + +RedisJSON is generally backwards compatible with very few exceptions, so we +recommend users to always use the latest version to experience stability, +performance and security. + +We generally backport security issues to a single previous major version, +unless this is not possible or feasible with a reasonable effort. + +| Version | Supported | +| ------------------------------ | ----------------------------------------------------------------------------------------------------------------------- | +| (no newer standalone versions) | RedisJSON is now an integral part of Redis. See [Redis Security Policy](https://github.com/redis/redis/security/policy) | +| 2.8 | :white_check_mark: | +| 2.6 | :white_check_mark: | +| < 2.6 | :x: | + +## Reporting a Vulnerability + +If you believe you've discovered a serious vulnerability, please contact the +Redis core team at redis@redis.io. We will evaluate your report and if +necessary issue a fix and an advisory. If the issue was previously undisclosed, +we'll also mention your name in the credits. + +## Responsible Disclosure + +In some cases, we may apply a responsible disclosure process to reported or +otherwise discovered vulnerabilities. We will usually do that for a critical +vulnerability, and only if we have a good reason to believe information about +it is not yet public. 
+ +This process involves providing an early notification about the vulnerability, +its impact and mitigations to a short list of vendors under a time-limited +embargo on public disclosure. + +Vendors on the list are individuals or organizations that maintain Redis +distributions or provide Redis as a service, who have third party users who +will benefit from the vendor's ability to prepare for a new version or deploy a +fix early. + +If you believe you should be on the list, please contact us and we will +consider your request based on the above criteria. diff --git a/TODO.md b/TODO.md deleted file mode 100644 index 76023b32e..000000000 --- a/TODO.md +++ /dev/null @@ -1,152 +0,0 @@ -# RedisJSON TODOs - ---- - -# MVP milestone - -This is what RedisJSON (https://github.com/RedisJSON/RedisJSON) currently has ready for the MVP: - -* Code is under src/ -* Building with CMake - - Need to verify on OSX - - Currently does not have an `install` option - needed? -* Documentation - - docs/commands.md - - docs/design.md ~ 30% done - - README.md ~ 85% done - - Missing about/what is ReJSON - - Some notes about performance - - Perhaps a Node.js example - - Source code is about 90% documented -* AGPLv3 license -* Copyright - -## Missing misc - -1. Peer review of project CTO->RnD/QA? -1. Number overflows in number operations -1. Something is printing "inf" - -## Source code/style - -1. Review and standardize use of int/size_t/uint32... -1. Improve/remove error reporting and logging in case of module internal errors - -## Benchmarks - -1. Need to include a simple standalone "benchmark", either w/ redis-benchmark or not ~ 30% done, need to complete some suites and generate graphs from output - -## Examples - -TBD - -1. A session token that also has a list of last seen times, what stack though -1. Node.js example perhaps - -## Blog post - -References: - -* [Parsing JSON Is A Minefield](http://seriot.ch/parsing_json.php) - ---- - -# Post MVP - -## Profiling - -1. 
Memory usage: implemented JSON.MEMORY, need to compile an automated reporting tool -1. Performance with callgrind and/or gperftools - -## Build/test - -1. Run https://github.com/nst/JSONTestSuite and report like http://seriot.ch/parsing_json.php -1. Need a dependable cycle to check for memory leaks -1. Once we have a way to check baseline performance, add regression -1. Fuzz all module commands with a mix of keys paths and values -1. Memory leaks suite to run - `valgrind --tool=memcheck --suppressions=../redis/src/valgrind.sup ../redis/src/redis-server --loadmodule ./lib/rejson.so` -1. Verify each command's syntax - need a YAML -1. Add CI to repo? - -## Path parsing - -1. Add array slice - -## Dictionary optimiztions - -Encode as trie over a certain size threshold to save memory and increase lookup performance. Alternatively, use a hash dictionary. - -## Secondary indexing - -Integrate with @dvirsky's `secondary` library. - -## Schema - -Support [JSON Schema](http://json-schema.org/). - -JSON.SETSCHEMA - -Notes: -1. Could be replaced by a JSON.SET modifier -2. Indexing will be specified in the schema -3. Cluster needs to be taken into account as well - -JSON.VALIDATE - -## Expiry - -JSON.EXPIRE - -# Cache serialized objects - -Manage a cache inside the module for frequently accessed object in order to avoid repetitive -serialization. - -## KeyRef nodes - -Add a node type that references a Redis key that is either a JSON data type or a regular Redis key. -The module's API can transparently support resolving referenced keys and querying the data in them. -KeyRefs in cluster mode will only be allowed if in the same slot. - -Redis core data types can be mapped to flat (i.e. 
non-nested) JSON structure as follows: -* A Redis String is a JSON String (albeit some escaping may be needed) -* A List: a JSON Array of JSON Strings, where the indices are the identical -* A Hash: a JSON Object where the Hash fields are the Object's keys and the values are JSON Strings -* A Set: a JSON Object where the Set members are the keys and their values are always a JSON Null -* A Sorted Set: a JSON Array that is made from two elements: - * A JSON Object where each key is a member and value is the score - * A JSON Array of all members ordered by score in ascending order - -## Compression - -Compress (string only? entire objects?) values over a (configureable?) size threshold with zstd. - -## Additions to API - -JSON.STATS -Print statistics about encountered values, parsing performance and such - -JSON.OBJSET -An alias for 'JSON.SET' - -JSON.COUNT -P: count JS: ? R: N/A -Counts the number of occurances for scalar in the array - -JSON.REMOVE [count] -P: builtin del JS: ? R: LREM (but also has a count and direction) -Removes the first `count` occurances (default 1) of value from array. If index is negative, -traversal is reversed. - -JSON.EXISTS -P: in JS: ? R: HEXISTS/LINDEX -Checks if path key or array index exists. Syntactic sugar for JSON.TYPE. - -JSON.REVERSE -P: reverse JS: ? R: N/A -Reverses the array. Nice to have. - -JSON.SORT -P: sort JS: ? R: SORT -Sorts the values in an array. Nice to have. diff --git a/build.rs b/build.rs index 06b09daa8..cb4634feb 100644 --- a/build.rs +++ b/build.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
*/ use std::process::Command; diff --git a/build/docker/Makefile b/build/docker/Makefile index e30187fa3..a57acced5 100644 --- a/build/docker/Makefile +++ b/build/docker/Makefile @@ -5,12 +5,12 @@ include $(ROOT)/deps/readies/mk/main REPO=rejson -REDIS_VERSION=7.2-rc2 +REDIS_VERSION=7.4 OSNICK.official=bullseye INT_BRANCHES=2.6 2.4 2.2 2.0 1.2 1.0 -LATEST_BRANCH=2.4 +LATEST_BRANCH=2.6 PREVIEW_BRANCH=2.6 ART_DIR=$(ROOT)/bin/artifacts diff --git a/deps/readies b/deps/readies index e5f70f481..28ddde9e6 160000 --- a/deps/readies +++ b/deps/readies @@ -1 +1 @@ -Subproject commit e5f70f48172b4bfd4e15e47bf99fc0c7f890f4da +Subproject commit 28ddde9e66d72289b3d3e700dc7114b27de20888 diff --git a/docs/commands/json.arrappend.md b/docs/commands/json.arrappend.md deleted file mode 100644 index 7cc9e3a9f..000000000 --- a/docs/commands/json.arrappend.md +++ /dev/null @@ -1,68 +0,0 @@ -Append the `json` values into the array at `path` after the last element in it - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
value - -is one or more values to append to one or more arrays. - -{{% alert title="About using strings with JSON commands" color="warning" %}} -To specify a string as an array value to append, wrap the quoted string with an additional set of single quotes. Example: `'"silver"'`. For more detailed use, see [Examples](#examples). -{{% /alert %}} -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
- -## Return value - -`JSON.ARRAPEND` returns an [array](/docs/reference/protocol-spec/#resp-arrays) of integer replies for each path, the array's new size, or `nil`, if the matching JSON value is not an array. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Add a new color to a list of product colors - -Create a document for noise-cancelling headphones in black and silver colors. - -{{< highlight bash >}} -redis> JSON.SET item:1 $ '{"name":"Noise-cancelling Bluetooth headphones","description":"Wireless Bluetooth headphones with noise-cancelling technology","connection":{"wireless":true,"type":"Bluetooth"},"price":99.98,"stock":25,"colors":["black","silver"]}' -OK -{{< / highlight >}} - -Add color `blue` to the end of the `colors` array. `JSON.ARRAPEND` returns the array's new size. - -{{< highlight bash >}} -redis> JSON.ARRAPPEND item:1 $.colors '"blue"' -1) (integer) 3 -{{< / highlight >}} - -Return the new length of the `colors` array. - -{{< highlight bash >}} -redis> JSON.GET item:1 -"{\"name\":\"Noise-cancelling Bluetooth headphones\",\"description\":\"Wireless Bluetooth headphones with noise-cancelling technology\",\"connection\":{\"wireless\":true,\"type\":\"Bluetooth\"},\"price\":99.98,\"stock\":25,\"colors\":[\"black\",\"silver\",\"blue\"]}" -{{< / highlight >}} - -
- -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.arrindex.md b/docs/commands/json.arrindex.md deleted file mode 100644 index 5a9a054c6..000000000 --- a/docs/commands/json.arrindex.md +++ /dev/null @@ -1,112 +0,0 @@ -Search for the first occurrence of a JSON value in an array - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -
path - -is JSONPath to specify. -
- -
value - -is value to find its index in one or more arrays. - -{{% alert title="About using strings with JSON commands" color="warning" %}} -To specify a string as an array value to index, wrap the quoted string with an additional set of single quotes. Example: `'"silver"'`. For more detailed use, see [Examples](#examples). -{{% /alert %}} -
- -## Optional arguments - -
start - -is inclusive start value to specify in a slice of the array to search. Default is `0`. -
- - -
stop - -is exclusive stop value to specify in a slice of the array to search, including the last element. Default is `0`. Negative values are interpreted as starting from the end. -
- -{{% alert title="About out-of-range indexes" color="warning" %}} - -Out-of-range indexes round to the array's start and end. An inverse index range (such as the range from 1 to 0) returns unfound or `-1`. -{{% /alert %}} - -## Return value - -`JSON.ARRINDEX` returns an [array](/docs/reference/protocol-spec/#resp-arrays) of integer replies for each path, the first position in the array of each JSON value that matches the path, `-1` if unfound in the array, or `nil`, if the matching JSON value is not an array. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Find the specific place of a color in a list of product colors - -Create a document for noise-cancelling headphones in black and silver colors. - -{{< highlight bash >}} -redis> JSON.SET item:1 $ '{"name":"Noise-cancelling Bluetooth headphones","description":"Wireless Bluetooth headphones with noise-cancelling technology","connection":{"wireless":true,"type":"Bluetooth"},"price":99.98,"stock":25,"colors":["black","silver"]}' -OK -{{< / highlight >}} - -Add color `blue` to the end of the `colors` array. `JSON.ARRAPEND` returns the array's new size. - -{{< highlight bash >}} -redis> JSON.ARRAPPEND item:1 $.colors '"blue"' -1) (integer) 3 -{{< / highlight >}} - -Return the new length of the `colors` array. - -{{< highlight bash >}} -JSON.GET item:1 -"{\"name\":\"Noise-cancelling Bluetooth headphones\",\"description\":\"Wireless Bluetooth headphones with noise-cancelling technology\",\"connection\":{\"wireless\":true,\"type\":\"Bluetooth\"},\"price\":99.98,\"stock\":25,\"colors\":[\"black\",\"silver\",\"blue\"]}" -{{< / highlight >}} - -Get the list of colors for the product. - -{{< highlight bash >}} -redis> JSON.GET item:1 '$.colors[*]' -"[\"black\",\"silver\",\"blue\"]" -{{< / highlight >}} - -Insert two more colors after the second color. You now have five colors. - -{{< highlight bash >}} -redis> JSON.ARRINSERT item:1 $.colors 2 '"yellow"' '"gold"' -1) (integer) 5 -{{< / highlight >}} - -Get the updated list of colors. - -{{< highlight bash >}} -redis> JSON.GET item:1 $.colors -"[[\"black\",\"silver\",\"yellow\",\"gold\",\"blue\"]]" -{{< / highlight >}} - -Find the place where color `silver` is located. - -{{< highlight bash >}} -redis> JSON.ARRINDEX item:1 $..colors '"silver"' -1) (integer) 1 -{{< / highlight >}} -
- -## See also - -`JSON.ARRAPPEND` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - diff --git a/docs/commands/json.arrinsert.md b/docs/commands/json.arrinsert.md deleted file mode 100644 index 1e89cdf68..000000000 --- a/docs/commands/json.arrinsert.md +++ /dev/null @@ -1,93 +0,0 @@ -Insert the `json` values into the array at `path` before the `index` (shifts to the right) - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
value - -is one or more values to insert in one or more arrays. - -{{% alert title="About using strings with JSON commands" color="warning" %}} -To specify a string as an array value to insert, wrap the quoted string with an additional set of single quotes. Example: `'"silver"'`. For more detailed use, see [Examples](#examples). -{{% /alert %}} -
- -
index - -is position in the array where you want to insert a value. The index must be in the array's range. Inserting at `index` 0 prepends to the array. Negative index values start from the end of the array. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
- -## Return value - -`JSON.ARRINSERT` returns an [array](/docs/reference/protocol-spec/#resp-arrays) of integer replies for each path, the array's new size, or `nil`, if the matching JSON value is not an array. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Add new colors to a specific place in a list of product colors - -Create a document for noise-cancelling headphones in black and silver colors. - -{{< highlight bash >}} -redis> JSON.SET item:1 $ '{"name":"Noise-cancelling Bluetooth headphones","description":"Wireless Bluetooth headphones with noise-cancelling technology","connection":{"wireless":true,"type":"Bluetooth"},"price":99.98,"stock":25,"colors":["black","silver"]}' -OK -{{< / highlight >}} - -Add color `blue` to the end of the `colors` array. `JSON.ARRAPEND` returns the array's new size. - -{{< highlight bash >}} -redis> JSON.ARRAPPEND item:1 $.colors '"blue"' -1) (integer) 3 -{{< / highlight >}} - -Return the new length of the `colors` array. - -{{< highlight bash >}} -JSON.GET item:1 -"{\"name\":\"Noise-cancelling Bluetooth headphones\",\"description\":\"Wireless Bluetooth headphones with noise-cancelling technology\",\"connection\":{\"wireless\":true,\"type\":\"Bluetooth\"},\"price\":99.98,\"stock\":25,\"colors\":[\"black\",\"silver\",\"blue\"]}" -{{< / highlight >}} - -Get the list of colors for the product. - -{{< highlight bash >}} -redis> JSON.GET item:1 '$.colors[*]' -"[\"black\",\"silver\",\"blue\"]" -{{< / highlight >}} - -Insert two more colors after the second color. You now have five colors. - -{{< highlight bash >}} -redis> JSON.ARRINSERT item:1 $.colors 2 '"yellow"' '"gold"' -1) (integer) 5 -{{< / highlight >}} - -Get the updated list of colors. - -{{< highlight bash >}} -redis> JSON.GET item:1 $.colors -"[[\"black\",\"silver\",\"yellow\",\"gold\",\"blue\"]]" -{{< / highlight >}} -
- -## See also - -`JSON.ARRAPPEND` | `JSON.ARRINDEX` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.arrlen.md b/docs/commands/json.arrlen.md deleted file mode 100644 index 069fbaf16..000000000 --- a/docs/commands/json.arrlen.md +++ /dev/null @@ -1,72 +0,0 @@ -Report the length of the JSON array at `path` in `key` - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`, if not provided. Returns null if the `key` or `path` do not exist. -
- -## Return - -`JSON.ARRLEN` returns an [array](/docs/reference/protocol-spec/#resp-arrays) of integer replies, an integer for each matching value, each is the array's length, or `nil`, if the matching value is not an array. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Get lengths of JSON arrays in a document - -Create a document for wireless earbuds. - -{{< highlight bash >}} -redis> JSON.SET item:2 $ '{"name":"Wireless earbuds","description":"Wireless Bluetooth in-ear headphones","connection":{"wireless":true,"type":"Bluetooth"},"price":64.99,"stock":17,"colors":["black","white"], "max_level":[80, 100, 120]}' -OK -{{< / highlight >}} - -Find lengths of arrays in all objects of the document. - -{{< highlight bash >}} -redis> JSON.ARRLEN item:2 '$.[*]' -1) (nil) -2) (nil) -3) (nil) -4) (nil) -5) (nil) -6) (integer) 2 -7) (integer) 3 -{{< / highlight >}} - -Return the length of the `max_level` array. - -{{< highlight bash >}} -redis> JSON.ARRLEN item:2 '$..max_level' -1) (integer) 3 -{{< / highlight >}} - -Get all the maximum level values. - -{{< highlight bash >}} -redis> JSON.GET item:2 '$..max_level' -"[[80,100,120]]" -{{< / highlight >}} - -
- -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.arrpop.md b/docs/commands/json.arrpop.md deleted file mode 100644 index 67b0f78b3..000000000 --- a/docs/commands/json.arrpop.md +++ /dev/null @@ -1,84 +0,0 @@ -Remove and return an element from the index in the array - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
index - -is position in the array to start popping from. Default is `-1`, meaning the last element. Out-of-range indexes round to their respective array ends. Popping an empty array returns null. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
- -## Return - -`JSON.ARRPOP` returns an [array](/docs/reference/protocol-spec/#resp-arrays) of bulk string replies for each path, each reply is the popped JSON value, or `nil`, if the matching JSON value is not an array. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Pop a value from an index and insert a new value - -Create two headphone products with maximum sound levels. - -{{< highlight bash >}} -redis> JSON.SET key $ '[{"name":"Healthy headphones","description":"Wireless Bluetooth headphones with noise-cancelling technology","connection":{"wireless":true,"type":"Bluetooth"},"price":99.98,"stock":25,"colors":["black","silver"],"max_level":[60,70,80]},{"name":"Noisy headphones","description":"Wireless Bluetooth headphones with noise-cancelling technology","connection":{"wireless":true,"type":"Bluetooth"},"price":99.98,"stock":25,"colors":["black","silver"],"max_level":[80,90,100,120]}]' -OK -{{< / highlight >}} - -Get all maximum values for the second product. - -{{< highlight bash >}} -redis> JSON.GET key $.[1].max_level -"[[80,90,100,120]]" -{{< / highlight >}} - -Update the `max_level` field of the product: remove an unavailable value and add a newly available value. - -{{< highlight bash >}} -redis> JSON.ARRPOP key $.[1].max_level 0 -1) "80" -{{< / highlight >}} - -Get the updated array. - -{{< highlight bash >}} -redis> JSON.GET key $.[1].max_level -"[[90,100,120]]" -{{< / highlight >}} - -Now insert a new lowest value. - -{{< highlight bash >}} -redis> JSON.ARRINSERT key $.[1].max_level 0 85 -1) (integer) 4 -{{< / highlight >}} - -Get the updated array. - -{{< highlight bash >}} -redis> JSON.GET key $.[1].max_level -"[[85,90,100,120]]" -{{< / highlight >}} -
- -## See also - -`JSON.ARRAPPEND` | `JSON.ARRINDEX` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.arrtrim.md b/docs/commands/json.arrtrim.md deleted file mode 100644 index 0a9abbcc5..000000000 --- a/docs/commands/json.arrtrim.md +++ /dev/null @@ -1,95 +0,0 @@ -Trim an array so that it contains only the specified inclusive range of elements - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
- -
start - -is index of the first element to keep (previous elements are trimmed). Default is 0. -
- -
stop - -is the index of the last element to keep (following elements are trimmed), including the last element. Default is 0. Negative values are interpreted as starting from the end. -
- -{{% alert title="About out-of-range indexes" color="warning" %}} - -JSON.ARRTRIM is extremely forgiving, and using it with out-of-range indexes does not produce an error. Note a few differences between how RedisJSON v2.0 and legacy versions handle out-of-range indexes. - -Behavior as of RedisJSON v2.0: - -* If `start` is larger than the array's size or `start` > `stop`, returns 0 and an empty array. -* If `start` is < 0, then start from the end of the array. -* If `stop` is larger than the end of the array, it is treated like the last element. -{{% /alert %}} - -## Return - -JSON.ARRTRIM returns an array of integer replies for each path, the array's new size, or `nil`, if the matching JSON value is not an array. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Trim an array to a specific set of values - -Create two headphone products with maximum sound levels. - -{{< highlight bash >}} -redis> JSON.GET key $ -"[[{\"name\":\"Healthy headphones\",\"description\":\"Wireless Bluetooth headphones with noise-cancelling technology\",\"connection\":{\"wireless\":true,\"type\":\"Bluetooth\"},\"price\":99.98,\"stock\":25,\"colors\":[\"black\",\"silver\"],\"max_level\":[60,70,80]},{\"name\":\"Noisy headphones\",\"description\":\"Wireless Bluetooth headphones with noise-cancelling technology\",\"connection\":{\"wireless\":true,\"type\":\"Bluetooth\"},\"price\":99.98,\"stock\":25,\"colors\":[\"black\",\"silver\"],\"max_level\":[85,90,100,120]}]]" -OK -{{< / highlight >}} - -Add new sound level values to the second product. - -{{< highlight bash >}} -redis> JSON.ARRAPPEND key $.[1].max_level 140 160 180 200 220 240 260 280 -1) (integer) 12 -{{< / highlight >}} - -Get the updated array. - -{{< highlight bash >}} -redis> JSON.GET key $.[1].max_level -"[[85,90,100,120,140,160,180,200,220,240,260,280]]" -{{< / highlight >}} - -Keep only the values between the fifth and the ninth element, inclusive of that last element. - -{{< highlight bash >}} -redis> JSON.ARRTRIM key $.[1].max_level 4 8 -1) (integer) 5 -{{< / highlight >}} - -Get the updated array. - -{{< highlight bash >}} -redis> JSON.GET key $.[1].max_level -"[[140,160,180,200,220]]" -{{< / highlight >}} -
- -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - diff --git a/docs/commands/json.clear.md b/docs/commands/json.clear.md deleted file mode 100644 index c217637cd..000000000 --- a/docs/commands/json.clear.md +++ /dev/null @@ -1,65 +0,0 @@ -Clear container values (arrays/objects) and set numeric values to `0` - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. Nonexisting paths are ignored. -
- -## Return - -JSON.CLEAR returns an integer reply specifying the number of matching JSON arrays and objects cleared + number of matching JSON numerical values zeroed. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -{{% alert title="Note" color="warning" %}} - -Already cleared values are ignored for empty containers and zero numbers. - -{{% /alert %}} - -## Examples - -
-Clear container values and set numeric values to 0 - -Create a JSON document. - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"obj":{"a":1, "b":2}, "arr":[1,2,3], "str": "foo", "bool": true, "int": 42, "float": 3.14}' -OK -{{< / highlight >}} - -Clear all container values. This returns the number of objects with cleared values. - -{{< highlight bash >}} -redis> JSON.CLEAR doc $.* -(integer) 4 -{{< / highlight >}} - -Get the updated document. Note that numeric values have been set to `0`. - -{{< highlight bash >}} -redis> JSON.GET doc $ -"[{\"obj\":{},\"arr\":[],\"str\":\"foo\",\"bool\":true,\"int\":0,\"float\":0}]" -{{< / highlight >}} -
- -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - diff --git a/docs/commands/json.debug-help.md b/docs/commands/json.debug-help.md deleted file mode 100644 index e7f116c29..000000000 --- a/docs/commands/json.debug-help.md +++ /dev/null @@ -1,14 +0,0 @@ -Return helpful information about the `JSON.DEBUG` command - -## Return - -JSON.DEBUG HELP returns an array with helpful messages. - -## See also - -`JSON.DEBUG` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.debug-memory.md b/docs/commands/json.debug-memory.md deleted file mode 100644 index dbe301c81..000000000 --- a/docs/commands/json.debug-memory.md +++ /dev/null @@ -1,52 +0,0 @@ -Report a value's memory usage in bytes - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
- -## Return - -JSON.DEBUG MEMORY returns an integer reply specified as the value size in bytes. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Report a value's memory usage in bytes - -Create a JSON document. - -{{< highlight bash >}} -redis> JSON.SET item:2 $ '{"name":"Wireless earbuds","description":"Wireless Bluetooth in-ear headphones","connection":{"wireless":true,"type":"Bluetooth"},"price":64.99,"stock":17,"colors":["black","white"], "max_level":[80, 100, 120]}' -OK -{{< / highlight >}} - -Get the values' memory usage in bytes. - -{{< highlight bash >}} -redis> JSON.DEBUG MEMORY item:2 -(integer) 253 -{{< / highlight >}} -
- -## See also - -`JSON.SET` | `JSON.ARRLEN` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - diff --git a/docs/commands/json.debug.md b/docs/commands/json.debug.md deleted file mode 100644 index 1bd976f5d..000000000 --- a/docs/commands/json.debug.md +++ /dev/null @@ -1 +0,0 @@ -This is a container command for debugging related tasks. diff --git a/docs/commands/json.del.md b/docs/commands/json.del.md deleted file mode 100644 index 5d572dd7a..000000000 --- a/docs/commands/json.del.md +++ /dev/null @@ -1,67 +0,0 @@ -Delete a value - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. Nonexisting paths are ignored. - -{{% alert title="Note" color="warning" %}} - -Deleting an object's root is equivalent to deleting the key from Redis. - -{{% /alert %}} -
- -## Return - -JSON.DEL returns an integer reply specified as the number of paths deleted (0 or more). -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Delete a value - -Create a JSON document. - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a": 1, "nested": {"a": 2, "b": 3}}' -OK -{{< / highlight >}} - -Delete specified values. - -{{< highlight bash >}} -redis> JSON.DEL doc $..a -(integer) 2 -{{< / highlight >}} - -Get the updated document. - -{{< highlight bash >}} -redis> JSON.GET doc $ -"[{\"nested\":{\"b\":3}}]" -{{< / highlight >}} -
- -## See also - -`JSON.SET` | `JSON.ARRLEN` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - - - diff --git a/docs/commands/json.forget.md b/docs/commands/json.forget.md deleted file mode 100644 index 9ac8cdc52..000000000 --- a/docs/commands/json.forget.md +++ /dev/null @@ -1 +0,0 @@ -See `JSON.DEL`. \ No newline at end of file diff --git a/docs/commands/json.get.md b/docs/commands/json.get.md deleted file mode 100644 index 6aeebb879..000000000 --- a/docs/commands/json.get.md +++ /dev/null @@ -1,99 +0,0 @@ -Return the value at `path` in JSON serialized form - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. JSON.GET accepts multiple `path` arguments. - -{{% alert title="Note" color="warning" %}} - -When using a single JSONPath, the root of the matching values is a JSON string with a top-level **array** of serialized JSON value. -In contrast, a legacy path returns a single value. - -When using multiple JSONPath arguments, the root of the matching values is a JSON string with a top-level **object**, with each object value being a top-level array of serialized JSON value. -In contrast, if all paths are legacy paths, each object value is a single serialized JSON value. -If there are multiple paths that include both legacy path and JSONPath, the returned value conforms to the JSONPath version (an array of values). - -{{% /alert %}} - -
- -
INDENT - -sets the indentation string for nested levels. -
- -
NEWLINE - -sets the string that's printed at the end of each line. -
- -
SPACE - -sets the string that's put between a key and a value. -
- -{{% alert title="Note" color="warning" %}} - -Produce pretty-formatted JSON with `redis-cli` by following this example: - -{{< highlight bash >}} -~/$ redis-cli --raw -redis> JSON.GET myjsonkey INDENT "\t" NEWLINE "\n" SPACE " " path.to.value[1] -{{< / highlight >}} - -{{% /alert %}} - -## Return - -JSON.GET returns a bulk string representing a JSON array of string replies. -Each string is the JSON serialization of each JSON value that matches a path. -Using multiple paths, JSON.GET returns a bulk string representing a JSON object with string values. -Each string value is an array of the JSON serialization of each JSON value that matches a path. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Return the value at path in JSON serialized form - -Create a JSON document. - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":2, "b": 3, "nested": {"a": 4, "b": null}}' -OK -{{< / highlight >}} - -With a single JSONPath (JSON array bulk string): - -{{< highlight bash >}} -redis> JSON.GET doc $..b -"[3,null]" -{{< / highlight >}} - -Using multiple paths with at least one JSONPath returns a JSON string with a top-level object with an array of JSON values per path: - -{{< highlight bash >}} -redis> JSON.GET doc ..a $..b -"{\"$..b\":[3,null],\"..a\":[2,4]}" -{{< / highlight >}} -
- -## See also - -`JSON.SET` | `JSON.MGET` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.merge.md b/docs/commands/json.merge.md deleted file mode 100644 index bb8f4c4ca..000000000 --- a/docs/commands/json.merge.md +++ /dev/null @@ -1,120 +0,0 @@ -Merge a given JSON value into matching paths. Consequently, JSON values at matching paths are updated, deleted, or expanded with new children. - -This command complies with [RFC7396](https://datatracker.ietf.org/doc/html/rfc7396) Json Merge Patch - -[Examples](#examples) - -## Required arguments - -
key - -is key to merge into. -
- -
path - -is JSONPath to specify. For non-existing keys the `path` must be `$`. For existing keys, for each matched `path`, the value that matches the `path` is being merged with the JSON `value`. For existing keys, when the path exists, except for the last element, a new child is added with the JSON `value`. - -
- -
value - -is JSON value to merge with at the specified path. Merging is done according to the following rules per JSON value in the `value` argument while considering the corresponding original value if it exists: -* merging an existing object key with a `null` value deletes the key -* merging an existing object key with non-null value updates the value -* merging a non-existing object key adds the key and value -* merging an existing array with any merged value, replaces the entire array with the value -
-
-## Return value
-
-JSON.MERGE returns a simple string reply: `OK` if executed correctly or `error` if it fails to set the new values
-
-For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec).
-
-## Examples
-
-The JSON.MERGE command provides four different behaviours to merge changes on a given key: create a nonexistent path, update an existing path with a new value, delete an existing path, or replace an array with a new array
-
-Create a nonexistent path-value
-
-{{< highlight bash >}}
-redis> JSON.SET doc $ '{"a":2}'
-OK
-redis> JSON.MERGE doc $.b '8'
-OK
-redis> JSON.GET doc $
-"[{\"a\":2,\"b\":8}]"
-{{< / highlight >}}
-
- -
-Replace an existing value - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":2}' -OK -redis> JSON.MERGE doc $.a '3' -OK -redis> JSON.GET doc $ -"[{\"a\":3}]" -{{< / highlight >}} - -
- -
-Delete an existing value
-
-{{< highlight bash >}}
-redis> JSON.SET doc $ '{"a":2}'
-OK
-redis> JSON.MERGE doc $.a 'null'
-OK
-redis> JSON.GET doc $
-"[{}]"
-{{< / highlight >}}
-
- -
-Replace an Array - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":[2,4,6,8]}' -OK -redis> JSON.MERGE doc $.a '[10,12]' -OK -redis> JSON.GET doc $ -"[{\"a\":[10,12]}]" -{{< / highlight >}} - -
- - -
-Merge changes in multi-paths - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"f1": {"a":1}, "f2":{"a":2}}' -OK -redis> JSON.GET doc -"{\"f1\":{\"a\":1},\"f2\":{\"a\":2}}" -redis> JSON.MERGE doc $ '{"f1": 'null', "f2":{"a":3, "b":4}, "f3":'[2,4,6]'}' -OK -redis> JSON.GET doc -"{\"f2\":{\"a\":3,\"b\":4},\"f3\":[2,4,6]}" -{{< / highlight >}} - -
- -## See also - -`JSON.GET` | `JSON.MGET` | `JSON.SET` | `JSON.MSET` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - diff --git a/docs/commands/json.mget.md b/docs/commands/json.mget.md deleted file mode 100644 index 9f2fb5682..000000000 --- a/docs/commands/json.mget.md +++ /dev/null @@ -1,55 +0,0 @@ -Return the values at `path` from multiple `key` arguments - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. Returns `null` for nonexistent keys. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. Returns `null` for nonexistent paths. - -
- -## Return - -JSON.MGET returns an array of bulk string replies specified as the JSON serialization of the value at each key's path. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Return the values at path from multiple key arguments - -Create two JSON documents. - -{{< highlight bash >}} -redis> JSON.SET doc1 $ '{"a":1, "b": 2, "nested": {"a": 3}, "c": null}' -OK -redis> JSON.SET doc2 $ '{"a":4, "b": 5, "nested": {"a": 6}, "c": null}' -OK -{{< / highlight >}} - -Get values from all arguments in the documents. - -{{< highlight bash >}} -redis> JSON.MGET doc1 doc2 $..a -1) "[1,3]" -2) "[4,6]" -{{< / highlight >}} -
- -## See also - -`JSON.SET` | `JSON.GET` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.mset.md b/docs/commands/json.mset.md deleted file mode 100644 index 9584e4bcb..000000000 --- a/docs/commands/json.mset.md +++ /dev/null @@ -1,58 +0,0 @@ -Set or update one or more JSON values according to the specified `key`-`path`-`value` triplets - -`JSON.MSET` is atomic, hence, all given additions or updates are either applied or not. It is not possible for clients to see that some of the keys were updated while others are unchanged. - -A JSON value is a hierarchical structure. If you change a value in a specific path - nested values are affected. - - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
path - -is JSONPath to specify. For new Redis keys the `path` must be the root. For existing keys, when the entire `path` exists, the value that it contains is replaced with the `json` value. For existing keys, when the `path` exists, except for the last element, a new child is added with the `json` value. - -
- -
value - -is value to set at the specified path -
-
-## Return value
-
-JSON.MSET returns a simple string reply: `OK` if executed correctly or `error` if it fails to set the new values
-
-For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec).
-
-## Examples
-
-Add new values to multiple keys
-
-{{< highlight bash >}}
-redis> JSON.MSET doc1 $ '{"a":2}' doc2 $.f.a '3' doc3 $ '{"f1": {"a":1}, "f2":{"a":2}}'
-OK
-redis> JSON.GET doc1 $
-"[{\"a\":2}]"
-redis> JSON.GET doc2 $
-"[{\"f\":{\"a\":3}}]"
-redis> JSON.GET doc3
-"{\"f1\":{\"a\":1},\"f2\":{\"a\":2}}"
-{{< / highlight >}}
- -## See also - -`JSON.SET` | `JSON.MGET` | `JSON.GET` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.numincrby.md b/docs/commands/json.numincrby.md deleted file mode 100644 index cda825421..000000000 --- a/docs/commands/json.numincrby.md +++ /dev/null @@ -1,62 +0,0 @@ -Increment the number value stored at `path` by `number` - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
path - -is JSONPath to specify. -
- -
value - -is number value to increment. -
- -## Return - -JSON.NUMINCRBY returns a bulk string reply specified as a stringified new value for each path, or `nil`, if the matching JSON value is not a number. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Increment number values - -Create a document. - -{{< highlight bash >}} -redis> JSON.SET doc . '{"a":"b","b":[{"a":2}, {"a":5}, {"a":"c"}]}' -OK -{{< / highlight >}} - -Increment a value of `a` object by 2. The command fails to find a number and returns `null`. - -{{< highlight bash >}} -redis> JSON.NUMINCRBY doc $.a 2 -"[null]" -{{< / highlight >}} - -Recursively find and increment a value of all `a` objects. The command increments numbers it finds and returns `null` for nonnumber values. - -{{< highlight bash >}} -redis> JSON.NUMINCRBY doc $..a 2 -"[null,4,7,null]" -{{< / highlight >}} - -
- -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.nummultby.md b/docs/commands/json.nummultby.md deleted file mode 100644 index ac112d0a6..000000000 --- a/docs/commands/json.nummultby.md +++ /dev/null @@ -1,47 +0,0 @@ -Multiply the number value stored at `path` by `number` - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
value - -is number value to multiply. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
- -## Return - -JSON.NUMMULTBY returns a bulk string reply specified as a stringified new values for each path, or `nil` element if the matching JSON value is not a number. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -{{< highlight bash >}} -redis> JSON.SET doc . '{"a":"b","b":[{"a":2}, {"a":5}, {"a":"c"}]}' -OK -redis> JSON.NUMMULTBY doc $.a 2 -"[null]" -redis> JSON.NUMMULTBY doc $..a 2 -"[null,4,10,null]" -{{< / highlight >}} - -## See also - -`JSON.NUMINCRBY` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.objkeys.md b/docs/commands/json.objkeys.md deleted file mode 100644 index a91cde25e..000000000 --- a/docs/commands/json.objkeys.md +++ /dev/null @@ -1,43 +0,0 @@ -Return the keys in the object that's referenced by `path` - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. Returns `null` for nonexistent keys. -
- -## Optional arguments - -
path
-
-is JSONPath to specify. Default is root `$`. Returns `null` for nonexistent path.
-
- -## Return - -JSON.OBJKEYS returns an array of array replies for each path, an array of the key names in the object as a bulk string reply, or `nil` if the matching JSON value is not an object. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":[3], "nested": {"a": {"b":2, "c": 1}}}' -OK -redis> JSON.OBJKEYS doc $..a -1) (nil) -2) 1) "b" - 2) "c" -{{< / highlight >}} - -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.objlen.md b/docs/commands/json.objlen.md deleted file mode 100644 index 41408b048..000000000 --- a/docs/commands/json.objlen.md +++ /dev/null @@ -1,42 +0,0 @@ -Report the number of keys in the JSON object at `path` in `key` - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. Returns `null` for nonexistent keys. -
- -## Optional arguments - -
path
-
-is JSONPath to specify. Default is root `$`. Returns `null` for nonexistent path.
-
- -## Return - -JSON.OBJLEN returns an array of integer replies for each path specified as the number of keys in the object or `nil`, if the matching JSON value is not an object. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":[3], "nested": {"a": {"b":2, "c": 1}}}' -OK -redis> JSON.OBJLEN doc $..a -1) (nil) -2) (integer) 2 -{{< / highlight >}} - -## See also - -`JSON.ARRINDEX` | `JSON.ARRINSERT` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.resp.md b/docs/commands/json.resp.md deleted file mode 100644 index a4892134c..000000000 --- a/docs/commands/json.resp.md +++ /dev/null @@ -1,82 +0,0 @@ -Return the JSON in `key` in [Redis serialization protocol specification](/docs/reference/protocol-spec) form - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. This command uses the following mapping from JSON to RESP: - -* JSON `null` maps to the bulk string reply. -* JSON `false` and `true` values map to the simple string reply. -* JSON number maps to the integer reply or bulk string reply, depending on type. -* JSON string maps to the bulk string reply. -* JSON array is represented as an array reply in which the first element is the simple string reply `[`, followed by the array's elements. -* JSON object is represented as an array reply in which the first element is the simple string reply `{`. Each successive entry represents a key-value pair as a two-entry array reply of the bulk string reply. - -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). -
- -## Return - -JSON.RESP returns an array reply specified as the JSON's RESP form detailed in [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Return an array of RESP details about a document - -Create a JSON document. - -{{< highlight bash >}} -redis> JSON.SET item:2 $ '{"name":"Wireless earbuds","description":"Wireless Bluetooth in-ear headphones","connection":{"wireless":true,"type":"Bluetooth"},"price":64.99,"stock":17,"colors":["black","white"], "max_level":[80, 100, 120]}' -OK -{{< / highlight >}} - -Get all RESP details about the document. - -{{< highlight bash >}} -redis> JSON.RESP item:2 - 1) { - 2) "name" - 3) "Wireless earbuds" - 4) "description" - 5) "Wireless Bluetooth in-ear headphones" - 6) "connection" - 7) 1) { - 2) "wireless" - 3) true - 4) "type" - 5) "Bluetooth" - 8) "price" - 9) "64.989999999999995" -10) "stock" -11) (integer) 17 -12) "colors" -13) 1) [ - 2) "black" - 3) "white" -14) "max_level" -15) 1) [ - 2) (integer) 80 - 3) (integer) 100 - 4) (integer) 120 -{{< / highlight >}} -
- -## See also - -`JSON.SET` | `JSON.ARRLEN` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.set.md b/docs/commands/json.set.md deleted file mode 100644 index 6e68f5e8a..000000000 --- a/docs/commands/json.set.md +++ /dev/null @@ -1,89 +0,0 @@ -Set the JSON value at `path` in `key` - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
path - -is JSONPath to specify. Default is root `$`. For new Redis keys the `path` must be the root. For existing keys, when the entire `path` exists, the value that it contains is replaced with the `json` value. For existing keys, when the `path` exists, except for the last element, a new child is added with the `json` value. - -Adds a key (with its respective value) to a JSON Object (in a RedisJSON data type key) only if it is the last child in the `path`, or it is the parent of a new child being added in the `path`. Optional arguments `NX` and `XX` modify this behavior for both new RedisJSON data type keys as well as the JSON Object keys in them. -
- -
value - -is value to set at the specified path -
- -## Optional arguments - -
NX - -sets the key only if it does not already exist. -
- -
XX - -sets the key only if it already exists. -
-
-## Return value
-
-JSON.SET returns a simple string reply: `OK` if executed correctly or `nil` if the specified `NX` or `XX` conditions were not met.
-For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec).
-
-## Examples
-
-Replace an existing value - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":2}' -OK -redis> JSON.SET doc $.a '3' -OK -redis> JSON.GET doc $ -"[{\"a\":3}]" -{{< / highlight >}} -
- -
-Add a new value - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":2}' -OK -redis> JSON.SET doc $.b '8' -OK -redis> JSON.GET doc $ -"[{\"a\":2,\"b\":8}]" -{{< / highlight >}} -
- -
-Update multi-paths - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"f1": {"a":1}, "f2":{"a":2}}' -OK -redis> JSON.SET doc $..a 3 -OK -redis> JSON.GET doc -"{\"f1\":{\"a\":3},\"f2\":{\"a\":3}}" -{{< / highlight >}} -
- -## See also - -`JSON.GET` | `JSON.MGET` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/commands/json.strappend.md b/docs/commands/json.strappend.md deleted file mode 100644 index 44293e4fe..000000000 --- a/docs/commands/json.strappend.md +++ /dev/null @@ -1,54 +0,0 @@ -Append the `json-string` values to the string at `path` - -[Examples](#examples) - -## Required arguments - -
key - -is key to modify. -
- -
value - -is value to append to one or more strings. - -{{% alert title="About using strings with JSON commands" color="warning" %}} -To specify a string as an array value to append, wrap the quoted string with an additional set of single quotes. Example: `'"silver"'`. For more detailed use, see [Examples](#examples). -{{% /alert %}} -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. -
-
-## Return value
-
-JSON.STRAPPEND returns an array of integer replies for each path, the string's new length, or `nil`, if the matching JSON value is not a string.
-For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec).
-
-## Examples
-
-{{< highlight bash >}}
-redis> JSON.SET doc $ '{"a":"foo", "nested": {"a": "hello"}, "nested2": {"a": 31}}'
-OK
-redis> JSON.STRAPPEND doc $..a '"baz"'
-1) (integer) 6
-2) (integer) 8
-3) (nil)
-redis> JSON.GET doc $
-"[{\"a\":\"foobaz\",\"nested\":{\"a\":\"hellobaz\"},\"nested2\":{\"a\":31}}]"
-{{< / highlight >}}
-
-## See also
-
-`JSON.ARRAPPEND` | `JSON.ARRINSERT`
-
-## Related topics
-
-* [RedisJSON](/docs/stack/json)
-* [Index and search JSON documents](/docs/stack/search/indexing_json)
-
diff --git a/docs/commands/json.strlen.md b/docs/commands/json.strlen.md
deleted file mode 100644
index 701d572c6..000000000
--- a/docs/commands/json.strlen.md
+++ /dev/null
@@ -1,42 +0,0 @@
-Report the length of the JSON String at `path` in `key`
-
-[Examples](#examples)
-
-## Required arguments
-
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`, if not provided. Returns null if the `key` or `path` do not exist. -
-
-## Return
-
-JSON.STRLEN returns by recursive descent an array of integer replies for each path, the string's length, or `nil`, if the matching JSON value is not a string.
-For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec).
-
-## Examples
-
-{{< highlight bash >}}
-redis> JSON.SET doc $ '{"a":"foo", "nested": {"a": "hello"}, "nested2": {"a": 31}}'
-OK
-redis> JSON.STRLEN doc $..a
-1) (integer) 3
-2) (integer) 5
-3) (nil)
-{{< / highlight >}}
-
-## See also
-
-`JSON.ARRLEN` | `JSON.ARRINSERT`
-
-## Related topics
-
-* [RedisJSON](/docs/stack/json)
-* [Index and search JSON documents](/docs/stack/search/indexing_json)
diff --git a/docs/commands/json.toggle.md b/docs/commands/json.toggle.md
deleted file mode 100644
index c778bfa86..000000000
--- a/docs/commands/json.toggle.md
+++ /dev/null
@@ -1,74 +0,0 @@
-Toggle a Boolean value stored at `path`
-
-[Examples](#examples)
-
-## Required arguments
-
key - -is key to modify. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. - -
- -## Return - -JSON.TOGGLE returns an array of integer replies for each path, the new value (`0` if `false` or `1` if `true`), or `nil` for JSON values matching the path that are not Boolean. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -
-Toggle a Boolean value stored at path
-
-Create a JSON document.
-
-{{< highlight bash >}}
-redis> JSON.SET doc $ '{"bool": true}'
-OK
-{{< / highlight >}}
-
-Toggle the Boolean value.
-
-{{< highlight bash >}}
-redis> JSON.TOGGLE doc $.bool
-1) (integer) 0
-{{< / highlight >}}
-
-Get the updated document.
-
-{{< highlight bash >}}
-redis> JSON.GET doc $
-"[{\"bool\":false}]"
-{{< / highlight >}}
-
-Toggle the Boolean value.
-
-{{< highlight bash >}}
-redis> JSON.TOGGLE doc $.bool
-1) (integer) 1
-{{< / highlight >}}
-
-Get the updated document.
-
-{{< highlight bash >}}
-redis> JSON.GET doc $
-"[{\"bool\":true}]"
-{{< / highlight >}}
- -## See also - -`JSON.SET` | `JSON.GET` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) - diff --git a/docs/commands/json.type.md b/docs/commands/json.type.md deleted file mode 100644 index 94a0939db..000000000 --- a/docs/commands/json.type.md +++ /dev/null @@ -1,45 +0,0 @@ -Report the type of JSON value at `path` - -[Examples](#examples) - -## Required arguments - -
key - -is key to parse. -
- -## Optional arguments - -
path - -is JSONPath to specify. Default is root `$`. Returns null if the `key` or `path` do not exist. - -
- -## Return - -JSON.TYPE returns an array of string replies for each path, specified as the value's type. -For more information about replies, see [Redis serialization protocol specification](/docs/reference/protocol-spec). - -## Examples - -{{< highlight bash >}} -redis> JSON.SET doc $ '{"a":2, "nested": {"a": true}, "foo": "bar"}' -OK -redis> JSON.TYPE doc $..foo -1) "string" -redis> JSON.TYPE doc $..a -1) "integer" -2) "boolean" -redis> JSON.TYPE doc $..dummy -{{< / highlight >}} - -## See also - -`JSON.SET` | `JSON.ARRLEN` - -## Related topics - -* [RedisJSON](/docs/stack/json) -* [Index and search JSON documents](/docs/stack/search/indexing_json) diff --git a/docs/docs/_index.md b/docs/docs/_index.md deleted file mode 100644 index 2f929790e..000000000 --- a/docs/docs/_index.md +++ /dev/null @@ -1,278 +0,0 @@ ---- -title: JSON -description: JSON support for Redis -linkTitle: JSON -type: docs -stack: true -aliases: - - /docs/stack/json ---- - -[![Discord](https://img.shields.io/discord/697882427875393627?style=flat-square)](https://discord.gg/QUkjSsk) -[![Github](https://img.shields.io/static/v1?label=&message=repository&color=5961FF&logo=github)](https://github.com/RedisJSON/RedisJSON/) - -The JSON capability of Redis Stack provides JavaScript Object Notation (JSON) support for Redis. It lets you store, update, and retrieve JSON values in a Redis database, similar to any other Redis data type. Redis JSON also works seamlessly with [Search and Query](https://redis.io/docs/stack/search/) to let you [index and query JSON documents](https://redis.io/docs/stack/search/indexing_json). 
- -## Primary features - -* Full support for the JSON standard -* A [JSONPath](http://goessner.net/articles/JsonPath/) syntax for selecting/updating elements inside documents (see [JSONPath syntax](/redisjson/path#jsonpath-syntax)) -* Documents stored as binary data in a tree structure, allowing fast access to sub-elements -* Typed atomic operations for all JSON value types - -## Use Redis JSON - -To learn how to use JSON, it's best to start with the Redis CLI. The following examples assume that you're connected to a Redis server with JSON enabled. - -### `redis-cli` examples - -First, start [`redis-cli`](http://redis.io/topics/rediscli) in interactive mode. - -The first JSON command to try is `JSON.SET`, which sets a Redis key with a JSON value. `JSON.SET` accepts all JSON value types. This example creates a JSON string: - -```sh -> JSON.SET animal $ '"dog"' -"OK" -> JSON.GET animal $ -"[\"dog\"]" -> JSON.TYPE animal $ -1) "string" -``` - -Note how the commands include the dollar sign character `$`. This is the [path](/redisjson/path) to the value in the JSON document (in this case it just means the root). - -Here are a few more string operations. `JSON.STRLEN` tells you the length of the string, and you can append another string to it with `JSON.STRAPPEND`. 
- -```sh -> JSON.STRLEN animal $ -1) "3" -> JSON.STRAPPEND animal $ '" (Canis familiaris)"' -1) "22" -> JSON.GET animal $ -"[\"dog (Canis familiaris)\"]" -``` - -Numbers can be [incremented](/commands/json.numincrby) and [multiplied](/commands/json.nummultby): - -``` -> JSON.SET num $ 0 -OK -> JSON.NUMINCRBY num $ 1 -"[1]" -> JSON.NUMINCRBY num $ 1.5 -"[2.5]" -> JSON.NUMINCRBY num $ -0.75 -"[1.75]" -> JSON.NUMMULTBY num $ 24 -"[42]" -``` - -Here's a more interesting example that includes JSON arrays and objects: - -``` -> JSON.SET example $ '[ true, { "answer": 42 }, null ]' -OK -> JSON.GET example $ -"[[true,{\"answer\":42},null]]" -> JSON.GET example $[1].answer -"[42]" -> JSON.DEL example $[-1] -(integer) 1 -> JSON.GET example $ -"[[true,{\"answer\":42}]]" -``` - -The `JSON.DEL` command deletes any JSON value you specify with the `path` parameter. - -You can manipulate arrays with a dedicated subset of JSON commands: - -``` -> JSON.SET arr $ [] -OK -> JSON.ARRAPPEND arr $ 0 -1) (integer) 1 -> JSON.GET arr $ -"[[0]]" -> JSON.ARRINSERT arr $ 0 -2 -1 -1) (integer) 3 -> JSON.GET arr $ -"[[-2,-1,0]]" -> JSON.ARRTRIM arr $ 1 1 -1) (integer) 1 -> JSON.GET arr $ -"[[-1]]" -> JSON.ARRPOP arr $ -1) "-1" -> JSON.ARRPOP arr $ -1) (nil) -``` - -JSON objects also have their own commands: - -``` -> JSON.SET obj $ '{"name":"Leonard Cohen","lastSeen":1478476800,"loggedOut": true}' -OK -> JSON.OBJLEN obj $ -1) (integer) 3 -> JSON.OBJKEYS obj $ -1) 1) "name" - 2) "lastSeen" - 3) "loggedOut" -``` - -To return a JSON response in a more human-readable format, run `redis-cli` in raw output mode and include formatting keywords such as `INDENT`, `NEWLINE`, and `SPACE` with the `JSON.GET` command: - -```sh -$ redis-cli --raw -> JSON.GET obj INDENT "\t" NEWLINE "\n" SPACE " " $ -[ - { - "name": "Leonard Cohen", - "lastSeen": 1478476800, - "loggedOut": true - } -] -``` - -### Python example - -This code snippet shows how to use JSON with raw Redis commands from Python with 
[redis-py](https://github.com/redis/redis-py): - -```Python -import redis - -data = { - 'dog': { - 'scientific-name' : 'Canis familiaris' - } -} - -r = redis.Redis() -r.json().set('doc', '$', data) -doc = r.json().get('doc', '$') -dog = r.json().get('doc', '$.dog') -scientific_name = r.json().get('doc', '$..scientific-name') -``` - -### Run with Docker - -To run RedisJSON with Docker, use the `redis-stack-server` Docker image: - -```sh -$ docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest -``` - -For more information about running Redis Stack in a Docker container, see [Run Redis Stack on Docker](/docs/getting-started/install-stack/docker). - -### Download binaries - -To download and run the RedisJSON module that provides the JSON data structure from a precompiled binary: - -1. Download a precompiled version from the [Redis download center](https://redis.com/download-center/modules/). - -2. Load the module it in Redis - - ```sh - $ redis-server --loadmodule /path/to/module/src/rejson.so - ``` - -### Build from source - -To build RedisJSON from the source code: - -1. Clone the [repository](https://github.com/RedisJSON/RedisJSON) (make sure you include the `--recursive` option to properly clone submodules): - - ```sh - $ git clone --recursive https://github.com/RedisJSON/RedisJSON.git - $ cd RedisJSON - ``` - -2. Install dependencies: - - ```sh - $ ./sbin/setup - ``` - -3. Build: - ```sh - $ make build - ``` - -### Load the module to Redis - -Requirements: - -Generally, it is best to run the latest Redis version. - -If your OS has a [Redis 6.x package or later](http://redis.io/download), you can install it using the OS package manager. 
- -Otherwise, you can invoke - -```sh -$ ./deps/readies/bin/getredis -``` - -To load the RedisJSON module, use one of the following methods: - -* [Makefile recipe](#makefile-recipe) -* [Configuration file](#configuration-file) -* [Command-line option](#command-line-option) -* [MODULE LOAD command](/commands/module-load/) - -#### Makefile recipe - -Run Redis with RedisJSON: - -```sh -$ make run -``` - -#### Configuration file - -Or you can have Redis load the module during startup by adding the following to your `redis.conf` file: - -``` -loadmodule /path/to/module/target/release/librejson.so -``` - -On Mac OS, if this module was built as a dynamic library, run: - -``` -loadmodule /path/to/module/target/release/librejson.dylib -``` - -In the above lines replace `/path/to/module/` with the actual path to the module. - -Alternatively, you can download and run Redis from a precompiled binary: - -1. Download a precompiled version of RedisJSON from the [Redis download center](https://redis.com/download-center/modules/). - -#### Command-line option - -Alternatively, you can have Redis load the module using the following command-line argument syntax: - - ```bash - $ redis-server --loadmodule /path/to/module/librejson.so - ``` - -In the above lines replace `/path/to/module/` with the actual path to the module's library. - -#### `MODULE LOAD` command - -You can also use the `MODULE LOAD` command to load RedisJSON. Note that `MODULE LOAD` is a **dangerous command** and may be blocked/deprecated in the future due to security considerations. - -After the module has been loaded successfully, the Redis log should have lines similar to: - -``` -... 
-9:M 11 Aug 2022 16:24:06.701 * version: 20009 git sha: d8d4b19 branch: HEAD -9:M 11 Aug 2022 16:24:06.701 * Exported RedisJSON_V1 API -9:M 11 Aug 2022 16:24:06.701 * Enabled diskless replication -9:M 11 Aug 2022 16:24:06.701 * Created new data type 'ReJSON-RL' -9:M 11 Aug 2022 16:24:06.701 * Module 'ReJSON' loaded from /opt/redis-stack/lib/rejson.so -... -``` - -### Limitation - -A JSON value passed to a command can have a depth of up to 128. If you pass to a command a JSON value that contains an object or an array with a nesting level of more than 128, the command returns an error. diff --git a/docs/docs/developer.md b/docs/docs/developer.md deleted file mode 100644 index b7f5de0b0..000000000 --- a/docs/docs/developer.md +++ /dev/null @@ -1,162 +0,0 @@ ---- -title: "Developer notes" -linkTitle: "Developer notes" -weight: 7 -description: > - Notes on debugging, testing and documentation -aliases: - - /docs/stack/json/developer ---- - -# Developing Redis JSON - -Developing Redis JSON involves setting up the development environment (which can be either Linux-based or macOS-based), building RedisJSON (the Redis module providing JSON), running tests and benchmarks, and debugging both the JSON module and its tests. - -## Cloning the git repository -To clone the RedisJSON module and its submodules, run: -```sh -git clone --recursive https://github.com/RedisJSON/RedisJSON.git -``` -## Working in an isolated environment -There are several reasons to use an isolated environment for development, like keeping your workstation clean and developing for a different Linux distribution. - -You can use a virtual machine as an isolated development environment. To set one up, you can use [Vagrant](https://www.vagrantup.com) or Docker. - -To set up a virtual machine with Docker: - -``` -rejson=$(docker run -d -it -v $PWD:/build debian:bullseye bash) -docker exec -it $rejson bash -``` -Then run ```cd /build``` from within the container. 
- -In this mode, all installations remain in the scope of the Docker container. -After you exit the container, you can either restart it with the previous ```docker exec``` command or save the state of the container to an image and resume it at a later time: - -``` -docker commit $rejson redisjson1 -docker stop $rejson -rejson=$(docker run -d -it -v $PWD:/build redisjson1 bash) -docker exec -it $rejson bash -``` - -You can replace `debian:bullseye` with your OS of choice. If you use the same OS as your host machine, you can run the RedisJSON binary on your host after it is built. - -## Installing prerequisites - -To build and test RedisJSON one needs to install several packages, depending on the underlying OS. Currently, we support Ubuntu/Debian, CentOS, Fedora, and macOS. - -Enter the `RedisJSON` directory and run: - -```sh -$ ./sbin/setup -``` - -**This will install various packages on your system** using the native package manager and pip. It will invoke `sudo` on its own, prompting for permission. - -If you prefer to avoid that, you can: - -* Review `system-setup.py` and install packages manually, -* Use `system-setup.py --nop` to display installation commands without executing them, -* Use an isolated environment as explained above, -* Use a Python virtual environment, as Python installations are known to be sensitive when not used in isolation: `python -m virtualenv venv; . ./venv/bin/activate` - -## Installing Redis -Generally, it is best to run the latest Redis version. - -If your OS has a Redis 6.x package, you can install it using the OS package manager. 
- -Otherwise, you can invoke -```sh -$ ./deps/readies/bin/getredis -``` - -## Getting help -```make help``` provides a quick summary of the development features: - -``` -make setup # install prerequisites - -make build - DEBUG=1 # build debug variant - SAN=type # build with LLVM sanitizer (type=address|memory|leak|thread) - VALGRIND|VG=1 # build for testing with Valgrind -make clean # remove binary files - ALL=1 # remove binary directories - -make all # build all libraries and packages - -make test # run both cargo and python tests -make cargo_test # run inbuilt rust unit tests -make pytest # run flow tests using RLTest - TEST=file:name # run test matching `name` from `file` - TEST_ARGS="..." # RLTest arguments - QUICK=1 # run only general tests - GEN=1 # run general tests on a standalone Redis topology - AOF=1 # run AOF persistency tests on a standalone Redis topology - SLAVES=1 # run replication tests on standalone Redis topology - CLUSTER=1 # run general tests on a OSS Redis Cluster topology - VALGRIND|VG=1 # run specified tests with Valgrind - VERBOSE=1 # display more RLTest-related information - -make pack # build package (RAMP file) -make upload-artifacts # copy snapshot packages to S3 - OSNICK=nick # copy snapshots for specific OSNICK -make upload-release # copy release packages to S3 - -common options for upload operations: - STAGING=1 # copy to staging lab area (for validation) - FORCE=1 # allow operation outside CI environment - VERBOSE=1 # show more details - NOP=1 # do not copy, just print commands - -make coverage # perform coverage analysis -make show-cov # show coverage analysis results (implies COV=1) -make upload-cov # upload coverage analysis results to codecov.io (implies COV=1) - -make docker # build for specific Linux distribution - OSNICK=nick # Linux distribution to build for - REDIS_VER=ver # use Redis version `ver` - TEST=1 # test after build - PACK=1 # create packages - ARTIFACTS=1 # copy artifacts from docker image - PUBLISH=1 # publish 
(i.e. docker push) after build - -make sanbox # create container for CLang Sanitizer tests -``` - -## Building from source -Run ```make build``` to build RedisJSON. - -Notes: - -* Binary files are placed under `target/release/`, according to platform and build variant. - -* RedisJSON uses [Cargo](https://github.com/rust-lang/cargo) as its build system. ```make build``` will invoke both Cargo and the subsequent `make` command that's required to complete the build. - -Use ```make clean``` to remove built artifacts. ```make clean ALL=1``` will remove the entire bin subdirectory. - -## Running tests -There are several sets of unit tests: -* Rust tests, integrated in the source code, run by ```make cargo_test```. -* Python tests (enabled by RLTest), located in ```tests/pytests```, run by ```make pytest```. - -You can run all tests with ```make test```. -To run only specific tests, use the ```TEST``` parameter. For example, run ```make test TEST=regex```. - -You can run the module's tests against an "embedded" disposable Redis instance or against an instance -you provide. To use the "embedded" mode, you must include the `redis-server` executable in your `PATH`. - -You can override the spawning of the embedded server by specifying a Redis port via the `REDIS_PORT` -environment variable, e.g.: - -```bash -$ # use an existing local Redis instance for testing the module -$ REDIS_PORT=6379 make test -``` - -## Debugging -To include debugging information, you need to set the `DEBUG` environment variable before you compile RedisJSON. For example, run `export DEBUG=1`. - -You can add breakpoints to Python tests in single-test mode. To set a breakpoint, call the ```BB()``` function inside a test. 
- diff --git a/docs/docs/homogeneous-arrays.md b/docs/docs/homogeneous-arrays.md new file mode 100644 index 000000000..e69de29bb diff --git a/docs/docs/indexing_JSON.md b/docs/docs/indexing_JSON.md deleted file mode 100644 index 14c8130d5..000000000 --- a/docs/docs/indexing_JSON.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -title: Index/Search JSON documents -linkTitle: Index/Search -weight: 2 -description: Combine Redis JSON and Search and Query to index and search JSON documents -aliases: - - /docs/stack/json/indexing_json ---- - -In addition to storing JSON documents, you can also index them using the [Search and Query](/docs/stack/search) feature. This enables full-text search capabilities and document retrieval based on their content. - -To use these features, you must install two modules: RedisJSON and RediSearch. [Redis Stack](/docs/stack) automatically includes both modules. - -See the [tutorial](/docs/stack/search/indexing_json) to learn how to search and query your JSON. \ No newline at end of file diff --git a/docs/docs/path.md b/docs/docs/path.md deleted file mode 100644 index a98815f8f..000000000 --- a/docs/docs/path.md +++ /dev/null @@ -1,285 +0,0 @@ ---- -title: "Path" -linkTitle: "Path" -weight: 3 -description: Access specific elements within a JSON document -aliases: - - /docs/stack/json/path ---- - -Paths help you access specific elements within a JSON document. Since no standard for JSON path syntax exists, Redis JSON implements its own. JSON's syntax is based on common best practices and intentionally resembles [JSONPath](http://goessner.net/articles/JsonPath/). - -JSON supports two query syntaxes: [JSONPath syntax](#jsonpath-syntax) and the [legacy path syntax](#legacy-path-syntax) from the first version of JSON. - -JSON knows which syntax to use depending on the first character of the path query. If the query starts with the character `$`, it uses JSONPath syntax. Otherwise, it defaults to the legacy path syntax. 
- -The returned value is a JSON string with a top-level array of JSON serialized strings. -If multiple paths are used, the return value is a JSON string with a top-level object with values that are arrays of serialized JSON values. - -## JSONPath support - -RedisJSON v2.0 introduced [JSONPath](http://goessner.net/articles/JsonPath/) support. It follows the syntax described by Goessner in his [article](http://goessner.net/articles/JsonPath/). - -A JSONPath query can resolve to several locations in a JSON document. In this case, the JSON commands apply the operation to every possible location. This is a major improvement over [legacy path](#legacy-path-syntax) queries, which only operate on the first path. - -Notice that the structure of the command response often differs when using JSONPath. See the [Commands](/commands/?group=json) page for more details. - -The new syntax supports bracket notation, which allows the use of special characters like colon ":" or whitespace in key names. - -If you want to include double quotes in your query, enclose the JSONPath within single quotes. For example: - -```sh -JSON.GET store '$.inventory["headphones"]' -``` - -### JSONPath syntax - -The following JSONPath syntax table was adapted from Goessner's [path syntax comparison](https://goessner.net/articles/JsonPath/index.html#e2). - -| Syntax element | Description | -|----------------|-------------| -| $ | The root (outermost JSON element), starts the path. | -| . or [] | Selects a child element. | -| .. | Recursively descends through the JSON document. | -| * | Wildcard, returns all elements. | -| [] | Subscript operator, accesses an array element. | -| [,] | Union, selects multiple elements. | -| [start\:end\:step] | Array slice where start, end, and step are indexes. | -| ?() | Filters a JSON object or array. Supports comparison operators (`==`, `!=`, `<`, `<=`, `>`, `>=`, `=~`), logical operators (`&&`, `\|\|`), and parentheses (`(`, `)`). | -| () | Script expression. 
| -| @ | The current element, used in filter or script expressions. | - -### JSONPath examples - -The following JSONPath examples use this JSON document, which stores details about items in a store's inventory: - -```json -{ - "inventory": { - "headphones": [ - { - "id": 12345, - "name": "Noise-cancelling Bluetooth headphones", - "description": "Wireless Bluetooth headphones with noise-cancelling technology", - "wireless": true, - "connection": "Bluetooth", - "price": 99.98, - "stock": 25, - "free-shipping": false, - "colors": ["black", "silver"] - }, - { - "id": 12346, - "name": "Wireless earbuds", - "description": "Wireless Bluetooth in-ear headphones", - "wireless": true, - "connection": "Bluetooth", - "price": 64.99, - "stock": 17, - "free-shipping": false, - "colors": ["black", "white"] - }, - { - "id": 12347, - "name": "Mic headset", - "description": "Headset with built-in microphone", - "wireless": false, - "connection": "USB", - "price": 35.01, - "stock": 28, - "free-shipping": false - } - ], - "keyboards": [ - { - "id": 22345, - "name": "Wireless keyboard", - "description": "Wireless Bluetooth keyboard", - "wireless": true, - "connection": "Bluetooth", - "price": 44.99, - "stock": 23, - "free-shipping": false, - "colors": ["black", "silver"] - }, - { - "id": 22346, - "name": "USB-C keyboard", - "description": "Wired USB-C keyboard", - "wireless": false, - "connection": "USB-C", - "price": 29.99, - "stock": 30, - "free-shipping": false - } - ] - } -} -``` - -First, create the JSON document in your database: - -```sh -JSON.SET store $ '{"inventory":{"headphones":[{"id":12345,"name":"Noise-cancelling Bluetooth headphones","description":"Wireless Bluetooth headphones with noise-cancelling technology","wireless":true,"connection":"Bluetooth","price":99.98,"stock":25,"free-shipping":false,"colors":["black","silver"]},{"id":12346,"name":"Wireless earbuds","description":"Wireless Bluetooth in-ear 
headphones","wireless":true,"connection":"Bluetooth","price":64.99,"stock":17,"free-shipping":false,"colors":["black","white"]},{"id":12347,"name":"Mic headset","description":"Headset with built-in microphone","wireless":false,"connection":"USB","price":35.01,"stock":28,"free-shipping":false}],"keyboards":[{"id":22345,"name":"Wireless keyboard","description":"Wireless Bluetooth keyboard","wireless":true,"connection":"Bluetooth","price":44.99,"stock":23,"free-shipping":false,"colors":["black","silver"]},{"id":22346,"name":"USB-C keyboard","description":"Wired USB-C keyboard","wireless":false,"connection":"USB-C","price":29.99,"stock":30,"free-shipping":false}]}}' -``` - -#### Access JSON examples - -The following examples use the `JSON.GET` command to retrieve data from various paths in the JSON document. - -You can use the wildcard operator `*` to return a list of all items in the inventory: - -```sh -127.0.0.1:6379> JSON.GET store $.inventory.* -"[[{\"id\":12345,\"name\":\"Noise-cancelling Bluetooth headphones\",\"description\":\"Wireless Bluetooth headphones with noise-cancelling technology\",\"wireless\":true,\"connection\":\"Bluetooth\",\"price\":99.98,\"stock\":25,\"free-shipping\":false,\"colors\":[\"black\",\"silver\"]},{\"id\":12346,\"name\":\"Wireless earbuds\",\"description\":\"Wireless Bluetooth in-ear headphones\",\"wireless\":true,\"connection\":\"Bluetooth\",\"price\":64.99,\"stock\":17,\"free-shipping\":false,\"colors\":[\"black\",\"white\"]},{\"id\":12347,\"name\":\"Mic headset\",\"description\":\"Headset with built-in microphone\",\"wireless\":false,\"connection\":\"USB\",\"price\":35.01,\"stock\":28,\"free-shipping\":false}],[{\"id\":22345,\"name\":\"Wireless keyboard\",\"description\":\"Wireless Bluetooth keyboard\",\"wireless\":true,\"connection\":\"Bluetooth\",\"price\":44.99,\"stock\":23,\"free-shipping\":false,\"colors\":[\"black\",\"silver\"]},{\"id\":22346,\"name\":\"USB-C keyboard\",\"description\":\"Wired USB-C 
keyboard\",\"wireless\":false,\"connection\":\"USB-C\",\"price\":29.99,\"stock\":30,\"free-shipping\":false}]]" -``` - -For some queries, multiple paths can produce the same results. For example, the following paths return the names of all headphones: - -```sh -127.0.0.1:6379> JSON.GET store $.inventory.headphones[*].name -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\",\"Mic headset\"]" -127.0.0.1:6379> JSON.GET store '$.inventory["headphones"][*].name' -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\",\"Mic headset\"]" -127.0.0.1:6379> JSON.GET store $..headphones[*].name -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\",\"Mic headset\"]" -``` - -The recursive descent operator `..` can retrieve a field from multiple sections of a JSON document. The following example returns the names of all inventory items: - -```sh -127.0.0.1:6379> JSON.GET store $..name -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\",\"Mic headset\",\"Wireless keyboard\",\"USB-C keyboard\"]" -``` - -You can use an array slice to select a range of elements from an array. This example returns the names of the first two headphones: - -```sh -127.0.0.1:6379> JSON.GET store $..headphones[0:2].name -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\"]" -``` - -Filter expressions `?()` let you select JSON elements based on certain conditions. You can use comparison operators (`==`, `!=`, `<`, `<=`, `>`, `>=`, and starting with version v2.4.2, also `=~`), logical operators (`&&`, `||`), and parenthesis (`(`, `)`) within these expressions. A filter expression can be applied on an array or on an object, iterating over all the **elements** in the array or all the **values** in the object, retrieving only the ones that match the filter condition. - -Paths within the filter condition are using the dot notation with either `@` to denote the current array element or the current object value, or `$` to denote the top-level element. 
For example, use `@.key_name` to refer to a nested value and `$.top_level_key_name` to refer to a top-level value. - -Starting with version v2.4.2, the comparison operator `=~` can be used for matching a path of a string value on the left side against a regular expression pattern on the right side. For more information, see the [supported regular expression syntax docs](https://docs.rs/regex/latest/regex/#syntax). - -Non-string values do not match. A match can only occur when the left side is a path of a string value and the right side is either a hard-coded string, or a path of a string value. See [examples](#json-filter-examples) below. - -The regex match is partial, meaning `"foo"` regex pattern matches a string such as `"barefoots"`. -To make it exact, use the regex pattern `"^foo$"`. - -Other JSONPath engines may use regex pattern between slashes, e.g., `/foo/`, and their match is exact. -They can perform partial matches using a regex pattern such as `/.*foo.*/`. - -#### JSON Filter examples - -In the following example, the filter only returns wireless headphones with a price less than 70: - -```sh -127.0.0.1:6379> JSON.GET store $..headphones[?(@.price<70&&@.wireless==true)] -"[{\"id\":12346,\"name\":\"Wireless earbuds\",\"description\":\"Wireless Bluetooth in-ear headphones\",\"wireless\":true,\"connection\":\"Bluetooth\",\"price\":64.99,\"stock\":17,\"free-shipping\":false,\"colors\":[\"black\",\"white\"]}]" -``` - -This example filters the inventory for the names of items that support Bluetooth connections: - -```sh -127.0.0.1:6379> JSON.GET store '$.inventory.*[?(@.connection=="Bluetooth")].name' -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\",\"Wireless keyboard\"]" -``` - -This example, starting with version v2.4.2, filters only keyboards with some sort of USB connection using regex match. 
Notice this match is case-insensitive thanks to the prefix `(?i)` in the regular expression pattern `"(?i)usb"`: - -```sh -127.0.0.1:6379> JSON.GET store '$.inventory.keyboards[?(@.connection =~ "(?i)usb")]' -"[{\"id\":22346,\"name\":\"USB-C keyboard\",\"description\":\"Wired USB-C keyboard\",\"wireless\":false,\"connection\":\"USB-C\",\"price\":29.99,\"stock\":30,\"free-shipping\":false}]" -``` -The regular expression pattern can also be specified using a path of a string value on the right side. - -For example, let's add to each keyboard object a string value named `regex_pat`: - -```sh -127.0.0.1:6379> JSON.SET store '$.inventory.keyboards[0].regex_pat' '"(?i)bluetooth"' -OK -127.0.0.1:6379> JSON.SET store '$.inventory.keyboards[1].regex' '"usb"' -OK -``` - -Now we can match against the value of `regex_pat` instead of a hard-coded regular expression pattern, and get the keyboard with the `Bluetooth` string in its `connection` key. Notice the one with `USB-C` does not match since its regular expression pattern is case-sensitive and the regular expression pattern is using lowercase: - -```sh -127.0.0.1:6379> JSON.GET store '$.inventory.keyboards[?(@.connection =~ @.regex_pat)]' -"[{\"id\":22345,\"name\":\"Wireless keyboard\",\"description\":\"Wireless Bluetooth keyboard\",\"wireless\":true,\"connection\":\"Bluetooth\",\"price\":44.99,\"stock\":23,\"free-shipping\":false,\"colors\":[\"black\",\"silver\"],\"regex\":\"(?i)Bluetooth\",\"regex_pat\":\"(?i)bluetooth\"}]" -``` - -#### Update JSON examples - -You can also use JSONPath queries when you want to update specific sections of a JSON document. - -For example, you can pass a JSONPath to the `JSON.SET` command to update a specific field. 
This example changes the price of the first item in the headphones list: - -```sh -127.0.0.1:6379> JSON.GET store $..headphones[0].price -"[99.98]" -127.0.0.1:6379> JSON.SET store $..headphones[0].price 78.99 -"OK" -127.0.0.1:6379> JSON.GET store $..headphones[0].price -"[78.99]" -``` - -You can use filter expressions to update only JSON elements that match certain conditions. The following example changes `free-shipping` to `true` for any items with a price greater than 49: - -```sh -127.0.0.1:6379> JSON.SET store $.inventory.*[?(@.price>49)].free-shipping true -"OK" -127.0.0.1:6379> JSON.GET store $.inventory.*[?(@.free-shipping==true)].name -"[\"Noise-cancelling Bluetooth headphones\",\"Wireless earbuds\"]" -``` - -JSONPath queries also work with other JSON commands that accept a path as an argument. For example, you can add a new color option for a set of headphones with `JSON.ARRAPPEND`: - -```sh -127.0.0.1:6379> JSON.GET store $..headphones[0].colors -"[[\"black\",\"silver\"]]" -127.0.0.1:6379> JSON.ARRAPPEND store $..headphones[0].colors '"pink"' -1) "3" -127.0.0.1:6379> JSON.GET store $..headphones[0].colors -"[[\"black\",\"silver\",\"pink\"]]" -``` - -## Legacy path syntax - -RedisJSON v1 had the following path implementation. JSON v2 still supports this legacy path in addition to JSONPath. - -Paths always begin at the root of a Redis JSON value. The root is denoted by a period character (`.`). For paths that reference the root's children, it is optional to prefix the path with the root. - -Redis JSON supports both dot notation and bracket notation for object key access. The following paths refer to _headphones_, which is a child of _inventory_ under the root: - -* `.inventory.headphones` -* `inventory["headphones"]` -* `['inventory']["headphones"]` - -To access an array element, enclose its index within a pair of square brackets. The index is 0-based, with 0 being the first element of the array, 1 being the next element, and so on. 
You can use negative offsets to access elements starting from the end of the array. For example, -1 is the last element in the array, -2 is the second to last element, and so on. - -### JSON key names and path compatibility - -By definition, a JSON key can be any valid JSON string. Paths, on the other hand, are traditionally based on JavaScript's (and Java's) variable naming conventions. - -Although JSON can store objects that contain arbitrary key names, you can only use a legacy path to access these keys if they conform to these naming syntax rules: - -1. Names must begin with a letter, a dollar sign (`$`), or an underscore (`_`) character -2. Names can contain letters, digits, dollar signs, and underscores -3. Names are case-sensitive - -## Time complexity of path evaluation - -The time complexity of searching (navigating to) an element in the path is calculated from: - -1. Child level - every level along the path adds an additional search -2. Key search - O(N), where N is the number of keys in the parent object -3. Array search - O(1) - -This means that the overall time complexity of searching a path is _O(N*M)_, where N is the depth and M is the number of parent object keys. - - While this is acceptable for objects where N is small, access can be optimized for larger objects. diff --git a/docs/docs/performance/_index.md b/docs/docs/performance/_index.md deleted file mode 100644 index d78e25350..000000000 --- a/docs/docs/performance/_index.md +++ /dev/null @@ -1,291 +0,0 @@ ---- -title: "Performance" -linkTitle: "Performance" -weight: 5 -description: > - Performance benchmarks -aliases: - - /docs/stack/json/performance ---- - -To get an early sense of what Redis JSON is capable of, you can test it with `redis-benchmark` just like -any other Redis command. However, in order to have more control over the tests, we'll use a -a tool written in Go called _ReJSONBenchmark_ that we expect to release in the near future. 
- -The following figures were obtained from an AWS EC2 c4.8xlarge instance that ran both the Redis -server as well as the benchmarking tool. Connections to the server are via the networking stack. -All tests are non-pipelined. - -> NOTE: The results below are measured using the preview version of Redis JSON, which is still very much unoptimized. - -## Redis JSON baseline - -### A smallish object - -We test a JSON value that, while purely synthetic, is interesting. The test subject is -[/tests/files/pass-100.json](https://github.com/RedisLabsModules/redisjson/blob/master/tests/files/pass-100.json), -which weighs in at 380 bytes and is nested. We first test SETting it, then GETting it using several -different paths: - -![ReJSONBenchmark pass-100.json](images/bench_pass_100.png) - -![ReJSONBenchmark pass-100.json percentiles](images/bench_pass_100_p.png) - -### A bigger array - -Moving on to bigger values, we use the 1.4 kB array in -[/tests/files/pass-jsonsl-1.json](https://github.com/RedisLabsModules/redisjson/blob/master/tests/files/pass-jsonsl-1.json): - - -![ReJSONBenchmark pass-jsonsl-1.json](images/bench_pass_jsonsl_1.png) - -![ReJSONBenchmark pass-jsonsl-1.json percentiles](images/bench_pass_jsonsl_1_p.png) - -### A largish object - -More of the same to wrap up, now we'll take on a behemoth of no less than 3.5 kB as given by -[/tests/files/pass-json-parser-0000.json](https://github.com/RedisLabsModules/redisjson/blob/master/tests/files/pass-json-parser-0000.json): - -![ReJSONBenchmark pass-json-parser-0000.json](images/bench_pass_json_parser_0000.png) - -![ReJSONBenchmark pass-json-parser-0000.json percentiles](images/bench_pass_json_parser_0000_p.png) - -### Number operations - -Last but not least, some adding and multiplying: - -![ReJSONBenchmark number operations](images/bench_numbers.png) - -![ReJSONBenchmark number operations percentiles](images/bench_numbers_p.png) - -### Baseline - -To establish a baseline, we'll use the Redis `PING` command. 
-First, let's see what `redis-benchmark` reports: - -``` -~$ redis/src/redis-benchmark -n 1000000 ping -====== ping ====== - 1000000 requests completed in 7.11 seconds - 50 parallel clients - 3 bytes payload - keep alive: 1 - -99.99% <= 1 milliseconds -100.00% <= 1 milliseconds -140587.66 requests per second -``` - -ReJSONBenchmark's concurrency is configurable, so we'll test a few settings to find a good one. Here -are the results, which indicate that 16 workers yield the best throughput: - -![ReJSONBenchmark PING](images/bench_ping.png) - -![ReJSONBenchmark PING percentiles](images/bench_ping_p.png) - -Note how our benchmarking tool does slightly worse in PINGing - producing only 116K ops, compared to -`redis-benchmark`'s 140K. - -### The empty string - -Another JSON benchmark is that of setting and getting an empty string - a value that's only two -bytes long (i.e. `""`). Granted, that's not very useful, but it teaches us something about the basic -performance of the module: - -![ReJSONBenchmark empty string](images/bench_empty_string.png) - -![ReJSONBenchmark empty string percentiles](images/bench_empty_string_p.png) - -## Comparison vs. server-side Lua scripting - -We compare Redis Stack's JSON performance with Redis' embedded Lua engine. For this purpose, we use the Lua -scripts at [/benchmarks/lua](https://github.com/RedisLabsModules/redisjson/tree/master/benchmarks/lua). -These scripts provide JSON's GET and SET functionality on values stored in JSON or MessagePack -formats. Each of the different operations (set root, get root, set path and get path) is executed -with each "engine" on objects of varying sizes. - -### Setting and getting the root - -Storing raw JSON performs best in this test, but that isn't really surprising as all it does is -serve unprocessed strings. While you can and should use Redis for caching opaque data, and JSON -"blobs" are just one example, this does not allow any updates other than those of the entire value. 
- -A more meaningful comparison therefore is between JSON and the MessagePack variant, since both -process the incoming JSON value before actually storing it. While the rates and latencies of these -two behave in a very similar way, the absolute measurements suggest that Redis JSON's performance may be -further improved. - -![VS. Lua set root](images/bench_lua_set_root.png) - -![VS. Lua set root latency](images/bench_lua_set_root_l.png) - -![VS. Lua get root](images/bench_lua_get_root.png) - -![VS. Lua get root latency](images/bench_lua_get_root_l.png) - -### Setting and getting parts of objects - -This test shows why Redis JSON exists. Not only does it outperform the Lua variants, it retains constant -rates and latencies regardless of the object's overall size. There's no magic here - JSON keeps the -value deserialized so that accessing parts of it is a relatively inexpensive operation. In deep contrast -are both raw JSON as well as MessagePack, which require decoding the entire object before anything can -be done with it (a process that becomes more expensive the larger the object is). - -![VS. Lua set path to scalar](images/bench_lua_set_path.png) - -![VS. Lua set path to scalar latency](images/bench_lua_set_path_l.png) - -![VS. Lua get scalar from path](images/bench_lua_get_path.png) - -![VS. Lua get scalar from path latency](images/bench_lua_get_path_l.png) - -### Even more charts - -These charts are more of the same but independent for each file (value): - -![VS. Lua pass-100.json rate](images/bench_lua_pass_100.png) - -![VS. Lua pass-100.json average latency](images/bench_lua_pass_100_l.png) - -![VS. Lua pass-jsonsl-1.json rate](images/bench_lua_pass_jsonsl_1.png) - -![VS. Lua pass-jsonsl-1.json average latency](images/bench_lua_pass_jsonsl_1_l.png) - -![VS. Lua pass-json-parser-0000.json rate](images/bench_lua_pass_json_parser_0000.png) - -![VS. Lua pass-json-parser-0000.json latency](images/bench_lua_pass_json_parser_0000_l.png) - -![VS. 
Lua pass-jsonsl-yahoo2.json rate](images/bench_lua_pass_jsonsl_yahoo2.png) - -![VS. Lua pass-jsonsl-yahoo2.json latency](images/bench_lua_pass_jsonsl_yahoo2_l.png) - -![VS. Lua pass-jsonsl-yelp.json rate](images/bench_lua_pass_jsonsl_yelp.png) - -![VS. Lua pass-jsonsl-yelp.json latency](images/bench_lua_pass_jsonsl_yelp_l.png) - -## Raw results - -The following are the raw results from the benchmark in CSV format. - -### JSON results - -``` -title,concurrency,rate,average latency,50.00%-tile,90.00%-tile,95.00%-tile,99.00%-tile,99.50%-tile,100.00%-tile -[ping],1,22128.12,0.04,0.04,0.04,0.05,0.05,0.05,1.83 -[ping],2,54641.13,0.04,0.03,0.05,0.05,0.06,0.07,2.14 -[ping],4,76000.18,0.05,0.05,0.07,0.07,0.09,0.10,2.10 -[ping],8,106750.99,0.07,0.07,0.10,0.11,0.14,0.16,2.99 -[ping],12,111297.33,0.11,0.10,0.15,0.16,0.20,0.22,6.81 -[ping],16,116292.19,0.14,0.13,0.19,0.21,0.27,0.33,7.50 -[ping],20,110622.82,0.18,0.17,0.24,0.27,0.38,0.47,12.21 -[ping],24,107468.51,0.22,0.20,0.31,0.38,0.58,0.71,13.86 -[ping],28,102827.35,0.27,0.25,0.38,0.44,0.66,0.79,12.87 -[ping],32,105733.51,0.30,0.28,0.42,0.50,0.79,0.97,10.56 -[ping],36,102046.43,0.35,0.33,0.48,0.56,0.90,1.13,14.66 -JSON.SET {key} . {empty string size: 2 B},16,80276.63,0.20,0.18,0.28,0.32,0.41,0.45,6.48 -JSON.GET {key} .,16,92191.23,0.17,0.16,0.24,0.27,0.34,0.38,9.80 -JSON.SET {key} . 
{pass-100.json size: 380 B},16,41512.77,0.38,0.35,0.50,0.62,0.81,0.86,9.56 -JSON.GET {key} .,16,48374.10,0.33,0.29,0.47,0.56,0.72,0.79,9.36 -JSON.GET {key} sclr,16,94801.23,0.17,0.15,0.24,0.27,0.35,0.39,13.21 -JSON.SET {key} sclr 1,16,82032.08,0.19,0.18,0.27,0.31,0.40,0.44,8.97 -JSON.GET {key} sub_doc,16,81633.51,0.19,0.18,0.27,0.32,0.43,0.49,9.88 -JSON.GET {key} sub_doc.sclr,16,95052.35,0.17,0.15,0.24,0.27,0.35,0.39,7.39 -JSON.GET {key} array_of_docs,16,68223.05,0.23,0.22,0.29,0.31,0.44,0.50,8.84 -JSON.GET {key} array_of_docs[1],16,76390.57,0.21,0.19,0.30,0.34,0.44,0.49,9.99 -JSON.GET {key} array_of_docs[1].sclr,16,90202.13,0.18,0.16,0.25,0.29,0.36,0.39,7.87 -JSON.SET {key} . {pass-jsonsl-1.json size: 1.4 kB},16,16117.11,0.99,0.91,1.22,1.55,2.17,2.35,9.27 -JSON.GET {key} .,16,15193.51,1.05,0.94,1.41,1.75,2.33,2.42,7.19 -JSON.GET {key} [0],16,78198.90,0.20,0.19,0.29,0.33,0.42,0.47,10.87 -"JSON.SET {key} [0] ""foo""",16,80156.90,0.20,0.18,0.28,0.32,0.40,0.44,12.03 -JSON.GET {key} [7],16,99013.98,0.16,0.15,0.23,0.26,0.34,0.38,7.67 -JSON.GET {key} [8].zero,16,90562.19,0.17,0.16,0.25,0.28,0.35,0.38,7.03 -JSON.SET {key} . {pass-json-parser-0000.json size: 3.5 kB},16,14239.25,1.12,1.06,1.21,1.48,2.35,2.59,11.91 -JSON.GET {key} .,16,8366.31,1.91,1.86,2.00,2.04,2.92,3.51,12.92 -"JSON.GET {key} [""web-app""].servlet",16,9339.90,1.71,1.68,1.74,1.78,2.68,3.26,10.47 -"JSON.GET {key} [""web-app""].servlet[0]",16,13374.88,1.19,1.07,1.54,1.95,2.69,2.82,12.15 -"JSON.GET {key} [""web-app""].servlet[0][""servlet-name""]",16,81267.36,0.20,0.18,0.28,0.31,0.38,0.42,9.67 -"JSON.SET {key} [""web-app""].servlet[0][""servlet-name""] ""bar""",16,79955.04,0.20,0.18,0.27,0.33,0.42,0.46,6.72 -JSON.SET {key} . 
{pass-jsonsl-yahoo2.json size: 18 kB}
-"json-set-path.lua 0 ""foo""",16,21205.52,0.75,0.70,0.84,1.07,1.60,1.74,5.77 -json-get-path.lua 7,16,37254.89,0.43,0.39,0.55,0.69,0.92,0.98,10.24 -json-get-path.lua 8 zero,16,33772.43,0.47,0.43,0.63,0.77,1.01,1.09,7.89 -json-set-root.lua pass-json-parser-0000.json,16,76314.18,0.21,0.19,0.29,0.33,0.41,0.44,8.16 -json-get-root.lua,16,65177.87,0.24,0.21,0.35,0.42,0.89,1.01,9.02 -json-get-path.lua web-app servlet,16,15938.62,1.00,0.88,1.45,1.71,2.11,2.20,8.07 -json-get-path.lua web-app servlet 0,16,19469.27,0.82,0.78,0.90,1.07,1.67,1.84,7.59 -json-get-path.lua web-app servlet 0 servlet-name,16,24694.26,0.65,0.63,0.71,0.74,1.07,1.31,8.60 -"json-set-path.lua web-app servlet 0 servlet-name ""bar""",16,16555.74,0.96,0.92,1.05,1.25,1.98,2.20,9.08 -json-set-root.lua pass-jsonsl-yahoo2.json,16,47544.65,0.33,0.31,0.41,0.47,0.59,0.64,10.52 -json-get-root.lua,16,25369.92,0.63,0.57,0.91,1.05,1.37,1.56,9.95 -json-set-path.lua ResultSet totalResultsAvailable 1,16,5077.32,3.15,3.09,3.20,3.24,5.12,6.26,14.98 -json-get-path.lua ResultSet totalResultsAvailable,16,7652.56,2.09,2.05,2.13,2.17,3.23,3.95,9.65 -json-set-root.lua pass-jsonsl-yelp.json,16,29575.20,0.54,0.52,0.64,0.75,0.94,1.00,12.66 -json-get-root.lua,16,18424.29,0.87,0.84,1.25,1.40,1.82,1.95,7.35 -json-set-path.lua message code 1,16,2251.07,7.10,6.98,7.14,7.22,11.00,12.79,21.14 -json-get-path.lua message code,16,3380.72,4.73,4.44,5.03,6.82,10.28,11.06,14.93 -``` - -### Lua using cmsgpack - -``` -msgpack-set-root.lua empty string,16,82592.66,0.19,0.18,0.27,0.31,0.38,0.42,10.18 -msgpack-get-root.lua,16,89561.41,0.18,0.16,0.25,0.29,0.37,0.40,9.52 -msgpack-set-root.lua pass-100.json,16,44326.47,0.36,0.34,0.43,0.54,0.78,0.86,6.45 -msgpack-get-root.lua,16,41036.58,0.39,0.36,0.51,0.62,0.84,0.91,7.21 -msgpack-get-path.lua sclr,16,55845.56,0.28,0.26,0.36,0.44,0.64,0.70,11.29 -msgpack-set-path.lua sclr 1,16,43608.26,0.37,0.34,0.47,0.58,0.78,0.85,10.27 -msgpack-get-path.lua sub_doc,16,50153.07,0.32,0.29,0.41,0.50,0.69,0.75,8.56 
-msgpack-set-root.lua pass-jsonsl-yelp.json
-expensive, size-wise, than the serialized form.
'[""]' -OK -127.0.0.1:6379> JSON.DEBUG MEMORY arr -(integer) 56 -``` - -A container with two scalars requires 40 bytes for the container (each pointer to an entry in the -container is 8 bytes), and 2 * 24 bytes for the values themselves: -``` -127.0.0.1:6379> JSON.SET arr . '["", ""]' -OK -127.0.0.1:6379> JSON.DEBUG MEMORY arr -(integer) 88 -``` - -A 3-item (each 24 bytes) container will be allocated with capacity for 4 items, i.e. 56 bytes: - -``` -127.0.0.1:6379> JSON.SET arr . '["", "", ""]' -OK -127.0.0.1:6379> JSON.DEBUG MEMORY arr -(integer) 128 -``` - -The next item will not require an allocation in the container, so usage will increase only by that -scalar's requirement, but another value will scale the container again: - -``` -127.0.0.1:6379> JSON.SET arr . '["", "", "", ""]' -OK -127.0.0.1:6379> JSON.DEBUG MEMORY arr -(integer) 152 -127.0.0.1:6379> JSON.SET arr . '["", "", "", "", ""]' -OK -127.0.0.1:6379> JSON.DEBUG MEMORY arr -(integer) 208 -``` - -This table gives the size (in bytes) of a few of the test files on disk and when stored using -JSON. The _MessagePack_ column is for reference purposes and reflects the length of the value -when stored using MessagePack. - -| File | Filesize | Redis JSON | MessagePack | -| -------------------------------------- | --------- | ------ | ----------- | -| /tests/files/pass-100.json | 380 | 1079 | 140 | -| /tests/files/pass-jsonsl-1.json | 1441 | 3666 | 753 | -| /tests/files/pass-json-parser-0000.json | 3468 | 7209 | 2393 | -| /tests/files/pass-jsonsl-yahoo2.json | 18446 | 37469 | 16869 | -| /tests/files/pass-jsonsl-yelp.json | 39491 | 75341 | 35469 | - -> Note: In the current version, deleting values from containers **does not** free the container's -allocated memory. 
diff --git a/docs/docs/use-share-string-in-rdb.md b/docs/docs/use-share-string-in-rdb.md new file mode 100644 index 000000000..2b846c799 --- /dev/null +++ b/docs/docs/use-share-string-in-rdb.md @@ -0,0 +1,65 @@ +# Design Document: Shared Strings in RDB + +## Overview + +This document proposes extending the use of the existing global cache for shared strings, implemented with a hash table, to saving/loading RDB files. The goal is to optimize memory usage and improve performance by leveraging the global cache for string deduplication during RDB operations. Additionally, it outlines enhancements for replication processes using a local cache. + +## Current Approach + +- **Global Cache for Keyspace**: + - A global hash table is already used to store shared strings across all keys in the keyspace. + - This approach reduces memory usage and ensures consistency across the keyspace. + - However, it is not currently utilized during RDB save/load or replication operations. + +## Proposed Approach + +### Extend Global Cache to RDB + +- Use the existing global hash table for shared strings during RDB save/load operations. +- **RDB Save**: + - First, store the cache, then the keys. + - Requires access to the cache table and its properties outside of `ijson`. +- **RDB Load**: + - First, load the cache, then the keys. + - Requires exposing an API to load the cache externally. + +### Extend Local Cache to Replication + +- Introduce a local cache for each key during the full sync process. +- **Full Sync Process (Slave)**: + - Read the local cache and remove it after loading the key. +- Support replication with and without the local cache using a feature flag. + +### Requirements + +- Should not affect `JSONHDT`, meaning the implementation must be confined to `ijson`. +- Must work with `flex`. +- Should include the ability to disable this feature. + +## Implementation Details + +1. **Reuse Existing Hash Table**: + - Leverage the current global hash table for shared strings. 
like incrementing a value, adding or removing elements from an array, appending strings, and so on.
On the other hand, storing these objects as JSON using Redis Stack lets you index and query them. This is provided by the search and query capabilities of Redis Stack. \ No newline at end of file diff --git a/json_path/Cargo.toml b/json_path/Cargo.toml index 41558f32e..ba7d0853c 100644 --- a/json_path/Cargo.toml +++ b/json_path/Cargo.toml @@ -14,9 +14,10 @@ serde.workspace = true ijson.workspace = true log = "0.4" regex = "1" +itertools = "0.13" [dev-dependencies] -env_logger = "0.10.0" +env_logger = "0.10" # do not change this version without running a full ci cycle [[bin]] name = "jsonpath" diff --git a/json_path/src/json_node.rs b/json_path/src/json_node.rs index 6df0d7dec..1fdca0d89 100644 --- a/json_path/src/json_node.rs +++ b/json_path/src/json_node.rs @@ -1,12 +1,15 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
*/ /// Use `SelectValue` use crate::select_value::{SelectValue, SelectValueType}; -use ijson::{IValue, ValueType}; +use ijson::{DestructuredRef, IString, IValue, ValueType}; use serde_json::Value; impl SelectValue for Value { @@ -17,15 +20,9 @@ impl SelectValue for Value { Self::Null => SelectValueType::Null, Self::Array(_) => SelectValueType::Array, Self::Object(_) => SelectValueType::Object, - Self::Number(n) => { - if n.is_i64() || n.is_u64() { - SelectValueType::Long - } else if n.is_f64() { - SelectValueType::Double - } else { - panic!("bad type for Number value"); - } - } + Self::Number(n) if n.is_i64() => SelectValueType::Long, + Self::Number(n) if n.is_f64() | n.is_u64() => SelectValueType::Double, + _ => panic!("bad type for Number value"), } } @@ -92,60 +89,46 @@ impl SelectValue for Value { matches!(self, Self::Array(_)) } + fn is_double(&self) -> Option { + match self { + Self::Number(num) => Some(num.is_f64()), + _ => None, + } + } + fn get_str(&self) -> String { match self { Self::String(s) => s.to_string(), - _ => { - panic!("not a string"); - } + _ => panic!("not a string"), } } fn as_str(&self) -> &str { match self { Self::String(s) => s.as_str(), - _ => { - panic!("not a string"); - } + _ => panic!("not a string"), } } fn get_bool(&self) -> bool { match self { Self::Bool(b) => *b, - _ => { - panic!("not a bool"); - } + _ => panic!("not a bool"), } } fn get_long(&self) -> i64 { match self { - Self::Number(n) => { - if let Some(n) = n.as_i64() { - n - } else { - panic!("not a long"); - } - } - _ => { - panic!("not a long"); - } + Self::Number(n) if n.is_i64() => n.as_i64().unwrap(), + _ => panic!("not a long"), } } fn get_double(&self) -> f64 { match self { - Self::Number(n) => { - if n.is_f64() { - n.as_f64().unwrap() - } else { - panic!("not a double"); - } - } - _ => { - panic!("not a double"); - } + Self::Number(n) if n.is_f64() => n.as_f64().unwrap(), + Self::Number(n) if n.is_u64() => n.as_u64().unwrap() as _, + _ => panic!("not a double"), } 
} } @@ -160,7 +143,7 @@ impl SelectValue for IValue { ValueType::Object => SelectValueType::Object, ValueType::Number => { let num = self.as_number().unwrap(); - if num.has_decimal_point() { + if num.has_decimal_point() | num.to_i64().is_none() { SelectValueType::Double } else { SelectValueType::Long @@ -174,32 +157,33 @@ impl SelectValue for IValue { } fn values<'a>(&'a self) -> Option + 'a>> { - if let Some(arr) = self.as_array() { - Some(Box::new(arr.iter())) - } else if let Some(o) = self.as_object() { - Some(Box::new(o.values())) - } else { - None + match self.destructure_ref() { + DestructuredRef::Array(arr) => Some(Box::new(arr.iter())), + DestructuredRef::Object(o) => Some(Box::new(o.values())), + _ => None, } } fn keys<'a>(&'a self) -> Option + 'a>> { - self.as_object() - .map_or(None, |o| Some(Box::new(o.keys().map(|k| &k[..])))) + match self.destructure_ref() { + DestructuredRef::Object(o) => Some(Box::new(o.keys().map(IString::as_str))), + _ => None, + } } fn items<'a>(&'a self) -> Option + 'a>> { - match self.as_object() { - Some(o) => Some(Box::new(o.iter().map(|(k, v)| (&k[..], v)))), + match self.destructure_ref() { + DestructuredRef::Object(o) => Some(Box::new(o.iter().map(|(k, v)| (k.as_str(), v)))), _ => None, } } fn len(&self) -> Option { - self.as_array().map_or_else( - || self.as_object().map(ijson::IObject::len), - |arr| Some(arr.len()), - ) + match self.destructure_ref() { + DestructuredRef::Array(arr) => Some(arr.len()), + DestructuredRef::Object(o) => Some(o.len()), + _ => None, + } } fn is_empty(&self) -> Option { @@ -218,6 +202,10 @@ impl SelectValue for IValue { self.is_array() } + fn is_double(&self) -> Option { + Some(self.as_number()?.has_decimal_point()) + } + fn get_str(&self) -> String { self.as_string().expect("not a string").to_string() } @@ -231,20 +219,13 @@ impl SelectValue for IValue { } fn get_long(&self) -> i64 { - let n = self.as_number().expect("not a number"); - if n.has_decimal_point() { - panic!("not a long"); - } 
else { - n.to_i64().unwrap() - } + self.as_number() + .expect("not a number") + .to_i64() + .expect("not a long") } fn get_double(&self) -> f64 { - let n = self.as_number().expect("not a number"); - if n.has_decimal_point() { - n.to_f64().unwrap() - } else { - panic!("not a double"); - } + self.as_number().expect("not a number").to_f64_lossy() } } diff --git a/json_path/src/json_path.rs b/json_path/src/json_path.rs index 05f7dd35b..f869af115 100644 --- a/json_path/src/json_path.rs +++ b/json_path/src/json_path.rs @@ -1,9 +1,13 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ +use itertools::Itertools; use pest::iterators::{Pair, Pairs}; use pest::Parser; use pest_derive::Parser; @@ -161,7 +165,7 @@ pub(crate) fn compile(path: &str) -> Result { positives .iter() .map(|v| format!("{v}")) - .collect::>() + .collect_vec() .join(", "), ) }; @@ -172,7 +176,7 @@ pub(crate) fn compile(path: &str) -> Result { negatives .iter() .map(|v| format!("{v}")) - .collect::>() + .collect_vec() .join(", "), ) }; @@ -488,6 +492,48 @@ struct PathCalculatorData<'i, S: SelectValue, UPT: UserPathTracker> { root: &'i S, } +// The following block of code is used to create a unified iterator for arrays and objects. +// This can be used in places where we need to iterate over both arrays and objects, create a path tracker from them. 
+enum Item<'a, S: SelectValue> { + ArrayItem(usize, &'a S), + ObjectItem(&'a str, &'a S), +} + +impl<'a, S: SelectValue> Item<'a, S> { + fn value(&self) -> &'a S { + match self { + Item::ArrayItem(_, v) => v, + Item::ObjectItem(_, v) => v, + } + } + + fn create_tracker<'i, 'j>(&self, parent: &'j PathTracker<'i, 'j>) -> PathTracker<'i, 'j> + where + 'a: 'i, + { + match self { + Item::ArrayItem(index, _) => create_index_tracker(*index, parent), + Item::ObjectItem(key, _) => create_str_tracker(key, parent), + } + } +} + +enum UnifiedIter<'a, S: SelectValue> { + Array(std::iter::Enumerate + 'a>>), + Object(Box + 'a>), +} + +impl<'a, S: SelectValue> Iterator for UnifiedIter<'a, S> { + type Item = Item<'a, S>; + + fn next(&mut self) -> Option { + match self { + UnifiedIter::Array(iter) => iter.next().map(|(i, v)| Item::ArrayItem(i, v)), + UnifiedIter::Object(iter) => iter.next().map(|(k, v)| Item::ObjectItem(k, v)), + } + } +} + impl<'i, UPTG: UserPathTrackerGenerator> PathCalculator<'i, UPTG> { #[must_use] pub const fn create(query: &'i Query<'i>) -> PathCalculator<'i, UPTG> { @@ -985,18 +1031,24 @@ impl<'i, UPTG: UserPathTrackerGenerator> PathCalculator<'i, UPTG> { { /* lets expend the array, this is how most json path engines work. * Personally, I think this if should not exists. 
*/ - let values = json.values().unwrap(); + let unified_iter = if json.get_type() == SelectValueType::Object { + UnifiedIter::Object(json.items().unwrap()) + } else { + UnifiedIter::Array(json.values().unwrap().enumerate()) + }; + if let Some(pt) = path_tracker { trace!( "calc_internal type {:?} path_tracker {:?}", json.get_type(), &pt ); - for (i, v) in values.enumerate() { + for item in unified_iter { + let v = item.value(); trace!("calc_internal v {:?}", &v); if self.evaluate_filter(curr.clone().into_inner(), v, calc_data) { - let new_tracker = Some(create_index_tracker(i, &pt)); + let new_tracker = Some(item.create_tracker(&pt)); self.calc_internal( pairs.clone(), v, @@ -1010,7 +1062,8 @@ impl<'i, UPTG: UserPathTrackerGenerator> PathCalculator<'i, UPTG> { "calc_internal type {:?} path_tracker None", json.get_type() ); - for v in values { + for item in unified_iter { + let v = item.value(); trace!("calc_internal v {:?}", &v); if self.evaluate_filter(curr.clone().into_inner(), v, calc_data) { diff --git a/json_path/src/lib.rs b/json_path/src/lib.rs index 6381b98b2..f337fea25 100644 --- a/json_path/src/lib.rs +++ b/json_path/src/lib.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
*/ pub mod json_node; @@ -587,4 +590,30 @@ mod json_path_tests { }, results:[[foo, 0, val], [foo, 1, val]]); } + + #[test] + fn test_query_inside_object_values_indicates_array_path() { + setup(); + verify_json_path!(path:"$.root[?(@.value > 2)]", + json:{ + "root": { + "1": { + "value": 1 + }, + "2": { + "value": 2 + }, + "3": { + "value": 3 + }, + "4": { + "value": 4 + }, + "5": { + "value": 5 + } + } + }, + results:[[root, 3], [root, 4], [root, 5]]); + } } diff --git a/json_path/src/main.rs b/json_path/src/main.rs index e4a347ef6..52b9f1381 100644 --- a/json_path/src/main.rs +++ b/json_path/src/main.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ mod json_node; mod json_path; diff --git a/json_path/src/select_value.rs b/json_path/src/select_value.rs index 3410a6650..d56f7b410 100644 --- a/json_path/src/select_value.rs +++ b/json_path/src/select_value.rs @@ -1,9 +1,12 @@ -/* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). - */ - +/* + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
+ */ + use serde::Serialize; use std::fmt::Debug; @@ -29,6 +32,7 @@ pub trait SelectValue: Debug + Eq + PartialEq + Default + Clone + Serialize { fn get_key<'a>(&'a self, key: &str) -> Option<&'a Self>; fn get_index(&self, index: usize) -> Option<&Self>; fn is_array(&self) -> bool; + fn is_double(&self) -> Option; fn get_str(&self) -> String; fn as_str(&self) -> &str; diff --git a/json_path/tests/array_filter.rs b/json_path/tests/array_filter.rs index 87e6bb00b..af2196c4f 100644 --- a/json_path/tests/array_filter.rs +++ b/json_path/tests/array_filter.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ #[macro_use] diff --git a/json_path/tests/common.rs b/json_path/tests/common.rs index 0f8eda868..ac51697d8 100644 --- a/json_path/tests/common.rs +++ b/json_path/tests/common.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ use std::io::Read; diff --git a/json_path/tests/filter.rs b/json_path/tests/filter.rs index 9229715e4..e9296bd0e 100644 --- a/json_path/tests/filter.rs +++ b/json_path/tests/filter.rs @@ -1,9 +1,12 @@ -/* - * Copyright Redis Ltd. 
2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). - */ - +/* + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). + */ + #[macro_use] extern crate serde_json; diff --git a/json_path/tests/jsonpath_examples.rs b/json_path/tests/jsonpath_examples.rs index 46a44f99a..fd4b24358 100644 --- a/json_path/tests/jsonpath_examples.rs +++ b/json_path/tests/jsonpath_examples.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ #[macro_use] diff --git a/json_path/tests/op.rs b/json_path/tests/op.rs index c10f90956..5809d4b7e 100644 --- a/json_path/tests/op.rs +++ b/json_path/tests/op.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
*/ #[macro_use] diff --git a/json_path/tests/paths.rs b/json_path/tests/paths.rs index 3fb949234..074e78c7a 100644 --- a/json_path/tests/paths.rs +++ b/json_path/tests/paths.rs @@ -1,9 +1,12 @@ -/* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). - */ - +/* + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). + */ + #[macro_use] extern crate serde_json; diff --git a/json_path/tests/return_type.rs b/json_path/tests/return_type.rs index 5f2d60a9e..666b05809 100644 --- a/json_path/tests/return_type.rs +++ b/json_path/tests/return_type.rs @@ -1,9 +1,12 @@ -/* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). - */ - +/* + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). + */ + #[macro_use] extern crate serde_json; diff --git a/licenses/AGPLv3.txt b/licenses/AGPLv3.txt new file mode 100644 index 000000000..c5f57ac3c --- /dev/null +++ b/licenses/AGPLv3.txt @@ -0,0 +1,661 @@ +GNU AFFERO GENERAL PUBLIC LICENSE, Version 3, 19 Nov 2007 +======================================================== + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. 
It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. 
There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. diff --git a/licenses/SSPLv1.txt b/licenses/SSPLv1.txt index 5851192fd..ea3921393 100644 --- a/licenses/SSPLv1.txt +++ b/licenses/SSPLv1.txt @@ -555,101 +555,3 @@ return for a fee. END OF TERMS AND CONDITIONS - - - - - -Elastic License 2.0 - -URL: https://www.elastic.co/licensing/elastic-license - -## Acceptance - -By using the software, you agree to all of the terms and conditions below. - -## Copyright License - -The licensor grants you a non-exclusive, royalty-free, worldwide, -non-sublicensable, non-transferable license to use, copy, distribute, make -available, and prepare derivative works of the software, in each case subject to -the limitations and conditions below. - -## Limitations - -You may not provide the software to third parties as a hosted or managed -service, where the service provides users with access to any substantial set of -the features or functionality of the software. - -You may not move, change, disable, or circumvent the license key functionality -in the software, and you may not remove or obscure any functionality in the -software that is protected by the license key. - -You may not alter, remove, or obscure any licensing, copyright, or other notices -of the licensor in the software. Any use of the licensor’s trademarks is subject -to applicable law. - -## Patents - -The licensor grants you a license, under any patent claims the licensor can -license, or becomes able to license, to make, have made, use, sell, offer for -sale, import and have imported the software, in each case subject to the -limitations and conditions in this license. 
This license does not cover any -patent claims that you cause to be infringed by modifications or additions to -the software. If you or your company make any written claim that the software -infringes or contributes to infringement of any patent, your patent license for -the software granted under these terms ends immediately. If your company makes -such a claim, your patent license ends immediately for work on behalf of your -company. - -## Notices - -You must ensure that anyone who gets a copy of any part of the software from you -also gets a copy of these terms. - -If you modify the software, you must include in any modified copies of the -software prominent notices stating that you have modified the software. - -## No Other Rights - -These terms do not imply any licenses other than those expressly granted in -these terms. - -## Termination - -If you use the software in violation of these terms, such use is not licensed, -and your licenses will automatically terminate. If the licensor provides you -with a notice of your violation, and you cease all violation of this license no -later than 30 days after you receive that notice, your licenses will be -reinstated retroactively. However, if you violate these terms after such -reinstatement, any additional violation of these terms will cause your licenses -to terminate automatically and permanently. - -## No Liability - -*As far as the law allows, the software comes as is, without any warranty or -condition, and the licensor will not be liable to you for any damages arising -out of these terms or the use or nature of the software, under any kind of -legal claim.* - -## Definitions - -The **licensor** is the entity offering these terms, and the **software** is the -software the licensor makes available under these terms, including any portion -of it. - -**you** refers to the individual or entity agreeing to these terms. 
- -**your company** is any legal entity, sole proprietorship, or other kind of -organization that you work for, plus all organizations that have control over, -are under the control of, or are under common control with that -organization. **control** means ownership of substantially all the assets of an -entity, or the power to direct its management and policies by vote, contract, or -otherwise. Control can be direct or indirect. - -**your licenses** are all the licenses granted to you for the software under -these terms. - -**use** means anything you do with the software requiring one of your licenses. - -**trademark** means trademarks, service marks, and similar rights. diff --git a/ramp.yml b/pack/ramp.yml similarity index 72% rename from ramp.yml rename to pack/ramp.yml index 5fb95ae68..8f4b2874d 100644 --- a/ramp.yml +++ b/pack/ramp.yml @@ -4,10 +4,12 @@ author: Redis Labs email: redismodules@redislabs.com description: Native JSON Data Type for Redis homepage: http://redisjson.io -license: Redis Source Available License 2.0 (RSALv2) or the Server Side Public License v1 (SSPLv1) +license: Redis Source Available License 2.0 (RSALv2) or the Server Side Public License v1 (SSPLv1) or the GNU Affero General Public License version 3 (AGPLv3) command_line_args: "" -min_redis_version: "7.1" -min_redis_pack_version: "7.2" +compatible_redis_version: "7.4" +min_redis_version: "7.4" +min_redis_pack_version: "7.6.0" +bigstore_version_2_support: true capabilities: - types - no_multi_key diff --git a/redis_json/Cargo.toml b/redis_json/Cargo.toml index 6dd29fb3d..fba96ac3e 100644 --- a/redis_json/Cargo.toml +++ b/redis_json/Cargo.toml @@ -5,7 +5,7 @@ authors = ["Guy Korland ", "Meir Shpilraien ( null() } +pub fn json_api_open_key_with_flags_internal( + manager: M, + ctx: *mut rawmod::RedisModuleCtx, + key: RedisString, + flags: KeyFlags, +) -> *const M::V { + let ctx: Context = Context::new(ctx); + if let Ok(h) = manager.open_key_read_with_flags(&ctx, &key, flags) { + if let 
Ok(Some(v)) = h.get_value() { + return v; + } + } + null() +} + pub fn json_api_get_at(_: M, json: *const c_void, index: size_t) -> *const c_void { let json = unsafe { &*(json.cast::()) }; match json.get_type() { @@ -309,7 +324,10 @@ pub fn get_llapi_ctx() -> Context { #[macro_export] macro_rules! redis_json_module_export_shared_api { ( - get_manage: $get_manager_expr:expr, + get_manage: { + $( $condition:expr => $manager_ident:ident { $($field:ident: $value:expr),* $(,)? } ),* $(,)? + _ => $default_manager:expr $(,)? + }, pre_command_function: $pre_command_function_expr:expr, ) => { use std::ptr::NonNull; @@ -321,11 +339,37 @@ macro_rules! redis_json_module_export_shared_api { ) -> *mut c_void { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_open_key_internal(mngr, ctx, RedisString::new(NonNull::new(ctx), key_str))as *mut c_void}, ) } + #[no_mangle] + pub extern "C" fn JSONAPI_openKey_withFlags( + ctx: *mut rawmod::RedisModuleCtx, + key_str: *mut rawmod::RedisModuleString, + flags: c_int, + ) -> *mut c_void { + run_on_manager!( + pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, + run: |mngr| { + json_api_open_key_with_flags_internal( + mngr, + ctx, + RedisString::new(NonNull::new(ctx), key_str), + KeyFlags::from_bits_truncate(flags as i32), + ) as *mut c_void + }, + ) + } + #[no_mangle] #[allow(clippy::not_unsafe_ptr_arg_deref)] pub extern "C" fn JSONAPI_openKeyFromStr( @@ -335,7 +379,10 @@ macro_rules! 
redis_json_module_export_shared_api { let key = unsafe { CStr::from_ptr(path).to_str().unwrap() }; run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_open_key_internal(mngr, ctx, RedisString::create(NonNull::new(ctx), key)) as *mut c_void}, ) } @@ -344,7 +391,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_get(key: *const c_void, path: *const c_char) -> *const c_void { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get(mngr, key, path)}, ) } @@ -353,7 +403,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_next(iter: *mut c_void) -> *const c_void { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_next(mngr, iter)}, ) } @@ -362,7 +415,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_len(iter: *const c_void) -> size_t { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_len(mngr, iter)}, ) } @@ -371,7 +427,10 @@ macro_rules! 
redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_freeIter(iter: *mut c_void) { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_free_iter(mngr, iter)}, ) } @@ -380,7 +439,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getAt(json: *const c_void, index: size_t) -> *const c_void { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_at(mngr, json, index)}, ) } @@ -389,7 +451,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getLen(json: *const c_void, count: *mut size_t) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_len(mngr, json, count)}, ) } @@ -398,7 +463,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getType(json: *const c_void) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_type(mngr, json)}, ) } @@ -407,7 +475,10 @@ macro_rules! 
redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getInt(json: *const c_void, val: *mut c_longlong) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_int(mngr, json, val)}, ) } @@ -416,7 +487,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getDouble(json: *const c_void, val: *mut c_double) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_double(mngr, json, val)}, ) } @@ -425,7 +499,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getBoolean(json: *const c_void, val: *mut c_int) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_boolean(mngr, json, val)}, ) } @@ -438,7 +515,10 @@ macro_rules! redis_json_module_export_shared_api { ) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_string(mngr, json, str, len)}, ) } @@ -451,7 +531,10 @@ macro_rules! 
redis_json_module_export_shared_api { ) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_json(mngr, json, ctx, str)}, ) } @@ -462,7 +545,10 @@ macro_rules! redis_json_module_export_shared_api { str: *mut *mut rawmod::RedisModuleString) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_json_from_iter(mngr, iter, ctx, str)}, ) } @@ -471,7 +557,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_isJSON(key: *mut rawmod::RedisModuleKey) -> c_int { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_is_json(mngr, key)}, ) } @@ -491,7 +580,7 @@ macro_rules! redis_json_module_export_shared_api { #[no_mangle] pub extern "C" fn JSONAPI_pathFree(json_path: *mut c_void) { - unsafe { Box::from_raw(json_path.cast::()) }; + unsafe { drop(Box::from_raw(json_path.cast::())) }; } #[no_mangle] @@ -510,7 +599,10 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_resetIter(iter: *mut c_void) { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_reset_iter(mngr, iter)}, ) } @@ -519,7 +611,10 @@ macro_rules! 
redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_getKeyValues(json: *const c_void) -> *const c_void { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_get_key_value(mngr, json)}, ) } @@ -529,7 +624,10 @@ macro_rules! redis_json_module_export_shared_api { str: *mut *mut rawmod::RedisModuleString) -> *const c_void { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_next_key_value(mngr, iter, str)}, ) } @@ -538,44 +636,32 @@ macro_rules! redis_json_module_export_shared_api { pub extern "C" fn JSONAPI_freeKeyValuesIter(iter: *mut c_void) { run_on_manager!( pre_command: ||$pre_command_function_expr(&get_llapi_ctx(), &Vec::new()), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|{json_api_free_key_values_iter(mngr, iter)}, ) } - static REDISJSON_GETAPI_V1: &str = concat!("RedisJSON_V1", "\0"); - static REDISJSON_GETAPI_V2: &str = concat!("RedisJSON_V2", "\0"); - static REDISJSON_GETAPI_V3: &str = concat!("RedisJSON_V3", "\0"); - static REDISJSON_GETAPI_V4: &str = concat!("RedisJSON_V4", "\0"); + // The apiname argument of export_shared_api should be a string literal with static lifetime + static mut VEC_EXPORT_SHARED_API_NAME : Vec = Vec::new(); pub fn export_shared_api(ctx: &Context) { unsafe { LLAPI_CTX = Some(rawmod::RedisModule_GetThreadSafeContext.unwrap()( std::ptr::null_mut(), )); - ctx.export_shared_api( - (&JSONAPI_CURRENT as *const RedisJSONAPI_CURRENT).cast::(), - REDISJSON_GETAPI_V1.as_ptr().cast::(), - ); - ctx.log_notice("Exported RedisJSON_V1 API"); 
- - ctx.export_shared_api( - (&JSONAPI_CURRENT as *const RedisJSONAPI_CURRENT).cast::(), - REDISJSON_GETAPI_V2.as_ptr().cast::(), - ); - ctx.log_notice("Exported RedisJSON_V2 API"); - - ctx.export_shared_api( - (&JSONAPI_CURRENT as *const RedisJSONAPI_CURRENT).cast::(), - REDISJSON_GETAPI_V3.as_ptr().cast::(), - ); - ctx.log_notice("Exported RedisJSON_V3 API"); - - ctx.export_shared_api( - (&JSONAPI_CURRENT as *const RedisJSONAPI_CURRENT).cast::(), - REDISJSON_GETAPI_V4.as_ptr().cast::(), - ); - ctx.log_notice("Exported RedisJSON_V4 API"); + + for v in 1..6 { + let version = format!("RedisJSON_V{}", v); + VEC_EXPORT_SHARED_API_NAME.push(CString::new(version.as_str()).unwrap()); + ctx.export_shared_api( + (&JSONAPI_CURRENT as *const RedisJSONAPI_CURRENT).cast::(), + VEC_EXPORT_SHARED_API_NAME[v-1].as_ptr().cast::(), + ); + ctx.log_notice(&format!("Exported {} API", version)); + } }; } @@ -608,6 +694,8 @@ macro_rules! redis_json_module_export_shared_api { getKeyValues: JSONAPI_getKeyValues, nextKeyValue: JSONAPI_nextKeyValue, freeKeyValuesIter: JSONAPI_freeKeyValuesIter, + // V5 entries + openKeyWithFlags: JSONAPI_openKey_withFlags, }; #[repr(C)] @@ -657,6 +745,13 @@ macro_rules! redis_json_module_export_shared_api { str: *mut *mut rawmod::RedisModuleString ) -> *const c_void, pub freeKeyValuesIter: extern "C" fn(iter: *mut c_void), + // V5 + pub openKeyWithFlags: extern "C" fn( + ctx: *mut rawmod::RedisModuleCtx, + key_str: *mut rawmod::RedisModuleString, + flags: c_int, + ) -> *mut c_void, + } }; } diff --git a/redis_json/src/commands.rs b/redis_json/src/commands.rs index aedd397b0..ce2779b42 100644 --- a/redis_json/src/commands.rs +++ b/redis_json/src/commands.rs @@ -1,16 +1,21 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. 
+ * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ +use crate::defrag::defrag_info; use crate::error::Error; use crate::formatter::ReplyFormatOptions; use crate::key_value::KeyValue; -use crate::manager::err_msg_json_path_doesnt_exist_with_param; -use crate::manager::err_msg_json_path_doesnt_exist_with_param_or; -use crate::manager::{Manager, ReadHolder, UpdateInfo, WriteHolder}; -use crate::redisjson::{Format, Path, ReplyFormat}; +use crate::manager::{ + err_msg_json_path_doesnt_exist_with_param, err_msg_json_path_doesnt_exist_with_param_or, + Manager, ReadHolder, UpdateInfo, WriteHolder, +}; +use crate::redisjson::{Format, Path, ReplyFormat, SetOptions, JSON_ROOT_PATH}; use json_path::select_value::{SelectValue, SelectValueType}; use redis_module::{Context, RedisValue}; use redis_module::{NextArg, RedisError, RedisResult, RedisString, REDIS_OK}; @@ -19,16 +24,12 @@ use std::str::FromStr; use json_path::{calc_once_with_paths, compile, json_path::UserPathTracker}; -use crate::redisjson::SetOptions; - use serde_json::{Number, Value}; use itertools::FoldWhile::{Continue, Done}; use itertools::{EitherOrBoth, Itertools}; use serde::{Serialize, Serializer}; -const JSON_ROOT_PATH: &str = "$"; -const JSON_ROOT_PATH_LEGACY: &str = "."; const CMD_ARG_NOESCAPE: &str = "NOESCAPE"; const CMD_ARG_INDENT: &str = "INDENT"; const CMD_ARG_NEWLINE: &str = "NEWLINE"; @@ -85,15 +86,6 @@ fn is_resp3(ctx: &Context) -> bool { .contains(redis_module::ContextFlags::FLAGS_RESP3) } -/// Returns the deault path for the given RESP version -fn default_path(ctx: &Context) -> &str { - if is_resp3(ctx) { - JSON_ROOT_PATH - } else { - JSON_ROOT_PATH_LEGACY - } -} - /// /// JSON.GET /// [INDENT indentation-string] @@ -107,7 +99,7 @@ pub fn json_get(manager: M, ctx: &Context, args: Vec) - let key = args.next_arg()?; // Set Capacity to 1 
assuming the common case has one path - let mut paths: Vec = Vec::with_capacity(1); + let mut paths = Vec::with_capacity(1); let mut format_options = ReplyFormatOptions::new(is_resp3(ctx), ReplyFormat::STRING); @@ -146,12 +138,12 @@ pub fn json_get(manager: M, ctx: &Context, args: Vec) - // path is optional -> no path found we use legacy root "." if paths.is_empty() { - paths.push(Path::new(default_path(ctx))); + paths.push(Path::default()); } let key = manager.open_key_read(ctx, &key)?; let value = match key.get_value()? { - Some(doc) => KeyValue::new(doc).to_json(&mut paths, &format_options)?, + Some(doc) => KeyValue::new(doc).to_json(paths, &format_options)?, None => RedisValue::Null, }; @@ -193,34 +185,32 @@ pub fn json_set(manager: M, ctx: &Context, args: Vec) - match (current, set_option) { (Some(doc), op) => { - if path.get_path() == JSON_ROOT_PATH { + if path == JSON_ROOT_PATH { if op != SetOptions::NotExists { - redis_key.set_value(Vec::new(), val)?; - redis_key.apply_changes(ctx, "json.set")?; + redis_key.set_value(vec![], val)?; + redis_key.notify_keyspace_event(ctx, "json.set")?; + manager.apply_changes(ctx); REDIS_OK } else { Ok(RedisValue::Null) } } else { - let update_info = KeyValue::new(doc).find_paths(path.get_path(), &op)?; - if !update_info.is_empty() { - let updated = apply_updates::(&mut redis_key, val, update_info); - if updated { - redis_key.apply_changes(ctx, "json.set")?; - REDIS_OK - } else { - Ok(RedisValue::Null) - } + let update_info = KeyValue::new(doc).find_paths(path.get_path(), op)?; + if !update_info.is_empty() && apply_updates::(&mut redis_key, val, update_info) { + redis_key.notify_keyspace_event(ctx, "json.set")?; + manager.apply_changes(ctx); + REDIS_OK } else { Ok(RedisValue::Null) } } } (None, SetOptions::AlreadyExists) => Ok(RedisValue::Null), - (None, _) => { - if path.get_path() == JSON_ROOT_PATH { + _ => { + if path == JSON_ROOT_PATH { redis_key.set_value(Vec::new(), val)?; - redis_key.apply_changes(ctx, "json.set")?; + 
redis_key.notify_keyspace_event(ctx, "json.set")?; + manager.apply_changes(ctx); REDIS_OK } else { Err(RedisError::Str( @@ -259,13 +249,14 @@ pub fn json_merge(manager: M, ctx: &Context, args: Vec) match current { Some(doc) => { - if path.get_path() == JSON_ROOT_PATH { + if path == JSON_ROOT_PATH { redis_key.merge_value(Vec::new(), val)?; - redis_key.apply_changes(ctx, "json.merge")?; + redis_key.notify_keyspace_event(ctx, "json.merge")?; + manager.apply_changes(ctx); REDIS_OK } else { let mut update_info = - KeyValue::new(doc).find_paths(path.get_path(), &SetOptions::None)?; + KeyValue::new(doc).find_paths(path.get_path(), SetOptions::MergeExisting)?; if !update_info.is_empty() { let mut res = false; if update_info.len() == 1 { @@ -286,7 +277,8 @@ pub fn json_merge(manager: M, ctx: &Context, args: Vec) } } if res { - redis_key.apply_changes(ctx, "json.merge")?; + redis_key.notify_keyspace_event(ctx, "json.merge")?; + manager.apply_changes(ctx); REDIS_OK } else { Ok(RedisValue::Null) @@ -297,10 +289,11 @@ pub fn json_merge(manager: M, ctx: &Context, args: Vec) } } None => { - if path.get_path() == JSON_ROOT_PATH { + if path == JSON_ROOT_PATH { // Nothing to merge with it's a new doc redis_key.set_value(Vec::new(), val)?; - redis_key.apply_changes(ctx, "json.merge")?; + redis_key.notify_keyspace_event(ctx, "json.merge")?; + manager.apply_changes(ctx); REDIS_OK } else { Err(RedisError::Str( @@ -331,10 +324,10 @@ pub fn json_mset(manager: M, ctx: &Context, args: Vec) // Verify the path is valid and get all the update info let path = Path::new(args.next_str()?); - let update_info = if path.get_path() == JSON_ROOT_PATH { + let update_info = if path == JSON_ROOT_PATH { None } else if let Some(value) = key_value { - Some(KeyValue::new(value).find_paths(path.get_path(), &SetOptions::None)?) + Some(KeyValue::new(value).find_paths(path.get_path(), SetOptions::None)?) 
} else { return Err(RedisError::Str( "ERR new objects must be created at the root", @@ -348,7 +341,7 @@ pub fn json_mset(manager: M, ctx: &Context, args: Vec) actions.push((redis_key, update_info, value)); } - actions + let res = actions .into_iter() .fold(REDIS_OK, |res, (mut redis_key, update_info, value)| { let updated = if let Some(update_info) = update_info { @@ -358,10 +351,13 @@ pub fn json_mset(manager: M, ctx: &Context, args: Vec) redis_key.set_value(Vec::new(), value)? }; if updated { - redis_key.apply_changes(ctx, "json.mset")? + redis_key.notify_keyspace_event(ctx, "json.mset")? } res - }) + }); + + manager.apply_changes(ctx); + res } fn apply_updates( @@ -372,24 +368,18 @@ fn apply_updates( // If there is only one update info, we can avoid cloning the value if update_info.len() == 1 { match update_info.pop().unwrap() { - UpdateInfo::SUI(sui) => redis_key.set_value(sui.path, value).unwrap_or(false), - UpdateInfo::AUI(aui) => redis_key - .dict_add(aui.path, &aui.key, value) - .unwrap_or(false), + UpdateInfo::SUI(sui) => redis_key.set_value(sui.path, value), + UpdateInfo::AUI(aui) => redis_key.dict_add(aui.path, &aui.key, value), } + .unwrap_or(false) } else { - let mut updated = false; - for ui in update_info { - updated = match ui { - UpdateInfo::SUI(sui) => redis_key - .set_value(sui.path, value.clone()) - .unwrap_or(false), - UpdateInfo::AUI(aui) => redis_key - .dict_add(aui.path, &aui.key, value.clone()) - .unwrap_or(false), - } || updated - } - updated + update_info.into_iter().fold(false, |updated, ui| { + match ui { + UpdateInfo::SUI(sui) => redis_key.set_value(sui.path, value.clone()), + UpdateInfo::AUI(aui) => redis_key.dict_add(aui.path, &aui.key, value.clone()), + } + .unwrap_or(updated) + }) } } @@ -433,11 +423,8 @@ where { values_and_paths .into_iter() - .map(|(v, p)| match f(v) { - true => Some(p), - _ => None, - }) - .collect::>>>() + .map(|(v, p)| f(v).then_some(p)) + .collect() } /// Returns a Vec of Values with `None` for Values that do 
not match the filter @@ -447,11 +434,8 @@ where { values_and_paths .into_iter() - .map(|(v, _)| match f(v) { - true => Some(v), - _ => None, - }) - .collect::>>() + .map(|(v, _)| f(v).then_some(v)) + .collect() } fn find_all_paths( @@ -491,13 +475,13 @@ where values .into_iter() .map(|n| n.map_or_else(|| none_value.clone(), |t| t.into())) - .collect::>() + .collect() } /// Sort the paths so higher indices precede lower indices on the same array, /// And longer paths precede shorter paths /// And if a path is a sub-path of the other, then only paths with shallower hierarchy (closer to the top-level) remain -fn prepare_paths_for_deletion(paths: &mut Vec>) { +pub fn prepare_paths_for_updating(paths: &mut Vec>) { if paths.len() < 2 { // No need to reorder when there are less than 2 paths return; @@ -537,21 +521,28 @@ fn prepare_paths_for_deletion(paths: &mut Vec>) { }); // Remove paths which are nested by others (on each sub-tree only top most ancestor should be deleted) // (TODO: Add a mode in which the jsonpath selector will already skip nested paths) - let mut string_paths = Vec::new(); - paths.iter().for_each(|v| { - string_paths.push(v.join(",")); + let mut string_paths = paths.iter().map(|v| v.join(",")).collect_vec(); + string_paths.sort_by(|a, b| { + let i_a = a.parse::(); + let i_b = b.parse::(); + match (i_a, i_b) { + (Ok(i1), Ok(i2)) => i1.cmp(&i2), + _ => a.cmp(b), + } }); - string_paths.sort(); paths.retain(|v| { let path = v.join(","); - let found = string_paths.binary_search(&path).unwrap(); - for p in string_paths.iter().take(found) { - if path.starts_with(p.as_str()) { - return false; - } - } - true + string_paths + .iter() + .skip_while(|p| { + // Check if path is a proper nested path of p + // A path is nested if it starts with p followed by a comma, or if it equals p + !path.starts_with(*p) || (path.len() > p.len() && !path[p.len()..].starts_with(",")) + }) + .next() + .map(|found| path == *found) + .unwrap_or(false) }); } @@ -563,35 +554,38 @@ pub 
fn json_del(manager: M, ctx: &Context, args: Vec) - let key = args.next_arg()?; let path = match args.next() { - None => Path::new(default_path(ctx)), + None => Path::default(), Some(s) => Path::new(s.try_as_str()?), }; let mut redis_key = manager.open_key_write(ctx, key)?; - let deleted = match redis_key.get_value()? { - Some(doc) => { - let res = if path.get_path() == JSON_ROOT_PATH { - redis_key.delete()?; - 1 - } else { - let mut paths = find_paths(path.get_path(), doc, |_| true)?; - prepare_paths_for_deletion(&mut paths); - let mut changed = 0; - for p in paths { - if redis_key.delete_path(p)? { - changed += 1; - } - } - changed - }; - if res > 0 { - redis_key.apply_changes(ctx, "json.del")?; - } - res + let deleted = if let Some(doc) = redis_key.get_value()? { + if path != JSON_ROOT_PATH { + let mut paths = find_paths(path.get_path(), doc, |_| true)?; + prepare_paths_for_updating(&mut paths); + paths + .into_iter() + .try_fold(0, |acc, p| redis_key.delete_path(p).map(|v| acc + v as i64))? + } else { + 1 } - None => 0, + } else { + 0 }; - Ok((deleted as i64).into()) + + if deleted > 0 { + let is_empty = redis_key + .get_value()? 
+ .and_then(|v| v.is_empty()) + .unwrap_or(false); + if is_empty || path == JSON_ROOT_PATH { + redis_key.delete()?; + } + redis_key.notify_keyspace_event(ctx, "json.del")?; + manager.apply_changes(ctx); + } + + Ok(deleted.into()) } /// @@ -613,8 +607,6 @@ pub fn json_mget(manager: M, ctx: &Context, args: Vec) return Err(RedisError::WrongArity); } - let is_legacy = path.is_legacy(); - let results: Result, RedisError> = keys .iter() .map(|key| { @@ -624,16 +616,12 @@ pub fn json_mget(manager: M, ctx: &Context, args: Vec) json_key.get_value().map_or(Ok(RedisValue::Null), |value| { value.map_or(Ok(RedisValue::Null), |doc| { let key_value = KeyValue::new(doc); - if format_options.is_resp3_reply() { - Ok(key_value.to_resp3_path(&path, &format_options)) + let res = if !path.is_legacy() { + key_value.to_string_multi(path.get_path(), &format_options) } else { - let res = if !is_legacy { - key_value.to_string_multi(path.get_path(), &format_options) - } else { - key_value.to_string_single(path.get_path(), &format_options) - }; - Ok(res.map_or(RedisValue::Null, |v| v.into())) - } + key_value.to_string_single(path.get_path(), &format_options) + }; + Ok(res.map_or(RedisValue::Null, |v| v.into())) }) }) }) @@ -649,7 +637,7 @@ pub fn json_mget(manager: M, ctx: &Context, args: Vec) pub fn json_type(manager: M, ctx: &Context, args: Vec) -> RedisResult { let mut args = args.into_iter().skip(1); let key = args.next_arg()?; - let path = Path::new(args.next_str().unwrap_or(default_path(ctx))); + let path = args.next_str().map(Path::new).unwrap_or_default(); let key = manager.open_key_read(ctx, &key)?; @@ -676,8 +664,8 @@ where Some(root) => KeyValue::new(root) .get_values(path)? 
.iter() - .map(|v| (KeyValue::value_name(*v)).into()) - .collect::>() + .map(|v| RedisValue::from(KeyValue::value_name(*v))) + .collect_vec() .into(), None => RedisValue::Null, }; @@ -688,14 +676,11 @@ fn json_type_legacy(redis_key: &M::ReadHolder, path: &str) -> RedisResult where M: Manager, { - let value = redis_key.get_value()?.map_or_else( - || RedisValue::Null, - |doc| { - KeyValue::new(doc) - .get_type(path) - .map_or(RedisValue::Null, |s| s.into()) - }, - ); + let value = redis_key.get_value()?.map_or(RedisValue::Null, |doc| { + KeyValue::new(doc) + .get_type(path) + .map_or(RedisValue::Null, |s| s.into()) + }); Ok(value) } @@ -725,24 +710,48 @@ where // check context flags to see if RESP3 is enabled if is_resp3(ctx) { - let res = json_num_op_impl::(&mut redis_key, ctx, path.get_path(), number, op, cmd)? - .drain(..) - .map(|v| { - v.map_or(RedisValue::Null, |v| { - if let Some(i) = v.as_i64() { - RedisValue::Integer(i) - } else { - RedisValue::Float(v.as_f64().unwrap_or_default()) - } - }) + let res = json_num_op_impl::( + manager, + &mut redis_key, + ctx, + path.get_path(), + number, + op, + cmd, + )? 
+ .into_iter() + .map(|v| { + v.map_or(RedisValue::Null, |v| { + if let Some(i) = v.as_i64() { + RedisValue::Integer(i) + } else { + RedisValue::Float(v.as_f64().unwrap_or_default()) + } }) - .collect::>() - .into(); + }) + .collect_vec() + .into(); Ok(res) } else if path.is_legacy() { - json_num_op_legacy::(&mut redis_key, ctx, path.get_path(), number, op, cmd) + json_num_op_legacy::( + manager, + &mut redis_key, + ctx, + path.get_path(), + number, + op, + cmd, + ) } else { - let results = json_num_op_impl::(&mut redis_key, ctx, path.get_path(), number, op, cmd)?; + let results = json_num_op_impl::( + manager, + &mut redis_key, + ctx, + path.get_path(), + number, + op, + cmd, + )?; // Convert to RESP2 format return as one JSON array let values = to_json_value::(results, Value::Null); @@ -751,6 +760,7 @@ where } fn json_num_op_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -771,28 +781,30 @@ where ) })?; - let mut res = vec![]; let mut need_notify = false; - for p in paths { - res.push(match p { - Some(p) => { + let res = paths + .into_iter() + .map(|p| { + p.map(|p| { need_notify = true; - Some(match op { - NumOp::Incr => redis_key.incr_by(p, number)?, - NumOp::Mult => redis_key.mult_by(p, number)?, - NumOp::Pow => redis_key.pow_by(p, number)?, - }) - } - _ => None, - }); - } + match op { + NumOp::Incr => redis_key.incr_by(p, number), + NumOp::Mult => redis_key.mult_by(p, number), + NumOp::Pow => redis_key.pow_by(p, number), + } + }) + .transpose() + }) + .try_collect()?; if need_notify { - redis_key.apply_changes(ctx, cmd)?; + redis_key.notify_keyspace_event(ctx, cmd)?; + manager.apply_changes(ctx); } Ok(res) } fn json_num_op_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -818,7 +830,8 @@ where NumOp::Pow => redis_key.pow_by(p, number)?, }); } - redis_key.apply_changes(ctx, cmd)?; + redis_key.notify_keyspace_event(ctx, cmd)?; + manager.apply_changes(ctx); Ok(res.unwrap().to_string().into()) } 
else { Err(RedisError::String( @@ -874,13 +887,14 @@ pub fn json_bool_toggle( let mut redis_key = manager.open_key_write(ctx, key)?; if path.is_legacy() { - json_bool_toggle_legacy::(&mut redis_key, ctx, path.get_path()) + json_bool_toggle_legacy::(manager, &mut redis_key, ctx, path.get_path()) } else { - json_bool_toggle_impl::(&mut redis_key, ctx, path.get_path()) + json_bool_toggle_impl::(manager, &mut redis_key, ctx, path.get_path()) } } fn json_bool_toggle_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -904,12 +918,14 @@ where }); } if need_notify { - redis_key.apply_changes(ctx, "json.toggle")?; + redis_key.notify_keyspace_event(ctx, "json.toggle")?; + manager.apply_changes(ctx); } Ok(res.into()) } fn json_bool_toggle_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -926,7 +942,8 @@ where for p in paths { res = redis_key.bool_toggle(p)?; } - redis_key.apply_changes(ctx, "json.toggle")?; + redis_key.notify_keyspace_event(ctx, "json.toggle")?; + manager.apply_changes(ctx); Ok(res.to_string().into()) } else { Err(RedisError::String( @@ -956,20 +973,21 @@ pub fn json_str_append( path = Path::new(path_or_json); json = val.try_as_str()?; } else { - path = Path::new(default_path(ctx)); + path = Path::default(); json = path_or_json; } let mut redis_key = manager.open_key_write(ctx, key)?; if path.is_legacy() { - json_str_append_legacy::(&mut redis_key, ctx, path.get_path(), json) + json_str_append_legacy::(manager, &mut redis_key, ctx, path.get_path(), json) } else { - json_str_append_impl::(&mut redis_key, ctx, path.get_path(), json) + json_str_append_impl::(manager, &mut redis_key, ctx, path.get_path(), json) } } fn json_str_append_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -996,12 +1014,14 @@ where }); } if need_notify { - redis_key.apply_changes(ctx, "json.strappend")?; + redis_key.notify_keyspace_event(ctx, "json.strappend")?; + 
manager.apply_changes(ctx); } Ok(res.into()) } fn json_str_append_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1020,7 +1040,8 @@ where for p in paths { res = Some(redis_key.str_append(p, json.to_string())?); } - redis_key.apply_changes(ctx, "json.strappend")?; + redis_key.notify_keyspace_event(ctx, "json.strappend")?; + manager.apply_changes(ctx); Ok(res.unwrap().into()) } else { Err(RedisError::String( @@ -1035,7 +1056,7 @@ where pub fn json_str_len(manager: M, ctx: &Context, args: Vec) -> RedisResult { let mut args = args.into_iter().skip(1); let key = args.next_arg()?; - let path = Path::new(args.next_str().unwrap_or(default_path(ctx))); + let path = args.next_str().map(Path::new).unwrap_or_default(); let key = manager.open_key_read(ctx, &key)?; @@ -1099,13 +1120,14 @@ pub fn json_arr_append( let mut redis_key = manager.open_key_write(ctx, key)?; if path.is_legacy() { - json_arr_append_legacy::(&mut redis_key, ctx, &path, args) + json_arr_append_legacy::(manager, &mut redis_key, ctx, &path, args) } else { - json_arr_append_impl::(&mut redis_key, ctx, path.get_path(), args) + json_arr_append_impl::(manager, &mut redis_key, ctx, path.get_path(), args) } } fn json_arr_append_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &Path, @@ -1126,19 +1148,22 @@ where )) } else if paths.len() == 1 { let res = redis_key.arr_append(paths.pop().unwrap(), args)?; - redis_key.apply_changes(ctx, "json.arrappend")?; + redis_key.notify_keyspace_event(ctx, "json.arrappend")?; + manager.apply_changes(ctx); Ok(res.into()) } else { let mut res = 0; for p in paths { res = redis_key.arr_append(p, args.clone())?; } - redis_key.apply_changes(ctx, "json.arrappend")?; + redis_key.notify_keyspace_event(ctx, "json.arrappend")?; + manager.apply_changes(ctx); Ok(res.into()) } } fn json_arr_append_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1164,7 +1189,8 @@ where }); } if need_notify { - 
redis_key.apply_changes(ctx, "json.arrappend")?; + redis_key.notify_keyspace_event(ctx, "json.arrappend")?; + manager.apply_changes(ctx); } Ok(res.into()) } @@ -1257,13 +1283,14 @@ pub fn json_arr_insert( )?; let mut redis_key = manager.open_key_write(ctx, key)?; if path.is_legacy() { - json_arr_insert_legacy::(&mut redis_key, ctx, path.get_path(), index, args) + json_arr_insert_legacy::(manager, &mut redis_key, ctx, path.get_path(), index, args) } else { - json_arr_insert_impl::(&mut redis_key, ctx, path.get_path(), index, args) + json_arr_insert_impl::(manager, &mut redis_key, ctx, path.get_path(), index, args) } } fn json_arr_insert_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1292,12 +1319,14 @@ where } if need_notify { - redis_key.apply_changes(ctx, "json.arrinsert")?; + redis_key.notify_keyspace_event(ctx, "json.arrinsert")?; + manager.apply_changes(ctx); } Ok(res.into()) } fn json_arr_insert_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1321,7 +1350,8 @@ where for p in paths { res = Some(redis_key.arr_insert(p, &args, index)?); } - redis_key.apply_changes(ctx, "json.arrinsert")?; + redis_key.notify_keyspace_event(ctx, "json.arrinsert")?; + manager.apply_changes(ctx); Ok(res.unwrap().into()) } } @@ -1332,7 +1362,7 @@ where pub fn json_arr_len(manager: M, ctx: &Context, args: Vec) -> RedisResult { let mut args = args.into_iter().skip(1); let key = args.next_arg()?; - let path = Path::new(args.next_str().unwrap_or(default_path(ctx))); + let path = args.next_str().map(Path::new).unwrap_or_default(); let is_legacy = path.is_legacy(); let key = manager.open_key_read(ctx, &key)?; let root = match key.get_value()? 
{ @@ -1417,14 +1447,7 @@ pub fn json_arr_pop(manager: M, ctx: &Context, args: Vec { - if format_options.is_resp3_reply() { - (Path::new(JSON_ROOT_PATH), i64::MAX) - } else { - // Legacy behavior for backward compatibility - (Path::new(JSON_ROOT_PATH_LEGACY), i64::MAX) - } - } + None => (Path::default(), i64::MAX), Some(s) => { let path = Path::new(s.try_as_str()?); let index = args.next_i64().unwrap_or(-1); @@ -1441,13 +1464,21 @@ pub fn json_arr_pop(manager: M, ctx: &Context, args: Vec(&mut redis_key, ctx, path.get_path(), index) + json_arr_pop_legacy::(manager, &mut redis_key, ctx, path.get_path(), index) } else { - json_arr_pop_impl::(&mut redis_key, ctx, path.get_path(), index, &format_options) + json_arr_pop_impl::( + manager, + &mut redis_key, + ctx, + path.get_path(), + index, + &format_options, + ) } } fn json_arr_pop_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1480,12 +1511,14 @@ where }); } if need_notify { - redis_key.apply_changes(ctx, "json.arrpop")?; + redis_key.notify_keyspace_event(ctx, "json.arrpop")?; + manager.apply_changes(ctx); } Ok(res.into()) } fn json_arr_pop_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1507,7 +1540,8 @@ where None => Ok(().into()), })?); } - redis_key.apply_changes(ctx, "json.arrpop")?; + redis_key.notify_keyspace_event(ctx, "json.arrpop")?; + manager.apply_changes(ctx); res } else { Err(RedisError::String( @@ -1530,12 +1564,13 @@ pub fn json_arr_trim(manager: M, ctx: &Context, args: Vec(&mut redis_key, ctx, path.get_path(), start, stop) + json_arr_trim_legacy::(manager, &mut redis_key, ctx, path.get_path(), start, stop) } else { - json_arr_trim_impl::(&mut redis_key, ctx, path.get_path(), start, stop) + json_arr_trim_impl::(manager, &mut redis_key, ctx, path.get_path(), start, stop) } } fn json_arr_trim_impl( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1562,12 +1597,14 @@ where }); } if need_notify { - 
redis_key.apply_changes(ctx, "json.arrtrim")?; + redis_key.notify_keyspace_event(ctx, "json.arrtrim")?; + manager.apply_changes(ctx); } Ok(res.into()) } fn json_arr_trim_legacy( + manager: M, redis_key: &mut M::WriteHolder, ctx: &Context, path: &str, @@ -1591,7 +1628,8 @@ where for p in paths { res = Some(redis_key.arr_trim(p, start, stop)?); } - redis_key.apply_changes(ctx, "json.arrtrim")?; + redis_key.notify_keyspace_event(ctx, "json.arrtrim")?; + manager.apply_changes(ctx); Ok(res.unwrap().into()) } } @@ -1602,7 +1640,7 @@ where pub fn json_obj_keys(manager: M, ctx: &Context, args: Vec) -> RedisResult { let mut args = args.into_iter().skip(1); let key = args.next_arg()?; - let path = Path::new(args.next_str().unwrap_or(default_path(ctx))); + let path = args.next_str().map(Path::new).unwrap_or_default(); let mut key = manager.open_key_read(ctx, &key)?; if path.is_legacy() { @@ -1623,9 +1661,7 @@ where let values = find_all_values(path, root, |v| v.get_type() == SelectValueType::Object)?; let mut res: Vec = vec![]; for v in values { - res.push(v.map_or(RedisValue::Null, |v| { - v.keys().unwrap().collect::>().into() - })); + res.push(v.map_or(RedisValue::Null, |v| v.keys().unwrap().collect_vec().into())); } res.into() }; @@ -1642,7 +1678,7 @@ where }; let value = match KeyValue::new(root).get_first(path) { Ok(v) => match v.get_type() { - SelectValueType::Object => v.keys().unwrap().collect::>().into(), + SelectValueType::Object => v.keys().unwrap().collect_vec().into(), _ => { return Err(RedisError::String( err_msg_json_path_doesnt_exist_with_param_or(path, "not an object"), @@ -1661,7 +1697,7 @@ pub fn json_obj_len(manager: M, ctx: &Context, args: Vec>() + .collect_vec() .into(), None => { return Err(RedisError::String( @@ -1724,7 +1760,7 @@ pub fn json_clear(manager: M, ctx: &Context, args: Vec) )?; let paths = if paths.is_empty() { - vec![Path::new(JSON_ROOT_PATH)] + vec![Path::default()] } else { paths }; @@ -1743,14 +1779,12 @@ pub fn json_clear(manager: M, 
ctx: &Context, args: Vec) SelectValueType::Double => v.get_double() != 0.0, _ => false, })?; - let mut cleared = 0; - if !paths.is_empty() { - for p in paths { - cleared += redis_key.clear(p)?; - } - } + let cleared = paths + .into_iter() + .try_fold(0, |acc, p| redis_key.clear(p).map(|v| acc + v))?; if cleared > 0 { - redis_key.apply_changes(ctx, "json.clear")?; + redis_key.notify_keyspace_event(ctx, "json.clear")?; + manager.apply_changes(ctx); } Ok(cleared.into()) } @@ -1767,14 +1801,12 @@ pub fn json_debug(manager: M, ctx: &Context, args: Vec) match args.next_str()?.to_uppercase().as_str() { "MEMORY" => { let key = args.next_arg()?; - let path = Path::new(args.next_str().unwrap_or(default_path(ctx))); + let path = args.next_str().map(Path::new).unwrap_or_default(); let key = manager.open_key_read(ctx, &key)?; if path.is_legacy() { Ok(match key.get_value()? { - Some(doc) => { - manager.get_memory(KeyValue::new(doc).get_first(path.get_path())?)? - } + Some(doc) => M::get_memory(KeyValue::new(doc).get_first(path.get_path())?)?, None => 0, } .into()) @@ -1782,14 +1814,15 @@ pub fn json_debug(manager: M, ctx: &Context, args: Vec) Ok(match key.get_value()? { Some(doc) => KeyValue::new(doc) .get_values(path.get_path())? 
- .iter() - .map(|v| manager.get_memory(v).unwrap()) - .collect::>(), + .into_iter() + .map(M::get_memory) + .try_collect()?, None => vec![], } .into()) } } + "DEFRAG_INFO" => defrag_info(ctx), "HELP" => { let results = vec![ "MEMORY [path] - reports memory usage", @@ -1811,13 +1844,43 @@ pub fn json_resp(manager: M, ctx: &Context, args: Vec) let key = args.next_arg()?; let path = match args.next() { - None => Path::new(default_path(ctx)), + None => Path::default(), Some(s) => Path::new(s.try_as_str()?), }; let key = manager.open_key_read(ctx, &key)?; - key.get_value()?.map_or_else( - || Ok(RedisValue::Null), - |doc| KeyValue::new(doc).resp_serialize(path), - ) + key.get_value()?.map_or(Ok(RedisValue::Null), |doc| { + KeyValue::new(doc).resp_serialize(path) + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_prepare_paths_for_updating_with_numeric_pathes() { + let mut pathes = vec![ + vec!["0".to_string()], + vec!["1".to_string()], + vec!["2".to_string()], + vec!["3".to_string()], + vec!["4".to_string()], + vec!["5".to_string()], + vec!["6".to_string()], + vec!["7".to_string()], + vec!["8".to_string()], + vec!["9".to_string()], + vec!["10".to_string()], + vec!["20".to_string()], + vec!["30".to_string()], + vec!["40".to_string()], + vec!["50".to_string()], + vec!["60".to_string()], + vec!["100".to_string()], + ]; + let pathes_expected = pathes.clone().into_iter().rev().collect::>(); + prepare_paths_for_updating(&mut pathes); + assert_eq!(pathes, pathes_expected); + } } diff --git a/redis_json/src/defrag.rs b/redis_json/src/defrag.rs new file mode 100644 index 000000000..49e6ea24c --- /dev/null +++ b/redis_json/src/defrag.rs @@ -0,0 +1,106 @@ +use std::{ + alloc::Layout, + os::raw::{c_int, c_void}, +}; + +use ijson::{Defrag, DefragAllocator}; +use lazy_static::lazy_static; +use redis_module::{ + defrag::DefragContext, raw, redisvalue::RedisValueKey, Context, RedisGILGuard, RedisResult, + RedisValue, +}; +use 
redis_module_macros::{defrag_end_function, defrag_start_function}; + +use crate::redisjson::RedisJSON; + +#[derive(Default)] +pub(crate) struct DefragStats { + defrag_started: usize, + defrag_ended: usize, + keys_defrag: usize, +} + +lazy_static! { + pub(crate) static ref DEFRAG_STATS: RedisGILGuard = RedisGILGuard::default(); +} + +struct DefragCtxAllocator<'dc> { + defrag_ctx: &'dc DefragContext, +} + +impl<'dc> DefragAllocator for DefragCtxAllocator<'dc> { + unsafe fn realloc_ptr(&mut self, ptr: *mut T, _layout: Layout) -> *mut T { + self.defrag_ctx.defrag_realloc(ptr) + } + + /// Allocate memory for defrag + unsafe fn alloc(&mut self, layout: Layout) -> *mut u8 { + self.defrag_ctx.defrag_alloc(layout) + } + + /// Free memory for defrag + unsafe fn free(&mut self, ptr: *mut T, layout: Layout) { + self.defrag_ctx.defrag_dealloc(ptr, layout) + } +} + +#[defrag_start_function] +fn defrag_start(defrag_ctx: &DefragContext) { + let mut defrag_stats = DEFRAG_STATS.lock(defrag_ctx); + defrag_stats.defrag_started += 1; + ijson::reinit_shared_string_cache(); +} + +#[defrag_end_function] +fn defrag_end(defrag_ctx: &DefragContext) { + let mut defrag_stats = DEFRAG_STATS.lock(defrag_ctx); + defrag_stats.defrag_ended += 1; +} + +#[allow(non_snake_case, unused)] +pub unsafe extern "C" fn defrag( + ctx: *mut raw::RedisModuleDefragCtx, + key: *mut raw::RedisModuleString, + value: *mut *mut c_void, +) -> c_int { + let defrag_ctx = DefragContext::new(ctx); + + let mut defrag_stats = DEFRAG_STATS.lock(&defrag_ctx); + defrag_stats.keys_defrag += 1; + + let mut defrag_allocator = DefragCtxAllocator { + defrag_ctx: &defrag_ctx, + }; + let value = value.cast::<*mut RedisJSON>(); + let new_val = defrag_allocator.realloc_ptr(*value, Layout::new::>()); + if !new_val.is_null() { + std::ptr::write(value, new_val); + } + std::ptr::write( + &mut (**value).data as *mut ijson::IValue, + std::ptr::read(*value).data.defrag(&mut defrag_allocator), + ); + 0 +} + +pub(crate) fn defrag_info(ctx: 
&Context) -> RedisResult { + let defrag_stats = DEFRAG_STATS.lock(ctx); + Ok(RedisValue::OrderedMap( + [ + ( + RedisValueKey::String("defrag_started".to_owned()), + RedisValue::Integer(defrag_stats.defrag_started as i64), + ), + ( + RedisValueKey::String("defrag_ended".to_owned()), + RedisValue::Integer(defrag_stats.defrag_ended as i64), + ), + ( + RedisValueKey::String("keys_defrag".to_owned()), + RedisValue::Integer(defrag_stats.keys_defrag as i64), + ), + ] + .into_iter() + .collect(), + )) +} diff --git a/redis_json/src/error.rs b/redis_json/src/error.rs index b7113d4d9..de3adb9d8 100644 --- a/redis_json/src/error.rs +++ b/redis_json/src/error.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ use json_path::json_path::QueryCompilationError; diff --git a/redis_json/src/formatter.rs b/redis_json/src/formatter.rs index a26ca7908..5c575b441 100644 --- a/redis_json/src/formatter.rs +++ b/redis_json/src/formatter.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ // Custom serde_json formatter supporting ReJSON formatting options. @@ -35,7 +38,7 @@ DEALINGS IN THE SOFTWARE. 
use serde_json::ser::Formatter; use std::io; -pub use crate::redisjson::{Format, ReplyFormat}; +pub use crate::redisjson::ReplyFormat; pub struct ReplyFormatOptions<'a> { pub format: ReplyFormat, diff --git a/redis_json/src/include/rejson_api.h b/redis_json/src/include/rejson_api.h index 15b11110b..c0e04de72 100644 --- a/redis_json/src/include/rejson_api.h +++ b/redis_json/src/include/rejson_api.h @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ #pragma once @@ -115,8 +118,15 @@ typedef struct RedisJSONAPI { // Free the iterator void (*freeKeyValuesIter)(JSONKeyValuesIterator iter); + //////////////// + // V5 entries // + //////////////// + + RedisJSON (*openKeyWithFlags)(RedisModuleCtx *ctx, RedisModuleString *key_name, int flags); + } RedisJSONAPI; +#define RedisJSONAPI_LATEST_API_VER 5 #ifdef __cplusplus } #endif diff --git a/redis_json/src/ivalue_manager.rs b/redis_json/src/ivalue_manager.rs index c9114cf83..82f05b6f0 100644 --- a/redis_json/src/ivalue_manager.rs +++ b/redis_json/src/ivalue_manager.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
*/ use crate::error::Error; @@ -10,10 +13,10 @@ use crate::manager::{Manager, ReadHolder, WriteHolder}; use crate::redisjson::normalize_arr_start_index; use crate::Format; use crate::REDIS_JSON_TYPE; -use bson::decode_document; -use ijson::object::Entry; -use ijson::{DestructuredMut, INumber, IObject, IString, IValue, ValueType}; -use redis_module::key::{verify_type, RedisKey, RedisKeyWritable}; +use bson::{from_document, Document}; +use ijson::{DestructuredMut, INumber, IObject, IString, IValue}; +use json_path::select_value::{SelectValue, SelectValueType}; +use redis_module::key::{verify_type, KeyFlags, RedisKey, RedisKeyWritable}; use redis_module::raw::{RedisModuleKey, Status}; use redis_module::rediserror::RedisError; use redis_module::{Context, NotifyEvent, RedisResult, RedisString}; @@ -33,219 +36,89 @@ pub struct IValueKeyHolderWrite<'a> { val: Option<&'a mut RedisJSON>, } -/// -/// Replaces a value at a given `path`, starting from `root` -/// -/// The new value is the value returned from `func`, which is called on the current value. -/// -/// If the returned value from `func` is [`None`], the current value is removed. -/// If the returned value from `func` is [`Err`], the current value remains (although it could be modified by `func`) -/// -fn replace Result, Error>>( - path: &[String], - root: &mut IValue, - mut func: F, -) -> Result<(), Error> { - let mut target = root; - - let last_index = path.len().saturating_sub(1); - for (i, token) in path.iter().enumerate() { - let target_once = target; - let is_last = i == last_index; - let target_opt = match target_once.type_() { - ValueType::Object => { - let obj = target_once.as_object_mut().unwrap(); - if is_last { - if let Entry::Occupied(mut e) = obj.entry(token) { - let v = e.get_mut(); - if let Some(res) = (func)(v)? 
{ - *v = res; - } else { - e.remove(); - } - } - return Ok(()); - } - obj.get_mut(token.as_str()) - } - ValueType::Array => { - let arr = target_once.as_array_mut().unwrap(); - if let Ok(x) = token.parse::() { - if is_last { - if x < arr.len() { - let v = &mut arr.as_mut_slice()[x]; - if let Some(res) = (func)(v)? { - *v = res; - } else { - arr.remove(x); - } - } - return Ok(()); - } - arr.get_mut(x) - } else { - panic!("Array index should have been parsed successfully before reaching here") - } - } +fn follow_path(path: Vec, root: &mut IValue) -> Option<&mut IValue> { + path.into_iter() + .try_fold(root, |target, token| match target.destructure_mut() { + DestructuredMut::Object(obj) => obj.get_mut(token.as_str()), + DestructuredMut::Array(arr) => arr.get_mut(token.parse::().ok()?), _ => None, - }; - - if let Some(t) = target_opt { - target = t; - } else { - break; - } - } - - Ok(()) + }) } /// /// Updates a value at a given `path`, starting from `root` /// /// The value is modified by `func`, which is called on the current value. -/// If the returned value from `func` is [`None`], the current value is removed. 
/// If the returned value from `func` is [`Err`], the current value remains (although it could be modified by `func`) /// -fn update Result, Error>>( - path: &[String], - root: &mut IValue, - mut func: F, -) -> Result<(), Error> { - let mut target = root; - - let last_index = path.len().saturating_sub(1); - for (i, token) in path.iter().enumerate() { - let target_once = target; - let is_last = i == last_index; - let target_opt = match target_once.type_() { - ValueType::Object => { - let obj = target_once.as_object_mut().unwrap(); - if is_last { - if let Entry::Occupied(mut e) = obj.entry(token) { - let v = e.get_mut(); - match (func)(v) { - Ok(res) => { - if res.is_none() { - e.remove(); - } - } - Err(err) => return Err(err), - } - } - return Ok(()); - } - obj.get_mut(token.as_str()) - } - ValueType::Array => { - let arr = target_once.as_array_mut().unwrap(); - if let Ok(x) = token.parse::() { - if is_last { - if x < arr.len() { - let v = &mut arr.as_mut_slice()[x]; - match (func)(v) { - Ok(res) => { - if res.is_none() { - arr.remove(x); - } - } - Err(err) => return Err(err), - } - } - return Ok(()); - } - arr.get_mut(x) - } else { - panic!("Array index should have been parsed successfully before reaching here") - } - } - _ => None, - }; - - if let Some(t) = target_opt { - target = t; - } else { - break; - } - } +fn update(path: Vec, root: &mut IValue, func: F) -> RedisResult +where + F: FnOnce(&mut IValue) -> RedisResult, +{ + follow_path(path, root).map_or_else( + || Err(RedisError::String(err_msg_json_path_doesnt_exist())), + func, + ) +} - Ok(()) +/// +/// Removes a value at a given `path`, starting from `root` +/// +fn remove(mut path: Vec, root: &mut IValue) -> bool { + let token = path.pop().unwrap(); + follow_path(path, root) + .and_then(|target| match target.destructure_mut() { + DestructuredMut::Object(obj) => obj.remove(token.as_str()), + DestructuredMut::Array(arr) => arr.remove(token.parse::().ok()?), + _ => None, + }) + .is_some() } impl<'a> 
IValueKeyHolderWrite<'a> { - fn do_op(&mut self, paths: &[String], mut op_fun: F) -> Result<(), RedisError> + fn do_op(&mut self, paths: Vec, op_fun: F) -> RedisResult where - F: FnMut(&mut IValue) -> Result, Error>, + F: FnOnce(&mut IValue) -> RedisResult, { - if paths.is_empty() { - // updating the root require special treatment - let root = self.get_value().unwrap().unwrap(); - let res = (op_fun)(root); - match res { - Ok(res) => { - if res.is_none() { - root.take(); - } - } - Err(err) => { - return Err(RedisError::String(err.msg)); - } - } - } else { - update(paths, self.get_value().unwrap().unwrap(), op_fun)?; - } - - Ok(()) + let root = self.get_value()?.unwrap(); + update(paths, root, op_fun) } fn do_num_op( &mut self, path: Vec, num: &str, - mut op1_fun: F1, - mut op2_fun: F2, - ) -> Result + op1: F1, + op2: F2, + ) -> RedisResult where - F1: FnMut(i64, i64) -> i64, - F2: FnMut(f64, f64) -> f64, + F1: FnOnce(i64, i64) -> i64, + F2: FnOnce(f64, f64) -> f64, { let in_value = &serde_json::from_str(num)?; if let serde_json::Value::Number(in_value) = in_value { - let mut res = None; - self.do_op(&path, |v| { - let num_res = match ( - v.as_number().unwrap().has_decimal_point(), - in_value.as_i64(), - ) { - (false, Some(num2)) => Ok(((op1_fun)(v.to_i64().unwrap(), num2)).into()), + let n = self.do_op(path, |v| { + let new_val = match (v.get_type(), in_value.as_i64()) { + (SelectValueType::Long, Some(num2)) => { + let num1 = v.get_long(); + Ok(op1(num1, num2).into()) + } _ => { - let num1 = v.to_f64().unwrap(); + let num1 = v.get_double(); let num2 = in_value.as_f64().unwrap(); - INumber::try_from((op2_fun)(num1, num2)) + INumber::try_from(op2(num1, num2)) .map_err(|_| RedisError::Str("result is not a number")) } - }; - let new_val = IValue::from(num_res?); - *v = new_val.clone(); - res = Some(new_val); - Ok(Some(())) + }?; + *v = IValue::from(new_val.clone()); + Ok(new_val) })?; - match res { - None => Err(RedisError::String(err_msg_json_path_doesnt_exist())), - 
Some(n) => { - if let Some(n) = n.as_number() { - if !n.has_decimal_point() { - Ok(n.to_i64().unwrap().into()) - } else if let Some(f) = n.to_f64() { - Ok(serde_json::Number::from_f64(f).unwrap()) - } else { - Err(RedisError::Str("result is not a number")) - } - } else { - Err(RedisError::Str("result is not a number")) - } - } + if n.has_decimal_point() { + n.to_f64().and_then(serde_json::Number::from_f64) + } else { + n.to_i64().map(Into::into) } + .ok_or_else(|| RedisError::Str("result is not a number")) } else { Err(RedisError::Str("bad input number")) } @@ -258,32 +131,22 @@ impl<'a> IValueKeyHolderWrite<'a> { Ok(()) } - fn set_root(&mut self, v: Option) -> Result<(), RedisError> { - match v { - Some(inner) => { - self.get_json_holder()?; - match &mut self.val { - Some(v) => v.data = inner, - None => self - .key - .set_value(&REDIS_JSON_TYPE, RedisJSON { data: inner })?, - } - } - None => { - self.val = None; - self.key.delete()?; - } + fn set_root(&mut self, data: IValue) -> RedisResult { + self.get_json_holder()?; + if let Some(val) = &mut self.val { + val.data = data + } else { + self.key.set_value(&REDIS_JSON_TYPE, RedisJSON { data })? 
} - Ok(()) + Ok(true) } } impl<'a> WriteHolder for IValueKeyHolderWrite<'a> { - fn apply_changes(&mut self, ctx: &Context, command: &str) -> Result<(), RedisError> { + fn notify_keyspace_event(&mut self, ctx: &Context, command: &str) -> Result<(), RedisError> { if ctx.notify_keyspace_event(NotifyEvent::MODULE, command, &self.key_name) != Status::Ok { Err(RedisError::Str("failed notify key space event")) } else { - ctx.replicate_verbatim(); Ok(()) } } @@ -302,238 +165,170 @@ impl<'a> WriteHolder for IValueKeyHolderWrite<'a> { } } - fn set_value(&mut self, path: Vec, mut v: IValue) -> Result { - let mut updated = false; + fn set_value(&mut self, path: Vec, mut v: IValue) -> RedisResult { if path.is_empty() { // update the root - self.set_root(Some(v))?; - updated = true; + self.set_root(v) } else { - replace(&path, self.get_value()?.unwrap(), |_v| { - updated = true; - Ok(Some(v.take())) - })?; + let root = self.get_value()?.unwrap(); + Ok(update(path, root, |val| Ok(*val = v.take())).is_ok()) } - Ok(updated) } - fn merge_value(&mut self, path: Vec, v: IValue) -> Result { - let mut updated = false; - if path.is_empty() { - merge(self.get_value()?.unwrap(), &v); - // update the root - updated = true; - } else { - replace(&path, self.get_value()?.unwrap(), |current| { - updated = true; - merge(current, &v); - Ok(Some(current.take())) - })?; - } - Ok(updated) + fn merge_value(&mut self, path: Vec, mut v: IValue) -> RedisResult { + let root = self.get_value()?.unwrap(); + Ok(update(path, root, |current| Ok(merge(current, v.take()))).is_ok()) } - fn dict_add( - &mut self, - path: Vec, - key: &str, - mut v: IValue, - ) -> Result { - let mut updated = false; - if path.is_empty() { - // update the root - let root = self.get_value().unwrap().unwrap(); - if let Some(o) = root.as_object_mut() { - if !o.contains_key(key) { - updated = true; + fn dict_add(&mut self, path: Vec, key: &str, mut v: IValue) -> RedisResult { + self.do_op(path, |val| { + 
val.as_object_mut().map_or(Ok(false), |o| { + let res = !o.contains_key(key); + if res { o.insert(key.to_string(), v.take()); } - } - } else { - update(&path, self.get_value().unwrap().unwrap(), |val| { - if val.is_object() { - let o = val.as_object_mut().unwrap(); - if !o.contains_key(key) { - updated = true; - o.insert(key.to_string(), v.take()); - } - } - Ok(Some(())) - })?; - } - Ok(updated) + Ok(res) + }) + }) } - fn delete_path(&mut self, path: Vec) -> Result { - let mut deleted = false; - update(&path, self.get_value().unwrap().unwrap(), |_v| { - deleted = true; // might delete more than a single value - Ok(None) - })?; - Ok(deleted) + fn delete_path(&mut self, path: Vec) -> RedisResult { + self.get_value().map(|root| remove(path, root.unwrap())) } - fn incr_by(&mut self, path: Vec, num: &str) -> Result { - self.do_num_op(path, num, |i1, i2| i1 + i2, |f1, f2| f1 + f2) + fn incr_by(&mut self, path: Vec, num: &str) -> RedisResult { + self.do_num_op(path, num, i64::wrapping_add, |f1, f2| f1 + f2) } - fn mult_by(&mut self, path: Vec, num: &str) -> Result { - self.do_num_op(path, num, |i1, i2| i1 * i2, |f1, f2| f1 * f2) + fn mult_by(&mut self, path: Vec, num: &str) -> RedisResult { + self.do_num_op(path, num, i64::wrapping_mul, |f1, f2| f1 * f2) } - fn pow_by(&mut self, path: Vec, num: &str) -> Result { + fn pow_by(&mut self, path: Vec, num: &str) -> RedisResult { self.do_num_op(path, num, |i1, i2| i1.pow(i2 as u32), f64::powf) } - fn bool_toggle(&mut self, path: Vec) -> Result { - let mut res = None; - self.do_op(&path, |v| { + fn bool_toggle(&mut self, path: Vec) -> RedisResult { + self.do_op(path, |v| { if let DestructuredMut::Bool(mut bool_mut) = v.destructure_mut() { //Using DestructuredMut in order to modify a `Bool` variant let val = bool_mut.get() ^ true; bool_mut.set(val); - res = Some(val); + Ok(val) + } else { + Err(err_json(v, "bool").into()) } - Ok(Some(())) - })?; - res.ok_or_else(|| RedisError::String(err_msg_json_path_doesnt_exist())) - } - - fn 
str_append(&mut self, path: Vec, val: String) -> Result { - let json = serde_json::from_str(&val)?; - if let serde_json::Value::String(s) = json { - let mut res = None; - self.do_op(&path, |v| { - let v_str = v.as_string_mut().unwrap(); - let new_str = [v_str.as_str(), s.as_str()].concat(); - res = Some(new_str.len()); - *v_str = IString::intern(&new_str); - Ok(Some(())) - })?; - res.ok_or_else(|| RedisError::String(err_msg_json_path_doesnt_exist())) - } else { - Err(RedisError::String(err_msg_json_expected( + }) + } + + fn str_append(&mut self, path: Vec, val: String) -> RedisResult { + match serde_json::from_str(&val)? { + serde_json::Value::String(s) => self.do_op(path, |v| { + v.as_string_mut() + .map(|v_str| { + let new_str = [v_str.as_str(), s.as_str()].concat(); + *v_str = IString::intern(&new_str); + Ok(new_str.len()) + }) + .unwrap_or_else(|| Err(err_json(v, "string").into())) + }), + _ => Err(RedisError::String(err_msg_json_expected( "string", val.as_str(), - ))) + ))), } } - fn arr_append(&mut self, path: Vec, args: Vec) -> Result { - let mut res = None; - self.do_op(&path, |v| { - let arr = v.as_array_mut().unwrap(); - for a in &args { - arr.push(a.clone()); - } - res = Some(arr.len()); - Ok(Some(())) - })?; - res.ok_or_else(|| RedisError::String(err_msg_json_path_doesnt_exist())) + fn arr_append(&mut self, path: Vec, args: Vec) -> RedisResult { + self.do_op(path, |v| { + v.as_array_mut() + .map(|arr| { + arr.extend(args); + Ok(arr.len()) + }) + .unwrap_or_else(|| Err(err_json(v, "array").into())) + }) } - fn arr_insert( - &mut self, - paths: Vec, - args: &[IValue], - index: i64, - ) -> Result { - let mut res = None; - self.do_op(&paths, |v: &mut IValue| { - // Verify legal index in bounds - let len = v.len().unwrap() as i64; - let index = if index < 0 { len + index } else { index }; - if !(0..=len).contains(&index) { - return Err("ERR index out of bounds".into()); - } - let mut index = index as usize; - let curr = v.as_array_mut().unwrap(); - 
curr.reserve(args.len()); - for a in args { - curr.insert(index, a.clone()); - index += 1; - } - res = Some(curr.len()); - Ok(Some(())) - })?; - res.ok_or_else(|| RedisError::String(err_msg_json_path_doesnt_exist())) + fn arr_insert(&mut self, paths: Vec, args: &[IValue], idx: i64) -> RedisResult { + self.do_op(paths, |v| { + v.as_array_mut() + .map(|arr| { + // Verify legal index in bounds + let len = arr.len() as _; + let idx = if idx < 0 { len + idx } else { idx }; + if !(0..=len).contains(&idx) { + return Err(RedisError::Str("ERR index out of bounds")); + } + arr.extend(args.iter().cloned()); + arr[idx as _..].rotate_right(args.len()); + Ok(arr.len()) + }) + .unwrap_or_else(|| Err(err_json(v, "array").into())) + }) } - fn arr_pop) -> RedisResult>( - &mut self, - path: Vec, - index: i64, - serialize_callback: C, - ) -> RedisResult { - let mut res = None; - self.do_op(&path, |v| { - if let Some(array) = v.as_array_mut() { - if array.is_empty() { - return Ok(Some(())); - } - // Verify legal index in bounds - let len = array.len() as i64; - let index = normalize_arr_start_index(index, len) as usize; - res = Some(array.remove(index).unwrap()); - Ok(Some(())) - } else { - Err(err_json(v, "array")) - } + fn arr_pop(&mut self, path: Vec, index: i64, serialize_callback: C) -> RedisResult + where + C: FnOnce(Option<&IValue>) -> RedisResult, + { + let res = self.do_op(path, |v| { + v.as_array_mut() + .map(|array| { + if array.is_empty() { + return None; + } + // Verify legal index in bounds + let len = array.len() as i64; + let index = normalize_arr_start_index(index, len) as usize; + array.remove(index) + }) + .ok_or_else(|| err_json(v, "array").into()) })?; serialize_callback(res.as_ref()) } - fn arr_trim(&mut self, path: Vec, start: i64, stop: i64) -> Result { - let mut res = None; - self.do_op(&path, |v| { - if let Some(array) = v.as_array_mut() { - let len = array.len() as i64; - let stop = stop.normalize(len); - let start = if start < 0 || start < len { - 
start.normalize(len) - } else { - stop + 1 // start >=0 && start >= len - }; - let range = if start > stop || len == 0 { - 0..0 // Return an empty array - } else { - start..(stop + 1) - }; - - array.rotate_left(range.start); - array.truncate(range.end - range.start); - res = Some(array.len()); - Ok(Some(())) - } else { - Err(err_json(v, "array")) - } - })?; - res.ok_or_else(|| RedisError::String(err_msg_json_path_doesnt_exist())) + fn arr_trim(&mut self, path: Vec, start: i64, stop: i64) -> RedisResult { + self.do_op(path, |v| { + v.as_array_mut() + .map(|array| { + let len = array.len() as i64; + let stop = stop.normalize(len); + let start = if start < 0 || start < len { + start.normalize(len) + } else { + stop + 1 // start >=0 && start >= len + }; + let range = if start > stop || len == 0 { + 0..0 // Return an empty array + } else { + start..(stop + 1) + }; + + array.rotate_left(range.start); + array.truncate(range.end - range.start); + array.len() + }) + .ok_or_else(|| err_json(v, "array").into()) + }) } - fn clear(&mut self, path: Vec) -> Result { - let mut cleared = 0; - self.do_op(&path, |v| match v.type_() { - ValueType::Object => { - let obj = v.as_object_mut().unwrap(); + fn clear(&mut self, path: Vec) -> RedisResult { + self.do_op(path, |v| match v.destructure_mut() { + DestructuredMut::Object(obj) => { obj.clear(); - cleared += 1; - Ok(Some(())) + Ok(1) } - ValueType::Array => { - let arr = v.as_array_mut().unwrap(); + DestructuredMut::Array(arr) => { arr.clear(); - cleared += 1; - Ok(Some(())) + Ok(1) } - ValueType::Number => { - *v = IValue::from(0); - cleared += 1; - Ok(Some(())) + DestructuredMut::Number(n) => { + *n = INumber::from(0); + Ok(1) } - _ => Ok(Some(())), - })?; - Ok(cleared) + _ => Ok(0), + }) } } @@ -548,9 +343,9 @@ impl ReadHolder for IValueKeyHolderRead { } } -fn merge(doc: &mut IValue, patch: &IValue) { +fn merge(doc: &mut IValue, mut patch: IValue) { if !patch.is_object() { - *doc = patch.clone(); + *doc = patch; return; } @@ 
-558,13 +353,20 @@ fn merge(doc: &mut IValue, patch: &IValue) { *doc = IObject::new().into(); } let map = doc.as_object_mut().unwrap(); - for (key, value) in patch.as_object().unwrap() { - if value.is_null() { - map.remove(key.as_str()); - } else { - merge(map.entry(key.as_str()).or_insert(IValue::NULL), value); - } - } + patch + .as_object_mut() + .unwrap() + .into_iter() + .for_each(|(key, value)| { + if value.is_null() { + map.remove(key.as_str()); + } else { + merge( + map.entry(key.as_str()).or_insert(IValue::NULL), + value.take(), + ) + } + }) } pub struct RedisIValueJsonKeyManager<'a> { @@ -586,6 +388,16 @@ impl<'a> Manager for RedisIValueJsonKeyManager<'a> { Ok(IValueKeyHolderRead { key }) } + fn open_key_read_with_flags( + &self, + ctx: &Context, + key: &RedisString, + flags: KeyFlags, + ) -> Result { + let key = ctx.open_key_with_flags(key, flags); + Ok(IValueKeyHolderRead { key }) + } + fn open_key_write( &self, ctx: &Context, @@ -598,6 +410,13 @@ impl<'a> Manager for RedisIValueJsonKeyManager<'a> { val: None, }) } + /** + * This function is used to apply changes to the slave and AOF. + * It is called after the command is executed. 
+ */ + fn apply_changes(&self, ctx: &Context) { + ctx.replicate_verbatim(); + } fn from_str(&self, val: &str, format: Format, limit_depth: bool) -> Result { match format { @@ -608,85 +427,32 @@ impl<'a> Manager for RedisIValueJsonKeyManager<'a> { } IValue::deserialize(&mut deserializer).map_err(|e| e.into()) } - Format::BSON => decode_document(&mut Cursor::new(val.as_bytes())).map_or_else( + Format::BSON => from_document( + Document::from_reader(&mut Cursor::new(val.as_bytes())) + .map_err(|e| e.to_string())?, + ) + .map_or_else( |e| Err(e.to_string().into()), - |docs| { - let v = if docs.is_empty() { - IValue::NULL - } else { - docs.iter().next().map_or_else( - || IValue::NULL, - |(_, b)| { - let v: serde_json::Value = b.clone().into(); - let mut out = serde_json::Serializer::new(Vec::new()); - v.serialize(&mut out).unwrap(); - self.from_str( - &String::from_utf8(out.into_inner()).unwrap(), - Format::JSON, - limit_depth, - ) - .unwrap() - }, + |docs: Document| { + let v = docs.iter().next().map_or(IValue::NULL, |(_, b)| { + let v: serde_json::Value = b.clone().into(); + let mut out = serde_json::Serializer::new(Vec::new()); + v.serialize(&mut out).unwrap(); + self.from_str( + &String::from_utf8(out.into_inner()).unwrap(), + Format::JSON, + limit_depth, ) - }; + .unwrap() + }); Ok(v) }, ), } } - /// - /// following https://github.com/Diggsey/ijson/issues/23#issuecomment-1377270111 - /// - fn get_memory(&self, v: &Self::V) -> Result { - let res = size_of::() - + match v.type_() { - ValueType::Null | ValueType::Bool => 0, - ValueType::Number => { - let num = v.as_number().unwrap(); - if num.has_decimal_point() { - // 64bit float - 16 - } else if num >= &INumber::from(-128) && num <= &INumber::from(383) { - // 8bit - 0 - } else if num > &INumber::from(-8_388_608) && num <= &INumber::from(8_388_607) { - // 24bit - 4 - } else { - // 64bit - 16 - } - } - ValueType::String => v.as_string().unwrap().len(), - ValueType::Array => { - let arr = v.as_array().unwrap(); - let 
capacity = arr.capacity(); - if capacity == 0 { - 0 - } else { - size_of::() * (capacity + 2) - + arr - .into_iter() - .map(|v| self.get_memory(v).unwrap()) - .sum::() - } - } - ValueType::Object => { - let val = v.as_object().unwrap(); - let capacity = val.capacity(); - if capacity == 0 { - 0 - } else { - size_of::() * (capacity * 3 + 2) - + val - .into_iter() - .map(|(s, v)| s.len() + self.get_memory(v).unwrap()) - .sum::() - } - } - }; - Ok(res) + fn get_memory(v: &Self::V) -> Result { + Ok(v.mem_allocated() + size_of::()) } fn is_json(&self, key: *mut RedisModuleKey) -> Result { @@ -702,17 +468,18 @@ impl<'a> Manager for RedisIValueJsonKeyManager<'a> { mod tests { use super::*; + static SINGLE_THREAD_TEST_MUTEX: std::sync::Mutex<()> = std::sync::Mutex::new(()); + #[test] fn test_get_memory() { - let manager = RedisIValueJsonKeyManager { - phantom: PhantomData, - }; + let _guard = SINGLE_THREAD_TEST_MUTEX.lock(); + let json = r#"{ - "a": 100.12, - "b": "foo", - "c": true, - "d": 126, - "e": -112, + "a": 100.12, + "b": "foo", + "c": true, + "d": 126, + "e": -112, "f": 7388608, "g": -6388608, "h": 9388608, @@ -723,7 +490,35 @@ mod tests { "m": {"t": "f"} }"#; let value = serde_json::from_str(json).unwrap(); - let res = manager.get_memory(&value).unwrap(); - assert_eq!(res, 903); + let res = RedisIValueJsonKeyManager::get_memory(&value).unwrap(); + assert_eq!(res, 736); + } + + /// Tests the deserialiser of IValue for a string with unicode + /// characters, to ensure that the deserialiser can handle + /// unicode characters well. 
+ #[test] + fn test_unicode_characters() { + let _guard = SINGLE_THREAD_TEST_MUTEX.lock(); + + let json = r#""\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0""#; + let value: IValue = serde_json::from_str(json).expect("IValue parses fine."); + assert_eq!( + value.as_string().unwrap(), + "\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}" + ); + + let json = r#"{"\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0":"\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0"}"#; + let value: IValue = serde_json::from_str(json).expect("IValue parses fine."); + assert_eq!( + value + .as_object() + .unwrap() + .get("\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}") + .unwrap() + .as_string() + .unwrap(), + "\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}\u{a0}" + ); } } diff --git a/redis_json/src/key_value.rs b/redis_json/src/key_value.rs index c78b3e33f..dc4d49176 100644 --- a/redis_json/src/key_value.rs +++ b/redis_json/src/key_value.rs @@ -1,3 +1,4 @@ +use itertools::Itertools; use std::collections::HashMap; use json_path::{ @@ -10,7 +11,7 @@ use serde::Serialize; use serde_json::Value; use crate::{ - commands::{FoundIndex, ObjectLen, Values}, + commands::{prepare_paths_for_updating, FoundIndex, ObjectLen, Values}, error::Error, formatter::{RedisJsonFormatter, ReplyFormatOptions}, manager::{ @@ -46,9 +47,9 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { } else { Ok(self .get_values(path.get_path())? - .iter() + .into_iter() .map(|v| Self::resp_serialize_inner(v)) - .collect::>() + .collect_vec() .into()) } } @@ -112,7 +113,7 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { fn to_json_multi( &self, - paths: &mut Vec, + paths: Vec, format: &ReplyFormatOptions, is_legacy: bool, ) -> Result { @@ -124,32 +125,23 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { let path_len = paths.len(); let temp_doc = paths - .drain(..) 
+ .into_iter() .fold(HashMap::with_capacity(path_len), |mut acc, path: Path| { - let query = compile(path.get_path()); - // If we can't compile the path, we can't continue - if query.is_err() { - return acc; - } - - let query = query.unwrap(); - let results = calc_once(query, self.val); + if let Ok(query) = compile(path.get_path()) { + let results = calc_once(query, self.val); - let value = if is_legacy { - if results.is_empty() { - None + let value = if is_legacy { + (!results.is_empty()).then(|| Values::Single(results[0])) } else { - Some(Values::Single(results[0])) - } - } else { - Some(Values::Multi(results)) - }; + Some(Values::Multi(results)) + }; - if value.is_none() && missing_path.is_none() { - missing_path = Some(path.get_original().to_string()); + if value.is_none() && missing_path.is_none() { + missing_path = Some(path.get_original().to_string()); + } + acc.insert(path.get_original(), value); } - acc.insert(path.get_original(), value); acc }); if let Some(p) = missing_path { @@ -159,17 +151,17 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { // If we're using RESP3, we need to convert the HashMap to a RedisValue::Map unless we're using the legacy format let res = if format.is_resp3_reply() { let map = temp_doc - .iter() + .into_iter() .map(|(k, v)| { let key = RedisValueKey::String(k.to_string()); let value = match v { Some(Values::Single(value)) => Self::value_to_resp3(value, format), - Some(Values::Multi(values)) => Self::values_to_resp3(values, format), + Some(Values::Multi(values)) => Self::values_to_resp3(&values, format), None => RedisValue::Null, }; (key, value) }) - .collect::>(); + .collect(); RedisValue::Map(map) } else { Self::serialize_object(&temp_doc, format).into() @@ -177,24 +169,18 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { Ok(res) } - fn to_resp3( - &self, - paths: &mut Vec, - format: &ReplyFormatOptions, - ) -> Result { + fn to_resp3(&self, paths: Vec, format: &ReplyFormatOptions) -> Result { let results = paths - .drain(..) 
+ .into_iter() .map(|path: Path| self.to_resp3_path(&path, format)) - .collect::>(); - + .collect(); Ok(RedisValue::Array(results)) } pub fn to_resp3_path(&self, path: &Path, format: &ReplyFormatOptions) -> RedisValue { - compile(path.get_path()).map_or_else( - |_| RedisValue::Array(vec![]), - |q| Self::values_to_resp3(&calc_once(q, self.val), format), - ) + compile(path.get_path()).map_or(RedisValue::Array(vec![]), |q| { + Self::values_to_resp3(&calc_once(q, self.val), format) + }) } fn to_json_single( @@ -218,7 +204,7 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { values .iter() .map(|v| Self::value_to_resp3(v, format)) - .collect::>() + .collect_vec() .into() } @@ -235,7 +221,7 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { .values() .unwrap() .map(|v| Self::value_to_resp3(v, format)) - .collect::>(), + .collect(), ), SelectValueType::Object => RedisValue::Map( value @@ -247,7 +233,7 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { Self::value_to_resp3(v, format), ) }) - .collect::>(), + .collect(), ), } } else { @@ -263,7 +249,7 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { pub fn to_json( &self, - paths: &mut Vec, + paths: Vec, format: &ReplyFormatOptions, ) -> Result { let is_legacy = !paths.iter().any(|p| !p.is_legacy()); @@ -329,15 +315,13 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { } } - pub fn find_paths( - &mut self, - path: &str, - option: &SetOptions, - ) -> Result, Error> { - if SetOptions::NotExists != *option { + pub fn find_paths(&mut self, path: &str, option: SetOptions) -> Result, Error> { + if option != SetOptions::NotExists { let query = compile(path)?; - let res = calc_once_paths(query, self.val); - + let mut res = calc_once_paths(query, self.val); + if option != SetOptions::MergeExisting { + prepare_paths_for_updating(&mut res); + } if !res.is_empty() { return Ok(res .into_iter() @@ -345,7 +329,7 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { .collect()); } } - if SetOptions::AlreadyExists == *option { + if option == 
SetOptions::AlreadyExists { Ok(Vec::new()) // empty vector means no updates } else { self.find_add_paths(path) @@ -380,7 +364,15 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { SelectValueType::Null => "null", SelectValueType::Bool => "boolean", SelectValueType::Long => "integer", - SelectValueType::Double => "number", + // For dealing with u64 values over i64::MAX, get_type() replies + // that they are SelectValueType::Double to prevent panics from + // incorrect casts. However when querying the type of such a value, + // any response other than 'integer' is a breaking change + SelectValueType::Double => match value.is_double() { + Some(true) => "number", + Some(false) => "integer", + _ => unreachable!(), + }, SelectValueType::String => "string", SelectValueType::Array => "array", SelectValueType::Object => "object", @@ -414,45 +406,30 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { } pub fn is_equal(a: &T1, b: &T2) -> bool { - match (a.get_type(), b.get_type()) { - (SelectValueType::Null, SelectValueType::Null) => true, - (SelectValueType::Bool, SelectValueType::Bool) => a.get_bool() == b.get_bool(), - (SelectValueType::Long, SelectValueType::Long) => a.get_long() == b.get_long(), - (SelectValueType::Double, SelectValueType::Double) => a.get_double() == b.get_double(), - (SelectValueType::String, SelectValueType::String) => a.get_str() == b.get_str(), - (SelectValueType::Array, SelectValueType::Array) => { - if a.len().unwrap() == b.len().unwrap() { - for (i, e) in a.values().unwrap().enumerate() { - if !Self::is_equal(e, b.get_index(i).unwrap()) { - return false; - } - } - true - } else { - false + a.get_type() == b.get_type() + && match a.get_type() { + SelectValueType::Null => true, + SelectValueType::Bool => a.get_bool() == b.get_bool(), + SelectValueType::Long => a.get_long() == b.get_long(), + SelectValueType::Double => a.get_double() == b.get_double(), + SelectValueType::String => a.get_str() == b.get_str(), + SelectValueType::Array => { + 
a.len().unwrap() == b.len().unwrap() + && a.values() + .unwrap() + .zip(b.values().unwrap()) + .all(|(a, b)| Self::is_equal(a, b)) } - } - (SelectValueType::Object, SelectValueType::Object) => { - if a.len().unwrap() == b.len().unwrap() { - for k in a.keys().unwrap() { - let temp1 = a.get_key(k); - let temp2 = b.get_key(k); - match (temp1, temp2) { - (Some(a1), Some(b1)) => { - if !Self::is_equal(a1, b1) { - return false; - } - } - (_, _) => return false, - } - } - true - } else { - false + SelectValueType::Object => { + a.len().unwrap() == b.len().unwrap() + && a.keys() + .unwrap() + .all(|k| match (a.get_key(k), b.get_key(k)) { + (Some(a), Some(b)) => Self::is_equal(a, b), + _ => false, + }) } } - (_, _) => false, - } } pub fn arr_index( @@ -464,9 +441,11 @@ impl<'a, V: SelectValue + 'a> KeyValue<'a, V> { ) -> Result { let res = self .get_values(path)? - .iter() - .map(|value| Self::arr_first_index_single(value, &json_value, start, end).into()) - .collect::>(); + .into_iter() + .map(|value| { + RedisValue::from(Self::arr_first_index_single(value, &json_value, start, end)) + }) + .collect_vec(); Ok(res.into()) } diff --git a/redis_json/src/lib.rs b/redis_json/src/lib.rs index ae2fb5824..0dd85ecbd 100644 --- a/redis_json/src/lib.rs +++ b/redis_json/src/lib.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). 
*/ extern crate redis_module; @@ -11,6 +14,8 @@ use commands::*; use redis_module::native_types::RedisType; use redis_module::raw::RedisModuleTypeMethods; #[cfg(not(feature = "as-library"))] +use redis_module::AclCategory; +#[cfg(not(feature = "as-library"))] use redis_module::InfoContext; #[cfg(not(feature = "as-library"))] @@ -18,13 +23,17 @@ use redis_module::Status; #[cfg(not(feature = "as-library"))] use redis_module::{Context, RedisResult}; +#[cfg(not(feature = "as-library"))] +use redis_module::key::KeyFlags; + #[cfg(not(feature = "as-library"))] use crate::c_api::{ get_llapi_ctx, json_api_free_iter, json_api_free_key_values_iter, json_api_get, json_api_get_at, json_api_get_boolean, json_api_get_double, json_api_get_int, json_api_get_json, json_api_get_json_from_iter, json_api_get_key_value, json_api_get_len, json_api_get_string, json_api_get_type, json_api_is_json, json_api_len, json_api_next, - json_api_next_key_value, json_api_open_key_internal, json_api_reset_iter, LLAPI_CTX, + json_api_next_key_value, json_api_open_key_internal, json_api_open_key_with_flags_internal, + json_api_reset_iter, LLAPI_CTX, }; use crate::redisjson::Format; @@ -32,6 +41,7 @@ mod array_index; mod backward; pub mod c_api; pub mod commands; +pub mod defrag; pub mod error; mod formatter; pub mod ivalue_manager; @@ -69,12 +79,13 @@ pub static REDIS_JSON_TYPE: RedisType = RedisType::new( free_effort: None, unlink: None, copy: Some(redisjson::type_methods::copy), - defrag: None, + defrag: Some(defrag::defrag), free_effort2: None, unlink2: None, copy2: None, mem_usage2: None, + aux_save2: None, }, ); ///////////////////////////////////////////////////// @@ -83,40 +94,63 @@ pub static REDIS_JSON_TYPE: RedisType = RedisType::new( macro_rules! run_on_manager { ( pre_command: $pre_command_expr:expr, - get_mngr: $get_mngr_expr:expr, + get_manage: { + $( $condition:expr => $manager_ident:ident { $($field:ident: $value:expr),* $(,)? } ),* $(,)? + _ => $default_manager:expr $(,)? 
+ }, run: $run_expr:expr, ) => {{ $pre_command_expr(); - let m = $get_mngr_expr; - match m { + + $( + if $condition { + let mngr = $manager_ident { + $( $field: $value, )* + }; + return $run_expr(mngr); + } + )* + + // Handle default case (Option) + match $default_manager { Some(mngr) => $run_expr(mngr), - None => $run_expr($crate::ivalue_manager::RedisIValueJsonKeyManager { - phantom: PhantomData, - }), + None => { + let mngr = $crate::ivalue_manager::RedisIValueJsonKeyManager { + phantom: PhantomData, + }; + $run_expr(mngr) + } } }}; } #[macro_export] -macro_rules! redis_json_module_create {( +macro_rules! redis_json_module_create { + ( data_types: [ $($data_type:ident),* $(,)* ], pre_command_function: $pre_command_function_expr:expr, - get_manage: $get_manager_expr:expr, + get_manage: { + $( $condition:expr => $manager_ident:ident { $($field:ident: $value:expr),* $(,)? } ),* $(,)? + _ => $default_manager:expr $(,)? + }, version: $version:expr, init: $init_func:expr, info: $info_func:ident, ) => { - use redis_module::{redis_module, RedisString}; + use redis_module::RedisString; use std::marker::PhantomData; use std::os::raw::{c_double, c_int, c_longlong}; - use redis_module::{raw as rawmod, LogLevel}; + use redis_module::raw as rawmod; use rawmod::ModuleOptions; + use redis_module::redis_module; + use redis_module::logging::RedisLogLevel; + use redis_module::RedisValue; use std::{ - ffi::CStr, + ffi::{CStr, CString}, os::raw::{c_char, c_void}, }; use libc::size_t; @@ -128,7 +162,10 @@ macro_rules! redis_json_module_create {( |ctx: &Context, args: Vec| -> RedisResult { run_on_manager!( pre_command: ||$pre_command_function_expr(ctx, &args), - get_mngr: $get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, run: |mngr|$cmd(mngr, ctx, args), ) } @@ -150,7 +187,10 @@ macro_rules! redis_json_module_create {( } redis_json_module_export_shared_api! 
{ - get_manage:$get_manager_expr, + get_manage: { + $( $condition => $manager_ident { $($field: $value),* } ),* + _ => $default_manager + }, pre_command_function: $pre_command_function_expr, } @@ -163,12 +203,23 @@ macro_rules! redis_json_module_create {( export_shared_api(ctx); ctx.set_module_options(ModuleOptions::HANDLE_IO_ERRORS); ctx.log_notice("Enabled diskless replication"); + let is_bigredis = + ctx.call("config", &["get", "bigredis-enabled"]) + .map_or(false, |res| match res { + RedisValue::Array(a) => !a.is_empty(), + _ => false, + }); + ctx.log_notice(&format!("Initialized shared string cache, thread safe: {is_bigredis}.")); + if let Err(e) = $crate::init_ijson_shared_string_cache(is_bigredis) { + ctx.log(RedisLogLevel::Warning, &format!("Failed initializing shared string cache, {e}.")); + return Status::Err; + } $init_func(ctx, args) } fn json_init_config(ctx: &Context, args: &[RedisString]) -> Status{ if args.len() % 2 != 0 { - ctx.log(LogLevel::Warning, "RedisJson arguments must be key:value pairs"); + ctx.log(RedisLogLevel::Warning, "RedisJson arguments must be key:value pairs"); return Status::Err; } let mut args_map = HashMap::::new(); @@ -179,40 +230,42 @@ macro_rules! redis_json_module_create {( Status::Ok } + use AclCategory as ACL; redis_module! 
{ name: $crate::MODULE_NAME, version: $version, allocator: (get_allocator!(), get_allocator!()), data_types: [$($data_type,)*], + acl_categories: [ACL::from("json"), ], init: json_init_config, init: initialize, info: $info_func, commands: [ - ["json.del", json_command!(json_del), "write", 1,1,1], - ["json.get", json_command!(json_get), "readonly", 1,1,1], - ["json.mget", json_command!(json_mget), "readonly", 1,1,1], - ["json.set", json_command!(json_set), "write deny-oom", 1,1,1], - ["json.mset", json_command!(json_mset), "write deny-oom", 1,-1,3], - ["json.type", json_command!(json_type), "readonly", 1,1,1], - ["json.numincrby", json_command!(json_num_incrby), "write", 1,1,1], - ["json.toggle", json_command!(json_bool_toggle), "write deny-oom", 1,1,1], - ["json.nummultby", json_command!(json_num_multby), "write", 1,1,1], - ["json.numpowby", json_command!(json_num_powby), "write", 1,1,1], - ["json.strappend", json_command!(json_str_append), "write deny-oom", 1,1,1], - ["json.strlen", json_command!(json_str_len), "readonly", 1,1,1], - ["json.arrappend", json_command!(json_arr_append), "write deny-oom", 1,1,1], - ["json.arrindex", json_command!(json_arr_index), "readonly", 1,1,1], - ["json.arrinsert", json_command!(json_arr_insert), "write deny-oom", 1,1,1], - ["json.arrlen", json_command!(json_arr_len), "readonly", 1,1,1], - ["json.arrpop", json_command!(json_arr_pop), "write", 1,1,1], - ["json.arrtrim", json_command!(json_arr_trim), "write", 1,1,1], - ["json.objkeys", json_command!(json_obj_keys), "readonly", 1,1,1], - ["json.objlen", json_command!(json_obj_len), "readonly", 1,1,1], - ["json.clear", json_command!(json_clear), "write", 1,1,1], - ["json.debug", json_command!(json_debug), "readonly", 2,2,1], - ["json.forget", json_command!(json_del), "write", 1,1,1], - ["json.resp", json_command!(json_resp), "readonly", 1,1,1], - ["json.merge", json_command!(json_merge), "write deny-oom", 1,1,1], + ["json.del", json_command!(json_del), "write", 1,1,1, ACL::Write, 
ACL::from("json")], + ["json.get", json_command!(json_get), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.mget", json_command!(json_mget), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.set", json_command!(json_set), "write deny-oom", 1,1,1, ACL::Write, ACL::from("json")], + ["json.mset", json_command!(json_mset), "write deny-oom", 1,-1,3, ACL::Write, ACL::from("json")], + ["json.type", json_command!(json_type), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.numincrby", json_command!(json_num_incrby), "write", 1,1,1, ACL::Write, ACL::from("json")], + ["json.toggle", json_command!(json_bool_toggle), "write deny-oom", 1,1,1, ACL::Write, ACL::from("json")], + ["json.nummultby", json_command!(json_num_multby), "write", 1,1,1, ACL::Write, ACL::from("json")], + ["json.numpowby", json_command!(json_num_powby), "write", 1,1,1, ACL::Write, ACL::from("json")], + ["json.strappend", json_command!(json_str_append), "write deny-oom", 1,1,1, ACL::Write, ACL::from("json")], + ["json.strlen", json_command!(json_str_len), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.arrappend", json_command!(json_arr_append), "write deny-oom", 1,1,1, ACL::Write, ACL::from("json")], + ["json.arrindex", json_command!(json_arr_index), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.arrinsert", json_command!(json_arr_insert), "write deny-oom", 1,1,1, ACL::Write, ACL::from("json")], + ["json.arrlen", json_command!(json_arr_len), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.arrpop", json_command!(json_arr_pop), "write", 1,1,1, ACL::Write, ACL::from("json")], + ["json.arrtrim", json_command!(json_arr_trim), "write", 1,1,1, ACL::Write, ACL::from("json")], + ["json.objkeys", json_command!(json_obj_keys), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.objlen", json_command!(json_obj_len), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.clear", json_command!(json_clear), "write", 1,1,1, ACL::Write, 
ACL::from("json")], + ["json.debug", json_command!(json_debug), "readonly", 2,2,1, ACL::Read, ACL::from("json")], + ["json.forget", json_command!(json_del), "write", 1,1,1, ACL::Write, ACL::from("json")], + ["json.resp", json_command!(json_resp), "readonly", 1,1,1, ACL::Read, ACL::from("json")], + ["json.merge", json_command!(json_merge), "write deny-oom", 1,1,1, ACL::Write, ACL::from("json")], ], } } @@ -226,15 +279,51 @@ const fn dummy_init(_ctx: &Context, _args: &[RedisString]) -> Status { Status::Ok } +pub fn init_ijson_shared_string_cache(is_bigredis: bool) -> Result<(), String> { + ijson::init_shared_string_cache(is_bigredis) +} + #[cfg(not(feature = "as-library"))] const fn dummy_info(_ctx: &InfoContext, _for_crash_report: bool) {} +const fn version() -> i32 { + let string = env!("CARGO_PKG_VERSION"); + let mut bytes = string.as_bytes(); + let mut value: i32 = 0; + let mut result = 0; + let mut multiplier = 10000; + + while let [byte, rest @ ..] = bytes { + bytes = rest; + match byte { + b'0'..=b'9' => { + value = value * 10 + (*byte - b'0') as i32; + } + b'.' => { + result += value * multiplier; + multiplier /= 100; + value = 0; + } + _ => { + // The provided string is not a valid version specification. + unreachable!() + } + } + } + + result + value +} + #[cfg(not(feature = "as-library"))] redis_json_module_create! { data_types: [REDIS_JSON_TYPE], pre_command_function: pre_command, - get_manage: Some(ivalue_manager::RedisIValueJsonKeyManager{phantom:PhantomData}), - version: 99_99_99, + get_manage: { + _ => Some(crate::ivalue_manager::RedisIValueJsonKeyManager { + phantom: PhantomData, + }) + }, + version: version(), init: dummy_init, info: dummy_info, } diff --git a/redis_json/src/manager.rs b/redis_json/src/manager.rs index fa900d68e..d9f0977f7 100644 --- a/redis_json/src/manager.rs +++ b/redis_json/src/manager.rs @@ -1,10 +1,14 @@ /* - * Copyright Redis Ltd. 
2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. + * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ use json_path::select_value::SelectValue; +use redis_module::key::KeyFlags; use serde_json::Number; use redis_module::raw::RedisModuleKey; @@ -62,7 +66,7 @@ pub trait WriteHolder { ) -> RedisResult; fn arr_trim(&mut self, path: Vec, start: i64, stop: i64) -> Result; fn clear(&mut self, path: Vec) -> Result; - fn apply_changes(&mut self, ctx: &Context, command: &str) -> Result<(), RedisError>; + fn notify_keyspace_event(&mut self, ctx: &Context, command: &str) -> Result<(), RedisError>; } pub trait Manager { @@ -80,14 +84,21 @@ pub trait Manager { ctx: &Context, key: &RedisString, ) -> Result; + fn open_key_read_with_flags( + &self, + ctx: &Context, + key: &RedisString, + flags: KeyFlags, + ) -> Result; fn open_key_write( &self, ctx: &Context, key: RedisString, ) -> Result; + fn apply_changes(&self, ctx: &Context); #[allow(clippy::wrong_self_convention)] fn from_str(&self, val: &str, format: Format, limit_depth: bool) -> Result; - fn get_memory(&self, v: &Self::V) -> Result; + fn get_memory(v: &Self::V) -> Result; fn is_json(&self, key: *mut RedisModuleKey) -> Result; } diff --git a/redis_json/src/redisjson.rs b/redis_json/src/redisjson.rs index a8c56c9c1..c5871c3d9 100644 --- a/redis_json/src/redisjson.rs +++ b/redis_json/src/redisjson.rs @@ -1,7 +1,10 @@ /* - * Copyright Redis Ltd. 2016 - present - * Licensed under your choice of the Redis Source Available License 2.0 (RSALv2) or - * the Server Side Public License v1 (SSPLv1). + * Copyright (c) 2006-Present, Redis Ltd. + * All rights reserved. 
+ * + * Licensed under your choice of (a) the Redis Source Available License 2.0 + * (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the + * GNU Affero General Public License v3 (AGPLv3). */ // RedisJSON Redis module. @@ -53,6 +56,7 @@ pub fn normalize_arr_indices(start: i64, end: i64, len: i64) -> (i64, i64) { pub enum SetOptions { NotExists, AlreadyExists, + MergeExisting, None, } @@ -145,6 +149,31 @@ impl<'a> Path<'a> { } } +const JSON_ROOT_PATH_LEGACY: &str = "."; +const JSON_ROOT_PATH_S: &str = "$"; +pub const JSON_ROOT_PATH: Path = Path { + original_path: JSON_ROOT_PATH_S, + fixed_path: None, +}; + +/// Returns the deault path for the given RESP version +impl Default for Path<'_> { + fn default() -> Self { + Path { + original_path: JSON_ROOT_PATH_LEGACY, + fixed_path: Some(JSON_ROOT_PATH_S.into()), + } + } +} + +impl PartialEq for Path<'_> { + fn eq(&self, other: &Self) -> bool { + self.get_path() == other.get_path() + } +} + +impl Eq for Path<'_> {} + impl Display for Path<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.get_path()) @@ -250,9 +279,6 @@ pub mod type_methods { #[allow(non_snake_case, unused)] pub unsafe extern "C" fn mem_usage(value: *const c_void) -> usize { let json = unsafe { &*(value as *mut RedisJSON) }; - let manager = RedisIValueJsonKeyManager { - phantom: PhantomData, - }; - manager.get_memory(&json.data).unwrap_or(0) + RedisIValueJsonKeyManager::get_memory(&json.data).unwrap_or(0) } } diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 000000000..e04778c16 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly-2025-07-30" diff --git a/sbin/get-redisearch b/sbin/get-redisearch index 6d1ae68e1..34193cd3d 100755 --- a/sbin/get-redisearch +++ b/sbin/get-redisearch @@ -66,7 +66,9 @@ else nick=ubuntu18.04 fi elif [[ $dist == centos || $dist == redhat || $dist == fedora || $dist == ol ]]; then - if [[ $nick == centos8 || $nick 
== ol8 || $nick == rocky8 ]]; then + if [[ $nick == centos9 || $nick == ol9 || $nick == rocky9 || $nick == rhel9 ]]; then + nick="rhel9" + elif [[ $nick == centos8 || $nick == ol8 || $nick == rocky8 ]]; then nick="rhel8" else nick="rhel7" diff --git a/sbin/install_clang.sh b/sbin/install_clang.sh new file mode 100755 index 000000000..53fa323de --- /dev/null +++ b/sbin/install_clang.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env sh + +export CWD=$(dirname `which "${0}"`) +export CLANG_VERSION=18 +export DEBIAN_FRONTEND=noninteractive + +wget https://apt.llvm.org/llvm.sh -O llvm.sh + +chmod u+x llvm.sh + +# expected to fail: +./llvm.sh $CLANG_VERSION + +apt-get install python3-lldb-18 --yes --force-yes + +./llvm.sh $CLANG_VERSION + +$CWD/update_clang_alternatives.sh $CLANG_VERSION 1 diff --git a/sbin/pack.sh b/sbin/pack.sh index 4f1c14f40..91d029c28 100755 --- a/sbin/pack.sh +++ b/sbin/pack.sh @@ -18,14 +18,17 @@ if [[ $1 == --help || $1 == help || $HELP == 1 ]]; then cat <<-END Generate RedisJSON distribution packages. - [ARGVARS...] pack.sh [--help|help] + [ARGVARS...] pack.sh [--help|help] [] Argument variables: - MODULE=path Path of module .so - RAMP=0|1 Build RAMP package DEPS=0|1 Build dependencies files SYM=0|1 Build debug symbols file + RELEASE=1 Generate "release" packages (artifacts/release/) + SNAPSHOT=1 Generate "shapshot" packages (artifacts/snapshot/) + + MODULE_NAME=name Module name (default: bf) + PACKAGE_NAME=name Package stem name BRANCH=name Branch name for snapshot packages WITH_GITSHA=1 Append Git SHA to shapshot package names @@ -33,7 +36,7 @@ if [[ $1 == --help || $1 == help || $HELP == 1 ]]; then RAMP_VARIANT=name RAMP variant (e.g. 
ramp-{name}.yml) ARTDIR=dir Directory in which packages are created (default: bin/artifacts) - + RAMP_YAML=path RAMP configuration file path RAMP_ARGS=args Extra arguments to RAMP @@ -53,8 +56,11 @@ OP="" # RLEC naming conventions -ARCH=$($READIES/bin/platform --arch) -[[ $ARCH == x64 ]] && ARCH=x86_64 +ARCH=$(uname -m) + +[[ $ARCH == x64 ]] && ARCH=x86_64 +[[ $ARCH == arm64 ]] && ARCH=aarch64 +[[ $ARCH == arm64v8 ]] && ARCH=aarch64 OS=$($READIES/bin/platform --os) [[ $OS == linux ]] && OS=Linux @@ -67,28 +73,31 @@ OSNICK=$($READIES/bin/platform --osnick) [[ $OSNICK == jammy ]] && OSNICK=ubuntu22.04 [[ $OSNICK == centos7 ]] && OSNICK=rhel7 [[ $OSNICK == centos8 ]] && OSNICK=rhel8 +[[ $OSNICK == centos9 ]] && OSNICK=rhel9 [[ $OSNICK == ol8 ]] && OSNICK=rhel8 [[ $OSNICK == rocky8 ]] && OSNICK=rhel8 +[[ $OSNICK == rocky9 ]] && OSNICK=rhel9 -[[ $OSNICK == bigsur ]] && OSNICK=catalina PLATFORM="$OS-$OSNICK-$ARCH" #---------------------------------------------------------------------------------------------- -if [[ -z $MODULE || ! -f $MODULE ]]; then - eprint "MODULE is not defined or does not refer to a file" - exit 1 -fi +MODULE="$1" RAMP=${RAMP:-1} DEPS=${DEPS:-1} SYM=${SYM:-1} +RELEASE=${RELEASE:-1} +SNAPSHOT=${SNAPSHOT:-1} + [[ -z $ARTDIR ]] && ARTDIR=bin/artifacts mkdir -p $ARTDIR $ARTDIR/snapshots ARTDIR=$(cd $ARTDIR && pwd) +#---------------------------------------------------------------------------------------------- + MODULE_NAME=${MODULE_NAME:-ReJSON} PACKAGE_NAME=rejson-oss @@ -102,24 +111,43 @@ pack_ramp() { cd $ROOT local stem=${PACKAGE_NAME}.${PLATFORM} + local stem_debug=${PACKAGE_NAME}.debug.${PLATFORM} + + if [[ $SNAPSHOT == 0 ]]; then + local verspec=${SEMVER}${VARIANT} + local packdir=. + local s3base="" + else + local verspec=${BRANCH}${VARIANT} + local packdir=snapshots + local s3base=snapshots/ + fi - local verspec=${SEMVER}${_VARIANT} - local fq_package=$stem.${verspec}.zip + local fq_package_debug=$stem_debug.${verspec}.zip - [[ ! 
-d $ARTDIR ]] && mkdir -p $ARTDIR + [[ ! -d $ARTDIR/$packdir ]] && mkdir -p $ARTDIR/$packdir - local packfile="$ARTDIR/$fq_package" + local packfile=$ARTDIR/$packdir/$fq_package + local packfile_debug=$ARTDIR/$packdir/$fq_package_debug local xtx_vars="" - local dep_fname="${PACKAGE_NAME}.${PLATFORM}.${verspec}.tgz" + for dep in $DEP_NAMES; do + eval "export NAME_${dep}=${PACKAGE_NAME}_${dep}" + local dep_fname="${PACKAGE_NAME}.${dep}.${PLATFORM}.${verspec}.tgz" + eval "export PATH_${dep}=${s3base}${dep_fname}" + local dep_sha256="$ARTDIR/$packdir/${dep_fname}.sha256" + eval "export SHA256_${dep}=$(cat $dep_sha256)" - if [[ -z $RAMP_YAML ]]; then - RAMP_YAML=$ROOT/ramp.yml + xtx_vars+=" -e NAME_$dep -e PATH_$dep -e SHA256_$dep" + done + + if [[ -n $RAMP_YAML ]]; then + RAMP_YAML="$(realpath $RAMP_YAML)" elif [[ -z $RAMP_VARIANT ]]; then - RAMP_YAML=$ROOT/ramp.yml + RAMP_YAML="$ROOT/pack/ramp.yml" else - RAMP_YAML=$ROOT/ramp${_RAMP_VARIANT}.yml + RAMP_YAML="$ROOT/pack/ramp${_RAMP_VARIANT}.yml" fi python3 $READIES/bin/xtx \ @@ -132,7 +160,7 @@ pack_ramp() { fi runn rm -f /tmp/ramp.fname $packfile - + # ROOT is required so ramp will detect the right git commit cd $ROOT runn @ <<-EOF @@ -154,18 +182,35 @@ pack_ramp() { exit 1 else local packname=`cat /tmp/ramp.fname` - echo "# Created $packname" + echo "# Created $(realpath $packname)" fi fi - cd $ARTDIR/snapshots - if [[ ! -z $BRANCH ]]; then - local snap_package=$stem.${BRANCH}${_VARIANT}.zip - ln -sf ../$fq_package $snap_package + if [[ -f $MODULE.debug ]]; then + runn @ <<-EOF + $RAMP_CMD pack -m /tmp/ramp.yml \ + $RAMP_ARGS \ + -n $MODULE_NAME \ + --verbose \ + --debug \ + --packname-file /tmp/ramp.fname \ + -o $packfile_debug \ + $MODULE.debug \ + >/tmp/ramp.err 2>&1 || true + EOF + + if [[ $NOP != 1 ]]; then + if [[ ! 
-e $packfile_debug ]]; then + eprint "Error generating RAMP file:" + >&2 cat /tmp/ramp.err + exit 1 + else + local packname=`cat /tmp/ramp.fname` + echo "# Created $(realpath $packname)" + fi + fi fi - local packname=`cat /tmp/ramp.fname` - echo "Created $packname" cd $ROOT } @@ -174,37 +219,47 @@ pack_ramp() { pack_deps() { local dep="$1" + cd $ROOT + local stem=${PACKAGE_NAME}.${dep}.${PLATFORM} - local verspec=${SEMVER}${_VARIANT} + local verspec=${SEMVER}${VARIANT} + local fq_package=$stem.${verspec}.tgz local depdir=$(cat $ARTDIR/$dep.dir) - - local fq_dep=$stem.${verspec}.tgz - local tar_path=$ARTDIR/$fq_dep + local tar_path=$ARTDIR/$fq_package local dep_prefix_dir=$(cat $ARTDIR/$dep.prefix) - - { cd $depdir ;\ - cat $ARTDIR/$dep.files | \ - xargs tar -c --sort=name --owner=root:0 --group=root:0 --mtime='UTC 1970-01-01' \ - --transform "s,^,$dep_prefix_dir," 2> /tmp/pack.err | \ - gzip -n - > $tar_path ; E=$?; } || true - rm -f $ARTDIR/$dep.prefix $ARTDIR/$dep.files $ARTDIR/$dep.dir - cd $ROOT - if [[ $E != 0 || -s /tmp/pack.err ]]; then - eprint "Error creating $tar_path:" - cat /tmp/pack.err >&2 - exit 1 + rm -f $tar_path + if [[ $NOP != 1 ]]; then + { cd $depdir ;\ + cat $ARTDIR/$dep.files | \ + xargs tar -c --sort=name --owner=root:0 --group=root:0 --mtime='UTC 1970-01-01' \ + --transform "s,^,$dep_prefix_dir," 2> /tmp/pack.err | \ + gzip -n - > $tar_path ; E=$?; } || true + if [[ ! -e $tar_path || -z $(tar tzf $tar_path) ]]; then + eprint "Count not create $tar_path. Aborting." 
+ rm -f $tar_path + exit 1 + fi + else + runn @ <<-EOF + cd $depdir + cat $ARTDIR/$dep.files | \ + xargs tar -c --sort=name --owner=root:0 --group=root:0 --mtime='UTC 1970-01-01' \ + --transform "s,^,$dep_prefix_dir," 2> /tmp/pack.err | \ + gzip -n - > $tar_path ; E=$?; } || true + EOF fi runn @ <<-EOF - sha256sum $tar_path | awk '{print $1}' > $tar_path.sha256 + sha256sum $tar_path | gawk '{print $1}' > $tar_path.sha256 EOF + mkdir -p $ARTDIR/snapshots cd $ARTDIR/snapshots if [[ -n $BRANCH ]]; then - local snap_dep=$stem.${BRANCH}${_VARIANT}.tgz - runn ln -sf ../$fq_dep $snap_dep - runn ln -sf ../$fq_dep.sha256 $snap_dep.sha256 + local snap_package=$stem.${BRANCH}${VARIANT}.tgz + runn ln -sf ../$fq_package $snap_package + runn ln -sf ../$fq_package.sha256 $snap_package.sha256 fi cd $ROOT @@ -212,18 +267,6 @@ pack_deps() { #---------------------------------------------------------------------------------------------- -prepare_symbols_dep() { - if [[ ! -f $MODULE.debug ]]; then return 0; fi - echo "# Preparing debug symbols dependencies ..." - echo $(cd "$(dirname $MODULE)" && pwd) > $ARTDIR/debug.dir - echo $(basename $MODULE.debug) > $ARTDIR/debug.files - echo "" > $ARTDIR/debug.prefix - pack_deps debug - echo "# Done." 
-} - -#---------------------------------------------------------------------------------------------- - NUMVER="$(NUMERIC=1 $SBIN/getver)" SEMVER="$($SBIN/getver)" @@ -236,7 +279,16 @@ fi #---------------------------------------------------------------------------------------------- +git_config_add_ifnx() { + local key="$1" + local val="$2" + if [[ -z $(git config --global --get $key $val) ]]; then + git config --global --add $key $val + fi +} + if [[ -z $BRANCH ]]; then + git_config_add_ifnx safe.directory $ROOT BRANCH=$(git rev-parse --abbrev-ref HEAD) # this happens of detached HEAD if [[ $BRANCH == HEAD ]]; then @@ -245,21 +297,33 @@ if [[ -z $BRANCH ]]; then fi BRANCH=${BRANCH//[^A-Za-z0-9._-]/_} if [[ $WITH_GITSHA == 1 ]]; then + git_config_add_ifnx safe.directory $ROOT GIT_COMMIT=$(git rev-parse --short HEAD) BRANCH="${BRANCH}-${GIT_COMMIT}" fi -export BRANCH + +#---------------------------------------------------------------------------------------------- + +RELEASE_ramp=${PACKAGE_NAME}.$OS-$OSNICK-$ARCH.$SEMVER${VARIANT}.zip +SNAPSHOT_ramp=${PACKAGE_NAME}.$OS-$OSNICK-$ARCH.${BRANCH}${VARIANT}.zip + +RELEASE_deps= +SNAPSHOT_deps= +for dep in $DEP_NAMES; do + RELEASE_deps+=" ${PACKAGE_NAME}.${dep}.$OS-$OSNICK-$ARCH.$SEMVER${VARIANT}.tgz" + SNAPSHOT_deps+=" ${PACKAGE_NAME}.${dep}.$OS-$OSNICK-$ARCH.${BRANCH}${VARIANT}.tgz" +done #---------------------------------------------------------------------------------------------- if [[ $JUST_PRINT == 1 ]]; then if [[ $RAMP == 1 ]]; then - echo "${PACKAGE_NAME}.${OS}-${OSNICK}-${ARCH}.${SEMVER}${VARIANT}.zip" + [[ $RELEASE == 1 ]] && echo $RELEASE_ramp + [[ $SNAPSHOT == 1 ]] && echo $SNAPSHOT_ramp fi if [[ $DEPS == 1 ]]; then - for dep in $DEP_NAMES; do - echo "${PACKAGE_NAME}.${dep}.${OS}-${OSNICK}-${ARCH}.${SEMVER}${VARIANT}.tgz" - done + [[ $RELEASE == 1 ]] && echo $RELEASE_deps + [[ $SNAPSHOT == 1 ]] && echo $SNAPSHOT_deps fi exit 0 fi @@ -268,14 +332,19 @@ fi mkdir -p $ARTDIR -if [[ $DEPS == 1 ]]; then - echo "# 
Building dependencies ..." +if [[ $DEPS == 1 && -n $DEP_NAMES ]]; then + # set up `debug` dep + dirname "$(realpath "$MODULE")" > "$ARTDIR/debug.dir" + echo "$(basename "$(realpath "$MODULE")").debug" > "$ARTDIR/debug.files" + echo "" > $ARTDIR/debug.prefix - [[ $SYM == 1 ]] && prepare_symbols_dep + echo "# Building dependencies ..." for dep in $DEP_NAMES; do - echo "# $dep ..." - pack_deps $dep + if [[ $OS != macos ]]; then + echo "# $dep ..." + pack_deps $dep + fi done echo "# Done." fi @@ -291,7 +360,14 @@ if [[ $RAMP == 1 ]]; then fi echo "# Building RAMP $RAMP_VARIANT files ..." - pack_ramp + + [[ -z $MODULE ]] && { eprint "Nothing to pack. Aborting."; exit 1; } + [[ ! -f $MODULE ]] && { eprint "$MODULE does not exist. Aborting."; exit 1; } + MODULE=$(realpath $MODULE) + + [[ $RELEASE == 1 ]] && SNAPSHOT=0 pack_ramp + [[ $SNAPSHOT == 1 ]] && pack_ramp + echo "# Done." fi diff --git a/sbin/system-setup.py b/sbin/system-setup.py index 3f32935e7..629ef4e60 100755 --- a/sbin/system-setup.py +++ b/sbin/system-setup.py @@ -32,7 +32,8 @@ def debian_compat(self): self.run(f"{READIES}/bin/getgcc") def redhat_compat(self): - self.install("redhat-lsb-core") + if self.dist == "centos" and self.os_version[0] < 9: + self.install("redhat-lsb-core") self.install("which") self.run(f"{READIES}/bin/getgcc --modern") diff --git a/sbin/update_clang_alternatives.sh b/sbin/update_clang_alternatives.sh new file mode 100755 index 000000000..ec140e224 --- /dev/null +++ b/sbin/update_clang_alternatives.sh @@ -0,0 +1,88 @@ +#!/usr/bin/env bash + +# --slave /usr/bin/$1 $1 /usr/bin/$1-\${version} \\ + +function register_clang_version { + local version=$1 + local priority=$2 + + update-alternatives \ + --verbose \ + --install /usr/bin/llvm-config llvm-config /usr/bin/llvm-config-${version} ${priority} \ + --slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-${version} \ + --slave /usr/bin/llvm-as llvm-as /usr/bin/llvm-as-${version} \ + --slave /usr/bin/llvm-bcanalyzer llvm-bcanalyzer 
/usr/bin/llvm-bcanalyzer-${version} \ + --slave /usr/bin/llvm-c-test llvm-c-test /usr/bin/llvm-c-test-${version} \ + --slave /usr/bin/llvm-cat llvm-cat /usr/bin/llvm-cat-${version} \ + --slave /usr/bin/llvm-cfi-verify llvm-cfi-verify /usr/bin/llvm-cfi-verify-${version} \ + --slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-${version} \ + --slave /usr/bin/llvm-cvtres llvm-cvtres /usr/bin/llvm-cvtres-${version} \ + --slave /usr/bin/llvm-cxxdump llvm-cxxdump /usr/bin/llvm-cxxdump-${version} \ + --slave /usr/bin/llvm-cxxfilt llvm-cxxfilt /usr/bin/llvm-cxxfilt-${version} \ + --slave /usr/bin/llvm-diff llvm-diff /usr/bin/llvm-diff-${version} \ + --slave /usr/bin/llvm-dis llvm-dis /usr/bin/llvm-dis-${version} \ + --slave /usr/bin/llvm-dlltool llvm-dlltool /usr/bin/llvm-dlltool-${version} \ + --slave /usr/bin/llvm-dwarfdump llvm-dwarfdump /usr/bin/llvm-dwarfdump-${version} \ + --slave /usr/bin/llvm-dwp llvm-dwp /usr/bin/llvm-dwp-${version} \ + --slave /usr/bin/llvm-exegesis llvm-exegesis /usr/bin/llvm-exegesis-${version} \ + --slave /usr/bin/llvm-extract llvm-extract /usr/bin/llvm-extract-${version} \ + --slave /usr/bin/llvm-lib llvm-lib /usr/bin/llvm-lib-${version} \ + --slave /usr/bin/llvm-link llvm-link /usr/bin/llvm-link-${version} \ + --slave /usr/bin/llvm-lto llvm-lto /usr/bin/llvm-lto-${version} \ + --slave /usr/bin/llvm-lto2 llvm-lto2 /usr/bin/llvm-lto2-${version} \ + --slave /usr/bin/llvm-mc llvm-mc /usr/bin/llvm-mc-${version} \ + --slave /usr/bin/llvm-mca llvm-mca /usr/bin/llvm-mca-${version} \ + --slave /usr/bin/llvm-modextract llvm-modextract /usr/bin/llvm-modextract-${version} \ + --slave /usr/bin/llvm-mt llvm-mt /usr/bin/llvm-mt-${version} \ + --slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-${version} \ + --slave /usr/bin/llvm-objcopy llvm-objcopy /usr/bin/llvm-objcopy-${version} \ + --slave /usr/bin/llvm-objdump llvm-objdump /usr/bin/llvm-objdump-${version} \ + --slave /usr/bin/llvm-opt-report llvm-opt-report /usr/bin/llvm-opt-report-${version} \ + 
--slave /usr/bin/llvm-pdbutil llvm-pdbutil /usr/bin/llvm-pdbutil-${version} \ + --slave /usr/bin/llvm-PerfectShuffle llvm-PerfectShuffle /usr/bin/llvm-PerfectShuffle-${version} \ + --slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-${version} \ + --slave /usr/bin/llvm-ranlib llvm-ranlib /usr/bin/llvm-ranlib-${version} \ + --slave /usr/bin/llvm-rc llvm-rc /usr/bin/llvm-rc-${version} \ + --slave /usr/bin/llvm-readelf llvm-readelf /usr/bin/llvm-readelf-${version} \ + --slave /usr/bin/llvm-readobj llvm-readobj /usr/bin/llvm-readobj-${version} \ + --slave /usr/bin/llvm-rtdyld llvm-rtdyld /usr/bin/llvm-rtdyld-${version} \ + --slave /usr/bin/llvm-size llvm-size /usr/bin/llvm-size-${version} \ + --slave /usr/bin/llvm-split llvm-split /usr/bin/llvm-split-${version} \ + --slave /usr/bin/llvm-stress llvm-stress /usr/bin/llvm-stress-${version} \ + --slave /usr/bin/llvm-strings llvm-strings /usr/bin/llvm-strings-${version} \ + --slave /usr/bin/llvm-strip llvm-strip /usr/bin/llvm-strip-${version} \ + --slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-${version} \ + --slave /usr/bin/llvm-tblgen llvm-tblgen /usr/bin/llvm-tblgen-${version} \ + --slave /usr/bin/llvm-undname llvm-undname /usr/bin/llvm-undname-${version} \ + --slave /usr/bin/llvm-xray llvm-xray /usr/bin/llvm-xray-${version} + + + update-alternatives \ + --verbose \ + --install /usr/bin/clang clang /usr/bin/clang-${version} ${priority} \ + --slave /usr/bin/clang++ clang++ /usr/bin/clang++-${version} \ + --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-${version} \ + --slave /usr/bin/clang-cpp clang-cpp /usr/bin/clang-cpp-${version} \ + --slave /usr/bin/clang-cl clang-cl /usr/bin/clang-cl-${version} \ + --slave /usr/bin/clangd clangd /usr/bin/clangd-${version} \ + --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-${version} \ + --slave /usr/bin/clang-check clang-check /usr/bin/clang-check-${version} \ + --slave /usr/bin/clang-query clang-query 
/usr/bin/clang-query-${version} \ + --slave /usr/bin/asan_symbolize asan_symbolize /usr/bin/asan_symbolize-${version} \ + --slave /usr/bin/bugpoint bugpoint /usr/bin/bugpoint-${version} \ + --slave /usr/bin/dsymutil dsymutil /usr/bin/dsymutil-${version} \ + --slave /usr/bin/lld lld /usr/bin/lld-${version} \ + --slave /usr/bin/ld.lld ld.lld /usr/bin/ld.lld-${version} \ + --slave /usr/bin/lld-link lld-link /usr/bin/lld-link-${version} \ + --slave /usr/bin/llc llc /usr/bin/llc-${version} \ + --slave /usr/bin/lli lli /usr/bin/lli-${version} \ + --slave /usr/bin/obj2yaml obj2yaml /usr/bin/obj2yaml-${version} \ + --slave /usr/bin/opt opt /usr/bin/opt-${version} \ + --slave /usr/bin/sanstats sanstats /usr/bin/sanstats-${version} \ + --slave /usr/bin/verify-uselistorder verify-uselistorder /usr/bin/verify-uselistorder-${version} \ + --slave /usr/bin/wasm-ld wasm-ld /usr/bin/wasm-ld-${version} \ + --slave /usr/bin/yaml2obj yaml2obj /usr/bin/yaml2obj-${version} + +} + +register_clang_version $1 $2 diff --git a/sbin/upload-artifacts b/sbin/upload-artifacts index e6ec59ed4..be4f4620a 100755 --- a/sbin/upload-artifacts +++ b/sbin/upload-artifacts @@ -6,6 +6,7 @@ ROOT=$(cd $HERE/.. && pwd) READIES=$ROOT/deps/readies . 
$READIES/shibumi/defs +set -e #---------------------------------------------------------------------------------------------- if [[ $1 == --help || $1 == help || $HELP == 1 ]]; then @@ -21,7 +22,6 @@ if [[ $1 == --help || $1 == help || $HELP == 1 ]]; then RELEASE=1 Upload release artifacts STAGING=1 Upload into staging area - FORCE=1 Allow uploading outside of CI NOP=1 No operation VERBOSE=1 Show artifacts details HELP=1 Show help @@ -34,22 +34,23 @@ fi ARCH=$($READIES/bin/platform --arch) [[ $ARCH == x64 ]] && ARCH="x86_64" +[[ $ARCH == arm64v8 ]] && ARCH="aarch64" OS=$($READIES/bin/platform --os) [[ $OS == linux ]] && OS="Linux" [[ -z $OSNICK ]] && OSNICK=$($READIES/bin/platform --osnick) -[[ $OSNICK == trusty ]] && OSNICK=ubuntu14.04 -[[ $OSNICK == xenial ]] && OSNICK=ubuntu16.04 -[[ $OSNICK == bionic ]] && OSNICK=ubuntu18.04 -[[ $OSNICK == focal ]] && OSNICK=ubuntu20.04 -[[ $OSNICK == jammy ]] && OSNICK=ubuntu22.04 +[[ $OSNICK == trusty ]] && OSNICK=ubuntu14.04 +[[ $OSNICK == xenial ]] && OSNICK=ubuntu16.04 +[[ $OSNICK == bionic ]] && OSNICK=ubuntu18.04 +[[ $OSNICK == focal ]] && OSNICK=ubuntu20.04 +[[ $OSNICK == jammy ]] && OSNICK=ubuntu22.04 [[ $OSNICK == centos7 ]] && OSNICK=rhel7 [[ $OSNICK == centos8 ]] && OSNICK=rhel8 -[[ $OSNICK == ol8 ]] && OSNICK=rhel8 -[[ $OSNICK == rocky8 ]] && OSNICK=rhel8 - -[[ $OSNICK == bigsur ]] && OSNICK=catalina +[[ $OSNICK == centos9 ]] && OSNICK=rhel9 +[[ $OSNICK == ol8 ]] && OSNICK=rhel8 +[[ $OSNICK == rocky8 ]] && OSNICK=rhel8 +[[ $OSNICK == rocky9 ]] && OSNICK=rhel9 PLATFORM="$OS-$OSNICK-$ARCH" @@ -65,11 +66,6 @@ else fi if [[ $FORCE != 1 ]]; then - if [[ -z $CIRCLECI ]]; then - eprint "Cannot upload outside of CircleCI. Override with FORCE=1." - exit 1 - fi - if [[ -z $AWS_ACCESS_KEY_ID || -z $AWS_SECRET_ACCESS_KEY ]]; then eprint "No credentials for S3 upload." 
exit 1 @@ -85,7 +81,10 @@ else fi cd artifacts${MAYBE_SNAP} -[[ $VERBOSE == 1 ]] && du -ah --apparent-size * +if du --help | grep -q -- --apparent-size; then + DU_ARGS='--apparent-size' +fi +[[ $VERBOSE == 1 ]] && du -ah ${DU_ARGS} * #---------------------------------------------------------------------------------------------- @@ -93,15 +92,17 @@ s3_upload_file() { local file="$1" local s3_dir="$2" [[ $s3_dir != */ ]] && s3_dir="${s3_dir}/" - + $OP aws s3 cp $file $s3_dir --acl public-read --no-progress } s3_ls() { local s3_dir="$1" [[ $s3_dir != */ ]] && s3_dir="${s3_dir}/" - + + echo "::group::S3 ls $s3_dir" $OP aws s3 ls $s3_dir + echo "::endgroup::" } s3_upload() { diff --git a/tests/benchmarks/defaults.yml b/tests/benchmarks/defaults.yml index a2d2bbd19..b52de40f2 100644 --- a/tests/benchmarks/defaults.yml +++ b/tests/benchmarks/defaults.yml @@ -1,4 +1,10 @@ version: 0.2 + +remote: + - type: oss-standalone + - setup: redisearch-m5d + - spot_instance: oss-redisearch-m5-spot-instances + exporter: redistimeseries: break_by: diff --git a/tests/benchmarks/json_arrappend_geojson.yml b/tests/benchmarks/json_arrappend_geojson.yml index 45e38832f..0c05ca729 100644 --- a/tests/benchmarks/json_arrappend_geojson.yml +++ b/tests/benchmarks/json_arrappend_geojson.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_arrappend_geojson" description: "JSON.ARRAPPEND path:sonde:foo $.properties.coordinateProperties || https://github.com/RedisJSON/RedisJSON/issues/295" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/redisjson-gh295-dump.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_ResultSet.totalResultsAvailable_jsonsl-yahoo2_json.yml b/tests/benchmarks/json_get_ResultSet.totalResultsAvailable_jsonsl-yahoo2_json.yml index 1619c36bc..c3c1bd29f 100644 --- a/tests/benchmarks/json_get_ResultSet.totalResultsAvailable_jsonsl-yahoo2_json.yml +++ 
b/tests/benchmarks/json_get_ResultSet.totalResultsAvailable_jsonsl-yahoo2_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_ResultSet.totalResultsAvailable_jsonsl-yahoo2_json" description: "JSON.GET jsonsl-yahoo2 $.ResultSet.totalResultsAvailable || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_[0]_jsonsl-1.yml b/tests/benchmarks/json_get_[0]_jsonsl-1.yml index b6081f50e..715f6ea5d 100644 --- a/tests/benchmarks/json_get_[0]_jsonsl-1.yml +++ b/tests/benchmarks/json_get_[0]_jsonsl-1.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_[0]_jsonsl-1" description: "JSON.GET jsonsl-1 $.[0] || {jsonsl-1.json size: 1.4 KB} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_[7]_jsonsl-1.yml b/tests/benchmarks/json_get_[7]_jsonsl-1.yml index 7e89ca405..68da37b6f 100644 --- a/tests/benchmarks/json_get_[7]_jsonsl-1.yml +++ b/tests/benchmarks/json_get_[7]_jsonsl-1.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_[7]_jsonsl-1" description: "JSON.GET jsonsl-1 $.[7] || {jsonsl-1.json size: 1.4 KB} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_[8].zero_jsonsl-1.yml b/tests/benchmarks/json_get_[8].zero_jsonsl-1.yml index 4045f38a2..e6d6632c2 100644 --- a/tests/benchmarks/json_get_[8].zero_jsonsl-1.yml +++ b/tests/benchmarks/json_get_[8].zero_jsonsl-1.yml @@ -1,9 +1,7 @@ version: 0.2 name: 
"json_get_[8].zero_jsonsl-1" description: "JSON.GET jsonsl-1 $.[8].0 || {jsonsl-1.json size: 1.4 KB} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_[web-app].servlet[0][servlet-name]_json-parser-0000.yml b/tests/benchmarks/json_get_[web-app].servlet[0][servlet-name]_json-parser-0000.yml index bedd2bea2..0c5f55b0d 100644 --- a/tests/benchmarks/json_get_[web-app].servlet[0][servlet-name]_json-parser-0000.yml +++ b/tests/benchmarks/json_get_[web-app].servlet[0][servlet-name]_json-parser-0000.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_[web-app].servlet[0][servlet-name]_json-parser-0000" description: "JSON.GET json-parser-0000 $[web-app].servlet[0][servlet-name] {json-parser-0000.json size: 3.5K} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_[web-app].servlet[0]_json-parser-0000.yml b/tests/benchmarks/json_get_[web-app].servlet[0]_json-parser-0000.yml index e60d63562..c46139bfe 100644 --- a/tests/benchmarks/json_get_[web-app].servlet[0]_json-parser-0000.yml +++ b/tests/benchmarks/json_get_[web-app].servlet[0]_json-parser-0000.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_[web-app].servlet[0]_json-parser-0000" description: "JSON.GET json-parser-0000 $[web-app].servlet[0] {json-parser-0000.json size: 3.5K} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git 
a/tests/benchmarks/json_get_[web-app].servlet_json-parser-0000.yml b/tests/benchmarks/json_get_[web-app].servlet_json-parser-0000.yml index 568f2805c..dbf78c0e0 100644 --- a/tests/benchmarks/json_get_[web-app].servlet_json-parser-0000.yml +++ b/tests/benchmarks/json_get_[web-app].servlet_json-parser-0000.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_[web-app].servlet_json-parser-0000" description: "JSON.GET json-parser-0000 $[web-app].servlet {json-parser-0000.json size: 3.5K} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_array_of_docs[1]_pass_100_json.yml b/tests/benchmarks/json_get_array_of_docs[1]_pass_100_json.yml index f78e06d52..6fb2ce91f 100644 --- a/tests/benchmarks/json_get_array_of_docs[1]_pass_100_json.yml +++ b/tests/benchmarks/json_get_array_of_docs[1]_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_array_of_docs[1]_pass_100_json" description: "JSON.GET pass-100 $.array_of_docs[1] || {Full document: pass-100.json https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_array_of_docs[1]sclr_pass_100_json.yml b/tests/benchmarks/json_get_array_of_docs[1]sclr_pass_100_json.yml index 3a124b6e2..77b926a83 100644 --- a/tests/benchmarks/json_get_array_of_docs[1]sclr_pass_100_json.yml +++ b/tests/benchmarks/json_get_array_of_docs[1]sclr_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_array_of_docs[1]sclr_pass_100_json" description: "JSON.GET pass-100 $.array_of_docs[1] || {Full document: pass-100.json https://oss.redislabs.com/redisjson/performance/" -remote: - - type: 
oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_array_of_docs_pass_100_json.yml b/tests/benchmarks/json_get_array_of_docs_pass_100_json.yml index 4c1149586..d312b1dea 100644 --- a/tests/benchmarks/json_get_array_of_docs_pass_100_json.yml +++ b/tests/benchmarks/json_get_array_of_docs_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_array_of_docs_pass_100_json" description: "JSON.GET pass-100 $.array_of_docs || {Full document: pass-100.json https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_fulldoc_json-parser-0000.yml b/tests/benchmarks/json_get_fulldoc_json-parser-0000.yml index 01df63a78..a1f1284bb 100644 --- a/tests/benchmarks/json_get_fulldoc_json-parser-0000.yml +++ b/tests/benchmarks/json_get_fulldoc_json-parser-0000.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_fulldoc_json-parser-0000" description: "JSON.GET json-parser-0000 $ {json-parser-0000.json size: 3.5K} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_fulldoc_jsonsl-1.yml b/tests/benchmarks/json_get_fulldoc_jsonsl-1.yml index 2268ec034..c55660a26 100644 --- a/tests/benchmarks/json_get_fulldoc_jsonsl-1.yml +++ b/tests/benchmarks/json_get_fulldoc_jsonsl-1.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_fulldoc_jsonsl-1" description: "JSON.GET jsonsl-1 $ || {jsonsl-1.json size: 1.4 KB} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: 
oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_fulldoc_jsonsl-yahoo2_json.yml b/tests/benchmarks/json_get_fulldoc_jsonsl-yahoo2_json.yml index da0820a53..f022b5f18 100644 --- a/tests/benchmarks/json_get_fulldoc_jsonsl-yahoo2_json.yml +++ b/tests/benchmarks/json_get_fulldoc_jsonsl-yahoo2_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_fulldoc_jsonsl-yahoo2_json" description: "JSON.GET jsonsl-yahoo2 $ || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_fulldoc_jsonsl-yelp_json.yml b/tests/benchmarks/json_get_fulldoc_jsonsl-yelp_json.yml index 08bfbe190..8d952ae4e 100644 --- a/tests/benchmarks/json_get_fulldoc_jsonsl-yelp_json.yml +++ b/tests/benchmarks/json_get_fulldoc_jsonsl-yelp_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_fulldoc_jsonsl-yelp_json" description: "JSON.GET jsonsl-yelp $ || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_fulldoc_pass_100_json.yml b/tests/benchmarks/json_get_fulldoc_pass_100_json.yml index 684b68b93..1634ae493 100644 --- a/tests/benchmarks/json_get_fulldoc_pass_100_json.yml +++ b/tests/benchmarks/json_get_fulldoc_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_fulldoc_pass_100_json" description: "JSON.GET pass-100 $ || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: 
"https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_key_empty.yml b/tests/benchmarks/json_get_key_empty.yml index d54637617..4d0995ca5 100644 --- a/tests/benchmarks/json_get_key_empty.yml +++ b/tests/benchmarks/json_get_key_empty.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_key_empty" description: "JSON.GET key $ || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_message.code_jsonsl-yelp_json.yml b/tests/benchmarks/json_get_message.code_jsonsl-yelp_json.yml index dfb581a64..527601c4a 100644 --- a/tests/benchmarks/json_get_message.code_jsonsl-yelp_json.yml +++ b/tests/benchmarks/json_get_message.code_jsonsl-yelp_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_message.code_jsonsl-yelp_json" description: "JSON.GET jsonsl-yelp $.message.code || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_sclr_pass_100_json.yml b/tests/benchmarks/json_get_sclr_pass_100_json.yml index 97aec8e37..e5c1b398c 100644 --- a/tests/benchmarks/json_get_sclr_pass_100_json.yml +++ b/tests/benchmarks/json_get_sclr_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_sclr_pass_100_json" description: "JSON.GET pass-100 $.sclr || {pass-100.json size: 380 B} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git 
a/tests/benchmarks/json_get_sub_doc.sclr_pass_100_json.yml b/tests/benchmarks/json_get_sub_doc.sclr_pass_100_json.yml index 93212c8b5..5fa298308 100644 --- a/tests/benchmarks/json_get_sub_doc.sclr_pass_100_json.yml +++ b/tests/benchmarks/json_get_sub_doc.sclr_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_sub_doc.sclr_pass_100_json" description: "JSON.GET pass-100 $.sub_doc.sclr || {Full document: pass-100.json 380B} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_get_sub_doc_pass_100_json.yml b/tests/benchmarks/json_get_sub_doc_pass_100_json.yml index 3b0f35eec..2131ba662 100644 --- a/tests/benchmarks/json_get_sub_doc_pass_100_json.yml +++ b/tests/benchmarks/json_get_sub_doc_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_get_sub_doc_pass_100_json" description: "JSON.GET pass-100 $.sub_doc || {Full document: pass-100.json} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_numincrby_num_1.yml b/tests/benchmarks/json_numincrby_num_1.yml index 8a8baf6f9..942190acd 100644 --- a/tests/benchmarks/json_numincrby_num_1.yml +++ b/tests/benchmarks/json_numincrby_num_1.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_numincrby_num_1" description: "JSON.NUMINCRBY num $ 1 || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_nummultby_num_2.yml 
b/tests/benchmarks/json_nummultby_num_2.yml index c8f8e1d59..37a635b46 100644 --- a/tests/benchmarks/json_nummultby_num_2.yml +++ b/tests/benchmarks/json_nummultby_num_2.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_nummultby_num_2" description: "JSON.NUMMULTBY num $ 2 || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_recursive_descent_with_filter_uid_issue674.yml b/tests/benchmarks/json_recursive_descent_with_filter_uid_issue674.yml index 263939b26..765a4c7ee 100644 --- a/tests/benchmarks/json_recursive_descent_with_filter_uid_issue674.yml +++ b/tests/benchmarks/json_recursive_descent_with_filter_uid_issue674.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_recursive_descent_with_filter_uid_issue674" description: "JSON.GET test '$..[?(@.uid==1198)].MD5ModelUID' || {issue674.json size: 618 KB} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.doc_674.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_ResultSet.totalResultsAvailable_1_jsonsl-yahoo2_json.yml b/tests/benchmarks/json_set_ResultSet.totalResultsAvailable_1_jsonsl-yahoo2_json.yml index 74a7a879a..bc3011ebc 100644 --- a/tests/benchmarks/json_set_ResultSet.totalResultsAvailable_1_jsonsl-yahoo2_json.yml +++ b/tests/benchmarks/json_set_ResultSet.totalResultsAvailable_1_jsonsl-yahoo2_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_ResultSet.totalResultsAvailable_1_jsonsl-yahoo2_json" description: "JSON.SET jsonsl-yahoo2 $.ResultSet.totalResultsAvailable 1 || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: 
"https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_[0]foo_jsonsl-1.yml b/tests/benchmarks/json_set_[0]foo_jsonsl-1.yml index 3fdbc7b98..9966d8600 100644 --- a/tests/benchmarks/json_set_[0]foo_jsonsl-1.yml +++ b/tests/benchmarks/json_set_[0]foo_jsonsl-1.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_[0]foo_jsonsl-1" description: "JSON.SET jsonsl-1 $.[0] foo || {jsonsl-1.json size: 1.4 KB} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_[web-app].servlet[0][servlet-name]_bar_json-parser-0000.yml b/tests/benchmarks/json_set_[web-app].servlet[0][servlet-name]_bar_json-parser-0000.yml index 2492bc99e..fd4497c4d 100644 --- a/tests/benchmarks/json_set_[web-app].servlet[0][servlet-name]_bar_json-parser-0000.yml +++ b/tests/benchmarks/json_set_[web-app].servlet[0][servlet-name]_bar_json-parser-0000.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_[web-app].servlet[0][servlet-name]_bar_json-parser-0000" description: "JSON.SET json-parser-0000 $.[web-app].servlet[0][servlet-name] [bar] {json-parser-0000.json size: 3.5K} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_fulldoc_api_replies_q1_google_autocomplete.yml b/tests/benchmarks/json_set_fulldoc_api_replies_q1_google_autocomplete.yml index 6de9acf22..085d6001a 100644 --- a/tests/benchmarks/json_set_fulldoc_api_replies_q1_google_autocomplete.yml +++ b/tests/benchmarks/json_set_fulldoc_api_replies_q1_google_autocomplete.yml @@ -1,9 +1,7 @@ version: 0.2 name: 
"json_set_fulldoc_api_replies_q1_google_autocomplete" description: "JSON.SET of full docs using as data the API replies of common services on the internet" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_fulldoc_api_replies_q2_gmaps_areatraffic.yml b/tests/benchmarks/json_set_fulldoc_api_replies_q2_gmaps_areatraffic.yml index 29c168e53..6d12234c4 100644 --- a/tests/benchmarks/json_set_fulldoc_api_replies_q2_gmaps_areatraffic.yml +++ b/tests/benchmarks/json_set_fulldoc_api_replies_q2_gmaps_areatraffic.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_fulldoc_api_replies_q2_gmaps_areatraffic" description: "JSON.SET of full docs using as data the API replies of common services on the internet" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_fulldoc_api_replies_q3_gmaps_passiveassist.yml b/tests/benchmarks/json_set_fulldoc_api_replies_q3_gmaps_passiveassist.yml index 833bee450..825855f17 100644 --- a/tests/benchmarks/json_set_fulldoc_api_replies_q3_gmaps_passiveassist.yml +++ b/tests/benchmarks/json_set_fulldoc_api_replies_q3_gmaps_passiveassist.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_fulldoc_api_replies_q3_gmaps_passiveassist" description: "JSON.SET of full docs using as data the API replies of common services on the internet" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_fulldoc_api_replies_q4_gmaps_assist.yml b/tests/benchmarks/json_set_fulldoc_api_replies_q4_gmaps_assist.yml index 1bfe2e078..80c9d966c 100644 --- a/tests/benchmarks/json_set_fulldoc_api_replies_q4_gmaps_assist.yml +++ b/tests/benchmarks/json_set_fulldoc_api_replies_q4_gmaps_assist.yml @@ -1,9 +1,7 @@ version: 0.2 name: 
"json_set_fulldoc_api_replies_q4_gmaps_assist" description: "JSON.SET of full docs using as data the API replies of common services on the internet" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_fulldoc_api_replies_q5_gmaps_place.yml b/tests/benchmarks/json_set_fulldoc_api_replies_q5_gmaps_place.yml index 7b5f41251..836390c2f 100644 --- a/tests/benchmarks/json_set_fulldoc_api_replies_q5_gmaps_place.yml +++ b/tests/benchmarks/json_set_fulldoc_api_replies_q5_gmaps_place.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_fulldoc_api_replies_q5_gmaps_place" description: "JSON.SET of full docs using as data the API replies of common services on the internet" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_fulldoc_pass-json-parser-0000.yml b/tests/benchmarks/json_set_fulldoc_pass-json-parser-0000.yml index 625848572..9815c9180 100644 --- a/tests/benchmarks/json_set_fulldoc_pass-json-parser-0000.yml +++ b/tests/benchmarks/json_set_fulldoc_pass-json-parser-0000.yml @@ -1,8 +1,6 @@ version: 0.2 name: "json_set_fulldoc_pass-json-parser-0000" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_fulldoc_pass_100_json.yml b/tests/benchmarks/json_set_fulldoc_pass_100_json.yml index e6d17d5cc..709bce4ed 100644 --- a/tests/benchmarks/json_set_fulldoc_pass_100_json.yml +++ b/tests/benchmarks/json_set_fulldoc_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_fulldoc_pass_100_json" description: "JSON.SET pass-100 $ {pass-100.json size: 380 B} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff 
--git a/tests/benchmarks/json_set_fulldoc_yahoo2.yml b/tests/benchmarks/json_set_fulldoc_yahoo2.yml index 3548a4ac0..031986bc6 100644 --- a/tests/benchmarks/json_set_fulldoc_yahoo2.yml +++ b/tests/benchmarks/json_set_fulldoc_yahoo2.yml @@ -1,8 +1,6 @@ version: 0.2 name: "json_set_fulldoc_yahoo2" -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_set_key_empty.yml b/tests/benchmarks/json_set_key_empty.yml index fa284f6dd..b9753e592 100644 --- a/tests/benchmarks/json_set_key_empty.yml +++ b/tests/benchmarks/json_set_key_empty.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_key_empty" description: "JSON.SET key $ {} || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_message.code_1_jsonsl-yelp_json.yml b/tests/benchmarks/json_set_message.code_1_jsonsl-yelp_json.yml index f0366cabb..3d94e6b7f 100644 --- a/tests/benchmarks/json_set_message.code_1_jsonsl-yelp_json.yml +++ b/tests/benchmarks/json_set_message.code_1_jsonsl-yelp_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_message.code_1_jsonsl-yelp_json" description: "JSON.SET jsonsl-yelp $.message.code 1 || https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_num_0.yml b/tests/benchmarks/json_set_num_0.yml index 79078063e..0da905cde 100644 --- a/tests/benchmarks/json_set_num_0.yml +++ b/tests/benchmarks/json_set_num_0.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_num_0" description: "JSON.SET num $ 0 || 
https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_sclr_1_pass_100_json.yml b/tests/benchmarks/json_set_sclr_1_pass_100_json.yml index 258bc69cb..690b33b28 100644 --- a/tests/benchmarks/json_set_sclr_1_pass_100_json.yml +++ b/tests/benchmarks/json_set_sclr_1_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_sclr_1_pass_100_json" description: "JSON.SET pass-100 $.sclr 1 || {pass-100.json size: 380 B} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_set_sclr_pass_100_json.yml b/tests/benchmarks/json_set_sclr_pass_100_json.yml index 69ae03cc3..ad1291b0f 100644 --- a/tests/benchmarks/json_set_sclr_pass_100_json.yml +++ b/tests/benchmarks/json_set_sclr_pass_100_json.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_set_sclr_pass_100_json" description: "JSON.SET pass-100 $.sclr 1 || {pass-100.json size: 380 B} https://oss.redislabs.com/redisjson/performance/" -remote: - - type: oss-standalone - - setup: redisearch-m5d + dbconfig: - dataset: "https://s3.amazonaws.com/benchmarks.redislabs/redisjson/performance.docs/performance.docs.rdb" clientconfig: diff --git a/tests/benchmarks/json_vs_hashes_hset_key_simple.yml b/tests/benchmarks/json_vs_hashes_hset_key_simple.yml index 76cdcbdf9..b1406d5c6 100644 --- a/tests/benchmarks/json_vs_hashes_hset_key_simple.yml +++ b/tests/benchmarks/json_vs_hashes_hset_key_simple.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_vs_hashes_hset_key_simple" description: 'HSET key_hash field1 value1 field2 value2 || Use-case to compare against JSON.SET key_json $ 
{"field1":"value1","field2":"value2"}' -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/benchmarks/json_vs_hashes_json.set_key_simple.yml b/tests/benchmarks/json_vs_hashes_json.set_key_simple.yml index f62c0425a..03e97e15d 100644 --- a/tests/benchmarks/json_vs_hashes_json.set_key_simple.yml +++ b/tests/benchmarks/json_vs_hashes_json.set_key_simple.yml @@ -1,9 +1,7 @@ version: 0.2 name: "json_vs_hashes_json.set_key_simple" description: 'JSON.SET key_json $ {"field1":"value1","field2":"value2"} || Use-case to compare against HSET key_hash field1 value1 field2 value2' -remote: - - type: oss-standalone - - setup: redisearch-m5d + clientconfig: - tool: redis-benchmark - min-tool-version: "6.2.0" diff --git a/tests/pytest/common.py b/tests/pytest/common.py index adeee7c91..42bcbd330 100644 --- a/tests/pytest/common.py +++ b/tests/pytest/common.py @@ -3,6 +3,9 @@ from functools import wraps from includes import * from packaging import version +from unittest import SkipTest +from RLTest import Env +import inspect @contextmanager def TimeLimit(timeout): @@ -44,3 +47,32 @@ def wrapper(env, *args, **kwargs): return return f(env, *args, **kwargs) return wrapper + +def skip_redis_less_than(redis_less_than=None): + def decorate(f): + def wrapper(): + if redis_less_than and server_version_is_less_than(redis_less_than): + raise SkipTest() + if len(inspect.signature(f).parameters) > 0: + env = Env() + return f(env) + else: + return f() + return wrapper + return decorate + + +server_ver = None +def server_version_is_at_least(ver): + global server_ver + if server_ver is None: + import subprocess + # Expecting something like "Redis server v=7.2.3 sha=******** malloc=jemalloc-5.3.0 bits=64 build=***************" + v = subprocess.run([Defaults.binary, '--version'], stdout=subprocess.PIPE).stdout.decode().split()[2].split('=')[1] + server_ver = version.parse(v) + if not isinstance(ver, 
version.Version): + ver = version.parse(ver) + return server_ver >= ver + +def server_version_is_less_than(ver): + return not server_version_is_at_least(ver) diff --git a/tests/pytest/requirements.txt b/tests/pytest/requirements.txt index 2d9fd6e79..04e6bb889 100644 --- a/tests/pytest/requirements.txt +++ b/tests/pytest/requirements.txt @@ -1,7 +1,4 @@ -# redis ~= 5.0.0b3 -# RLTest ~= 0.7.0 -# ramp-packer ~= 2.5.4 -# psutil >= 5.8.0 - +RLTest ~= 0.7.2 +redis ~= 5.0.8 six >= 1.10.0 -gevent ~= 22.10.1 +gevent >= 22.10.2 diff --git a/tests/pytest/test.py b/tests/pytest/test.py index f0d824cb9..0ebcf8083 100644 --- a/tests/pytest/test.py +++ b/tests/pytest/test.py @@ -6,6 +6,7 @@ import os import redis import json +import time from RLTest import Env from includes import * from redis.client import NEVER_DECODE @@ -136,8 +137,8 @@ def testSetRootWithJSONValuesShouldSucceed(env): r.assertEqual(v, s) def testSetAddNewImmediateChild(env): - - r = env + + r = env r.assertOk(r.execute_command('JSON.SET', 'test', '$', json.dumps(docs))) # Make sure children are initially missing r.assertEqual(r.execute_command('JSON.GET', 'test', '$.basic.dict.new_child_1'), '[]') @@ -183,7 +184,7 @@ def testSetGetWholeBasicDocumentShouldBeEqual(env): def testSetBehaviorModifyingSubcommands(env): """Test JSON.SET's NX and XX subcommands""" r = env - + # test against the root r.assertIsNone(r.execute_command('JSON.SET', 'test', '.', '{}', 'XX')) r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{}', 'NX')) @@ -239,7 +240,7 @@ def testSetGetWithSpecialKey(env): "$a": "$a", "$a[": "$a[" } - + # Set doc using individual keys using legacy syntax (with implicit `$` root) r.assertOk(r.execute_command('JSON.SET', 'x', '$', '{"$":"$"}')) r.assertOk(r.execute_command('JSON.SET', 'x', 'a', '"a"')) @@ -254,7 +255,7 @@ def testSetGetWithSpecialKey(env): # Get key "$" r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$.$')), ["$"]) # dot notation 
r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$["$"]')), ["$"]) # bracket notation - r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$')), [doc]) + r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$')), [doc]) # Get key "a" r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$.a')), ["a"]) # dot notation r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$["a"]')), ["a"]) # bracket notation @@ -265,7 +266,7 @@ def testSetGetWithSpecialKey(env): r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$a')), "$a") # legacy # Get key "$a[" r.assertEqual(json.loads(r.execute_command('JSON.GET', 'x', '$["$a["]')), ["$a["]) # bracket notation (cannot use dot notation) - + def testSetWithPathErrors(env): r = env @@ -348,7 +349,7 @@ def testGetFormatting(env): r.assertEqual(res, f.format(newline=newline, space=space, indent=indent)) def testBackwardRDB(env): - env.skipOnCluster() + env.skipOnCluster() if env.useAof: env.skip() dbFileName = env.cmd('config', 'get', 'dbfilename')[1] @@ -380,28 +381,28 @@ def testMgetCommand(env): # Set up a few keys for d in range(0, 5): - key = 'doc:{}'.format(d) + key = '{{doc}}:{}'.format(d) r.cmd('DEL', key) r.expect('JSON.SET', key, '.', json.dumps(docs['basic'])).ok() # Test an MGET that succeeds on all keys - raw = r.execute_command('JSON.MGET', *['doc:{}'.format(d) for d in range(0, 5)] + ['.']) + raw = r.execute_command('JSON.MGET', *['{{doc}}:{}'.format(d) for d in range(0, 5)] + ['.']) r.assertEqual(len(raw), 5) for d in range(0, 5): - key = 'doc:{}'.format(d) + key = '{{doc}}:{}'.format(d) r.assertEqual(json.loads(raw[d]), docs['basic'], d) # Test an MGET that fails for one key r.cmd('DEL', 'test') - r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{"bool":false}')) - raw = r.execute_command('JSON.MGET', 'test', 'doc:0', 'foo', '.bool') + r.assertOk(r.execute_command('JSON.SET', '{doc}:test', '.', '{"bool":false}')) + raw = r.execute_command('JSON.MGET', 
'{doc}:test', '{doc}:0', '{doc}:foo', '.bool') r.assertEqual(len(raw), 3) r.assertFalse(json.loads(raw[0])) r.assertTrue(json.loads(raw[1])) r.assertEqual(raw[2], None) # Test that MGET on missing path - raw = r.execute_command('JSON.MGET', 'doc:0', 'doc:1', '42isnotapath') + raw = r.execute_command('JSON.MGET', '{doc}:0', '{doc}:1', '42isnotapath') r.assertEqual(len(raw), 2) r.assertEqual(raw[0], None) r.assertEqual(raw[1], None) @@ -409,7 +410,7 @@ def testMgetCommand(env): # Test that MGET fails on path errors r.cmd('DEL', 'test') r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{"bull":4.2}')) - raw = r.execute_command('JSON.MGET', 'doc:0', 'test', 'doc:1', '.bool') + raw = r.execute_command('JSON.MGET', '{doc}:0', 'test', '{doc}:1', '.bool') r.assertEqual(len(raw), 3) r.assertTrue(json.loads(raw[0])) r.assertEqual(raw[1], None) @@ -425,7 +426,7 @@ def testToggleCommand(env): # Test Toggeling Empty Path r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{"foo":"bar"}')) r.expect('JSON.TOGGLE', 'test', '.bar').raiseError() - + # Test Toggeling Non Boolean r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{"foo":"bar"}')) r.expect('JSON.TOGGLE','test','.foo').raiseError() @@ -444,9 +445,7 @@ def testDelCommand(env): r.assertEqual(r.execute_command('JSON.OBJLEN', 'test', '.'), 1) r.assertIsNone(r.execute_command('JSON.TYPE', 'test', '.baz')) r.assertEqual(r.execute_command('JSON.DEL', 'test', '.foo'), 1) - r.assertEqual(r.execute_command('JSON.OBJLEN', 'test', '.'), 0) - r.assertIsNone(r.execute_command('JSON.TYPE', 'test', '.foo')) - r.assertEqual(r.execute_command('JSON.TYPE', 'test', '.'), 'object') + r.assertIsNone(r.execute_command('JSON.GET', 'test')) # Test deleting some keys from an object r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{}')) @@ -456,11 +455,10 @@ def testDelCommand(env): r.assertEqual(r.execute_command('JSON.OBJLEN', 'test', '.'), 1) r.assertIsNone(r.execute_command('JSON.TYPE', 'test', '.baz')) 
r.assertEqual(r.execute_command('JSON.DEL', 'test', '.foo'), 1) - r.assertEqual(r.execute_command('JSON.OBJLEN', 'test', '.'), 0) - r.assertIsNone(r.execute_command('JSON.TYPE', 'test', '.foo')) - r.assertEqual(r.execute_command('JSON.TYPE', 'test', '.'), 'object') + r.assertIsNone(r.execute_command('JSON.GET', 'test')) # Test with an array + r.assertOk(r.execute_command('JSON.SET', 'test', '.', '{}')) r.assertOk(r.execute_command('JSON.SET', 'test', '.foo', '"bar"')) r.assertOk(r.execute_command('JSON.SET', 'test', '.baz', '"qux"')) r.assertOk(r.execute_command('JSON.SET', 'test', '.arr', '[1.2,1,2]')) @@ -591,7 +589,7 @@ def testClear(env): r.expect('JSON.CLEAR', 'test', '$.b..a').equal(0) r.expect('JSON.GET', 'test', '$').equal('[{"a":[1,2],"b":{"c":"d"}}]') - # Key doesn't exist + # Key doesn't exist r.expect('JSON.CLEAR', 'not_test_key', '$').raiseError() def testClearScalar(env): @@ -613,7 +611,7 @@ def testClearScalar(env): r.assertEqual(r.execute_command('JSON.CLEAR', 'test', '$.*'), 2) res = r.execute_command('JSON.GET', 'test', '$.*') r.assertEqual(json.loads(res), ['string value', None, True, 0, 0]) - + # Do not clear already cleared values r.assertEqual(r.execute_command('JSON.CLEAR', 'test', '$.*'), 0) @@ -730,9 +728,9 @@ def testArrInsertCommand(env): r.assertEqual(r.execute_command('JSON.GET', 'test', jpath), "[3,2,1,4]") r.assertEqual(r.execute_command('JSON.ARRINSERT', 'test', jpath, 1, '5'), 5) - r.assertEqual(r.execute_command('JSON.ARRINSERT', 'test', jpath, -2, '6'), 6) + r.assertEqual(r.execute_command('JSON.ARRINSERT', 'test', jpath, -2, '6'), 6) r.assertEqual(r.execute_command('JSON.GET', 'test', jpath), "[3,5,2,6,1,4]") - + r.assertEqual(r.execute_command('JSON.ARRINSERT', 'test', jpath, -3, '7', '{"A":"Z"}', '9'), 9) r.assertEqual(r.execute_command('JSON.GET', 'test', jpath), '[3,5,2,7,{"A":"Z"},9,6,1,4]') @@ -925,14 +923,14 @@ def testNumIncrCommand(env): def testNumCommandOverflow(env): """Test JSON.NUMINCRBY and JSON.NUMMULTBY commands 
overflow """ r = env - + # test overflow on root r.assertOk(r.execute_command('JSON.SET', 'big_num', '.', '1.6350000000001313e+308')) r.expect('JSON.NUMINCRBY', 'big_num', '.', '1.6350000000001313e+308').raiseError() r.expect('JSON.NUMMULTBY', 'big_num', '.', '2').raiseError() # (value remains) r.assertEqual(r.execute_command('JSON.GET', 'big_num', '.'), '1.6350000000001313e308') - + # test overflow on nested object value r.assertOk(r.execute_command('JSON.SET', 'nested_obj_big_num', '$', '{"l1":{"l2_a":1.6350000000001313e+308,"l2_b":2}}')) r.expect('JSON.NUMINCRBY', 'nested_obj_big_num', '$.l1.l2_a', '1.6350000000001313e+308').raiseError() @@ -1081,7 +1079,7 @@ def testMSET(env): def testMSET_Partial(env): - # Make sure MSET doesn't stop on the first update that can't be updated + # Make sure MSET doesn't stop on the first update that can't be updated env.expect("JSON.SET", "a{s}", '$', '{"x": {"y":[10,20], "z":[30,40]}}').ok() env.expect("JSON.SET", "b{s}", '$', '{"x": 60}').ok() env.expect("JSON.MSET", "a{s}", '$.x', '{}', "a{s}", '$.x.z[1]', '50', 'b{s}', '$.x', '70').ok() @@ -1121,13 +1119,13 @@ def testCrashInParserMOD2099(env): r = env r.assertOk(r.execute_command('JSON.SET', 'test', '$', '{"a":{"x":{"i":10}}, "b":{"x":{"i":20, "j":5}}}')) - + res = r.execute_command('JSON.GET', 'test', '$..x[?(@>10)]') r.assertEqual(res, '[20]') - + res = r.execute_command('JSON.GET', 'test', '$..x[?($>10)]') r.assertEqual(res, '[]') - + def testInfoEverything(env): @@ -1138,12 +1136,12 @@ def testInfoEverything(env): def testCopyCommand(env): """Test COPY command and make sure behavior of json keys is similar to hash keys""" - env.skipOnCluster() + env.skipOnCluster() env.skipOnVersionSmaller('6.2') r = env - + values = {"foo": "bar", "fu": "wunderbar"} - + ### Copy json to a new key (from json1 to json2) r.assertOk(r.execute_command('JSON.SET', 'json1', '$', json.dumps(values))) r.assertTrue(r.execute_command('COPY', 'json1', 'json2')) @@ -1160,16 +1158,16 @@ def 
testCopyCommand(env): # Check new values r.assertEqual(r.execute_command('HGETALL', 'hash1'), values) r.assertEqual(r.execute_command('HGETALL', 'hash2'), values) - + new_values = {"ganz": "neue"} - + ### Copy hash to an existing key - hash_values = list(reduce(lambda acc, v: acc + v, new_values.items())) + hash_values = list(reduce(lambda acc, v: acc + v, new_values.items())) r.assertEqual(r.execute_command('HSET', 'hash3', *hash_values), int(len(hash_values) / 2)) # Do not overwrite without REPLACE (from hash to hash) r.assertFalse(r.execute_command('COPY', 'hash3', 'hash2')) # Do not overwrite without REPLACE (from hash to json) - r.assertFalse(r.execute_command('COPY', 'hash3', 'json2')) + r.assertFalse(r.execute_command('COPY', 'hash3', 'json2')) # Overwrite with REPLACE (from hash to hash) r.assertTrue(r.execute_command('COPY', 'hash3', 'hash2', 'REPLACE')) # Overwrite with REPLACE (from hash to json) @@ -1268,13 +1266,13 @@ def testFilter(env): "pat_not_str5": [".*foo"], } r.expect('JSON.SET', 'doc', '$', json.dumps(doc)).ok() - + # regex match using a static regex pattern r.expect('JSON.GET', 'doc', '$.arr[?(@ =~ ".*foo")]').equal('["kafoosh","foolish","ffool"]') - + # regex match using a field r.expect('JSON.GET', 'doc', '$.arr[?(@ =~ $.pat_regex)]').equal('["kafoosh","foolish","ffool"]') - + # regex case-insensitive match using a field (notice the `.*` before the filter) r.expect('JSON.GET', 'doc', '$.arr.*[?(@ =~ $.pat_plain)]').equal('["foo","FoO"]') @@ -1295,7 +1293,7 @@ def testFilter(env): # plain string match r.expect('JSON.GET', 'doc', '$.arr[?(@ == $.pat_plain)]').equal('["(?i)^[f][o][o]$"]') - + def testFilterExpression(env): # Test JSONPath filter with 3 or more operands r = env @@ -1337,14 +1335,14 @@ def testMerge(env): # Test merge error - invalid JSON r.expect('JSON.MERGE', 'test_merge', '$.a', '{"b":{"h":"i" "bye"}}').error().contains("expected") - # Test with none existing key with path $.a + # Test with none existing key with path $.a 
r.expect('JSON.MERGE', 'test_merge_new', '$.a', '{"a":"i"}').raiseError() # Test with none existing key -> create key r.assertOk(r.execute_command('JSON.MERGE', 'test_merge_new', '$', '{"h":"i"}')) r.expect('JSON.GET', 'test_merge_new').equal('{"h":"i"}') - - + + def testMergeArray(env): # Test JSON.MERGE with arrays r = env @@ -1414,12 +1412,165 @@ def testRDBUnboundedDepth(env): # concat the string_126 at the end of itself json_value = nest_object(3, 5, "__deep_leaf", 420) r.expect('JSON.SET', 'doc', '$..__leaf', json_value).ok() - + # RDB dump and restore the key 'doc' and check that the key is still valid dump = env.execute_command('dump', 'doc', **{NEVER_DECODE: []}) r.expect('RESTORE', 'doc1', 0, dump).ok() r.expect('JSON.GET', 'doc1', '$..__leaf..__deep_leaf').equal('[420]') +def testUnicodeCharacters(env): + # Test unicode strings parsing and processing. + + r = env + + r.assertOk(r.execute_command('JSON.SET', 'test', '$', '{"\u00a0": "⍢∪⇰"}')) + r.expect('JSON.GET', 'test', '$').equal('[{"\u00a0":"⍢∪⇰"}]') + + r.assertOk(r.execute_command('JSON.SET', 'test', '$', '{"\u00a0": { "name": "\u00a0\u00a0" } }')) + r.expect('JSON.GET', 'test', '$').equal('[{"\u00a0":{"name":"\u00a0\u00a0"}}]') + +def test_promote_u64_to_f64(env): + r = env + i64max = 2 ** 63 - 1 + + # i64 + i64 behaves normally + r.expect('JSON.SET', 'num', '$', 0).ok() + r.expect('JSON.TYPE', 'num', '$').equal(['integer']) + res = r.execute_command('JSON.GET', 'num', '$') + val = json.loads(res)[0] + r.assertEqual(val, 0) + res = r.execute_command('JSON.NUMINCRBY', 'num', '$', i64max) + val = json.loads(res)[0] + r.assertEqual(val, i64max) # i64 + i64 no overflow + r.assertNotEqual(val, float(i64max)) # i64max is not representable as f64 + r.expect('JSON.TYPE', 'num', '$').equal(['integer']) # no promotion + res = r.execute_command('JSON.NUMINCRBY', 'num', '$', 1) + val = json.loads(res)[0] + r.assertEqual(val, -(i64max + 1)) # i64 + i64 overflow wraps. 
as prior, not breaking + r.assertNotEqual(val, i64max + 1) # i64 + i64 is not promoted to u64 + r.assertNotEqual(val, float(i64max) + float(1)) # i64 + i64 is not promoted to f64 + r.expect('JSON.TYPE', 'num', '$').equal(['integer']) # no promotion + + # i64 + u64 used to have inconsistent behavior + r.expect('JSON.SET', 'num', '$', 0).ok() + res = r.execute_command('JSON.NUMINCRBY', 'num', '$', i64max + 2) + val = json.loads(res)[0] + r.assertNotEqual(val, -(i64max + 1) + 1) # i64 + u64 is not i64 + r.assertNotEqual(val, i64max + 2) # i64 + u64 is not u64 + r.assertEqual(val, float(i64max + 2)) # i64 + u64 promotes to f64. as prior, not breaking + r.expect('JSON.TYPE', 'num', '$').equal(['number']) # promoted + + # u64 + i64 used to crash + r.expect('JSON.SET', 'num', '$', i64max + 1).ok() + r.expect('JSON.TYPE', 'num', '$').equal(['integer']) # as prior, not breaking + res = r.execute_command('JSON.GET', 'num', '$') + val = json.loads(res)[0] + r.assertNotEqual(val, -(i64max + 1)) # not i64 + r.assertEqual(val, i64max + 1) # as prior, not breaking + res = r.execute_command('JSON.NUMINCRBY', 'num', '$', 1) + val = json.loads(res)[0] + r.assertNotEqual(val, -(i64max + 1) + 1) # u64 + i64 is not i64 + r.assertNotEqual(val, i64max + 2) # u64 + i64 is not u64 + r.assertEqual(val, float(i64max + 2)) # u64 + i64 promotes to f64. used to crash + r.expect('JSON.TYPE', 'num', '$').equal(['number']) # promoted + + # u64 + u64 used to have inconsistent behavior + r.expect('JSON.SET', 'num', '$', i64max + 1).ok() + r.expect('JSON.CLEAR', 'num', '$').equal(1) # clear u64 used to crash + r.expect('JSON.SET', 'num', '$', i64max + 1).ok() + res = r.execute_command('JSON.NUMINCRBY', 'num', '$', i64max + 2) + val = json.loads(res)[0] + r.assertNotEqual(val, -(i64max + 1) + i64max + 2) # u64 + u64 is not i64 + r.assertNotEqual(val, 2) # u64 + u64 is not u64 + r.assertEqual(val, float(2 * i64max + 3)) # u64 + u64 promotes to f64. 
as prior, not breaking + r.expect('JSON.TYPE', 'num', '$').equal(['number']) # promoted + + +def test_mset_replication_in_aof(env): + env.skipOnCluster() + env = Env(useAof=True) + with env.getClusterConnectionIfNeeded() as r: + r.execute_command('config', 'set', 'notify-keyspace-events', 'KEA') + + pubsub = r.pubsub() + pubsub.psubscribe('__key*') + + time.sleep(1) + env.assertEqual('psubscribe', pubsub.get_message(timeout=1)['type']) + + command = [b'JSON.MSET'] + data = 'a' * 100 + num_params = 5 + for i in range(0, num_params): + key = f'k:{i}' + value = json.dumps({"data": data}) + command.append(key.encode()) + command.append(b'$') + command.append(value.encode()) + env.expect(*command).ok() + + for i in range(0, num_params): + msg = pubsub.get_message(timeout=1) + env.assertEqual(msg['data'], 'json.mset') + env.assertEqual(msg['type'], 'pmessage') + msg = pubsub.get_message(timeout=1) + env.assertEqual(msg['data'], f'k:{i}') + env.assertEqual(msg['type'], 'pmessage') + + # verify JSON.MSET is appearing in the AOF once only + role = 'master' + aof_fn = env.envRunner._getFileName(role, '.aof.1.incr.aof') + with open(f'{env.logDir}/appendonlydir/{aof_fn}', 'r') as fd: + aof_content = [l for l in fd.readlines() if 'JSON.MSET' in l] + assert(len(aof_content) == 1) + + +def test_recursive_descent(env): + r = env + r.expect('JSON.SET', 'k', '$', '[{"a":1}]').ok() + r.expect('JSON.SET', 'k', '$..*', '[{"a":1}]').ok() + r.expect('JSON.GET', 'k', '$').equal('[[[{"a":1}]]]') + +def test_json_del_matches_with_numeric_pathes(env): + r = env + r.expect( + "JSON.SET", + "k", + "$", + '[{"x":1},{"x":2},{"x":3},{"x":4},{"x":5},{"x":6},{"x":7},{"x":8},{"x":9},{"x":10},{"x":11},{"x":12}]', + ).ok() + r.expect("JSON.DEL", "k", "$[?(@.x>0)]").equal(12) + + r.expect( + "JSON.SET", + "k", + "$", + '[{"x":11},{"x":2},{"x":3},{"x":4},{"x":5},{"x":6},{"x":7},{"x":8},{"x":9},{"x":10},{"x":20},{"x":30},{"x":40},{"x":50},{"x":60},{"x":70},{"x":80},{"x":90},{"x":100}]', + ).ok() + 
r.expect("JSON.DEL", "k", "$[?(@.x>10)]").equal(10) + + r.expect( + "JSON.SET", + "k", + "$", + '[{"x":10},{"x":20},{"x":30},{"x":40},{"x":50},{"x":60},{"x":70},{"x":80},{"x":90},{"x":100},{"x":9},{"x":120}]', + ).ok() + r.expect("JSON.DEL", "k", "$[?(@.x>10)]").equal(10) + + r.expect( + "JSON.SET", + "k", + "$", + '[{"x":11}, {"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11},{"x":11}]', + ).ok() + r.expect("JSON.DEL", "k", "$[?(@.x>10)]").equal(21) + +def test_json_del_matches_with_object_pathes(env): + r = env + r.expect("JSON.SET", "test", "$", '{"root" : {"1" : {"value":1},"2" : {"value":2},"3" : {"value":3},"4" : {"value":4},"5" : {"value":5}}}').ok() + r.expect("JSON.DEL", "test", "$.root[?(@.value > 2)]").equal(3) + r.expect("JSON.GET", "test", "$").equal('[{"root":{"1":{"value":1},"2":{"value":2}}}]') + # class CacheTestCase(BaseReJSONTest): # @property # def module_args(env): diff --git a/tests/pytest/test_acl.py b/tests/pytest/test_acl.py new file mode 100644 index 000000000..42d8b7889 --- /dev/null +++ b/tests/pytest/test_acl.py @@ -0,0 +1,82 @@ +from common import * + + +@skip_redis_less_than(redis_less_than="7.4.1") +def test_acl_category(env): + """Test that the `json` category was added appropriately in module load""" + res = env.cmd('ACL', 'CAT') + env.assertTrue('json' in res) + +@skip_redis_less_than(redis_less_than="7.99.99") +def test_acl_json_commands(env): + """Tests that the RedisJSON commands are registered to the `json` ACL category""" + res = env.cmd('ACL', 'CAT', 'json') + COMMANDS = [ + 'json.del', 'json.get', 'json.mget', 'json.set', 'json.mset', 'json.type', 'json.numincrby', 'json.toggle', + 'json.nummultby', 'json.numpowby', 'json.strappend', 'json.strlen', 'json.arrappend', 'json.arrindex', + 'json.arrinsert', 'json.arrlen', 'json.arrpop', 'json.arrtrim', 'json.objkeys', 'json.objlen', 'json.clear', + 
'json.debug', 'json.forget', 'json.resp', 'json.merge', + ] + + # Use a set since the order of the response is not consistent. + env.assertEqual(set(res), set(COMMANDS)) + + # Check that one of our commands is listed in a non-json category + res = env.cmd('ACL', 'CAT', 'read') + env.assertTrue('json.get' in res) + +@skip_redis_less_than(redis_less_than="7.4.1") +def test_acl_non_default_user(env): + """Tests that a user with a non-default ACL can't access the json category""" + + # Create a user with no command permissions (full keyspace and pubsub access) + env.expect('ACL', 'SETUSER', 'testusr', 'on', '>123', '~*', '&*').ok() + + env.expect('AUTH', 'testusr', '123').true() + + # Such a user shouldn't be able to run any RedisJSON commands (or any other commands) + env.expect('json.get', 'idx', '$', '*').error().contains( + "User testusr has no permissions to run the 'json.get' command") + + # Add `testusr` read permissions + env.expect('AUTH', 'default', '').true() + env.expect('ACL', 'SETUSER', 'testusr', '+@read').ok() + env.expect('AUTH', 'testusr', '123').true() + + READ_JSON_COMMANDS = [ + 'json.get', 'json.type', 'json.strlen', 'json.arrlen', 'json.objlen', + 'json.debug', 'json.arrindex', 'json.objkeys', + ] + + # `testusr` should now be able to run `read` commands like `json.get' + for cmd in READ_JSON_COMMANDS: + env.expect(cmd).error().notContains( + "User testusr has no permissions") + + # `testusr` should not be able to run `json` commands that are not `read` + env.expect('json.set', 'idx', '$', '0').error().contains( + "User testusr has no permissions to run the 'json.set' command") + + # Add `write` permissions to `testusr` + env.expect('AUTH', 'default', '').true() + env.expect('ACL', 'SETUSER', 'testusr', '+@write').ok() + env.expect('AUTH', 'testusr', '123').true() + + WRITE_JSON_COMMANDS = [ + 'json.del', 'json.set', 'json.merge', 'json.clear', 'json.forget', 'json.strappend', + 'json.arrappend', 'json.arrinsert', 'json.arrpop', 'json.arrtrim', + 
] + + # `testusr` should now be able to run `write` commands + for cmd in WRITE_JSON_COMMANDS: + env.expect(cmd).error().notContains( + "User testusr has no permissions") + + # Add `testusr` `json` permissions + env.expect('AUTH', 'default', '').true() + env.expect('ACL', 'SETUSER', 'testusr', '+@json').ok() + env.expect('AUTH', 'testusr', '123').true() + + # `testusr` should now be able to run `json` commands like `json.set` + env.expect('json.set', 'idx', '$', '0').ok() + env.expect('json.get', 'idx', '$').equal('[0]') diff --git a/tests/pytest/test_defrag.py b/tests/pytest/test_defrag.py new file mode 100644 index 000000000..8249c61ac --- /dev/null +++ b/tests/pytest/test_defrag.py @@ -0,0 +1,133 @@ +import time +import json +from RLTest import Defaults + +Defaults.decode_responses = True + +def enableDefrag(env): + # make defrag as aggressive as possible + env.cmd('CONFIG', 'SET', 'hz', '100') + env.cmd('CONFIG', 'SET', 'active-defrag-ignore-bytes', '1') + env.cmd('CONFIG', 'SET', 'active-defrag-threshold-lower', '0') + env.cmd('CONFIG', 'SET', 'active-defrag-cycle-min', '99') + + try: + env.cmd('CONFIG', 'SET', 'activedefrag', 'yes') + except Exception: + # If active defrag is not supported by the current Redis, simply skip the test. + env.skip() + +def defragOnObj(env, obj): + enableDefrag(env) + json_str = json.dumps(obj) + env.expect('JSON.SET', 'test', '$', json_str).ok() + for i in range(10000): + env.expect('JSON.SET', 'test%d' % i, '$', json_str).ok() + i += 1 + env.expect('JSON.SET', 'test%d' % i, '$', json_str).ok() + for i in range(10000): + env.expect('DEL', 'test%d' % i).equal(1) + i += 1 + _, _, _, _, _, keysDefrag = env.cmd('JSON.DEBUG', 'DEFRAG_INFO') + startTime = time.time() + # Wait for at least 2 defrag full cycles + # We verify only the 'keysDefrag' value because the other values + # are not promised to be updated. It depends if Redis support + # the start/end defrag callbacks. 
+ while keysDefrag < 2: + time.sleep(0.1) + _, _, _, _, _, keysDefrag = env.cmd('JSON.DEBUG', 'DEFRAG_INFO') + if time.time() - startTime > 30: + # We will wait for up to 30 seconds and then we consider it a failure + env.assertTrue(False, message='Failed waiting for defrag to run') + return + # make sure json is still valid. + res = json.loads(env.cmd('JSON.GET', 'test%d' % i, '$'))[0] + env.assertEqual(res, obj) + env.assertGreater(env.cmd('info', 'Stats')['active_defrag_key_hits'], 0) + +def testDefragNumber(env): + defragOnObj(env, 1) + +def testDefragBigNumber(env): + defragOnObj(env, 100000000000000000000) + +def testDefragDouble(env): + defragOnObj(env, 1.111111111111) + +def testDefragNegativeNumber(env): + defragOnObj(env, -100000000000000000000) + +def testDefragNegativeDouble(env): + defragOnObj(env, -1.111111111111) + +def testDefragTrue(env): + defragOnObj(env, True) + +def testDefragFalse(env): + defragOnObj(env, True) + +def testDefragNone(env): + defragOnObj(env, None) + +def testDefragEmptyString(env): + defragOnObj(env, "") + +def testDefragString(env): + defragOnObj(env, "foo") + +def testDefragEmptyArray(env): + defragOnObj(env, []) + +def testDefragArray(env): + defragOnObj(env, [1, 2, 3]) + +def testDefragEmptyObject(env): + defragOnObj(env, {}) + +def testDefragObject(env): + defragOnObj(env, {"foo": "bar"}) + +def testDefragComplex(env): + defragOnObj(env, {"foo": ["foo", 1, None, True, False, {}, {"foo": [], "bar": 1}]}) + +def testDefragBigJsons(env): + enableDefrag(env) + + # Disable defrag so we can actually create fragmentation + env.cmd('CONFIG', 'SET', 'activedefrag', 'no') + + env.expect('JSON.SET', 'key1', '$', "[]").ok() + env.expect('JSON.SET', 'key2', '$', "[]").ok() + + for i in range(100000): + env.cmd('JSON.ARRAPPEND', 'key1', '$', "[1.11111111111]") + env.cmd('JSON.ARRAPPEND', 'key2', '$', "[1.11111111111]") + + # Now we delete key2 which should cause fragmenation + env.expect('DEL', 'key2').equal(1) + + # wait for 
fragmentation for up to 30 seconds + frag = env.cmd('info', 'memory')['allocator_frag_ratio'] + startTime = time.time() + while frag < 1.4: + time.sleep(0.1) + frag = env.cmd('info', 'memory')['allocator_frag_ratio'] + if time.time() - startTime > 30: + # We will wait for up to 30 seconds and then we consider it a failure + env.assertTrue(False, message='Failed waiting for fragmentation, current value %s which is expected to be above 1.4.' % frag) + return + + #enable active defrag + env.cmd('CONFIG', 'SET', 'activedefrag', 'yes') + + # wait for fragmentation for go down for up to 30 seconds + frag = env.cmd('info', 'memory')['allocator_frag_ratio'] + startTime = time.time() + while frag > 1.125: + time.sleep(0.1) + frag = env.cmd('info', 'memory')['allocator_frag_ratio'] + if time.time() - startTime > 30: + # We will wait for up to 30 seconds and then we consider it a failure + env.assertTrue(False, message='Failed waiting for fragmentation to go down, current value %s which is expected to be bellow 1.125.' 
% frag) + return diff --git a/tests/pytest/test_multi.py b/tests/pytest/test_multi.py index 5db932ee4..7486161e2 100644 --- a/tests/pytest/test_multi.py +++ b/tests/pytest/test_multi.py @@ -228,60 +228,60 @@ def testSetAndGetCommands(env): r.assertEqual(res, '"inizio"') - def testMGetCommand(env): """Test REJSON.MGET command""" r = env # Test mget with multi paths - r.assertOk(r.execute_command('JSON.SET', 'doc1', '$', '{"a":1, "b": 2, "nested1": {"a": 3}, "c": null, "nested2": {"a": null}}')) - r.assertOk(r.execute_command('JSON.SET', 'doc2', '$', '{"a":4, "b": 5, "nested3": {"a": 6}, "c": null, "nested4": {"a": [null]}}')) + r.assertOk(r.execute_command('JSON.SET', '{doc}:1', '$', '{"a":1, "b": 2, "nested1": {"a": 3}, "c": null, "nested2": {"a": null}}')) + r.assertOk(r.execute_command('JSON.SET', '{doc}:2', '$', '{"a":4, "b": 5, "nested3": {"a": 6}, "c": null, "nested4": {"a": [null]}}')) # Compare also to single JSON.GET - res1 = r.execute_command('JSON.GET', 'doc1', '$..a') - res2 = r.execute_command('JSON.GET', 'doc2', '$..a') + res1 = r.execute_command('JSON.GET', '{doc}:1', '$..a') + res2 = r.execute_command('JSON.GET', '{doc}:2', '$..a') r.assertEqual(res1, '[1,3,null]') r.assertEqual(res2, '[4,6,[null]]') - r.assertTrue(r.execute_command('SET', 'wrong_key_type', 'not a json key')) + r.assertTrue(r.execute_command('SET', '{doc}:wrong_key_type', 'not a json key')) # Test mget with single path - res = r.execute_command('JSON.MGET', 'doc1', '$..a') + res = r.execute_command('JSON.MGET', '{doc}:1', '$..a') r.assertEqual([res1], res) + # Test mget with multi path - res = r.execute_command('JSON.MGET', 'doc1', 'wrong_key_type', 'doc2', '$..a') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:wrong_key_type', '{doc}:2', '$..a') r.assertEqual(res, [res1, None, res2]) # Test missing/wrong key / missing path - res = r.execute_command('JSON.MGET', 'doc1', 'missing_doc', '$..a') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:missing', '$..a') 
r.assertEqual(res, [res1, None]) - res = r.execute_command('JSON.MGET', 'doc1', 'doc2', 'wrong_key_type', 'missing_doc', '$.nested1.a') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:2', '{doc}:wrong_key_type', '{doc}:missing', '$.nested1.a') r.assertEqual(res, [json.dumps([json.loads(res1)[1]]), '[]', None, None]) - res = r.execute_command('JSON.MGET', 'missing_doc1', 'missing_doc2', '$..a') + res = r.execute_command('JSON.MGET', '{doc}:missing1', '{doc}:missing2', '$..a') r.assertEqual(res, [None, None]) # Test missing path - res = r.execute_command('JSON.MGET', 'doc1', 'wrong_key_type', 'missing_doc2', '$..niente') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:wrong_key_type', '{doc}:missing2', '$..niente') r.assertEqual(res, ['[]', None, None]) # Test legacy (for each path only the first value is returned as a json string) # Test mget with single path - res = r.execute_command('JSON.MGET', 'doc1', '..a') + res = r.execute_command('JSON.MGET', '{doc}:1', '..a') r.assertEqual(res, [json.dumps(json.loads(res1)[0])]) # Test mget with multi path - res = r.execute_command('JSON.MGET', 'doc1', 'doc2', '..a') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:2', '..a') r.assertEqual(res, [json.dumps(json.loads(res1)[0]), json.dumps(json.loads(res2)[0])]) - # Test wrong key - res = r.execute_command('JSON.MGET', 'doc1', 'wrong_key_type', 'doc2', '..a') + # Test wrong key + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:wrong_key_type', '{doc}:2', '..a') r.assertEqual(res, [json.dumps(json.loads(res1)[0]), None, json.dumps(json.loads(res2)[0])]) # Test missing key/path - res = r.execute_command('JSON.MGET', 'doc1', 'doc2', 'wrong_key_type', 'missing_doc', '.nested1.a') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:2', '{doc}:wrong_key_type', '{doc}:missing', '.nested1.a') r.assertEqual(res, [json.dumps(json.loads(res1)[1]), None, None, None]) - res = r.execute_command('JSON.MGET', 'missing_doc1', 'missing_doc2', '..a') + res 
= r.execute_command('JSON.MGET', '{doc}:missing1', '{doc}:missing2', '..a') r.assertEqual(res, [None, None]) # Test missing path - res = r.execute_command('JSON.MGET', 'doc1', 'wrong_key_type', 'missing_doc2', '.niente') + res = r.execute_command('JSON.MGET', '{doc}:1', '{doc}:wrong_key_type', '{doc}:missing2', '.niente') r.assertEqual(res, [None, None, None]) @@ -820,7 +820,7 @@ def testToggleCommand(env): # Test MEMORY USAGE key # """ # r = env -# jdata, jtypes = load_types_data('a') +# jdata, jtypes = load_types_data('a') # r.assertOk(r.execute_command('JSON.SET', 'doc1', '$', json.dumps(jdata))) # res = r.execute_command('MEMORY', 'USAGE', 'doc1') # r.assertEqual(res, 211) @@ -843,7 +843,7 @@ def testDebugCommand(env): # Test multi # json.get a $..a ==> "[{},[],\"str\",42,1.2,false,null]" res = r.execute_command('JSON.DEBUG', 'MEMORY', 'doc1', '$..a') - r.assertEqual(res, [8, 8, 11, 8, 24, 8, 8]) + r.assertEqual(res, [8, 8, 8, 8, 16, 8, 8]) # Test single res = r.execute_command('JSON.DEBUG', 'MEMORY', 'doc1', '$.nested2.a') @@ -855,7 +855,7 @@ def testDebugCommand(env): # Test missing path (defaults to root) res = r.execute_command('JSON.DEBUG', 'MEMORY', 'doc1') - r.assertEqual(res, 1187) + r.assertEqual(res, 1080) # Test missing subcommand r.expect('JSON.DEBUG', 'non_existing_doc', '$..a').raiseError() @@ -1071,7 +1071,7 @@ def testErrorMessage(env): json.arrpop doc1 .bzzz 1 (error) ERR key 'bzzz' does not exist at level 0 in path json.arrpop doc1zzz .b 1 - (error) WRONGTYPE Operation against a key holding the wrong kind of value + (error) WRONGTYPE Operation against a key holding the wrong kind of value """ # ARRINDEX @@ -1268,11 +1268,11 @@ def testErrorMessage(env): """ Legacy 1.0.8: json.set doc1 . 
'{"a":[0, 1, 2, 3, 4, 5], "b":{"x":100}}' OK - json.strlen doc1 .b + json.strlen doc1 .b (error) ERR wrong type of path value - expected string but found object - json.strlen doc1 .bzzz + json.strlen doc1 .bzzz (error) ERR key 'bzzz' does not exist at level 0 in path - json.strlen doc1zzz .b + json.strlen doc1zzz .b (nil) """ @@ -1344,7 +1344,7 @@ def testErrorMessage(env): def testFilterDup_issue667(env): """Test issue #667 """ r = env - + r.assertOk(r.execute_command('JSON.SET', 'test', '$', diff --git a/tests/pytest/test_resp3.py b/tests/pytest/test_resp3.py index 8bc875af6..d41530639 100644 --- a/tests/pytest/test_resp3.py +++ b/tests/pytest/test_resp3.py @@ -304,14 +304,14 @@ def test_resp_json_mget(self): r = self.env r.skipOnVersionSmaller('7.0') - r.assertTrue(r.execute_command('JSON.SET', 'test_resp3_1', '$', '{"a":1, "b":{"f":"g"}, "c":3}')) - r.assertTrue(r.execute_command('JSON.SET', 'test_resp3_2', '$', '{"a":5, "b":[true, 3, null], "d":7}')) + r.assertTrue(r.execute_command('JSON.SET', '{test}resp3_1', '$', '{"a":1, "b":{"f":"g"}, "c":3}')) + r.assertTrue(r.execute_command('JSON.SET', '{test}resp3_2', '$', '{"a":5, "b":[true, 3, null], "d":7}')) # Test JSON.MGET RESP3 with default FORMAT STRING - r.assertEqual(list(map(lambda x:json.loads(x) if x else None, r.execute_command('JSON.MGET', 'test_resp3_1', 'test_resp3_2', '$.not'))), [[], []]) - r.assertEqual(list(map(lambda x:json.loads(x) if x else None, r.execute_command('JSON.MGET', 'test_resp3_1', 'test_resp3_2', 'test_not_JSON', '$.b'))), [[{'f': 'g'}], [[True, 3, None]], None]) - r.assertEqual(list(map(lambda x:json.loads(x), r.execute_command('JSON.MGET', 'test_resp3_1', 'test_resp3_2', '$'))), [[{'a': 1, 'b': {'f': 'g'}, 'c': 3}], [{'b': [True, 3, None], 'd': 7, 'a': 5}]]) - r.assertEqual(list(map(lambda x:json.loads(x), r.execute_command('JSON.MGET', 'test_resp3_1', 'test_resp3_2', '$..*'))), [[1, {'f': 'g'}, 3, 'g'], [5, [True, 3, None], 7, True, 3, None]]) + r.assertEqual(list(map(lambda 
x:json.loads(x) if x else None, r.execute_command('JSON.MGET', '{test}resp3_1', '{test}resp3_2', '$.not'))), [[], []]) + r.assertEqual(list(map(lambda x:json.loads(x) if x else None, r.execute_command('JSON.MGET', '{test}resp3_1', '{test}resp3_2', '{test}not_JSON', '$.b'))), [[{'f': 'g'}], [[True, 3, None]], None]) + r.assertEqual(list(map(lambda x:json.loads(x), r.execute_command('JSON.MGET', '{test}resp3_1', '{test}resp3_2', '$'))), [[{'a': 1, 'b': {'f': 'g'}, 'c': 3}], [{'b': [True, 3, None], 'd': 7, 'a': 5}]]) + r.assertEqual(list(map(lambda x:json.loads(x), r.execute_command('JSON.MGET', '{test}resp3_1', '{test}resp3_2', '$..*'))), [[1, {'f': 'g'}, 3, 'g'], [5, [True, 3, None], 7, True, 3, None]]) # Test different commands with RESP3 when default path is used def test_resp_default_path(self): @@ -321,20 +321,28 @@ def test_resp_default_path(self): # Test JSON.X commands on object type when default path is used r.assertTrue(r.execute_command('JSON.SET', 'test_resp3', '$', '{"a":[{"b":2},{"g":[1,2]},3]}')) r.assertEqual(r.execute_command('JSON.GET', 'test_resp3', 'FORMAT', 'EXPAND'), [[{"a":[{"b":2},{"g":[1,2]},3]}]]) - r.assertEqual(json.loads(r.execute_command('JSON.GET', 'test_resp3')), [{"a":[{"b":2},{"g":[1,2]},3]}]) - r.assertEqual(r.execute_command('JSON.OBJKEYS', 'test_resp3'), [['a']]) - r.assertEqual(r.execute_command('JSON.OBJLEN', 'test_resp3'), [1]) - r.assertEqual(r.execute_command('JSON.TYPE', 'test_resp3'), [['object']]) - r.assertEqual(r.execute_command('JSON.DEBUG', 'MEMORY', 'test_resp3'), [507]) + r.assertEqual(json.loads(r.execute_command('JSON.GET', 'test_resp3')), {"a":[{"b":2},{"g":[1,2]},3]}) + r.assertEqual(r.execute_command('JSON.OBJKEYS', 'test_resp3'), ['a']) + r.assertEqual(r.execute_command('JSON.OBJLEN', 'test_resp3'), 1) + r.assertEqual(r.execute_command('JSON.TYPE', 'test_resp3'), ['object']) + r.assertEqual(r.execute_command('JSON.DEBUG', 'MEMORY', 'test_resp3'), 464) r.assertEqual(r.execute_command('JSON.DEL', 'test_resp3'), 
1) # Test JSON.strX commands on object type when default path is used - r.assertTrue(r.execute_command('JSON.SET', 'test_resp3_str', '$', '"test_resp3_str"')) - r.assertEqual(r.execute_command('JSON.STRLEN', 'test_resp3_str'), [14]) + string = 'test_resp3_str' + length = len(string) + r.assertTrue(r.execute_command('JSON.SET', 'test_resp3_str', '$', fr'"{string}"')) + r.assertEqual(r.execute_command('JSON.STRLEN', 'test_resp3_str'), length) + string = '_append' + length = length + len(string) + r.assertTrue(r.execute_command('JSON.STRAPPEND', 'test_resp3_str', fr'"{string}"')) + r.assertEqual(r.execute_command('JSON.STRLEN', 'test_resp3_str'), length) # Test JSON.arrX commands on object type when default path is used r.assertTrue(r.execute_command('JSON.SET', 'test_resp3_arr', '$', '[true, 1, "dud"]')) - r.assertEqual(r.execute_command('JSON.ARRLEN', 'test_resp3_arr'), [3]) + r.assertEqual(r.execute_command('JSON.ARRLEN', 'test_resp3_arr'), 3) + r.assertEqual(r.executeCommand('JSON.ARRPOP', 'test_resp3_arr'), '"dud"') + r.assertEqual(r.execute_command('JSON.ARRLEN', 'test_resp3_arr'), 2) def test_fail_with_resp2(): r = Env(protocol=2) diff --git a/tests/pytest/test_short_read.py b/tests/pytest/test_short_read.py index 832c43d12..53bb96915 100644 --- a/tests/pytest/test_short_read.py +++ b/tests/pytest/test_short_read.py @@ -20,7 +20,7 @@ Defaults.no_log = True CREATE_INDICES_TARGET_DIR = '/tmp/test' -BASE_RDBS_URL = 'https://s3.amazonaws.com/redismodules/redisearch-oss/rdbs/' +BASE_RDBS_URL = 'https://dev.cto.redis.s3.amazonaws.com/RediSearch/rdbs/' SHORT_READ_BYTES_DELTA = int(os.getenv('SHORT_READ_BYTES_DELTA', '1')) OS = os.getenv('OS') @@ -49,7 +49,7 @@ def downloadFiles(target_dir): if not os.path.exists(path_dir): os.makedirs(path_dir) if not os.path.exists(path): - subprocess.call(['wget', '-q', BASE_RDBS_URL + f, '-O', path]) + subprocess.call(['wget', '--no-check-certificate', '-q', BASE_RDBS_URL + f, '-O', path]) _, ext = os.path.splitext(f) if ext == 
'.zip': if not unzip(path, path_dir): diff --git a/tests/pytest/tests.sh b/tests/pytest/tests.sh index 5fb31bfd8..7d5a31b13 100755 --- a/tests/pytest/tests.sh +++ b/tests/pytest/tests.sh @@ -10,9 +10,9 @@ READIES=$ROOT/deps/readies export PYTHONUNBUFFERED=1 -VALGRIND_REDIS_VER=7.2 -SAN_REDIS_VER=7.2-rc2 -SAN_REDIS_SUFFIX=7.2 +VALGRIND_REDIS_VER=7.4 +SAN_REDIS_VER=7.4 +SAN_REDIS_SUFFIX=7.4 # SAN_REDIS_VER=6.2 # SAN_REDIS_SUFFIX=6.2 @@ -23,9 +23,9 @@ cd $HERE help() { cat <<-'END' Run Python tests using RLTest - + [ARGVARS...] tests.sh [--help|help] [] - + Argument variables: MODULE=path Path to redisjson.so MODARGS=args RediSearch module arguments @@ -57,7 +57,7 @@ help() { COV=1 Run with coverage analysis VG=1 Run with Valgrind VG_LEAKS=0 Do not detect leaks - SAN=type Use LLVM sanitizer (type=address|memory|leak|thread) + SAN=type Use LLVM sanitizer (type=address|memory|leak|thread) BB=1 Enable Python debugger (break using BB() in tests) GDB=1 Enable interactive gdb debugging (in single-test mode) @@ -87,7 +87,7 @@ help() { END } -#---------------------------------------------------------------------------------------------- +#---------------------------------------------------------------------------------------------- traps() { local func="$1" @@ -120,7 +120,7 @@ stop() { traps 'stop' SIGINT -#---------------------------------------------------------------------------------------------- +#---------------------------------------------------------------------------------------------- setup_rltest() { if [[ $RLTEST == view ]]; then @@ -143,7 +143,7 @@ setup_rltest() { echo "PYTHONPATH=$PYTHONPATH" fi fi - + if [[ $RLTEST_VERBOSE == 1 ]]; then RLTEST_ARGS+=" -v" fi @@ -156,6 +156,7 @@ setup_rltest() { if [[ $RLTEST_CONSOLE == 1 ]]; then RLTEST_ARGS+=" -i" fi + RLTEST_ARGS+=" --enable-debug-command --enable-protected-configs" } #---------------------------------------------------------------------------------------------- @@ -177,7 +178,7 @@ setup_clang_sanitizer() { 
# for RLTest export SANITIZER="$SAN" export SHORT_READ_BYTES_DELTA=512 - + # --no-output-catch --exit-on-failure --check-exitcode RLTEST_SAN_ARGS="--sanitizer $SAN" @@ -185,6 +186,7 @@ setup_clang_sanitizer() { REDIS_SERVER=${REDIS_SERVER:-redis-server-asan-$SAN_REDIS_SUFFIX} if ! command -v $REDIS_SERVER > /dev/null; then echo "Building Redis $SAN_REDIS_VER for clang-asan ..." + runn sudo apt -qq update -y runn $READIES/bin/getredis --force -v $SAN_REDIS_VER --own-openssl --no-run \ --suffix asan-${SAN_REDIS_SUFFIX} --clang-asan --clang-san-blacklist $ignorelist fi @@ -401,7 +403,7 @@ run_tests() { fi [[ $RLEC == 1 ]] && export RLEC_CLUSTER=1 - + local E=0 if [[ $NOP != 1 ]]; then { $OP python3 -m RLTest @$rltest_config; (( E |= $? )); } || true @@ -415,7 +417,7 @@ run_tests() { echo "killing external redis-server: $XREDIS_PID" kill -TERM $XREDIS_PID fi - + if [[ -n $GITHUB_ACTIONS ]]; then echo "::endgroup::" fi @@ -551,7 +553,7 @@ if [[ $LIST == 1 ]]; then RLTEST_ARGS+=" --collect-only" fi -#---------------------------------------------------------------------------------------------- +#---------------------------------------------------------------------------------------------- if [[ $QUICK == 1 ]]; then GEN=${GEN:-1} @@ -665,7 +667,8 @@ fi if [[ -n $STATFILE ]]; then mkdir -p "$(dirname "$STATFILE")" if [[ -f $STATFILE ]]; then - (( E |= $(cat $STATFILE || echo 1) )) || true + VALUE=$(cat $STATFILE 2>/dev/null || echo 1) + (( E |= VALUE )) || true fi echo $E > $STATFILE fi