From 7b2234355b9434f9aca9339a04c8f295845339be Mon Sep 17 00:00:00 2001 From: Umar Hayat Date: Sat, 22 Feb 2025 03:20:38 +0900 Subject: [PATCH 1/7] Add support for PG17 (#2130) - A new node type is introduced for JSON support, that is JsonConstructorExpr - wrapper over FuncExpr/Aggref/WindowFunc for SQL/JSON constructors. - Added additional checks for JsonConstructorExpr expression node for which the walker would crash. - Removed palloc0fast function call (which is not available in PG17) --- src/backend/nodes/ag_nodes.c | 2 +- src/backend/parser/cypher_analyze.c | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/backend/nodes/ag_nodes.c b/src/backend/nodes/ag_nodes.c index e20670b2b..54bd27314 100644 --- a/src/backend/nodes/ag_nodes.c +++ b/src/backend/nodes/ag_nodes.c @@ -156,7 +156,7 @@ ExtensibleNode *_new_ag_node(Size size, ag_node_tag tag) { ExtensibleNode *n; - n = (ExtensibleNode *)palloc0fast(size); + n = (ExtensibleNode *)palloc0(size); n->type = T_ExtensibleNode; n->extnodename = node_names[tag]; diff --git a/src/backend/parser/cypher_analyze.c b/src/backend/parser/cypher_analyze.c index 128acd0fb..d293df8b0 100644 --- a/src/backend/parser/cypher_analyze.c +++ b/src/backend/parser/cypher_analyze.c @@ -174,6 +174,8 @@ static bool convert_cypher_walker(Node *node, ParseState *pstate) * OpExpr - expression node for an operator invocation * Const - constant value or expression node * BoolExpr - expression node for the basic Boolean operators AND, OR, NOT + * JsonConstructorExpr - wrapper over FuncExpr/Aggref/WindowFunc for + * SQL/JSON constructors * * These are a special case that needs to be ignored. 
* @@ -181,7 +183,8 @@ static bool convert_cypher_walker(Node *node, ParseState *pstate) if (IsA(funcexpr, SQLValueFunction) || IsA(funcexpr, CoerceViaIO) || IsA(funcexpr, Var) || IsA(funcexpr, OpExpr) - || IsA(funcexpr, Const) || IsA(funcexpr, BoolExpr)) + || IsA(funcexpr, Const) || IsA(funcexpr, BoolExpr) + || IsA(funcexpr, JsonConstructorExpr)) { return false; } @@ -346,6 +349,8 @@ static bool is_func_cypher(FuncExpr *funcexpr) * OpExpr - expression node for an operator invocation * Const - constant value or expression node * BoolExpr - expression node for the basic Boolean operators AND, OR, NOT + * JsonConstructorExpr - wrapper over FuncExpr/Aggref/WindowFunc for + * SQL/JSON constructors * * These are a special case that needs to be ignored. * @@ -353,7 +358,8 @@ static bool is_func_cypher(FuncExpr *funcexpr) if (IsA(funcexpr, SQLValueFunction) || IsA(funcexpr, CoerceViaIO) || IsA(funcexpr, Var) || IsA(funcexpr, OpExpr) - || IsA(funcexpr, Const) || IsA(funcexpr, BoolExpr)) + || IsA(funcexpr, Const) || IsA(funcexpr, BoolExpr) + || IsA(funcexpr, JsonConstructorExpr)) { return false; } From 43dcfa57f19a689ef696fad7d3d9b88401f7b2eb Mon Sep 17 00:00:00 2001 From: Muhammad Taha Naveed Date: Tue, 4 Mar 2025 20:25:25 +0500 Subject: [PATCH 2/7] Update CI, README and repo settings for PG17 (#2156) - Currently, all workflows are targeting the `PG17_prepare` branch, which will be changed to `PG17` once the branch is renamed. 
- Updated all the github workflows - Updated the README - Updated repo settings - Updated the Dockerfiles --- .asf.yaml | 4 +++ .github/labeler.yml | 3 ++ .github/workflows/go-driver.yml | 4 +-- .github/workflows/installcheck.yaml | 41 ++++++++++++++++------------ .github/workflows/jdbc-driver.yaml | 4 +-- .github/workflows/nodejs-driver.yaml | 4 +-- .github/workflows/python-driver.yaml | 4 +-- README.md | 10 +++---- docker/Dockerfile | 12 ++++---- docker/Dockerfile.dev | 4 +-- drivers/docker-compose.yml | 2 +- 11 files changed, 52 insertions(+), 40 deletions(-) diff --git a/.asf.yaml b/.asf.yaml index 049387b12..75419f24f 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -49,6 +49,10 @@ github: required_pull_request_reviews: required_approving_review_count: 2 + PG17: + required_pull_request_reviews: + required_approving_review_count: 2 + PG16: required_pull_request_reviews: required_approving_review_count: 2 diff --git a/.github/labeler.yml b/.github/labeler.yml index 92ab6db8e..6dfd5f530 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -13,5 +13,8 @@ PG14: PG15: - base-branch: 'PG15' +PG17: +- base-branch: 'PG17' + master: - base-branch: 'master' \ No newline at end of file diff --git a/.github/workflows/go-driver.yml b/.github/workflows/go-driver.yml index 5b6d15030..ea0092b6e 100644 --- a/.github/workflows/go-driver.yml +++ b/.github/workflows/go-driver.yml @@ -2,10 +2,10 @@ name: Go Driver Tests on: push: - branches: [ "master" ] + branches: [ "PG17_prepare" ] pull_request: - branches: [ "master" ] + branches: [ "PG17_prepare" ] jobs: build: diff --git a/.github/workflows/installcheck.yaml b/.github/workflows/installcheck.yaml index dddefa48a..276b2709e 100644 --- a/.github/workflows/installcheck.yaml +++ b/.github/workflows/installcheck.yaml @@ -2,57 +2,62 @@ name: Build / Regression on: push: - branches: [ "master" ] + branches: [ "PG17_prepare" ] pull_request: - branches: [ "master" ] + branches: [ "PG17_prepare" ] jobs: build: runs-on: ubuntu-latest 
steps: - - name: Get latest commit id of PostgreSQL 16 + - name: Get latest commit id of PostgreSQL 17 run: | - echo "PG_COMMIT_HASH=$(git ls-remote git://git.postgresql.org/git/postgresql.git refs/heads/REL_16_STABLE | awk '{print $1}')" >> $GITHUB_ENV + echo "PG_COMMIT_HASH=$(git ls-remote git://git.postgresql.org/git/postgresql.git refs/heads/REL_17_STABLE | awk '{print $1}')" >> $GITHUB_ENV - - name: Cache PostgreSQL 16 + - name: Cache PostgreSQL 17 uses: actions/cache@v3 - id: pg16cache + id: pg17cache with: - path: ~/pg16 - key: ${{ runner.os }}-v1-pg16-${{ env.PG_COMMIT_HASH }} + path: ~/pg17 + key: ${{ runner.os }}-v1-pg17-${{ env.PG_COMMIT_HASH }} - - name: Install PostgreSQL 16 and some extensions - if: steps.pg16cache.outputs.cache-hit != 'true' + - name: Install dependencies run: | - git clone --depth 1 --branch REL_16_STABLE git://git.postgresql.org/git/postgresql.git ~/pg16source - cd ~/pg16source - ./configure --prefix=$HOME/pg16 CFLAGS="-std=gnu99 -ggdb -O0" --enable-cassert + sudo apt-get update + sudo apt-get install -y build-essential libreadline-dev zlib1g-dev flex bison + + - name: Install PostgreSQL 17 and some extensions + if: steps.pg17cache.outputs.cache-hit != 'true' + run: | + git clone --depth 1 --branch REL_17_STABLE git://git.postgresql.org/git/postgresql.git ~/pg17source + cd ~/pg17source + ./configure --prefix=$HOME/pg17 CFLAGS="-std=gnu99 -ggdb -O0" --enable-cassert make install -j$(nproc) > /dev/null cd contrib cd fuzzystrmatch - make PG_CONFIG=$HOME/pg16/bin/pg_config install -j$(nproc) > /dev/null + make PG_CONFIG=$HOME/pg17/bin/pg_config install -j$(nproc) > /dev/null cd ../pg_trgm - make PG_CONFIG=$HOME/pg16/bin/pg_config install -j$(nproc) > /dev/null + make PG_CONFIG=$HOME/pg17/bin/pg_config install -j$(nproc) > /dev/null - uses: actions/checkout@v3 - name: Build AGE id: build run: | - make PG_CONFIG=$HOME/pg16/bin/pg_config install -j$(nproc) + make PG_CONFIG=$HOME/pg17/bin/pg_config install -j$(nproc) - name: Pull and build 
pgvector id: pgvector run: | git clone https://github.com/pgvector/pgvector.git cd pgvector - make PG_CONFIG=$HOME/pg16/bin/pg_config install -j$(nproc) > /dev/null + make PG_CONFIG=$HOME/pg17/bin/pg_config install -j$(nproc) > /dev/null - name: Regression tests id: regression_tests run: | - make PG_CONFIG=$HOME/pg16/bin/pg_config installcheck EXTRA_TESTS="pgvector fuzzystrmatch pg_trgm" + make PG_CONFIG=$HOME/pg17/bin/pg_config installcheck EXTRA_TESTS="pgvector fuzzystrmatch pg_trgm" continue-on-error: true - name: Dump regression test errors diff --git a/.github/workflows/jdbc-driver.yaml b/.github/workflows/jdbc-driver.yaml index 29b368438..2b074b855 100644 --- a/.github/workflows/jdbc-driver.yaml +++ b/.github/workflows/jdbc-driver.yaml @@ -2,10 +2,10 @@ name: JDBC Driver Tests on: push: - branches: [ "master" ] + branches: [ "PG17_prepare" ] pull_request: - branches: [ "master" ] + branches: [ "PG17_prepare" ] jobs: build: diff --git a/.github/workflows/nodejs-driver.yaml b/.github/workflows/nodejs-driver.yaml index 3d9e07023..8e8d2af67 100644 --- a/.github/workflows/nodejs-driver.yaml +++ b/.github/workflows/nodejs-driver.yaml @@ -2,10 +2,10 @@ name: Nodejs Driver Tests on: push: - branches: [ "master" ] + branches: [ "PG17_prepare" ] pull_request: - branches: [ "master" ] + branches: [ "PG17_prepare" ] jobs: build: diff --git a/.github/workflows/python-driver.yaml b/.github/workflows/python-driver.yaml index 099b5c871..03f1ca84a 100644 --- a/.github/workflows/python-driver.yaml +++ b/.github/workflows/python-driver.yaml @@ -2,10 +2,10 @@ name: Python Driver Tests on: push: - branches: [ "master" ] + branches: [ "PG17_prepare" ] pull_request: - branches: [ "master" ] + branches: [ "PG17_prepare" ] jobs: build: diff --git a/README.md b/README.md index a89c6f65a..613a6643c 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,8 @@   - - + +   @@ -125,7 +125,7 @@ Apache AGE is intended to be simple to install and run. 
It can be installed with  Install PostgreSQL -You will need to install an AGE compatible version of Postgres, for now AGE supports Postgres 11, 12, 13, 14, 15 & 16. Supporting the latest versions is on AGE roadmap. +You will need to install an AGE compatible version of Postgres, for now AGE supports Postgres 11, 12, 13, 14, 15, 16 & 17. Supporting the latest versions is on AGE roadmap.

 Installation via Package Manager @@ -143,7 +143,7 @@ sudo apt install postgresql  Installation From Source Code

-You can
download the Postgres source code and install your own instance of Postgres. You can read instructions on how to install from source code for different versions on the official Postgres Website. +You can download the Postgres source code and install your own instance of Postgres. You can read instructions on how to install from source code for different versions on the official Postgres Website. @@ -152,7 +152,7 @@ You can download the Postgres Clone the github repository or download the download an official release. -Run the pg_config utility and check the version of PostgreSQL. Currently, only PostgreSQL versions 11, 12, 13, 14, 15 & 16 are supported. If you have any other version of Postgres, you will need to install PostgreSQL version 11, 12, 13, 14, 15, or 16. +Run the pg_config utility and check the version of PostgreSQL. Currently, only PostgreSQL versions 11, 12, 13, 14, 15, 16 & 17 are supported. If you have any other version of Postgres, you will need to install PostgreSQL version 11, 12, 13, 14, 15, 16 & 17.
```bash diff --git a/docker/Dockerfile b/docker/Dockerfile index 336070589..91c626d63 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -17,14 +17,14 @@ # # Build stage: Install necessary development tools for compilation and installation -FROM postgres:16 AS build +FROM postgres:17 AS build RUN apt-get update \ && apt-get install -y --no-install-recommends --no-install-suggests \ bison \ build-essential \ flex \ - postgresql-server-dev-16 + postgresql-server-dev-17 COPY . /age @@ -34,7 +34,7 @@ RUN make && make install # Final stage: Create a final image by copying the files created in the build stage -FROM postgres:16 +FROM postgres:17 RUN apt-get update \ && apt-get install -y --no-install-recommends --no-install-suggests \ @@ -48,9 +48,9 @@ ENV LANG=en_US.UTF-8 ENV LC_COLLATE=en_US.UTF-8 ENV LC_CTYPE=en_US.UTF-8 -COPY --from=build /usr/lib/postgresql/16/lib/age.so /usr/lib/postgresql/16/lib/ -COPY --from=build /usr/share/postgresql/16/extension/age--1.5.0.sql /usr/share/postgresql/16/extension/ -COPY --from=build /usr/share/postgresql/16/extension/age.control /usr/share/postgresql/16/extension/ +COPY --from=build /usr/lib/postgresql/17/lib/age.so /usr/lib/postgresql/17/lib/ +COPY --from=build /usr/share/postgresql/17/extension/age--1.5.0.sql /usr/share/postgresql/17/extension/ +COPY --from=build /usr/share/postgresql/17/extension/age.control /usr/share/postgresql/17/extension/ COPY docker/docker-entrypoint-initdb.d/00-create-extension-age.sql /docker-entrypoint-initdb.d/00-create-extension-age.sql CMD ["postgres", "-c", "shared_preload_libraries=age"] diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev index bdf0c40d0..48b2db3ed 100644 --- a/docker/Dockerfile.dev +++ b/docker/Dockerfile.dev @@ -17,14 +17,14 @@ # -FROM postgres:16 +FROM postgres:17 RUN apt-get update RUN apt-get install --assume-yes --no-install-recommends --no-install-suggests \ bison \ build-essential \ flex \ - postgresql-server-dev-16 \ + postgresql-server-dev-17 \ locales ENV 
LANG=en_US.UTF-8 diff --git a/drivers/docker-compose.yml b/drivers/docker-compose.yml index 9ec072db5..3789fe4a9 100644 --- a/drivers/docker-compose.yml +++ b/drivers/docker-compose.yml @@ -1,7 +1,7 @@ version: "3.3" services: db: - image: apache/age:dev_snapshot_master + image: apache/age:dev_snapshot_PG17_prepare environment: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=agens From ad425f4857623490bc8114f372595ad97795de8a Mon Sep 17 00:00:00 2001 From: Muhammad Taha Naveed Date: Mon, 12 May 2025 19:40:33 +0500 Subject: [PATCH 3/7] Remove stale bot and update .asf.yaml settings (#2171) - Removed stale bot. (https://lists.apache.org/thread/qh4h2z6hsjy2v7wg8mwfnl6cbjp28y08) - Decrease required PR approvals by one. (https://lists.apache.org/thread/kmz155t6k0h3b26fjpz36924zthqjlpm) - Fixed a warning reported by apache infra i.e. "An error occurred while processing the github feature in .asf.yaml: GitHub discussions can only be enabled if a mailing list target exists for it." --- .asf.yaml | 15 ++++++++------- .github/workflows/stale.yaml | 26 -------------------------- 2 files changed, 8 insertions(+), 33 deletions(-) delete mode 100644 .github/workflows/stale.yaml diff --git a/.asf.yaml b/.asf.yaml index 75419f24f..da422d3bd 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -19,6 +19,7 @@ notifications: commits: commits@age.apache.org pullrequests: commits@age.apache.org + discussions: dev@age.apache.org github: description: "Graph database optimized for fast analysis and real-time data processing. 
@@ -47,7 +48,7 @@ github: protected_branches: master: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 PG17: required_pull_request_reviews: @@ -55,24 +56,24 @@ github: PG16: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 PG15: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 PG14: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 PG13: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 PG12: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 PG11: required_pull_request_reviews: - required_approving_review_count: 2 + required_approving_review_count: 1 diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml deleted file mode 100644 index cd3bcd16e..000000000 --- a/.github/workflows/stale.yaml +++ /dev/null @@ -1,26 +0,0 @@ -name: 'Close stale issues and PRs' -on: - schedule: - - cron: '0 0 * * *' - -jobs: - stale: - runs-on: ubuntu-latest - steps: - - uses: actions/stale@v9 - with: - stale-issue-message: 'This issue is stale because it has been open 60 days with no activity. Remove "Abondoned" label or comment or this will be closed in 14 days.' - close-issue-message: 'This issue was closed because it has been stalled for further 14 days with no activity.' - stale-pr-message: 'This PR is stale because it has been open 60 days with no activity. Remove "Abondoned" label or comment or this will be closed in 14 days.' 
- close-pr-message: 'This PR was closed because it has been stalled for further 14 days with no activity' - stale-issue-label: Stale - exempt-issue-labels: 'override-stale' - stale-pr-label: Stale - exempt-pr-labels: 'override-stale' - days-before-issue-stale: 60 - days-before-issue-close: 14 - days-before-pr-stale: 60 - days-before-pr-close: 14 - # only stale issue/PR created after the 1st Jan 2023: - start-date: '2023-01-01T00:00:00Z' - operations-per-run: 500 \ No newline at end of file From 1d957808c331e68b188e464dc41873fa2a605a70 Mon Sep 17 00:00:00 2001 From: John Gemignani Date: Fri, 6 Jun 2025 20:32:15 -0700 Subject: [PATCH 4/7] Update labeler.yml Adjust workflow/labeler.yml to add permissions. --- .github/workflows/labeler.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 4234e3582..5c302618e 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -5,8 +5,9 @@ on: jobs: triage: permissions: - contents: read + contents: write pull-requests: write + issues: write runs-on: ubuntu-latest steps: - name: Apply branch labels From 1d87379741a37e33990bda705d436a3db6c054de Mon Sep 17 00:00:00 2001 From: John Gemignani Date: Sun, 8 Jun 2025 00:42:56 -0700 Subject: [PATCH 5/7] Adjust CI for PG17 after rename from PG17_prepare (#2182) Adjusted the following CI files (workflows) for PG17, they originally pointed to PG17_prepare - modified: .github/workflows/go-driver.yml modified: .github/workflows/installcheck.yaml modified: .github/workflows/jdbc-driver.yaml modified: .github/workflows/nodejs-driver.yaml modified: .github/workflows/python-driver.yaml modified: drivers/docker-compose.yml modified: .github/labeler.yml --- .github/labeler.yml | 5 ++++- .github/workflows/go-driver.yml | 4 ++-- .github/workflows/installcheck.yaml | 8 ++++---- .github/workflows/jdbc-driver.yaml | 4 ++-- .github/workflows/labeler.yml | 3 +++ .github/workflows/nodejs-driver.yaml 
| 4 ++-- .github/workflows/python-driver.yaml | 4 ++-- drivers/docker-compose.yml | 2 +- 8 files changed, 20 insertions(+), 14 deletions(-) diff --git a/.github/labeler.yml b/.github/labeler.yml index 6dfd5f530..6baa297c5 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -13,8 +13,11 @@ PG14: PG15: - base-branch: 'PG15' +PG16: +- base-branch: 'PG16' + PG17: - base-branch: 'PG17' master: -- base-branch: 'master' \ No newline at end of file +- base-branch: 'master' diff --git a/.github/workflows/go-driver.yml b/.github/workflows/go-driver.yml index ea0092b6e..64044f91e 100644 --- a/.github/workflows/go-driver.yml +++ b/.github/workflows/go-driver.yml @@ -2,10 +2,10 @@ name: Go Driver Tests on: push: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] pull_request: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] jobs: build: diff --git a/.github/workflows/installcheck.yaml b/.github/workflows/installcheck.yaml index 276b2709e..f2b69a02f 100644 --- a/.github/workflows/installcheck.yaml +++ b/.github/workflows/installcheck.yaml @@ -2,9 +2,9 @@ name: Build / Regression on: push: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] pull_request: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] jobs: build: @@ -13,7 +13,7 @@ jobs: steps: - name: Get latest commit id of PostgreSQL 17 run: | - echo "PG_COMMIT_HASH=$(git ls-remote git://git.postgresql.org/git/postgresql.git refs/heads/REL_17_STABLE | awk '{print $1}')" >> $GITHUB_ENV + echo "PG_COMMIT_HASH=$(git ls-remote https://git.postgresql.org/git/postgresql.git refs/heads/REL_17_STABLE | awk '{print $1}')" >> $GITHUB_ENV - name: Cache PostgreSQL 17 uses: actions/cache@v3 @@ -30,7 +30,7 @@ jobs: - name: Install PostgreSQL 17 and some extensions if: steps.pg17cache.outputs.cache-hit != 'true' run: | - git clone --depth 1 --branch REL_17_STABLE git://git.postgresql.org/git/postgresql.git ~/pg17source + git clone --depth 1 --branch REL_17_STABLE https://git.postgresql.org/git/postgresql.git 
~/pg17source cd ~/pg17source ./configure --prefix=$HOME/pg17 CFLAGS="-std=gnu99 -ggdb -O0" --enable-cassert make install -j$(nproc) > /dev/null diff --git a/.github/workflows/jdbc-driver.yaml b/.github/workflows/jdbc-driver.yaml index 2b074b855..54ca612f1 100644 --- a/.github/workflows/jdbc-driver.yaml +++ b/.github/workflows/jdbc-driver.yaml @@ -2,10 +2,10 @@ name: JDBC Driver Tests on: push: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] pull_request: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] jobs: build: diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 5c302618e..d5fc8c835 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -10,6 +10,9 @@ jobs: issues: write runs-on: ubuntu-latest steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Apply branch labels uses: actions/labeler@v5.0.0 diff --git a/.github/workflows/nodejs-driver.yaml b/.github/workflows/nodejs-driver.yaml index 8e8d2af67..156cb8518 100644 --- a/.github/workflows/nodejs-driver.yaml +++ b/.github/workflows/nodejs-driver.yaml @@ -2,10 +2,10 @@ name: Nodejs Driver Tests on: push: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] pull_request: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] jobs: build: diff --git a/.github/workflows/python-driver.yaml b/.github/workflows/python-driver.yaml index 03f1ca84a..70488e738 100644 --- a/.github/workflows/python-driver.yaml +++ b/.github/workflows/python-driver.yaml @@ -2,10 +2,10 @@ name: Python Driver Tests on: push: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] pull_request: - branches: [ "PG17_prepare" ] + branches: [ "PG17" ] jobs: build: diff --git a/drivers/docker-compose.yml b/drivers/docker-compose.yml index 3789fe4a9..c83d26f47 100644 --- a/drivers/docker-compose.yml +++ b/drivers/docker-compose.yml @@ -1,7 +1,7 @@ version: "3.3" services: db: - image: apache/age:dev_snapshot_PG17_prepare + image: apache/age:dev_snapshot_PG17 environment: - 
POSTGRES_USER=postgres - POSTGRES_PASSWORD=agens From a66436a2f362ada807cb4c26a297740e895fcaf5 Mon Sep 17 00:00:00 2001 From: sanchayanghosh Date: Sun, 20 Apr 2025 18:55:51 +0000 Subject: [PATCH 6/7] Fixes #2164: Adds support for PostgreSQL 18 1. In PostgreSQL 18, TupleDescAttr is now used to access ScanTuple->tts_tupleDescriptor->attrs since attrs is now replaced by compact_attrs to save on memory in PostgreSQL 18. In PostgreSQL 16, 17 and 18 we have a function TupleDescAttr which allows to acess pg_attrs 2. In PostgreSQL palloc0fast is now merged into palloc0. 3. Few funcitons now reuiqre executor/executor.h and are no longer present in the other includes. --- Makefile | 3 +- regress/expected/cypher_path_delete.out | 87 +++++++++++++++++++++++++ regress/sql/cypher_path_delete.sql | 52 +++++++++++++++ src/backend/catalog/ag_label.c | 1 + src/backend/executor/cypher_create.c | 2 + src/backend/executor/cypher_delete.c | 11 +++- src/backend/executor/cypher_merge.c | 1 + src/backend/executor/cypher_set.c | 15 ++++- src/backend/executor/cypher_utils.c | 1 + src/backend/nodes/ag_nodes.c | 1 + src/backend/parser/cypher_clause.c | 9 +++ src/backend/utils/adt/agtype_parser.c | 1 + 12 files changed, 180 insertions(+), 4 deletions(-) create mode 100644 regress/expected/cypher_path_delete.out create mode 100644 regress/sql/cypher_path_delete.sql diff --git a/Makefile b/Makefile index c0a847830..04e0f23cb 100644 --- a/Makefile +++ b/Makefile @@ -112,7 +112,8 @@ REGRESS = scan \ name_validation \ jsonb_operators \ list_comprehension \ - map_projection + map_projection \ + cypher_path_delete ifneq ($(EXTRA_TESTS),) REGRESS += $(EXTRA_TESTS) diff --git a/regress/expected/cypher_path_delete.out b/regress/expected/cypher_path_delete.out new file mode 100644 index 000000000..e1e03309b --- /dev/null +++ b/regress/expected/cypher_path_delete.out @@ -0,0 +1,87 @@ +LOAD 'age'; +SET search_path TO ag_catalog; +SELECT create_graph('cypher_path'); +NOTICE: graph "cypher_path" has been 
created + create_graph +-------------- + +(1 row) + +-- Create vertex +SELECT * FROM cypher('cypher_path', $$ + CREATE (:label_name_1 {i: 0}) +$$) as (a agtype); + a +--- +(0 rows) + +-- Create a path to test our create, set and delete on. +SELECT * +FROM cypher('cypher_path', $$ + CREATE p = (andres {name:'Andres'})-[:WORKS_AT]->(neo)<-[:WORKS_AT]-(michael {name:'Michael'})-[:WORKS_WITH]->(jordan {name: 'Jordan'}) + RETURN p +$$) as (p agtype); + p +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + [{"id": 281474976710657, "label": "", "properties": {"name": "Andres"}}::vertex, {"id": 1125899906842626, "label": "WORKS_AT", "end_id": 281474976710658, "start_id": 281474976710657, "properties": {}}::edge, {"id": 281474976710658, "label": "", "properties": {}}::vertex, {"id": 1125899906842625, "label": "WORKS_AT", "end_id": 281474976710658, "start_id": 281474976710659, "properties": {}}::edge, {"id": 281474976710659, "label": "", "properties": {"name": "Michael"}}::vertex, {"id": 1407374883553281, "label": "WORKS_WITH", "end_id": 281474976710660, "start_id": 281474976710659, "properties": {}}::edge, {"id": 281474976710660, "label": "", "properties": {"name": "Jordan"}}::vertex]::path +(1 row) + +-- Now delete one of the relationships nodes and the output should be return unchanged. 
+SELECT * +FROM cypher('cypher_path', $$ + MATCH p = ()-[]->()<-[]-()-[]->(j) + DETACH DELETE j + RETURN p +$$) AS (a agtype); + a +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + [{"id": 281474976710657, "label": "", "properties": {"name": "Andres"}}::vertex, {"id": 1125899906842626, "label": "WORKS_AT", "end_id": 281474976710658, "start_id": 281474976710657, "properties": {}}::edge, {"id": 281474976710658, "label": "", "properties": {}}::vertex, {"id": 1125899906842625, "label": "WORKS_AT", "end_id": 281474976710658, "start_id": 281474976710659, "properties": {}}::edge, {"id": 281474976710659, "label": "", "properties": {"name": "Michael"}}::vertex, {"id": 1407374883553281, "label": "WORKS_WITH", "end_id": 281474976710660, "start_id": 281474976710659, "properties": {}}::edge, {"id": 281474976710660, "label": "", "properties": {"name": "Jordan"}}::vertex]::path +(1 row) + +-- Now delete one of the edges and the path should be updated but output is still unchanged +SELECT * +FROM cypher('cypher_path', $$ + MATCH p = (andres {name: 'Andres'})-[r:WORKS_AT]->(neo)<-[g:WORKS_AT]-(michael {name: 'Michael'}) + DELETE g + RETURN p +$$) as (a agtype); + a 
+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + [{"id": 281474976710657, "label": "", "properties": {"name": "Andres"}}::vertex, {"id": 1125899906842626, "label": "WORKS_AT", "end_id": 281474976710658, "start_id": 281474976710657, "properties": {}}::edge, {"id": 281474976710658, "label": "", "properties": {}}::vertex, {"id": 1125899906842625, "label": "WORKS_AT", "end_id": 281474976710658, "start_id": 281474976710659, "properties": {}}::edge, {"id": 281474976710659, "label": "", "properties": {"name": "Michael"}}::vertex]::path +(1 row) + +-- Create a path to test our create, set and delete on. 
+SELECT * +FROM cypher('cypher_path', $$ + CREATE p = (andres {name:'Andres'})-[:CHILLS_AT]->(neo)<-[:CHILLS_SOME_MORE]-(michael {name:'Michael'}) + RETURN p +$$) as (p agtype); + p +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + [{"id": 281474976710661, "label": "", "properties": {"name": "Andres"}}::vertex, {"id": 1688849860263937, "label": "CHILLS_AT", "end_id": 281474976710662, "start_id": 281474976710661, "properties": {}}::edge, {"id": 281474976710662, "label": "", "properties": {}}::vertex, {"id": 1970324836974593, "label": "CHILLS_SOME_MORE", "end_id": 281474976710662, "start_id": 281474976710663, "properties": {}}::edge, {"id": 281474976710663, "label": "", "properties": {"name": "Michael"}}::vertex]::path +(1 row) + +-- Attempt to delete anything other than vertex or edge. 
This should fail +SELECT * +FROM cypher('cypher_path', $$ + MATCH p = (andres {name: 'Andres'})-[]->() + DELETE p + RETURN p +$$) as (a agtype); +ERROR: DELETE clause can only delete vertices and edges +-- Cleanup +SELECT drop_graph('cypher_path', true); +NOTICE: drop cascades to 7 other objects +DETAIL: drop cascades to table cypher_path._ag_label_vertex +drop cascades to table cypher_path._ag_label_edge +drop cascades to table cypher_path.label_name_1 +drop cascades to table cypher_path."WORKS_AT" +drop cascades to table cypher_path."WORKS_WITH" +drop cascades to table cypher_path."CHILLS_AT" +drop cascades to table cypher_path."CHILLS_SOME_MORE" +NOTICE: graph "cypher_path" has been dropped + drop_graph +------------ + +(1 row) + diff --git a/regress/sql/cypher_path_delete.sql b/regress/sql/cypher_path_delete.sql new file mode 100644 index 000000000..97cdd4f7a --- /dev/null +++ b/regress/sql/cypher_path_delete.sql @@ -0,0 +1,52 @@ + +LOAD 'age'; + +SET search_path TO ag_catalog; + +SELECT create_graph('cypher_path'); + +-- Create vertex +SELECT * FROM cypher('cypher_path', $$ + CREATE (:label_name_1 {i: 0}) +$$) as (a agtype); + +-- Create a path to test our create, set and delete on. +SELECT * +FROM cypher('cypher_path', $$ + CREATE p = (andres {name:'Andres'})-[:WORKS_AT]->(neo)<-[:WORKS_AT]-(michael {name:'Michael'})-[:WORKS_WITH]->(jordan {name: 'Jordan'}) + RETURN p +$$) as (p agtype); + +-- Now delete one of the relationships nodes and the output should be return unchanged. +SELECT * +FROM cypher('cypher_path', $$ + MATCH p = ()-[]->()<-[]-()-[]->(j) + DETACH DELETE j + RETURN p +$$) AS (a agtype); + +-- Now delete one of the edges and the path should be updated but output is still unchanged +SELECT * +FROM cypher('cypher_path', $$ + MATCH p = (andres {name: 'Andres'})-[r:WORKS_AT]->(neo)<-[g:WORKS_AT]-(michael {name: 'Michael'}) + DELETE g + RETURN p +$$) as (a agtype); + +-- Create a path to test our create, set and delete on. 
+SELECT * +FROM cypher('cypher_path', $$ + CREATE p = (andres {name:'Andres'})-[:CHILLS_AT]->(neo)<-[:CHILLS_SOME_MORE]-(michael {name:'Michael'}) + RETURN p +$$) as (p agtype); + +-- Attempt to delete anything other than vertex or edge. This should fail +SELECT * +FROM cypher('cypher_path', $$ + MATCH p = (andres {name: 'Andres'})-[]->() + DELETE p + RETURN p +$$) as (a agtype); + +-- Cleanup +SELECT drop_graph('cypher_path', true); diff --git a/src/backend/catalog/ag_label.c b/src/backend/catalog/ag_label.c index 3c242a000..89a21717a 100644 --- a/src/backend/catalog/ag_label.c +++ b/src/backend/catalog/ag_label.c @@ -29,6 +29,7 @@ #include "catalog/ag_label.h" #include "commands/label_commands.h" #include "executor/cypher_utils.h" +#include "executor/executor.h" #include "utils/ag_cache.h" /* diff --git a/src/backend/executor/cypher_create.c b/src/backend/executor/cypher_create.c index 2091ea29c..254c3aee7 100644 --- a/src/backend/executor/cypher_create.c +++ b/src/backend/executor/cypher_create.c @@ -20,8 +20,10 @@ #include "postgres.h" #include "catalog/ag_label.h" +#include "catalog/indexing.h" #include "executor/cypher_executor.h" #include "executor/cypher_utils.h" +#include "executor/executor.h" static void begin_cypher_create(CustomScanState *node, EState *estate, int eflags); diff --git a/src/backend/executor/cypher_delete.c b/src/backend/executor/cypher_delete.c index 6bb869833..72a6fe254 100644 --- a/src/backend/executor/cypher_delete.c +++ b/src/backend/executor/cypher_delete.c @@ -25,6 +25,8 @@ #include "catalog/ag_label.h" #include "executor/cypher_executor.h" #include "executor/cypher_utils.h" +#include "executor/executor.h" +#include "pg_config.h" static void begin_cypher_delete(CustomScanState *node, EState *estate, int eflags); @@ -55,6 +57,11 @@ const CustomExecMethods cypher_delete_exec_methods = {DELETE_SCAN_STATE_NAME, NULL, NULL}; +#if PG_VERSION_NUM >= 18000 +#define TUPLE_DESC_ATTR_TYPE_ID TupleDescAttr(tupleDescriptor, entity_position - 
1)->atttypid +#else +#define TUPLE_DESC_ATTR_TYPE_ID tupleDescriptor->attrs[entity_position - 1]->atttypid +#endif /* * Initialization at the beginning of execution. Setup the child node, * setup its scan tuple slot and projection info, expression context, @@ -257,13 +264,15 @@ static agtype_value *extract_entity(CustomScanState *node, tupleDescriptor = scanTupleSlot->tts_tupleDescriptor; /* type checking, make sure the entity is an agtype vertex or edge */ - if (tupleDescriptor->attrs[entity_position -1].atttypid != AGTYPEOID) + // First make sure the entity is an agtype. this is set per extension. Also, from postgresql 16 onwards use TupleDescAttr so that it works on both attrs and compact_attrs. + if (TUPLE_DESC_ATTR_TYPE_ID != AGTYPEOID) ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("DELETE clause can only delete agtype"))); original_entity = DATUM_GET_AGTYPE_P(scanTupleSlot->tts_values[entity_position - 1]); original_entity_value = get_ith_agtype_value_from_container(&original_entity->root, 0); + // We are not deleting anything other than vertices and edges. 
if (original_entity_value->type != AGTV_VERTEX && original_entity_value->type != AGTV_EDGE) ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("DELETE clause can only delete vertices and edges"))); diff --git a/src/backend/executor/cypher_merge.c b/src/backend/executor/cypher_merge.c index 9136825ab..2013e225a 100644 --- a/src/backend/executor/cypher_merge.c +++ b/src/backend/executor/cypher_merge.c @@ -22,6 +22,7 @@ #include "catalog/ag_label.h" #include "executor/cypher_executor.h" #include "executor/cypher_utils.h" +#include "executor/executor.h" #include "utils/datum.h" /* diff --git a/src/backend/executor/cypher_set.c b/src/backend/executor/cypher_set.c index d1837fb16..c26724805 100644 --- a/src/backend/executor/cypher_set.c +++ b/src/backend/executor/cypher_set.c @@ -24,6 +24,8 @@ #include "executor/cypher_executor.h" #include "executor/cypher_utils.h" +#include "executor/executor.h" +#include "pg_config.h" static void begin_cypher_set(CustomScanState *node, EState *estate, int eflags); static TupleTableSlot *exec_cypher_set(CustomScanState *node); @@ -49,6 +51,13 @@ const CustomExecMethods cypher_set_exec_methods = {SET_SCAN_STATE_NAME, NULL, NULL}; +#if PG_VERSION_NUM >= 18000 +#define TUPLE_DESC_ATTR_TYPE_ID TupleDescAttr(scanTupleSlot->tts_tupleDescriptor, i)->atttypid +#define TUPLE_DESC_ATTR_TYPE_ID_ENTITY TupleDescAttr(scanTupleSlot->tts_tupleDescriptor, update_item->entity_position - 1)->atttypid +#else +#define TUPLE_DESC_ATTR_TYPE_ID scanTupleSlot->tts_tupleDescriptor->attrs[i]->atttypid +#define TUPLE_DESC_ATTR_TYPE_ID_ENTITY scanTupleSlot->tts_tupleDescriptor->attrs[update_item->entity_position - 1]->atttypid +#endif static void begin_cypher_set(CustomScanState *node, EState *estate, int eflags) { @@ -310,7 +319,8 @@ static void update_all_paths(CustomScanState *node, graphid id, agtype_value *original_entity_value; /* skip nulls */ - if (scanTupleSlot->tts_tupleDescriptor->attrs[i].atttypid != AGTYPEOID) + // Starting postgresql 
version 16, tupleDescriptor->attrs access is not recommended. Instead, we must use tupleDescAttr which handles both compact_attr and attr access. + if (TUPLE_DESC_ATTR_TYPE_ID != AGTYPEOID) { continue; } @@ -414,7 +424,8 @@ static void process_update_list(CustomScanState *node) continue; } - if (scanTupleSlot->tts_tupleDescriptor->attrs[update_item->entity_position -1].atttypid != AGTYPEOID) + // Starting from postgresql 16, we need to use TupleDescAttr for accessing tupleDescriptor because attr is now no longer present under tupleDescriptor and is now compact_attr + if (TUPLE_DESC_ATTR_TYPE_ID_ENTITY != AGTYPEOID) { ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), diff --git a/src/backend/executor/cypher_utils.c b/src/backend/executor/cypher_utils.c index c8d568831..2d5a46986 100644 --- a/src/backend/executor/cypher_utils.c +++ b/src/backend/executor/cypher_utils.c @@ -30,6 +30,7 @@ #include "catalog/ag_label.h" #include "commands/label_commands.h" #include "executor/cypher_utils.h" +#include "executor/executor.h" #include "utils/ag_cache.h" /* diff --git a/src/backend/nodes/ag_nodes.c b/src/backend/nodes/ag_nodes.c index 54bd27314..fc3aaca68 100644 --- a/src/backend/nodes/ag_nodes.c +++ b/src/backend/nodes/ag_nodes.c @@ -25,6 +25,7 @@ #include "nodes/cypher_readfuncs.h" #include "nodes/cypher_nodes.h" +#include "utils/palloc.h" static bool equal_ag_node(const ExtensibleNode *a, const ExtensibleNode *b); /* This list must match ag_node_tag. */ diff --git a/src/backend/parser/cypher_clause.c b/src/backend/parser/cypher_clause.c index e301daa0f..7444e64cc 100644 --- a/src/backend/parser/cypher_clause.c +++ b/src/backend/parser/cypher_clause.c @@ -51,6 +51,8 @@ #include "utils/ag_cache.h" #include "utils/ag_func.h" #include "utils/ag_guc.h" +#include "nodes/primnodes.h" +#include "pg_config.h" /* * Variable string names for makeTargetEntry. 
As they are going to be variable @@ -2555,10 +2557,17 @@ static void get_res_cols(ParseState *pstate, ParseNamespaceItem *l_pnsi, List *colnames = NIL; List *colvars = NIL; + #if PG_VERSION_NUM >= 180000 + expandRTE(l_pnsi->p_rte, l_pnsi->p_rtindex, 0, VAR_RETURNING_DEFAULT, -1, false, + &l_colnames, &l_colvars); + expandRTE(r_pnsi->p_rte, r_pnsi->p_rtindex, 0, VAR_RETURNING_DEFAULT, -1, false, + &r_colnames, &r_colvars); + #else expandRTE(l_pnsi->p_rte, l_pnsi->p_rtindex, 0, -1, false, &l_colnames, &l_colvars); expandRTE(r_pnsi->p_rte, r_pnsi->p_rtindex, 0, -1, false, &r_colnames, &r_colvars); + #endif /* add in all colnames and colvars from the l_rte. */ *res_colnames = list_concat(*res_colnames, l_colnames); diff --git a/src/backend/utils/adt/agtype_parser.c b/src/backend/utils/adt/agtype_parser.c index c485cb925..730290127 100644 --- a/src/backend/utils/adt/agtype_parser.c +++ b/src/backend/utils/adt/agtype_parser.c @@ -59,6 +59,7 @@ typedef enum /* contexts of agtype parser */ AGTYPE_PARSE_END /* saw the end of a document, expect nothing */ } agtype_parse_context; +#define pg_attribute_noreturn() __attribute__((noreturn)) static inline void agtype_lex(agtype_lex_context *lex); static inline void agtype_lex_string(agtype_lex_context *lex); static inline void agtype_lex_number(agtype_lex_context *lex, char *s, From 02c372a1144c3c3048f1e17c5b6467691cce430d Mon Sep 17 00:00:00 2001 From: sanchayanghosh Date: Thu, 9 Oct 2025 16:00:07 +0530 Subject: [PATCH 7/7] Modifies fix for #2164 : Removes the macro but this fix will only provide compatibility going forward from PostgreSQL 16 onwards. 
--- src/backend/executor/cypher_delete.c | 5 +---- src/backend/executor/cypher_set.c | 6 +----- src/backend/parser/cypher_clause.c | 7 ------- 3 files changed, 2 insertions(+), 16 deletions(-) diff --git a/src/backend/executor/cypher_delete.c b/src/backend/executor/cypher_delete.c index 72a6fe254..ad69ad192 100644 --- a/src/backend/executor/cypher_delete.c +++ b/src/backend/executor/cypher_delete.c @@ -57,11 +57,8 @@ const CustomExecMethods cypher_delete_exec_methods = {DELETE_SCAN_STATE_NAME, NULL, NULL}; -#if PG_VERSION_NUM >= 18000 #define TUPLE_DESC_ATTR_TYPE_ID TupleDescAttr(tupleDescriptor, entity_position - 1)->atttypid -#else -#define TUPLE_DESC_ATTR_TYPE_ID tupleDescriptor->attrs[entity_position - 1]->atttypid -#endif + /* * Initialization at the beginning of execution. Setup the child node, * setup its scan tuple slot and projection info, expression context, diff --git a/src/backend/executor/cypher_set.c b/src/backend/executor/cypher_set.c index c26724805..d72f2890f 100644 --- a/src/backend/executor/cypher_set.c +++ b/src/backend/executor/cypher_set.c @@ -51,13 +51,9 @@ const CustomExecMethods cypher_set_exec_methods = {SET_SCAN_STATE_NAME, NULL, NULL}; -#if PG_VERSION_NUM >= 18000 #define TUPLE_DESC_ATTR_TYPE_ID TupleDescAttr(scanTupleSlot->tts_tupleDescriptor, i)->atttypid #define TUPLE_DESC_ATTR_TYPE_ID_ENTITY TupleDescAttr(scanTupleSlot->tts_tupleDescriptor, update_item->entity_position - 1)->atttypid -#else -#define TUPLE_DESC_ATTR_TYPE_ID scanTupleSlot->tts_tupleDescriptor->attrs[i]->atttypid -#define TUPLE_DESC_ATTR_TYPE_ID_ENTITY scanTupleSlot->tts_tupleDescriptor->attrs[update_item->entity_position - 1]->atttypid -#endif + static void begin_cypher_set(CustomScanState *node, EState *estate, int eflags) { diff --git a/src/backend/parser/cypher_clause.c b/src/backend/parser/cypher_clause.c index 7444e64cc..452227728 100644 --- a/src/backend/parser/cypher_clause.c +++ b/src/backend/parser/cypher_clause.c @@ -2557,17 +2557,10 @@ static void 
get_res_cols(ParseState *pstate, ParseNamespaceItem *l_pnsi, List *colnames = NIL; List *colvars = NIL; - #if PG_VERSION_NUM >= 180000 expandRTE(l_pnsi->p_rte, l_pnsi->p_rtindex, 0, VAR_RETURNING_DEFAULT, -1, false, &l_colnames, &l_colvars); expandRTE(r_pnsi->p_rte, r_pnsi->p_rtindex, 0, VAR_RETURNING_DEFAULT, -1, false, &r_colnames, &r_colvars); - #else - expandRTE(l_pnsi->p_rte, l_pnsi->p_rtindex, 0, -1, false, - &l_colnames, &l_colvars); - expandRTE(r_pnsi->p_rte, r_pnsi->p_rtindex, 0, -1, false, - &r_colnames, &r_colvars); - #endif /* add in all colnames and colvars from the l_rte. */ *res_colnames = list_concat(*res_colnames, l_colnames);