12 changes: 6 additions & 6 deletions .github/workflows/go-driver.yml
@@ -2,10 +2,10 @@ name: Go Driver Tests

on:
push:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

pull_request:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

jobs:
build:
@@ -26,14 +26,14 @@ jobs:
if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then
if [[ "$GITHUB_REF" == "refs/heads/master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_REF" == "refs/heads/PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_REF" == "refs/heads/PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
elif [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
if [[ "$GITHUB_BASE_REF" == "master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_BASE_REF" == "PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_BASE_REF" == "PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
fi

30 changes: 15 additions & 15 deletions .github/workflows/installcheck.yaml
@@ -2,45 +2,45 @@ name: Build / Regression

on:
push:
- branches: [ 'master', 'PG15' ]
+ branches: [ 'master', 'PG16' ]
pull_request:
- branches: [ 'master', 'PG15' ]
+ branches: [ 'master', 'PG16' ]

jobs:
build:
runs-on: ubuntu-latest

steps:
- - name: Get latest commit id of PostgreSQL 15
+ - name: Get latest commit id of PostgreSQL 16
run: |
- echo "PG_COMMIT_HASH=$(git ls-remote git://git.postgresql.org/git/postgresql.git refs/heads/REL_15_STABLE | awk '{print $1}')" >> $GITHUB_ENV
+ echo "PG_COMMIT_HASH=$(git ls-remote git://git.postgresql.org/git/postgresql.git refs/heads/REL_16_STABLE | awk '{print $1}')" >> $GITHUB_ENV

- - name: Cache PostgreSQL 15
+ - name: Cache PostgreSQL 16
uses: actions/cache@v3
- id: pg15cache
+ id: pg16cache
with:
- path: ~/pg15
- key: ${{ runner.os }}-v1-pg15-${{ env.PG_COMMIT_HASH }}
+ path: ~/pg16
+ key: ${{ runner.os }}-v1-pg16-${{ env.PG_COMMIT_HASH }}

- - name: Install PostgreSQL 15
- if: steps.pg15cache.outputs.cache-hit != 'true'
+ - name: Install PostgreSQL 16
+ if: steps.pg16cache.outputs.cache-hit != 'true'
run: |
- git clone --depth 1 --branch REL_15_STABLE git://git.postgresql.org/git/postgresql.git ~/pg15source
- cd ~/pg15source
- ./configure --prefix=$HOME/pg15 CFLAGS="-std=gnu99 -ggdb -O0" --enable-cassert
+ git clone --depth 1 --branch REL_16_STABLE git://git.postgresql.org/git/postgresql.git ~/pg16source
+ cd ~/pg16source
+ ./configure --prefix=$HOME/pg16 CFLAGS="-std=gnu99 -ggdb -O0" --enable-cassert
make install -j$(nproc) > /dev/null

- uses: actions/checkout@v3

- name: Build
id: build
run: |
- make PG_CONFIG=$HOME/pg15/bin/pg_config install -j$(nproc)
+ make PG_CONFIG=$HOME/pg16/bin/pg_config install -j$(nproc)

- name: Regression tests
id: regression_tests
run: |
- make PG_CONFIG=$HOME/pg15/bin/pg_config installcheck
+ make PG_CONFIG=$HOME/pg16/bin/pg_config installcheck
continue-on-error: true

- name: Dump regression test errors
12 changes: 6 additions & 6 deletions .github/workflows/jdbc-driver.yaml
@@ -2,10 +2,10 @@ name: JDBC Driver Tests

on:
push:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

pull_request:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

jobs:
build:
@@ -28,14 +28,14 @@ jobs:
if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then
if [[ "$GITHUB_REF" == "refs/heads/master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_REF" == "refs/heads/PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_REF" == "refs/heads/PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
elif [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
if [[ "$GITHUB_BASE_REF" == "master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_BASE_REF" == "PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_BASE_REF" == "PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
fi

12 changes: 6 additions & 6 deletions .github/workflows/nodejs-driver.yaml
@@ -2,10 +2,10 @@ name: Nodejs Driver Tests

on:
push:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

pull_request:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

jobs:
build:
@@ -23,14 +23,14 @@ jobs:
if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then
if [[ "$GITHUB_REF" == "refs/heads/master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_REF" == "refs/heads/PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_REF" == "refs/heads/PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
elif [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
if [[ "$GITHUB_BASE_REF" == "master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_BASE_REF" == "PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_BASE_REF" == "PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
fi

12 changes: 6 additions & 6 deletions .github/workflows/python-driver.yaml
@@ -2,10 +2,10 @@ name: Python Driver Tests

on:
push:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

pull_request:
- branches: [ "master", "PG15" ]
+ branches: [ "master", "PG16" ]

jobs:
build:
@@ -23,14 +23,14 @@ jobs:
if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then
if [[ "$GITHUB_REF" == "refs/heads/master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_REF" == "refs/heads/PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_REF" == "refs/heads/PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
elif [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
if [[ "$GITHUB_BASE_REF" == "master" ]]; then
echo "TAG=latest" >> $GITHUB_ENV
- elif [[ "$GITHUB_BASE_REF" == "PG15" ]]; then
- echo "TAG=PG15_latest" >> $GITHUB_ENV
+ elif [[ "$GITHUB_BASE_REF" == "PG16" ]]; then
+ echo "TAG=PG16_latest" >> $GITHUB_ENV
fi
fi

1 change: 1 addition & 0 deletions .gitignore
@@ -7,3 +7,4 @@ age--*.*.*.sql
.DS_Store
*.tokens
*.interp
+ *.dylib
7 changes: 7 additions & 0 deletions src/backend/catalog/ag_catalog.c
@@ -97,8 +97,15 @@ void ag_ProcessUtility_hook(PlannedStmt *pstmt, const char *queryString, bool re
(*prev_process_utility_hook) (pstmt, queryString, readOnlyTree, context, params,
queryEnv, dest, qc);
else
+ {
+ Assert(IsA(pstmt, PlannedStmt));
+ Assert(pstmt->commandType == CMD_UTILITY);
+ Assert(queryString != NULL); /* required as of 8.4 */
+ Assert(qc == NULL || qc->commandTag == CMDTAG_UNKNOWN);
standard_ProcessUtility(pstmt, queryString, readOnlyTree, context, params, queryEnv,
dest, qc);
+ }
+
}

static void drop_age_extension(DropStmt *stmt)
4 changes: 2 additions & 2 deletions src/backend/catalog/ag_graph.c
@@ -49,8 +49,8 @@ void insert_graph(const Name graph_name, const Oid nsp_id)
HeapTuple tuple;


- AssertArg(graph_name);
- AssertArg(OidIsValid(nsp_id));
+ Assert(graph_name);
+ Assert(OidIsValid(nsp_id));

ag_graph = table_open(ag_graph_relation_id(), RowExclusiveLock);
values[Anum_ag_graph_oid - 1] = ObjectIdGetDatum(nsp_id);
17 changes: 9 additions & 8 deletions src/backend/catalog/ag_label.c
@@ -63,12 +63,12 @@ void insert_label(const char *label_name, Oid graph_oid, int32 label_id,
* NOTE: Is it better to make use of label_id and label_kind domain types
* than to use assert to check label_id and label_kind are valid?
*/
- AssertArg(label_name);
- AssertArg(label_id_is_valid(label_id));
- AssertArg(label_kind == LABEL_KIND_VERTEX ||
+ Assert(label_name);
+ Assert(label_id_is_valid(label_id));
+ Assert(label_kind == LABEL_KIND_VERTEX ||
label_kind == LABEL_KIND_EDGE);
- AssertArg(OidIsValid(label_relation));
- AssertArg(seq_name);
+ Assert(OidIsValid(label_relation));
+ Assert(seq_name);

ag_label = table_open(ag_label_relation_id(), RowExclusiveLock);

@@ -188,8 +188,9 @@ Datum _label_name(PG_FUNCTION_ARGS)
uint32 label_id;

if (PG_ARGISNULL(0) || PG_ARGISNULL(1))
- ereport(ERROR, (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
- errmsg("graph_oid and label_id must not be null")));
+ PG_RETURN_NULL();
+ //ereport(ERROR, (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+ // errmsg("graph_oid and label_id must not be null")));

graph = PG_GETARG_OID(0);

@@ -241,7 +242,7 @@ Datum _extract_label_id(PG_FUNCTION_ARGS)
}
graph_oid = AG_GETARG_GRAPHID(0);

- PG_RETURN_INT32(get_graphid_label_id(graph_oid));
+ PG_RETURN_INT64(get_graphid_label_id(graph_oid));
}

bool label_id_exists(Oid graph_oid, int32 label_id)
5 changes: 2 additions & 3 deletions src/backend/commands/label_commands.c
@@ -808,7 +808,7 @@ static void remove_relation(List *qname)
Oid rel_oid;
ObjectAddress address;

- AssertArg(list_length(qname) == 2);
+ Assert(list_length(qname) == 2);

// concurrent is false so lockmode is AccessExclusiveLock

@@ -868,8 +868,7 @@ static void range_var_callback_for_remove_relation(const RangeVar *rel,

// relkind == expected_relkind

- if (!pg_class_ownercheck(rel_oid, GetUserId()) &&
- !pg_namespace_ownercheck(get_rel_namespace(rel_oid), GetUserId()))
+ if (!object_ownercheck(rel_oid, get_rel_namespace(rel_oid), GetUserId()))
{
aclcheck_error(ACLCHECK_NOT_OWNER,
get_relkind_objtype(get_rel_relkind(rel_oid)),
4 changes: 2 additions & 2 deletions src/backend/executor/cypher_create.c
@@ -438,7 +438,7 @@ static void create_edge(cypher_create_custom_scan_state *css,

result = make_edge(
id, start_id, end_id, CStringGetDatum(node->label_name),
- PointerGetDatum(scanTupleSlot->tts_values[node->prop_attr_num]));
+ scanTupleSlot->tts_values[node->prop_attr_num]);

if (CYPHER_TARGET_NODE_IN_PATH(node->flags))
{
@@ -528,7 +528,7 @@ static Datum create_vertex(cypher_create_custom_scan_state *css,

// make the vertex agtype
result = make_vertex(id, CStringGetDatum(node->label_name),
- PointerGetDatum(scanTupleSlot->tts_values[node->prop_attr_num]));
+ scanTupleSlot->tts_values[node->prop_attr_num]);

// append to the path list
if (CYPHER_TARGET_NODE_IN_PATH(node->flags))
5 changes: 3 additions & 2 deletions src/backend/executor/cypher_merge.c
@@ -472,8 +472,9 @@ static TupleTableSlot *exec_cypher_merge(CustomScanState *node)
* So we will need to create a TupleTableSlot and populate with the
* information from the newly created path that the query needs.
*/
- ExprContext *econtext = node->ss.ps.ps_ExprContext;
- SubqueryScanState *sss = (SubqueryScanState *)node->ss.ps.lefttree;
+ SubqueryScanState *sss = NULL;
+ econtext = node->ss.ps.ps_ExprContext;
+ sss = (SubqueryScanState *)node->ss.ps.lefttree;

/*
* Our child execution node is always a subquery. If not there
7 changes: 4 additions & 3 deletions src/backend/executor/cypher_set.c
@@ -111,7 +111,7 @@ static HeapTuple update_entity_tuple(ResultRelInfo *resultRelInfo,
TM_FailureData hufd;
TM_Result lock_result;
Buffer buffer;
- bool update_indexes;
+ TU_UpdateIndexes update_indexes;
TM_Result result;
CommandId cid = GetCurrentCommandId(true);
ResultRelInfo **saved_resultRels = estate->es_result_relations;
@@ -167,9 +167,10 @@
}

// Insert index entries for the tuple
- if (resultRelInfo->ri_NumIndices > 0 && update_indexes)
+ if (resultRelInfo->ri_NumIndices > 0 && update_indexes != TU_None)
{
- ExecInsertIndexTuples(resultRelInfo, elemTupleSlot, estate, false, false, NULL, NIL);
+ ExecInsertIndexTuples(resultRelInfo, elemTupleSlot, estate, false, false, NULL, NIL,
+ (update_indexes == TU_Summarizing));
}

ExecCloseIndices(resultRelInfo);
6 changes: 3 additions & 3 deletions src/backend/executor/cypher_utils.c
@@ -77,7 +77,7 @@ ResultRelInfo *create_entity_result_rel_info(EState *estate, char *graph_name,

// initialize the resultRelInfo
InitResultRelInfo(resultRelInfo, label_relation,
- list_length(estate->es_range_table), NULL,
+ 0, NULL,
estate->es_instrument);

// open the parse state
@@ -254,8 +254,8 @@ HeapTuple insert_entity_tuple_cid(ResultRelInfo *resultRelInfo,
// Insert index entries for the tuple
if (resultRelInfo->ri_NumIndices > 0)
{
- ExecInsertIndexTuples(resultRelInfo, elemTupleSlot, estate, false,
- false, NULL, NIL);
+ ExecInsertIndexTuples(resultRelInfo, elemTupleSlot, estate,
+ false, false, NULL, NIL, false);
}

return tuple;
2 changes: 1 addition & 1 deletion src/backend/nodes/cypher_readfuncs.c
@@ -166,7 +166,7 @@
((length) == 0 ? NULL : debackslash(token, length))

#define non_nullable_string(token,length) \
- ((length) == 0 ? "" : debackslash(token, length))
+ ((length == 2 && token[0] == '"' && token[1] == '"') ? "" : debackslash(token, length))

/*
* Default read function for cypher nodes. For most nodes, we don't expect
1 change: 1 addition & 0 deletions src/backend/parser/cypher_analyze.c
@@ -868,6 +868,7 @@ static Query *analyze_cypher_and_coerce(List *stmt, RangeTblFunction *rtfunc,
}

query->rtable = pstate->p_rtable;
+ query->rteperminfos = pstate->p_rteperminfos;
query->jointree = makeFromExpr(pstate->p_joinlist, NULL);

assign_query_collations(pstate, query);