Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

33 changes: 33 additions & 0 deletions crates/pgt_plpgsql_check/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -271,6 +271,39 @@ mod tests {
Ok((diagnostics, span_texts))
}

/// Verifies that plpgsql_check produces no diagnostics for a function that
/// assigns to fields of a composite-type variable.
///
/// The return type is a table type (`_fetch_cycle_continuation_data`);
/// libpg_query's PL/pgSQL parser reports a false positive
/// ("result.next_id" is not a known variable) for such field assignments,
/// see https://github.com/pganalyze/libpg_query/issues/159 — this test pins
/// that no diagnostic surfaces for the valid function below.
#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")]
async fn test_plpgsql_check_composite_types(test_db: PgPool) {
    // The table doubles as the composite type returned by the function.
    // `check(false) no inherit` marks it as holding no data of its own.
    let setup = r#"
        create extension if not exists plpgsql_check;

        create table if not exists _fetch_cycle_continuation_data (
            next_id bigint,
            next_state jsonb null default '{}'::jsonb
            constraint abstract_no_data check(false) no inherit
        );
    "#;

    let create_fn_sql = r#"
        create or replace function continue_fetch_cycle_prototype (
        ) returns _fetch_cycle_continuation_data language plpgsql as $prototype$
        declare
            result _fetch_cycle_continuation_data := null;
        begin
            result.next_id := 0;
            result.next_state := '{}'::jsonb;

            return result;
        end;
        $prototype$;
    "#;

    // Span texts are irrelevant here — only the diagnostic count matters,
    // so bind with a leading underscore to avoid an unused-variable warning.
    let (diagnostics, _span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql)
        .await
        .expect("Failed to run plpgsql_check test");

    assert_eq!(diagnostics.len(), 0);
}

#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")]
async fn test_plpgsql_check_if_expr(test_db: PgPool) {
let setup = r#"
Expand Down
1 change: 1 addition & 0 deletions crates/pgt_workspace/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ pgt_text_size.workspace = true
pgt_tokenizer = { workspace = true }
pgt_typecheck = { workspace = true }
pgt_workspace_macros = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
Expand Down
53 changes: 53 additions & 0 deletions crates/pgt_workspace/src/workspace/server.tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -279,6 +279,59 @@ async fn test_dedupe_diagnostics(test_db: PgPool) {
);
}

/// End-to-end workspace test: pulling diagnostics for a PL/pgSQL function
/// that assigns to fields of a composite-type variable must yield nothing.
///
/// libpg_query reports a false positive ("result.next_id" is not a known
/// variable) for composite-field assignments
/// (https://github.com/pganalyze/libpg_query/issues/159); the workspace is
/// expected to suppress it.
#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")]
async fn test_plpgsql_assign_composite_types(test_db: PgPool) {
    let conf = PartialConfiguration::init();

    let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace");

    let path = PgTPath::new("test.sql");

    // The table doubles as the composite return type of the function below.
    let setup_sql = r"
        create table if not exists _fetch_cycle_continuation_data (
            next_id bigint,
            next_state jsonb null default '{}'::jsonb
            constraint abstract_no_data check(false) no inherit
        );
    ";
    test_db.execute(setup_sql).await.expect("setup sql failed");

    // NOTE: the statement terminators after `'{}'::jsonb` and after the final
    // `$prototype$` were missing; without them the body is invalid PL/pgSQL.
    // The omission was masked because libpg_query aborts at the first
    // composite-field assignment (the suppressed false positive), so later
    // syntax errors were never reached. The sibling test in
    // pgt_plpgsql_check has them — kept consistent here.
    let content = r#"
        create or replace function continue_fetch_cycle_prototype ()
        returns _fetch_cycle_continuation_data language plpgsql as $prototype$
        declare
            result _fetch_cycle_continuation_data := null;
        begin
            result.next_id := 0;
            result.next_state := '{}'::jsonb;

            return result;
        end;
        $prototype$;
    "#;

    workspace
        .open_file(OpenFileParams {
            path: path.clone(),
            content: content.into(),
            version: 1,
        })
        .expect("Unable to open test file");

    let diagnostics = workspace
        .pull_diagnostics(crate::workspace::PullDiagnosticsParams {
            path: path.clone(),
            categories: RuleCategories::all(),
            max_diagnostics: 100,
            only: vec![],
            skip: vec![],
        })
        .expect("Unable to pull diagnostics")
        .diagnostics;

    assert_eq!(diagnostics.len(), 0, "Expected no diagnostic");
}

#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")]
async fn test_positional_params(test_db: PgPool) {
let mut conf = PartialConfiguration::init();
Expand Down
19 changes: 17 additions & 2 deletions crates/pgt_workspace/src/workspace/server/pg_query.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
use std::collections::HashMap;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::sync::{Arc, LazyLock, Mutex};

use lru::LruCache;
use pgt_query_ext::diagnostics::*;
use pgt_text_size::TextRange;
use pgt_tokenizer::tokenize;
use regex::Regex;

use super::statement_identifier::StatementId;

Expand Down Expand Up @@ -82,13 +83,27 @@ impl PgQueryStore {
let range = TextRange::new(start.try_into().unwrap(), end.try_into().unwrap());

let r = pgt_query::parse_plpgsql(statement.content())
.map_err(|err| SyntaxDiagnostic::new(err.to_string(), Some(range)));
.or_else(|e| match &e {
// ignore `is not a known variable` for composite types because libpg_query reports a false positive.
// https://github.com/pganalyze/libpg_query/issues/159
pgt_query::Error::Parse(err) if is_composite_type_error(err) => Ok(()),
_ => Err(e),
})
.map_err(|e| SyntaxDiagnostic::new(e.to_string(), Some(range)));

cache.put(statement.clone(), r.clone());

Some(r)
}
}

// Matches libpg_query's false-positive diagnostic for assignments to fields of
// a composite-type variable, e.g. `"result.next_id" is not a known variable`.
// The dotted `<var>.<field>` name inside the quotes is what distinguishes a
// composite-field reference from a genuinely unknown variable; the optional
// `\\?` handles quotes that arrive backslash-escaped in the error string.
// Compiled once lazily — regex construction is not free and this runs per
// parse error. See https://github.com/pganalyze/libpg_query/issues/159.
static COMPOSITE_TYPE_ERROR_RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r#"\\?"([^"\\]+\.[^"\\]+)\\?" is not a known variable"#).unwrap());
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

brauchen wir den ganzen bleb da vorne oder würde auch ein match auf is not a known variable reichen?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Wir wollen den error nur ignorieren wenn es sich um ein Feld in einem Composite Type handelt


/// Returns `true` when `err` is libpg_query's false-positive
/// "is not a known variable" message for a composite-type field reference
/// (a quoted dotted `var.field` name). The caller treats a matching parse
/// error as success; messages without a dotted name do not match and are
/// reported normally.
fn is_composite_type_error(err: &str) -> bool {
COMPOSITE_TYPE_ERROR_RE.is_match(err)
}

/// Converts named parameters in a SQL query string to positional parameters.
///
/// This function scans the input SQL string for named parameters (e.g., `@param`, `:param`, `:'param'`)
Expand Down