Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 7 additions & 0 deletions crates/pgt_configuration/src/analyser/linter/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,12 @@ pub struct LinterConfiguration {
/// match these patterns.
#[partial(bpaf(hide))]
pub include: StringSet,

/// Default search path schemas for type checking.
/// Can be a list of schema names or glob patterns like ["public", "app_*"].
/// If not specified, defaults to ["public"].
#[partial(bpaf(long("search_path")))]
pub search_path_patterns: StringSet,
}

impl LinterConfiguration {
Expand All @@ -43,6 +49,7 @@ impl Default for LinterConfiguration {
rules: Default::default(),
ignore: Default::default(),
include: Default::default(),
search_path_patterns: ["public".to_string()].into_iter().collect(),
}
}
}
Expand Down
1 change: 1 addition & 0 deletions crates/pgt_schema_cache/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ sqlx.workspace = true
strum = { workspace = true }
tokio.workspace = true


[dev-dependencies]
pgt_test_utils.workspace = true

Expand Down
1 change: 1 addition & 0 deletions crates/pgt_schema_cache/src/schema_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@ impl SchemaCache {
.filter(|t| t.name == name && schema.is_none_or(|s| s == t.schema.as_str()))
.collect()
}

}

pub trait SchemaCacheItem {
Expand Down
2 changes: 2 additions & 0 deletions crates/pgt_typecheck/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ version = "0.0.0"


[dependencies]
globset = "0.4.16"
pgt_console.workspace = true
pgt_diagnostics.workspace = true
pgt_query.workspace = true
Expand All @@ -22,6 +23,7 @@ sqlx.workspace = true
tokio.workspace = true
tree-sitter.workspace = true
tree_sitter_sql.workspace = true
itertools = { version = "0.14.0" }

[dev-dependencies]
insta.workspace = true
Expand Down
52 changes: 52 additions & 0 deletions crates/pgt_typecheck/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ mod typed_identifier;

pub use diagnostics::TypecheckDiagnostic;
use diagnostics::create_type_error;
use globset::Glob;
use itertools::Itertools;
use pgt_schema_cache::SchemaCache;
use sqlx::postgres::PgDatabaseError;
pub use sqlx::postgres::PgSeverity;
use sqlx::{Executor, PgPool};
Expand All @@ -17,6 +20,9 @@ pub struct TypecheckParams<'a> {
pub tree: &'a tree_sitter::Tree,
pub schema_cache: &'a pgt_schema_cache::SchemaCache,
pub identifiers: Vec<TypedIdentifier>,
/// Set of glob patterns that will be matched against the schemas in the database.
/// Each matching schema will be added to the search_path for the typecheck.
pub search_path_patterns: Vec<String>,
}

pub async fn check_sql(
Expand Down Expand Up @@ -49,6 +55,19 @@ pub async fn check_sql(
params.sql,
);

let mut search_path_schemas =
get_schemas_in_search_path(params.schema_cache, params.search_path_patterns);

if !search_path_schemas.is_empty() {
// Always include public if we have any schemas in search path
if !search_path_schemas.contains(&"public") {
search_path_schemas.push("public");
}

let search_path_query = format!("SET search_path TO {};", search_path_schemas.join(", "));
conn.execute(&*search_path_query).await?;
}

let res = conn.prepare(&prepared).await;

match res {
Expand All @@ -64,3 +83,36 @@ pub async fn check_sql(
Err(err) => Err(err),
}
}

/// Returns the names of all schemas in `schema_cache` that match at least one
/// of the given patterns (e.g. `"public"`, `"app_*"`).
///
/// Patterns are processed in their configured order so the resulting
/// search_path keeps its priority; duplicate schema names are removed while
/// preserving first occurrence. A pattern that fails to parse as a glob falls
/// back to an exact name match, consistent with
/// `LinterSettings::get_matching_schemas`.
fn get_schemas_in_search_path<'a>(
    schema_cache: &'a SchemaCache,
    glob_patterns: Vec<String>,
) -> Vec<&'a str> {
    // iterate over glob_patterns on the outside to keep the order
    glob_patterns
        .iter()
        .flat_map(|pattern| {
            // Compile each pattern once; `None` marks an unparsable glob.
            let matcher = Glob::new(pattern).ok().map(|g| g.compile_matcher());

            schema_cache
                .schemas
                .iter()
                .map(|s| s.name.as_str())
                .filter(move |name| match &matcher {
                    Some(m) => m.is_match(*name),
                    // fallback to exact match if the glob pattern is invalid
                    None => *name == pattern.as_str(),
                })
        })
        .unique()
        .collect()
}
1 change: 1 addition & 0 deletions crates/pgt_typecheck/tests/diagnostics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ async fn test(name: &str, query: &str, setup: Option<&str>, test_db: &PgPool) {
ast: &root,
tree: &tree,
schema_cache: &schema_cache,
search_path_patterns: vec![],
identifiers: vec![],
})
.await;
Expand Down
27 changes: 27 additions & 0 deletions crates/pgt_workspace/src/settings.rs
Original file line number Diff line number Diff line change
Expand Up @@ -291,6 +291,7 @@ fn to_linter_settings(
rules: Some(conf.rules),
ignored_files: to_matcher(working_directory.clone(), Some(&conf.ignore))?,
included_files: to_matcher(working_directory.clone(), Some(&conf.include))?,
search_path_patterns: conf.search_path_patterns.into_iter().collect(),
})
}

Expand Down Expand Up @@ -388,6 +389,31 @@ pub struct LinterSettings {

/// List of included paths/files to match
pub included_files: Matcher,

/// Glob patterns for additional schemas to check when typechecking
pub search_path_patterns: Vec<String>,
}

impl LinterSettings {
/// Returns schema names that match the configured search path patterns.
/// Supports glob patterns like "app_*" or exact matches.
pub fn get_matching_schemas<'a>(&self, schema_names: &'a [&str]) -> Vec<&'a str> {
schema_names
.iter()
.filter(|&&schema_name| {
self.search_path_patterns.iter().any(|pattern| {
if let Ok(glob) = Glob::new(pattern) {
let matcher = glob.compile_matcher();
matcher.is_match(schema_name)
} else {
// fallback to exact match if glob pattern is invalid
pattern == schema_name
}
})
})
.copied()
.collect()
}
}

impl Default for LinterSettings {
Expand All @@ -397,6 +423,7 @@ impl Default for LinterSettings {
rules: Some(pgt_configuration::analyser::linter::Rules::default()),
ignored_files: Matcher::empty(),
included_files: Matcher::empty(),
search_path_patterns: vec!["public".to_string()],
}
}
}
Expand Down
4 changes: 4 additions & 0 deletions crates/pgt_workspace/src/workspace/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -455,6 +455,7 @@ impl Workspace for WorkspaceServer {
let path_clone = params.path.clone();
let schema_cache = self.schema_cache.load(pool.clone())?;
let input = doc.iter(TypecheckDiagnosticsMapper).collect::<Vec<_>>();
let search_path_patterns = settings.linter.search_path_patterns.clone();

// Combined async context for both typecheck and plpgsql_check
let async_results = run_async(async move {
Expand All @@ -463,6 +464,8 @@ impl Workspace for WorkspaceServer {
let pool = pool.clone();
let path = path_clone.clone();
let schema_cache = Arc::clone(&schema_cache);
let search_path_patterns = search_path_patterns.clone();

async move {
let mut diagnostics = Vec::new();

Expand All @@ -474,6 +477,7 @@ impl Workspace for WorkspaceServer {
ast: &ast,
tree: &cst,
schema_cache: schema_cache.as_ref(),
search_path_patterns,
identifiers: sign
.map(|s| {
s.args
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ impl SchemaCacheManager {

// Load schema cache
let pool_clone = pool.clone();
let schema_cache = Arc::new(run_async(
async move { SchemaCache::load(&pool_clone).await },
)??);
let schema_cache = Arc::new(run_async(async move {
SchemaCache::load(&pool_clone).await
})??);

schemas.insert(key, schema_cache.clone());
Ok(schema_cache)
Expand Down
11 changes: 11 additions & 0 deletions docs/schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,17 @@
"type": "null"
}
]
},
"searchPathPatterns": {
"description": "Default search path schemas for type checking. Can be a list of schema names or glob patterns like [\"public\", \"app_*\"]. If not specified, defaults to [\"public\"].",
"anyOf": [
{
"$ref": "#/definitions/StringSet"
},
{
"type": "null"
}
]
}
},
"additionalProperties": false
Expand Down
4 changes: 4 additions & 0 deletions packages/@postgrestools/backend-jsonrpc/src/workspace.ts
Original file line number Diff line number Diff line change
Expand Up @@ -326,6 +326,10 @@ export interface PartialLinterConfiguration {
* List of rules
*/
rules?: Rules;
/**
* Default search path schemas for type checking. Can be a list of schema names or glob patterns like ["public", "app_*"]. If not specified, defaults to ["public"].
*/
searchPathPatterns?: StringSet;
}
/**
* The configuration of the filesystem
Expand Down
3 changes: 2 additions & 1 deletion postgrestools.jsonc
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@
"enabled": true,
"rules": {
"recommended": true
}
},
"searchPathPatterns": ["private", "public"]
},
// YOU CAN COMMENT ME OUT :)
"db": {
Expand Down
51 changes: 19 additions & 32 deletions test.sql
Original file line number Diff line number Diff line change
@@ -1,34 +1,21 @@
create table
unknown_users (id serial primary key, address text, email text);

drop table unknown_users;

select
*
from
unknown_users;

sel 1;



create function test_organisation_id ()
returns setof text
language plpgsql
security invoker
as $$
declre
v_organisation_id uuid;
begin
return next is(private.organisation_id(), v_organisation_id, 'should return organisation_id of token');
end
$$;


create function f1()
returns void as $$
declare b constant int;
begin
call p1(10, b);
-- create schema private;

create table if not exists private.something (
id serial primary key,
arr double precision[]
);

create or replace function private.head(
arr double precision[]
) returns double precision as $$
begin
if cardinality(arr) = 0 then
raise exception 'Empty array!';
else
return arr[0];
end if;
end;
$$ language plpgsql;


select head (arr) from private.something;
Loading