Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 11 additions & 0 deletions crates/pgt_configuration/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ pub mod diagnostics;
pub mod files;
pub mod generated;
pub mod migrations;
pub mod typecheck;
pub mod vcs;

pub use crate::diagnostics::ConfigurationDiagnostic;
Expand All @@ -33,6 +34,9 @@ use migrations::{
MigrationsConfiguration, PartialMigrationsConfiguration, partial_migrations_configuration,
};
use serde::{Deserialize, Serialize};
pub use typecheck::{
PartialTypecheckConfiguration, TypecheckConfiguration, partial_typecheck_configuration,
};
use vcs::VcsClientKind;

pub const VERSION: &str = match option_env!("PGT_VERSION") {
Expand Down Expand Up @@ -77,6 +81,10 @@ pub struct Configuration {
#[partial(type, bpaf(external(partial_linter_configuration), optional))]
pub linter: LinterConfiguration,

/// The configuration for type checking
#[partial(type, bpaf(external(partial_typecheck_configuration), optional))]
pub typecheck: TypecheckConfiguration,

/// The configuration of the database connection
#[partial(
type,
Expand Down Expand Up @@ -110,6 +118,9 @@ impl PartialConfiguration {
}),
..Default::default()
}),
typecheck: Some(PartialTypecheckConfiguration {
..Default::default()
}),
db: Some(PartialDatabaseConfiguration {
host: Some("127.0.0.1".to_string()),
port: Some(5432),
Expand Down
25 changes: 25 additions & 0 deletions crates/pgt_configuration/src/typecheck.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
use biome_deserialize::StringSet;
use biome_deserialize_macros::{Merge, Partial};
use bpaf::Bpaf;
use serde::{Deserialize, Serialize};

/// The configuration for type checking.
///
/// The `Partial` derive generates `PartialTypecheckConfiguration`, which is
/// what appears in user config files (camelCase keys, unknown fields
/// rejected) and on the CLI via `bpaf`.
#[derive(Clone, Debug, Deserialize, Eq, Partial, PartialEq, Serialize)]
#[partial(derive(Bpaf, Clone, Eq, PartialEq, Merge))]
#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))]
#[partial(serde(rename_all = "camelCase", default, deny_unknown_fields))]
pub struct TypecheckConfiguration {
    /// Default search path schemas for type checking.
    /// Can be a list of schema names or glob patterns like ["public", "app_*"].
    /// If not specified, defaults to ["public"].
    // NOTE(review): the CLI flag is spelled `--search_path` (snake_case);
    // confirm this matches the naming convention of the other bpaf flags.
    #[partial(bpaf(long("search_path")))]
    pub search_path: StringSet,
}

impl Default for TypecheckConfiguration {
fn default() -> Self {
Self {
search_path: ["public".to_string()].into_iter().collect(),
}
}
}
2 changes: 2 additions & 0 deletions crates/pgt_typecheck/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ version = "0.0.0"


[dependencies]
globset = "0.4.16"
itertools = { version = "0.14.0" }
pgt_console.workspace = true
pgt_diagnostics.workspace = true
pgt_query.workspace = true
Expand Down
49 changes: 49 additions & 0 deletions crates/pgt_typecheck/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ mod typed_identifier;

pub use diagnostics::TypecheckDiagnostic;
use diagnostics::create_type_error;
use globset::Glob;
use itertools::Itertools;
use pgt_schema_cache::SchemaCache;
use sqlx::postgres::PgDatabaseError;
pub use sqlx::postgres::PgSeverity;
use sqlx::{Executor, PgPool};
Expand All @@ -17,6 +20,9 @@ pub struct TypecheckParams<'a> {
pub tree: &'a tree_sitter::Tree,
pub schema_cache: &'a pgt_schema_cache::SchemaCache,
pub identifiers: Vec<TypedIdentifier>,
/// Set of glob patterns that will be matched against the schemas in the database.
/// Each matching schema will be added to the search_path for the typecheck.
pub search_path_patterns: Vec<String>,
}

pub async fn check_sql(
Expand Down Expand Up @@ -49,6 +55,19 @@ pub async fn check_sql(
params.sql,
);

let mut search_path_schemas =
get_schemas_in_search_path(params.schema_cache, params.search_path_patterns);

if !search_path_schemas.is_empty() {
// Always include public if we have any schemas in search path
if !search_path_schemas.contains(&"public") {
search_path_schemas.push("public");
}

let search_path_query = format!("SET search_path TO {};", search_path_schemas.join(", "));
conn.execute(&*search_path_query).await?;
}

let res = conn.prepare(&prepared).await;

match res {
Expand All @@ -64,3 +83,33 @@ pub async fn check_sql(
Err(err) => Err(err),
}
}

fn get_schemas_in_search_path(schema_cache: &SchemaCache, glob_patterns: Vec<String>) -> Vec<&str> {
// iterate over glob_patterns on the outside to keep the order
glob_patterns
.iter()
.filter_map(|pattern| {
if let Ok(glob) = Glob::new(pattern) {
let matcher = glob.compile_matcher();

Some(
schema_cache
.schemas
.iter()
.filter_map(|s| {
if matcher.is_match(s.name.as_str()) {
Some(s.name.as_str())
} else {
None
}
})
.collect::<Vec<&str>>(),
)
} else {
None
}
})
.flatten()
.unique()
.collect()
}
1 change: 1 addition & 0 deletions crates/pgt_typecheck/tests/diagnostics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ async fn test(name: &str, query: &str, setup: Option<&str>, test_db: &PgPool) {
ast: &root,
tree: &tree,
schema_cache: &schema_cache,
search_path_patterns: vec![],
identifiers: vec![],
})
.await;
Expand Down
31 changes: 30 additions & 1 deletion crates/pgt_workspace/src/settings.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use tracing::trace;

use ignore::gitignore::{Gitignore, GitignoreBuilder};
use pgt_configuration::{
ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration,
ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, TypecheckConfiguration,
database::PartialDatabaseConfiguration,
diagnostics::InvalidIgnorePattern,
files::FilesConfiguration,
Expand Down Expand Up @@ -210,6 +210,9 @@ pub struct Settings {
/// Linter settings applied to all files in the workspace
pub linter: LinterSettings,

/// Type checking settings for the workspace
pub typecheck: TypecheckSettings,

/// Migrations settings
pub migrations: Option<MigrationSettings>,
}
Expand Down Expand Up @@ -245,6 +248,11 @@ impl Settings {
to_linter_settings(working_directory.clone(), LinterConfiguration::from(linter))?;
}

// typecheck part
if let Some(typecheck) = configuration.typecheck {
self.typecheck = to_typecheck_settings(TypecheckConfiguration::from(typecheck));
}

// Migrations settings
if let Some(migrations) = configuration.migrations {
self.migrations = to_migration_settings(
Expand Down Expand Up @@ -294,6 +302,12 @@ fn to_linter_settings(
})
}

/// Converts the raw typecheck configuration into workspace settings by
/// flattening the `StringSet` of search-path entries into a `Vec`.
fn to_typecheck_settings(conf: TypecheckConfiguration) -> TypecheckSettings {
    let search_path = Vec::from_iter(conf.search_path);
    TypecheckSettings { search_path }
}

fn to_file_settings(
working_directory: Option<PathBuf>,
config: Option<FilesConfiguration>,
Expand Down Expand Up @@ -401,6 +415,21 @@ impl Default for LinterSettings {
}
}

/// Type checking settings for the entire workspace
#[derive(Debug)]
pub struct TypecheckSettings {
    /// Default search path schemas for type checking.
    /// Entries may be plain schema names or glob patterns (e.g. "app_*")
    /// that are matched against the database's schemas at typecheck time.
    pub search_path: Vec<String>,
}

impl Default for TypecheckSettings {
fn default() -> Self {
Self {
search_path: vec!["public".to_string()],
}
}
}

/// Database settings for the entire workspace
#[derive(Debug)]
pub struct DatabaseSettings {
Expand Down
4 changes: 4 additions & 0 deletions crates/pgt_workspace/src/workspace/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -455,6 +455,7 @@ impl Workspace for WorkspaceServer {
let path_clone = params.path.clone();
let schema_cache = self.schema_cache.load(pool.clone())?;
let input = doc.iter(TypecheckDiagnosticsMapper).collect::<Vec<_>>();
let search_path_patterns = settings.typecheck.search_path.clone();
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: why not passing the entire config struct?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

because of a circular dependency: workspace depends on typecheck, typecheck depends on workspace::TypecheckSettings


// Combined async context for both typecheck and plpgsql_check
let async_results = run_async(async move {
Expand All @@ -463,6 +464,8 @@ impl Workspace for WorkspaceServer {
let pool = pool.clone();
let path = path_clone.clone();
let schema_cache = Arc::clone(&schema_cache);
let search_path_patterns = search_path_patterns.clone();

async move {
let mut diagnostics = Vec::new();

Expand All @@ -474,6 +477,7 @@ impl Workspace for WorkspaceServer {
ast: &ast,
tree: &cst,
schema_cache: schema_cache.as_ref(),
search_path_patterns,
identifiers: sign
.map(|s| {
s.args
Expand Down
113 changes: 112 additions & 1 deletion crates/pgt_workspace/src/workspace/server.tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@ use std::sync::Arc;
use biome_deserialize::{Merge, StringSet};
use pgt_analyse::RuleCategories;
use pgt_configuration::{
PartialConfiguration, database::PartialDatabaseConfiguration, files::PartialFilesConfiguration,
PartialConfiguration, PartialTypecheckConfiguration, database::PartialDatabaseConfiguration,
files::PartialFilesConfiguration,
};
use pgt_diagnostics::Diagnostic;
use pgt_fs::PgTPath;
Expand Down Expand Up @@ -331,3 +332,113 @@ async fn test_positional_params(test_db: PgPool) {

assert_eq!(diagnostics.len(), 0, "Expected no diagnostic");
}

/// Verifies that the `typecheck.searchPath` configuration controls which
/// schemas end up on the connection's search_path during typechecking.
///
/// A function is created in a `private` schema. Without any configured
/// pattern the unqualified call `get_user_id()` cannot be resolved and a
/// typecheck diagnostic is produced; after configuring the glob `pr*`
/// (which matches `private`) the call resolves and no diagnostic remains.
#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")]
async fn test_search_path_configuration(test_db: PgPool) {
    // Setup: a `private` schema with a function that is only resolvable
    // when `private` is on the search_path.
    let setup_sql = r#"
        create schema if not exists private;

        create or replace function private.get_user_id() returns integer as $$
            select 1;
        $$ language sql;
    "#;
    test_db.execute(setup_sql).await.expect("setup sql failed");

    let path_glob = PgTPath::new("test_glob.sql");
    let file_content = r#"
        select get_user_id(); -- on private schema
    "#;

    // Base configuration pointing at the test database; no typecheck
    // search-path override yet. First check that we get a typecheck error.
    let mut glob_conf = PartialConfiguration::init();
    glob_conf.merge_with(PartialConfiguration {
        db: Some(PartialDatabaseConfiguration {
            database: Some(
                test_db
                    .connect_options()
                    .get_database()
                    .unwrap()
                    .to_string(),
            ),
            ..Default::default()
        }),
        ..Default::default()
    });

    // Without the glob: `private` is not on the search_path, so the
    // unqualified call must produce exactly one typecheck diagnostic.
    {
        let workspace =
            get_test_workspace(Some(glob_conf.clone())).expect("Unable to create test workspace");

        workspace
            .open_file(OpenFileParams {
                path: path_glob.clone(),
                content: file_content.into(),
                version: 1,
            })
            .expect("Unable to open test file");

        let diagnostics_glob = workspace
            .pull_diagnostics(crate::workspace::PullDiagnosticsParams {
                path: path_glob.clone(),
                categories: RuleCategories::all(),
                max_diagnostics: 100,
                only: vec![],
                skip: vec![],
            })
            .expect("Unable to pull diagnostics")
            .diagnostics;

        assert_eq!(
            diagnostics_glob.len(),
            1,
            "get_user_id() should not be found in search_path"
        );

        // yep, type error!
        assert_eq!(
            diagnostics_glob[0].category().map(|c| c.name()),
            Some("typecheck")
        );
    }

    // Now add a glob pattern that matches the `private` schema.
    glob_conf.merge_with(PartialConfiguration {
        typecheck: Some(PartialTypecheckConfiguration {
            // Adding glob pattern to match the "private" schema
            search_path: Some(StringSet::from_iter(vec!["pr*".to_string()])),
        }),
        ..Default::default()
    }); // checking with the pattern should yield no diagnostics

    // With the glob: `private` is added to the search_path, so the same
    // file must now typecheck cleanly.
    {
        let workspace =
            get_test_workspace(Some(glob_conf.clone())).expect("Unable to create test workspace");

        workspace
            .open_file(OpenFileParams {
                path: path_glob.clone(),
                content: file_content.into(),
                version: 1,
            })
            .expect("Unable to open test file");

        let diagnostics_glob = workspace
            .pull_diagnostics(crate::workspace::PullDiagnosticsParams {
                path: path_glob.clone(),
                categories: RuleCategories::all(),
                max_diagnostics: 100,
                only: vec![],
                skip: vec![],
            })
            .expect("Unable to pull diagnostics")
            .diagnostics;

        assert_eq!(
            diagnostics_glob.len(),
            0,
            "Glob pattern should put private schema in search path"
        );
    }
}
Loading