Skip to content
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/pull_request.yml
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,8 @@ jobs:
run: cargo run -p xtask_codegen -- configuration
- name: Run the bindings codegen
run: cargo run -p xtask_codegen -- bindings
- name: Run the splinter codegen
run: cargo run -p xtask_codegen -- splinter
- name: Run the docs codegen
run: cargo run -p docs_codegen
- name: Check for git diff -- run "just ready" if you see an error
Expand Down

Large diffs are not rendered by default.

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,12 @@ cargo insta review

## Development Notes

### Code Quality Guidelines
**IMPORTANT**: Always run `cargo clippy --all-targets --all-features` and fix all warnings after making code changes. Clippy warnings must be resolved before committing code to maintain code quality standards.

### Git Commit and PR Guidelines
**IMPORTANT**: NEVER add "Claude" or any AI assistant name to commit messages or pull request descriptions. Commits and PRs should appear as authored by the human developer only.

### Code Generation
Many parser structures are generated from PostgreSQL's protobuf definitions using procedural macros in `pgls_query_macros`. Run `just gen-lint` after modifying analyzer rules or configurations.

Expand Down
14 changes: 14 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ pgls_query = { path = "./crates/pgls_query", version = "0.0.0"
pgls_query_ext = { path = "./crates/pgls_query_ext", version = "0.0.0" }
pgls_query_macros = { path = "./crates/pgls_query_macros", version = "0.0.0" }
pgls_schema_cache = { path = "./crates/pgls_schema_cache", version = "0.0.0" }
pgls_splinter = { path = "./crates/pgls_splinter", version = "0.0.0" }
pgls_statement_splitter = { path = "./crates/pgls_statement_splitter", version = "0.0.0" }
pgls_suppressions = { path = "./crates/pgls_suppressions", version = "0.0.0" }
pgls_text_edit = { path = "./crates/pgls_text_edit", version = "0.0.0" }
Expand Down
1 change: 0 additions & 1 deletion crates/pgls_diagnostics/src/serde.rs
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,6 @@ impl From<super::Location<'_>> for Location {
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[cfg_attr(test, derive(Eq, PartialEq))]

struct Advices {
advices: Vec<Advice>,
}
Expand Down
31 changes: 31 additions & 0 deletions crates/pgls_diagnostics_categories/src/categories.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,30 @@ define_categories! {
"lint/safety/runningStatementWhileHoldingAccessExclusive": "https://pg-language-server.com/latest/rules/running-statement-while-holding-access-exclusive",
"lint/safety/transactionNesting": "https://pg-language-server.com/latest/rules/transaction-nesting",
// end lint rules
// splinter rules start
"splinter/performance/authRlsInitplan": "https://supabase.com/docs/guides/database/database-linter?lint=0003_auth_rls_initplan",
"splinter/performance/duplicateIndex": "https://supabase.com/docs/guides/database/database-linter?lint=0009_duplicate_index",
"splinter/performance/multiplePermissivePolicies": "https://supabase.com/docs/guides/database/database-linter?lint=0006_multiple_permissive_policies",
"splinter/performance/noPrimaryKey": "https://supabase.com/docs/guides/database/database-linter?lint=0004_no_primary_key",
"splinter/performance/tableBloat": "https://supabase.com/docs/guides/database/database-linter",
"splinter/performance/unindexedForeignKeys": "https://supabase.com/docs/guides/database/database-linter?lint=0001_unindexed_foreign_keys",
"splinter/performance/unusedIndex": "https://supabase.com/docs/guides/database/database-linter?lint=0005_unused_index",
"splinter/security/authUsersExposed": "https://supabase.com/docs/guides/database/database-linter?lint=0002_auth_users_exposed",
"splinter/security/extensionInPublic": "https://supabase.com/docs/guides/database/database-linter?lint=0014_extension_in_public",
"splinter/security/extensionVersionsOutdated": "https://supabase.com/docs/guides/database/database-linter?lint=0022_extension_versions_outdated",
"splinter/security/fkeyToAuthUnique": "https://supabase.com/docs/guides/database/database-linter",
"splinter/security/foreignTableInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0017_foreign_table_in_api",
"splinter/security/functionSearchPathMutable": "https://supabase.com/docs/guides/database/database-linter?lint=0011_function_search_path_mutable",
"splinter/security/insecureQueueExposedInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0019_insecure_queue_exposed_in_api",
"splinter/security/materializedViewInApi": "https://supabase.com/docs/guides/database/database-linter?lint=0016_materialized_view_in_api",
"splinter/security/policyExistsRlsDisabled": "https://supabase.com/docs/guides/database/database-linter?lint=0007_policy_exists_rls_disabled",
"splinter/security/rlsDisabledInPublic": "https://supabase.com/docs/guides/database/database-linter?lint=0013_rls_disabled_in_public",
"splinter/security/rlsEnabledNoPolicy": "https://supabase.com/docs/guides/database/database-linter?lint=0008_rls_enabled_no_policy",
"splinter/security/rlsReferencesUserMetadata": "https://supabase.com/docs/guides/database/database-linter?lint=0015_rls_references_user_metadata",
"splinter/security/securityDefinerView": "https://supabase.com/docs/guides/database/database-linter?lint=0010_security_definer_view",
"splinter/security/unsupportedRegTypes": "https://supabase.com/docs/guides/database/database-linter?lint=unsupported_reg_types",
"splinter/unknown/unknown": "https://supabase.com/docs/guides/database/database-linter",
// splinter rules end
;
// General categories
"stdin",
Expand All @@ -69,4 +93,11 @@ define_categories! {
"lint/performance",
"lint/safety",
// Lint groups end

// Splinter groups start
"splinter",
"splinter/performance",
"splinter/security",
"splinter/unknown",
// Splinter groups end
}

Large diffs are not rendered by default.

Large diffs are not rendered by default.

28 changes: 28 additions & 0 deletions crates/pgls_splinter/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
[package]
authors.workspace = true
categories.workspace = true
description = "Diagnostics from the Supabase splinter linter for Postgres databases"
edition.workspace = true
homepage.workspace = true
keywords.workspace = true
license.workspace = true
name = "pgls_splinter"
repository.workspace = true
version = "0.0.0"

[dependencies]
pgls_diagnostics.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlx.workspace = true

[build-dependencies]
ureq = "2.10"

[dev-dependencies]
insta.workspace = true
pgls_console.workspace = true
pgls_test_utils.workspace = true

[lib]
doctest = false
166 changes: 166 additions & 0 deletions crates/pgls_splinter/build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
use std::env;
use std::fs;
use std::path::Path;

// Update this commit SHA to pull in a new version of splinter.sql
const SPLINTER_COMMIT_SHA: &str = "27ea2ece65464213e466cd969cc61b6940d16219";

// Rules that require Supabase-specific infrastructure (auth schema, anon/authenticated roles, pgrst.db_schemas)
const SUPABASE_ONLY_RULES: &[&str] = &[
"auth_users_exposed",
"auth_rls_initplan",
"rls_disabled_in_public",
"security_definer_view",
"rls_references_user_metadata",
"materialized_view_in_api",
"foreign_table_in_api",
"insecure_queue_exposed_in_api",
"fkey_to_auth_unique",
];

/// Build script: vendors Supabase's `splinter.sql` at a pinned commit and
/// splits it into generic and Supabase-specific query files under `vendor/`.
///
/// Downloads only when a vendored artifact is missing or the pinned
/// `SPLINTER_COMMIT_SHA` no longer matches `vendor/COMMIT_SHA.txt`.
fn main() {
    // NOTE: files are vendored into the crate source tree (CARGO_MANIFEST_DIR),
    // not OUT_DIR, so the processed SQL can be committed alongside the crate.
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
    let vendor_dir = Path::new(&manifest_dir).join("vendor");
    let generic_sql_file = vendor_dir.join("splinter_generic.sql");
    let supabase_sql_file = vendor_dir.join("splinter_supabase.sql");
    let sha_file = vendor_dir.join("COMMIT_SHA.txt");

    // `create_dir_all` is a no-op when the directory already exists, so no
    // (racy) `exists()` pre-check is needed.
    fs::create_dir_all(&vendor_dir).expect("Failed to create vendor directory");

    // Re-download when any vendored file is missing, or when the stored SHA
    // differs from the pinned constant. The `||` chain short-circuits, so the
    // SHA file is only read when all three files exist.
    let needs_download = !generic_sql_file.exists()
        || !supabase_sql_file.exists()
        || !sha_file.exists()
        || fs::read_to_string(&sha_file)
            .expect("Failed to read COMMIT_SHA.txt")
            .trim()
            != SPLINTER_COMMIT_SHA;

    if needs_download {
        println!(
            "cargo:warning=Downloading splinter.sql from GitHub (commit: {SPLINTER_COMMIT_SHA})"
        );
        download_and_process_sql(&generic_sql_file, &supabase_sql_file);
        fs::write(&sha_file, SPLINTER_COMMIT_SHA).expect("Failed to write COMMIT_SHA.txt");
    }

    // Tell cargo to rerun if build.rs or the pinned-SHA marker changes.
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=vendor/COMMIT_SHA.txt");
}

/// Fetch `splinter.sql` for the pinned commit, post-process it for sqlx, and
/// write the generic and Supabase-specific halves to the given destinations.
fn download_and_process_sql(generic_dest: &Path, supabase_dest: &Path) {
    let url = format!(
        "https://raw.githubusercontent.com/supabase/splinter/{SPLINTER_COMMIT_SHA}/splinter.sql"
    );

    // Fetch the raw SQL from GitHub.
    let raw = ureq::get(&url)
        .call()
        .expect("Failed to download splinter.sql")
        .into_string()
        .expect("Failed to read response body");

    // Drop the `SET LOCAL search_path` line, then annotate the column aliases
    // with `!` so sqlx's checker treats them as non-null.
    let annotated = add_not_null_markers(&remove_set_search_path(&raw));

    // Partition the union-all query list into generic vs Supabase-only rules.
    let (generic_queries, supabase_queries) = split_queries(&annotated);

    fs::write(generic_dest, generic_queries).expect("Failed to write splinter_generic.sql");
    fs::write(supabase_dest, supabase_queries).expect("Failed to write splinter_supabase.sql");

    println!(
        "cargo:warning=Successfully downloaded and processed splinter.sql into generic and Supabase-specific files"
    );
}

/// Strip every `SET LOCAL search_path ...` statement line (matched
/// case-insensitively after trimming), keeping all other lines joined
/// with `\n`.
fn remove_set_search_path(content: &str) -> String {
    let mut kept = Vec::new();
    for line in content.lines() {
        let lowered = line.trim().to_lowercase();
        if !lowered.starts_with("set local search_path") {
            kept.push(line);
        }
    }
    kept.join("\n")
}

/// Rewrite `... as <col>` aliases to `... as "<col>!"` for the known splinter
/// result columns so sqlx's compile-time checker treats them as non-null.
///
/// The replacement is boundary-aware: an alias is rewritten only when the
/// column name is not followed by another identifier character, so e.g.
/// ` as namespace` is left untouched while ` as name` becomes ` as "name!"`.
/// (A bare `result.replace(" as name", ...)` would corrupt longer
/// identifiers that merely start with a marked column name.)
fn add_not_null_markers(content: &str) -> String {
    // Result columns emitted by every splinter query.
    const COLUMNS_TO_MARK: [&str; 10] = [
        "name",
        "title",
        "level",
        "facing",
        "categories",
        "description",
        "detail",
        "remediation",
        "metadata",
        "cache_key",
    ];

    let mut result = content.to_string();

    for column in COLUMNS_TO_MARK {
        let pattern = format!(" as {column}");
        let replacement = format!(" as \"{column}!\"");

        let mut rewritten = String::with_capacity(result.len());
        let mut rest = result.as_str();
        while let Some(pos) = rest.find(&pattern) {
            let end = pos + pattern.len();
            // Rewrite only at an identifier boundary: the next character (if
            // any) must not be able to continue an identifier.
            let is_boundary = rest[end..]
                .chars()
                .next()
                .map_or(true, |c| !c.is_alphanumeric() && c != '_');
            if is_boundary {
                rewritten.push_str(&rest[..pos]);
                rewritten.push_str(&replacement);
            } else {
                // Longer identifier (e.g. `namespace`); copy it through as-is.
                rewritten.push_str(&rest[..end]);
            }
            rest = &rest[end..];
        }
        rewritten.push_str(rest);
        result = rewritten;
    }

    result
}

fn split_queries(content: &str) -> (String, String) {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

as I understand it, rules are per default put into the generic category.
what happens if supabase adds a new rule to the splinter that's supabase-specific?
that might break linting in case a table isn't defined for non-supabase databases, right?
should we throw during build if there's an unknown rule in the splinter?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

great catch!

// Split the union all queries based on rule names
let queries: Vec<&str> = content.split("union all").collect();

let mut generic_queries = Vec::new();
let mut supabase_queries = Vec::new();

for query in queries {
// Extract the rule name from the query (it's the first 'name' field)
let is_supabase = SUPABASE_ONLY_RULES
.iter()
.any(|rule| query.contains(&format!("'{rule}' as \"name!\"")));

if is_supabase {
supabase_queries.push(query);
} else {
generic_queries.push(query);
}
}

// Join queries with "union all" and wrap in parentheses
let generic_sql = if generic_queries.is_empty() {
String::new()
} else {
generic_queries.join("union all\n")
};

let supabase_sql = if supabase_queries.is_empty() {
String::new()
} else {
supabase_queries.join("union all\n")
};

(generic_sql, supabase_sql)
}
Loading