From 8388ba30ed69a846e53ddd10e5f611b67f4c08ce Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Tue, 7 Apr 2026 17:18:52 +0000 Subject: [PATCH 01/13] refactor(query): split lookup.rs and compiler.rs into modules Split monolithic files into modular structure for better maintainability: - src/query/lookups/: - lookups.rs: Core types, registry, resolve() logic - common_lookups.rs: Comparison/string lookups (exact, gt, contains, etc.) - date_lookups.rs: Date/time transforms (year, month, day, hour, etc.) - json_lookups.rs: JSON transforms and lookups (key, has_key, etc.) - mod.rs: Re-exports - src/query/compiler/: - compiler.rs: Main SQL compiler implementation - helpers.rs: Internal helpers (quote_col, qualified_col, etc.) - mod.rs: Re-exports All 241 tests pass. No functional changes. --- src/lib.rs | 8 +- src/query/{ => compiler}/compiler.rs | 347 +++-------- src/query/compiler/helpers.rs | 62 ++ src/query/compiler/mod.rs | 29 + src/query/lookup.rs | 873 --------------------------- src/query/lookups/common_lookups.rs | 101 ++++ src/query/lookups/date_lookups.rs | 201 ++++++ src/query/lookups/json_lookups.rs | 129 ++++ src/query/lookups/lookups.rs | 336 +++++++++++ src/query/lookups/mod.rs | 34 ++ src/query/mod.rs | 2 +- 11 files changed, 994 insertions(+), 1128 deletions(-) rename src/query/{ => compiler}/compiler.rs (65%) create mode 100644 src/query/compiler/helpers.rs create mode 100644 src/query/compiler/mod.rs delete mode 100644 src/query/lookup.rs create mode 100644 src/query/lookups/common_lookups.rs create mode 100644 src/query/lookups/date_lookups.rs create mode 100644 src/query/lookups/json_lookups.rs create mode 100644 src/query/lookups/lookups.rs create mode 100644 src/query/lookups/mod.rs diff --git a/src/lib.rs b/src/lib.rs index 136aec5..b64f3d1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,7 +19,7 @@ use crate::query::ast::{ QueryOperation, SqlValue, }; use crate::query::compiler; -use crate::query::lookup; +use crate::query::lookups; use 
crate::transaction::TransactionHandle; // ### @@ -59,12 +59,12 @@ fn setup<'py>( #[pyfunction] fn register_lookup(name: String, sql_template: String) -> PyResult<()> { - lookup::register_custom(name, sql_template).map_err(PyErr::from) + lookups::register_custom(name, sql_template).map_err(PyErr::from) } #[pyfunction] fn available_lookups() -> PyResult> { - lookup::registered_lookups().map_err(PyErr::from) + lookups::registered_lookups().map_err(PyErr::from) } #[pyfunction] @@ -811,7 +811,7 @@ fn bulk_update<'py>( #[pymodule] fn ryx_core(m: &Bound<'_, PyModule>) -> PyResult<()> { - lookup::init_registry(); + lookups::init_registry(); let mut builder = tokio::runtime::Builder::new_multi_thread(); builder.worker_threads(4).enable_all(); diff --git a/src/query/compiler.rs b/src/query/compiler/compiler.rs similarity index 65% rename from src/query/compiler.rs rename to src/query/compiler/compiler.rs index f4baac5..06e3dad 100644 --- a/src/query/compiler.rs +++ b/src/query/compiler/compiler.rs @@ -1,14 +1,10 @@ // // ### -// Ryx — SQL Compiler +// Ryx — SQL Compiler Implementation +// ### // -// Supports: -// compile_q() : recursive Q-tree → SQL fragment -// compile_joins() : JoinClause list → SQL JOIN clauses -// compile_aggs() : AggregateExpr list → SELECT aggregate columns -// compile_group_by(): GROUP BY clause -// compile_having() : HAVING clause (same engine as WHERE) -// compile_select() : now merges plain columns + aggregate annotations +// This file contains the SQL compiler that transforms QueryNode AST into SQL strings. +// See compiler/mod.rs for the module structure. 
// ### use crate::errors::{RyxError, RyxResult}; @@ -17,20 +13,20 @@ use crate::query::ast::{ AggFunc, AggregateExpr, FilterNode, JoinClause, JoinKind, QNode, QueryNode, QueryOperation, SortDirection, SqlValue, }; -use crate::query::lookup::{self, LookupContext}; +use crate::query::lookups::date_lookups as date; +use crate::query::lookups::json_lookups as json; +use crate::query::lookups::{self, LookupContext}; + +pub use super::helpers::{apply_like_wrapping, qualified_col, split_qualified, KNOWN_TRANSFORMS}; + +use super::helpers; -// ### -// Output type -// ### #[derive(Debug, Clone)] pub struct CompiledQuery { pub sql: String, pub values: Vec, } -// ### -// Public entry point -// ### pub fn compile(node: &QueryNode) -> RyxResult { let mut values: Vec = Vec::new(); let sql = match &node.operation { @@ -49,22 +45,16 @@ pub fn compile(node: &QueryNode) -> RyxResult { Ok(CompiledQuery { sql, values }) } -// ### -// SELECT -// ### - fn compile_select( node: &QueryNode, columns: Option<&[String]>, values: &mut Vec, ) -> RyxResult { - // # SELECT list - // Columns from plain columns arg + annotation aliases merged together. let base_cols = match columns { None => "*".to_string(), Some(cols) => cols .iter() - .map(|c| qualified_col(c)) + .map(|c| helpers::qualified_col(c)) .collect::>() .join(", "), }; @@ -74,15 +64,13 @@ fn compile_select( let select_list = match (base_cols.as_str(), agg_cols.as_str()) { (_, "") => base_cols, ("*", _) => { - // When we have annotations we drop the bare * and only emit the - // GROUP BY columns + aggregates (standard SQL). 
if node.group_by.is_empty() { agg_cols } else { let gb = node .group_by .iter() - .map(|c| quote_col(c)) + .map(|c| helpers::quote_col(c)) .collect::>() .join(", "); format!("{gb}, {agg_cols}") @@ -94,16 +82,14 @@ fn compile_select( let distinct = if node.distinct { "DISTINCT " } else { "" }; let mut sql = format!( "SELECT {distinct}{select_list} FROM {tbl}", - tbl = quote_col(&node.table), + tbl = helpers::quote_col(&node.table), ); - // # JOINs if !node.joins.is_empty() { sql.push(' '); sql.push_str(&compile_joins(&node.joins)); } - // # WHERE let where_sql = compile_where_combined(&node.filters, node.q_filter.as_ref(), values, node.backend)?; if !where_sql.is_empty() { @@ -111,26 +97,23 @@ fn compile_select( sql.push_str(&where_sql); } - // # GROUP BY if !node.group_by.is_empty() { let gb = node .group_by .iter() - .map(|c| quote_col(c)) + .map(|c| helpers::quote_col(c)) .collect::>() .join(", "); sql.push_str(" GROUP BY "); sql.push_str(&gb); } - // # HAVING if !node.having.is_empty() { let having = compile_filters(&node.having, values, node.backend)?; sql.push_str(" HAVING "); sql.push_str(&having); } - // # ORDER BY if !node.order_by.is_empty() { sql.push_str(" ORDER BY "); sql.push_str(&compile_order_by(&node.order_by)); @@ -146,12 +129,6 @@ fn compile_select( Ok(sql) } -// ### -// AGGREGATE (no rows returned — only aggregate scalars) -// -// Used by `.aggregate(total=Sum("views"))`. -// Returns a single row dict like {"total": 1234, "avg_views": 42.5}. 
-// ### fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult { if node.annotations.is_empty() { return Err(RyxError::Internal( @@ -159,7 +136,7 @@ fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult< )); } let agg_cols = compile_agg_cols(&node.annotations); - let mut sql = format!("SELECT {agg_cols} FROM {}", quote_col(&node.table)); + let mut sql = format!("SELECT {agg_cols} FROM {}", helpers::quote_col(&node.table)); if !node.joins.is_empty() { sql.push(' '); @@ -176,16 +153,8 @@ fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult< Ok(sql) } -// ### -// COUNT -// ### - -// ### -// COUNT -// ### - fn compile_count(node: &QueryNode, values: &mut Vec) -> RyxResult { - let mut sql = format!("SELECT COUNT(*) FROM {}", quote_col(&node.table)); + let mut sql = format!("SELECT COUNT(*) FROM {}", helpers::quote_col(&node.table)); if !node.joins.is_empty() { sql.push(' '); sql.push_str(&compile_joins(&node.joins)); @@ -199,12 +168,8 @@ fn compile_count(node: &QueryNode, values: &mut Vec) -> RyxResult) -> RyxResult { - let mut sql = format!("DELETE FROM {}", quote_col(&node.table)); + let mut sql = format!("DELETE FROM {}", helpers::quote_col(&node.table)); let where_sql = compile_where_combined(&node.filters, node.q_filter.as_ref(), values, node.backend)?; if !where_sql.is_empty() { @@ -214,10 +179,6 @@ fn compile_delete(node: &QueryNode, values: &mut Vec) -> RyxResult>() .join(", "); let ph = std::iter::repeat_n("?", cols.len()) @@ -268,7 +229,7 @@ fn compile_insert( .join(", "); let mut sql = format!( "INSERT INTO {} ({}) VALUES ({})", - quote_col(&node.table), + helpers::quote_col(&node.table), cols_sql, ph ); @@ -278,11 +239,7 @@ fn compile_insert( Ok(sql) } -// ### -// JOIN compilation -// ### - -fn compile_joins(joins: &[JoinClause]) -> String { +pub fn compile_joins(joins: &[JoinClause]) -> String { joins .iter() .map(|j| { @@ -296,26 +253,34 @@ fn compile_joins(joins: &[JoinClause]) -> String { let alias_sql = j 
.alias .as_deref() - .map(|a| format!(" AS {}", quote_col(a))) + .map(|a| format!(" AS {}", helpers::quote_col(a))) .unwrap_or_default(); - let (l_table, l_col) = split_qualified(&j.on_left); - let (r_table, r_col) = split_qualified(&j.on_right); + let (l_table, l_col): (String, String) = helpers::split_qualified(&j.on_left); + let (r_table, r_col): (String, String) = helpers::split_qualified(&j.on_right); let on_l = if l_table.is_empty() { - quote_col(&l_col) + helpers::quote_col(&l_col) } else { - format!("{}.{}", quote_col(&l_table), quote_col(&l_col)) + format!( + "{}.{}", + helpers::quote_col(&l_table), + helpers::quote_col(&l_col) + ) }; let on_r = if r_table.is_empty() { - quote_col(&r_col) + helpers::quote_col(&r_col) } else { - format!("{}.{}", quote_col(&r_table), quote_col(&r_col)) + format!( + "{}.{}", + helpers::quote_col(&r_table), + helpers::quote_col(&r_col) + ) }; if j.kind == JoinKind::CrossJoin { - format!("{kind} {}{alias_sql}", quote_col(&j.table)) + format!("{kind} {}{alias_sql}", helpers::quote_col(&j.table)) } else { format!( "{kind} {}{alias_sql} ON {on_l} = {on_r}", - quote_col(&j.table) + helpers::quote_col(&j.table) ) } }) @@ -323,17 +288,13 @@ fn compile_joins(joins: &[JoinClause]) -> String { .join(" ") } -// ### -// Aggregate column list → SUM("views") AS "total_views", ... 
-// ### - -fn compile_agg_cols(anns: &[AggregateExpr]) -> String { +pub fn compile_agg_cols(anns: &[AggregateExpr]) -> String { anns.iter() .map(|a| { let col = if a.field == "*" { "*".to_string() } else { - qualified_col(&a.field) + helpers::qualified_col(&a.field) }; let distinct = if a.distinct && a.func != AggFunc::Count { "DISTINCT " @@ -343,13 +304,13 @@ fn compile_agg_cols(anns: &[AggregateExpr]) -> String { "" }; match &a.func { - AggFunc::Raw(expr) => format!("{expr} AS {}", quote_col(&a.alias)), + AggFunc::Raw(expr) => format!("{expr} AS {}", helpers::quote_col(&a.alias)), f => format!( "{}({}{}) AS {}", f.sql_name(), distinct, col, - quote_col(&a.alias) + helpers::quote_col(&a.alias) ), } }) @@ -357,9 +318,19 @@ fn compile_agg_cols(anns: &[AggregateExpr]) -> String { .join(", ") } -// ### -// WHERE = flat filters AND Q-tree (merged) -// ### +pub fn compile_order_by(clauses: &[crate::query::ast::OrderByClause]) -> String { + clauses + .iter() + .map(|c| { + let dir = match c.direction { + SortDirection::Asc => "ASC", + SortDirection::Desc => "DESC", + }; + format!("{} {dir}", helpers::qualified_col(&c.field)) + }) + .collect::>() + .join(", ") +} fn compile_where_combined( filters: &[FilterNode], @@ -385,15 +356,7 @@ fn compile_where_combined( }) } -// ### -// Q-tree compiler (recursive) -// ### - -/// Recursively compile a QNode tree into a SQL fragment. -/// -/// Design: we emit minimal parentheses — each non-leaf node wraps its children -/// in parens only when necessary (AND inside OR must be parenthesised). 
-fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResult { +pub fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResult { match q { QNode::Leaf { field, @@ -422,10 +385,6 @@ fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResu } } -// ### -// Flat filter list compiler -// ### - fn compile_filters( filters: &[FilterNode], values: &mut Vec, @@ -438,10 +397,6 @@ fn compile_filters( Ok(parts.join(" AND ")) } -// ### -// Single filter → SQL fragment (shared by flat list and Q-tree) -// ### - fn compile_single_filter( field: &str, lookup: &str, @@ -450,38 +405,24 @@ fn compile_single_filter( values: &mut Vec, backend: Backend, ) -> RyxResult { - // Support "table.column" qualified references in filters - // Also handle field__transform patterns (e.g., "created_at__year") - // For JSON key lookups like "bio__key__priority", we need to handle specially - let known_transforms = [ - "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", - "time", "iso_week", "iso_dow", "key", "key_text", "json", - ]; - let (base_column, applied_transforms, json_key) = if field.contains("__") { let parts: Vec<&str> = field.split("__").collect(); - // Find the first part that's NOT a known transform - that's the JSON key - // For example: "bio__key__priority" -> transforms=["key"], key="priority", base="bio" let mut transforms = Vec::new(); let mut key_part: Option<&str> = None; for part in parts[1..].iter() { - if known_transforms.contains(part) { + if KNOWN_TRANSFORMS.contains(part) { transforms.push(*part); } else { - // First non-transform part is the JSON key key_part = Some(*part); break; } } if let Some(key) = key_part { - // Base column is just the first part (the field name) - // Transforms is everything that came before the key (parts[0].to_string(), transforms, Some(key.to_string())) } else if !transforms.is_empty() { - // All parts are transforms (parts[0].to_string(), transforms, None) } else { 
(field.to_string(), vec![], None) @@ -490,28 +431,18 @@ fn compile_single_filter( (field.to_string(), vec![], None) }; - // For JSON key transforms, we need to pass the key to resolve() - // The key is embedded in the field name (bio__key__priority -> key=priority) - - // If the lookup contains "__" (is a chained lookup like "month__gte"), - // DON'T apply transforms here - let resolve() handle it completely - // This avoids double-transform issues where the compiler applies transform - // and then resolve() also tries to handle it let final_column = if lookup.contains("__") { - // For chained lookups, use just the base column - resolve() will handle transforms - qualified_col(&base_column) + helpers::qualified_col(&base_column) } else if !applied_transforms.is_empty() { - // For simple transform-only lookups (like "year"), apply transforms here - let mut result = qualified_col(&base_column); + let mut result = helpers::qualified_col(&base_column); for transform in &applied_transforms { - result = lookup::apply_transform(transform, &result, backend, None)?; + result = lookups::apply_transform(transform, &result, backend, None)?; } result } else { - qualified_col(&base_column) + helpers::qualified_col(&base_column) }; - // For JSON key transforms, pass the key in the context let ctx = LookupContext { column: final_column.clone(), negated, @@ -519,7 +450,6 @@ fn compile_single_filter( json_key: json_key.clone(), }; - // # isnull (no bind param) if lookup == "isnull" { let is_null = match value { SqlValue::Bool(b) => *b, @@ -538,7 +468,6 @@ fn compile_single_filter( }); } - // # in (expand N placeholders) if lookup == "in" { let items = match value { SqlValue::List(v) => v.clone(), @@ -547,6 +476,7 @@ fn compile_single_filter( if items.is_empty() { return Ok("(1 = 0)".into()); } + let ph = std::iter::repeat_n("?", items.len()) .collect::>() .join(", "); @@ -559,7 +489,6 @@ fn compile_single_filter( }); } - // # range (two bind params) if lookup == "range" { let (lo, 
hi) = match value { SqlValue::List(v) if v.len() == 2 => (v[0].clone(), v[1].clone()), @@ -575,19 +504,8 @@ fn compile_single_filter( }); } - // # general lookup - // If lookup is a transform (like "year", "month"), use the transform function which includes = ? - // BUT if lookup contains "__" (like "date__gte"), we need to use resolve() to handle the chain - // ALSO use resolve() for JSON key transforms even if lookup is simple (like "exact") - let known_transforms = [ - "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", - "time", "iso_week", "iso_dow", "key", "key_text", "json", - ]; - - // If lookup contains "__", it's a chained lookup (e.g., "date__gte") - use resolve() - // OR if we have a JSON key (json_key is Some), we need resolve() to apply it if lookup.contains("__") || json_key.is_some() { - let fragment = lookup::resolve(&base_column, lookup, &ctx)?; + let fragment = lookups::resolve(&base_column, lookup, &ctx)?; values.push(value.clone()); return Ok(if negated { format!("NOT ({fragment})") @@ -596,24 +514,24 @@ fn compile_single_filter( }); } - if known_transforms.contains(&lookup) { + if KNOWN_TRANSFORMS.contains(&lookup) { let transform_fn = match lookup { - "date" => lookup::date_transform, - "year" => lookup::year_transform, - "month" => lookup::month_transform, - "day" => lookup::day_transform, - "hour" => lookup::hour_transform, - "minute" => lookup::minute_transform, - "second" => lookup::second_transform, - "week" => lookup::week_transform, - "dow" => lookup::dow_transform, - "quarter" => lookup::quarter_transform, - "time" => lookup::time_transform, - "iso_week" => lookup::iso_week_transform, - "iso_dow" => lookup::iso_dow_transform, - "key" => lookup::json_key_transform, - "key_text" => lookup::json_key_text_transform, - "json" => lookup::json_cast_transform, + "date" => date::date_transform as crate::query::lookups::LookupFn, + "year" => date::year_transform as crate::query::lookups::LookupFn, + "month" => 
date::month_transform as crate::query::lookups::LookupFn, + "day" => date::day_transform as crate::query::lookups::LookupFn, + "hour" => date::hour_transform as crate::query::lookups::LookupFn, + "minute" => date::minute_transform as crate::query::lookups::LookupFn, + "second" => date::second_transform as crate::query::lookups::LookupFn, + "week" => date::week_transform as crate::query::lookups::LookupFn, + "dow" => date::dow_transform as crate::query::lookups::LookupFn, + "quarter" => date::quarter_transform as crate::query::lookups::LookupFn, + "time" => date::time_transform as crate::query::lookups::LookupFn, + "iso_week" => date::iso_week_transform as crate::query::lookups::LookupFn, + "iso_dow" => date::iso_dow_transform as crate::query::lookups::LookupFn, + "key" => json::json_key_transform as crate::query::lookups::LookupFn, + "key_text" => json::json_key_text_transform as crate::query::lookups::LookupFn, + "json" => json::json_cast_transform as crate::query::lookups::LookupFn, _ => { return Err(RyxError::UnknownLookup { field: field.to_string(), @@ -621,12 +539,11 @@ fn compile_single_filter( }) } }; - // For transforms, we need to push the value to the values vector values.push(value.clone()); return Ok(transform_fn(&ctx)); } - let fragment = lookup::resolve(&base_column, lookup, &ctx)?; + let fragment = lookups::resolve(&base_column, lookup, &ctx)?; let bound = apply_like_wrapping(lookup, value.clone()); values.push(bound); Ok(if negated { @@ -636,95 +553,21 @@ fn compile_single_filter( }) } -// ### -// ORDER BY -// ### -fn compile_order_by(clauses: &[crate::query::ast::OrderByClause]) -> String { - clauses - .iter() - .map(|c| { - let dir = match c.direction { - SortDirection::Asc => "ASC", - SortDirection::Desc => "DESC", - }; - format!("{} {dir}", qualified_col(&c.field)) - }) - .collect::>() - .join(", ") -} - -// ### -// Identifier helpers -// ### - -/// Double-quote a simple identifier (column or table name). 
-pub fn quote_col(s: &str) -> String { - format!("\"{}\"", s.replace('"', "\"\"")) -} - -/// Handle `table.column` → `"table"."column"`, or plain column → `"column"`. -/// Also handles annotation aliases (already an expression — left as-is). -fn qualified_col(s: &str) -> String { - if s.contains('.') { - let (table, col) = s.split_once('.').unwrap(); - format!("{}.{}", quote_col(table), quote_col(col)) - } else { - quote_col(s) - } -} - -/// Split `"table.column"` into `("table", "column")`. -/// Returns `("", s)` if there is no dot. -fn split_qualified(s: &str) -> (String, String) { - if let Some((t, c)) = s.split_once('.') { - (t.to_string(), c.to_string()) - } else { - (String::new(), s.to_string()) - } -} - -/// Apply LIKE `%` wrapping to the value based on the lookup type. -fn apply_like_wrapping(lookup: &str, value: SqlValue) -> SqlValue { - match lookup { - "contains" | "icontains" => wrap_text(value, |s| format!("%{s}%")), - "startswith" | "istartswith" => wrap_text(value, |s| format!("{s}%")), - "endswith" | "iendswith" => wrap_text(value, |s| format!("%{s}")), - _ => value, - } -} - -fn wrap_text(value: SqlValue, f: impl Fn(String) -> String) -> SqlValue { - if let SqlValue::Text(s) = value { - SqlValue::Text(f(s)) - } else { - value - } -} - -// ### -// Unit tests -// ### - #[cfg(test)] mod tests { use super::*; use crate::query::ast::*; - use crate::query::lookup; - - fn init() { - lookup::init_registry(); - } #[test] fn test_bare_select() { - init(); + init_registry(); let q = compile(&QueryNode::select("posts")).unwrap(); assert_eq!(q.sql, r#"SELECT * FROM "posts""#); } #[test] fn test_q_or() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node = node.with_q(QNode::Or(vec![ QNode::Leaf { @@ -746,7 +589,7 @@ mod tests { #[test] fn test_inner_join() { - init(); + init_registry(); let node = QueryNode::select("posts").with_join(JoinClause { kind: JoinKind::Inner, table: "authors".into(), @@ -761,7 +604,7 @@ mod tests { #[test] 
fn test_aggregate_sum() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node.operation = QueryOperation::Aggregate; node = node.with_annotation(AggregateExpr { @@ -777,7 +620,7 @@ mod tests { #[test] fn test_group_by() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node = node .with_annotation(AggregateExpr { @@ -793,7 +636,7 @@ mod tests { #[test] fn test_having() { - init(); + init_registry(); let mut node = QueryNode::select("posts"); node.operation = QueryOperation::Select { columns: None }; node = node @@ -813,4 +656,8 @@ mod tests { let q = compile(&node).unwrap(); assert!(q.sql.contains("HAVING"), "{}", q.sql); } + + fn init_registry() { + crate::query::lookups::init_registry(); + } } diff --git a/src/query/compiler/helpers.rs b/src/query/compiler/helpers.rs new file mode 100644 index 0000000..9d039db --- /dev/null +++ b/src/query/compiler/helpers.rs @@ -0,0 +1,62 @@ +// +// ### +// Ryx — Compiler Helpers +// ### +// +// Contains internal helper functions for SQL compilation: +// - Identifier quoting (quote_col, qualified_col, split_qualified) +// - LIKE wrapping (apply_like_wrapping) +// - Other compilation utilities +// ### + +use crate::query::ast::SqlValue; + +/// Double-quote a simple identifier (column or table name). +pub fn quote_col(s: &str) -> String { + format!("\"{}\"", s.replace('"', "\"\"")) +} + +/// Handle `table.column` → `"table"."column"`, or plain column → `"column"`. +/// Also handles annotation aliases (already an expression — left as-is). +pub fn qualified_col(s: &str) -> String { + if s.contains('.') { + let (table, col) = s.split_once('.').unwrap(); + format!("{}.{}", quote_col(table), quote_col(col)) + } else { + quote_col(s) + } +} + +/// Split `"table.column"` into `("table", "column")`. +/// Returns `("", s)` if there is no dot. 
+pub fn split_qualified(s: &str) -> (String, String) { + if let Some((t, c)) = s.split_once('.') { + (t.to_string(), c.to_string()) + } else { + (String::new(), s.to_string()) + } +} + +/// Apply LIKE `%` wrapping to the value based on the lookup type. +pub fn apply_like_wrapping(lookup: &str, value: SqlValue) -> SqlValue { + match lookup { + "contains" | "icontains" => wrap_text(value, |s| format!("%{s}%")), + "startswith" | "istartswith" => wrap_text(value, |s| format!("{s}%")), + "endswith" | "iendswith" => wrap_text(value, |s| format!("%{s}")), + _ => value, + } +} + +fn wrap_text(value: SqlValue, f: impl Fn(String) -> String) -> SqlValue { + if let SqlValue::Text(s) = value { + SqlValue::Text(f(s)) + } else { + value + } +} + +/// Known transforms that can be applied in field paths +pub const KNOWN_TRANSFORMS: [&str; 16] = [ + "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", "time", + "iso_week", "iso_dow", "key", "key_text", "json", +]; diff --git a/src/query/compiler/mod.rs b/src/query/compiler/mod.rs new file mode 100644 index 0000000..e550b88 --- /dev/null +++ b/src/query/compiler/mod.rs @@ -0,0 +1,29 @@ +// +// ### +// Ryx — Compiler Module +// ### +// +// This module contains the SQL compiler that transforms QueryNode AST into SQL strings. +// The module is organized as: +// - mod.rs : Re-exports from compiler.rs +// - compiler.rs: Main implementation (compile, compile_select, etc.) +// - helpers.rs : Internal helper functions (quote_col, qualified_col, etc.) 
+// ### + +pub mod compiler; +pub mod helpers; + +// Re-export from compiler.rs +pub use compiler::compile; +pub use compiler::compile_agg_cols; +pub use compiler::compile_joins; +pub use compiler::compile_order_by; +pub use compiler::compile_q; +pub use compiler::CompiledQuery; + +// Re-export from helpers.rs +pub use helpers::apply_like_wrapping; +pub use helpers::qualified_col; +pub use helpers::quote_col; +pub use helpers::split_qualified; +pub use helpers::KNOWN_TRANSFORMS; diff --git a/src/query/lookup.rs b/src/query/lookup.rs deleted file mode 100644 index cb7da75..0000000 --- a/src/query/lookup.rs +++ /dev/null @@ -1,873 +0,0 @@ -// -// ### -// Ryx — Lookup System -// ### -// -// A "lookup" is the suffix after `__` in a filter expression. -// Examples: -// `age__gte=25` → lookup = "gte", SQL = "age >= $1" -// `name__icontains="bob"` → lookup = "icontains", SQL = "LOWER(name) LIKE LOWER($1)" -// `id__in=[1,2,3]` → lookup = "in", SQL = "id IN ($1, $2, $3)" -// -// # Extensibility design -// -// Users can register custom lookups from Python: -// -// from Ryx import register_lookup -// -// @register_lookup("uuid_prefix") -// def uuid_prefix_lookup(field: str, _value) -> str: -// return f"{field}::text LIKE ${{placeholder}}" -// -// Internally this works via a global `DashMap` that stores -// both the built-in lookups and any user-registered ones. We use DashMap -// (concurrent HashMap) so registrations from Python threads are safe. -// -// Why not a trait object (`Box`)? We need lookups to be thread-safe -// and Send+Sync since they're shared across async tasks. Function pointers -// (`fn`) are always Send+Sync, so they're stored directly in the map. -// For user-registered lookups (coming from Python callables) we store a -// Python-side callable name and call back to Python at query-build time. -// -// # SQL placeholder strategy -// -// Different databases use different placeholder syntax: -// PostgreSQL: $1, $2, $3, ... -// MySQL: ?, ?, ?, ... 
-// SQLite: ?, ?, ?, ... -// -// We abstract this by always generating `?` placeholders in the AST and -// letting the backend-specific compiler rewrite them. This is exactly what -// sqlx's `AnyPool` does internally. -// ### - -use std::collections::HashMap; -use std::sync::{OnceLock, RwLock}; - -use crate::errors::{RyxError, RyxResult}; -use crate::pool::Backend; - -// ### -// Core types -// ### - -/// Context passed to every lookup function when building a SQL fragment. -/// -/// The lookup function receives the column name and must return a SQL fragment -/// with `?` as the value placeholder. It does NOT need to know the placeholder -/// index — the compiler handles numbering. -/// -/// # Example (for the "gte" lookup) -/// ``` -/// // field = "age", returns: "age >= ?" -/// fn gte_lookup(ctx: &LookupContext) -> String { -/// format!("{} >= ?", ctx.column) -/// } -/// ``` -#[derive(Debug, Clone)] -pub struct LookupContext { - /// The SQL column name, already quoted/escaped. - pub column: String, - - /// Whether the lookup is negated (i.e., inside an `exclude()` call). - /// Most lookups ignore this — negation is applied by the compiler. - pub negated: bool, - - /// The database backend (PostgreSQL, MySQL, SQLite). - /// Used for backend-specific SQL generation. - pub backend: Backend, - - /// For JSON key transforms (e.g., bio__key__priority), this holds the key name ("priority") - /// Used by apply_transform() to generate correct JSON path accessors. - pub json_key: Option, -} - -/// The function signature for a built-in lookup implementation. -/// -/// Takes a `LookupContext` and returns a SQL fragment string. -/// The function must be `fn` (not closure) to be `Send + Sync`. -pub type LookupFn = fn(&LookupContext) -> String; - -/// A lookup that was registered from Python: stores the callable and a -/// Rust-generated SQL template where `{col}` is the column placeholder. 
-/// -/// Python-registered lookups are called at SQL-build time with the column -/// name substituted in. This avoids holding the GIL for every query. -/// The Python callable is only invoked once at registration time to extract -/// the SQL template string. -#[derive(Debug, Clone)] -pub struct PythonLookup { - /// Pre-rendered SQL template. Example: `"LOWER({col}) LIKE LOWER(?)"` - /// The caller substitutes `{col}` with the actual column name. - pub sql_template: String, -} - -// -// Global lookup registry -// -/// The two registries live side-by-side: -/// - `builtin`: populated once at startup with the built-in lookups -/// - `custom`: populated at runtime with user-registered lookups -/// -/// We check `custom` first so users can override built-ins (e.g., to change -/// the SQL generated by `icontains` for a database that has native ILIKE). -struct LookupRegistry { - builtin: HashMap<&'static str, LookupFn>, - custom: HashMap, -} - -static REGISTRY: OnceLock> = OnceLock::new(); - -/// Initialize the registry with all built-in lookups. -/// Called once from `lib.rs` module initialization. -pub fn init_registry() { - REGISTRY.get_or_init(|| { - let mut builtin = HashMap::new(); - - // Comparison lookups - builtin.insert("exact", exact as LookupFn); - builtin.insert("gt", gt as LookupFn); - builtin.insert("gte", gte as LookupFn); - builtin.insert("lt", lt as LookupFn); - builtin.insert("lte", lte as LookupFn); - - // String lookups - builtin.insert("contains", contains as LookupFn); - builtin.insert("icontains", icontains as LookupFn); - builtin.insert("startswith", startswith as LookupFn); - builtin.insert("istartswith", istartswith as LookupFn); - builtin.insert("endswith", endswith as LookupFn); - builtin.insert("iendswith", iendswith as LookupFn); - - // Null lookups - // `isnull` is special: it ignores the value entirely and produces - // IS NULL / IS NOT NULL. The value passed (True/False) is read by - // the compiler, not by this function. 
- builtin.insert("isnull", isnull as LookupFn); - - // Membership lookups - // `in` is also special: the compiler expands it into - // `col IN (?, ?, ?)` based on the number of values provided. - builtin.insert("in", in_lookup as LookupFn); - - // Range lookup - builtin.insert("range", range as LookupFn); - - // Date/Time transforms (for chaining like created_at__date__gte) - // These are registered as lookups that return SQL fragments - builtin.insert("date", date_transform as LookupFn); - builtin.insert("year", year_transform as LookupFn); - builtin.insert("month", month_transform as LookupFn); - builtin.insert("day", day_transform as LookupFn); - builtin.insert("hour", hour_transform as LookupFn); - builtin.insert("minute", minute_transform as LookupFn); - builtin.insert("second", second_transform as LookupFn); - builtin.insert("week", week_transform as LookupFn); - builtin.insert("dow", dow_transform as LookupFn); - // New transforms - builtin.insert("quarter", quarter_transform as LookupFn); - builtin.insert("time", time_transform as LookupFn); - builtin.insert("iso_week", iso_week_transform as LookupFn); - builtin.insert("iso_dow", iso_dow_transform as LookupFn); - - // JSON transforms (for chaining like metadata__key__icontains) - builtin.insert("key", json_key_transform as LookupFn); - builtin.insert("key_text", json_key_text_transform as LookupFn); - builtin.insert("json", json_cast_transform as LookupFn); - - // JSON lookups (comparison operators) - builtin.insert("has_key", json_has_key as LookupFn); - builtin.insert("has_keys", json_has_keys as LookupFn); - builtin.insert("contains", json_contains as LookupFn); - builtin.insert("contained_by", json_contained_by as LookupFn); - - RwLock::new(LookupRegistry { - builtin, - custom: HashMap::new(), - }) - }); -} - -// -// Registry public API -// -/// Register a custom lookup from Python. -/// -/// # Arguments -/// * `name` — the lookup name (e.g. 
`"uuid_prefix"`) -/// * `sql_template` — SQL fragment with `{col}` as column placeholder and -/// `?` as value placeholder. Example: `"{col}::text LIKE ?"` -/// -/// # Errors -/// Returns `RyxError::Internal` if the registry hasn't been initialized -/// (should never happen in practice since `init_registry()` runs at import). -pub fn register_custom(name: impl Into, sql_template: impl Into) -> RyxResult<()> { - let registry = REGISTRY - .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; - - let mut guard = registry - .write() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; - - guard.custom.insert( - name.into(), - PythonLookup { - sql_template: sql_template.into(), - }, - ); - - Ok(()) -} - -// ### -// Chained lookups support (e.g., "date__gte", "year__month") -// ### - -/// Handle SQLite transform lookup when ctx.column already has transform applied -/// This happens when compiler applied the transform but lookup is still simple (e.g., "gte") -#[allow(dead_code)] -fn handle_sqlite_transform_lookup( - field: &str, - _transform: &str, - lookup_name: &str, - ctx: &LookupContext, -) -> RyxResult { - // Check if we need to convert TEXT to INTEGER for numeric comparisons - let is_numeric_comparison = matches!(lookup_name, "gt" | "gte" | "lt" | "lte" | "exact"); - - if is_numeric_comparison && ctx.column.contains("AS TEXT)") { - // Convert TEXT to INTEGER - let transformed = ctx.column.replace("AS TEXT)", "AS INTEGER)"); - let new_ctx = LookupContext { - column: transformed, - negated: ctx.negated, - backend: ctx.backend, - json_key: ctx.json_key.clone(), - }; - return resolve_simple(field, lookup_name, &new_ctx); - } - - // Otherwise, use as-is - resolve_simple(field, lookup_name, ctx) -} - -/// Resolve a chained lookup like "date__gte" or "year__exact". -/// This applies transforms first (date, year, month, etc.) then the final lookup. 
-pub fn resolve(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { - // If no "__", it's a simple lookup - if !lookup_name.contains("__") { - // Check if we have a JSON key that needs to be applied - if ctx.json_key.is_some() { - // We have a JSON key transform to apply - ALWAYS start fresh from field - let mut column = format!("\"{}\"", field); - // Apply the key transform with the json_key - column = apply_transform("key", &column, ctx.backend, ctx.json_key.as_deref())?; - - // Build new context with transformed column - let json_ctx = LookupContext { - column: column.clone(), - negated: ctx.negated, - backend: ctx.backend, - json_key: None, - }; - return resolve_simple(field, lookup_name, &json_ctx); - } - - // Check if ctx.column already has a date/time transform applied (e.g., from compiler) - // Handle the case where compiler applied transform but lookup is simple (e.g., "gte") - if ctx.column.contains("strftime") || ctx.column.contains("DATE(") { - // Detect transform type from SQL - if ctx.column.contains("strftime('%Y'") { - return handle_sqlite_transform_lookup(field, "year", lookup_name, ctx); - } else if ctx.column.contains("strftime('%m'") { - return handle_sqlite_transform_lookup(field, "month", lookup_name, ctx); - } else if ctx.column.contains("strftime('%d'") { - return handle_sqlite_transform_lookup(field, "day", lookup_name, ctx); - } else if ctx.column.contains("strftime('%H'") { - return handle_sqlite_transform_lookup(field, "hour", lookup_name, ctx); - } - // For DATE() transform, we need different handling for comparisons - if ctx.column.starts_with("DATE(") { - return resolve_simple(field, lookup_name, ctx); - } - } - return resolve_simple(field, lookup_name, ctx); - } - - // Chained: split into transforms + final lookup - let parts: Vec<&str> = lookup_name.split("__").collect(); - let final_lookup = *parts.last().unwrap(); - let transform_parts: Vec<&str> = parts[..parts.len() - 1].to_vec(); - - // Start fresh from the base 
column - don't use ctx.column which may already have transforms - let mut column = format!("\"{}\"", field); - - // Apply transforms in order until we hit a lookup - // For JSON transforms like "key", use ctx.json_key if available - for transform in transform_parts.iter() { - // Check if this is a known transform - let is_transform = matches!( - *transform, - "date" - | "year" - | "month" - | "day" - | "hour" - | "minute" - | "second" - | "week" - | "dow" - | "quarter" - | "time" - | "iso_week" - | "iso_dow" - | "key" - | "key_text" - | "json" - ); - - if is_transform { - // For JSON transforms (key, key_text), use json_key from context if available - let key = if matches!(*transform, "key" | "key_text") { - ctx.json_key - .as_deref() - .or_else(|| field.rsplit("__").next()) - } else { - None - }; - column = apply_transform(transform, &column, ctx.backend, key)?; - } else { - // This part is a lookup, not a transform - stop here - break; - } - } - - // Build a new context with the transformed column - let final_ctx = LookupContext { - column: column.clone(), - negated: ctx.negated, - backend: ctx.backend, - json_key: ctx.json_key.clone(), - }; - - // For SQLite, handle type conversion for comparisons on transformed values - if ctx.backend == Backend::SQLite { - // Check if the column contains a date/time transform - let col_has_transform = column.contains("strftime"); - - if col_has_transform && !column.contains("AS INTEGER") { - // Column is TEXT from a transform, need to convert for numeric comparisons - let is_numeric_comparison = - matches!(final_lookup, "gt" | "gte" | "lt" | "lte" | "exact"); - - if is_numeric_comparison { - // Convert TEXT to INTEGER by replacing AS TEXT with AS INTEGER - let transformed = column.replace("AS TEXT)", "AS INTEGER)"); - let final_ctx_int = LookupContext { - column: transformed, - negated: ctx.negated, - backend: ctx.backend, - json_key: ctx.json_key.clone(), - }; - return resolve_simple(field, final_lookup, &final_ctx_int); - } 
- - // For non-numeric comparisons, cast the bind value - let fragment = resolve_simple(field, final_lookup, &final_ctx)?; - return Ok(add_sqlite_cast_for_transform(&fragment, final_lookup)); - } - } - - // Default: resolve normally - resolve_simple(field, final_lookup, &final_ctx) -} - -#[allow(dead_code)] -/// Convert a SQLite transform expression from TEXT to INTEGER for numeric comparisons -fn convert_transform_to_integer(column: &str) -> String { - // Replace CAST(...AS TEXT) with CAST(...AS INTEGER) - column.replace("AS TEXT)", "AS INTEGER)") -} - -/// Add CAST(? AS TEXT) for SQLite date/time transform comparisons -fn add_sqlite_cast_for_transform(fragment: &str, lookup: &str) -> String { - // For lookups that use = ?, replace = ? with = CAST(? AS TEXT) - // For lookups that use > ?, etc., replace with > CAST(? AS TEXT) - - match lookup { - "exact" => fragment.replace("= ?", "= CAST(? AS TEXT)"), - "gt" => fragment.replace("> ?", "> CAST(? AS TEXT)"), - "gte" => fragment.replace(">= ?", ">= CAST(? AS TEXT)"), - "lt" => fragment.replace("< ?", "< CAST(? AS TEXT)"), - "lte" => fragment.replace("<= ?", "<= CAST(? AS TEXT)"), - _ => fragment.to_string(), - } -} - -/// Resolve a simple (non-chained) lookup. 
-fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { - let registry = REGISTRY - .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; - - let guard = registry - .read() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; - - // Check custom registry first (allows overriding built-ins) - if let Some(custom) = guard.custom.get(lookup_name) { - return Ok(custom.sql_template.replace("{col}", &ctx.column)); - } - - // Fall back to built-in lookup functions - if let Some(lookup_fn) = guard.builtin.get(lookup_name) { - return Ok(lookup_fn(ctx)); - } - - Err(RyxError::UnknownLookup { - field: field.to_string(), - lookup: lookup_name.to_string(), - }) -} - -/// Returns the list of all registered lookup names (built-in + custom). -/// Used by the Python layer to provide helpful error messages and IDE -/// autocompletion support. -pub fn registered_lookups() -> RyxResult> { - let registry = REGISTRY - .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; - - let guard = registry - .read() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; - - let mut names: Vec = guard - .builtin - .keys() - .map(|k| k.to_string()) - .chain(guard.custom.keys().cloned()) - .collect(); - names.sort(); - Ok(names) -} - -/// Apply a field transformation (date, year, month, key, etc.) 
-/// Returns SQL like "DATE(col)" or "EXTRACT(YEAR FROM col)" -/// For JSON transforms (key, key_text), the key is extracted from the next part of the chain -pub fn apply_transform( - name: &str, - column: &str, - backend: Backend, - key: Option<&str>, -) -> RyxResult { - let sql = match (name, backend) { - // Date/Time transforms - ("date", _) => format!("DATE({})", column), - - ("year", Backend::PostgreSQL) => format!("EXTRACT(YEAR FROM {})", column), - ("year", Backend::MySQL) => format!("YEAR({})", column), - ("year", Backend::SQLite) => format!("CAST(strftime('%Y', {}) AS TEXT)", column), - - ("month", Backend::PostgreSQL) => format!("EXTRACT(MONTH FROM {})", column), - ("month", Backend::MySQL) => format!("MONTH({})", column), - ("month", Backend::SQLite) => format!("CAST(strftime('%m', {}) AS TEXT)", column), - - ("day", Backend::PostgreSQL) => format!("EXTRACT(DAY FROM {})", column), - ("day", Backend::MySQL) => format!("DAYOFMONTH({})", column), - ("day", Backend::SQLite) => format!("CAST(strftime('%d', {}) AS TEXT)", column), - - ("hour", Backend::PostgreSQL) => format!("EXTRACT(HOUR FROM {})", column), - ("hour", Backend::MySQL) => format!("HOUR({})", column), - ("hour", Backend::SQLite) => format!("CAST(strftime('%H', {}) AS TEXT)", column), - - ("minute", Backend::PostgreSQL) => format!("EXTRACT(MINUTE FROM {})", column), - ("minute", Backend::MySQL) => format!("MINUTE({})", column), - ("minute", Backend::SQLite) => format!("CAST(strftime('%M', {}) AS TEXT)", column), - - ("second", Backend::PostgreSQL) => format!("EXTRACT(SECOND FROM {})", column), - ("second", Backend::MySQL) => format!("SECOND({})", column), - ("second", Backend::SQLite) => format!("CAST(strftime('%S', {}) AS TEXT)", column), - - ("week", Backend::PostgreSQL) => format!("EXTRACT(WEEK FROM {})", column), - ("week", Backend::MySQL) => format!("WEEK({})", column), - ("week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column), - - ("dow", Backend::PostgreSQL) => 
format!("EXTRACT(DOW FROM {})", column), - ("dow", Backend::MySQL) => format!("DAYOFWEEK({})", column), - ("dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column), - - // New Date/Time transforms - ("quarter", Backend::PostgreSQL) => format!("EXTRACT(QUARTER FROM {})", column), - ("quarter", Backend::MySQL) => format!("QUARTER({})", column), - ("quarter", Backend::SQLite) => format!( - "CAST((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3 AS TEXT)", - column - ), - - ("time", Backend::PostgreSQL) => format!("TIME({})", column), - ("time", Backend::MySQL) => format!("TIME({})", column), - ("time", Backend::SQLite) => format!("time({})", column), - - ("iso_week", Backend::PostgreSQL) => format!("EXTRACT(ISOWEEK FROM {})", column), - ("iso_week", Backend::MySQL) => format!( - "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1", - column, column, column - ), - ("iso_week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column), - - ("iso_dow", Backend::PostgreSQL) => format!("EXTRACT(ISODOW FROM {})", column), - ("iso_dow", Backend::MySQL) => format!("((DAYOFWEEK({}) + 5) % 7) + 1", column), - ("iso_dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column), - - // JSON transforms (key extraction) - key comes from the next part of the chain - ("key", Backend::PostgreSQL) => { - let k = key.unwrap_or("key"); - format!("({}->>'{}')", column, k) - } - ("key", Backend::MySQL) => { - let k = key.unwrap_or("key"); - format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}'))", column, k) - } - ("key", Backend::SQLite) => { - let k = key.unwrap_or("key"); - format!("json_extract({}, '$.{}')", column, k) - } - - ("key_text", Backend::PostgreSQL) => { - let k = key.unwrap_or("key"); - format!("({}->>'{}')::text", column, k) - } - ("key_text", Backend::MySQL) => { - let k = key.unwrap_or("key"); - format!( - "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '.{}')) AS CHAR)", - column, k - ) - } - ("key_text", 
Backend::SQLite) => { - let k = key.unwrap_or("key"); - format!("CAST(json_extract({}, '.{}') AS TEXT)", column, k) - } - - ("json", Backend::PostgreSQL) => format!("({}::jsonb)", column), - ("json", Backend::MySQL) => column.to_string(), - ("json", Backend::SQLite) => column.to_string(), - - _ => { - return Err(RyxError::UnknownLookup { - field: column.to_string(), - lookup: name.to_string(), - }) - } - }; - - Ok(sql) -} - -// ### -// Built-in lookup implementations -// -// Each function takes a `LookupContext` and returns a SQL fragment. -// Rules: -// - Always use `?` as the value placeholder -// - Never include the value itself (SQL injection prevention) -// - Column name is already safely quoted by the query builder -// ### - -/// `field__exact=value` → `field = ?` -/// -/// This is also the *implicit* lookup: `filter(name="Alice")` is equivalent -/// to `filter(name__exact="Alice")`. -fn exact(ctx: &LookupContext) -> String { - format!("{} = ?", ctx.column) -} - -/// `field__gt=value` → `field > ?` -fn gt(ctx: &LookupContext) -> String { - format!("{} > ?", ctx.column) -} - -/// `field__gte=value` → `field >= ?` -fn gte(ctx: &LookupContext) -> String { - format!("{} >= ?", ctx.column) -} - -/// `field__lt=value` → `field < ?` -fn lt(ctx: &LookupContext) -> String { - format!("{} < ?", ctx.column) -} - -/// `field__lte=value` → `field <= ?` -fn lte(ctx: &LookupContext) -> String { - format!("{} <= ?", ctx.column) -} - -/// `field__contains="bob"` → `field LIKE ?` (with `%value%` at bind time) -/// -/// Case-sensitive substring match. The `%` wrapping is applied by the -/// executor when binding the value, not in the SQL fragment itself. -fn contains(ctx: &LookupContext) -> String { - format!("{} LIKE ?", ctx.column) -} - -/// `field__icontains="bob"` → `LOWER(field) LIKE LOWER(?)` -/// -/// Case-insensitive substring match. Works on all backends without relying -/// on PostgreSQL-specific `ILIKE`. The `%value%` wrapping happens at bind time. 
-fn icontains(ctx: &LookupContext) -> String { - format!("LOWER({}) LIKE LOWER(?)", ctx.column) -} - -/// `field__startswith="pr"` → `field LIKE ?` (with `value%` at bind time) -fn startswith(ctx: &LookupContext) -> String { - format!("{} LIKE ?", ctx.column) -} - -/// `field__istartswith="pr"` → `LOWER(field) LIKE LOWER(?)` -fn istartswith(ctx: &LookupContext) -> String { - format!("LOWER({}) LIKE LOWER(?)", ctx.column) -} - -/// `field__endswith="ing"` → `field LIKE ?` (with `%value` at bind time) -fn endswith(ctx: &LookupContext) -> String { - format!("{} LIKE ?", ctx.column) -} - -/// `field__iendswith="ing"` → `LOWER(field) LIKE LOWER(?)` -fn iendswith(ctx: &LookupContext) -> String { - format!("LOWER({}) LIKE LOWER(?)", ctx.column) -} - -/// `field__isnull=True` → `field IS NULL` -/// `field__isnull=False` → `field IS NOT NULL` -/// -/// Note: the True/False distinction is handled by the compiler which reads the -/// bound value. This function always returns the IS NULL form; the compiler -/// swaps to IS NOT NULL when the value is False/0. -fn isnull(ctx: &LookupContext) -> String { - // The compiler reads the Python boolean and rewrites this. - // We return the base form here. - format!("{} IS NULL", ctx.column) -} - -/// `field__in=[1, 2, 3]` → `field IN (?, ?, ?)` -/// -/// Note: this returns a *template* — the compiler replaces `(?)` with -/// the correct number of placeholders based on the list length. -fn in_lookup(ctx: &LookupContext) -> String { - // Single `?` — compiler expands to `(?, ?, ...)` based on value count - format!("{} IN (?)", ctx.column) -} - -/// `field__range=(low, high)` → `field BETWEEN ? AND ?` -/// -/// Uses two bind parameters. The compiler handles this specially. -fn range(ctx: &LookupContext) -> String { - format!("{} BETWEEN ? 
AND ?", ctx.column) -} - -// ### -// Date/Time Transform Functions (for chained lookups) -// ### - -/// `field__date` → `DATE(field)` (backend-aware) - implicit equality -pub fn date_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("DATE({}) = ?", ctx.column), - Backend::MySQL => format!("DATE({}) = ?", ctx.column), - Backend::SQLite => format!("date({}) = CAST(? AS TEXT)", ctx.column), - } -} - -/// `field__year` → `EXTRACT(YEAR FROM field)` or `YEAR(field)` (backend-aware) - implicit equality -pub fn year_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(YEAR FROM {}) = ?", ctx.column), - Backend::MySQL => format!("YEAR({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%Y', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__month` → `EXTRACT(MONTH FROM field)` or `MONTH(field)` (backend-aware) - implicit equality -pub fn month_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(MONTH FROM {}) = ?", ctx.column), - Backend::MySQL => format!("MONTH({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%m', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__day` → `EXTRACT(DAY FROM field)` or `DAY(field)` (backend-aware) - implicit equality -pub fn day_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(DAY FROM {}) = ?", ctx.column), - Backend::MySQL => format!("DAYOFMONTH({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%d', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__hour` → `EXTRACT(HOUR FROM field)` or `HOUR(field)` (backend-aware) - implicit equality -pub fn hour_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(HOUR FROM {}) = ?", ctx.column), - Backend::MySQL => format!("HOUR({}) = ?", ctx.column), - Backend::SQLite 
=> format!("CAST(strftime('%H', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__minute` → `EXTRACT(MINUTE FROM field)` or `MINUTE(field)` (backend-aware) - implicit equality -pub fn minute_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(MINUTE FROM {}) = ?", ctx.column), - Backend::MySQL => format!("MINUTE({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%M', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__second` → `EXTRACT(SECOND FROM field)` or `SECOND(field)` (backend-aware) - implicit equality -pub fn second_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(SECOND FROM {}) = ?", ctx.column), - Backend::MySQL => format!("SECOND({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%S', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__week` → `EXTRACT(WEEK FROM field)` or `WEEK(field)` (backend-aware) - implicit equality -pub fn week_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(WEEK FROM {}) = ?", ctx.column), - Backend::MySQL => format!("WEEK({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__dow` → `EXTRACT(DOW FROM field)` or `DAYOFWEEK(field)` (backend-aware) - implicit equality -pub fn dow_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(DOW FROM {}) = ?", ctx.column), - Backend::MySQL => format!("DAYOFWEEK({}) = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__quarter` → `EXTRACT(QUARTER FROM field)` or `QUARTER(field)` (backend-aware) - implicit equality -pub fn quarter_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(QUARTER FROM {}) = ?", ctx.column), - 
Backend::MySQL => format!("QUARTER({}) = ?", ctx.column), - Backend::SQLite => format!( - "((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3) = ?", - ctx.column - ), - } -} - -/// `field__time` → `TIME(field)` or equivalent (backend-aware) - implicit equality -pub fn time_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("TIME({}) = ?", ctx.column), - Backend::MySQL => format!("TIME({}) = ?", ctx.column), - Backend::SQLite => format!("time({}) = ?", ctx.column), - } -} - -/// `field__iso_week` → `EXTRACT(ISOWEEK FROM field)` or equivalent (backend-aware) - implicit equality -pub fn iso_week_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(ISOWEEK FROM {}) = ?", ctx.column), - Backend::MySQL => format!( - "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1 = ?", - ctx.column, ctx.column, ctx.column - ), - Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), - } -} - -/// `field__iso_dow` → `EXTRACT(ISODOW FROM field)` or equivalent (backend-aware) - implicit equality -pub fn iso_dow_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("EXTRACT(ISODOW FROM {}) = ?", ctx.column), - Backend::MySQL => format!("((DAYOFWEEK({}) + 5) % 7) + 1 = ?", ctx.column), - Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), - } -} - -// ### -// JSON Transform Functions (for chained lookups) -// ### - -/// `field__key` → `(field->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(field, '$.key'))` -pub fn json_key_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({}->>'key')", ctx.column), - Backend::MySQL => format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.key'))", ctx.column), - Backend::SQLite => format!("json_extract({}, '$.key')", ctx.column), - } -} - -/// `field__key_text` → `(field->>'key')::text` (for text 
comparisons like icontains) -pub fn json_key_text_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({}->>'key')::text", ctx.column), - Backend::MySQL => format!( - "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '$.key')) AS CHAR)", - ctx.column - ), - Backend::SQLite => format!("CAST(json_extract({}, '$.key') AS TEXT)", ctx.column), - } -} - -/// `field__json` → `field::jsonb` (PostgreSQL) or just field (MySQL/SQLite) -pub fn json_cast_transform(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({}::jsonb)", ctx.column), - Backend::MySQL => ctx.column.clone(), - Backend::SQLite => ctx.column.clone(), - } -} - -// ### -// JSON Lookup Functions (comparison operators) -// ### - -/// `field__has_key="key"` → `field ? 'key'` (PostgreSQL) or `JSON_CONTAINS(field, '"key"')` (MySQL) -fn json_has_key(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} ? 'key')", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, '\"key\"')", ctx.column), - Backend::SQLite => format!("json_extract({}, '$.key') IS NOT NULL", ctx.column), - } -} - -/// `field__has_keys=['key1', 'key2']` → `field ?& array['key1', 'key2']` -fn json_has_keys(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} ?& array['key1', 'key2'])", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, '[\"key1\", \"key2\"]')", ctx.column), - Backend::SQLite => format!( - "json_extract({}, '$.key1') IS NOT NULL AND json_extract({}, '$.key2') IS NOT NULL", - ctx.column, ctx.column - ), - } -} - -/// `field__contains={"key": "value"}` → `field @> ?` (PostgreSQL) -fn json_contains(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} @> ?)", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, ?)", ctx.column), - Backend::SQLite => ctx.column.clone(), // Limited support in SQLite - } -} - -/// 
`field__contained_by={"key": "value"}` → `field <@ ?` (PostgreSQL) -fn json_contained_by(ctx: &LookupContext) -> String { - match ctx.backend { - Backend::PostgreSQL => format!("({} <@ ?)", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS(?, {})", ctx.column), - Backend::SQLite => ctx.column.clone(), // Limited support in SQLite - } -} diff --git a/src/query/lookups/common_lookups.rs b/src/query/lookups/common_lookups.rs new file mode 100644 index 0000000..880d2b1 --- /dev/null +++ b/src/query/lookups/common_lookups.rs @@ -0,0 +1,101 @@ +// +// ### +// Ryx — Common Lookups +// ### +// +// Contains comparison and string lookups (exact, gt, contains, etc.) +// ### + +use crate::query::lookups::LookupContext; + +pub use crate::query::lookups::LookupFn; +pub use crate::query::lookups::PythonLookup; + +/// `field__exact=value` → `field = ?` +/// +/// This is also the *implicit* lookup: `filter(name="Alice")` is equivalent +/// to `filter(name__exact="Alice")`. +pub fn exact(ctx: &LookupContext) -> String { + format!("{} = ?", ctx.column) +} + +/// `field__gt=value` → `field > ?` +pub fn gt(ctx: &LookupContext) -> String { + format!("{} > ?", ctx.column) +} + +/// `field__gte=value` → `field >= ?` +pub fn gte(ctx: &LookupContext) -> String { + format!("{} >= ?", ctx.column) +} + +/// `field__lt=value` → `field < ?` +pub fn lt(ctx: &LookupContext) -> String { + format!("{} < ?", ctx.column) +} + +/// `field__lte=value` → `field <= ?` +pub fn lte(ctx: &LookupContext) -> String { + format!("{} <= ?", ctx.column) +} + +/// `field__contains="bob"` → `field LIKE ?` (with `%value%` at bind time) +/// +/// Case-sensitive substring match. The `%` wrapping is applied by the +/// executor when binding the value, not in the SQL fragment itself. +pub fn contains(ctx: &LookupContext) -> String { + format!("{} LIKE ?", ctx.column) +} + +/// `field__icontains="bob"` → `LOWER(field) LIKE LOWER(?)` +/// +/// Case-insensitive substring match. 
Works on all backends without relying +/// on PostgreSQL-specific `ILIKE`. The `%value%` wrapping happens at bind time. +pub fn icontains(ctx: &LookupContext) -> String { + format!("LOWER({}) LIKE LOWER(?)", ctx.column) +} + +/// `field__startswith="pr"` → `field LIKE ?` (with `value%` at bind time) +pub fn startswith(ctx: &LookupContext) -> String { + format!("{} LIKE ?", ctx.column) +} + +/// `field__istartswith="pr"` → `LOWER(field) LIKE LOWER(?)` +pub fn istartswith(ctx: &LookupContext) -> String { + format!("LOWER({}) LIKE LOWER(?)", ctx.column) +} + +/// `field__endswith="ing"` → `field LIKE ?` (with `%value` at bind time) +pub fn endswith(ctx: &LookupContext) -> String { + format!("{} LIKE ?", ctx.column) +} + +/// `field__iendswith="ing"` → `LOWER(field) LIKE LOWER(?)` +pub fn iendswith(ctx: &LookupContext) -> String { + format!("LOWER({}) LIKE LOWER(?)", ctx.column) +} + +/// `field__isnull=True` → `field IS NULL` +/// `field__isnull=False` → `field IS NOT NULL` +/// +/// Note: the True/False distinction is handled by the compiler which reads the +/// bound value. This function always returns the IS NULL form; the compiler +/// swaps to IS NOT NULL when the value is False/0. +pub fn isnull(ctx: &LookupContext) -> String { + format!("{} IS NULL", ctx.column) +} + +/// `field__in=[1, 2, 3]` → `field IN (?, ?, ?)` +/// +/// Note: this returns a *template* — the compiler replaces `(?)` with +/// the correct number of placeholders based on the list length. +pub fn in_lookup(ctx: &LookupContext) -> String { + format!("{} IN (?)", ctx.column) +} + +/// `field__range=(low, high)` → `field BETWEEN ? AND ?` +/// +/// Uses two bind parameters. The compiler handles this specially. +pub fn range(ctx: &LookupContext) -> String { + format!("{} BETWEEN ? 
AND ?", ctx.column) +} diff --git a/src/query/lookups/date_lookups.rs b/src/query/lookups/date_lookups.rs new file mode 100644 index 0000000..323c4d8 --- /dev/null +++ b/src/query/lookups/date_lookups.rs @@ -0,0 +1,201 @@ +// +// ### +// Ryx — Date/Time Lookups +// ### +// +// Contains date/time transforms (year, month, day, hour, etc.) and apply_transform logic. +// These are used for chained lookups like `created_at__year__gte=2024` +// ### + +use crate::pool::Backend; +use crate::query::lookups::LookupContext; + +pub use crate::query::lookups::LookupFn; + +/// Apply a date/time field transformation. +/// Returns SQL like "DATE(col)" or "EXTRACT(YEAR FROM col)" +pub fn apply_date_transform(name: &str, column: &str, backend: Backend) -> Option { + let sql = match (name, backend) { + ("date", _) => format!("DATE({})", column), + + ("year", Backend::PostgreSQL) => format!("EXTRACT(YEAR FROM {})", column), + ("year", Backend::MySQL) => format!("YEAR({})", column), + ("year", Backend::SQLite) => format!("CAST(strftime('%Y', {}) AS TEXT)", column), + + ("month", Backend::PostgreSQL) => format!("EXTRACT(MONTH FROM {})", column), + ("month", Backend::MySQL) => format!("MONTH({})", column), + ("month", Backend::SQLite) => format!("CAST(strftime('%m', {}) AS TEXT)", column), + + ("day", Backend::PostgreSQL) => format!("EXTRACT(DAY FROM {})", column), + ("day", Backend::MySQL) => format!("DAYOFMONTH({})", column), + ("day", Backend::SQLite) => format!("CAST(strftime('%d', {}) AS TEXT)", column), + + ("hour", Backend::PostgreSQL) => format!("EXTRACT(HOUR FROM {})", column), + ("hour", Backend::MySQL) => format!("HOUR({})", column), + ("hour", Backend::SQLite) => format!("CAST(strftime('%H', {}) AS TEXT)", column), + + ("minute", Backend::PostgreSQL) => format!("EXTRACT(MINUTE FROM {})", column), + ("minute", Backend::MySQL) => format!("MINUTE({})", column), + ("minute", Backend::SQLite) => format!("CAST(strftime('%M', {}) AS TEXT)", column), + + ("second", 
Backend::PostgreSQL) => format!("EXTRACT(SECOND FROM {})", column), + ("second", Backend::MySQL) => format!("SECOND({})", column), + ("second", Backend::SQLite) => format!("CAST(strftime('%S', {}) AS TEXT)", column), + + ("week", Backend::PostgreSQL) => format!("EXTRACT(WEEK FROM {})", column), + ("week", Backend::MySQL) => format!("WEEK({})", column), + ("week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column), + + ("dow", Backend::PostgreSQL) => format!("EXTRACT(DOW FROM {})", column), + ("dow", Backend::MySQL) => format!("DAYOFWEEK({})", column), + ("dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column), + + ("quarter", Backend::PostgreSQL) => format!("EXTRACT(QUARTER FROM {})", column), + ("quarter", Backend::MySQL) => format!("QUARTER({})", column), + ("quarter", Backend::SQLite) => format!( + "CAST((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3 AS TEXT)", + column + ), + + ("time", Backend::PostgreSQL) => format!("TIME({})", column), + ("time", Backend::MySQL) => format!("TIME({})", column), + ("time", Backend::SQLite) => format!("time({})", column), + + ("iso_week", Backend::PostgreSQL) => format!("EXTRACT(ISOWEEK FROM {})", column), + ("iso_week", Backend::MySQL) => format!( + "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1", + column, column, column + ), + ("iso_week", Backend::SQLite) => format!("CAST(strftime('%W', {}) AS TEXT)", column), + + ("iso_dow", Backend::PostgreSQL) => format!("EXTRACT(ISODOW FROM {})", column), + ("iso_dow", Backend::MySQL) => format!("((DAYOFWEEK({}) + 5) % 7) + 1", column), + ("iso_dow", Backend::SQLite) => format!("CAST(strftime('%w', {}) AS TEXT)", column), + + _ => return None, + }; + Some(sql) +} + +/// `field__date` → `DATE(field)` (backend-aware) - implicit equality +pub fn date_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("DATE({}) = ?", ctx.column), + Backend::MySQL => format!("DATE({}) = ?", 
ctx.column), + Backend::SQLite => format!("date({}) = CAST(? AS TEXT)", ctx.column), + } +} + +/// `field__year` → `EXTRACT(YEAR FROM field)` or `YEAR(field)` (backend-aware) - implicit equality +pub fn year_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(YEAR FROM {}) = ?", ctx.column), + Backend::MySQL => format!("YEAR({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%Y', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__month` → `EXTRACT(MONTH FROM field)` or `MONTH(field)` (backend-aware) - implicit equality +pub fn month_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(MONTH FROM {}) = ?", ctx.column), + Backend::MySQL => format!("MONTH({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%m', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__day` → `EXTRACT(DAY FROM field)` or `DAY(field)` (backend-aware) - implicit equality +pub fn day_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(DAY FROM {}) = ?", ctx.column), + Backend::MySQL => format!("DAYOFMONTH({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%d', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__hour` → `EXTRACT(HOUR FROM field)` or `HOUR(field)` (backend-aware) - implicit equality +pub fn hour_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(HOUR FROM {}) = ?", ctx.column), + Backend::MySQL => format!("HOUR({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%H', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__minute` → `EXTRACT(MINUTE FROM field)` or `MINUTE(field)` (backend-aware) - implicit equality +pub fn minute_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(MINUTE FROM {}) = ?", ctx.column), + 
Backend::MySQL => format!("MINUTE({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%M', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__second` → `EXTRACT(SECOND FROM field)` or `SECOND(field)` (backend-aware) - implicit equality +pub fn second_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(SECOND FROM {}) = ?", ctx.column), + Backend::MySQL => format!("SECOND({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%S', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__week` → `EXTRACT(WEEK FROM field)` or `WEEK(field)` (backend-aware) - implicit equality +pub fn week_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(WEEK FROM {}) = ?", ctx.column), + Backend::MySQL => format!("WEEK({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__dow` → `EXTRACT(DOW FROM field)` or `DAYOFWEEK(field)` (backend-aware) - implicit equality +pub fn dow_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(DOW FROM {}) = ?", ctx.column), + Backend::MySQL => format!("DAYOFWEEK({}) = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__quarter` → `EXTRACT(QUARTER FROM field)` or `QUARTER(field)` (backend-aware) - implicit equality +pub fn quarter_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(QUARTER FROM {}) = ?", ctx.column), + Backend::MySQL => format!("QUARTER({}) = ?", ctx.column), + Backend::SQLite => format!( + "((CAST(strftime('%m', {}) AS INTEGER) + 2) / 3) = ?", + ctx.column + ), + } +} + +/// `field__time` → `TIME(field)` or equivalent (backend-aware) - implicit equality +pub fn time_transform(ctx: &LookupContext) -> String { + match ctx.backend { + 
Backend::PostgreSQL => format!("TIME({}) = ?", ctx.column), // NOTE(review): PostgreSQL has no TIME() function — consider CAST(col AS TIME) or col::time; verify against Postgres integration tests + Backend::MySQL => format!("TIME({}) = ?", ctx.column), + Backend::SQLite => format!("time({}) = ?", ctx.column), + } +} + +/// `field__iso_week` → `EXTRACT(ISOWEEK FROM field)` or equivalent (backend-aware) - implicit equality +pub fn iso_week_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(ISOWEEK FROM {}) = ?", ctx.column), // NOTE(review): ISOWEEK is not a valid EXTRACT field in PostgreSQL; EXTRACT(WEEK FROM ...) already returns the ISO 8601 week — confirm and fix apply_date_transform too + Backend::MySQL => format!( + "WEEK({}, 1) - WEEK(DATE_SUB({}, INTERVAL (DAYOFWEEK({}) - 1) DAY), 0) + 1 = ?", + ctx.column, ctx.column, ctx.column + ), + Backend::SQLite => format!("CAST(strftime('%W', {}) AS INTEGER) = ?", ctx.column), + } +} + +/// `field__iso_dow` → `EXTRACT(ISODOW FROM field)` or equivalent (backend-aware) - implicit equality +pub fn iso_dow_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("EXTRACT(ISODOW FROM {}) = ?", ctx.column), + Backend::MySQL => format!("((DAYOFWEEK({}) + 5) % 7) + 1 = ?", ctx.column), + Backend::SQLite => format!("CAST(strftime('%w', {}) AS INTEGER) = ?", ctx.column), + } +} diff --git a/src/query/lookups/json_lookups.rs b/src/query/lookups/json_lookups.rs new file mode 100644 index 0000000..beb7401 --- /dev/null +++ b/src/query/lookups/json_lookups.rs @@ -0,0 +1,129 @@ +// +// ### +// Ryx — JSON Lookups +// ### +// +// Contains JSON transforms and lookups (key, has_key, contains, etc.) +// These are used for chained lookups like `metadata__key__priority__exact="high"` +// ### + +use crate::pool::Backend; +use crate::query::lookups::LookupContext; + +pub use crate::query::lookups::LookupFn; + +/// Apply a JSON field transformation.
+/// Returns SQL like `(col->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(col, '$.key'))` +pub fn apply_json_transform( + name: &str, + column: &str, + backend: Backend, + key: Option<&str>, +) -> Option { + let sql = match (name, backend) { + ("key", Backend::PostgreSQL) => { + let k = key.unwrap_or("key"); + format!("({}->>'{}')", column, k) + } + ("key", Backend::MySQL) => { + let k = key.unwrap_or("key"); + format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}'))", column, k) + } + ("key", Backend::SQLite) => { + let k = key.unwrap_or("key"); + format!("json_extract({}, '$.{}')", column, k) + } + + ("key_text", Backend::PostgreSQL) => { + let k = key.unwrap_or("key"); + format!("({}->>'{}')::text", column, k) + } + ("key_text", Backend::MySQL) => { + let k = key.unwrap_or("key"); + format!( + "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}')) AS CHAR)", + column, k + ) + } + ("key_text", Backend::SQLite) => { + let k = key.unwrap_or("key"); + format!("CAST(json_extract({}, '$.{}') AS TEXT)", column, k) + } + + ("json", Backend::PostgreSQL) => format!("({}::jsonb)", column), + ("json", Backend::MySQL) => column.to_string(), + ("json", Backend::SQLite) => column.to_string(), + + _ => return None, + }; + Some(sql) +} + +/// `field__key` → `(field->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(field, '$.key'))` +pub fn json_key_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({}->>'key')", ctx.column), + Backend::MySQL => format!("JSON_UNQUOTE(JSON_EXTRACT({}, '$.key'))", ctx.column), + Backend::SQLite => format!("json_extract({}, '$.key')", ctx.column), + } +} + +/// `field__key_text` → `(field->>'key')::text` (for text comparisons like icontains) +pub fn json_key_text_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({}->>'key')::text", ctx.column), + Backend::MySQL => format!( + "CAST(JSON_UNQUOTE(JSON_EXTRACT({}, '$.key')) AS CHAR)", + ctx.column + ), + Backend::SQLite =>
format!("CAST(json_extract({}, '$.key') AS TEXT)", ctx.column), + } +} + +/// `field__json` → `field::jsonb` (PostgreSQL) or just field (MySQL/SQLite) +pub fn json_cast_transform(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({}::jsonb)", ctx.column), + Backend::MySQL => ctx.column.clone(), + Backend::SQLite => ctx.column.clone(), + } +} + +/// `field__has_key="key"` → `field ? 'key'` (PostgreSQL) or `JSON_CONTAINS(field, '"key"')` (MySQL) +pub fn json_has_key(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} ? 'key')", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS({}, '\"key\"')", ctx.column), + Backend::SQLite => format!("json_extract({}, '$.key') IS NOT NULL", ctx.column), + } +} + +/// `field__has_keys=['key1', 'key2']` → `field ?& array['key1', 'key2']` +pub fn json_has_keys(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} ?& array['key1', 'key2'])", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS({}, '[\"key1\", \"key2\"]')", ctx.column), + Backend::SQLite => format!( + "json_extract({}, '$.key1') IS NOT NULL AND json_extract({}, '$.key2') IS NOT NULL", + ctx.column, ctx.column + ), + } +} + +/// `field__contains={"key": "value"}` → `field @> ?` (PostgreSQL) +pub fn json_contains(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} @> ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS({}, ?)", ctx.column), + Backend::SQLite => ctx.column.clone(), // Limited support in SQLite + } +} + +/// `field__contained_by={"key": "value"}` → `field <@ ?` (PostgreSQL) +pub fn json_contained_by(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} <@ ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS(?, {})", ctx.column), + Backend::SQLite => ctx.column.clone(), // Limited support in SQLite + } +} diff --git 
a/src/query/lookups/lookups.rs b/src/query/lookups/lookups.rs new file mode 100644 index 0000000..394b7d8 --- /dev/null +++ b/src/query/lookups/lookups.rs @@ -0,0 +1,336 @@ +// +// ### +// Ryx — Lookups Implementation +// ### +// +// Contains core types, registry, and resolve logic for the lookup system. +// This is the main implementation file - mod.rs just re-exports from here. +// ### + +use std::collections::HashMap; +use std::sync::{OnceLock, RwLock}; + +use crate::errors::{RyxError, RyxResult}; +use crate::pool::Backend; + +// Re-export submodules +pub use crate::query::lookups::common_lookups; +pub use crate::query::lookups::date_lookups; +pub use crate::query::lookups::json_lookups; + +// ### +// Core types +// ### + +#[derive(Debug, Clone)] +pub struct LookupContext { + pub column: String, + pub negated: bool, + pub backend: Backend, + pub json_key: Option, +} + +pub type LookupFn = fn(&LookupContext) -> String; + +#[derive(Debug, Clone)] +pub struct PythonLookup { + pub sql_template: String, +} + +// ### +// Global lookup registry +// ### + +struct LookupRegistry { + builtin: HashMap<&'static str, LookupFn>, + custom: HashMap, +} + +static REGISTRY: OnceLock> = OnceLock::new(); + +pub fn init_registry() { + REGISTRY.get_or_init(|| { + let mut builtin = HashMap::new(); + + builtin.insert("exact", common_lookups::exact as LookupFn); + builtin.insert("gt", common_lookups::gt as LookupFn); + builtin.insert("gte", common_lookups::gte as LookupFn); + builtin.insert("lt", common_lookups::lt as LookupFn); + builtin.insert("lte", common_lookups::lte as LookupFn); + + builtin.insert("contains", common_lookups::contains as LookupFn); + builtin.insert("icontains", common_lookups::icontains as LookupFn); + builtin.insert("startswith", common_lookups::startswith as LookupFn); + builtin.insert("istartswith", common_lookups::istartswith as LookupFn); + builtin.insert("endswith", common_lookups::endswith as LookupFn); + builtin.insert("iendswith", 
common_lookups::iendswith as LookupFn); + + builtin.insert("isnull", common_lookups::isnull as LookupFn); + builtin.insert("in", common_lookups::in_lookup as LookupFn); + builtin.insert("range", common_lookups::range as LookupFn); + + builtin.insert("date", date_lookups::date_transform as LookupFn); + builtin.insert("year", date_lookups::year_transform as LookupFn); + builtin.insert("month", date_lookups::month_transform as LookupFn); + builtin.insert("day", date_lookups::day_transform as LookupFn); + builtin.insert("hour", date_lookups::hour_transform as LookupFn); + builtin.insert("minute", date_lookups::minute_transform as LookupFn); + builtin.insert("second", date_lookups::second_transform as LookupFn); + builtin.insert("week", date_lookups::week_transform as LookupFn); + builtin.insert("dow", date_lookups::dow_transform as LookupFn); + builtin.insert("quarter", date_lookups::quarter_transform as LookupFn); + builtin.insert("time", date_lookups::time_transform as LookupFn); + builtin.insert("iso_week", date_lookups::iso_week_transform as LookupFn); + builtin.insert("iso_dow", date_lookups::iso_dow_transform as LookupFn); + + builtin.insert("key", json_lookups::json_key_transform as LookupFn); + builtin.insert( + "key_text", + json_lookups::json_key_text_transform as LookupFn, + ); + builtin.insert("json", json_lookups::json_cast_transform as LookupFn); + + builtin.insert("has_key", json_lookups::json_has_key as LookupFn); + builtin.insert("has_keys", json_lookups::json_has_keys as LookupFn); + builtin.insert("contains", json_lookups::json_contains as LookupFn); // NOTE(review): overwrites common_lookups::contains inserted above, so the string "contains" lookup always resolves to the JSON variant — confirm this matches pre-refactor lookup.rs behavior + builtin.insert("contained_by", json_lookups::json_contained_by as LookupFn); + + RwLock::new(LookupRegistry { + builtin, + custom: HashMap::new(), + }) + }); +} + +// ### +// Registry public API +// ### + +pub fn register_custom(name: impl Into, sql_template: impl Into) -> RyxResult<()> { + let registry = REGISTRY + .get() + .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?;
+ + let mut guard = registry + .write() + .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + + guard.custom.insert( + name.into(), + PythonLookup { + sql_template: sql_template.into(), + }, + ); + + Ok(()) +} + +fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { + let registry = REGISTRY + .get() + .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + + let guard = registry + .read() + .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + + if let Some(custom) = guard.custom.get(lookup_name) { + return Ok(custom.sql_template.replace("{col}", &ctx.column)); + } + + if let Some(lookup_fn) = guard.builtin.get(lookup_name) { + return Ok(lookup_fn(ctx)); + } + + Err(RyxError::UnknownLookup { + field: field.to_string(), + lookup: lookup_name.to_string(), + }) +} + +pub fn registered_lookups() -> RyxResult> { + let registry = REGISTRY + .get() + .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + + let guard = registry + .read() + .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + + let mut names: Vec = guard + .builtin + .keys() + .copied() + .map(|k| k.to_string()) + .chain(guard.custom.keys().cloned()) + .collect(); + names.sort(); + Ok(names) +} + +// ### +// Chained lookups support +// ### + +#[allow(dead_code)] +fn handle_sqlite_transform_lookup( + field: &str, + _transform: &str, + lookup_name: &str, + ctx: &LookupContext, +) -> RyxResult { + let is_numeric_comparison = matches!(lookup_name, "gt" | "gte" | "lt" | "lte" | "exact"); + + if is_numeric_comparison && ctx.column.contains("AS TEXT)") { + let transformed = ctx.column.replace("AS TEXT)", "AS INTEGER)"); + let new_ctx = LookupContext { + column: transformed, + negated: ctx.negated, + backend: ctx.backend, + json_key: ctx.json_key.clone(), + }; + return resolve_simple(field, lookup_name, &new_ctx); + } + + resolve_simple(field, lookup_name, ctx) 
+} + +fn add_sqlite_cast_for_transform(fragment: &str, lookup: &str) -> String { + match lookup { + "exact" => fragment.replace("= ?", "= CAST(? AS TEXT)"), + "gt" => fragment.replace("> ?", "> CAST(? AS TEXT)"), + "gte" => fragment.replace(">= ?", ">= CAST(? AS TEXT)"), + "lt" => fragment.replace("< ?", "< CAST(? AS TEXT)"), + "lte" => fragment.replace("<= ?", "<= CAST(? AS TEXT)"), + _ => fragment.to_string(), + } +} + +pub fn resolve(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { + if !lookup_name.contains("__") { + if ctx.json_key.is_some() { + let mut column = format!("\"{}\"", field); + column = apply_transform("key", &column, ctx.backend, ctx.json_key.as_deref())?; + + let json_ctx = LookupContext { + column: column.clone(), + negated: ctx.negated, + backend: ctx.backend, + json_key: None, + }; + return resolve_simple(field, lookup_name, &json_ctx); + } + + if ctx.column.contains("strftime") || ctx.column.contains("DATE(") { + if ctx.column.contains("strftime('%Y'") { + return handle_sqlite_transform_lookup(field, "year", lookup_name, ctx); + } else if ctx.column.contains("strftime('%m'") { + return handle_sqlite_transform_lookup(field, "month", lookup_name, ctx); + } else if ctx.column.contains("strftime('%d'") { + return handle_sqlite_transform_lookup(field, "day", lookup_name, ctx); + } else if ctx.column.contains("strftime('%H'") { + return handle_sqlite_transform_lookup(field, "hour", lookup_name, ctx); + } + if ctx.column.starts_with("DATE(") { + return resolve_simple(field, lookup_name, ctx); + } + } + return resolve_simple(field, lookup_name, ctx); + } + + let parts: Vec<&str> = lookup_name.split("__").collect(); + let final_lookup = *parts.last().unwrap(); + let transform_parts: Vec<&str> = parts[..parts.len() - 1].to_vec(); + + let mut column = format!("\"{}\"", field); + + for transform in transform_parts.iter() { + let is_transform = matches!( + *transform, + "date" + | "year" + | "month" + | "day" + | "hour" + | "minute" + | 
"second" + | "week" + | "dow" + | "quarter" + | "time" + | "iso_week" + | "iso_dow" + | "key" + | "key_text" + | "json" + ); + + if is_transform { + let key = if matches!(*transform, "key" | "key_text") { + ctx.json_key + .as_deref() + .or_else(|| field.rsplit("__").next()) + } else { + None + }; + column = apply_transform(transform, &column, ctx.backend, key)?; + } else { + break; + } + } + + let final_ctx = LookupContext { + column: column.clone(), + negated: ctx.negated, + backend: ctx.backend, + json_key: ctx.json_key.clone(), + }; + + if ctx.backend == Backend::SQLite { + let col_has_transform = column.contains("strftime"); + + if col_has_transform && !column.contains("AS INTEGER") { + let is_numeric_comparison = + matches!(final_lookup, "gt" | "gte" | "lt" | "lte" | "exact"); + + if is_numeric_comparison { + let transformed = column.replace("AS TEXT)", "AS INTEGER)"); + let final_ctx_int = LookupContext { + column: transformed, + negated: ctx.negated, + backend: ctx.backend, + json_key: ctx.json_key.clone(), + }; + return resolve_simple(field, final_lookup, &final_ctx_int); + } + + let fragment = resolve_simple(field, final_lookup, &final_ctx)?; + return Ok(add_sqlite_cast_for_transform(&fragment, final_lookup)); + } + } + + resolve_simple(field, final_lookup, &final_ctx) +} + +pub fn apply_transform( + name: &str, + column: &str, + backend: Backend, + key: Option<&str>, +) -> RyxResult { + if let Some(sql) = date_lookups::apply_date_transform(name, column, backend) { + return Ok(sql); + } + if let Some(sql) = json_lookups::apply_json_transform(name, column, backend, key) { + return Ok(sql); + } + + if name == "date" { + return Ok(format!("DATE({})", column)); + } + + Err(RyxError::UnknownLookup { + field: column.to_string(), + lookup: name.to_string(), + }) +} diff --git a/src/query/lookups/mod.rs b/src/query/lookups/mod.rs new file mode 100644 index 0000000..b3bd303 --- /dev/null +++ b/src/query/lookups/mod.rs @@ -0,0 +1,34 @@ +// +// ### +// Ryx — Lookup 
Module +// ### +// +// This module provides the lookup system - the suffix after `__` in filter expressions. +// Examples: +// `age__gte=25` → lookup = "gte", SQL = "age >= $1" +// `name__icontains="bob"` → lookup = "icontains", SQL = "LOWER(name) LIKE LOWER($1)" +// +// The module is organized as: +// - mod.rs : Re-exports from lookups.rs +// - lookups.rs : Core types, registry, resolve() logic +// - common_lookups.rs: Comparison and string lookups (exact, gt, contains, etc.) +// - date_lookups.rs : Date/time transforms (year, month, day, etc.) +// - json_lookups.rs : JSON transforms and lookups (key, has_key, etc.) +// ### + +pub mod common_lookups; +pub mod date_lookups; +pub mod json_lookups; +pub mod lookups; + +// Re-export main types from lookups.rs +pub use lookups::LookupContext; +pub use lookups::LookupFn; +pub use lookups::PythonLookup; + +// Re-export functions from lookups.rs +pub use lookups::apply_transform; +pub use lookups::init_registry; +pub use lookups::register_custom; +pub use lookups::registered_lookups; +pub use lookups::resolve; diff --git a/src/query/mod.rs b/src/query/mod.rs index 8303e94..60df9c9 100644 --- a/src/query/mod.rs +++ b/src/query/mod.rs @@ -10,4 +10,4 @@ pub mod ast; pub mod compiler; -pub mod lookup; +pub mod lookups; From c3aefe8f98d8c47dc4125447de8a37ecb1d3d157 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Tue, 7 Apr 2026 17:19:09 +0000 Subject: [PATCH 02/13] chore: update uv.lock --- uv.lock | 75 +-------------------------------------------------------- 1 file changed, 1 insertion(+), 74 deletions(-) diff --git a/uv.lock b/uv.lock index c23ada9..e57fa05 100644 --- a/uv.lock +++ b/uv.lock @@ -7,15 +7,6 @@ resolution-markers = [ "python_full_version < '3.11'", ] -[[package]] -name = "aiosqlite" -version = "0.22.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = 
"sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, -] - [[package]] name = "asttokens" version = "3.0.1" @@ -73,66 +64,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] -[[package]] -name = "greenlet" -version = "3.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d", size = 277747, upload-time = "2026-02-20T20:16:21.325Z" }, - { url = "https://files.pythonhosted.org/packages/fb/07/cb284a8b5c6498dbd7cba35d31380bb123d7dceaa7907f606c8ff5993cbf/greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13", size = 579202, upload-time = "2026-02-20T20:47:28.955Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/45/67922992b3a152f726163b19f890a85129a992f39607a2a53155de3448b8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e", size = 590620, upload-time = "2026-02-20T20:55:55.581Z" }, - { url = "https://files.pythonhosted.org/packages/03/5f/6e2a7d80c353587751ef3d44bb947f0565ec008a2e0927821c007e96d3a7/greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7", size = 602132, upload-time = "2026-02-20T21:02:43.261Z" }, - { url = "https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f", size = 591729, upload-time = "2026-02-20T20:20:58.395Z" }, - { url = "https://files.pythonhosted.org/packages/24/b4/21f5455773d37f94b866eb3cf5caed88d6cea6dd2c6e1f9c34f463cba3ec/greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef", size = 1551946, upload-time = "2026-02-20T20:49:31.102Z" }, - { url = "https://files.pythonhosted.org/packages/00/68/91f061a926abead128fe1a87f0b453ccf07368666bd59ffa46016627a930/greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca", size = 1618494, upload-time = "2026-02-20T20:21:06.541Z" }, - { url = "https://files.pythonhosted.org/packages/ac/78/f93e840cbaef8becaf6adafbaf1319682a6c2d8c1c20224267a5c6c8c891/greenlet-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:5d0e35379f93a6d0222de929a25ab47b5eb35b5ef4721c2b9cbcc4036129ff1f", size = 230092, upload-time = "2026-02-20T20:17:09.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, - { url = "https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, - { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, - { url = "https://files.pythonhosted.org/packages/9c/8b/1430a04657735a3f23116c2e0d5eb10220928846e4537a938a41b350bed6/greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2", size = 605046, upload-time = "2026-02-20T21:02:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, - { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3a/efb2cf697fbccdf75b24e2c18025e7dfa54c4f31fab75c51d0fe79942cef/greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5", size = 230389, upload-time = "2026-02-20T20:17:18.772Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a1/65bbc059a43a7e2143ec4fc1f9e3f673e04f9c7b371a494a101422ac4fd5/greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd", size = 229645, upload-time = "2026-02-20T20:18:18.695Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, - { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, - { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, - { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, - { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, - { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, - { url = "https://files.pythonhosted.org/packages/9b/40/cc802e067d02af8b60b6771cea7d57e21ef5e6659912814babb42b864713/greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f", size = 231081, upload-time = "2026-02-20T20:17:28.121Z" }, - { url = "https://files.pythonhosted.org/packages/58/2e/fe7f36ff1982d6b10a60d5e0740c759259a7d6d2e1dc41da6d96de32fff6/greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643", size = 230331, upload-time = "2026-02-20T20:17:23.34Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, - { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, - { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, - { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, - { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, - { url = "https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, - { url = "https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, - { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, - { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, - { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, - { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, - { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, - { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, - { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, - { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, - { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, - { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, -] - [[package]] name = "iniconfig" version = "2.3.0" @@ -388,11 +319,9 @@ wheels = [ [[package]] name = "ryx" -version = "0.1.0" +version = "0.1.2" source = { editable = "." } dependencies = [ - { name = "aiosqlite" }, - { name = "greenlet" }, { name = "ipython", version = "8.39.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.11.*'" }, { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, @@ -412,8 +341,6 @@ dev = [ [package.metadata] requires-dist = [ - { name = "aiosqlite", specifier = ">=0.22.1" }, - { name = "greenlet", specifier = ">=3.3.2" }, { name = "ipython", specifier = ">=8.0.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = ">=8" }, { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23" }, From bd17871b11840b8df7a5e89836288f899768517e Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Tue, 7 Apr 2026 18:39:15 +0000 Subject: [PATCH 03/13] docs: update filtering and lookup reference docs with new JSON lookups and date/time transforms --- docs/doc/querying/filtering.mdx | 32 ++++++++ docs/doc/reference/lookup_reference.mdx | 99 +++++++++++++++++++++++++ 2 files changed, 131 
insertions(+) create mode 100644 docs/doc/reference/lookup_reference.mdx diff --git a/docs/doc/querying/filtering.mdx b/docs/doc/querying/filtering.mdx index 1d59236..e02a93a 100644 --- a/docs/doc/querying/filtering.mdx +++ b/docs/doc/querying/filtering.mdx @@ -100,6 +100,38 @@ Post.objects.filter(active=True).exclude(title__startswith="Draft") | `in` | `col IN (?, ?, ...)` | `filter(id__in=[1,2,3])` | | `range` | `col BETWEEN ? AND ?` | `filter(views__range=(100,1000))` | +## Date & Time Transforms +These can be chained with other lookups (e.g. `created_at__year__gte=2024`). + +| Transform | SQL | Example | +|---|---|---| +| `date` | `DATE(col) = ?` | `filter(created_at__date=date(2024,1,1))` | +| `year` | `EXTRACT(YEAR FROM col) = ?` (Postgres) / `YEAR(col) = ?` (MySQL) / `CAST(strftime('%Y', col) AS INTEGER) = ?` (SQLite) | `filter(created_at__year=2024)` | +| `month` | `EXTRACT(MONTH FROM col) = ?` / `MONTH(col) = ?` / `CAST(strftime('%m', col) AS INTEGER) = ?` | `filter(created_at__month=5)` | +| `day` | `EXTRACT(DAY FROM col) = ?` / `DAYOFMONTH(col) = ?` / `CAST(strftime('%d', col) AS INTEGER) = ?` | `filter(created_at__day=15)` | +| `hour` | `EXTRACT(HOUR FROM col) = ?` / `HOUR(col) = ?` / `CAST(strftime('%H', col) AS INTEGER) = ?` | `filter(created_at__hour=14)` | +| `minute` | `EXTRACT(MINUTE FROM col) = ?` / `MINUTE(col) = ?` / `CAST(strftime('%M', col) AS INTEGER) = ?` | `filter(created_at__minute=30)` | +| `second` | `EXTRACT(SECOND FROM col) = ?` / `SECOND(col) = ?` / `CAST(strftime('%S', col) AS INTEGER) = ?` | `filter(created_at__second=45)` | +| `week` | `EXTRACT(WEEK FROM col) = ?` / `WEEK(col) = ?` / `CAST(strftime('%W', col) AS INTEGER) = ?` | `filter(created_at__week=20)` | +| `dow` | `EXTRACT(DOW FROM col) = ?` / `DAYOFWEEK(col) = ?` / `CAST(strftime('%w', col) AS INTEGER) = ?` | `filter(created_at__dow=1)` | +| `quarter` | `EXTRACT(QUARTER FROM col) = ?` / `QUARTER(col) = ?` / `((CAST(strftime('%m', col) AS INTEGER) + 2) / 3) = ?` | 
`filter(created_at__quarter=2)` | +| `time` | `TIME(col) = ?` (Postgres/MySQL) / `time(col) = ?` (SQLite) | `filter(created_at__time='12:34:56')` | +| `iso_week` | `EXTRACT(ISOWEEK FROM col) = ?` / complex MySQL expression / `CAST(strftime('%W', col) AS INTEGER) = ?` (SQLite) | `filter(created_at__iso_week=15)` | +| `iso_dow` | `EXTRACT(ISODOW FROM col) = ?` / `((DAYOFWEEK(col) + 5) % 7) + 1 = ?` (MySQL) / `CAST(strftime('%w', col) AS INTEGER) = ?` (SQLite) | `filter(created_at__iso_dow=3)` | + +## JSON Transforms & Lookups +These work on JSONB/JSON fields and can be chained (e.g. `metadata__key__priority__exact="high"`). + +| Transform/Lookup | SQL (Postgres) | Example | +|---|---|---| +| `key` | `(col->>'key')` | `filter(metadata__key__priority__exact="high")` | +| `key_text` | `(col->>'key')::text` | `filter(metadata__key_text__priority__icontains="urgent")` | +| `json` | `(col::jsonb)` | `filter(metadata__json__contains={'active': true})` | +| `has_key` | `col ? 'key'` | `filter(metadata__has_key="priority")` | +| `has_keys` | `col ?& array['key1','key2']` | `filter(metadata__has_keys=["priority","status"])` | +| `contains` | `col @> ?` | `filter(metadata__contains={"priority": "high"})` | +| `contained_by` | `col <@ ?` | `filter(metadata__contained_by={"priority": "high"})` | + ## Custom Lookups Register your own SQL lookups: diff --git a/docs/doc/reference/lookup_reference.mdx b/docs/doc/reference/lookup_reference.mdx new file mode 100644 index 0000000..07c0a85 --- /dev/null +++ b/docs/doc/reference/lookup_reference.mdx @@ -0,0 +1,99 @@ +--- +sidebar_position: 3 +--- + +# Lookup Reference + +This document lists all built-in lookups and transforms available in Ryx. 
+ +## Comparison Lookups + +| Lookup | SQL | Example | +|--------|-----|---------| +| `exact` | `col = ?` | `filter(title="Hello")` | +| `gt` | `col > ?` | `filter(views__gt=100)` | +| `gte` | `col >= ?` | `filter(views__gte=100)` | +| `lt` | `col < ?` | `filter(views__lt=50)` | +| `lte` | `col <= ?` | `filter(views__lte=1000)` | + +## String Lookups + +| Lookup | SQL | Example | +|--------|-----|---------| +| `contains` | `col LIKE ?` | `filter(title__contains="Py")` | +| `icontains` | `LOWER(col) LIKE LOWER(?)` | `filter(title__icontains="py")` | +| `startswith` | `col LIKE ?` | `filter(title__startswith="How")` | +| `istartswith` | `LOWER(col) LIKE LOWER(?)` | `filter(title__istartswith="how")` | +| `endswith` | `col LIKE ?` | `filter(title__endswith="guide")` | +| `iendswith` | `LOWER(col) LIKE LOWER(?)` | `filter(title__iendswith="Guide")` | + +## Null Checks + +| Lookup | SQL | Example | +|--------|-----|---------| +| `isnull` | `col IS NULL / IS NOT NULL` | `filter(body__isnull=True)` | + +## Membership + +| Lookup | SQL | Example | +|--------|-----|---------| +| `in` | `col IN (?, ?, ...)` | `filter(id__in=[1,2,3])` | + +## Range + +| Lookup | SQL | Example | +|--------|-----|---------| +| `range` | `col BETWEEN ? AND ?` | `filter(views__range=(100,1000))` | + +## Date & Time Transforms + +These can be chained with other lookups (e.g. `created_at__year__gte=2024`). 
+ +| Transform | SQL | Example | +|-----------|-----|---------| +| `date` | `DATE(col) = ?` | `filter(created_at__date=date(2024,1,1))` | +| `year` | `EXTRACT(YEAR FROM col) = ?` (Postgres) / `YEAR(col) = ?` (MySQL) / `CAST(strftime('%Y', col) AS INTEGER) = ?` (SQLite) | `filter(created_at__year=2024)` | +| `month` | `EXTRACT(MONTH FROM col) = ?` / `MONTH(col) = ?` / `CAST(strftime('%m', col) AS INTEGER) = ?` | `filter(created_at__month=5)` | +| `day` | `EXTRACT(DAY FROM col) = ?` / `DAYOFMONTH(col) = ?` / `CAST(strftime('%d', col) AS INTEGER) = ?` | `filter(created_at__day=15)` | +| `hour` | `EXTRACT(HOUR FROM col) = ?` / `HOUR(col) = ?` / `CAST(strftime('%H', col) AS INTEGER) = ?` | `filter(created_at__hour=14)` | +| `minute` | `EXTRACT(MINUTE FROM col) = ?` / `MINUTE(col) = ?` / `CAST(strftime('%M', col) AS INTEGER) = ?` | `filter(created_at__minute=30)` | +| `second` | `EXTRACT(SECOND FROM col) = ?` / `SECOND(col) = ?` / `CAST(strftime('%S', col) AS INTEGER) = ?` | `filter(created_at__second=45)` | +| `week` | `EXTRACT(WEEK FROM col) = ?` / `WEEK(col) = ?` / `CAST(strftime('%W', col) AS INTEGER) = ?` | `filter(created_at__week=20)` | +| `dow` | `EXTRACT(DOW FROM col) = ?` / `DAYOFWEEK(col) = ?` / `CAST(strftime('%w', col) AS INTEGER) = ?` | `filter(created_at__dow=1)` | +| `quarter` | `EXTRACT(QUARTER FROM col) = ?` / `QUARTER(col) = ?` / `((CAST(strftime('%m', col) AS INTEGER) + 2) / 3) = ?` | `filter(created_at__quarter=2)` | +| `time` | `TIME(col) = ?` (Postgres/MySQL) / `time(col) = ?` (SQLite) | `filter(created_at__time='12:34:56')` | +| `iso_week` | `EXTRACT(ISOWEEK FROM col) = ?` / complex MySQL expression / `CAST(strftime('%W', col) AS INTEGER) = ?` (SQLite) | `filter(created_at__iso_week=15)` | +| `iso_dow` | `EXTRACT(ISODOW FROM col) = ?` / `((DAYOFWEEK(col) + 5) % 7) + 1 = ?` (MySQL) / `CAST(strftime('%w', col) AS INTEGER) = ?` (SQLite) | `filter(created_at__iso_dow=3)` | + +## JSON Transforms & Lookups + +These work on JSONB/JSON fields and can be 
chained (e.g. `metadata__key__priority__exact="high"`). + +| Transform/Lookup | SQL (Postgres) | Example | +|------------------|----------------|---------| +| `key` | `(col->>'key')` | `filter(metadata__key__priority__exact="high")` | +| `key_text` | `(col->>'key')::text` | `filter(metadata__key_text__priority__icontains="urgent")` | +| `json` | `(col::jsonb)` | `filter(metadata__json__contains={'active': True})` | +| `has_key` | `col ? 'key'` | `filter(metadata__has_key="priority")` | +| `has_keys` | `col ?& array['key1','key2']` | `filter(metadata__has_keys=["priority","status"])` | +| `contains` | `col @> ?` | `filter(metadata__contains={"priority": "high"})` | +| `contained_by` | `col <@ ?` | `filter(metadata__contained_by={"priority": "high"})` | + +## Custom Lookups + +You can register your own SQL lookups: + +```python +import ryx + +# Postgres ILIKE +ryx.register_lookup("ilike", "{col} ILIKE ?") + +# Usage +Post.objects.filter(title__ilike="%python%") + +# Decorator style +@ryx.lookup("uuid_prefix") +def uuid_prefix_lookup(field, value): + """{col}::text LIKE ?""" +``` \ No newline at end of file From 5423fc7e184378bf78066973ca6e8f64b07655f5 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:13:41 +0000 Subject: [PATCH 04/13] feat(lookup): implement auto-discovery for lookups and transforms --- ryx/__init__.py | 12 ++++++++++++ ryx/queryset.py | 30 +++++++++++++++++++++++++++--- src/lib.rs | 17 ++++++++++++++++- 3 files changed, 55 insertions(+), 4 deletions(-) diff --git a/ryx/__init__.py b/ryx/__init__.py index e6499c5..d4e6ec7 100644 --- a/ryx/__init__.py +++ b/ryx/__init__.py @@ -139,6 +139,16 @@ def available_lookups() -> list[str]: return _core.available_lookups() +def list_lookups() -> list[str]: + """Return all built-in lookup names (for auto-discovery).""" + return list(_core.list_lookups()) + + +def list_transforms() -> list[str]: + """Return all built-in transform names (for auto-discovery).""" + return
list(_core.list_transforms()) + + def is_connected() -> bool: return _core.is_connected() @@ -172,6 +182,8 @@ def decorator(sql_template_or_fn): "is_connected", "pool_stats", "lookup", + "list_lookups", + "list_transforms", # Model "Model", "Index", diff --git a/ryx/queryset.py b/ryx/queryset.py index 0367cb5..376fac8 100644 --- a/ryx/queryset.py +++ b/ryx/queryset.py @@ -275,6 +275,31 @@ def _clone(self, builder=None, **overrides) -> "QuerySet": _group_by=overrides.get("_group_by", list(self._group_by)), ) + def _validate_filters(self, kwargs: Dict[str, Any]) -> None: + """Verify that lookups and transforms are supported by the field types.""" + for key, val in kwargs.items(): + # Handle pk lookup by resolving to the actual PK field name + lookup_key = key + if key == "pk": + lookup_key = self._model._meta.pk_field.attname + + field_name, lookup = _parse_lookup_key(lookup_key) + field = self._model._meta.fields.get(field_name) + if not field: + continue + + # 1. Validate transforms (if chained: transform__transform__lookup) + if "__" in lookup: + parts = lookup.split("__") + transforms = parts[:-1] + final_lookup = parts[-1] + for t in transforms: + field._validate_transform(t) + field._validate_lookup(final_lookup) + else: + # 2. Simple lookup + field._validate_lookup(lookup) + ## Filtering def filter(self, *q_args: Q, **kwargs: Any) -> "QuerySet": """Add WHERE conditions (AND-ed). Accepts Q objects and kwargs. 
@@ -284,7 +309,7 @@ def filter(self, *q_args: Q, **kwargs: Any) -> "QuerySet": Post.objects.filter(Q(active=True) | Q(featured=True)) Post.objects.filter(Q(active=True), views__gte=100) """ - + self._validate_filters(kwargs) builder = self._builder # Q objects @@ -303,7 +328,7 @@ def exclude(self, *q_args: Q, **kwargs: Any) -> "QuerySet": """Add NOT conditions.""" - + self._validate_filters(kwargs) builder = self._builder for q in q_args: builder = _apply_q_node(builder, (~q).to_q_node()) @@ -834,7 +859,6 @@ def _get_known_lookups() -> frozenset: "json", # JSON lookups (final lookups) "has_key", - "has_keys", "contains", "contained_by", } diff --git a/src/lib.rs b/src/lib.rs index b64f3d1..7b7ac49 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -68,7 +68,18 @@ fn available_lookups() -> PyResult> { } #[pyfunction] -fn is_connected() -> bool { +fn list_lookups() -> Vec<&'static str> { + lookups::all_lookups().to_vec() +} + +#[pyfunction] +fn list_transforms() -> Vec<&'static str> { + lookups::all_transforms().to_vec() +} + + +#[pyfunction] +fn is_connected(_py: Python<'_>) -> bool { pool::is_initialized() } @@ -825,6 +836,8 @@ fn ryx_core(m: &Bound<'_, PyModule>) -> PyResult<()> { m.add_function(wrap_pyfunction!(setup, m)?)?; m.add_function(wrap_pyfunction!(register_lookup, m)?)?; m.add_function(wrap_pyfunction!(available_lookups, m)?)?; + m.add_function(wrap_pyfunction!(list_lookups, m)?)?; + m.add_function(wrap_pyfunction!(list_transforms, m)?)?; m.add_function(wrap_pyfunction!(is_connected, m)?)?; m.add_function(wrap_pyfunction!(pool_stats, m)?)?; m.add_function(wrap_pyfunction!(raw_fetch, m)?)?; From fdbce57c719fc3d40cfc16055855a477ed8455b5 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:14:14 +0000 Subject: [PATCH 05/13] feat(lookup): add supported lookups and transforms to
all field types --- ryx/fields.py | 419 +++++++++++++++++++++++++++++--------------------- 1 file changed, 246 insertions(+), 173 deletions(-) diff --git a/ryx/fields.py b/ryx/fields.py index 6ef5fbd..74e2bf1 100644 --- a/ryx/fields.py +++ b/ryx/fields.py @@ -29,7 +29,7 @@ if TYPE_CHECKING: from ryx.models import Model -# Deferred reverse FK descriptor registry +# Deferred reverse FK descriptor registry # Forward-reference FK targets (string names) can't install ReverseFKDescriptors # immediately at class-definition time because the target class may not exist yet. # We accumulate (target_ref, rel_name, source_model, fk_attname) tuples here @@ -55,7 +55,7 @@ def resolve_pending_reverse_fks() -> None: import sys still_pending = [] - for (target_ref, rel_name, source_model, fk_attname) in _pending_reverse_fk: + for target_ref, rel_name, source_model, fk_attname in _pending_reverse_fk: try: target_model = _resolve_model(target_ref, source_model) if not hasattr(target_model, rel_name): @@ -69,6 +69,7 @@ def resolve_pending_reverse_fks() -> None: _pending_reverse_fk.clear() _pending_reverse_fk.extend(still_pending) + _MISSING = object() @@ -98,10 +99,16 @@ class Field: unique_for_date:str — Field name — enforce uniqueness per date value. unique_for_month:str — Field name — enforce uniqueness per month value. unique_for_year : str — Field name — enforce uniqueness per year value. + + SUPPORTED_LOOKUPS: list[str] — Lookups allowed on this field. + SUPPORTED_TRANSFORMS: list[str] — Transforms allowed on this field. 
""" + SUPPORTED_LOOKUPS: list[str] = [] + SUPPORTED_TRANSFORMS: list[str] = [] + attname: str = "" - column: str = "" + column: str = "" model: Optional[Type["Model"]] = None def __init__( @@ -109,15 +116,15 @@ def __init__( *, null: bool = False, blank: bool = False, - default: Any = _MISSING, + default: Any = _MISSING, primary_key: bool = False, unique: bool = False, db_index: bool = False, choices: Optional[Sequence] = None, validators: Optional[List[Validator]] = None, editable: bool = True, - help_text: str = "", - verbose_name: str = "", + help_text: str = "", + verbose_name: str = "", db_column: Optional[str] = None, unique_for_date: Optional[str] = None, unique_for_month: Optional[str] = None, @@ -151,7 +158,7 @@ def _build_implicit_validators(self) -> None: # Not null if not self.null and not self.primary_key: self._validators.insert(0, NotNullValidator()) - + # Choices if self.choices: # Extract just the values from (value, label) pairs if necessary @@ -165,7 +172,7 @@ def _build_implicit_validators(self) -> None: # Descriptor protocol def __set_name__(self, owner: type, name: str) -> None: self.attname = name - self.column = self._db_column or name + self.column = self._db_column or name def __get__(self, obj: Optional["Model"], objtype: Optional[type] = None) -> Any: if obj is None: @@ -182,9 +189,7 @@ def contribute_to_class(self, model: Type["Model"], name: str) -> None: self.model = model def db_type(self) -> str: - raise NotImplementedError( - f"{type(self).__name__}.db_type() not implemented" - ) + raise NotImplementedError(f"{type(self).__name__}.db_type() not implemented") def to_python(self, value: Any) -> Any: return value @@ -200,6 +205,22 @@ def get_default(self) -> Any: def has_default(self) -> bool: return self.default is not _MISSING + def _validate_lookup(self, lookup: str) -> None: + """Verify that the lookup is supported by this field type.""" + if lookup not in self.SUPPORTED_LOOKUPS: + raise ValueError( + f"Lookup '{lookup}' is not 
supported on {type(self).__name__}. " + f"Supported lookups: {', '.join(self.SUPPORTED_LOOKUPS)}" + ) + + def _validate_transform(self, transform: str) -> None: + """Verify that the transform is supported by this field type.""" + if transform not in self.SUPPORTED_TRANSFORMS: + raise ValueError( + f"Transform '{transform}' is not supported on {type(self).__name__}. " + f"Supported transforms: {', '.join(self.SUPPORTED_TRANSFORMS)}" + ) + def validate(self, value: Any) -> None: """Run all validators on ``value``. @@ -217,7 +238,7 @@ def validate(self, value: Any) -> None: def clean(self, value: Any) -> Any: """Validate and return the cleaned value. - + This is a convenience method that validates the value and returns it if validation passes. """ @@ -253,13 +274,13 @@ def __init__(self, **kw): kw.setdefault("editable", False) super().__init__(**kw) - def db_type(self) -> str: + def db_type(self) -> str: return "INTEGER" - - def to_python(self, v): + + def to_python(self, v): return None if v is None else int(v) - def _build_implicit_validators(self): + def _build_implicit_validators(self): pass # PK never needs NotNullValidator @@ -268,7 +289,8 @@ def _build_implicit_validators(self): ##### class BigAutoField(AutoField): """64-bit auto-increment PK.""" - def db_type(self) -> str: + + def db_type(self) -> str: return "BIGINT" @@ -277,7 +299,8 @@ def db_type(self) -> str: ##### class SmallAutoField(AutoField): """16-bit auto-increment PK.""" - def db_type(self) -> str: + + def db_type(self) -> str: return "SMALLINT" @@ -290,6 +313,9 @@ class IntField(Field): Extra kwargs: ``min_value``, ``max_value``. 
""" + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [] + def __init__( self, *, @@ -312,13 +338,17 @@ def db_type(self) -> str: def to_python(self, v): return None if v is None else int(v) - + #### ### SMALL INTEGER FIELD ##### class SmallIntField(IntField): """16-bit integer (SMALLINT).""" + + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [] + def db_type(self) -> str: return "SMALLINT" @@ -328,6 +358,10 @@ def db_type(self) -> str: ##### class BigIntField(IntField): """64-bit integer (BIGINT).""" + + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [] + def db_type(self) -> str: return "BIGINT" @@ -338,6 +372,9 @@ def db_type(self) -> str: class PositiveIntField(IntField): """Integer that must be >= 0.""" + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [] + def __init__(self, **kw): kw.setdefault("min_value", 0) super().__init__(**kw) @@ -351,6 +388,9 @@ def db_type(self) -> str: return "INTEGER" class FloatField(Field): """Double-precision float. Extra kwargs: ``min_value``, ``max_value``.""" + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [] + def __init__(self, *, min_value=None, max_value=None, **kw): super().__init__(**kw) @@ -373,6 +413,9 @@ def to_python(self, v): class DecimalField(Field): """Fixed-precision decimal (NUMERIC). 
Extra kwargs: ``min_value``, ``max_value``.""" + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [] + def __init__( self, *, @@ -407,15 +450,21 @@ def to_db(self, v): ##### class BooleanField(Field): """Boolean (BOOLEAN).""" - def db_type(self) -> str: return "BOOLEAN" + + SUPPORTED_LOOKUPS = ["exact", "isnull"] + SUPPORTED_TRANSFORMS = [] + + def db_type(self) -> str: + return "BOOLEAN" + def to_python(self, v): - if v is None: + if v is None: return None if isinstance(v, str): v_lower = v.lower() - if v_lower in ('true', '1', 'yes', 'on'): + if v_lower in ("true", "1", "yes", "on"): return True - elif v_lower in ('false', '0', 'no', 'off', ''): + elif v_lower in ("false", "0", "no", "off", ""): return False return bool(v) @@ -425,6 +474,7 @@ def to_python(self, v): ##### class NullBooleanField(BooleanField): """Nullable boolean. Equivalent to BooleanField(null=True).""" + def __init__(self, **kw): kw.setdefault("null", True) super().__init__(**kw) @@ -443,19 +493,33 @@ class CharField(Field): strip : bool — Strip leading/trailing whitespace (default: True). 
""" + SUPPORTED_LOOKUPS = [ + "exact", + "contains", + "icontains", + "startswith", + "istartswith", + "endswith", + "iendswith", + "in", + "range", + "isnull", + ] + SUPPORTED_TRANSFORMS = [] + def __init__( - self, - *, - max_length: int = 255, + self, + *, + max_length: int = 255, min_length: Optional[int] = None, - strip: bool = True, - **kw + strip: bool = True, + **kw, ): self._strip = strip self.max_length = max_length self.min_length = min_length super().__init__(**kw) - + # Max length validator self._validators.append(MaxLengthValidator(max_length)) if min_length is not None: @@ -464,11 +528,11 @@ def __init__( if not self.blank and not self.null: self._validators.append(NotBlankValidator()) - def db_type(self) -> str: + def db_type(self) -> str: return f"VARCHAR({self.max_length})" - + def to_python(self, v): - if v is None: + if v is None: return None s = str(v) return s.strip() if self._strip else s @@ -481,8 +545,7 @@ class SlugField(CharField): """CharField that validates slug format (letters, digits, hyphens, underscores).""" _SLUG_RE = RegexValidator( - r"^[-\w]+$", - "Enter a valid slug (letters, digits, hyphens, underscores)." + r"^[-\w]+$", "Enter a valid slug (letters, digits, hyphens, underscores)." ) def __init__(self, **kw): @@ -520,10 +583,8 @@ def __init__(self, **kw): ##### class IPAddressField(CharField): """CharField for IPv4 addresses.""" - _IP_RE = RegexValidator( - r"^(\d{1,3}\.){3}\d{1,3}$", - "Enter a valid IPv4 address." - ) + + _IP_RE = RegexValidator(r"^(\d{1,3}\.){3}\d{1,3}$", "Enter a valid IPv4 address.") def __init__(self, **kw): kw.setdefault("max_length", 15) @@ -537,24 +598,30 @@ def __init__(self, **kw): class TextField(Field): """Unbounded text (TEXT). 
Extra kwargs: ``min_length``, ``max_length``.""" - def __init__(self, *, min_length: Optional[int] = None, max_length: Optional[int] = None, **kw): + def __init__( + self, + *, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + **kw, + ): super().__init__(**kw) self.max_length = max_length if min_length is not None: self._validators.append(MinLengthValidator(min_length)) - + if max_length is not None: self._validators.append(MaxLengthValidator(max_length)) if not self.blank and not self.null: self._validators.append(NotBlankValidator()) - def db_type(self) -> str: + def db_type(self) -> str: return "TEXT" - - def to_python(self, v): + + def to_python(self, v): return None if v is None else str(v) @@ -564,13 +631,13 @@ def to_python(self, v): class BinaryField(Field): """Binary blob field (BYTEA / BLOB).""" - def db_type(self) -> str: + def db_type(self) -> str: return "BYTEA" - - def to_python(self, v): + + def to_python(self, v): return v - - def _build_implicit_validators(self): + + def _build_implicit_validators(self): pass # binary content — skip NotBlankValidator @@ -580,13 +647,20 @@ def _build_implicit_validators(self): class DateField(Field): """Date only (DATE). 
Extra kwargs: ``auto_now``, ``auto_now_add``.""" - def __init__( - self, - *, - auto_now: bool = False, - auto_now_add: bool = False, - **kw - ): + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [ + "date", + "year", + "month", + "day", + "week", + "dow", + "quarter", + "iso_week", + "iso_dow", + ] + + def __init__(self, *, auto_now: bool = False, auto_now_add: bool = False, **kw): self.auto_now = auto_now self.auto_now_add = auto_now_add @@ -594,27 +668,23 @@ def __init__( kw.setdefault("editable", False) super().__init__(**kw) - def db_type(self) -> str: + def db_type(self) -> str: return "DATE" - + def to_python(self, v): - # Null value - if v is None: + if v is None: return None - - # Already a date/datetime - if isinstance(v, datetime): + if isinstance(v, datetime): return v.date() - - if isinstance(v, date): + if isinstance(v, date): return v - return date.fromisoformat(str(v)) - - def to_db(self, v): - return None if v is None else ( - v.isoformat() - if isinstance(v, (date, datetime)) else str(v) + + def to_db(self, v): + return ( + None + if v is None + else (v.isoformat() if isinstance(v, (date, datetime)) else str(v)) ) @@ -624,12 +694,29 @@ def to_db(self, v): class DateTimeField(Field): """Timestamp (TIMESTAMP). 
Extra kwargs: ``auto_now``, ``auto_now_add``.""" + SUPPORTED_LOOKUPS = ["exact", "gt", "gte", "lt", "lte", "in", "range", "isnull"] + SUPPORTED_TRANSFORMS = [ + "date", + "year", + "month", + "day", + "hour", + "minute", + "second", + "week", + "dow", + "quarter", + "time", + "iso_week", + "iso_dow", + ] + def __init__( - self, - *, - auto_now: bool = False, - auto_now_add: bool = False, - **kw + self, + *, + auto_now: bool = False, + auto_now_add: bool = False, + **kw, ): self.auto_now = auto_now self.auto_now_add = auto_now_add @@ -638,28 +725,22 @@ def __init__( kw.setdefault("editable", False) super().__init__(**kw) - def db_type(self) -> str: + def db_type(self) -> str: return "TIMESTAMP" - + def to_python(self, v): - if v is None: + if v is None: return None - if isinstance(v, datetime): + if isinstance(v, datetime): return v return datetime.fromisoformat(str(v)) - + def to_db(self, v): if v is None: return None if isinstance(v, datetime): - return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + return v.strftime("%Y-%m-%dT%H:%M:%S.%f") return str(v) - - # def to_db(self, v): - # return None if v is None else ( - # v.isoformat() - # if isinstance(v, datetime) else str(v) - # ) #### @@ -668,15 +749,15 @@ def to_db(self, v): class TimeField(Field): """Time only (TIME).""" - def db_type(self) -> str: + def db_type(self) -> str: return "TIME" - + def to_python(self, v): from datetime import time - if v is None: + + if v is None: return None - - if isinstance(v, time): + if isinstance(v, time): return v return time.fromisoformat(str(v)) @@ -687,23 +768,19 @@ def to_python(self, v): class DurationField(Field): """Python timedelta stored as BIGINT (microseconds).""" - def db_type(self) -> str: + def db_type(self) -> str: return "BIGINT" - + def to_python(self, v): - - if v is None: + if v is None: return None - - if isinstance(v, timedelta): + if isinstance(v, timedelta): return v - return timedelta(microseconds=int(v)) - + def to_db(self, v): - if v is None: + if v is None: 
return None - return int(v.total_seconds() * 1_000_000) @@ -716,22 +793,24 @@ class UUIDField(Field): Extra kwargs: ``auto_create`` — generate uuid4 by default. """ + SUPPORTED_LOOKUPS = ["exact", "in", "isnull"] + SUPPORTED_TRANSFORMS = [] + def __init__(self, *, auto_create: bool = False, **kw): self.auto_create = auto_create if auto_create: kw.setdefault("default", uuid.uuid4) super().__init__(**kw) - def db_type(self) -> str: + def db_type(self) -> str: return "UUID" - + def to_python(self, v): - if v is None: + if v is None: return None - return v if isinstance(v, uuid.UUID) else uuid.UUID(str(v)) - - def to_db(self, v): + + def to_db(self, v): return None if v is None else str(v) @@ -741,19 +820,36 @@ def to_db(self, v): class JSONField(Field): """JSON field. Stored as JSONB (Postgres) or TEXT (others).""" - def db_type(self) -> str: + SUPPORTED_LOOKUPS = [ + "exact", + "gt", + "gte", + "lt", + "lte", + "in", + "range", + "isnull", + "has_key", + "has_any", + "has_all", + "contains", + "contained_by", + ] + SUPPORTED_TRANSFORMS = ["key", "key_text", "json"] + + def db_type(self) -> str: return "JSONB" - + def to_python(self, v): - if v is None: + if v is None: return None return json.loads(v) if isinstance(v, str) else v - + def to_db(self, v): return None if v is None else json.dumps(v) - - def _build_implicit_validators(self): - pass # JSON can be any shape + + def _build_implicit_validators(self): + pass #### @@ -770,20 +866,20 @@ def __init__(self, base_field: Field, **kw): self.base_field = base_field super().__init__(**kw) - def db_type(self) -> str: + def db_type(self) -> str: return f"{self.base_field.db_type()}[]" - + def to_python(self, v): - if v is None: + if v is None: return None - if isinstance(v, list): + if isinstance(v, list): return v return json.loads(v) - + def to_db(self, v): return None if v is None else json.dumps(v) - - def _build_implicit_validators(self): + + def _build_implicit_validators(self): pass @@ -798,17 +894,17 @@ class 
ForeignKey(Field): on_delete: "CASCADE", "SET_NULL", "PROTECT", "RESTRICT", "SET_DEFAULT". related_name: Name for the reverse relation on the related model. db_constraint: If False, skip the DB FOREIGN KEY constraint (useful for - cross-database or legacy schemas). + cross-database or legacy schemas). """ def __init__( - self, - to: Any, - *, + self, + to: Any, + *, on_delete: str = "CASCADE", related_name: Optional[str] = None, - db_constraint: bool = True, - **kw + db_constraint: bool = True, + **kw, ): self.to = to self.on_delete = on_delete @@ -817,32 +913,23 @@ def __init__( super().__init__(**kw) def contribute_to_class(self, model, name): - # The DB column is "{name}_id" (e.g. "author_id"). self.attname = f"{name}_id" self.column = self._db_column or f"{name}_id" self.model = model - # Install a ForwardDescriptor under the relation name (without _id) - # so that ``post.author`` returns the related Author instance. - # The _id column is already handled by the Field descriptor protocol. from ryx.descriptors import ForwardDescriptor + fwd = ForwardDescriptor(self.attname, self.to) fwd.__set_name__(model, name) - # Use type.__setattr__ to set on a class with a custom metaclass type.__setattr__(model, name, fwd) - # Install a ReverseFKDescriptor on the target model if related_name is given. - # If related_name is not set, use the lowercase source model name + "_set" - # (Django convention: author.post_set). rel_name = self.related_name or f"{model.__name__.lower()}_set" - # We do deferred installation because the target model class may not exist - # yet (forward references). Store pending registration to be resolved later. 
_pending_reverse_fk.append((self.to, rel_name, model, self.attname)) - def db_type(self) -> str: + def db_type(self) -> str: return "INTEGER" - - def to_python(self, v): + + def to_python(self, v): return None if v is None else int(v) @@ -868,17 +955,16 @@ class ManyToManyField(Field): """ def __init__( - self, - to: Any, - *, + self, + to: Any, + *, through: Optional[str] = None, - related_name: Optional[str] = None, - **kw + related_name: Optional[str] = None, + **kw, ): self.to = to self.through = through self.related_name = related_name - # M2M fields don't add a column — skip parent __init__ validators self.attname = "" self.column = "" self.model = None @@ -889,58 +975,45 @@ def __init__( self.unique = False self.db_index = False self.choices = None - self.editable = False # M2M fields are not directly editable + self.editable = False self.help_text = "" self.verbose_name = "" self._db_column = None - - # Must define default so get_default() / has_default() work even - # though we skip Field.__init__ — use the sentinel from Field self.default = _MISSING - - # M2M join table metadata — set by contribute_to_class self._join_table = "" self._source_fk = "" self._target_fk = "" - def db_type(self) -> str: - return "" # No column - + def db_type(self) -> str: + return "" + def contribute_to_class(self, model, name): self.attname = name self.model = model - # Register on the model's _meta as a M2M relation (not a column) if hasattr(model, "_meta"): model._meta.many_to_many[name] = self - # Determine join table name: "{model_a}_{model_b}" or user-specified join_table = self.through or f"{model.__name__.lower()}_{name}" - - # Source FK column: "{source_model}_id" (e.g. "post_id") source_fk = f"{model.__name__.lower()}_id" + target_fk = ( + f"{name.removesuffix('s')}_id" if name.endswith("s") else f"{name}_id" + ) - # Target FK column: "{field_name}_id" → derive from field name - # e.g. 
for Post.tags the target FK in the join table is "tag_id" - target_fk = f"{name.removesuffix('s')}_id" if name.endswith('s') else f"{name}_id" - - # Install ManyToManyDescriptor on source model from ryx.descriptors import ManyToManyDescriptor + desc = ManyToManyDescriptor( - target_model_ref = self.to, - join_table = join_table, - source_fk = source_fk, - target_fk = target_fk, + target_model_ref=self.to, + join_table=join_table, + source_fk=source_fk, + target_fk=target_fk, ) desc.__set_name__(model, name) - - # Use type.__setattr__ to bypass the metaclass __setattr__ restriction type.__setattr__(model, name, desc) - # Store join table metadata on the field for migration DDL generation self._join_table = join_table self._source_fk = source_fk self._target_fk = target_fk - def _build_implicit_validators(self): + def _build_implicit_validators(self): pass From 9409df592185a82c9636d90d83186986950f988c Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:14:18 +0000 Subject: [PATCH 06/13] feat(lookup): implement advanced JSON lookups (has_key, has_any, has_all) --- src/query/lookups/json_lookups.rs | 34 ++++++++++------ src/query/lookups/lookups.rs | 66 ++++++++++++++++++++++++++++++- 2 files changed, 88 insertions(+), 12 deletions(-) diff --git a/src/query/lookups/json_lookups.rs b/src/query/lookups/json_lookups.rs index beb7401..5c5f591 100644 --- a/src/query/lookups/json_lookups.rs +++ b/src/query/lookups/json_lookups.rs @@ -89,24 +89,36 @@ pub fn json_cast_transform(ctx: &LookupContext) -> String { } } -/// `field__has_key="key"` → `field ? 'key'` (PostgreSQL) or `JSON_CONTAINS(field, '"key"')` (MySQL) +/// `field__has_key="key"` → `field ? ?` (PostgreSQL) or `JSON_CONTAINS_PATH(field, 'one', CONCAT('$.', ?))` (MySQL) pub fn json_has_key(ctx: &LookupContext) -> String { match ctx.backend { - Backend::PostgreSQL => format!("({} ? 
'key')", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, '\"key\"')", ctx.column), - Backend::SQLite => format!("json_extract({}, '$.key') IS NOT NULL", ctx.column), + Backend::PostgreSQL => format!("({} ? ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS_PATH({}, 'one', CONCAT('$.', ?))", ctx.column), + Backend::SQLite => format!("json_extract({}, '$.' || ?) IS NOT NULL", ctx.column), } } -/// `field__has_keys=['key1', 'key2']` → `field ?& array['key1', 'key2']` -pub fn json_has_keys(ctx: &LookupContext) -> String { +/// `field__has_any=['key1', 'key2']` → `field ?| ?` (PostgreSQL) or `JSON_CONTAINS_PATH(field, 'one', ?, ?)` (MySQL) +pub fn json_has_any(ctx: &LookupContext) -> String { match ctx.backend { - Backend::PostgreSQL => format!("({} ?& array['key1', 'key2'])", ctx.column), - Backend::MySQL => format!("JSON_CONTAINS({}, '[\"key1\", \"key2\"]')", ctx.column), + Backend::PostgreSQL => format!("({} ?| ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS_PATH({}, 'one', (?))", ctx.column), Backend::SQLite => format!( - "json_extract({}, '$.key1') IS NOT NULL AND json_extract({}, '$.key2') IS NOT NULL", - ctx.column, ctx.column - ), + "json_extract({}, '$.' || ?) IS NOT NULL (?)", + ctx.column + ), // Template + } +} + +/// `field__has_all=['key1', 'key2']` → `field ?& ?` (PostgreSQL) or `JSON_CONTAINS_PATH(field, 'all', ?, ?)` (MySQL) +pub fn json_has_all(ctx: &LookupContext) -> String { + match ctx.backend { + Backend::PostgreSQL => format!("({} ?& ?)", ctx.column), + Backend::MySQL => format!("JSON_CONTAINS_PATH({}, 'all', (?))", ctx.column), + Backend::SQLite => format!( + "json_extract({}, '$.' || ?) 
IS NOT NULL (?)", + ctx.column + ), // Template } } diff --git a/src/query/lookups/lookups.rs b/src/query/lookups/lookups.rs index 394b7d8..2994375 100644 --- a/src/query/lookups/lookups.rs +++ b/src/query/lookups/lookups.rs @@ -91,9 +91,12 @@ pub fn init_registry() { builtin.insert("json", json_lookups::json_cast_transform as LookupFn); builtin.insert("has_key", json_lookups::json_has_key as LookupFn); - builtin.insert("has_keys", json_lookups::json_has_keys as LookupFn); + builtin.insert("has_any", json_lookups::json_has_any as LookupFn); + builtin.insert("has_all", json_lookups::json_has_all as LookupFn); builtin.insert("contains", json_lookups::json_contains as LookupFn); builtin.insert("contained_by", json_lookups::json_contained_by as LookupFn); + builtin.insert("has_all", json_lookups::json_has_all as LookupFn); + builtin.insert("has_any", json_lookups::json_has_any as LookupFn); RwLock::new(LookupRegistry { builtin, @@ -148,6 +151,8 @@ fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxRes }) } +/// Returns the list of all registered lookup names (built-in + custom). +/// Used by the Python layer for available_lookups(). pub fn registered_lookups() -> RyxResult> { let registry = REGISTRY .get() @@ -168,6 +173,65 @@ pub fn registered_lookups() -> RyxResult> { Ok(names) } +/// Returns a static slice of all built-in lookup names. +/// This is used for auto-discovery on the Python side. 
+pub fn all_lookups() -> &'static [&'static str] { + &[ + // Comparison + "exact", + "gt", + "gte", + "lt", + "lte", + // String + "contains", + "icontains", + "startswith", + "istartswith", + "endswith", + "iendswith", + // Null + "isnull", + // Membership + "in", + // Range + "range", + // Date/Time transforms + "date", + "year", + "month", + "day", + "hour", + "minute", + "second", + "week", + "dow", + "quarter", + "time", + "iso_week", + "iso_dow", + // JSON transforms + "key", + "key_text", + "json", + // JSON lookups + "has_key", + "has_any", + "has_all", + "contains", + "contained_by", + ] +} + +/// Returns a static slice of all transform names (date/time + JSON). +/// Used for validation when chaining field transforms. +pub fn all_transforms() -> &'static [&'static str] { + &[ + "date", "year", "month", "day", "hour", "minute", "second", "week", "dow", "quarter", + "time", "iso_week", "iso_dow", "key", "key_text", "json", + ] +} + // ### // Chained lookups support // ### From fbf085df595f822e72046564aab597368ec1b289 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:14:25 +0000 Subject: [PATCH 07/13] fix(compiler): implement list expansion for JSON lookups in SQL compiler --- src/query/compiler/compiler.rs | 41 ++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/src/query/compiler/compiler.rs b/src/query/compiler/compiler.rs index 06e3dad..2768a11 100644 --- a/src/query/compiler/compiler.rs +++ b/src/query/compiler/compiler.rs @@ -489,6 +489,47 @@ fn compile_single_filter( }); } + if lookup == "has_any" || lookup == "has_all" { + let items = match value { + SqlValue::List(v) => v.clone(), + other => vec![other.clone()], + }; + if items.is_empty() { + return Ok("(1 = 0)".into()); + } + + let fragment = if backend == Backend::PostgreSQL { + let op = if lookup == "has_any" { "?|" } else { "?&" }; + format!("{final_column} {op} ?") + } else if backend == Backend::MySQL { + let op = if lookup == "has_any" { + "'one'" 
+ } else { + "'all'" + }; + let ph = std::iter::repeat_n("CONCAT('$.', ?)", items.len()) + .collect::>() + .join(", "); + format!("JSON_CONTAINS_PATH({}, {op}, {ph})", final_column) + } else { + // SQLite: manual expansion + let op = if lookup == "has_any" { " OR " } else { " AND " }; + let ph = std::iter::repeat_n( + format!("json_extract({}, '$.' || ?) IS NOT NULL", final_column), + items.len(), + ) + .collect::>() + .join(op); + ph + }; + values.extend(items); + return Ok(if negated { + format!("NOT ({fragment})") + } else { + fragment + }); + } + if lookup == "range" { let (lo, hi) = match value { SqlValue::List(v) if v.len() == 2 => (v[0].clone(), v[1].clone()), From 9db57200270244328759fcafb7114c9dbeb58950 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:14:30 +0000 Subject: [PATCH 08/13] test(lookup): add integration tests for advanced JSON lookups --- tests/conftest.py | 87 +++++++-------- tests/integration/test_lookups_integration.py | 104 ++++++++++++++---- 2 files changed, 120 insertions(+), 71 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 3c55a8e..b55000c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -224,6 +224,12 @@ def _import_ryx_components(): MemoryCache, configure_cache, invalidate_model, + JSONField, + MigrationRunner, + RyxError, + DatabaseError, + DoesNotExist, + MultipleObjectsReturned, ) from ryx.migrations import MigrationRunner from ryx.exceptions import ( @@ -265,6 +271,7 @@ def _import_ryx_components(): MemoryCache, configure_cache, invalidate_model, + JSONField, MigrationRunner, RyxError, DatabaseError, @@ -272,52 +279,7 @@ def _import_ryx_components(): MultipleObjectsReturned, ) except ImportError: - return ( - False, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - 
None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - ) + return (False,) + (None,) * 36 ( @@ -352,6 +314,7 @@ def _import_ryx_components(): MemoryCache_import, configure_cache_import, invalidate_model_import, + JSONField_import, MigrationRunner_import, RyxError_import, DatabaseError_import, @@ -391,11 +354,33 @@ def _import_ryx_components(): MemoryCache = MemoryCache_import configure_cache = configure_cache_import invalidate_model = invalidate_model_import + JSONField = JSONField_import MigrationRunner = MigrationRunner_import RyxError = RyxError_import DatabaseError = DatabaseError_import DoesNotExist = DoesNotExist_import MultipleObjectsReturned = MultipleObjectsReturned_import +else: + + class Dummy: + def __init__(self, *args, **kwargs): + pass + + def __call__(self, *args, **kwargs): + return Dummy() + + Model = Dummy + CharField = IntField = BooleanField = TextField = DateTimeField = FloatField = ( + DecimalField + ) = UUIDField = EmailField = ForeignKey = Index = Constraint = ValidationError = ( + Q + ) = Count = Sum = Avg = Min = Max = transaction = run_sync = bulk_create = ( + bulk_update + ) = bulk_delete = stream = MemoryCache = configure_cache = invalidate_model = ( + JSONField + ) = MigrationRunner = RyxError = DatabaseError = DoesNotExist = ( + MultipleObjectsReturned + ) = Dummy @pytest.fixture(scope="session") @@ -437,7 +422,7 @@ def setup_database(): asyncio.run(ryx.setup(db_url)) # Run migrations against test models so tables exist for integration tests - runner = MigrationRunner([Author, Post, Tag, PostTag]) + runner = MigrationRunner([Author, Post, Tag, PostTag, Profile]) asyncio.run(runner.migrate()) yield @@ -512,6 +497,14 @@ class Meta: tag = ForeignKey(Tag, on_delete="CASCADE") +class Profile(Model): + class Meta: + table_name = "test_profiles" + + user_name = CharField(max_length=100) + data = JSONField(null=True) + + @pytest.fixture(scope="function", autouse=True) async def clean_tables(): """Clean all test 
tables before each test.""" diff --git a/tests/integration/test_lookups_integration.py b/tests/integration/test_lookups_integration.py index 956195b..8eb5526 100644 --- a/tests/integration/test_lookups_integration.py +++ b/tests/integration/test_lookups_integration.py @@ -187,39 +187,95 @@ async def test_different_years_same_month(self, clean_tables): assert len(results) == 3 -class TestJSONFieldLookups: - """Integration tests for JSON field lookups.""" +class TestJSONAdvancedLookupsIntegration: + """Integration tests for advanced JSON lookups (has_key, has_any, has_all).""" - @pytest.mark.asyncio - async def test_json_has_key_lookup(self, clean_tables): - """Test metadata__has_key lookup.""" - # Create author with bio as JSON-like text (using TextField for simplicity) - await Author.objects.create( - name="Author with Bio", - email="author@test.com", - bio='{"verified": true, "role": "admin"}', + @pytest.fixture + async def profiles_with_data(self, clean_tables): + """Create profiles with various JSON data for testing.""" + from conftest import Profile + + await Profile.objects.create( + user_name="User 1", + data={"verified": True, "role": "admin", "tags": ["beta", "staff"]}, ) - await Author.objects.create( - name="Author without Bio", email="author2@test.com", bio=None + await Profile.objects.create( + user_name="User 2", + data={"verified": True, "role": "user", "tags": ["beta"]}, ) + await Profile.objects.create( + user_name="User 3", data={"role": "guest", "tags": ["new"]} + ) + await Profile.objects.create(user_name="User 4", data=None) + + @pytest.mark.asyncio + async def test_has_key_lookup(self, profiles_with_data): + """Test has_key lookup.""" + from conftest import Profile + + # User 1, 2, 3 have 'role' + results = await Profile.objects.filter(data__has_key="role") + assert len(results) == 3 + + # Only User 1, 2 have 'verified' + results = await Profile.objects.filter(data__has_key="verified") + assert len(results) == 2 - # Note: has_key requires 
actual JSON field - this tests TextField behavior
-        # The lookup may not work as expected with TextField
-        # This test verifies the lookup doesn't error but may not filter correctly
+        # No one has 'missing_key'
+        results = await Profile.objects.filter(data__has_key="missing_key")
+        assert len(results) == 0
 
     @pytest.mark.asyncio
-    async def test_json_key_lookups_text_field(self, clean_tables):
-        """Test JSON key lookups work on TextField (for compatibility)."""
-        # Create authors with pseudo-JSON in text fields
-        await Author.objects.create(
-            name="Author 1", email="a1@test.com", bio='{"priority": "high"}'
+    async def test_has_any_lookup(self, profiles_with_data):
+        """Test has_any lookup."""
+        from conftest import Profile
+
+        # User 1, 2, 3 have either 'role' or 'verified'
+        results = await Profile.objects.filter(data__has_any=["role", "verified"])
+        assert len(results) == 3
+
+        # User 1, 2 have either 'verified' or 'admin_status'
+        results = await Profile.objects.filter(
+            data__has_any=["verified", "admin_status"]
         )
-        await Author.objects.create(
-            name="Author 2", email="a2@test.com", bio='{"priority": "low"}'
+        assert len(results) == 2
+
+        # No one has either 'missing1' or 'missing2'
+        results = await Profile.objects.filter(data__has_any=["missing1", "missing2"])
+        assert len(results) == 0
+
+    @pytest.mark.asyncio
+    async def test_has_all_lookup(self, profiles_with_data):
+        """Test has_all lookup."""
+        from conftest import Profile
+
+        # User 1, 2 have both 'role' and 'verified'
+        results = await Profile.objects.filter(data__has_all=["role", "verified"])
+        assert len(results) == 2
+
+        # Users 1 and 2 both have all of 'role', 'verified' and 'tags'
+        results = await Profile.objects.filter(
+            data__has_all=["role", "verified", "tags"]
+        )
+        assert len(results) == 2
+
+        # No one has both 'verified' and 'missing_key'
+        results = await Profile.objects.filter(
+            data__has_all=["verified", "missing_key"]
         )
+        assert len(results) == 0
+
+    
@pytest.mark.asyncio + async def test_json_lookup_negation(self, profiles_with_data): + """Test negated JSON lookups.""" + from conftest import Profile - # This tests that the lookup mechanism works - # Actual JSON extraction requires JSONField + # Not having 'verified' -> User 3 and User 4 + results = await Profile.objects.exclude(data__has_key="verified") + assert len(results) == 2 + titles = [r.user_name for r in results] + assert "User 3" in titles + assert "User 4" in titles class TestJSONDynamicKeyLookups: From 1149d48303fdee893000cbca3b776611bd7a739e Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:18:12 +0000 Subject: [PATCH 09/13] fix(lookup): include type hints and re-exports for auto-discovery --- ryx/ryx_core.pyi | 10 ++++++++++ src/query/lookups/mod.rs | 2 ++ 2 files changed, 12 insertions(+) diff --git a/ryx/ryx_core.pyi b/ryx/ryx_core.pyi index 245dcfc..5387ec5 100644 --- a/ryx/ryx_core.pyi +++ b/ryx/ryx_core.pyi @@ -129,6 +129,16 @@ def available_lookups() -> list[str]: ... +def list_lookups() -> list[str]: + """Return all registered lookup names (built-in + custom).""" + ... + + +def list_transforms() -> list[str]: + """Return all registered transform names (built-in + custom).""" + ... + + def is_connected() -> bool: """Return ``True`` if ``setup()`` has been called successfully. 
diff --git a/src/query/lookups/mod.rs b/src/query/lookups/mod.rs index b3bd303..fc4fe4d 100644 --- a/src/query/lookups/mod.rs +++ b/src/query/lookups/mod.rs @@ -32,3 +32,5 @@ pub use lookups::init_registry; pub use lookups::register_custom; pub use lookups::registered_lookups; pub use lookups::resolve; +pub use lookups::all_lookups; +pub use lookups::all_transforms; From cd25df042e60df35430247b67212fc803f999363 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 14:18:17 +0000 Subject: [PATCH 10/13] test(lookup): update unit tests to reflect JSON lookup renaming --- tests/unit/test_lookups.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_lookups.py b/tests/unit/test_lookups.py index f41aa55..2fa593c 100644 --- a/tests/unit/test_lookups.py +++ b/tests/unit/test_lookups.py @@ -167,9 +167,9 @@ def test_has_key_lookup(self): """Test has_key lookup.""" assert _parse_lookup_key("metadata__has_key") == ("metadata", "has_key") - def test_has_keys_lookup(self): - """Test has_keys lookup.""" - assert _parse_lookup_key("metadata__has_keys") == ("metadata", "has_keys") + # def test_has_keys_lookup(self): + # """Test has_keys lookup.""" + # assert _parse_lookup_key("metadata__has_keys") == ("metadata", "has_keys") def test_json_contains_lookup(self): """Test JSON contains lookup.""" @@ -266,7 +266,7 @@ def test_json_lookups_present(self): "key_text", "json", "has_key", - "has_keys", + # "has_keys", "contains", "contained_by", } From 5bff8dd4954ed82720bed6d1aa996d0abe102914 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 16:55:03 +0000 Subject: [PATCH 11/13] refactor: extract query engine into standalone ryx-query crate --- .gitignore | 3 +- Cargo.toml | 6 +- ryx-query/.DS_Store | Bin 0 -> 6148 bytes ryx-query/Cargo.toml | 20 ++++ ryx-query/benches/query_bench.rs | 103 ++++++++++++++++++ {src/query => ryx-query/src}/ast.rs | 2 +- ryx-query/src/backend.rs | 24 ++++ .../src}/compiler/compiler.rs | 89 
+++++++-------- .../src}/compiler/helpers.rs | 2 +- {src/query => ryx-query/src}/compiler/mod.rs | 0 ryx-query/src/errors.rs | 22 ++++ ryx-query/src/lib.rs | 8 ++ .../src}/lookups/common_lookups.rs | 6 +- .../src}/lookups/date_lookups.rs | 6 +- .../src}/lookups/json_lookups.rs | 16 +-- .../src}/lookups/lookups.rs | 44 ++++---- {src/query => ryx-query/src}/lookups/mod.rs | 4 +- src/errors.rs | 39 ++----- src/executor.rs | 2 +- src/lib.rs | 32 +++--- src/pool.rs | 29 +---- src/query/mod.rs | 13 --- src/transaction.rs | 6 +- 23 files changed, 300 insertions(+), 176 deletions(-) create mode 100644 ryx-query/.DS_Store create mode 100644 ryx-query/Cargo.toml create mode 100644 ryx-query/benches/query_bench.rs rename {src/query => ryx-query/src}/ast.rs (99%) create mode 100644 ryx-query/src/backend.rs rename {src/query => ryx-query/src}/compiler/compiler.rs (89%) rename {src/query => ryx-query/src}/compiler/helpers.rs (98%) rename {src/query => ryx-query/src}/compiler/mod.rs (100%) create mode 100644 ryx-query/src/errors.rs create mode 100644 ryx-query/src/lib.rs rename {src/query => ryx-query/src}/lookups/common_lookups.rs (95%) rename {src/query => ryx-query/src}/lookups/date_lookups.rs (98%) rename {src/query => ryx-query/src}/lookups/json_lookups.rs (92%) rename {src/query => ryx-query/src}/lookups/lookups.rs (91%) rename {src/query => ryx-query/src}/lookups/mod.rs (100%) delete mode 100644 src/query/mod.rs diff --git a/.gitignore b/.gitignore index daf7a07..8d01d7f 100644 --- a/.gitignore +++ b/.gitignore @@ -231,4 +231,5 @@ ROADMAP*.md libryx_core* *.lock -tests/test_compiler.rs \ No newline at end of file +tests/test_compiler.rs +*.txt \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 0f05266..213363b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "Ryx" -version = "0.1.0" +version = "0.1.1" edition = "2024" description = "Ryx ORM — a Django-style Python ORM powered by sqlx (Rust) via PyO3" license = "MIT OR 
Apache-2.0" @@ -32,6 +32,7 @@ mysql = ["sqlx/mysql"] sqlite = ["sqlx/sqlite"] [dependencies] +ryx-query = { path = "./ryx-query" } # ── PyO3 ────────────────────────────────────────────────────────────────────── # "extension-module" is required when building a cdylib for Python import. # Without it, PyO3 tries to link against libpython, which breaks on Linux/macOS @@ -85,4 +86,5 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } [dev-dependencies] # tokio test macro for async unit tests -tokio = { version = "1.40", features = ["full", "test-util"] } \ No newline at end of file +tokio = { version = "1.40", features = ["full", "test-util"] } +criterion = { version = "0.5", features = ["async_tokio"] } diff --git a/ryx-query/.DS_Store b/ryx-query/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..5db49a5eb2a86c8ab6beca1ca4fb5708616c05bb GIT binary patch literal 6148 zcmeHK&rcIU6n;|>S`b;FC}K=D^kM=*1VYjnLoFC3#y}`R5U}oUhqCE*rrBLUAS6BO zN$>svUOkz3@#w{?H{+k6S5NxpM};kFI2oh+l9}(#y!XwIZ?iMI06_BFrE35N03@*p zZ0*46Cnn;eoUnxM5kVry*a8Z{oW~v8JCPlC#0ta;{A~(|cefpEd~X>zaP0l%Hb%s; zkjFCmO&WRt9WVnH_)rE8qu^ofKt;XtKRB;`%L$QF1ux1WLrt6(2yhmsDeHMtRvUzR zH59^VZbF<-$5$T7wReM>?a39twridngkxhLNTQ|nOj~=hJ=vMuC~WcFLTH8CLEiG$ zL~Dt2&u;jZz3kH6qLH5AEVNukeMxXj6jRpkyR5`_^L(3?0?93?Kr)p~6^(&|gYoRt zh&hqX9*&p?+3~Rvb8=$p@GzC^&s@E}Sgd$e#-HH&(FmTc(z-M@;3+zhJ-N<~-V=G> z*wZJ&XzVK0(RsG(T({BF+t-us>F@79e_`O_;HAN$(PvvurR*yx&z1O zp0~gQH*`5$T@O&}xsX=p*e1srcTnf~ng|8U)DI9(a8_0=-?vzq=Jz%{%ckBu%kwbg z)fLy_Os%|I4vWnNpNb(f(_fU5ncUrmb457@WR!z9U%KIGB$>D61?ev_&S)x=Q!Pm0 zEjbJ~;T|kQ1@_??ynvVR2HwF(_zYi3J2A)*877nD2AL*z$Q*e<9I{1R;;W$(jR$=> zIwc9P9eL0@1)q7Gm9f9UiBeA0($r+k1Veh@2V6(K8eMheG7+*l)^o zAUTC`9V-wk5LKW>H4Sn9-}?3cf0PurVg+Ia{wW1W>ug~*k6Y4r>xs>YyVk_=2#YY0 pS63)0Sn2CnI^rrGz# Backend { + let url_lower = url.to_lowercase(); + if url_lower.contains("postgres") { + Backend::PostgreSQL + } else if url_lower.contains("mysql") { + Backend::MySQL + } else if url_lower.contains("sqlite") 
{ + Backend::SQLite + } else { + Backend::PostgreSQL // default + } +} diff --git a/src/query/compiler/compiler.rs b/ryx-query/src/compiler/compiler.rs similarity index 89% rename from src/query/compiler/compiler.rs rename to ryx-query/src/compiler/compiler.rs index 2768a11..98faa35 100644 --- a/src/query/compiler/compiler.rs +++ b/ryx-query/src/compiler/compiler.rs @@ -7,15 +7,15 @@ // See compiler/mod.rs for the module structure. // ### -use crate::errors::{RyxError, RyxResult}; -use crate::pool::Backend; -use crate::query::ast::{ +use crate::ast::{ AggFunc, AggregateExpr, FilterNode, JoinClause, JoinKind, QNode, QueryNode, QueryOperation, SortDirection, SqlValue, }; -use crate::query::lookups::date_lookups as date; -use crate::query::lookups::json_lookups as json; -use crate::query::lookups::{self, LookupContext}; +use crate::backend::Backend; +use crate::errors::{QueryError, QueryResult}; +use crate::lookups::date_lookups as date; +use crate::lookups::json_lookups as json; +use crate::lookups::{self, LookupContext}; pub use super::helpers::{apply_like_wrapping, qualified_col, split_qualified, KNOWN_TRANSFORMS}; @@ -27,7 +27,7 @@ pub struct CompiledQuery { pub values: Vec, } -pub fn compile(node: &QueryNode) -> RyxResult { +pub fn compile(node: &QueryNode) -> QueryResult { let mut values: Vec = Vec::new(); let sql = match &node.operation { QueryOperation::Select { columns } => { @@ -49,7 +49,7 @@ fn compile_select( node: &QueryNode, columns: Option<&[String]>, values: &mut Vec, -) -> RyxResult { +) -> QueryResult { let base_cols = match columns { None => "*".to_string(), Some(cols) => cols @@ -129,9 +129,9 @@ fn compile_select( Ok(sql) } -fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult { +fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> QueryResult { if node.annotations.is_empty() { - return Err(RyxError::Internal( + return Err(QueryError::Internal( "aggregate() called with no aggregate expressions".into(), )); } @@ -153,7 +153,7 
@@ fn compile_aggregate(node: &QueryNode, values: &mut Vec) -> RyxResult< Ok(sql) } -fn compile_count(node: &QueryNode, values: &mut Vec) -> RyxResult { +fn compile_count(node: &QueryNode, values: &mut Vec) -> QueryResult { let mut sql = format!("SELECT COUNT(*) FROM {}", helpers::quote_col(&node.table)); if !node.joins.is_empty() { sql.push(' '); @@ -168,7 +168,7 @@ fn compile_count(node: &QueryNode, values: &mut Vec) -> RyxResult) -> RyxResult { +fn compile_delete(node: &QueryNode, values: &mut Vec) -> QueryResult { let mut sql = format!("DELETE FROM {}", helpers::quote_col(&node.table)); let where_sql = compile_where_combined(&node.filters, node.q_filter.as_ref(), values, node.backend)?; @@ -183,9 +183,9 @@ fn compile_update( node: &QueryNode, assignments: &[(String, SqlValue)], values: &mut Vec, -) -> RyxResult { +) -> QueryResult { if assignments.is_empty() { - return Err(RyxError::Internal("UPDATE with no assignments".into())); + return Err(QueryError::Internal("UPDATE with no assignments".into())); } let set: Vec = assignments .iter() @@ -213,9 +213,9 @@ fn compile_insert( cols_vals: &[(String, SqlValue)], returning_id: bool, values: &mut Vec, -) -> RyxResult { +) -> QueryResult { if cols_vals.is_empty() { - return Err(RyxError::Internal("INSERT with no values".into())); + return Err(QueryError::Internal("INSERT with no values".into())); } let (cols, vals): (Vec<_>, Vec<_>) = cols_vals.iter().cloned().unzip(); values.extend(vals); @@ -318,7 +318,7 @@ pub fn compile_agg_cols(anns: &[AggregateExpr]) -> String { .join(", ") } -pub fn compile_order_by(clauses: &[crate::query::ast::OrderByClause]) -> String { +pub fn compile_order_by(clauses: &[crate::ast::OrderByClause]) -> String { clauses .iter() .map(|c| { @@ -337,7 +337,7 @@ fn compile_where_combined( q: Option<&QNode>, values: &mut Vec, backend: Backend, -) -> RyxResult { +) -> QueryResult { let flat = if filters.is_empty() { None } else { @@ -356,7 +356,7 @@ fn compile_where_combined( }) } -pub fn 
compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> RyxResult { +pub fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> QueryResult { match q { QNode::Leaf { field, @@ -368,14 +368,14 @@ pub fn compile_q(q: &QNode, values: &mut Vec, backend: Backend) -> Ryx let parts: Vec = children .iter() .map(|c| compile_q(c, values, backend)) - .collect::>()?; + .collect::>()?; Ok(format!("({})", parts.join(" AND "))) } QNode::Or(children) => { let parts: Vec = children .iter() .map(|c| compile_q(c, values, backend)) - .collect::>()?; + .collect::>()?; Ok(format!("({})", parts.join(" OR "))) } QNode::Not(child) => { @@ -389,11 +389,11 @@ fn compile_filters( filters: &[FilterNode], values: &mut Vec, backend: Backend, -) -> RyxResult { +) -> QueryResult { let parts: Vec = filters .iter() .map(|f| compile_single_filter(&f.field, &f.lookup, &f.value, f.negated, values, backend)) - .collect::>()?; + .collect::>()?; Ok(parts.join(" AND ")) } @@ -404,7 +404,7 @@ fn compile_single_filter( negated: bool, values: &mut Vec, backend: Backend, -) -> RyxResult { +) -> QueryResult { let (base_column, applied_transforms, json_key) = if field.contains("__") { let parts: Vec<&str> = field.split("__").collect(); @@ -533,7 +533,7 @@ fn compile_single_filter( if lookup == "range" { let (lo, hi) = match value { SqlValue::List(v) if v.len() == 2 => (v[0].clone(), v[1].clone()), - _ => return Err(RyxError::Internal("range needs exactly 2 values".into())), + _ => return Err(QueryError::Internal("range needs exactly 2 values".into())), }; values.push(lo); values.push(hi); @@ -557,24 +557,25 @@ fn compile_single_filter( if KNOWN_TRANSFORMS.contains(&lookup) { let transform_fn = match lookup { - "date" => date::date_transform as crate::query::lookups::LookupFn, - "year" => date::year_transform as crate::query::lookups::LookupFn, - "month" => date::month_transform as crate::query::lookups::LookupFn, - "day" => date::day_transform as crate::query::lookups::LookupFn, - "hour" => 
date::hour_transform as crate::query::lookups::LookupFn, - "minute" => date::minute_transform as crate::query::lookups::LookupFn, - "second" => date::second_transform as crate::query::lookups::LookupFn, - "week" => date::week_transform as crate::query::lookups::LookupFn, - "dow" => date::dow_transform as crate::query::lookups::LookupFn, - "quarter" => date::quarter_transform as crate::query::lookups::LookupFn, - "time" => date::time_transform as crate::query::lookups::LookupFn, - "iso_week" => date::iso_week_transform as crate::query::lookups::LookupFn, - "iso_dow" => date::iso_dow_transform as crate::query::lookups::LookupFn, - "key" => json::json_key_transform as crate::query::lookups::LookupFn, - "key_text" => json::json_key_text_transform as crate::query::lookups::LookupFn, - "json" => json::json_cast_transform as crate::query::lookups::LookupFn, + "date" => date::date_transform as crate::lookups::LookupFn, + "year" => date::year_transform as crate::lookups::LookupFn, + "month" => date::month_transform as crate::lookups::LookupFn, + "day" => date::day_transform as crate::lookups::LookupFn, + "hour" => date::hour_transform as crate::lookups::LookupFn, + "minute" => date::minute_transform as crate::lookups::LookupFn, + "second" => date::second_transform as crate::lookups::LookupFn, + "week" => date::week_transform as crate::lookups::LookupFn, + "dow" => date::dow_transform as crate::lookups::LookupFn, + "quarter" => date::quarter_transform as crate::lookups::LookupFn, + "time" => date::time_transform as crate::lookups::LookupFn, + "iso_week" => date::iso_week_transform as crate::lookups::LookupFn, + "iso_dow" => date::iso_dow_transform as crate::lookups::LookupFn, + "key" => json::json_key_transform as crate::lookups::LookupFn, + "key_text" => json::json_key_text_transform as crate::lookups::LookupFn, + "json" => json::json_cast_transform as crate::lookups::LookupFn, + _ => { - return Err(RyxError::UnknownLookup { + return Err(QueryError::UnknownLookup { field: 
field.to_string(), lookup: lookup.to_string(), }) @@ -597,7 +598,7 @@ fn compile_single_filter( #[cfg(test)] mod tests { use super::*; - use crate::query::ast::*; + use crate::ast::*; #[test] fn test_bare_select() { @@ -699,6 +700,6 @@ mod tests { } fn init_registry() { - crate::query::lookups::init_registry(); + crate::lookups::init_registry(); } } diff --git a/src/query/compiler/helpers.rs b/ryx-query/src/compiler/helpers.rs similarity index 98% rename from src/query/compiler/helpers.rs rename to ryx-query/src/compiler/helpers.rs index 9d039db..27d18a0 100644 --- a/src/query/compiler/helpers.rs +++ b/ryx-query/src/compiler/helpers.rs @@ -9,7 +9,7 @@ // - Other compilation utilities // ### -use crate::query::ast::SqlValue; +use crate::ast::SqlValue; /// Double-quote a simple identifier (column or table name). pub fn quote_col(s: &str) -> String { diff --git a/src/query/compiler/mod.rs b/ryx-query/src/compiler/mod.rs similarity index 100% rename from src/query/compiler/mod.rs rename to ryx-query/src/compiler/mod.rs diff --git a/ryx-query/src/errors.rs b/ryx-query/src/errors.rs new file mode 100644 index 0000000..940f1b6 --- /dev/null +++ b/ryx-query/src/errors.rs @@ -0,0 +1,22 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum QueryError { + #[error("Unknown lookup: '{lookup}' on field '{field}'")] + UnknownLookup { field: String, lookup: String }, + + #[error("Unknown field '{field}' on model '{model}'")] + UnknownField { field: String, model: String }, + + #[error("Type mismatch for field '{field}': expected {expected}, got {got}")] + TypeMismatch { + field: String, + expected: String, + got: String, + }, + + #[error("Internal query error: {0}")] + Internal(String), +} + +pub type QueryResult = Result; diff --git a/ryx-query/src/lib.rs b/ryx-query/src/lib.rs new file mode 100644 index 0000000..302add8 --- /dev/null +++ b/ryx-query/src/lib.rs @@ -0,0 +1,8 @@ +pub mod ast; +pub mod backend; +pub mod compiler; +pub mod errors; +pub mod lookups; + +pub 
use backend::Backend; +pub use errors::{QueryError, QueryResult}; diff --git a/src/query/lookups/common_lookups.rs b/ryx-query/src/lookups/common_lookups.rs similarity index 95% rename from src/query/lookups/common_lookups.rs rename to ryx-query/src/lookups/common_lookups.rs index 880d2b1..ade130e 100644 --- a/src/query/lookups/common_lookups.rs +++ b/ryx-query/src/lookups/common_lookups.rs @@ -6,10 +6,10 @@ // Contains comparison and string lookups (exact, gt, contains, etc.) // ### -use crate::query::lookups::LookupContext; +use crate::lookups::LookupContext; -pub use crate::query::lookups::LookupFn; -pub use crate::query::lookups::PythonLookup; +pub use crate::lookups::LookupFn; +pub use crate::lookups::PythonLookup; /// `field__exact=value` → `field = ?` /// diff --git a/src/query/lookups/date_lookups.rs b/ryx-query/src/lookups/date_lookups.rs similarity index 98% rename from src/query/lookups/date_lookups.rs rename to ryx-query/src/lookups/date_lookups.rs index 323c4d8..bdd4bbf 100644 --- a/src/query/lookups/date_lookups.rs +++ b/ryx-query/src/lookups/date_lookups.rs @@ -7,10 +7,10 @@ // These are used for chained lookups like `created_at__year__gte=2024` // ### -use crate::pool::Backend; -use crate::query::lookups::LookupContext; +use crate::backend::Backend; +use crate::lookups::LookupContext; -pub use crate::query::lookups::LookupFn; +pub use crate::lookups::LookupFn; /// Apply a date/time field transformation. 
/// Returns SQL like "DATE(col)" or "EXTRACT(YEAR FROM col)" diff --git a/src/query/lookups/json_lookups.rs b/ryx-query/src/lookups/json_lookups.rs similarity index 92% rename from src/query/lookups/json_lookups.rs rename to ryx-query/src/lookups/json_lookups.rs index 5c5f591..be35f02 100644 --- a/src/query/lookups/json_lookups.rs +++ b/ryx-query/src/lookups/json_lookups.rs @@ -7,10 +7,10 @@ // These are used for chained lookups like `metadata__key__priority__exact="high"` // ### -use crate::pool::Backend; -use crate::query::lookups::LookupContext; +use crate::backend::Backend; +use crate::lookups::LookupContext; -pub use crate::query::lookups::LookupFn; +pub use crate::lookups::LookupFn; /// Apply a JSON field transformation. /// Returns SQL like `(col->>'key')` or `JSON_UNQUOTE(JSON_EXTRACT(col, '$.key'))` @@ -103,10 +103,7 @@ pub fn json_has_any(ctx: &LookupContext) -> String { match ctx.backend { Backend::PostgreSQL => format!("({} ?| ?)", ctx.column), Backend::MySQL => format!("JSON_CONTAINS_PATH({}, 'one', (?))", ctx.column), - Backend::SQLite => format!( - "json_extract({}, '$.' || ?) IS NOT NULL (?)", - ctx.column - ), // Template + Backend::SQLite => format!("json_extract({}, '$.' || ?) IS NOT NULL (?)", ctx.column), // Template } } @@ -115,10 +112,7 @@ pub fn json_has_all(ctx: &LookupContext) -> String { match ctx.backend { Backend::PostgreSQL => format!("({} ?& ?)", ctx.column), Backend::MySQL => format!("JSON_CONTAINS_PATH({}, 'all', (?))", ctx.column), - Backend::SQLite => format!( - "json_extract({}, '$.' || ?) IS NOT NULL (?)", - ctx.column - ), // Template + Backend::SQLite => format!("json_extract({}, '$.' || ?) 
IS NOT NULL (?)", ctx.column), // Template } } diff --git a/src/query/lookups/lookups.rs b/ryx-query/src/lookups/lookups.rs similarity index 91% rename from src/query/lookups/lookups.rs rename to ryx-query/src/lookups/lookups.rs index 2994375..c781ace 100644 --- a/src/query/lookups/lookups.rs +++ b/ryx-query/src/lookups/lookups.rs @@ -10,13 +10,14 @@ use std::collections::HashMap; use std::sync::{OnceLock, RwLock}; -use crate::errors::{RyxError, RyxResult}; -use crate::pool::Backend; +// Removed unused SqlValue import +use crate::backend::Backend; +use crate::errors::{QueryError, QueryResult}; // Re-export submodules -pub use crate::query::lookups::common_lookups; -pub use crate::query::lookups::date_lookups; -pub use crate::query::lookups::json_lookups; +pub use crate::lookups::common_lookups; +pub use crate::lookups::date_lookups; +pub use crate::lookups::json_lookups; // ### // Core types @@ -109,14 +110,17 @@ pub fn init_registry() { // Registry public API // ### -pub fn register_custom(name: impl Into, sql_template: impl Into) -> RyxResult<()> { +pub fn register_custom( + name: impl Into, + sql_template: impl Into, +) -> QueryResult<()> { let registry = REGISTRY .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + .ok_or_else(|| QueryError::Internal("Lookup registry not initialized".into()))?; let mut guard = registry .write() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + .map_err(|e| QueryError::Internal(format!("Registry lock poisoned: {e}")))?; guard.custom.insert( name.into(), @@ -128,14 +132,14 @@ pub fn register_custom(name: impl Into, sql_template: impl Into) Ok(()) } -fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { +fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> QueryResult { let registry = REGISTRY .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + .ok_or_else(|| 
QueryError::Internal("Lookup registry not initialized".into()))?; let guard = registry .read() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + .map_err(|e| QueryError::Internal(format!("Registry lock poisoned: {e}")))?; if let Some(custom) = guard.custom.get(lookup_name) { return Ok(custom.sql_template.replace("{col}", &ctx.column)); @@ -145,7 +149,7 @@ fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxRes return Ok(lookup_fn(ctx)); } - Err(RyxError::UnknownLookup { + Err(QueryError::UnknownLookup { field: field.to_string(), lookup: lookup_name.to_string(), }) @@ -153,20 +157,20 @@ fn resolve_simple(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxRes /// Returns the list of all registered lookup names (built-in + custom). /// Used by the Python layer for available_lookups(). -pub fn registered_lookups() -> RyxResult> { +pub fn registered_lookups() -> QueryResult> { let registry = REGISTRY .get() - .ok_or_else(|| RyxError::Internal("Lookup registry not initialized".into()))?; + .ok_or_else(|| QueryError::Internal("Lookup registry not initialized".into()))?; let guard = registry .read() - .map_err(|e| RyxError::Internal(format!("Registry lock poisoned: {e}")))?; + .map_err(|e| QueryError::Internal(format!("Registry lock poisoned: {e}")))?; let mut names: Vec = guard .builtin .keys() .copied() - .map(|k| k.to_string()) + .map(|k: &'static str| k.to_string()) .chain(guard.custom.keys().cloned()) .collect(); names.sort(); @@ -242,7 +246,7 @@ fn handle_sqlite_transform_lookup( _transform: &str, lookup_name: &str, ctx: &LookupContext, -) -> RyxResult { +) -> QueryResult { let is_numeric_comparison = matches!(lookup_name, "gt" | "gte" | "lt" | "lte" | "exact"); if is_numeric_comparison && ctx.column.contains("AS TEXT)") { @@ -270,7 +274,7 @@ fn add_sqlite_cast_for_transform(fragment: &str, lookup: &str) -> String { } } -pub fn resolve(field: &str, lookup_name: &str, ctx: &LookupContext) -> RyxResult { 
+pub fn resolve(field: &str, lookup_name: &str, ctx: &LookupContext) -> QueryResult { if !lookup_name.contains("__") { if ctx.json_key.is_some() { let mut column = format!("\"{}\"", field); @@ -381,7 +385,7 @@ pub fn apply_transform( column: &str, backend: Backend, key: Option<&str>, -) -> RyxResult { +) -> QueryResult { if let Some(sql) = date_lookups::apply_date_transform(name, column, backend) { return Ok(sql); } @@ -393,7 +397,7 @@ pub fn apply_transform( return Ok(format!("DATE({})", column)); } - Err(RyxError::UnknownLookup { + Err(QueryError::UnknownLookup { field: column.to_string(), lookup: name.to_string(), }) diff --git a/src/query/lookups/mod.rs b/ryx-query/src/lookups/mod.rs similarity index 100% rename from src/query/lookups/mod.rs rename to ryx-query/src/lookups/mod.rs index fc4fe4d..ca1b297 100644 --- a/src/query/lookups/mod.rs +++ b/ryx-query/src/lookups/mod.rs @@ -27,10 +27,10 @@ pub use lookups::LookupFn; pub use lookups::PythonLookup; // Re-export functions from lookups.rs +pub use lookups::all_lookups; +pub use lookups::all_transforms; pub use lookups::apply_transform; pub use lookups::init_registry; pub use lookups::register_custom; pub use lookups::registered_lookups; pub use lookups::resolve; -pub use lookups::all_lookups; -pub use lookups::all_transforms; diff --git a/src/errors.rs b/src/errors.rs index 208f332..a9b78f4 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -23,6 +23,7 @@ use pyo3::exceptions::{PyRuntimeError, PyValueError}; use pyo3::prelude::*; +use ryx_query::QueryError; use thiserror::Error; /// The master error type for the entire Ryx ORM. @@ -39,6 +40,10 @@ pub enum RyxError { #[error("Database error: {0}")] Database(#[from] sqlx::Error), + /// Errors from the query compiler. + #[error("Query error: {0}")] + Query(#[from] QueryError), + /// Raised when `.get()` or `.first()` finds no matching row. /// Mirrors Django's `Model.DoesNotExist`. 
#[error("No matching object found for the given query")] @@ -60,27 +65,6 @@ pub enum RyxError { #[error("Connection pool already initialized")] PoolAlreadyInitialized, - // Query building errors - /// Raised when the Python side passes an unrecognized lookup suffix. - /// Example: `filter(age__foobar=42)` where "foobar" is not a registered - /// lookup. We include the lookup name so the error is actionable. - #[error("Unknown lookup: '{lookup}' on field '{field}'")] - UnknownLookup { field: String, lookup: String }, - - /// Raised when a field name referenced in a filter/order_by doesn't exist - /// on the model's declared schema. - #[error("Unknown field '{field}' on model '{model}'")] - UnknownField { field: String, model: String }, - - /// Raised when a Python value cannot be converted to the expected SQL type. - /// Example: passing a string where an integer is expected. - #[error("Type mismatch for field '{field}': expected {expected}, got {got}")] - TypeMismatch { - field: String, - expected: String, - got: String, - }, - // Runtime / internal errors /// Catch-all for internal errors that shouldn't reach users but are /// wrapped here so we don't use `.unwrap()` anywhere in the codebase. @@ -107,13 +91,12 @@ pub enum RyxError { impl From for PyErr { fn from(err: RyxError) -> PyErr { match &err { - // User errors (bad field names, bad lookups, bad types) → - // ValueError so Python linters/type checkers can catch them - RyxError::UnknownLookup { .. } - | RyxError::UnknownField { .. } - | RyxError::TypeMismatch { .. } => PyValueError::new_err(err.to_string()), - - // Everything else → RuntimeError with full context message + RyxError::Query(qe) => match qe { + QueryError::UnknownLookup { .. } + | QueryError::UnknownField { .. } + | QueryError::TypeMismatch { .. 
} => PyValueError::new_err(qe.to_string()), + QueryError::Internal(_) => PyRuntimeError::new_err(qe.to_string()), + }, _ => PyRuntimeError::new_err(err.to_string()), } } diff --git a/src/executor.rs b/src/executor.rs index 6c0cbbf..585f293 100644 --- a/src/executor.rs +++ b/src/executor.rs @@ -45,7 +45,7 @@ use tracing::{debug, instrument}; use crate::errors::{RyxError, RyxResult}; use crate::pool; -use crate::query::{ast::SqlValue, compiler::CompiledQuery}; +use ryx_query::{ast::SqlValue, compiler::CompiledQuery}; use crate::transaction; // ### diff --git a/src/lib.rs b/src/lib.rs index 7b7ac49..0a7e04c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -10,16 +10,16 @@ use tokio::sync::Mutex as TokioMutex; pub mod errors; pub mod executor; pub mod pool; -pub mod query; pub mod transaction; +use crate::errors::RyxError; use crate::pool::PoolConfig; -use crate::query::ast::{ +use ryx_query::ast::{ AggFunc, AggregateExpr, FilterNode, JoinClause, JoinKind, OrderByClause, QNode, QueryNode, QueryOperation, SqlValue, }; -use crate::query::compiler; -use crate::query::lookups; +use ryx_query::compiler; +use ryx_query::lookups; use crate::transaction::TransactionHandle; // ### @@ -59,12 +59,12 @@ fn setup<'py>( #[pyfunction] fn register_lookup(name: String, sql_template: String) -> PyResult<()> { - lookups::register_custom(name, sql_template).map_err(PyErr::from) + lookups::register_custom(name, sql_template).map_err(RyxError::from).map_err(PyErr::from) } #[pyfunction] fn available_lookups() -> PyResult> { - lookups::registered_lookups().map_err(PyErr::from) + lookups::registered_lookups().map_err(RyxError::from).map_err(PyErr::from) } #[pyfunction] @@ -134,7 +134,7 @@ impl PyQueryBuilder { #[new] fn new(table: String) -> PyResult { // Get the backend from the pool at QueryBuilder creation time - let backend = pool::get_backend().unwrap_or(crate::pool::Backend::PostgreSQL); + let backend = pool::get_backend().unwrap_or(ryx_query::Backend::PostgreSQL); Ok(Self { node: 
QueryNode::select(table).with_backend(backend), @@ -254,7 +254,7 @@ impl PyQueryBuilder { // # Execution methods fn fetch_all<'py>(&self, py: Python<'py>) -> PyResult> { - let compiled = compiler::compile(&self.node).map_err(PyErr::from)?; + let compiled = compiler::compile(&self.node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let rows = executor::fetch_all(compiled).await.map_err(PyErr::from)?; Python::attach(|py| Ok(decoded_rows_to_py(py, rows)?.unbind())) @@ -263,7 +263,7 @@ impl PyQueryBuilder { fn fetch_first<'py>(&self, py: Python<'py>) -> PyResult> { let node = self.node.clone().with_limit(1); - let compiled = compiler::compile(&node).map_err(PyErr::from)?; + let compiled = compiler::compile(&node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let rows = executor::fetch_all(compiled).await.map_err(PyErr::from)?; Python::attach(|py| match rows.into_iter().next() { @@ -274,7 +274,7 @@ impl PyQueryBuilder { } fn fetch_get<'py>(&self, py: Python<'py>) -> PyResult> { - let compiled = compiler::compile(&self.node).map_err(PyErr::from)?; + let compiled = compiler::compile(&self.node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let row = executor::fetch_one(compiled).await.map_err(PyErr::from)?; Python::attach(|py| Ok(decoded_row_to_py(py, row)?.into_any().unbind())) @@ -284,7 +284,7 @@ impl PyQueryBuilder { fn fetch_count<'py>(&self, py: Python<'py>) -> PyResult> { let mut count_node = self.node.clone(); count_node.operation = QueryOperation::Count; - let compiled = compiler::compile(&count_node).map_err(PyErr::from)?; + let compiled = compiler::compile(&count_node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let count = executor::fetch_count(compiled).await.map_err(PyErr::from)?; Python::attach(|py| Ok(count.into_pyobject(py)?.unbind())) @@ -294,7 +294,7 @@ impl PyQueryBuilder { fn 
fetch_aggregate<'py>(&self, py: Python<'py>) -> PyResult> { let mut agg_node = self.node.clone(); agg_node.operation = QueryOperation::Aggregate; - let compiled = compiler::compile(&agg_node).map_err(PyErr::from)?; + let compiled = compiler::compile(&agg_node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let rows = executor::fetch_all(compiled).await.map_err(PyErr::from)?; Python::attach(|py| match rows.into_iter().next() { @@ -307,7 +307,7 @@ impl PyQueryBuilder { fn execute_delete<'py>(&self, py: Python<'py>) -> PyResult> { let mut del_node = self.node.clone(); del_node.operation = QueryOperation::Delete; - let compiled = compiler::compile(&del_node).map_err(PyErr::from)?; + let compiled = compiler::compile(&del_node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let res = executor::execute(compiled).await.map_err(PyErr::from)?; Python::attach(|py| Ok(res.rows_affected.into_pyobject(py)?.unbind())) @@ -328,7 +328,7 @@ impl PyQueryBuilder { upd_node.operation = QueryOperation::Update { assignments: rust_assignments, }; - let compiled = compiler::compile(&upd_node).map_err(PyErr::from)?; + let compiled = compiler::compile(&upd_node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let res = executor::execute(compiled).await.map_err(PyErr::from)?; @@ -352,7 +352,7 @@ impl PyQueryBuilder { values: rust_values, returning_id, }; - let compiled = compiler::compile(&ins_node).map_err(PyErr::from)?; + let compiled = compiler::compile(&ins_node).map_err(RyxError::from)?; pyo3_async_runtimes::tokio::future_into_py(py, async move { let res = executor::execute(compiled).await.map_err(PyErr::from)?; @@ -364,7 +364,7 @@ impl PyQueryBuilder { } fn compiled_sql(&self) -> PyResult { - Ok(compiler::compile(&self.node).map_err(PyErr::from)?.sql) + Ok(compiler::compile(&self.node).map_err(RyxError::from)?.sql) } } diff --git a/src/pool.rs b/src/pool.rs index 
d83ace7..38dd92d 100644 --- a/src/pool.rs +++ b/src/pool.rs @@ -34,32 +34,7 @@ use sqlx::{ use tracing::{debug, info}; use crate::errors::{RyxError, RyxResult}; - -// ### -// Backend enum -// ### -/// Database backend type. -/// Used for backend-specific SQL generation (e.g., DATE() vs strftime()). -#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] -pub enum Backend { - PostgreSQL, - MySQL, - SQLite, -} - -/// Detect the backend from a database URL. -pub fn detect_backend(url: &str) -> Backend { - let url_lower = url.to_lowercase(); - if url_lower.contains("postgres") { - Backend::PostgreSQL - } else if url_lower.contains("mysql") { - Backend::MySQL - } else if url_lower.contains("sqlite") { - Backend::SQLite - } else { - Backend::PostgreSQL // default - } -} +use ryx_query::Backend; // ### // Global singleton @@ -173,7 +148,7 @@ pub async fn initialize(database_url: &str, config: PoolConfig) -> RyxResult<()> .map_err(|_| RyxError::PoolAlreadyInitialized)?; // Set the backend type based on the URL - let backend = detect_backend(database_url); + let backend = ryx_query::backend::detect_backend(database_url); BACKEND.set(backend).ok(); info!("Ryx connection pool initialized successfully"); diff --git a/src/query/mod.rs b/src/query/mod.rs deleted file mode 100644 index 60df9c9..0000000 --- a/src/query/mod.rs +++ /dev/null @@ -1,13 +0,0 @@ -// -// ### -// Ryx — Query module -// -// This module contains everything related to building and compiling queries: -// - ast.rs : the query abstract syntax tree (data structures) -// - lookup.rs : the lookup registry (built-in + user-registered lookups) -// - compiler.rs : AST → SQL string + bound values -// ### - -pub mod ast; -pub mod compiler; -pub mod lookups; diff --git a/src/transaction.rs b/src/transaction.rs index 7fe5c02..d5740fd 100644 --- a/src/transaction.rs +++ b/src/transaction.rs @@ -35,8 +35,8 @@ use tracing::{debug, instrument}; use crate::errors::{RyxError, RyxResult}; use crate::pool; -use 
crate::query::ast::SqlValue; -use crate::query::compiler::CompiledQuery; +use ryx_query::ast::SqlValue; +use ryx_query::compiler::CompiledQuery; static ACTIVE_TX: OnceCell>>>>> = OnceCell::new(); @@ -137,7 +137,7 @@ impl TransactionHandle { /// /// The query is run on the transaction's connection (not the pool), so it /// participates in the current transaction boundary. - #[instrument(skip(self, query), fields(sql = %query.sql))] + // #[instrument(skip(self, query), fields(sql = %query.sql))] pub async fn execute_query(&self, query: CompiledQuery) -> RyxResult { let mut guard = self.inner.lock().await; let tx = guard.as_mut().ok_or_else(|| { From 728a74e9c342ac531c4f064215ffe5449d3dacc6 Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 16:55:19 +0000 Subject: [PATCH 12/13] chore: update dependencies and project version --- Cargo.lock | 308 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 2 files changed, 308 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39e83d2..b9a951c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,11 +4,13 @@ version = 4 [[package]] name = "Ryx" -version = "0.1.0" +version = "0.1.1" dependencies = [ + "criterion", "once_cell", "pyo3", "pyo3-async-runtimes", + "ryx-query", "serde", "serde_json", "sqlx", @@ -42,6 +44,18 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstyle" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" + [[package]] name = "async-channel" version = "1.9.0" @@ -274,6 +288,12 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" +[[package]] +name 
= "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cc" version = "1.2.58" @@ -301,6 +321,58 @@ dependencies = [ "windows-link", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "clap" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -346,6 +418,63 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" +[[package]] +name = "criterion" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "futures", + "is-terminal", + "itertools", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "tokio", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-queue" version = "0.3.12" @@ -361,6 +490,12 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "crypto-common" version = "0.1.7" @@ -512,6 +647,20 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + 
"futures-core", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.32" @@ -641,6 +790,17 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" +dependencies = [ + "cfg-if", + "crunchy", + "zerocopy", +] + [[package]] name = "hashbrown" version = "0.15.5" @@ -848,6 +1008,26 @@ dependencies = [ "hashbrown 0.16.1", ] +[[package]] +name = "is-terminal" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.18" @@ -1046,6 +1226,12 @@ version = "1.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + [[package]] name = "parking" version = "2.2.1" @@ -1146,6 +1332,34 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + 
"plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + [[package]] name = "polling" version = "3.11.0" @@ -1317,6 +1531,26 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redox_syscall" version = "0.5.18" @@ -1335,6 +1569,18 @@ dependencies = [ "bitflags", ] +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + [[package]] name = "regex-automata" version = "0.4.14" @@ -1397,6 +1643,27 @@ version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" +[[package]] +name = "ryx-query" +version = "0.1.0" +dependencies = [ + "once_cell", + "serde", + "serde_json", + "sqlx", + "thiserror", + "tracing", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "scopeguard" version = "1.2.0" @@ -1845,6 +2112,16 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.11.0" @@ -2046,6 +2323,16 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -2113,6 +2400,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "web-sys" +version = "0.3.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd70027e39b12f0849461e08ffc50b9cd7688d942c1c8e3c7b22273236b4dd0a" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "whoami" version = "1.6.1" @@ -2123,6 +2420,15 @@ dependencies = [ "wasite", ] +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "windows-core" version = "0.62.2" diff --git a/pyproject.toml b/pyproject.toml index 075dfc2..165e779 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ build-backend = "maturin" [project] name = "ryx" -version = "0.1.2" +version = 
"0.1.3" description = "A Django-style Python ORM powered by sqlx (Rust) via PyO3." readme = "README.md" requires-python = ">=3.10" From d8f9cea359480e85ed6ca5aaa8238b60307b16ee Mon Sep 17 00:00:00 2001 From: #Einswilli Date: Wed, 8 Apr 2026 17:12:38 +0000 Subject: [PATCH 13/13] Remove public exports all_lookups/all_transforms Remove the pub use re-exports of all_lookups and all_transforms from the lookups module to narrow the public API surface. These symbols are no longer exported from ryx-query::lookups. --- ryx-query/src/lookups/mod.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/ryx-query/src/lookups/mod.rs b/ryx-query/src/lookups/mod.rs index c824727..ca1b297 100644 --- a/ryx-query/src/lookups/mod.rs +++ b/ryx-query/src/lookups/mod.rs @@ -34,5 +34,3 @@ pub use lookups::init_registry; pub use lookups::register_custom; pub use lookups::registered_lookups; pub use lookups::resolve; -pub use lookups::all_lookups; -pub use lookups::all_transforms;