From ecb2ad3129e0f0042fe6e558eba4391f6b420fb9 Mon Sep 17 00:00:00 2001 From: Script Raccoon Date: Tue, 31 Mar 2026 22:16:00 +0200 Subject: [PATCH 1/8] add tables for functors and their properties --- database/data/000_setup/001_clear.sql | 11 +++ database/migrations/010_functors-table.sql | 10 +++ .../011_functor-properties-tables.sql | 22 +++++ .../migrations/012_functor-implications.sql | 51 ++++++++++++ .../013_functor-implication-view.sql | 83 +++++++++++++++++++ 5 files changed, 177 insertions(+) create mode 100644 database/migrations/010_functors-table.sql create mode 100644 database/migrations/011_functor-properties-tables.sql create mode 100644 database/migrations/012_functor-implications.sql create mode 100644 database/migrations/013_functor-implication-view.sql diff --git a/database/data/000_setup/001_clear.sql b/database/data/000_setup/001_clear.sql index 3549f8d1..6f8a85c8 100644 --- a/database/data/000_setup/001_clear.sql +++ b/database/data/000_setup/001_clear.sql @@ -21,6 +21,17 @@ DELETE FROM tags; DELETE FROM related_properties; DELETE FROM properties; + +DELETE FROM functor_property_assignments; +DELETE FROM functor_properties; +DELETE FROM functors; + DELETE FROM relations; +DELETE FROM functor_implication_assumptions; +DELETE FROM functor_implication_conclusions; +DELETE FROM functor_implication_source_assumptions; +DELETE FROM functor_implication_target_assumptions; +DELETE FROM functor_implications; + PRAGMA foreign_keys = ON; \ No newline at end of file diff --git a/database/migrations/010_functors-table.sql b/database/migrations/010_functors-table.sql new file mode 100644 index 00000000..e69bb02b --- /dev/null +++ b/database/migrations/010_functors-table.sql @@ -0,0 +1,10 @@ +CREATE TABLE functors ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL UNIQUE, + source TEXT NOT NULL, + target TEXT NOT NULL, + description TEXT NOT NULL CHECK (length(description) > 0), + nlab_link TEXT CHECK (nlab_link IS NULL OR nlab_link like 'https://%'), + FOREIGN 
KEY (source) REFERENCES categories (id) ON DELETE CASCADE, + FOREIGN KEY (target) REFERENCES categories (id) ON DELETE CASCADE +); \ No newline at end of file diff --git a/database/migrations/011_functor-properties-tables.sql b/database/migrations/011_functor-properties-tables.sql new file mode 100644 index 00000000..605aee8f --- /dev/null +++ b/database/migrations/011_functor-properties-tables.sql @@ -0,0 +1,22 @@ +CREATE TABLE functor_properties ( + id TEXT PRIMARY KEY, + relation TEXT NOT NULL, + description TEXT NOT NULL CHECK (length(description) > 0), + nlab_link TEXT CHECK (nlab_link IS NULL OR nlab_link like 'https://%'), + invariant_under_equivalences INTEGER NOT NULL DEFAULT TRUE, + dual_property_id TEXT, + FOREIGN KEY (relation) REFERENCES relations (relation) ON DELETE RESTRICT, + FOREIGN KEY (dual_property_id) REFERENCES functor_properties (id) ON DELETE SET NULL +); + +CREATE TABLE functor_property_assignments ( + functor_id TEXT NOT NULL, + property_id TEXT NOT NULL, + is_satisfied INTEGER NOT NULL CHECK (is_satisfied IN (TRUE, FALSE)), + reason TEXT NOT NULL CHECK (length(reason) > 0), + is_deduced INTEGER NOT NULL DEFAULT FALSE CHECK (is_deduced in (TRUE, FALSE)), + position INTEGER NOT NULL DEFAULT 0, + PRIMARY KEY (functor_id, property_id), + FOREIGN KEY (functor_id) REFERENCES functors (id) ON DELETE CASCADE, + FOREIGN KEY (property_id) REFERENCES functor_properties (id) ON DELETE CASCADE +); \ No newline at end of file diff --git a/database/migrations/012_functor-implications.sql b/database/migrations/012_functor-implications.sql new file mode 100644 index 00000000..853b7e04 --- /dev/null +++ b/database/migrations/012_functor-implications.sql @@ -0,0 +1,51 @@ +CREATE TABLE functor_implications ( + id TEXT PRIMARY KEY, + reason TEXT NOT NULL CHECK (length(reason) > 0), + is_deduced INTEGER NOT NULL DEFAULT FALSE, + dualized_from TEXT, + is_equivalence INTEGER NOT NULL DEFAULT FALSE, + CHECK (dualized_from IS NULL OR is_deduced = TRUE), + FOREIGN 
KEY (dualized_from) REFERENCES functor_implications (id) ON DELETE SET NULL +); + +CREATE TABLE functor_implication_assumptions ( + implication_id TEXT NOT NULL, + property_id TEXT NOT NULL, + PRIMARY KEY (implication_id, property_id), + FOREIGN KEY (implication_id) REFERENCES functor_implications (id) ON DELETE CASCADE, + FOREIGN KEY (property_id) REFERENCES functor_properties (id) ON DELETE CASCADE +); + +CREATE TABLE functor_implication_conclusions ( + implication_id TEXT NOT NULL, + property_id TEXT NOT NULL, + PRIMARY KEY (implication_id, property_id), + FOREIGN KEY (implication_id) REFERENCES functor_implications (id) ON DELETE CASCADE, + FOREIGN KEY (property_id) REFERENCES functor_properties (id) ON DELETE CASCADE +); + +-- property of source category +CREATE TABLE functor_implication_source_assumptions ( + implication_id TEXT NOT NULL, + property_id TEXT NOT NULL, + PRIMARY KEY (implication_id, property_id), + FOREIGN KEY (implication_id) REFERENCES functor_implications (id) ON DELETE CASCADE, + FOREIGN KEY (property_id) REFERENCES properties (id) ON DELETE CASCADE +); + +-- property of target category +CREATE TABLE functor_implication_target_assumptions ( + implication_id TEXT NOT NULL, + property_id TEXT NOT NULL, + PRIMARY KEY (implication_id, property_id), + FOREIGN KEY (implication_id) REFERENCES functor_implications (id) ON DELETE CASCADE, + FOREIGN KEY (property_id) REFERENCES properties (id) ON DELETE CASCADE +); + +CREATE INDEX idx_assumptions_functor_property ON functor_implication_assumptions (property_id); + +CREATE INDEX idx_conclusions_functor_property ON functor_implication_conclusions (property_id); + +CREATE INDEX idx_assumptions_functor_source_property ON functor_implication_source_assumptions (property_id); + +CREATE INDEX idx_conclusions_functor_target_property ON functor_implication_target_assumptions (property_id); \ No newline at end of file diff --git a/database/migrations/013_functor-implication-view.sql 
b/database/migrations/013_functor-implication-view.sql new file mode 100644 index 00000000..4dee59f9 --- /dev/null +++ b/database/migrations/013_functor-implication-view.sql @@ -0,0 +1,83 @@ +CREATE VIEW functor_implications_view AS +SELECT + i.id, + i.is_equivalence, + i.reason, + i.is_deduced, + i.dualized_from, + ( + SELECT json_group_array(property_id) FROM ( + SELECT property_id + FROM functor_implication_assumptions a + WHERE a.implication_id = i.id + ORDER BY lower(property_id) + ) + ) AS assumptions, + ( + SELECT json_group_array(property_id) FROM ( + SELECT property_id + FROM functor_implication_source_assumptions a + WHERE a.implication_id = i.id + ORDER BY lower(property_id) + ) + ) AS source_assumptions, + ( + SELECT json_group_array(property_id) FROM ( + SELECT property_id + FROM functor_implication_target_assumptions a + WHERE a.implication_id = i.id + ORDER BY lower(property_id) + ) + ) AS target_assumptions, + ( + SELECT json_group_array(property_id) FROM ( + SELECT property_id + FROM functor_implication_conclusions c + WHERE c.implication_id = i.id + ORDER BY lower(property_id) + ) + ) AS conclusions +FROM functor_implications i; + + +CREATE VIEW functor_implication_input AS +SELECT + NULL AS id, + NULL AS is_equivalence, + NULL AS assumptions, + NULL AS source_assumptions, + NULL AS target_assumptions, + NULL AS conclusions, + NULL AS reason, + NULL AS is_deduced, + NULL AS dualized_from; + + +CREATE TRIGGER trg_functor_implication_input_insert +INSTEAD OF INSERT ON functor_implication_input +BEGIN + INSERT INTO functor_implications (id, reason, is_equivalence, is_deduced, dualized_from) + VALUES ( + NEW.id, + NEW.reason, + COALESCE(NEW.is_equivalence, FALSE), + COALESCE(NEW.is_deduced, FALSE), + NEW.dualized_from + ); + + INSERT INTO functor_implication_assumptions (implication_id, property_id) + SELECT NEW.id, value + FROM json_each(NEW.assumptions); + + INSERT INTO functor_implication_source_assumptions (implication_id, property_id) + SELECT 
NEW.id, value + FROM json_each(NEW.source_assumptions); + + INSERT INTO functor_implication_target_assumptions (implication_id, property_id) + SELECT NEW.id, value + FROM json_each(NEW.target_assumptions); + + INSERT INTO functor_implication_conclusions (implication_id, property_id) + SELECT NEW.id, value + FROM json_each(NEW.conclusions); +END; \ No newline at end of file From d45a207ba81c00abbb22e888cca3d36a8765db0e Mon Sep 17 00:00:00 2001 From: Script Raccoon Date: Tue, 31 Mar 2026 22:16:42 +0200 Subject: [PATCH 2/8] add data for functors --- .vscode/settings.json | 6 + .../008_functors/001_functor-properties.sql | 232 ++++++++++++++++++ .../008_functors/002_functor-implications.sql | 172 +++++++++++++ .../008_functors/003_functor-examples.sql | 57 +++++ .../abelianization.sql | 55 +++++ .../forget_vector.sql | 55 +++++ .../free_group.sql | 55 +++++ .../id_Set.sql | 19 ++ .../power_set_contravariant.sql | 55 +++++ .../power_set_covariant.sql | 73 ++++++ 10 files changed, 779 insertions(+) create mode 100644 database/data/008_functors/001_functor-properties.sql create mode 100644 database/data/008_functors/002_functor-implications.sql create mode 100644 database/data/008_functors/003_functor-examples.sql create mode 100644 database/data/009_functor-property-assignments/abelianization.sql create mode 100644 database/data/009_functor-property-assignments/forget_vector.sql create mode 100644 database/data/009_functor-property-assignments/free_group.sql create mode 100644 database/data/009_functor-property-assignments/id_Set.sql create mode 100644 database/data/009_functor-property-assignments/power_set_contravariant.sql create mode 100644 database/data/009_functor-property-assignments/power_set_covariant.sql diff --git a/.vscode/settings.json b/.vscode/settings.json index a4a0f50e..6c7f4745 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -19,6 +19,8 @@ ], "cSpell.words": [ "abelian", + "abelianization", + "abelianize", "Adamek", "algébriques", 
"anneaux", @@ -44,12 +46,15 @@ "coequalizer", "coequalizers", "cofiltered", + "cofinitary", "cogenerates", "cogenerating", "cogenerator", "cokernel", "colimit", "colimits", + "comonad", + "comonadic", "conormal", "coprime", "coproduct", @@ -141,6 +146,7 @@ "rng", "rngs", "Rosicky", + "saft", "Schapira", "semisimple", "setoid", diff --git a/database/data/008_functors/001_functor-properties.sql b/database/data/008_functors/001_functor-properties.sql new file mode 100644 index 00000000..c36f70b1 --- /dev/null +++ b/database/data/008_functors/001_functor-properties.sql @@ -0,0 +1,232 @@ +INSERT INTO functor_properties ( + id, + relation, + description, + nlab_link, + invariant_under_equivalences, + dual_property_id +) +VALUES + ( + 'faithful', + 'is', + 'A functor is faithful when it is injective on Hom-sets: If $F(f)=F(g)$, then $f=g$.', + 'https://ncatlab.org/nlab/show/faithful+functor', + TRUE, + 'faithful' + ), + ( + 'full', + 'is', + 'A functor is full when it is surjective on Hom-sets: Every morphism $F(A) \to F(B)$ is induced by a morphism $A \to B$.', + 'https://ncatlab.org/nlab/show/full+functor', + TRUE, + 'full' + ), + ( + 'essentially surjective', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is essentially surjective when every object $Y \in \mathcal{D}$ is isomorphic to $F(X)$ for some $X \in \mathcal{C}$.', + 'https://ncatlab.org/nlab/show/essentially+surjective+functor', + TRUE, + 'essentially surjective' + ), + ( + 'equivalence', + 'is an', + 'A functor is an equivalence if it has a pseudo-inverse functor.', + 'https://ncatlab.org/nlab/show/equivalence+of+categories', + TRUE, + 'equivalence' + ), + ( + 'continuous', + 'is', + 'A functor is continuous when it preserves all small limits.', + 'https://ncatlab.org/nlab/show/continuous+functor', + TRUE, + 'cocontinuous' + ), + ( + 'cocontinuous', + 'is', + 'A functor is cocontinuous when it preserves all small colimits.', + 'https://ncatlab.org/nlab/show/cocontinuous+functor', + TRUE, + 
'continuous' + ), + -- Here is why we do not call this property "preserves products": + -- Either we name the property "preserves products" and choose the + -- empty relation '' (instead of 'is'). But then negations will not be + -- formatted properly ("does not preserves products"). + -- Or we name property "products" and choose the relation 'preserves'. + -- But then we will never be able to add the properties of "reflecting products", + -- "creating products", etc. + ( + 'product-preserving', + 'is', + 'A functor $F$ preserves products when for every family of objects $(A_i)$ in the source whose product $\prod_i A_i$ exists, also the product $\prod_i F(A_i)$ exists in the target and such that the canonical morphism $F(\prod_i A_i) \to \prod_i F(A_i)$ is an isomorphism.', + NULL, + TRUE, + 'coproduct-preserving' + ), + ( + 'coproduct-preserving', + 'is', + 'A functor $F$ preserves coproducts when for every family of objects $(A_i)$ in the source whose coproduct $\coprod_i A_i$ exists, also the coproduct $\coprod_i F(A_i)$ exists in the target and such that the canonical morphism $\coprod_i F(A_i) \to F(\coprod_i A_i)$ is an isomorphism.', + NULL, + TRUE, + 'product-preserving' + ), + ( + 'finite-product-preserving', + 'is', + 'A functor $F$ preserves finite products when for every finite family of objects $(A_i)$ in the source whose product $\prod_i A_i$ exists, also the product $\prod_i F(A_i)$ exists in the target and such that the canonical morphism $F(\prod_i A_i) \to \prod_i F(A_i)$ is an isomorphism.', + NULL, + TRUE, + 'finite-coproduct-preserving' + ), + ( + 'finite-coproduct-preserving', + 'is', + 'A functor $F$ preserves finite coproducts when for every finite family of objects $(A_i)$ in the source whose coproduct $\coprod_i A_i$ exists, also the coproduct $\coprod_i F(A_i)$ exists in the target and such that the canonical morphism $\coprod_i F(A_i) \to F(\coprod_i A_i)$ is an isomorphism.', + NULL, + TRUE, + 'finite-product-preserving' + ), + ( + 
'terminal-object-preserving', + 'is', + 'A functor $F$ preserves terminal objects when it maps every terminal object to a terminal object. It is not assumed that the source category has a terminal object.', + NULL, + TRUE, + 'initial-object-preserving' + ), + ( + 'initial-object-preserving', + 'is', + 'A functor $F$ preserves initial objects when it maps every initial object to an initial object. It is not assumed that the source category has an initial object.', + NULL, + TRUE, + 'terminal-object-preserving' + ), + ( + 'equalizer-preserving', + 'is', + 'A functor $F$ preserves equalizers when for every parallel pair of morphisms $f,g : A \to B$ whose equalizer $i : E \to A$ exists, also $F(i) : F(E) \to F(A)$ is an equalizer of $F(f),F(g) : F(A) \to F(B)$.', + NULL, + TRUE, + 'coequalizer-preserving' + ), + ( + 'coequalizer-preserving', + 'is', + 'A functor $F$ preserves coequalizers when for every parallel pair of morphisms $f,g : A \to B$ whose coequalizer $p : B \to Q$ exists, also $F(p) : F(B) \to F(Q)$ is a coequalizer of $F(f),F(g) : F(A) \to F(B)$.', + NULL, + TRUE, + 'equalizer-preserving' + ), + ( + 'left adjoint', + 'is a', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is a left adjoint when there is a functor $G : \mathcal{D} \to \mathcal{C}$ such that there are natural bijections $\hom(F(A),B) \cong \hom(A,G(B))$.', + 'https://ncatlab.org/nlab/show/left+adjoint', + TRUE, + 'right adjoint' + ), + ( + 'right adjoint', + 'is a', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is a right adjoint when there is a functor $G : \mathcal{D} \to \mathcal{C}$ such that there are natural bijections $\hom(G(A),B) \cong \hom(A,F(B))$.', + 'https://ncatlab.org/nlab/show/right+adjoint', + TRUE, + 'left adjoint' + ), + ( + 'monadic', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is monadic when there is a monad $T$ on $\mathcal{D}$ such that $F$ is equivalent to the forgetful functor $U^T : \mathbf{Alg}(T) \to \mathcal{D}$.', + 
'https://ncatlab.org/nlab/show/monadic+functor', + TRUE, + 'comonadic' + ), + ( + 'comonadic', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is comonadic when there is a comonad $T$ on $\mathcal{D}$ such that $F$ is equivalent to the forgetful functor $U^T : \mathbf{CoAlg}(T) \to \mathcal{D}$.', + 'https://ncatlab.org/nlab/show/comonadic+functor', + TRUE, + 'monadic' + ), + ( + 'conservative', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is conservative when it is isomorphic-reflecting: If $f$ is a morphism in $\mathcal{C}$ such that $F(f)$ is an isomorphism, then $f$ is an isomorphism.', + 'https://ncatlab.org/nlab/show/conservative+functor', + TRUE, + 'conservative' + ), + ( + 'finitary', + 'is', + 'A functor is finitary when it preserves filtered colimits.', + 'https://ncatlab.org/nlab/show/finitary+functor', + TRUE, + 'cofinitary' + ), + ( + 'cofinitary', + 'is', + 'A functor is cofinitary when it preserves cofiltered limits.', + NULL, + TRUE, + 'finitary' + ), + ( + 'left exact', + 'is', + 'A functor is left exact when it preserves finite limits.', + 'https://ncatlab.org/nlab/show/exact+functor', + TRUE, + 'right exact' + ), + ( + 'right exact', + 'is', + 'A functor is right exact when it preserves finite colimits.', + 'https://ncatlab.org/nlab/show/exact+functor', + TRUE, + 'left exact' + ), + ( + 'exact', + 'is', + 'A functor is exact when it is left exact and right exact.', + 'https://ncatlab.org/nlab/show/exact+functor', + TRUE, + 'exact' + ), + ( + 'representable', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ is representable if $\mathcal{C}$ is locally small, $\mathcal{D} = \mathbf{Set}$, and there is an object $A \in \mathcal{C}$ with $F \cong \mathrm{Hom}(A,-)$.', + 'https://ncatlab.org/nlab/show/representable+functor', + TRUE, + NULL + ), + ( + 'monomorphism-preserving', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ preserves monomorphisms if every monomorphism in $\mathcal{C}$ is mapped to a monomorphism in 
$\mathcal{D}$.
This property is useful to rule out some adjunctions.', + 'https://ncatlab.org/nlab/show/monomorphism', + TRUE, + 'epimorphism-preserving' + ), + ( + 'epimorphism-preserving', + 'is', + 'A functor $F : \mathcal{C} \to \mathcal{D}$ preserves epimorphisms if every epimorphism in $\mathcal{C}$ is mapped to an epimorphism in $\mathcal{D}$.
This property is useful to rule out some adjunctions.', + 'https://ncatlab.org/nlab/show/epimorphism', + TRUE, + 'monomorphism-preserving' + ); diff --git a/database/data/008_functors/002_functor-implications.sql b/database/data/008_functors/002_functor-implications.sql new file mode 100644 index 00000000..f503b864 --- /dev/null +++ b/database/data/008_functors/002_functor-implications.sql @@ -0,0 +1,172 @@ +INSERT INTO functor_implication_input ( + id, + assumptions, + source_assumptions, + target_assumptions, + conclusions, + reason, + is_equivalence +) +VALUES + ( + 'equivalence_criterion', + '["full", "faithful", "essentially surjective"]', + '[]', + '[]', + '["equivalence"]', + 'This is standard, see Mac Lane, Ch. IV, Theorem 4.1.', + TRUE + ), + ( + 'continuous_consequences', + '["continuous"]', + '[]', + '[]', + '["product-preserving", "equalizer-preserving", "cofinitary"]', + 'This is trivial.', + FALSE + ), + ( + 'products_consequences', + '["product-preserving"]', + '[]', + '[]', + '["finite-product-preserving"]', + 'This is trivial.', + FALSE + ), + ( + 'finite_products_consequences', + '["finite-product-preserving"]', + '[]', + '[]', + '["terminal-object-preserving"]', + 'This is trivial.', + FALSE + ), + ( + 'continuous_criterion', + '["product-preserving", "equalizer-preserving"]', + '["products"]', + '[]', + '["continuous"]', + 'This follows from the construction of limits via equalizers and products, see Mac Lane, Ch. V, Theorem 2.2.', + FALSE + ), + ( + 'continuous_criterion_filtered', + '["left exact", "cofinitary"]', + '["finitely complete"]', + '[]', + '["continuous"]', + 'This is because every limit can be written as a filtered limit of finite limits, see Mac Lane, Ch. 
IX, Theorem 1.1.', + FALSE + ), + ( + 'product_criterion_filtered', + '["finite-product-preserving", "cofinitary"]', + '["finite products"]', + '[]', + '["product-preserving"]', + 'This is because every product can be written as a filtered limit of finite products, see Mac Lane, Ch. IX, Theorem 1.1.', + FALSE + ), + ( + 'exact_definition', + '["exact"]', + '[]', + '[]', + '["left exact", "right exact"]', + 'This holds by definition.', + TRUE + ), + ( + 'left_exact_consequences', + '["left exact"]', + '[]', + '[]', + '["finite-product-preserving", "terminal-object-preserving"]', + 'Both finite products and terminal objects are special cases of finite limits.', + FALSE + ), + ( + 'left_exact_criterion', + '["finite-product-preserving", "equalizer-preserving"]', + '["finite products"]', + '[]', + '["left exact"]', + 'This follows from the construction of finite limits via equalizers and finite products, see Mac Lane, Ch. V, Theorem 2.2.', + FALSE + ), + ( + 'equivalence_consequences', + '["equivalence"]', + '[]', + '[]', + '["continuous", "right adjoint", "monadic"]', + 'This is easy.', + FALSE + ), + ( + 'right_adjoint_consequences', + '["right adjoint"]', + '[]', + '[]', + '["continuous"]', + 'This is standard, see Mac Lane, Ch. V, Theorem 5.1.', + FALSE + ), + ( + 'saft', + '["continuous"]', + '["complete", "locally small", "well-powered", "cogenerating set"]', + '["locally small"]', + '["right adjoint"]', + 'This is the Special Adjoint Functor Theorem. The proof can be found, for example, at the nLab, or in Mac Lane, Ch. V, Theorem 8.2.', + FALSE + ), + ( + 'monadic_consequences', + '["monadic"]', + '[]', + '[]', + '["faithful", "right adjoint", "conservative"]', + 'This is clear since for a monad $T$ the forgetful functor from the category of $T$-algebras has these properties.', + FALSE + ), + ( + 'representable_is_continuous', + '["representable"]', + '[]', + '[]', + '["continuous"]', + 'This is standard, see Mac Lane, Ch. 
V, Theorem 4.1.', + FALSE + ), + ( + 'left_exact_preserves_mono', + '["left exact"]', + '[]', + '[]', + '["monomorphism-preserving"]', + 'This is because $f : X \to Y$ is a monomorphism if and only if the square displaying $f \circ \mathrm{id}_X = f \circ \mathrm{id}_X$ is a pullback square.', + FALSE + ), + ( + 'faithful-via-equalizers', + '["equalizer-preserving", "conservative"]', + '["equalizers"]', + '[]', + '["faithful"]', + 'Let $f,g : X \rightrightarrows Y$ be two morphisms in the source category, and choose an equalizer $E \hookrightarrow X$. By assumption, $F(E) \to F(X)$ is the equalizer of $F(f),F(g) : F(X) \rightrightarrows F(Y)$. Thus, if $F(f) = F(g)$, then $F(E) \to F(X)$ is an isomorphism. Since $F$ is conservative, $E \to X$ is an isomorphism, which means $f = g$.', + FALSE + ), + ( + 'monadic_full_trivial', + '["monadic", "full"]', + '[]', + '[]', + '["equivalence"]', + 'Assume that $T$ is a monad on $\mathcal{C}$ such that the forgetful functor $U : \mathbf{Alg}(T) \to \mathcal{C}$ is full. Then by general facts about adjunctions (see MSE/1994963) the unit $\eta : \mathrm{id}_{\mathcal{C}} \to T$ is an isomorphism of functors. Then it is an isomorphism of monads, so that $U$ is an equivalence.', + FALSE + ); \ No newline at end of file diff --git a/database/data/008_functors/003_functor-examples.sql b/database/data/008_functors/003_functor-examples.sql new file mode 100644 index 00000000..8c7ec3d3 --- /dev/null +++ b/database/data/008_functors/003_functor-examples.sql @@ -0,0 +1,57 @@ +INSERT INTO functors ( + id, + name, + source, + target, + description, + nlab_link +) +VALUES + ( + 'id_Set', + 'identity functor on the category of sets', + 'Set', + 'Set', + 'Every category $\mathcal{C}$ has an identity functor $\mathrm{id}_{\mathcal{C}}$. 
Here, we specify that it is for the category of sets.', + 'https://ncatlab.org/nlab/show/identity+functor' + ), + ( + 'abelianization', + 'abelianization functor for groups', + 'Grp', + 'Ab', + 'This functor maps a group $G$ to its abelianization $G^{\mathrm{ab}} := G/[G,G]$.', + 'https://ncatlab.org/nlab/show/abelianization' + ), + ( + 'forget_vector', + 'forgetful functor for vector spaces', + 'Vect', + 'Set', + 'This functor $U$ maps a vector space $V$ (over a fixed field $K$) to its underlying set $U(V)$.', + 'https://ncatlab.org/nlab/show/forgetful+functor' + ), + ( + 'free_group', + 'free group functor', + 'Set', + 'Grp', + 'This functor maps a set $X$ to the free group $F(X)$ on that set.', + 'https://ncatlab.org/nlab/show/free+functor' + ), + ( + 'power_set_covariant', + 'covariant power set functor', + 'Set', + 'Set', + 'This functor maps a set $X$ to its power set $P(X)$ and a map of sets $f : X \to Y$ to the induced image operator $f_* : P(X) \to P(Y)$.', + 'https://ncatlab.org/nlab/show/power+set' + ), + ( + 'power_set_contravariant', + 'contravariant power set functor', + 'Set_op', + 'Set', + 'This functor maps a set $X$ to its power set $P(X)$ and a map of sets $f : X \to Y$ to the induced preimage operator $f^* : P(Y) \to P(X)$.', + 'https://ncatlab.org/nlab/show/power+set' + ); \ No newline at end of file diff --git a/database/data/009_functor-property-assignments/abelianization.sql b/database/data/009_functor-property-assignments/abelianization.sql new file mode 100644 index 00000000..78d3d595 --- /dev/null +++ b/database/data/009_functor-property-assignments/abelianization.sql @@ -0,0 +1,55 @@ +INSERT INTO functor_property_assignments ( + functor_id, + property_id, + is_satisfied, + reason +) +VALUES +( + 'abelianization', + 'left adjoint', + TRUE, + 'This functor is left adjoint to the forgetful functor.' +), +( + 'abelianization', + 'finite-product-preserving', + TRUE, + 'See MO/386144.' 
+), +( + 'abelianization', + 'product-preserving', + FALSE, + 'If $G$ is a group, the canonical homomorphism $(G^{\mathbb{N}})^{\mathrm{ab}} \to (G^{\mathrm{ab}})^{\mathbb{N}}$ is surjective, but does not need to be an isomorphism: otherwise, the inclusion $[G^{\mathbb{N}}, G^{\mathbb{N}}] \subseteq [G,G]^{\mathbb{N}}$ would be an equality. But this requires the commutator width of $G$ to be finite, which fails for $G = F_2$ for instance. See also The abelianization of inverse limits of groups, Remark 0.0.7.' +), +( + 'abelianization', + 'monomorphism-preserving', + FALSE, + 'The monomorphism $A_3 \hookrightarrow S_3$ is mapped to $A_3 \to 1$.' +), +( + 'abelianization', + 'essentially surjective', + TRUE, + 'For abelian groups $G$ we have $G \cong G^{\mathrm{ab}}$.' +), +( + 'abelianization', + 'faithful', + FALSE, + 'Both the inclusion $A_3 \hookrightarrow S_3$ and the trivial homomorphism $A_3 \to S_3$ are mapped to the trivial homomorphism $A_3 \to 1$.' +), +( + 'abelianization', + 'conservative', + FALSE, + 'The proper inclusion $S_3 \hookrightarrow S_4$ gets mapped to the trivial homomorphism $1 \to 1$, which is an isomorphism.' +), +( + 'abelianization', + 'full', + FALSE, + 'See MSE/716686.' +); \ No newline at end of file diff --git a/database/data/009_functor-property-assignments/forget_vector.sql b/database/data/009_functor-property-assignments/forget_vector.sql new file mode 100644 index 00000000..58eee97a --- /dev/null +++ b/database/data/009_functor-property-assignments/forget_vector.sql @@ -0,0 +1,55 @@ +INSERT INTO functor_property_assignments ( + functor_id, + property_id, + is_satisfied, + reason +) +VALUES +( + 'forget_vector', + 'representable', + TRUE, + 'This functor is represented by any $1$-dimensional vector space.' +), +( + 'forget_vector', + 'conservative', + TRUE, + 'It is standard that the inverse of a bijective linear map is also linear.' 
+), +( + 'forget_vector', + 'finitary', + TRUE, + 'For every algebraic category the forgetful functor to the category of sets preserves filtered colimits.' +), +( + 'forget_vector', + 'monadic', + TRUE, + 'For every algebraic category the forgetful functor to the category of sets is monadic.' +), +( + 'forget_vector', + 'epimorphism-preserving', + TRUE, + 'This follows from the classifications of epimorphisms in the category of vector spaces and in the category of sets.' +), +( + 'forget_vector', + 'initial-object-preserving', + FALSE, + 'The underlying set of a trivial vector space is not empty.' +), +( + 'forget_vector', + 'essentially surjective', + FALSE, + 'The empty set has no vector space structure.' +), +( + 'forget_vector', + 'coequalizer-preserving', + FALSE, + 'The coequalizer of $0,i_1 : K \to K^2$ in $\mathbf{Vect}$ is $p_2 : K^2 \to K$, but the coequalizer in $\mathbf{Set}$ is $(K \times K^*) \cup \{(0,0)\}$.' +); \ No newline at end of file diff --git a/database/data/009_functor-property-assignments/free_group.sql b/database/data/009_functor-property-assignments/free_group.sql new file mode 100644 index 00000000..924b2775 --- /dev/null +++ b/database/data/009_functor-property-assignments/free_group.sql @@ -0,0 +1,55 @@ +INSERT INTO functor_property_assignments ( + functor_id, + property_id, + is_satisfied, + reason +) +VALUES +( + 'free_group', + 'left adjoint', + TRUE, + 'This functor is left adjoint to the forgetful functor.' +), +( + 'free_group', + 'monomorphism-preserving', + TRUE, + 'This can be deduced from the description of the elements of a free group, but here is an abstract argument: Split monomorphisms are preserved by any functor. The only injective maps in $\mathbf{Set}$ that are not split are $\emptyset \hookrightarrow X$ (for non-empty $X$), and $F(\emptyset) \to F(X)$ is injective since $F(\emptyset)$ is the trivial group.' 
+), +( + 'free_group', + 'faithful', + TRUE, + 'A left adjoint is faithful if and only if its unit consists of monomorphisms. So we only need to check that every set embeds into (the underlying set of) its free group. But this is clear since $X$ already embeds into the free abelian group $\mathbb{Z}^{\oplus X}$, which is a quotient of the free group.' +), +( + 'free_group', + 'conservative', + TRUE, + 'Let $f : X \to Y$ be a map of sets such that $F(f) : F(X) \to F(Y)$ is an isomorphism of groups. We know that $F$ is faithful, so that it reflects monomorphisms. Thus, $f$ is injective. Choose a complement $U \subseteq Y$ of $f(X) \subseteq Y$. Then $F(X) \to F(Y) = F(X) \sqcup F(U)$ is an isomorphism. This implies $F(U)=1$ and hence $U = \emptyset$.' +), +( + 'free_group', + 'essentially surjective', + FALSE, + 'Not every group is free (consider $\mathbb{Z}/2$).' +), +( + 'free_group', + 'terminal-object-preserving', + FALSE, + 'The free group of rank $1$ is not the trivial group.' +), +( + 'free_group', + 'full', + FALSE, + 'The map $1 = \mathrm{Hom}(1,1) \to \mathrm{Hom}(F(1),F(1)) = \mathrm{Hom}(\mathbb{Z},\mathbb{Z}) = \mathbb{Z}$ is not surjective.' +), +( + 'free_group', + 'equalizer-preserving', + FALSE, + 'Let $f,g : \{a,b\} \rightrightarrows \{c,d\}$ be the two constant maps, $f \equiv c$, $g \equiv d$. Their equalizer is empty, and the free group on that equalizer is trivial. Now consider the induced group homomorphisms $F(f), F(g) : F(\{a,b\}) \rightrightarrows F(\{c,d\})$ and observe that $a \cdot b^{-1}$ lies in the kernel of both homomorphisms, hence in their equalizer.' 
+); diff --git a/database/data/009_functor-property-assignments/id_Set.sql b/database/data/009_functor-property-assignments/id_Set.sql new file mode 100644 index 00000000..3264a7f3 --- /dev/null +++ b/database/data/009_functor-property-assignments/id_Set.sql @@ -0,0 +1,19 @@ +INSERT INTO functor_property_assignments ( + functor_id, + property_id, + is_satisfied, + reason +) +VALUES +( + 'id_Set', + 'equivalence', + TRUE, + 'This is trivial.' +), +( + 'id_Set', + 'representable', + TRUE, + 'This functor is represented by any singleton set.' +); \ No newline at end of file diff --git a/database/data/009_functor-property-assignments/power_set_contravariant.sql b/database/data/009_functor-property-assignments/power_set_contravariant.sql new file mode 100644 index 00000000..2ceb93eb --- /dev/null +++ b/database/data/009_functor-property-assignments/power_set_contravariant.sql @@ -0,0 +1,55 @@ +INSERT INTO functor_property_assignments ( + functor_id, + property_id, + is_satisfied, + reason +) +VALUES +( + 'power_set_contravariant', + 'representable', + TRUE, + 'This is because there are natural bijections $P(X) \cong \mathrm{Hom}(X,2)$, sending a subset to its characteristic function.' +), +( + 'power_set_contravariant', + 'monadic', + TRUE, + 'See Johnstone, Theorem 2.2.7.' +), +( + 'power_set_contravariant', + 'epimorphism-preserving', + TRUE, + 'If $f : X \to Y$ is injective, then $f^* : P(Y) \to P(X)$ is surjective, since for all $A \subseteq X$ we have $A = f^*(f_*(A))$.' +), +( + 'power_set_contravariant', + 'coequalizer-preserving', + FALSE, + 'The power set functor preserves reflexive coequalizers, but not all coequalizers, i.e. there are maps $f,g : Y \to X$ with equalizer $E \subseteq Y$ such that $P(Y) \twoheadrightarrow P(E)$ is not the coequalizer of $f^*,g^* : P(X) \to P(Y)$: Let $X=\{0,1\}$ and $Y=\{a,b,c\}$. Define maps $f,g:Y\to X$ by $f(a)=0$, $f(b)=0$, $f(c)=1$ and $g(a)=0$, $g(b)=1$, $g(c)=0$. 
Their equalizer is $E = \{a\}$, so that $P(E)$ has $2$ elements. For $S=\{0\}$ one has $f^*(S)=\{a,b\}$, $g^*(S)=\{a,c\}$, and for $S=\{1\}$ one has $f^*(S)=\{c\}$, $g^*(S)=\{b\}$. Thus the coequalizer of $f^*,g^*$ is obtained from $P(Y)$ by imposing the two relations $\{a,b\} \sim \{a,c\}$ and $\{c\} \sim \{b\}$. But then it has $8-2 = 6$ elements.' +), +( + 'power_set_contravariant', + 'initial-object-preserving', + FALSE, + 'In fact, the initial object does not even lie in the essential image since $P(X) \neq 0$ for all $X$.' +), +( + 'power_set_contravariant', + 'finitary', + FALSE, + 'Consider the sequence of projections $\cdots \twoheadrightarrow \{0,1\}^2 \twoheadrightarrow \{0,1\}^1$. Its limit in $\mathbf{Set}$ (i.e. colimit in $\mathbf{Set}^{\mathrm{op}}$) is $\{0,1\}^{\mathbb{N}}$, which is uncountable, so that $P(\{0,1\}^{\mathbb{N}})$ is also uncountable. But the colimit of the induced diagram $P(\{0,1\}^1) \hookrightarrow P(\{0,1\}^2) \hookrightarrow \cdots$ is countable since each $P(\{0,1\}^n)$ is finite.' +), +( + 'power_set_contravariant', + 'essentially surjective', + FALSE, + 'The initial object does not lie in the essential image since $P(X) \neq 0$ for all $X$.' +), +( + 'power_set_contravariant', + 'full', + FALSE, + 'The maps $f^* : P(Y) \to P(X)$ preserve the empty set, so take the constant map $P(X) \to P(X)$, $T \mapsto X$ for instance (where $X$ is non-empty).' 
+); diff --git a/database/data/009_functor-property-assignments/power_set_covariant.sql b/database/data/009_functor-property-assignments/power_set_covariant.sql new file mode 100644 index 00000000..50036ab1 --- /dev/null +++ b/database/data/009_functor-property-assignments/power_set_covariant.sql @@ -0,0 +1,73 @@ +INSERT INTO functor_property_assignments ( + functor_id, + property_id, + is_satisfied, + reason +) +VALUES +( + 'power_set_covariant', + 'monomorphism-preserving', + TRUE, + 'If $f : X \to Y$ is injective, then $f^* \circ f_* = \mathrm{id}_{P(X)}$, so that $f_*$ is injective.' +), +( + 'power_set_covariant', + 'epimorphism-preserving', + TRUE, + 'If $f : X \to Y$ is surjective, then $f_* \circ f^* = \mathrm{id}_{P(Y)}$, so that $f^*$ is surjective.' +), +( + 'power_set_covariant', + 'faithful', + TRUE, + 'Let $f,g : X \rightrightarrows Y$ be two maps with $f_* = g_* : P(X) \rightrightarrows P(Y)$. Then $\{f(x)\} = f_*(\{x\}) = g_*(\{x\}) = \{g(x)\}$ and hence $f(x) = g(x)$ for all $x \in X$.' +), +( + 'power_set_covariant', + 'conservative', + TRUE, + 'Assume that $f : X \to Y$ is a map such that $f_* : P(X) \to P(Y)$ is an isomorphism. There is some $A \subseteq X$ with $Y = f_*(A)$, this proves that $f$ is surjective. It is also injective: If $x,y \in X$ satisfy $f(x) = f(y)$, then $f_*(\{x\}) = f_*(\{y\})$, and hence $\{x\} = \{y\}$, i.e. $x = y$.' +), +( + 'power_set_covariant', + 'full', + FALSE, + 'Take any map $P(X) \to P(X)$ that does not preserve the empty set, say the constant map with value $X$ (for $X \neq \emptyset$).' +), +( + 'power_set_covariant', + 'terminal-object-preserving', + FALSE, + 'We have $2^1 \neq 1$.' +), +( + 'power_set_covariant', + 'initial-object-preserving', + FALSE, + 'We have $2^0 \neq 0$.' +), +( + 'power_set_covariant', + 'essentially surjective', + FALSE, + 'Every power set is non-empty.' 
+), +( + 'power_set_covariant', + 'finitary', + FALSE, + 'The filtered colimit $\mathbb{N} = \bigcup_{n \geq 0} \mathbb{N}_{\leq n}$ is not preserved by $P$, since $\bigcup_{n \geq 0} P(\mathbb{N}_{\leq n})$ just consists of the finite subsets of $\mathbb{N}$.' +), +( + 'power_set_covariant', + 'equalizer-preserving', + FALSE, + 'Any pair of distinct surjective maps $f,g : X \rightrightarrows Y$ provides a counterexample: Their equalizer $E$ is a proper subset of $X$, so that $P(E)$ cannot contain $X$. But $f_*(X) = Y = g_*(X)$ shows that $X$ is contained in the equalizer of $f_*,g_* : P(X) \rightrightarrows P(Y)$.' +), +( + 'power_set_covariant', + 'coequalizer-preserving', + FALSE, + 'Let $X := \{x,y\}$. Consider the two maps $x,y : \{0\} \rightrightarrows X$. Their coequalizer $Q = X / (x = y)$ has just one element, so that $P(Q)$ has two elements. The induced maps $x_*,y_* : P(\{0\}) \rightrightarrows P(X)$ (which already agree on the empty set) have coequalizer $P(X) / (\{x\} = \{y\})$, which has $3$ elements. So it cannot be $P(Q)$.' 
+); From 855254067b8a2e602abdc708177194fcb1b1da6f Mon Sep 17 00:00:00 2001 From: Script Raccoon Date: Tue, 31 Mar 2026 22:17:18 +0200 Subject: [PATCH 3/8] deduction system for functor properties --- ...ons.ts => deduce-category-implications.ts} | 20 +- ...rties.ts => deduce-category-properties.ts} | 171 ++++++--- scripts/deduce-functor-implications.ts | 111 ++++++ scripts/deduce-functor-properties.ts | 338 ++++++++++++++++++ scripts/deduce.ts | 13 +- scripts/implication.utils.ts | 80 ----- scripts/shared.ts | 36 ++ scripts/utils.ts | 3 - 8 files changed, 618 insertions(+), 154 deletions(-) rename scripts/{deduce-implications.ts => deduce-category-implications.ts} (82%) rename scripts/{deduce-properties.ts => deduce-category-properties.ts} (69%) create mode 100644 scripts/deduce-functor-implications.ts create mode 100644 scripts/deduce-functor-properties.ts delete mode 100644 scripts/implication.utils.ts create mode 100644 scripts/shared.ts delete mode 100644 scripts/utils.ts diff --git a/scripts/deduce-implications.ts b/scripts/deduce-category-implications.ts similarity index 82% rename from scripts/deduce-implications.ts rename to scripts/deduce-category-implications.ts index 96acf5cc..60b98d3d 100644 --- a/scripts/deduce-implications.ts +++ b/scripts/deduce-category-implications.ts @@ -1,20 +1,20 @@ import { type Client } from '@libsql/client' -import { are_equal_sets } from './utils' +import { are_equal_sets } from './shared' import dotenv from 'dotenv' dotenv.config({ quiet: true }) -export async function deduce_implications(db: Client) { - await clear_deduced_implications(db) - await create_dualized_implications(db) - await create_self_dual_implications(db) +export async function deduce_category_implications(db: Client) { + await clear_deduced_category_implications(db) + await create_dualized_category_implications(db) + await create_self_dual_category_implications(db) } -async function clear_deduced_implications(db: Client) { +async function 
clear_deduced_category_implications(db: Client) { await db.execute(`DELETE FROM implications WHERE is_deduced = TRUE`) } -async function create_dualized_implications(db: Client) { +async function create_dualized_category_implications(db: Client) { const res = await db.execute(` SELECT v.id, @@ -88,10 +88,10 @@ async function create_dualized_implications(db: Client) { 'write', ) - console.info(`Dualized ${dualizable_implications.length} implications`) + console.info(`Dualized ${dualizable_implications.length} category implications`) } -async function create_self_dual_implications(db: Client) { +async function create_self_dual_category_implications(db: Client) { const { rows } = await db.execute(` INSERT INTO implication_input ( id, @@ -118,5 +118,5 @@ async function create_self_dual_implications(db: Client) { RETURNING id `) - console.info(`Created ${rows.length} self-dual implications`) + console.info(`Created ${rows.length} self-dual category implications`) } diff --git a/scripts/deduce-properties.ts b/scripts/deduce-category-properties.ts similarity index 69% rename from scripts/deduce-properties.ts rename to scripts/deduce-category-properties.ts index 08a217ef..00a54e6c 100644 --- a/scripts/deduce-properties.ts +++ b/scripts/deduce-category-properties.ts @@ -3,9 +3,9 @@ import dotenv from 'dotenv' import { get_assumption_string, get_conclusion_string, - get_normalized_implications, - NormalizedImplication, -} from './implication.utils' + is_subset, + NormalizedCategoryImplication, +} from './shared' dotenv.config({ quiet: true }) @@ -15,23 +15,22 @@ type CategoryMeta = { dual_category_id: string | null } -export async function deduce_all_properties(db: Client) { +export async function deduce_category_properties(db: Client) { const tx = await db.transaction() try { - const implications = await get_normalized_implications(tx) - if (!implications) return + const implications = await get_normalized_category_implications(tx) const categories = await 
get_categories(tx) for (const category of categories) { - await delete_deduced_properties(tx, category.id) - await deduce_satisfied_properties(tx, category.id, implications) - await deduce_unsatisfied_properties(tx, category.id, implications) + await delete_deduced_category_properties(tx, category.id) + await deduce_satisfied_category_properties(tx, category.id, implications) + await deduce_unsatisfied_category_properties(tx, category.id, implications) } for (const category of categories) { - await deduce_dual_properties(tx, category) + await deduce_dual_category_properties(tx, category) } await tx.commit() @@ -42,6 +41,66 @@ export async function deduce_all_properties(db: Client) { } } +async function get_normalized_category_implications( + tx: Transaction, +): Promise { + const res = await tx.execute(` + SELECT + v.id, + v.assumptions, + v.conclusions, + v.is_equivalence, + ( + SELECT json_group_object(p.id, p.relation) + FROM properties p + WHERE p.id IN ( + SELECT value FROM json_each(v.assumptions) + UNION + SELECT value FROM json_each(v.conclusions) + ) + ) AS relations + FROM implications_view v + `) + + const all_implications_db = res.rows as unknown as { + id: string + assumptions: string + conclusions: string + is_equivalence: number + relations: string + }[] + + const implications: NormalizedCategoryImplication[] = [] + + for (const impl of all_implications_db) { + const assumptions: string[] = JSON.parse(impl.assumptions) + const conclusions: string[] = JSON.parse(impl.conclusions) + const relations: Record = JSON.parse(impl.relations) + + for (const conclusion of conclusions) { + implications.push({ + id: impl.id, + assumptions: new Set(assumptions), + conclusion, + relations, + }) + } + + if (impl.is_equivalence) { + for (const assumption of assumptions) { + implications.push({ + id: impl.id, + assumptions: new Set(conclusions), + conclusion: assumption, + relations, + }) + } + } + } + + return implications +} + async function get_categories(tx: 
Transaction) { const res = await tx.execute(` SELECT id, name, dual_category_id @@ -50,7 +109,7 @@ async function get_categories(tx: Transaction) { return res.rows as unknown as CategoryMeta[] } -async function delete_deduced_properties(tx: Transaction, category_id: string) { +async function delete_deduced_category_properties(tx: Transaction, category_id: string) { await tx.execute({ sql: ` DELETE FROM category_property_assignments @@ -60,46 +119,10 @@ async function delete_deduced_properties(tx: Transaction, category_id: string) { }) } -async function deduce_dual_properties(tx: Transaction, category: CategoryMeta) { - const allowed = - category.dual_category_id !== null && - category.name.toLowerCase().startsWith('dual') // prevent circular deduction - - if (!allowed) return - - const res = await tx.execute({ - sql: ` - INSERT OR REPLACE INTO category_property_assignments - (category_id, property_id, is_satisfied, reason, is_deduced) - SELECT - ?, - p.dual_property_id, - a.is_satisfied, - CASE - WHEN a.is_satisfied THEN - 'Its dual category ' || r.relation || ' ' || a.property_id || '.' - ELSE - 'Its dual category ' || r.negation || ' ' || a.property_id || '.' - END, - TRUE - FROM category_property_assignments a - INNER JOIN properties p ON p.id = a.property_id - INNER JOIN relations r ON r.relation= p.relation - WHERE a.category_id = ? 
AND p.dual_property_id IS NOT NULL - ORDER BY lower(p.dual_property_id) - `, - args: [category.id, category.dual_category_id], - }) - - console.info( - `Added ${res.rowsAffected} (un-)satisfied properties for ${category.id} to the database by using its dual ${category.dual_category_id}`, - ) -} - -async function deduce_satisfied_properties( +async function deduce_satisfied_category_properties( tx: Transaction, category_id: string, - implications: NormalizedImplication[], + implications: NormalizedCategoryImplication[], ) { const satisfied_res = await tx.execute({ sql: ` @@ -123,7 +146,7 @@ async function deduce_satisfied_properties( while (true) { const implication = implications.find( ({ assumptions, conclusion }) => - [...assumptions].every((p) => satisfied_props.has(p)) && + is_subset(assumptions, satisfied_props) && !satisfied_props.has(conclusion), ) if (!implication) break @@ -164,14 +187,14 @@ async function deduce_satisfied_properties( } console.info( - `Added ${deduced_satisfied_props.length} satisfied properties for ${category_id} to the database`, + `Added ${deduced_satisfied_props.length} satisfied properties for category ${category_id} to the database`, ) } -async function deduce_unsatisfied_properties( +async function deduce_unsatisfied_category_properties( tx: Transaction, category_id: string, - implications: NormalizedImplication[], + implications: NormalizedCategoryImplication[], ) { const satisfied_res = await tx.execute({ sql: ` @@ -211,9 +234,7 @@ async function deduce_unsatisfied_properties( for (const p of implication.assumptions) { const is_valid = !unsatisfied_props.has(p) && - [...implication.assumptions].every( - (q) => q === p || satisfied_props.has(q), - ) + is_subset(implication.assumptions, satisfied_props, p) if (is_valid) return { implication, property: p } } } @@ -268,6 +289,42 @@ async function deduce_unsatisfied_properties( } console.info( - `Added ${deduced_unsatisfied_props.length} unsatisfied properties for ${category_id} to 
the database`, + `Added ${deduced_unsatisfied_props.length} unsatisfied properties for category ${category_id} to the database`, + ) +} + +async function deduce_dual_category_properties(tx: Transaction, category: CategoryMeta) { + const allowed = + category.dual_category_id !== null && + category.name.toLowerCase().startsWith('dual') // prevent circular deduction + + if (!allowed) return + + const res = await tx.execute({ + sql: ` + INSERT OR REPLACE INTO category_property_assignments + (category_id, property_id, is_satisfied, reason, is_deduced) + SELECT + ?, + p.dual_property_id, + a.is_satisfied, + CASE + WHEN a.is_satisfied THEN + 'Its dual category ' || r.relation || ' ' || a.property_id || '.' + ELSE + 'Its dual category ' || r.negation || ' ' || a.property_id || '.' + END, + TRUE + FROM category_property_assignments a + INNER JOIN properties p ON p.id = a.property_id + INNER JOIN relations r ON r.relation= p.relation + WHERE a.category_id = ? AND p.dual_property_id IS NOT NULL + ORDER BY lower(p.dual_property_id) + `, + args: [category.id, category.dual_category_id], + }) + + console.info( + `Added ${res.rowsAffected} (un-)satisfied properties for category ${category.id} to the database by using its dual ${category.dual_category_id}`, ) } diff --git a/scripts/deduce-functor-implications.ts b/scripts/deduce-functor-implications.ts new file mode 100644 index 00000000..c5a523ba --- /dev/null +++ b/scripts/deduce-functor-implications.ts @@ -0,0 +1,111 @@ +import { type Client } from '@libsql/client' +import { are_equal_sets } from './shared' +import dotenv from 'dotenv' + +dotenv.config({ quiet: true }) + +// TODO: remove code duplication with category implication deduction script + +export async function deduce_functor_implications(db: Client) { + await clear_deduced_functor_implications(db) + await create_dualized_functor_implications(db) +} + +async function clear_deduced_functor_implications(db: Client) { + await db.execute(`DELETE FROM functor_implications 
WHERE is_deduced = TRUE`) +} + +async function create_dualized_functor_implications(db: Client) { + const res = await db.execute(` + SELECT + v.id, + v.assumptions, + v.conclusions, + v.is_equivalence, + v.reason, + ( + SELECT json_group_array(p.dual_property_id) + FROM json_each(v.assumptions) a + JOIN functor_properties p ON p.id = a.value + ) AS dual_assumptions, + ( + SELECT json_group_array(p.dual_property_id) + FROM json_each(v.source_assumptions) sa + JOIN properties p ON p.id = sa.value + ) AS dual_source_assumptions, + ( + SELECT json_group_array(p.dual_property_id) + FROM json_each(v.target_assumptions) ta + JOIN properties p ON p.id = ta.value + ) AS dual_target_assumptions, + ( + SELECT json_group_array(p.dual_property_id) + FROM json_each(v.conclusions) c + JOIN functor_properties p ON p.id = c.value + ) AS dual_conclusions + FROM functor_implications_view v + WHERE v.is_deduced = FALSE + `) + + const implications = res.rows as unknown as { + id: string + assumptions: string + conclusions: string + dual_assumptions: string + dual_source_assumptions: string + dual_target_assumptions: string + dual_conclusions: string + is_equivalence: number + reason: string + }[] + + const dualizable_implications = implications.filter((impl) => { + const has_null = + JSON.parse(impl.dual_assumptions).includes(null) || + JSON.parse(impl.dual_conclusions).includes(null) || + JSON.parse(impl.dual_source_assumptions).includes(null) || + JSON.parse(impl.dual_target_assumptions).includes(null) + + if (has_null) return false + + const assumptions = new Set(JSON.parse(impl.assumptions)) + const conclusions = new Set(JSON.parse(impl.conclusions)) + const dual_assumptions = new Set(JSON.parse(impl.dual_assumptions)) + const dual_conclusions = new Set(JSON.parse(impl.dual_conclusions)) + + return ( + !are_equal_sets(assumptions, dual_assumptions) || + !are_equal_sets(conclusions, dual_conclusions) + ) + }) + + await db.batch( + dualizable_implications.map((impl) => ({ + sql: ` + 
INSERT INTO functor_implication_input ( + id, + assumptions, + source_assumptions, + target_assumptions, + conclusions, + is_equivalence, + reason, + dualized_from, + is_deduced + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, TRUE)`, + args: [ + `dual_${impl.id}`, + impl.dual_assumptions, + impl.dual_source_assumptions, + impl.dual_target_assumptions, + impl.dual_conclusions, + impl.is_equivalence, + 'This follows from the dual implication.', + impl.id, + ], + })), + 'write', + ) + + console.info(`Dualized ${dualizable_implications.length} functor implications`) +} diff --git a/scripts/deduce-functor-properties.ts b/scripts/deduce-functor-properties.ts new file mode 100644 index 00000000..081e2697 --- /dev/null +++ b/scripts/deduce-functor-properties.ts @@ -0,0 +1,338 @@ +import type { Transaction, Client } from '@libsql/client' +import dotenv from 'dotenv' +import { + get_assumption_string, + get_conclusion_string, + is_subset, + NormalizedFunctorImplication, +} from './shared' + +dotenv.config({ quiet: true }) + +type FunctorMeta = { + id: string + name: string + source: string + source_props: Set + target: string + target_props: Set +} + +// TODO: remove code duplication with category deduction script +// (not quite the same because we need source and target assumptions) + +export async function deduce_functor_properties(db: Client) { + const tx = await db.transaction() + + try { + const implications = await get_normalized_functor_implications(tx) + + const functors = await get_functors(tx) + + for (const functor of functors) { + await delete_deduced_functor_properties(tx, functor) + await deduce_satisfied_functor_properties(tx, functor, implications) + await deduce_unsatisfied_functor_properties(tx, functor, implications) + } + + await tx.commit() + } catch (err) { + console.error(err) + await tx.rollback() + throw err + } +} + +async function get_normalized_functor_implications( + tx: Transaction, +): Promise { + const res = await tx.execute(` + SELECT + v.id, + 
v.assumptions, + v.source_assumptions, + v.target_assumptions, + v.conclusions, + v.is_equivalence, + ( + SELECT json_group_object(p.id, p.relation) + FROM functor_properties p + WHERE p.id IN ( + SELECT value FROM json_each(v.assumptions) + UNION + SELECT value FROM json_each(v.conclusions) + ) + ) AS relations + FROM functor_implications_view v + `) + + const all_implications_db = res.rows as unknown as { + id: string + assumptions: string + source_assumptions: string + target_assumptions: string + conclusions: string + is_equivalence: number + relations: string + }[] + + const implications: NormalizedFunctorImplication[] = [] + + for (const impl of all_implications_db) { + const assumptions: string[] = JSON.parse(impl.assumptions) + const conclusions: string[] = JSON.parse(impl.conclusions) + const source_assumptions: string[] = JSON.parse(impl.source_assumptions) + const target_assumptions: string[] = JSON.parse(impl.target_assumptions) + const relations: Record = JSON.parse(impl.relations) + + for (const conclusion of conclusions) { + implications.push({ + id: impl.id, + assumptions: new Set(assumptions), + source_assumptions: new Set(source_assumptions), + target_assumptions: new Set(target_assumptions), + conclusion, + relations, + }) + } + + if (impl.is_equivalence) { + for (const assumption of assumptions) { + implications.push({ + id: impl.id, + assumptions: new Set(conclusions), + source_assumptions: new Set(source_assumptions), + target_assumptions: new Set(target_assumptions), + conclusion: assumption, + relations, + }) + } + } + } + + return implications +} + +async function get_functors(tx: Transaction) { + const res = await tx.execute(` + SELECT + id, + name, + source, + target, + ( + SELECT json_group_array(property_id) FROM ( + SELECT property_id + FROM category_property_assignments + WHERE category_id = source AND is_satisfied = TRUE + ) + ) as source_props, + ( + SELECT json_group_array(property_id) FROM ( + SELECT property_id + FROM 
category_property_assignments + WHERE category_id = target AND is_satisfied = TRUE + ) + ) as target_props + FROM functors + ORDER BY lower(name) + `) + + return res.rows.map((row) => ({ + id: row.id, + name: row.name, + source: row.source, + target: row.target, + source_props: new Set(JSON.parse(row.source_props as string)), + target_props: new Set(JSON.parse(row.target_props as string)), + })) as FunctorMeta[] +} + +async function delete_deduced_functor_properties(tx: Transaction, functor: FunctorMeta) { + await tx.execute({ + sql: ` + DELETE FROM functor_property_assignments + WHERE functor_id = ? AND is_deduced = TRUE + `, + args: [functor.id], + }) +} + +async function deduce_satisfied_functor_properties( + tx: Transaction, + functor: FunctorMeta, + implications: NormalizedFunctorImplication[], +) { + const satisfied_res = await tx.execute({ + sql: ` + SELECT property_id + FROM functor_property_assignments + WHERE + functor_id = ? + AND is_satisfied = TRUE + AND is_deduced = FALSE + `, + args: [functor.id], + }) + + const satisfied_props = new Set( + satisfied_res.rows.map((row) => row.property_id) as string[], + ) as Set + + const deduced_satisfied_props: string[] = [] + const reasons: Record = {} + + while (true) { + const implication = implications.find( + ({ assumptions, conclusion, source_assumptions, target_assumptions }) => + is_subset(assumptions, satisfied_props) && + !satisfied_props.has(conclusion) && + is_subset(source_assumptions, functor.source_props) && + is_subset(target_assumptions, functor.target_props), + ) + if (!implication) break + + const { id: implication_id, conclusion } = implication + + satisfied_props.add(conclusion) + deduced_satisfied_props.push(conclusion) + + const assumption_string = get_assumption_string(implication) + const conclusion_string = get_conclusion_string(implication) + + const ref = `(see here)` + const reason = `Since it ${assumption_string}, it ${conclusion_string} ${ref}.` + + reasons[conclusion] = reason + } + 
+ if (deduced_satisfied_props.length > 0) { + const value_fragments: string[] = [] + const values: (string | number)[] = [] + + for (let i = 0; i < deduced_satisfied_props.length; i++) { + const id = deduced_satisfied_props[i] + value_fragments.push(`(?, ?, TRUE, ?, ?, TRUE)`) + values.push(functor.id, id, reasons[id], i + 1) + } + + const insert_sql = ` + INSERT INTO functor_property_assignments ( + functor_id, property_id, is_satisfied, reason, position, is_deduced + ) + VALUES + ${value_fragments.join(',\n')} + ` + + await tx.execute({ sql: insert_sql, args: values }) + } + + console.info( + `Added ${deduced_satisfied_props.length} satisfied properties for functor ${functor.id} to the database`, + ) +} + +async function deduce_unsatisfied_functor_properties( + tx: Transaction, + functor: FunctorMeta, + implications: NormalizedFunctorImplication[], +) { + const satisfied_res = await tx.execute({ + sql: ` + SELECT property_id + FROM functor_property_assignments + WHERE functor_id = ? AND is_satisfied = TRUE + `, + args: [functor.id], + }) + + const satisfied_props = new Set( + satisfied_res.rows.map((row) => row.property_id) as string[], + ) + + const unsatisfied_res = await tx.execute({ + sql: ` + SELECT property_id + FROM functor_property_assignments + WHERE + functor_id = ? 
+ AND is_satisfied = FALSE + AND is_deduced = FALSE + `, + args: [functor.id], + }) + + const unsatisfied_props = new Set( + unsatisfied_res.rows.map((row) => row.property_id) as string[], + ) + + const deduced_unsatisfied_props: string[] = [] + const reasons: Record = {} + + function get_next_implication() { + for (const implication of implications) { + if (!unsatisfied_props.has(implication.conclusion)) continue + for (const p of implication.assumptions) { + const is_valid = + !unsatisfied_props.has(p) && + is_subset(implication.assumptions, satisfied_props, p) && + is_subset(implication.source_assumptions, functor.source_props) && + is_subset(implication.target_assumptions, functor.target_props) + + if (is_valid) return { implication, property: p } + } + } + + return null + } + + while (true) { + const next = get_next_implication() + if (!next) break + + const { implication, property } = next + const { id: implication_id, relations } = implication + + if (satisfied_props.has(property)) { + throw new Error(`Contradiction has been found for: ${property}`) + } + + unsatisfied_props.add(property) + deduced_unsatisfied_props.push(property) + + const assumption_string = get_assumption_string(implication) + const conclusion_string = get_conclusion_string(implication) + + const ref = `(see here)` + + const reason = + `Assume that it ${relations[property]} ${property}. 
` + + `But since it ${assumption_string}, it ${conclusion_string} ${ref} – contradiction.` + + reasons[property] = reason + } + + if (deduced_unsatisfied_props.length > 0) { + const value_fragments: string[] = [] + const values: (string | number)[] = [] + + for (let i = 0; i < deduced_unsatisfied_props.length; i++) { + const id = deduced_unsatisfied_props[i] + value_fragments.push('(?, ?, FALSE, ?, ?, TRUE)') + values.push(functor.id, id, reasons[id], i + 1) + } + + const insert_query = ` + INSERT INTO functor_property_assignments ( + functor_id, property_id, is_satisfied, reason, position, is_deduced + ) + VALUES + ${value_fragments.join(',\n')}` + + await tx.execute({ sql: insert_query, args: values }) + } + + console.info( + `Added ${deduced_unsatisfied_props.length} unsatisfied properties for functor ${functor.id} to the database`, + ) +} diff --git a/scripts/deduce.ts b/scripts/deduce.ts index cc1b438d..df2618ac 100644 --- a/scripts/deduce.ts +++ b/scripts/deduce.ts @@ -1,7 +1,9 @@ import { createClient } from '@libsql/client' import dotenv from 'dotenv' -import { deduce_implications } from './deduce-implications' -import { deduce_all_properties } from './deduce-properties' +import { deduce_category_implications } from './deduce-category-implications' +import { deduce_category_properties } from './deduce-category-properties' +import { deduce_functor_implications } from './deduce-functor-implications' +import { deduce_functor_properties } from './deduce-functor-properties' dotenv.config({ quiet: true }) @@ -17,5 +19,8 @@ const db = createClient({ await db.execute('PRAGMA foreign_keys = ON') -await deduce_implications(db) -await deduce_all_properties(db) +await deduce_category_implications(db) +await deduce_category_properties(db) + +await deduce_functor_implications(db) +await deduce_functor_properties(db) diff --git a/scripts/implication.utils.ts b/scripts/implication.utils.ts deleted file mode 100644 index c0430dc0..00000000 --- a/scripts/implication.utils.ts 
+++ /dev/null @@ -1,80 +0,0 @@ -import { Transaction } from '@libsql/client' - -export type NormalizedImplication = { - id: string - assumptions: Set - conclusion: string - relations: Record -} - -export async function get_normalized_implications( - tx: Transaction, -): Promise { - const res = await tx.execute(` - SELECT - v.id, - v.assumptions, - v.conclusions, - v.is_equivalence, - ( - SELECT json_group_object(p.id, p.relation) - FROM properties p - WHERE p.id IN ( - SELECT value FROM json_each(v.assumptions) - UNION - SELECT value FROM json_each(v.conclusions) - ) - ) AS relations - FROM implications_view v - `) - - const all_implications_db = res.rows as unknown as { - id: string - assumptions: string - conclusions: string - is_equivalence: number - relations: string - }[] - - const implications: NormalizedImplication[] = [] - - for (const impl of all_implications_db) { - const assumptions: string[] = JSON.parse(impl.assumptions) - const conclusions: string[] = JSON.parse(impl.conclusions) - const relations: Record = JSON.parse(impl.relations) - - for (const conclusion of conclusions) { - implications.push({ - id: impl.id, - assumptions: new Set(assumptions), - conclusion, - relations, - }) - } - - if (impl.is_equivalence) { - for (const assumption of assumptions) { - implications.push({ - id: impl.id, - assumptions: new Set(conclusions), - conclusion: assumption, - relations, - }) - } - } - } - - return implications -} - -export function get_assumption_string(implication: NormalizedImplication): string { - const { assumptions, relations } = implication - return Array.from(assumptions) - .map((assumption) => `${relations[assumption]} ${assumption}`) - .join(' and ') -} - -export function get_conclusion_string(implication: NormalizedImplication): string { - const { conclusion, relations } = implication - return `${relations[conclusion]} ${conclusion}` -} diff --git a/scripts/shared.ts b/scripts/shared.ts new file mode 100644 index 00000000..883a13c1 --- 
/dev/null +++ b/scripts/shared.ts @@ -0,0 +1,36 @@ +export function are_equal_sets(a: Set, b: Set) { + return a.size === b.size && [...a].every((el) => b.has(el)) +} + +export function is_subset(a: Set, b: Set, exception?: T) { + for (const x of a) { + if (x !== exception && !b.has(x)) return false + } + return true +} + +type NormalizedImplication = { + id: string + assumptions: Set + conclusion: string + relations: Record +} + +export type NormalizedCategoryImplication = NormalizedImplication + +export type NormalizedFunctorImplication = NormalizedImplication & { + source_assumptions: Set + target_assumptions: Set +} + +export function get_assumption_string(implication: NormalizedImplication): string { + const { assumptions, relations } = implication + return Array.from(assumptions) + .map((assumption) => `${relations[assumption]} ${assumption}`) + .join(' and ') +} + +export function get_conclusion_string(implication: NormalizedImplication): string { + const { conclusion, relations } = implication + return `${relations[conclusion]} ${conclusion}` +} diff --git a/scripts/utils.ts b/scripts/utils.ts deleted file mode 100644 index d7d3fe54..00000000 --- a/scripts/utils.ts +++ /dev/null @@ -1,3 +0,0 @@ -export function are_equal_sets(a: Set, b: Set) { - return a.size === b.size && [...a].every((el) => b.has(el)) -} From 2bec0aa30c419c34eba947cfec8c65fb88f26cdc Mon Sep 17 00:00:00 2001 From: Script Raccoon Date: Tue, 31 Mar 2026 22:21:01 +0200 Subject: [PATCH 4/8] structure selector, support for functor pages in navigation --- .../{Heading.svelte => Header.svelte} | 48 +++++++--- src/components/Nav.svelte | 51 +++++++++- src/components/NavMobile.svelte | 95 ++++++++++++------- src/components/StructureSelector.svelte | 55 +++++++++++ src/lib/client/config.ts | 1 + src/lib/commons/types.ts | 2 + src/routes/+layout.svelte | 36 +++++-- src/routes/app.css | 6 +- 8 files changed, 234 insertions(+), 60 deletions(-) rename src/components/{Heading.svelte => Header.svelte} 
(89%) create mode 100644 src/components/StructureSelector.svelte create mode 100644 src/lib/client/config.ts diff --git a/src/components/Heading.svelte b/src/components/Header.svelte similarity index 89% rename from src/components/Heading.svelte rename to src/components/Header.svelte index 1873c48d..578700de 100644 --- a/src/components/Heading.svelte +++ b/src/components/Header.svelte @@ -1,12 +1,15 @@
@@ -29,6 +32,10 @@ +
+ +
+ diff --git a/src/routes/functor-implications/+page.server.ts b/src/routes/functor-implications/+page.server.ts new file mode 100644 index 00000000..8fc1e87a --- /dev/null +++ b/src/routes/functor-implications/+page.server.ts @@ -0,0 +1,31 @@ +import { render_nested_formulas } from '$lib/server/rendering' +import { query } from '$lib/server/db' +import sql from 'sql-template-tag' +import { error } from '@sveltejs/kit' +import type { FunctorImplicationDB, FunctorImplicationDisplay } from '$lib/commons/types' +import { display_functor_implication } from '$lib/server/utils' + +export const prerender = true + +export const load = async () => { + const { rows, err } = await query(sql` + SELECT + id, + is_equivalence, + reason, + assumptions, + conclusions, + source_assumptions, + target_assumptions + FROM functor_implications_view + ORDER BY lower(assumptions) || ' ' || lower(conclusions) + `) + + if (err) error(500, 'Could not load implications') + + const implications: FunctorImplicationDisplay[] = rows.map( + display_functor_implication, + ) + + return render_nested_formulas({ implications }) +} diff --git a/src/routes/functor-implications/+page.svelte b/src/routes/functor-implications/+page.svelte new file mode 100644 index 00000000..ba44d484 --- /dev/null +++ b/src/routes/functor-implications/+page.svelte @@ -0,0 +1,26 @@ + + + + +

List of implications of functors

+ + + + +

+ {pluralize(data.implications.length, { + one: 'Found {count} implication', + other: 'Found {count} implications', + })} +

+ + diff --git a/src/routes/functor-properties/+page.server.ts b/src/routes/functor-properties/+page.server.ts new file mode 100644 index 00000000..647427c9 --- /dev/null +++ b/src/routes/functor-properties/+page.server.ts @@ -0,0 +1,16 @@ +import type { PropertyShort } from '$lib/commons/types' +import { query } from '$lib/server/db' +import { error } from '@sveltejs/kit' +import sql from 'sql-template-tag' + +export const load = async () => { + const { rows: properties, err } = await query(sql` + SELECT id, relation + FROM functor_properties + ORDER BY lower(id) + `) + + if (err) error(500, 'Could not load properties') + + return { properties } +} diff --git a/src/routes/functor-properties/+page.svelte b/src/routes/functor-properties/+page.svelte new file mode 100644 index 00000000..ee5cf1ba --- /dev/null +++ b/src/routes/functor-properties/+page.svelte @@ -0,0 +1,25 @@ + + + + +

Properties of Functors

+ + + +

+ {pluralize(data.properties.length, { + one: 'Found {count} property', + other: 'Found {count} properties', + })} +

+ + diff --git a/src/routes/functor-property/[id]/+page.server.ts b/src/routes/functor-property/[id]/+page.server.ts new file mode 100644 index 00000000..9004f9c6 --- /dev/null +++ b/src/routes/functor-property/[id]/+page.server.ts @@ -0,0 +1,107 @@ +import { decode_property_ID } from '$lib/commons/property.url' +import type { + FunctorImplicationDB, + FunctorImplicationDisplay, + FunctorPropertyDB, + FunctorShort, +} from '$lib/commons/types' +import { batch } from '$lib/server/db' +import { render_nested_formulas } from '$lib/server/rendering' +import { display_functor_implication, display_functor_property } from '$lib/server/utils' +import { error } from '@sveltejs/kit' +import sql from 'sql-template-tag' + +export const load = async (event) => { + const id = decode_property_ID(event.params.id) + + const { results, err } = await batch< + [ + FunctorPropertyDB, + FunctorImplicationDB, + FunctorShort & { is_satisfied: number }, + FunctorShort, + ] + >([ + // basic information + sql` + SELECT + id, + relation, + description, + nlab_link, + invariant_under_equivalences, + dual_property_id + FROM + functor_properties + WHERE id = ${id} + `, + // relevant implications + sql` + SELECT + id, + is_equivalence, + reason, + assumptions, + source_assumptions, + target_assumptions, + conclusions, + dualized_from + FROM functor_implications_view + WHERE + EXISTS ( + SELECT 1 + FROM json_each(conclusions) + WHERE value = ${id} + ) + OR + EXISTS ( + SELECT 1 + FROM json_each(assumptions) + WHERE value = ${id} + ) + ORDER BY lower(assumptions) || ' ' || lower(conclusions) + `, + // known functors + sql` + SELECT f.id, f.name, fp.is_satisfied + FROM functor_property_assignments fp + INNER JOIN functors f ON f.id = fp.functor_id + WHERE fp.property_id = ${id} + ORDER BY lower(f.name) + `, + // unknown functors + sql` + SELECT f.id, f.name + FROM functors f + LEFT JOIN functor_property_assignments fp + ON fp.functor_id = f.id + AND fp.property_id = ${id} + WHERE + fp.property_id IS 
NULL + ORDER BY lower(f.name) + `, + ]) + + if (err) error(500, 'Could not load properties') + + const [properties, relevant_implications_db, known_functors, unknown_functors] = + results + + if (!properties.length) error(404, `There is no property with ID '${id}'`) + + const property = display_functor_property(properties[0]) + + const relevant_implications: FunctorImplicationDisplay[] = + relevant_implications_db.map(display_functor_implication) + + const examples = known_functors.filter((f) => f.is_satisfied) + const counterexamples = known_functors.filter((f) => !f.is_satisfied) + + return render_nested_formulas({ + property, + relevant_implications, + examples, + counterexamples, + unknown_functors, + }) +} diff --git a/src/routes/functor-property/[id]/+page.svelte b/src/routes/functor-property/[id]/+page.svelte new file mode 100644 index 00000000..6c70863b --- /dev/null +++ b/src/routes/functor-property/[id]/+page.svelte @@ -0,0 +1,89 @@ + + + + +

{property.id}

+ +

+ {@html property.description} + + {#if property.invariant_under_equivalences === false} + Warning: This property is not invariant under equivalences. + {/if} +

+ +{#if property.dual_property_id || property.nlab_link} +
    + {#if property.dual_property_id} +
  • + Dual property: + {property.dual_property_id} + {#if property.dual_property_id === property.id} + (self-dual) + {/if} +
  • + {/if} + + {#if property.nlab_link} +
  • + nLab Link +
  • + {/if} +
+{/if} + +

Relevant implications

+ + + +

Examples

+ +

+ {pluralize(data.examples.length, { + one: 'There is {count} functor with this property.', + other: 'There are {count} functors with this property.', + })} +

+ + + +

Counterexamples

+ +

+ {pluralize(data.counterexamples.length, { + one: 'There is {count} functor without this property.', + other: 'There are {count} functors without this property.', + })} +

+ + + +

Unknown

+ +

+ {pluralize(data.unknown_functors.length, { + one: 'There is {count} functor for which the database has no information on whether it satisfies this property.', + other: 'There are {count} functors for which the database has no information on whether they satisfy this property.', + })} + {#if data.unknown_functors.length > 0} + Please help us fill in the gaps by + contributing to this project. + {/if} +

+ + diff --git a/src/routes/functor-search/+page.server.ts b/src/routes/functor-search/+page.server.ts new file mode 100644 index 00000000..a648c347 --- /dev/null +++ b/src/routes/functor-search/+page.server.ts @@ -0,0 +1,116 @@ +import { decode_property_ID } from '$lib/commons/property.url' +import type { FunctorShort } from '$lib/commons/types' +import { query } from '$lib/server/db' +import { error } from '@sveltejs/kit' +import sql from 'sql-template-tag' +import { SEARCH_SEPARATOR } from '$lib/commons/search.config' +import { to_placeholders } from '$lib/server/utils' + +export const prerender = false + +export const load = async (event) => { + const { rows: all_properties_objects, err: err_all } = await query<{ + id: string + dual_property_id: string | null + }>(sql` + SELECT id, dual_property_id FROM functor_properties ORDER BY lower(id) + `) + + if (err_all) error(500, 'Failed to load properties') + + const satisfied_query = event.url.searchParams.get('satisfied') + const unsatisfied_query = event.url.searchParams.get('unsatisfied') + + const all_properties = all_properties_objects.map(({ id }) => id) + + if (!satisfied_query && !unsatisfied_query) { + return { + is_search: false, + all_properties, + satisfied_properties: [], + unsatisfied_properties: [], + found_functors: [], + dual_satisfied_properties: [], + dual_unsatisfied_properties: [], + dual_search_available: false, + } + } + + const dual_properties_dict: Record = {} + for (const row of all_properties_objects) { + dual_properties_dict[row.id] = row.dual_property_id + } + + const satisfied_properties = satisfied_query + ? satisfied_query.split(SEARCH_SEPARATOR).map(decode_property_ID) + : [] + + const unsatisfied_properties = unsatisfied_query + ? 
unsatisfied_query.split(SEARCH_SEPARATOR).map(decode_property_ID) + : [] + + const dual_satisfied_properties = satisfied_properties.map( + (p) => dual_properties_dict[p], + ) + + const dual_unsatisfied_properties = unsatisfied_properties.map( + (p) => dual_properties_dict[p], + ) + + const dual_search_available = + dual_satisfied_properties.every(Boolean) && + dual_unsatisfied_properties.every(Boolean) + + const all_props = satisfied_properties.concat(unsatisfied_properties) + + const search_query = ` + SELECT f.id, f.name FROM functors f + INNER JOIN functor_property_assignments fp ON fp.functor_id = f.id + WHERE property_id IN (${to_placeholders(all_props)}) + GROUP BY functor_id + HAVING + SUM ( + CASE + WHEN + property_id IN (${to_placeholders(satisfied_properties)}) + AND is_satisfied = TRUE + THEN 1 + ELSE 0 + END + ) = ${satisfied_properties.length} + AND + SUM( + CASE + WHEN + property_id IN (${to_placeholders(unsatisfied_properties)}) + AND is_satisfied = FALSE + THEN 1 + ELSE 0 + END + ) = ${unsatisfied_properties.length} + ORDER BY lower(f.name) + ` + + const { rows: found_functors, err } = await query({ + sql: search_query, + values: [...all_props, ...satisfied_properties, ...unsatisfied_properties], + }) + + if (err) error(500, 'Search failed') + + event.setHeaders({ + // shared cache for 30min + 'cache-control': 'public, max-age=0, s-maxage=1800', + }) + + return { + is_search: true, + all_properties, + satisfied_properties, + unsatisfied_properties, + found_functors, + dual_satisfied_properties, + dual_unsatisfied_properties, + dual_search_available, + } +} diff --git a/src/routes/functor-search/+page.svelte b/src/routes/functor-search/+page.svelte new file mode 100644 index 00000000..d968bd12 --- /dev/null +++ b/src/routes/functor-search/+page.svelte @@ -0,0 +1,118 @@ + + + + +

Property combo search

+ +

+ Search for functors with certain properties while excluding others. For example, you + can look + for functors that are continuous but not cocontinuous. +

+ +
+ + + + + +
+ +{#if data.is_search} +
+

Results

+ +

+ {pluralize(data.found_functors.length, { + one: 'Found {count} functor', + other: 'Found {count} functors', + })} +

+ + + {#if data.dual_search_available} +

+ All selected properties have a dual, so you may perform the + + dual search + . +

+ {/if} +
+{/if} + + diff --git a/src/routes/functor/[id]/+page.server.ts b/src/routes/functor/[id]/+page.server.ts new file mode 100644 index 00000000..62245b91 --- /dev/null +++ b/src/routes/functor/[id]/+page.server.ts @@ -0,0 +1,102 @@ +import type { + FunctorDB, + FunctorPropertyAssignment, + FunctorPropertyAssignmentDB, + FunctorPropertyShort, +} from '$lib/commons/types' +import { batch } from '$lib/server/db' +import { render_nested_formulas } from '$lib/server/rendering' +import { error } from '@sveltejs/kit' +import sql from 'sql-template-tag' + +// remove code duplication with category detail page + +export const load = async (event) => { + const id = event.params.id + + const { results, err } = await batch< + [ + FunctorDB, + FunctorPropertyAssignmentDB & { is_satisfied: number }, + FunctorPropertyShort, + ] + >([ + sql` + SELECT + f.id, + f.name, + f.source, + f.target, + f.description, + f.nlab_link, + cs.name as source_name, + ct.name as target_name + FROM functors f + INNER JOIN categories cs ON cs.id = f.source + INNER JOIN categories ct ON ct.id = f.target + WHERE f.id = ${id} + `, + // properties + sql` + SELECT + fp.property_id AS id, + fp.is_satisfied, + fp.reason, + fp.is_deduced, + CASE + WHEN fp.is_satisfied = TRUE THEN p.relation + ELSE r.negation + END AS relation + FROM functor_property_assignments fp + INNER JOIN functor_properties p ON p.id = fp.property_id + INNER JOIN relations r ON r.relation = p.relation + WHERE fp.functor_id = ${id} + ORDER BY fp.position, lower(fp.property_id) + `, + // unknown properties + sql` + SELECT + p.id, + p.relation + FROM functor_properties p + WHERE NOT EXISTS ( + SELECT 1 FROM functor_property_assignments + WHERE functor_id = ${id} AND property_id = p.id + ) + ORDER BY lower(p.id) + `, + ]) + + if (err) error(500, 'Could not load functor') + + const [functors, properties_db, unknown_properties] = results + + if (!functors.length) error(404, `Could not find functor with ID '${id}'`) + + const [functor] = functors 
+ + const satisfied_properties: FunctorPropertyAssignment[] = properties_db + .filter((obj) => obj.is_satisfied) + .map((p) => ({ + id: p.id, + reason: p.reason, + is_deduced: Boolean(p.is_deduced), + relation: p.relation, + })) + + const unsatisfied_properties: FunctorPropertyAssignment[] = properties_db + .filter((obj) => !obj.is_satisfied) + .map((p) => ({ + id: p.id, + reason: p.reason, + is_deduced: Boolean(p.is_deduced), + relation: p.relation, + })) + + return render_nested_formulas({ + functor, + satisfied_properties, + unsatisfied_properties, + unknown_properties, + }) +} diff --git a/src/routes/functor/[id]/+page.svelte b/src/routes/functor/[id]/+page.svelte new file mode 100644 index 00000000..6ed25525 --- /dev/null +++ b/src/routes/functor/[id]/+page.svelte @@ -0,0 +1,129 @@ + + + + +

{functor.name}

+ +
+ + +

{@html functor.description}

+
+ + +{#key functor.id} +
+
+

Satisfied Properties

+ + {#if category_detail_level.value === 'all'} +

Properties from the database

+ !p.is_deduced)} + type="functor" + /> + +

Deduced properties

+ p.is_deduced)} + type="functor" + /> + {:else if category_detail_level.value === 'merged'} + + {:else if category_detail_level.value === 'basic'} +

+ Properties from the database; further properties can be deduced. +

+ !p.is_deduced)} + type="functor" + /> + {/if} +
+ +
+

Unsatisfied Properties

+ + {#if category_detail_level.value === 'all'} +

Properties from the database

+ !p.is_deduced)} + type="functor" + /> + +

Deduced properties*

+ p.is_deduced)} + type="functor" + /> + +

*This also uses the deduced satisfied properties.

+ {:else if category_detail_level.value === 'merged'} + + {:else if category_detail_level.value === 'basic'} +

+ Properties from the database; further properties can be deduced. +

+ !p.is_deduced)} + type="functor" + /> + {/if} +
+
+ +
+

Unknown properties

+ + {#if data.unknown_properties.length} +

+ For these properties the database currently doesn't have an answer as to whether they + are satisfied or not. Please help by contributing the + data! +

+ {/if} + + +
+{/key} + + diff --git a/src/routes/functors/+page.server.ts b/src/routes/functors/+page.server.ts new file mode 100644 index 00000000..38d47a53 --- /dev/null +++ b/src/routes/functors/+page.server.ts @@ -0,0 +1,16 @@ +import type { FunctorShort } from '$lib/commons/types' +import { query } from '$lib/server/db' +import { error } from '@sveltejs/kit' +import sql from 'sql-template-tag' + +export const prerender = true + +export const load = async () => { + const { rows: functors, err } = await query( + sql`SELECT id, name FROM functors ORDER BY lower(name)`, + ) + + if (err) error(500, 'Functors could not be loaded') + + return { functors } +} diff --git a/src/routes/functors/+page.svelte b/src/routes/functors/+page.svelte new file mode 100644 index 00000000..2e0ac98a --- /dev/null +++ b/src/routes/functors/+page.svelte @@ -0,0 +1,30 @@ + + + + +

List of functors

+ + + +

+ + + The functor application is still in its early stages. More functors will be added soon. +

+ +

+ {pluralize(data.functors.length, { + one: 'Found {count} functor', + other: 'Found {count} functors', + })} +

+ + From e87218199f6c48b0a72502fac0a93b9ec1039dcd Mon Sep 17 00:00:00 2001 From: Script Raccoon Date: Tue, 31 Mar 2026 22:25:09 +0200 Subject: [PATCH 6/8] show missing functor data --- src/components/FunctorList.svelte | 9 ++++++++- src/routes/missing/+page.server.ts | 17 ++++++++++++++++- src/routes/missing/+page.svelte | 12 ++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/src/components/FunctorList.svelte b/src/components/FunctorList.svelte index b40c8299..851e6d0d 100644 --- a/src/components/FunctorList.svelte +++ b/src/components/FunctorList.svelte @@ -4,7 +4,7 @@ import type { FunctorShort } from '$lib/commons/types' type Props = { - functors: FunctorShort[] + functors: (FunctorShort & { count?: number })[] } let { functors }: Props = $props() @@ -17,6 +17,9 @@ {functor.name} + {#if functor.count !== undefined} + ({functor.count}) + {/if} {/each} @@ -28,4 +31,8 @@ ul { margin-block: 1rem; } + + .count { + color: var(--secondary-text-color); + } diff --git a/src/routes/missing/+page.server.ts b/src/routes/missing/+page.server.ts index b1e12a0a..222a18d9 100644 --- a/src/routes/missing/+page.server.ts +++ b/src/routes/missing/+page.server.ts @@ -1,6 +1,6 @@ import { batch } from '$lib/server/db' import sql from 'sql-template-tag' -import type { CategoryShort } from '$lib/commons/types' +import type { CategoryShort, FunctorShort } from '$lib/commons/types' import { error } from '@sveltejs/kit' import { get_missing_combinations } from '$lib/server/consistency' @@ -13,6 +13,7 @@ export const load = async () => { CategoryShort & { count: number }, CategoryShort & { count: number }, CategoryPairShort, + FunctorShort & { count: number }, ] >([ // missing special morphisms @@ -69,6 +70,18 @@ export const load = async () => { END ) = 0; `, + // functors with unknown properties + sql` + SELECT f.id, f.name, COUNT(*) AS count + FROM functors f + INNER JOIN functor_properties p + LEFT JOIN functor_property_assignments fp + ON fp.functor_id = f.id 
+ AND fp.property_id = p.id + WHERE fp.property_id IS NULL + GROUP BY f.id + ORDER BY lower(f.name); + `, ]) if (err) error(500, 'Failed to load data') @@ -78,6 +91,7 @@ export const load = async () => { categories_with_unknown_properties, categories_with_unreasoned_properties, undistinguishable_category_pairs, + functors_with_unknown_properties, ] = results const missing_combinations = await get_missing_combinations() @@ -87,6 +101,7 @@ export const load = async () => { categories_with_missing_morphisms, categories_with_unreasoned_properties, undistinguishable_category_pairs, + functors_with_unknown_properties, missing_combinations, } } diff --git a/src/routes/missing/+page.svelte b/src/routes/missing/+page.svelte index 3eaffc38..0b3732bf 100644 --- a/src/routes/missing/+page.svelte +++ b/src/routes/missing/+page.svelte @@ -1,5 +1,6 @@