From 014f66b72c78ea0948b2d44d0c7875ac373ae2c6 Mon Sep 17 00:00:00 2001 From: Michael Zargham Date: Sat, 21 Mar 2026 00:12:34 -0400 Subject: [PATCH 1/5] make docs --- mkdocs.yml | 7 + site/404.html | 676 ++ site/api/analysis/index.html | 5255 ++++++++++++ site/api/clique/index.html | 1843 ++++ site/api/codecs/index.html | 1803 ++++ site/api/diff/index.html | 3165 +++++++ site/api/exceptions/index.html | 1089 +++ site/api/filtration/index.html | 2449 ++++++ site/api/graph/index.html | 7511 +++++++++++++++++ site/api/io/index.html | 1411 ++++ site/api/schema/index.html | 6762 +++++++++++++++ site/api/viz/index.html | 3275 +++++++ site/assets/_mkdocstrings.css | 237 + site/assets/images/favicon.png | Bin 0 -> 1870 bytes .../assets/javascripts/bundle.79ae519e.min.js | 16 + .../javascripts/bundle.79ae519e.min.js.map | 7 + .../javascripts/lunr/min/lunr.ar.min.js | 1 + .../javascripts/lunr/min/lunr.da.min.js | 18 + .../javascripts/lunr/min/lunr.de.min.js | 18 + .../javascripts/lunr/min/lunr.du.min.js | 18 + .../javascripts/lunr/min/lunr.el.min.js | 1 + .../javascripts/lunr/min/lunr.es.min.js | 18 + .../javascripts/lunr/min/lunr.fi.min.js | 18 + .../javascripts/lunr/min/lunr.fr.min.js | 18 + .../javascripts/lunr/min/lunr.he.min.js | 1 + .../javascripts/lunr/min/lunr.hi.min.js | 1 + .../javascripts/lunr/min/lunr.hu.min.js | 18 + .../javascripts/lunr/min/lunr.hy.min.js | 1 + .../javascripts/lunr/min/lunr.it.min.js | 18 + .../javascripts/lunr/min/lunr.ja.min.js | 1 + .../javascripts/lunr/min/lunr.jp.min.js | 1 + .../javascripts/lunr/min/lunr.kn.min.js | 1 + .../javascripts/lunr/min/lunr.ko.min.js | 1 + .../javascripts/lunr/min/lunr.multi.min.js | 1 + .../javascripts/lunr/min/lunr.nl.min.js | 18 + .../javascripts/lunr/min/lunr.no.min.js | 18 + .../javascripts/lunr/min/lunr.pt.min.js | 18 + .../javascripts/lunr/min/lunr.ro.min.js | 18 + .../javascripts/lunr/min/lunr.ru.min.js | 18 + .../javascripts/lunr/min/lunr.sa.min.js | 1 + .../lunr/min/lunr.stemmer.support.min.js | 1 + 
.../javascripts/lunr/min/lunr.sv.min.js | 18 + .../javascripts/lunr/min/lunr.ta.min.js | 1 + .../javascripts/lunr/min/lunr.te.min.js | 1 + .../javascripts/lunr/min/lunr.th.min.js | 1 + .../javascripts/lunr/min/lunr.tr.min.js | 18 + .../javascripts/lunr/min/lunr.vi.min.js | 1 + .../javascripts/lunr/min/lunr.zh.min.js | 1 + site/assets/javascripts/lunr/tinyseg.js | 206 + site/assets/javascripts/lunr/wordcut.js | 6708 +++++++++++++++ .../workers/search.2c215733.min.js | 42 + .../workers/search.2c215733.min.js.map | 7 + site/assets/stylesheets/main.484c7ddc.min.css | 1 + .../stylesheets/main.484c7ddc.min.css.map | 1 + .../stylesheets/palette.ab4e12ef.min.css | 1 + .../stylesheets/palette.ab4e12ef.min.css.map | 1 + site/index.html | 916 ++ site/objects.inv | Bin 0 -> 1757 bytes site/ontology/index.html | 872 ++ site/search/search_index.json | 1 + site/sitemap.xml | 51 + site/sitemap.xml.gz | Bin 0 -> 286 bytes 62 files changed, 44600 insertions(+) create mode 100644 site/404.html create mode 100644 site/api/analysis/index.html create mode 100644 site/api/clique/index.html create mode 100644 site/api/codecs/index.html create mode 100644 site/api/diff/index.html create mode 100644 site/api/exceptions/index.html create mode 100644 site/api/filtration/index.html create mode 100644 site/api/graph/index.html create mode 100644 site/api/io/index.html create mode 100644 site/api/schema/index.html create mode 100644 site/api/viz/index.html create mode 100644 site/assets/_mkdocstrings.css create mode 100644 site/assets/images/favicon.png create mode 100644 site/assets/javascripts/bundle.79ae519e.min.js create mode 100644 site/assets/javascripts/bundle.79ae519e.min.js.map create mode 100644 site/assets/javascripts/lunr/min/lunr.ar.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.da.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.de.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.du.min.js create mode 100644 
site/assets/javascripts/lunr/min/lunr.el.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.es.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.fi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.fr.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.he.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.hi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.hu.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.hy.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.it.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ja.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.jp.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.kn.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ko.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.multi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.nl.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.no.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.pt.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ro.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ru.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.sa.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.stemmer.support.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.sv.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.ta.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.te.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.th.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.tr.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.vi.min.js create mode 100644 site/assets/javascripts/lunr/min/lunr.zh.min.js create mode 100644 site/assets/javascripts/lunr/tinyseg.js create mode 100644 site/assets/javascripts/lunr/wordcut.js create 
mode 100644 site/assets/javascripts/workers/search.2c215733.min.js create mode 100644 site/assets/javascripts/workers/search.2c215733.min.js.map create mode 100644 site/assets/stylesheets/main.484c7ddc.min.css create mode 100644 site/assets/stylesheets/main.484c7ddc.min.css.map create mode 100644 site/assets/stylesheets/palette.ab4e12ef.min.css create mode 100644 site/assets/stylesheets/palette.ab4e12ef.min.css.map create mode 100644 site/index.html create mode 100644 site/objects.inv create mode 100644 site/ontology/index.html create mode 100644 site/search/search_index.json create mode 100644 site/sitemap.xml create mode 100644 site/sitemap.xml.gz diff --git a/mkdocs.yml b/mkdocs.yml index 96241e4..95b7ebe 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -25,4 +25,11 @@ nav: - API Reference: - Schema: api/schema.md - Graph: api/graph.md + - Visualization: api/viz.md + - Algebraic Topology: api/analysis.md + - Clique Inference: api/clique.md + - Filtrations: api/filtration.md + - Diffs & Sequences: api/diff.md + - File I/O: api/io.md + - Codecs: api/codecs.md - Exceptions: api/exceptions.md diff --git a/site/404.html b/site/404.html new file mode 100644 index 0000000..f2b16b3 --- /dev/null +++ b/site/404.html @@ -0,0 +1,676 @@ + + + + + + + + + + + + + + + + + + + + + + knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ +

404 - Not found

+ +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/analysis/index.html b/site/api/analysis/index.html new file mode 100644 index 0000000..9a64e30 --- /dev/null +++ b/site/api/analysis/index.html @@ -0,0 +1,5255 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Algebraic Topology - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+ +
+ + + + + + + + +

Algebraic Topology

+ +
+ + + + +
+ +

knowledgecomplex.analysis — Algebraic topology over knowledge complexes.

+

Boundary matrices, Betti numbers, Hodge Laplacians, edge PageRank, +and Hodge decomposition of edge flows.

+

Requires: numpy, scipy (install with pip install knowledgecomplex[analysis]).

+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ BoundaryMatrices + + + + dataclass + + +

+ + +
+ + + +

Boundary operators and element-to-index mappings.

+ + + + + + + + +
+ Source code in knowledgecomplex/analysis.py +
27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
@dataclass
+class BoundaryMatrices:
+    """Boundary operators and element-to-index mappings."""
+    B1: sp.csr_matrix  # (n_vertices, n_edges)
+    B2: sp.csr_matrix  # (n_edges, n_faces)
+    vertex_index: dict[str, int]
+    edge_index: dict[str, int]
+    face_index: dict[str, int]
+    index_vertex: dict[int, str]
+    index_edge: dict[int, str]
+    index_face: dict[int, str]
+
+    def __repr__(self) -> str:
+        return (f"BoundaryMatrices(vertices={len(self.vertex_index)}, "
+                f"edges={len(self.edge_index)}, faces={len(self.face_index)})")
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ HodgeDecomposition + + + + dataclass + + +

+ + +
+ + + +

Orthogonal decomposition of an edge flow.

+ + + + + + + + +
+ Source code in knowledgecomplex/analysis.py +
44
+45
+46
+47
+48
+49
@dataclass
+class HodgeDecomposition:
+    """Orthogonal decomposition of an edge flow."""
+    gradient: np.ndarray   # im(B1ᵀ) — flows from vertices
+    curl: np.ndarray       # im(B2) — flows from faces
+    harmonic: np.ndarray   # ker(L₁) — topological cycles
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ EdgeInfluence + + + + dataclass + + +

+ + +
+ + + +

Influence measures for an edge's PageRank vector.

+ + + + + + + + +
+ Source code in knowledgecomplex/analysis.py +
52
+53
+54
+55
+56
+57
+58
+59
@dataclass
+class EdgeInfluence:
+    """Influence measures for an edge's PageRank vector."""
+    edge_id: str
+    spread: float             # ||v||₂ / ||v||₁
+    absolute_influence: float  # ||v||₁
+    penetration: float        # ||v||₂
+    relative_influence: float  # Σv
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ SweepCut + + + + dataclass + + +

+ + +
+ + + +

Result of a vertex sweep cut.

+ + + + + + + + +
+ Source code in knowledgecomplex/analysis.py +
62
+63
+64
+65
+66
+67
+68
+69
+70
+71
@dataclass
+class SweepCut:
+    """Result of a vertex sweep cut."""
+    vertices: set[str]
+    conductance: float
+    volume: int
+    boundary_edges: int
+
+    def __repr__(self) -> str:
+        return f"SweepCut(vertices={len(self.vertices)}, conductance={self.conductance:.4f})"
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ EdgeSweepCut + + + + dataclass + + +

+ + +
+ + + +

Result of an edge sweep cut.

+ + + + + + + + +
+ Source code in knowledgecomplex/analysis.py +
74
+75
+76
+77
+78
+79
+80
+81
+82
@dataclass
+class EdgeSweepCut:
+    """Result of an edge sweep cut."""
+    edges: set[str]
+    conductance: float
+    volume: int
+
+    def __repr__(self) -> str:
+        return f"EdgeSweepCut(edges={len(self.edges)}, conductance={self.conductance:.4f})"
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ HodgeAnalysisResults + + + + dataclass + + +

+ + +
+ + + +

Complete Hodge analysis output.

+ + + + + + + + +
+ Source code in knowledgecomplex/analysis.py +
85
+86
+87
+88
+89
+90
+91
+92
+93
+94
+95
+96
+97
+98
@dataclass
+class HodgeAnalysisResults:
+    """Complete Hodge analysis output."""
+    betti: list[int]
+    euler_characteristic: int
+    boundary_matrices: BoundaryMatrices
+    laplacian: sp.csr_matrix
+    pagerank: np.ndarray  # (n_edges, n_edges)
+    decompositions: dict[str, HodgeDecomposition]
+    influences: dict[str, EdgeInfluence]
+
+    def __repr__(self) -> str:
+        ne = len(self.decompositions)
+        return f"HodgeAnalysisResults(betti={self.betti}, edges={ne})"
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ + +
+ + +

+ boundary_matrices(kc) + +

+ + +
+ +

Build the boundary operator matrices B1 (∂₁) and B2 (∂₂).

+

B1 is (n_vertices × n_edges) with entries ±1 encoding which vertices +bound each edge. B2 is (n_edges × n_faces) with entries ±1 encoding +which edges bound each face.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ BoundaryMatrices + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
+221
+222
+223
+224
+225
+226
+227
+228
+229
+230
+231
+232
+233
+234
def boundary_matrices(kc: "KnowledgeComplex") -> BoundaryMatrices:
+    """
+    Build the boundary operator matrices B1 (∂₁) and B2 (∂₂).
+
+    B1 is (n_vertices × n_edges) with entries ±1 encoding which vertices
+    bound each edge. B2 is (n_edges × n_faces) with entries ±1 encoding
+    which edges bound each face.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    BoundaryMatrices
+    """
+    # Enumerate elements by dimension
+    vertices = sorted(kc.skeleton(0) - kc.skeleton(1))
+    # skeleton(0) = vertices, skeleton(1) = vertices + edges
+    all_v = set()
+    all_e = set()
+    all_f = set()
+    for eid in kc.element_ids():
+        elem = kc.element(eid)
+        kind = kc._schema._types.get(elem.type, {}).get("kind")
+        if kind == "vertex":
+            all_v.add(eid)
+        elif kind == "edge":
+            all_e.add(eid)
+        elif kind == "face":
+            all_f.add(eid)
+
+    vertices = sorted(all_v)
+    edges = sorted(all_e)
+    faces = sorted(all_f)
+
+    vertex_index = {v: i for i, v in enumerate(vertices)}
+    edge_index = {e: i for i, e in enumerate(edges)}
+    face_index = {f: i for i, f in enumerate(faces)}
+
+    nv, ne, nf = len(vertices), len(edges), len(faces)
+
+    # B1: vertices × edges
+    # For each edge, find its 2 boundary vertices.
+    # Convention: for edge e = {v_i, v_j} with i < j, B1[i,e] = -1, B1[j,e] = +1
+    rows1, cols1, vals1 = [], [], []
+    for e_id in edges:
+        bnd = sorted(kc.boundary(e_id), key=lambda v: vertex_index.get(v, 0))
+        if len(bnd) == 2:
+            r0 = vertex_index[bnd[0]]
+            r1 = vertex_index[bnd[1]]
+            c = edge_index[e_id]
+            rows1.extend([r0, r1])
+            cols1.extend([c, c])
+            vals1.extend([-1.0, 1.0])
+
+    B1 = sp.csr_matrix(
+        (vals1, (rows1, cols1)), shape=(nv, ne), dtype=np.float64
+    ) if ne > 0 else sp.csr_matrix((nv, 0), dtype=np.float64)
+
+    # B2: edges × faces
+    # For each face, find its 3 boundary edges.
+    # Orientation: assign signs so that ∂₁∘∂₂ = 0.
+    # We pick a consistent orientation per face by walking the vertex cycle.
+    rows2, cols2, vals2 = [], [], []
+    for f_id in faces:
+        bnd_edges = list(kc.boundary(f_id))
+        if len(bnd_edges) == 3:
+            c = face_index[f_id]
+            # Get the vertex sets for each boundary edge
+            edge_verts = {}
+            for be in bnd_edges:
+                edge_verts[be] = kc.boundary(be)
+
+            # Orient: find a vertex ordering (v_a, v_b, v_c) and assign signs
+            # to edges based on whether they agree with the cycle orientation
+            signs = _orient_face(bnd_edges, edge_verts, vertex_index)
+            for be, sign in zip(bnd_edges, signs):
+                rows2.append(edge_index[be])
+                cols2.append(c)
+                vals2.append(sign)
+
+    B2 = sp.csr_matrix(
+        (vals2, (rows2, cols2)), shape=(ne, nf), dtype=np.float64
+    ) if nf > 0 else sp.csr_matrix((ne, 0), dtype=np.float64)
+
+    return BoundaryMatrices(
+        B1=B1, B2=B2,
+        vertex_index=vertex_index,
+        edge_index=edge_index,
+        face_index=face_index,
+        index_vertex={v: k for k, v in vertex_index.items()},
+        index_edge={v: k for k, v in edge_index.items()},
+        index_face={v: k for k, v in face_index.items()},
+    )
+
+
+
+ +
+ +
+ + +

+ betti_numbers(kc) + +

+ + +
+ +

Compute Betti numbers [β₀, β₁, β₂] of the complex.

+

β_k = nullity(∂k) - rank(∂{k+1})

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[int] + +
+

[β₀, β₁, β₂]

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
+313
+314
+315
+316
+317
+318
+319
+320
+321
+322
+323
+324
+325
+326
def betti_numbers(kc: "KnowledgeComplex") -> list[int]:
+    """
+    Compute Betti numbers [β₀, β₁, β₂] of the complex.
+
+    β_k = nullity(∂_k) - rank(∂_{k+1})
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    list[int]
+        [β₀, β₁, β₂]
+    """
+    bm = boundary_matrices(kc)
+    nv = bm.B1.shape[0]
+    ne = bm.B1.shape[1]
+    nf = bm.B2.shape[1]
+
+    rank_B1 = _matrix_rank(bm.B1) if ne > 0 else 0
+    rank_B2 = _matrix_rank(bm.B2) if nf > 0 else 0
+
+    # β₀ = nullity(∂₁) at dimension 0
+    # ∂₀ doesn't exist (or is zero), so β₀ = n_vertices - rank(∂₁)
+    beta0 = nv - rank_B1
+
+    # β₁ = nullity(∂₁) - rank(∂₂) = (n_edges - rank_B1) - rank_B2
+    beta1 = (ne - rank_B1) - rank_B2 if ne > 0 else 0
+
+    # β₂ = nullity(∂₂) - rank(∂₃) = (n_faces - rank_B2) - 0
+    beta2 = nf - rank_B2 if nf > 0 else 0
+
+    return [beta0, beta1, beta2]
+
+
+
+ +
+ +
+ + +

+ euler_characteristic(kc) + +

+ + +
+ +

Compute the Euler characteristic χ = V - E + F.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ int + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
329
+330
+331
+332
+333
+334
+335
+336
+337
+338
+339
+340
+341
+342
def euler_characteristic(kc: "KnowledgeComplex") -> int:
+    """
+    Compute the Euler characteristic χ = V - E + F.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    int
+    """
+    bm = boundary_matrices(kc)
+    return bm.B1.shape[0] - bm.B1.shape[1] + bm.B2.shape[1]
+
+
+
+ +
+ +
+ + +

+ hodge_laplacian(kc, weighted=False, weights=None) + +

+ + +
+ +

Compute the edge Hodge Laplacian L₁.

+

Combinatorial (default): + L₁ = B1ᵀ W₀ B1 + B2 W₂ B2ᵀ

+

where W₀ and W₂ are diagonal simplex weight matrices (identity when +weights is None).

+

Degree-weighted: + L₁ = B1ᵀ D₀⁻¹ W₀ B1 + D₁⁻¹ B2 W₂ B2ᵀ

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ weighted + + bool + +
+

If True, also apply degree normalization.

+
+
+ False +
+ weights + + dict[str, float] + +
+

Map from element IDs to scalar weights. Missing elements default +to 1.0. Vertex weights enter W₀, face weights enter W₂.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ csr_matrix + +
+

(n_edges, n_edges)

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
358
+359
+360
+361
+362
+363
+364
+365
+366
+367
+368
+369
+370
+371
+372
+373
+374
+375
+376
+377
+378
+379
+380
+381
+382
+383
+384
+385
+386
+387
+388
+389
+390
+391
+392
+393
+394
+395
+396
+397
+398
+399
+400
+401
+402
+403
+404
+405
+406
+407
+408
+409
+410
+411
+412
+413
+414
+415
+416
+417
+418
+419
def hodge_laplacian(
+    kc: "KnowledgeComplex",
+    weighted: bool = False,
+    weights: dict[str, float] | None = None,
+) -> sp.csr_matrix:
+    """
+    Compute the edge Hodge Laplacian L₁.
+
+    Combinatorial (default):
+        L₁ = B1ᵀ W₀ B1 + B2 W₂ B2ᵀ
+
+    where W₀ and W₂ are diagonal simplex weight matrices (identity when
+    weights is None).
+
+    Degree-weighted:
+        L₁ = B1ᵀ D₀⁻¹ W₀ B1 + D₁⁻¹ B2 W₂ B2ᵀ
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    weighted : bool
+        If True, also apply degree normalization.
+    weights : dict[str, float], optional
+        Map from element IDs to scalar weights. Missing elements default
+        to 1.0. Vertex weights enter W₀, face weights enter W₂.
+
+    Returns
+    -------
+    scipy.sparse.csr_matrix
+        (n_edges, n_edges)
+    """
+    bm = boundary_matrices(kc)
+    ne = bm.B1.shape[1]
+
+    if ne == 0:
+        return sp.csr_matrix((0, 0), dtype=np.float64)
+
+    W0, _W1, W2 = _weight_matrices(bm, weights)
+
+    if not weighted:
+        down = bm.B1.T @ W0 @ bm.B1
+        up = bm.B2 @ W2 @ bm.B2.T if bm.B2.shape[1] > 0 else sp.csr_matrix((ne, ne), dtype=np.float64)
+        L = (down + up).tocsr()
+        return ((L + L.T) / 2).tocsr()
+    else:
+        # D₀: diagonal vertex degrees
+        vertex_degrees = np.array(np.abs(bm.B1).sum(axis=1)).flatten()
+        vertex_degrees[vertex_degrees == 0] = 1.0
+        D0_inv = sp.diags(1.0 / vertex_degrees, format="csr")
+
+        # D₁: diagonal edge face-degrees
+        if bm.B2.shape[1] > 0:
+            edge_face_degrees = np.array(np.abs(bm.B2).sum(axis=1)).flatten()
+        else:
+            edge_face_degrees = np.zeros(ne)
+        edge_face_degrees[edge_face_degrees == 0] = 1.0
+        D1_inv_sqrt = sp.diags(1.0 / np.sqrt(edge_face_degrees), format="csr")
+
+        down = bm.B1.T @ D0_inv @ W0 @ bm.B1
+        up = D1_inv_sqrt @ bm.B2 @ W2 @ bm.B2.T @ D1_inv_sqrt if bm.B2.shape[1] > 0 else sp.csr_matrix((ne, ne), dtype=np.float64)
+        L = (down + up).tocsr()
+        return ((L + L.T) / 2).tocsr()
+
+
+
+ +
+ +
+ + +

+ edge_pagerank(kc, edge_id, beta=0.1, weighted=False, weights=None) + +

+ + +
+ +

Compute personalized edge PageRank for a single edge.

+

PR_e = (βI + L₁)⁻¹ χ_e

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ edge_id + + str + +
+ +
+
+ required +
+ beta + + float + +
+ +
+
+ 0.1 +
+ weighted + + bool + +
+ +
+
+ False +
+ weights + + dict[str, float] + +
+

Simplex weights (see hodge_laplacian).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

(n_edges,)

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
426
+427
+428
+429
+430
+431
+432
+433
+434
+435
+436
+437
+438
+439
+440
+441
+442
+443
+444
+445
+446
+447
+448
+449
+450
+451
+452
+453
+454
+455
+456
+457
+458
+459
+460
def edge_pagerank(
+    kc: "KnowledgeComplex",
+    edge_id: str,
+    beta: float = 0.1,
+    weighted: bool = False,
+    weights: dict[str, float] | None = None,
+) -> np.ndarray:
+    """
+    Compute personalized edge PageRank for a single edge.
+
+    PR_e = (βI + L₁)⁻¹ χ_e
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    edge_id : str
+    beta : float
+    weighted : bool
+    weights : dict[str, float], optional
+        Simplex weights (see hodge_laplacian).
+
+    Returns
+    -------
+    np.ndarray
+        (n_edges,)
+    """
+    bm = boundary_matrices(kc)
+    L1 = hodge_laplacian(kc, weighted=weighted, weights=weights)
+    ne = L1.shape[0]
+
+    A = beta * sp.eye(ne, format="csr") + L1
+    indicator = np.zeros(ne)
+    indicator[bm.edge_index[edge_id]] = 1.0
+
+    return _solve_spd(A, indicator)
+
+
+
+ +
+ +
+ + +

+ edge_pagerank_all(kc, beta=0.1, weighted=False, weights=None) + +

+ + +
+ +

Compute edge PageRank for all edges via matrix factorization.

+

Factorizes (βI + L₁) once, then solves for each column of the identity. +Equivalent to computing (βI + L₁)⁻¹.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ beta + + float + +
+ +
+
+ 0.1 +
+ weighted + + bool + +
+ +
+
+ False +
+ weights + + dict[str, float] + +
+

Simplex weights (see hodge_laplacian).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ndarray + +
+

(n_edges, n_edges) — column i is the PageRank vector for edge i.

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
463
+464
+465
+466
+467
+468
+469
+470
+471
+472
+473
+474
+475
+476
+477
+478
+479
+480
+481
+482
+483
+484
+485
+486
+487
+488
+489
+490
+491
+492
+493
+494
+495
+496
+497
+498
+499
+500
+501
+502
+503
+504
def edge_pagerank_all(
+    kc: "KnowledgeComplex",
+    beta: float = 0.1,
+    weighted: bool = False,
+    weights: dict[str, float] | None = None,
+) -> np.ndarray:
+    """
+    Compute edge PageRank for all edges via matrix factorization.
+
+    Factorizes (βI + L₁) once, then solves for each column of the identity.
+    Equivalent to computing (βI + L₁)⁻¹.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    beta : float
+    weighted : bool
+    weights : dict[str, float], optional
+        Simplex weights (see hodge_laplacian).
+
+    Returns
+    -------
+    np.ndarray
+        (n_edges, n_edges) — column i is the PageRank vector for edge i.
+    """
+    L1 = hodge_laplacian(kc, weighted=weighted, weights=weights)
+    ne = L1.shape[0]
+
+    if ne == 0:
+        return np.empty((0, 0))
+
+    A = beta * sp.eye(ne, format="csc") + L1.tocsc()
+
+    # Factor once (SPD → LU on sparse, or Cholesky)
+    factor = splu(A)
+    result = np.zeros((ne, ne))
+    for i in range(ne):
+        rhs = np.zeros(ne)
+        rhs[i] = 1.0
+        result[:, i] = factor.solve(rhs)
+
+    return result
+
+
+
+ +
+ +
+ + +

+ hodge_decomposition(kc, flow, weights=None) + +

+ + +
+ +

Decompose an edge flow into gradient + curl + harmonic components.

+

flow = gradient + curl + harmonic

+

where: +- gradient ∈ im(W₀^{1/2} B1ᵀ) — vertex-driven flow +- curl ∈ im(W₂^{1/2} B2) — face-driven circulation +- harmonic ∈ ker(L₁) — topological cycles

+

When weights is None, W₀ and W₂ are identity (standard decomposition).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ flow + + ndarray + +
+

(n_edges,)

+
+
+ required +
+ weights + + dict[str, float] + +
+

Simplex weights. Affects the inner product used for projection.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ HodgeDecomposition + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
519
+520
+521
+522
+523
+524
+525
+526
+527
+528
+529
+530
+531
+532
+533
+534
+535
+536
+537
+538
+539
+540
+541
+542
+543
+544
+545
+546
+547
+548
+549
+550
+551
+552
+553
+554
+555
+556
+557
+558
+559
+560
+561
+562
+563
+564
+565
+566
+567
+568
+569
+570
+571
+572
+573
+574
def hodge_decomposition(
+    kc: "KnowledgeComplex",
+    flow: np.ndarray,
+    weights: dict[str, float] | None = None,
+) -> HodgeDecomposition:
+    """
+    Decompose an edge flow into gradient + curl + harmonic components.
+
+    flow = gradient + curl + harmonic
+
+    where:
+    - gradient ∈ im(W₀^{1/2} B1ᵀ) — vertex-driven flow
+    - curl ∈ im(W₂^{1/2} B2) — face-driven circulation
+    - harmonic ∈ ker(L₁) — topological cycles
+
+    When weights is None, W₀ and W₂ are identity (standard decomposition).
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    flow : np.ndarray
+        (n_edges,)
+    weights : dict[str, float], optional
+        Simplex weights. Affects the inner product used for projection.
+
+    Returns
+    -------
+    HodgeDecomposition
+    """
+    bm = boundary_matrices(kc)
+    W0, _W1, W2 = _weight_matrices(bm, weights)
+
+    # Weighted projection operators
+    # gradient lives in im(B1ᵀ W₀^{1/2}), curl in im(B2 W₂^{1/2})
+    # but for the orthogonal decomposition with weighted inner product,
+    # we project onto im(B1ᵀ) with W₀-weighted inner product on vertices
+    # Practically: project onto im(sqrt(W₀) B1ᵀ) in standard inner product
+    if weights is not None:
+        w0_sqrt = sp.diags(np.sqrt(np.array(W0.diagonal())), format="csr")
+        w2_sqrt = sp.diags(np.sqrt(np.array(W2.diagonal())), format="csr") if W2.shape[0] > 0 else W2
+        # B1.T is (ne × nv), W0_sqrt is (nv × nv) → B1.T @ W0_sqrt is (ne × nv)
+        grad_op = bm.B1.T @ w0_sqrt if bm.B1.shape[1] > 0 else bm.B1.T
+        curl_op = bm.B2 @ w2_sqrt if bm.B2.shape[1] > 0 else bm.B2
+    else:
+        grad_op = bm.B1.T
+        curl_op = bm.B2
+
+    gradient = _project_onto_image(grad_op, flow)
+    curl = _project_onto_image(curl_op, flow)
+    harmonic = flow - gradient - curl
+
+    return HodgeDecomposition(
+        gradient=gradient,
+        curl=curl,
+        harmonic=harmonic,
+    )
+
+
+
+ +
+ +
+ + +

+ edge_influence(edge_id, pr_vector) + +

+ + +
+ +

Compute influence measures from a PageRank vector.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ edge_id + + str + +
+ +
+
+ required +
+ pr_vector + + ndarray + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ EdgeInfluence + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
601
+602
+603
+604
+605
+606
+607
+608
+609
+610
+611
+612
+613
+614
+615
+616
+617
+618
+619
+620
+621
+622
+623
def edge_influence(edge_id: str, pr_vector: np.ndarray) -> EdgeInfluence:
+    """
+    Compute influence measures from a PageRank vector.
+
+    Parameters
+    ----------
+    edge_id : str
+    pr_vector : np.ndarray
+
+    Returns
+    -------
+    EdgeInfluence
+    """
+    l1 = float(np.sum(np.abs(pr_vector)))
+    l2 = float(np.linalg.norm(pr_vector))
+    spread = l2 / l1 if l1 > 0 else 0.0
+    return EdgeInfluence(
+        edge_id=edge_id,
+        spread=spread,
+        absolute_influence=l1,
+        penetration=l2,
+        relative_influence=float(np.sum(pr_vector)),
+    )
+
+
+
+ +
+ +
+ + +

+ hodge_analysis(kc, beta=0.1, weighted=False, weights=None) + +

+ + +
+ +

Run complete Hodge analysis on a knowledge complex.

+

Computes boundary matrices, Betti numbers, Hodge Laplacian, +edge PageRank for all edges, Hodge decomposition, and influence measures.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ beta + + float + +
+ +
+
+ 0.1 +
+ weighted + + bool + +
+ +
+
+ False +
+ weights + + dict[str, float] + +
+

Simplex weights (see hodge_laplacian).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ HodgeAnalysisResults + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
630
+631
+632
+633
+634
+635
+636
+637
+638
+639
+640
+641
+642
+643
+644
+645
+646
+647
+648
+649
+650
+651
+652
+653
+654
+655
+656
+657
+658
+659
+660
+661
+662
+663
+664
+665
+666
+667
+668
+669
+670
+671
+672
+673
+674
+675
def hodge_analysis(
+    kc: "KnowledgeComplex",
+    beta: float = 0.1,
+    weighted: bool = False,
+    weights: dict[str, float] | None = None,
+) -> HodgeAnalysisResults:
+    """
+    Run complete Hodge analysis on a knowledge complex.
+
+    Computes boundary matrices, Betti numbers, Hodge Laplacian,
+    edge PageRank for all edges, Hodge decomposition, and influence measures.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    beta : float
+    weighted : bool
+    weights : dict[str, float], optional
+        Simplex weights (see hodge_laplacian).
+
+    Returns
+    -------
+    HodgeAnalysisResults
+    """
+    bm = boundary_matrices(kc)
+    betti = betti_numbers(kc)
+    chi = euler_characteristic(kc)
+    L1 = hodge_laplacian(kc, weighted=weighted, weights=weights)
+    pr = edge_pagerank_all(kc, beta=beta, weighted=weighted, weights=weights)
+
+    decomps: dict[str, HodgeDecomposition] = {}
+    infls: dict[str, EdgeInfluence] = {}
+    for eid, idx in bm.edge_index.items():
+        pr_vec = pr[:, idx]
+        decomps[eid] = hodge_decomposition(kc, pr_vec, weights=weights)
+        infls[eid] = edge_influence(eid, pr_vec)
+
+    return HodgeAnalysisResults(
+        betti=betti,
+        euler_characteristic=chi,
+        boundary_matrices=bm,
+        laplacian=L1,
+        pagerank=pr,
+        decompositions=decomps,
+        influences=infls,
+    )
+
+
+
+ +
+ +
+ + +

+ graph_laplacian(kc) + +

+ + +
+ +

Compute the normalized graph Laplacian L = I - D⁻¹A on the 1-skeleton.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ csr_matrix + +
+

(n_vertices, n_vertices)

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
682
+683
+684
+685
+686
+687
+688
+689
+690
+691
+692
+693
+694
+695
+696
+697
+698
+699
+700
+701
+702
+703
+704
+705
+706
+707
+708
+709
+710
+711
+712
+713
+714
+715
+716
+717
+718
+719
def graph_laplacian(kc: "KnowledgeComplex") -> sp.csr_matrix:
+    """
+    Compute the normalized graph Laplacian L = I - D⁻¹A on the 1-skeleton.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    scipy.sparse.csr_matrix
+        (n_vertices, n_vertices)
+    """
+    bm = boundary_matrices(kc)
+    nv = len(bm.vertex_index)
+
+    if nv == 0:
+        return sp.csr_matrix((0, 0), dtype=np.float64)
+
+    # Build adjacency matrix from B1
+    # A = |B1| |B1|ᵀ - D  but simpler: walk the edges directly
+    rows, cols, vals = [], [], []
+    for e_id, e_idx in bm.edge_index.items():
+        bnd = list(kc.boundary(e_id))
+        if len(bnd) == 2:
+            i = bm.vertex_index[bnd[0]]
+            j = bm.vertex_index[bnd[1]]
+            rows.extend([i, j])
+            cols.extend([j, i])
+            vals.extend([1.0, 1.0])
+
+    A = sp.csr_matrix((vals, (rows, cols)), shape=(nv, nv), dtype=np.float64)
+    degrees = np.array(A.sum(axis=1)).flatten()
+    degrees[degrees == 0] = 1.0
+    D_inv = sp.diags(1.0 / degrees, format="csr")
+
+    L = sp.eye(nv, format="csr") - D_inv @ A
+    return ((L + L.T) / 2).tocsr()
+
+
+
+ +
+ +
+ + +

+ approximate_pagerank(kc, seed, alpha=0.15, epsilon=0.0001) + +

+ + +
+ +

Compute approximate PageRank via the push algorithm.

+

Follows Andersen-Chung-Lang (FOCS 2006). Uses lazy random walk +W = (I + D⁻¹A)/2. Maintains invariant p + pr(α, r) = pr(α, χ_seed).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ seed + + str + +
+

Starting vertex.

+
+
+ required +
+ alpha + + float + +
+

Teleportation constant (higher = more local).

+
+
+ 0.15 +
+ epsilon + + float + +
+

Convergence threshold: stops when max r(u)/d(u) < epsilon.

+
+
+ 0.0001 +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ tuple[dict[str, float], dict[str, float]] + +
+

(p, r) — approximate PageRank vector and residual.

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
750
+751
+752
+753
+754
+755
+756
+757
+758
+759
+760
+761
+762
+763
+764
+765
+766
+767
+768
+769
+770
+771
+772
+773
+774
+775
+776
+777
+778
+779
+780
+781
+782
+783
+784
+785
+786
+787
+788
+789
+790
+791
+792
+793
+794
+795
+796
+797
+798
+799
+800
+801
+802
+803
+804
+805
+806
+807
+808
+809
+810
+811
+812
+813
+814
+815
+816
+817
+818
+819
+820
+821
def approximate_pagerank(
+    kc: "KnowledgeComplex",
+    seed: str,
+    alpha: float = 0.15,
+    epsilon: float = 1e-4,
+) -> tuple[dict[str, float], dict[str, float]]:
+    """
+    Compute approximate PageRank via the push algorithm.
+
+    Follows Andersen-Chung-Lang (FOCS 2006). Uses lazy random walk
+    W = (I + D⁻¹A)/2. Maintains invariant p + pr(α, r) = pr(α, χ_seed).
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    seed : str
+        Starting vertex.
+    alpha : float
+        Teleportation constant (higher = more local).
+    epsilon : float
+        Convergence threshold: stops when max r(u)/d(u) < epsilon.
+
+    Returns
+    -------
+    tuple[dict[str, float], dict[str, float]]
+        (p, r) — approximate PageRank vector and residual.
+    """
+    bm = boundary_matrices(kc)
+    _, deg_map = _adjacency_and_degrees(kc, bm)
+
+    # Neighbor lookup
+    neighbors: dict[str, list[str]] = {v: [] for v in bm.vertex_index}
+    for e_id in bm.edge_index:
+        bnd = list(kc.boundary(e_id))
+        if len(bnd) == 2:
+            neighbors[bnd[0]].append(bnd[1])
+            neighbors[bnd[1]].append(bnd[0])
+
+    p: dict[str, float] = {}
+    r: dict[str, float] = {seed: 1.0}
+
+    # Push loop
+    while True:
+        # Find vertex with max r(u)/d(u)
+        best_u = None
+        best_ratio = 0.0
+        for u, rv in r.items():
+            d = max(deg_map.get(u, 1), 1)
+            ratio = rv / d
+            if ratio > best_ratio:
+                best_ratio = ratio
+                best_u = u
+
+        if best_ratio < epsilon or best_u is None:
+            break
+
+        # Push operation at best_u
+        u = best_u
+        ru = r[u]
+        d_u = max(deg_map.get(u, 1), 1)
+
+        # Move alpha fraction to p
+        p[u] = p.get(u, 0) + alpha * ru
+
+        # Spread (1-alpha) fraction via lazy walk: half stays, half spreads
+        r[u] = (1 - alpha) * ru / 2
+
+        spread = (1 - alpha) * ru / (2 * d_u)
+        for v in neighbors.get(u, []):
+            r[v] = r.get(v, 0) + spread
+
+    return p, r
+
+
+
+ +
+ +
+ + +

+ heat_kernel_pagerank(kc, seed, t=5.0, num_terms=30) + +

+ + +
+ +

Compute heat kernel PageRank ρ_{t,seed} on the 1-skeleton.

+

ρ_{t,u} = e^{-t} Σ_{k=0}^{N} (t^k / k!) χ_u W^k

+

where W = D⁻¹A is the random walk transition matrix.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ seed + + str + +
+

Starting vertex.

+
+
+ required +
+ t + + float + +
+

Heat parameter (temperature). Small t = local, large t = global.

+
+
+ 5.0 +
+ num_terms + + int + +
+

Number of terms in the Taylor expansion.

+
+
+ 30 +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict[str, float] + +
+

Mapping from vertex IDs to PageRank values.

+
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
828
+829
+830
+831
+832
+833
+834
+835
+836
+837
+838
+839
+840
+841
+842
+843
+844
+845
+846
+847
+848
+849
+850
+851
+852
+853
+854
+855
+856
+857
+858
+859
+860
+861
+862
+863
+864
+865
+866
+867
+868
+869
+870
+871
+872
+873
+874
+875
+876
+877
+878
+879
+880
+881
+882
+883
+884
+885
+886
+887
+888
+889
+890
+891
+892
+893
+894
+895
+896
def heat_kernel_pagerank(
+    kc: "KnowledgeComplex",
+    seed: str,
+    t: float = 5.0,
+    num_terms: int = 30,
+) -> dict[str, float]:
+    """
+    Compute heat kernel PageRank ρ_{t,seed} on the 1-skeleton.
+
+    ρ_{t,u} = e^{-t} Σ_{k=0}^{N} (t^k / k!) χ_u W^k
+
+    where W = D⁻¹A is the random walk transition matrix.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    seed : str
+        Starting vertex.
+    t : float
+        Heat parameter (temperature). Small t = local, large t = global.
+    num_terms : int
+        Number of terms in the Taylor expansion.
+
+    Returns
+    -------
+    dict[str, float]
+        Mapping from vertex IDs to PageRank values.
+    """
+    bm = boundary_matrices(kc)
+    nv = len(bm.vertex_index)
+
+    if nv == 0:
+        return {}
+
+    # Build W = D⁻¹A (random walk transition matrix)
+    rows, cols, vals = [], [], []
+    for e_id in bm.edge_index:
+        bnd = list(kc.boundary(e_id))
+        if len(bnd) == 2:
+            i = bm.vertex_index[bnd[0]]
+            j = bm.vertex_index[bnd[1]]
+            rows.extend([i, j])
+            cols.extend([j, i])
+            vals.extend([1.0, 1.0])
+
+    A = sp.csr_matrix((vals, (rows, cols)), shape=(nv, nv), dtype=np.float64)
+    degrees = np.array(A.sum(axis=1)).flatten()
+    degrees[degrees == 0] = 1.0
+    D_inv = sp.diags(1.0 / degrees, format="csr")
+    W = D_inv @ A
+
+    # Compute ρ = e^{-t} Σ (t^k / k!) χ_u W^k via Taylor expansion
+    seed_idx = bm.vertex_index[seed]
+    chi = np.zeros(nv)
+    chi[seed_idx] = 1.0
+
+    result = np.zeros(nv)
+    current = chi.copy()  # χ_u W^0 = χ_u
+    factorial = 1.0
+
+    for k in range(num_terms):
+        if k > 0:
+            factorial *= k
+            current = current @ W.toarray()
+        result += (t ** k / factorial) * current
+
+    result *= np.exp(-t)
+
+    return {bm.index_vertex[i]: float(result[i]) for i in range(nv)}
+
+
+
+ +
+ +
+ + +

+ sweep_cut(kc, distribution, max_volume=None) + +

+ + +
+ +

Sweep a vertex distribution to find a cut with minimum conductance.

+

Sorts vertices by p(v)/d(v) descending, computes conductance of each +prefix set, returns the cut with minimum conductance.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ distribution + + dict[str, float] + +
+

Vertex distribution (e.g., from approximate_pagerank).

+
+
+ required +
+ max_volume + + int + +
+

Maximum volume for the small side of the cut.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SweepCut + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
903
+904
+905
+906
+907
+908
+909
+910
+911
+912
+913
+914
+915
+916
+917
+918
+919
+920
+921
+922
+923
+924
+925
+926
+927
+928
+929
+930
+931
+932
+933
+934
+935
+936
+937
+938
+939
+940
+941
+942
+943
+944
+945
+946
+947
+948
+949
+950
+951
+952
+953
+954
+955
+956
+957
+958
+959
+960
+961
+962
+963
+964
+965
+966
+967
+968
+969
+970
+971
+972
+973
+974
+975
+976
+977
+978
+979
+980
+981
def sweep_cut(
+    kc: "KnowledgeComplex",
+    distribution: dict[str, float],
+    max_volume: int | None = None,
+) -> SweepCut:
+    """
+    Sweep a vertex distribution to find a cut with minimum conductance.
+
+    Sorts vertices by p(v)/d(v) descending, computes conductance of each
+    prefix set, returns the cut with minimum conductance.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    distribution : dict[str, float]
+        Vertex distribution (e.g., from approximate_pagerank).
+    max_volume : int, optional
+        Maximum volume for the small side of the cut.
+
+    Returns
+    -------
+    SweepCut
+    """
+    bm = boundary_matrices(kc)
+    _, deg_map = _adjacency_and_degrees(kc, bm)
+
+    # Neighbor lookup
+    neighbors: dict[str, set[str]] = {v: set() for v in bm.vertex_index}
+    for e_id in bm.edge_index:
+        bnd = list(kc.boundary(e_id))
+        if len(bnd) == 2:
+            neighbors[bnd[0]].add(bnd[1])
+            neighbors[bnd[1]].add(bnd[0])
+
+    total_volume = sum(deg_map.values())
+
+    # Sort vertices by p(v)/d(v) descending
+    scored = []
+    for vid in bm.vertex_index:
+        pv = distribution.get(vid, 0.0)
+        dv = max(deg_map.get(vid, 1), 1)
+        scored.append((vid, pv / dv))
+    scored.sort(key=lambda x: -x[1])
+
+    # Sweep: incrementally build S, track boundary edges and volume
+    best_cut = SweepCut(vertices=set(), conductance=float("inf"), volume=0, boundary_edges=0)
+    S: set[str] = set()
+    vol_S = 0
+    boundary = 0
+
+    for vid, _ in scored:
+        d_v = deg_map.get(vid, 0)
+        # Update boundary: edges from vid to S decrease boundary,
+        # edges from vid to outside S increase boundary
+        edges_to_S = len(neighbors[vid] & S)
+        edges_to_outside = d_v - edges_to_S
+        boundary = boundary - edges_to_S + edges_to_outside
+
+        S.add(vid)
+        vol_S += d_v
+
+        if vol_S == 0 or vol_S >= total_volume:
+            continue
+
+        if max_volume is not None and vol_S > max_volume:
+            break
+
+        denom = min(vol_S, total_volume - vol_S)
+        cond = boundary / denom if denom > 0 else float("inf")
+
+        if cond < best_cut.conductance:
+            best_cut = SweepCut(
+                vertices=set(S),
+                conductance=cond,
+                volume=vol_S,
+                boundary_edges=boundary,
+            )
+
+    return best_cut
+
+
+
+ +
+ +
+ + +

+ local_partition(kc, seed, target_conductance=0.5, target_volume=None, method='pagerank') + +

+ + +
+ +

Find a local partition near a seed vertex.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ seed + + str + +
+

Starting vertex.

+
+
+ required +
+ target_conductance + + float + +
+

Target conductance for setting alpha/t.

+
+
+ 0.5 +
+ target_volume + + int + +
+

Maximum volume for the small side.

+
+
+ None +
+ method + + str + +
+

"pagerank" — approximate PageRank (Andersen-Chung-Lang). +"heat_kernel" — heat kernel PageRank (Chung).

+
+
+ 'pagerank' +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SweepCut + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
 988
+ 989
+ 990
+ 991
+ 992
+ 993
+ 994
+ 995
+ 996
+ 997
+ 998
+ 999
+1000
+1001
+1002
+1003
+1004
+1005
+1006
+1007
+1008
+1009
+1010
+1011
+1012
+1013
+1014
+1015
+1016
+1017
+1018
+1019
+1020
+1021
+1022
+1023
+1024
+1025
+1026
+1027
+1028
+1029
+1030
def local_partition(
+    kc: "KnowledgeComplex",
+    seed: str,
+    target_conductance: float = 0.5,
+    target_volume: int | None = None,
+    method: str = "pagerank",
+) -> SweepCut:
+    """
+    Find a local partition near a seed vertex.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    seed : str
+        Starting vertex.
+    target_conductance : float
+        Target conductance for setting alpha/t.
+    target_volume : int, optional
+        Maximum volume for the small side.
+    method : str
+        "pagerank" — approximate PageRank (Andersen-Chung-Lang).
+        "heat_kernel" — heat kernel PageRank (Chung).
+
+    Returns
+    -------
+    SweepCut
+    """
+    if method == "pagerank":
+        alpha = target_conductance ** 2 / (16 * np.log(sum(
+            max(kc.degree(v), 1) for v in kc.element_ids(type=None)
+            if kc._schema._types.get(kc.element(v).type, {}).get("kind") == "vertex"
+        ) + 1))
+        alpha = max(min(alpha, 0.5), 0.01)
+        p, r = approximate_pagerank(kc, seed, alpha=alpha)
+        return sweep_cut(kc, p, max_volume=target_volume)
+
+    elif method == "heat_kernel":
+        t = max(1.0, 4.0 / (target_conductance ** 2))
+        rho = heat_kernel_pagerank(kc, seed, t=t)
+        return sweep_cut(kc, rho, max_volume=target_volume)
+
+    else:
+        raise ValueError(f"Unknown method '{method}'. Use 'pagerank' or 'heat_kernel'.")
+
+
+
+ +
+ +
+ + +

+ edge_sweep_cut(kc, edge_distribution, bm=None) + +

+ + +
+ +

Sweep an edge distribution to find an edge partition with minimum conductance.

+

Sorts edges by |distribution(e)|/degree(e) descending, computes edge +conductance of each prefix. Edge conductance measures how many +vertex-boundary connections cross the partition.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ edge_distribution + + ndarray + +
+

(n_edges,) vector of edge values.

+
+
+ required +
+ bm + + BoundaryMatrices + +
+

Pre-computed boundary matrices.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ EdgeSweepCut + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
1037
+1038
+1039
+1040
+1041
+1042
+1043
+1044
+1045
+1046
+1047
+1048
+1049
+1050
+1051
+1052
+1053
+1054
+1055
+1056
+1057
+1058
+1059
+1060
+1061
+1062
+1063
+1064
+1065
+1066
+1067
+1068
+1069
+1070
+1071
+1072
+1073
+1074
+1075
+1076
+1077
+1078
+1079
+1080
+1081
+1082
+1083
+1084
+1085
+1086
+1087
+1088
+1089
+1090
+1091
+1092
+1093
+1094
+1095
+1096
+1097
+1098
+1099
+1100
+1101
+1102
+1103
+1104
+1105
+1106
+1107
+1108
+1109
+1110
+1111
+1112
+1113
+1114
+1115
+1116
+1117
+1118
+1119
+1120
+1121
def edge_sweep_cut(
+    kc: "KnowledgeComplex",
+    edge_distribution: np.ndarray,
+    bm: BoundaryMatrices | None = None,
+) -> EdgeSweepCut:
+    """
+    Sweep an edge distribution to find an edge partition with minimum conductance.
+
+    Sorts edges by |distribution(e)|/degree(e) descending, computes edge
+    conductance of each prefix. Edge conductance measures how many
+    vertex-boundary connections cross the partition.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    edge_distribution : np.ndarray
+        (n_edges,) vector of edge values.
+    bm : BoundaryMatrices, optional
+        Pre-computed boundary matrices.
+
+    Returns
+    -------
+    EdgeSweepCut
+    """
+    if bm is None:
+        bm = boundary_matrices(kc)
+
+    ne = len(bm.edge_index)
+    if ne == 0:
+        return EdgeSweepCut(edges=set(), conductance=float("inf"), volume=0)
+
+    # Edge degree: number of faces incident to each edge + number of vertices
+    # Use coboundary size as a measure of "degree" for edges
+    edge_degrees = np.array(np.abs(bm.B2).sum(axis=1)).flatten() + 2  # +2 for boundary vertices
+
+    # Sort edges by |distribution(e)| / degree(e) descending
+    scored = []
+    for eid, idx in bm.edge_index.items():
+        val = abs(edge_distribution[idx])
+        deg = max(edge_degrees[idx], 1)
+        scored.append((eid, idx, val / deg))
+    scored.sort(key=lambda x: -x[2])
+
+    # Edge adjacency: two edges are adjacent if they share a vertex
+    # Build edge adjacency from B1
+    edge_adj: dict[str, set[str]] = {e: set() for e in bm.edge_index}
+    # For each vertex, collect incident edges
+    vertex_edges: dict[int, list[str]] = {}
+    for eid, eidx in bm.edge_index.items():
+        col = bm.B1[:, eidx]
+        for vidx in col.nonzero()[0]:
+            vertex_edges.setdefault(vidx, []).append(eid)
+
+    for vidx, eids in vertex_edges.items():
+        for i, e1 in enumerate(eids):
+            for e2 in eids[i + 1:]:
+                edge_adj[e1].add(e2)
+                edge_adj[e2].add(e1)
+
+    total_edge_vol = int(sum(edge_degrees))
+    S: set[str] = set()
+    vol_S = 0
+    boundary = 0
+
+    best = EdgeSweepCut(edges=set(), conductance=float("inf"), volume=0)
+
+    for eid, eidx, _ in scored:
+        d_e = int(edge_degrees[eidx])
+        adj_in_S = len(edge_adj[eid] & S)
+        adj_outside = len(edge_adj[eid]) - adj_in_S
+        boundary = boundary - adj_in_S + adj_outside
+
+        S.add(eid)
+        vol_S += d_e
+
+        if vol_S == 0 or vol_S >= total_edge_vol:
+            continue
+
+        denom = min(vol_S, total_edge_vol - vol_S)
+        cond = boundary / denom if denom > 0 else float("inf")
+
+        if cond < best.conductance:
+            best = EdgeSweepCut(edges=set(S), conductance=cond, volume=vol_S)
+
+    return best
+
+
+
+ +
+ +
+ + +

+ edge_local_partition(kc, seed_edge, t=5.0, beta=0.1, method='hodge_heat', weights=None) + +

+ + +
+ +

Find a local edge partition using the Hodge Laplacian.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ seed_edge + + str + +
+

Starting edge.

+
+
+ required +
+ t + + float + +
+

Heat parameter (for hodge_heat method).

+
+
+ 5.0 +
+ beta + + float + +
+

Regularization (for hodge_pagerank method).

+
+
+ 0.1 +
+ method + + str + +
+

"hodge_heat" — e^{-tL₁} χ_e (heat kernel on edges). +"hodge_pagerank" — (βI + L₁)⁻¹ χ_e (existing edge PageRank).

+
+
+ 'hodge_heat' +
+ weights + + dict[str, float] + +
+

Simplex weights.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ EdgeSweepCut + +
+ +
+
+ + +
+ Source code in knowledgecomplex/analysis.py +
1128
+1129
+1130
+1131
+1132
+1133
+1134
+1135
+1136
+1137
+1138
+1139
+1140
+1141
+1142
+1143
+1144
+1145
+1146
+1147
+1148
+1149
+1150
+1151
+1152
+1153
+1154
+1155
+1156
+1157
+1158
+1159
+1160
+1161
+1162
+1163
+1164
+1165
+1166
+1167
+1168
+1169
+1170
+1171
+1172
+1173
+1174
+1175
+1176
+1177
def edge_local_partition(
+    kc: "KnowledgeComplex",
+    seed_edge: str,
+    t: float = 5.0,
+    beta: float = 0.1,
+    method: str = "hodge_heat",
+    weights: dict[str, float] | None = None,
+) -> EdgeSweepCut:
+    """
+    Find a local edge partition using the Hodge Laplacian.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    seed_edge : str
+        Starting edge.
+    t : float
+        Heat parameter (for hodge_heat method).
+    beta : float
+        Regularization (for hodge_pagerank method).
+    method : str
+        "hodge_heat" — e^{-tL₁} χ_e (heat kernel on edges).
+        "hodge_pagerank" — (βI + L₁)⁻¹ χ_e (existing edge PageRank).
+    weights : dict[str, float], optional
+        Simplex weights.
+
+    Returns
+    -------
+    EdgeSweepCut
+    """
+    bm = boundary_matrices(kc)
+    ne = len(bm.edge_index)
+
+    if ne == 0:
+        return EdgeSweepCut(edges=set(), conductance=float("inf"), volume=0)
+
+    L1 = hodge_laplacian(kc, weights=weights)
+
+    if method == "hodge_pagerank":
+        dist = edge_pagerank(kc, seed_edge, beta=beta, weights=weights)
+    elif method == "hodge_heat":
+        # Compute e^{-tL₁} χ_e via dense matrix exponential
+        L1_dense = L1.toarray()
+        heat = expm(-t * L1_dense)
+        seed_idx = bm.edge_index[seed_edge]
+        dist = heat[:, seed_idx]
+    else:
+        raise ValueError(f"Unknown method '{method}'. Use 'hodge_heat' or 'hodge_pagerank'.")
+
+    return edge_sweep_cut(kc, dist, bm=bm)
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/clique/index.html b/site/api/clique/index.html new file mode 100644 index 0000000..7d47224 --- /dev/null +++ b/site/api/clique/index.html @@ -0,0 +1,1843 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Clique Inference - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + + + + + + + +

Clique Inference

+ +
+ + + + +
+ +

knowledgecomplex.clique — Clique complex and flagification methods.

+

Two workflows for inferring higher-order simplices from the edge graph:

+

Generic exploration (fill_cliques) + Discover what higher-order structure exists before knowing the semantics. + Fill in generic simplices for all cliques up to a given order. Inspect + what shows up, then decide what types to declare.

+

Typed inference (infer_faces) + Once you've declared a face type with semantic meaning, fill in all + instances of that type from the edge graph. The face type is required — + you declare the type, then run inference to populate it.

+

find_cliques is a pure query that returns vertex cliques without +modifying the complex.

+

Typical workflow::

+
# Phase 1: Explore — what triangles exist?
+sb.add_face_type("_clique")
+kc = KnowledgeComplex(schema=sb)
+# ... add vertices and edges ...
+result = fill_cliques(kc, max_order=2)
+
+# Phase 2: Inspect
+for fid in result[2]:
+    edge_types = {kc.element(e).type for e in kc.boundary(fid)}
+    print(f"{fid}: {edge_types}")
+
+# Phase 3: Typed inference with a real schema
+sb2 = SchemaBuilder(namespace="ex")
+sb2.add_face_type("operation", attributes={...})
+kc2 = KnowledgeComplex(schema=sb2)
+# ... add vertices and edges ...
+infer_faces(kc2, "operation", edge_type="performs")
+
+ + + + + + + + + + +
+ + + + + + + + + + +
+ + +

+ find_cliques(kc, k=3, *, edge_type=None) + +

+ + +
+ +

Find all k-cliques of KC vertices in the edge graph.

+

A k-clique is a set of k vertices where every pair is connected by +an edge. This is a pure query — it does not modify the complex.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ k + + int + +
+

Clique size (default 3 for triangles).

+
+
+ 3 +
+ edge_type + + str + +
+

Only consider edges of this type when building the adjacency graph.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[frozenset[str]] + +
+

Each element is a frozenset of k vertex IDs.

+
+
+ + +
+ Source code in knowledgecomplex/clique.py +
103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
def find_cliques(
+    kc: "KnowledgeComplex",
+    k: int = 3,
+    *,
+    edge_type: str | None = None,
+) -> list[frozenset[str]]:
+    """Find all k-cliques of KC vertices in the edge graph.
+
+    A k-clique is a set of k vertices where every pair is connected by
+    an edge.  This is a pure query — it does not modify the complex.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    k : int
+        Clique size (default 3 for triangles).
+    edge_type : str, optional
+        Only consider edges of this type when building the adjacency graph.
+
+    Returns
+    -------
+    list[frozenset[str]]
+        Each element is a frozenset of k vertex IDs.
+    """
+    if k < 2:
+        raise ValueError(f"Clique size must be >= 2, got {k}")
+
+    # Build adjacency from vertex-edge structure
+    vertices = sorted(kc.skeleton(0))
+    adj: dict[str, set[str]] = {v: set() for v in vertices}
+
+    edge_ids = kc.skeleton(1) - kc.skeleton(0)
+    for eid in edge_ids:
+        if edge_type is not None and kc.element(eid).type != edge_type:
+            continue
+        boundary = kc.boundary(eid)
+        if len(boundary) == 2:
+            v1, v2 = sorted(boundary)
+            adj[v1].add(v2)
+            adj[v2].add(v1)
+
+    # Enumerate cliques via Bron-Kerbosch or brute-force for small k
+    cliques: list[frozenset[str]] = []
+    sorted_verts = sorted(vertices)
+
+    if k == 2:
+        # k=2 cliques are just edges
+        for i, v1 in enumerate(sorted_verts):
+            for v2 in sorted_verts[i + 1:]:
+                if v2 in adj[v1]:
+                    cliques.append(frozenset([v1, v2]))
+    else:
+        # General: enumerate (k-1)-subsets and check
+        for combo in combinations(sorted_verts, k):
+            if all(combo[j] in adj[combo[i]]
+                   for i in range(k) for j in range(i + 1, k)):
+                cliques.append(frozenset(combo))
+
+    return cliques
+
+
+
+ +
+ +
+ + +

+ infer_faces(kc, face_type, *, edge_type=None, id_prefix='face', dry_run=False) + +

+ + +
+ +

Infer and add faces of a declared type from 3-cliques in the edge graph.

+

Finds all triangles (3-cliques of KC vertices), resolves the 3 boundary +edges for each, and calls kc.add_face() with the specified type. +Skips triangles that already have a face.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ face_type + + str + +
+

A registered face type to assign to inferred faces.

+
+
+ required +
+ edge_type + + str + +
+

Only consider edges of this type when finding triangles.

+
+
+ None +
+ id_prefix + + str + +
+

Prefix for auto-generated face IDs (e.g. "face-0").

+
+
+ 'face' +
+ dry_run + + bool + +
+

If True, return the list of would-be face IDs and their +boundaries without adding them to the complex.

+
+
+ False +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[str] + +
+

IDs of newly added (or would-be) faces.

+
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaError + +
+

If face_type is not a registered face type.

+
+
+ + +
+ Source code in knowledgecomplex/clique.py +
167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
+221
+222
+223
+224
+225
+226
+227
+228
+229
+230
+231
+232
+233
+234
+235
+236
+237
+238
+239
+240
+241
+242
def infer_faces(
+    kc: "KnowledgeComplex",
+    face_type: str,
+    *,
+    edge_type: str | None = None,
+    id_prefix: str = "face",
+    dry_run: bool = False,
+) -> list[str]:
+    """Infer and add faces of a declared type from 3-cliques in the edge graph.
+
+    Finds all triangles (3-cliques of KC vertices), resolves the 3 boundary
+    edges for each, and calls ``kc.add_face()`` with the specified type.
+    Skips triangles that already have a face.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    face_type : str
+        A registered face type to assign to inferred faces.
+    edge_type : str, optional
+        Only consider edges of this type when finding triangles.
+    id_prefix : str
+        Prefix for auto-generated face IDs (e.g. ``"face-0"``).
+    dry_run : bool
+        If ``True``, return the list of would-be face IDs and their
+        boundaries without adding them to the complex.
+
+    Returns
+    -------
+    list[str]
+        IDs of newly added (or would-be) faces.
+
+    Raises
+    ------
+    SchemaError
+        If *face_type* is not a registered face type.
+    """
+    if face_type not in kc._schema._types:
+        raise SchemaError(f"Type '{face_type}' is not registered")
+    if kc._schema._types[face_type].get("kind") != "face":
+        raise SchemaError(f"Type '{face_type}' is not a face type")
+
+    triangles = find_cliques(kc, k=3, edge_type=edge_type)
+    added: list[str] = []
+    counter = 0
+
+    for tri in triangles:
+        verts = sorted(tri)
+        # Find edges for each pair
+        edges = []
+        valid = True
+        for i in range(3):
+            for j in range(i + 1, 3):
+                e = _edges_between(kc, verts[i], verts[j], edge_type=edge_type)
+                if not e:
+                    valid = False
+                    break
+                edges.append(e[0])  # take first matching edge
+            if not valid:
+                break
+
+        if not valid or len(edges) != 3:
+            continue
+
+        # Skip if face already exists for these edges
+        if _has_face_for_edges(kc, frozenset(edges)):
+            continue
+
+        face_id = f"{id_prefix}-{counter}"
+        counter += 1
+
+        if not dry_run:
+            kc.add_face(face_id, type=face_type, boundary=edges)
+        added.append(face_id)
+
+    return added
+
+
+
+ +
+ +
+ + +

+ fill_cliques(kc, max_order=2, *, edge_type=None, id_prefix='clique') + +

+ + +
+ +

Fill generic simplices for all cliques up to max_order.

+

Discovers what higher-order structure exists without requiring semantic +type declarations. For k=2 (faces), uses the first declared face type. +For k>2, uses _assert_element directly with the base kc:Element +type — these are generic, untyped simplices.

+

This is an exploration tool. Once you've inspected the structure, +declare typed face types and use :func:infer_faces for semantic +inference.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ max_order + + int + +
+

Maximum simplex dimension to fill (default 2 = faces).

+
+
+ 2 +
+ edge_type + + str + +
+

Only consider edges of this type when finding cliques.

+
+
+ None +
+ id_prefix + + str + +
+

Prefix for auto-generated IDs.

+
+
+ 'clique' +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict[int, list[str]] + +
+

Mapping from dimension to list of newly added element IDs. +E.g. {2: ["clique-0", "clique-1"], 3: ["clique-4"]}.

+
+
+ + +
+ Source code in knowledgecomplex/clique.py +
248
+249
+250
+251
+252
+253
+254
+255
+256
+257
+258
+259
+260
+261
+262
+263
+264
+265
+266
+267
+268
+269
+270
+271
+272
+273
+274
+275
+276
+277
+278
+279
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
+290
+291
+292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
+313
+314
+315
+316
+317
+318
+319
+320
+321
+322
+323
+324
+325
+326
+327
+328
+329
+330
+331
+332
+333
+334
+335
+336
+337
+338
+339
+340
+341
+342
+343
def fill_cliques(
+    kc: "KnowledgeComplex",
+    max_order: int = 2,
+    *,
+    edge_type: str | None = None,
+    id_prefix: str = "clique",
+) -> dict[int, list[str]]:
+    """Fill generic simplices for all cliques up to max_order.
+
+    Discovers what higher-order structure exists without requiring semantic
+    type declarations.  For k=2 (faces), uses the first declared face type.
+    For k>2, uses ``_assert_element`` directly with the base ``kc:Element``
+    type — these are generic, untyped simplices.
+
+    This is an exploration tool.  Once you've inspected the structure,
+    declare typed face types and use :func:`infer_faces` for semantic
+    inference.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    max_order : int
+        Maximum simplex dimension to fill (default 2 = faces).
+    edge_type : str, optional
+        Only consider edges of this type when finding cliques.
+    id_prefix : str
+        Prefix for auto-generated IDs.
+
+    Returns
+    -------
+    dict[int, list[str]]
+        Mapping from dimension to list of newly added element IDs.
+        E.g. ``{2: ["clique-0", "clique-1"], 3: ["clique-4"]}``.
+    """
+    if max_order < 2:
+        raise ValueError(f"max_order must be >= 2, got {max_order}")
+
+    result: dict[int, list[str]] = {}
+
+    # k=2: faces — use first declared face type
+    if max_order >= 2:
+        face_types = kc._schema.type_names(kind="face")
+        if not face_types:
+            raise SchemaError(
+                "No face types declared. Add at least one face type "
+                "(e.g. sb.add_face_type('_clique')) before calling fill_cliques."
+            )
+        face_type = face_types[0]
+        result[2] = infer_faces(
+            kc, face_type, edge_type=edge_type, id_prefix=id_prefix,
+        )
+
+    # k>2: higher-order generic simplices
+    if max_order >= 3:
+        for dim in range(3, max_order + 1):
+            cliques = find_cliques(kc, k=dim + 1, edge_type=edge_type)
+            added: list[str] = []
+            counter = 0
+
+            for clique in cliques:
+                # Find the k boundary (dim-1)-simplices
+                # For a (dim)-simplex, boundary is (dim+1 choose dim) = dim+1
+                # (dim-1)-simplices from subsets of size dim
+                boundary_ids = []
+                valid = True
+
+                for sub in combinations(sorted(clique), dim):
+                    # Find the (dim-1)-simplex with these vertices in its closure
+                    sub_set = frozenset(sub)
+                    # For dim=3: find face whose closure vertices = sub (3 vertices)
+                    found = _find_simplex_with_vertices(kc, sub_set, dim - 1)
+                    if found is None:
+                        valid = False
+                        break
+                    boundary_ids.append(found)
+
+                if not valid:
+                    continue
+
+                # Check no duplicate
+                elem_id = f"{id_prefix}-{dim}d-{counter}"
+
+                if not dry_run_check(kc, boundary_ids):
+                    counter += 1
+                    # Use _assert_element directly for generic higher-order
+                    kc._assert_element(
+                        elem_id,
+                        face_types[0],  # reuse face type as best available
+                        boundary_ids=boundary_ids,
+                        attributes={},
+                    )
+                    added.append(elem_id)
+
+            result[dim] = added
+
+    return result
+
+
+
+ +
+ +
+ + +

+ dry_run_check(kc, boundary_ids) + +

+ + +
+ +

Check if an element with this boundary already exists.

+ + +
+ Source code in knowledgecomplex/clique.py +
391
+392
+393
+394
+395
+396
+397
+398
+399
def dry_run_check(kc, boundary_ids):
+    """Check if an element with this boundary already exists."""
+    if not boundary_ids:
+        return False
+    first = boundary_ids[0]
+    for cand in kc.coboundary(first):
+        if set(kc.boundary(cand)) == set(boundary_ids):
+            return True
+    return False
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/codecs/index.html b/site/api/codecs/index.html new file mode 100644 index 0000000..8794034 --- /dev/null +++ b/site/api/codecs/index.html @@ -0,0 +1,1803 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Codecs - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + + + + + + + +

Codecs

+ +
+ + + + +
+ +

knowledgecomplex.codecs.markdown — YAML-frontmatter + markdown codec.

+

Implements the :class:~knowledgecomplex.schema.Codec protocol for +knowledge complexes where each element is a markdown file with YAML +frontmatter (structured metadata) and a markdown body with predefined +section headers (prose content).

+

This follows the pattern used in production knowledge complexes authored +in Obsidian — each element is a .md file, the YAML header holds +structured attributes, and ## sections hold prose content.

+

Usage::

+
from knowledgecomplex.codecs import MarkdownCodec
+
+codec = MarkdownCodec(
+    frontmatter_attrs=["name", "author", "abstract"],
+    section_attrs=["notes", "methodology"],
+)
+kc.register_codec("Paper", codec)
+
+# Compile: KC element -> markdown file at its URI
+kc.element("paper-1").compile()
+
+# Decompile: markdown file -> KC element attributes
+kc.element("paper-1").decompile()
+
+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ MarkdownCodec + + +

+ + +
+ + + +

Codec for YAML-frontmatter + markdown files.

+

Each element maps to a single .md file. Attributes are stored in +two places:

+
    +
  • +

    YAML frontmatter (between --- delimiters): structured metadata + fields like name, author, description. These map 1:1 to + KC element attributes.

    +
  • +
  • +

    Markdown body sections (## Header blocks): prose content like + notes or analysis. The section header becomes the attribute name + (lowercased, spaces replaced with underscores), and the section body + becomes the attribute value.

    +
  • +
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ frontmatter_attrs + + list[str] + +
+

Attribute names stored in the YAML frontmatter.

+
+
+ required +
+ section_attrs + + list[str] + +
+

Attribute names stored as ## Section blocks in the body.

+
+
+ required +
+ + + + + + + + +
+ Source code in knowledgecomplex/codecs/markdown.py +
 40
+ 41
+ 42
+ 43
+ 44
+ 45
+ 46
+ 47
+ 48
+ 49
+ 50
+ 51
+ 52
+ 53
+ 54
+ 55
+ 56
+ 57
+ 58
+ 59
+ 60
+ 61
+ 62
+ 63
+ 64
+ 65
+ 66
+ 67
+ 68
+ 69
+ 70
+ 71
+ 72
+ 73
+ 74
+ 75
+ 76
+ 77
+ 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
class MarkdownCodec:
+    """Codec for YAML-frontmatter + markdown files.
+
+    Each element maps to a single ``.md`` file. Attributes are stored in
+    two places:
+
+    - **YAML frontmatter** (between ``---`` delimiters): structured metadata
+      fields like ``name``, ``author``, ``description``. These map 1:1 to
+      KC element attributes.
+
+    - **Markdown body sections** (``## Header`` blocks): prose content like
+      notes or analysis. The section header becomes the attribute name
+      (lowercased, spaces replaced with underscores), and the section body
+      becomes the attribute value.
+
+    Parameters
+    ----------
+    frontmatter_attrs : list[str]
+        Attribute names stored in the YAML frontmatter.
+    section_attrs : list[str]
+        Attribute names stored as ``## Section`` blocks in the body.
+    """
+
+    def __init__(
+        self,
+        frontmatter_attrs: list[str],
+        section_attrs: list[str],
+    ) -> None:
+        self.frontmatter_attrs = list(frontmatter_attrs)
+        self.section_attrs = list(section_attrs)
+
+    def compile(self, element: dict) -> None:
+        """Write an element record to a markdown file at its URI.
+
+        Parameters
+        ----------
+        element : dict
+            Keys: ``id``, ``type``, ``uri``, plus all attribute key-value pairs.
+        """
+        uri = element["uri"]
+        path = Path(uri.replace("file://", ""))
+        path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Build YAML frontmatter
+        fm: dict[str, Any] = {
+            "id": element["id"],
+            "type": element["type"],
+        }
+        for attr in self.frontmatter_attrs:
+            if attr in element:
+                fm[attr] = element[attr]
+
+        # Build markdown body
+        title = element.get("name", element["id"])
+        lines = [f"# {title}", ""]
+        for attr in self.section_attrs:
+            header = attr.replace("_", " ").title()
+            content = element.get(attr, "")
+            lines.append(f"## {header}")
+            lines.append("")
+            lines.append(content if content else "(empty)")
+            lines.append("")
+
+        # Write file
+        fm_str = yaml.dump(fm, default_flow_style=False, sort_keys=False).strip()
+        body = "\n".join(lines)
+        path.write_text(f"---\n{fm_str}\n---\n\n{body}\n")
+
+    def decompile(self, uri: str) -> dict:
+        """Read a markdown file and return attribute key-value pairs.
+
+        Parameters
+        ----------
+        uri : str
+            File URI (``file://`` prefix stripped automatically).
+
+        Returns
+        -------
+        dict
+            Attribute key-value pairs (no ``id``, ``type``, or ``uri``).
+        """
+        path = Path(uri.replace("file://", ""))
+        text = path.read_text()
+
+        # Split frontmatter from body
+        fm_match = re.match(r"^---\s*\n(.*?)\n---\s*\n(.*)$", text, re.DOTALL)
+        if not fm_match:
+            raise ValueError(f"No YAML frontmatter found in {path}")
+
+        fm_raw = fm_match.group(1)
+        body = fm_match.group(2)
+
+        # Parse YAML frontmatter
+        fm = yaml.safe_load(fm_raw) or {}
+        attrs: dict[str, str] = {}
+        for attr in self.frontmatter_attrs:
+            if attr in fm:
+                attrs[attr] = str(fm[attr])
+
+        # Parse ## sections from body
+        section_pattern = re.compile(r"^## (.+)$", re.MULTILINE)
+        sections = {}
+        matches = list(section_pattern.finditer(body))
+        for i, m in enumerate(matches):
+            header = m.group(1).strip().lower().replace(" ", "_")
+            start = m.end()
+            end = matches[i + 1].start() if i + 1 < len(matches) else len(body)
+            content = body[start:end].strip()
+            if content == "(empty)":
+                content = ""
+            sections[header] = content
+
+        for attr in self.section_attrs:
+            if attr in sections:
+                attrs[attr] = sections[attr]
+
+        return attrs
+
+
+ + + +
+ + + + + + + + + + +
+ + +

+ compile(element) + +

+ + +
+ +

Write an element record to a markdown file at its URI.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ element + + dict + +
+

Keys: id, type, uri, plus all attribute key-value pairs.

+
+
+ required +
+ + +
+ Source code in knowledgecomplex/codecs/markdown.py +
 71
+ 72
+ 73
+ 74
+ 75
+ 76
+ 77
+ 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
def compile(self, element: dict) -> None:
+    """Write an element record to a markdown file at its URI.
+
+    Parameters
+    ----------
+    element : dict
+        Keys: ``id``, ``type``, ``uri``, plus all attribute key-value pairs.
+    """
+    uri = element["uri"]
+    path = Path(uri.replace("file://", ""))
+    path.parent.mkdir(parents=True, exist_ok=True)
+
+    # Build YAML frontmatter
+    fm: dict[str, Any] = {
+        "id": element["id"],
+        "type": element["type"],
+    }
+    for attr in self.frontmatter_attrs:
+        if attr in element:
+            fm[attr] = element[attr]
+
+    # Build markdown body
+    title = element.get("name", element["id"])
+    lines = [f"# {title}", ""]
+    for attr in self.section_attrs:
+        header = attr.replace("_", " ").title()
+        content = element.get(attr, "")
+        lines.append(f"## {header}")
+        lines.append("")
+        lines.append(content if content else "(empty)")
+        lines.append("")
+
+    # Write file
+    fm_str = yaml.dump(fm, default_flow_style=False, sort_keys=False).strip()
+    body = "\n".join(lines)
+    path.write_text(f"---\n{fm_str}\n---\n\n{body}\n")
+
+
+
+ +
+ +
+ + +

+ decompile(uri) + +

+ + +
+ +

Read a markdown file and return attribute key-value pairs.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ uri + + str + +
+

File URI (file:// prefix stripped automatically).

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict + +
+

Attribute key-value pairs (no id, type, or uri).

+
+
+ + +
+ Source code in knowledgecomplex/codecs/markdown.py +
108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
def decompile(self, uri: str) -> dict:
+    """Read a markdown file and return attribute key-value pairs.
+
+    Parameters
+    ----------
+    uri : str
+        File URI (``file://`` prefix stripped automatically).
+
+    Returns
+    -------
+    dict
+        Attribute key-value pairs (no ``id``, ``type``, or ``uri``).
+    """
+    path = Path(uri.replace("file://", ""))
+    text = path.read_text()
+
+    # Split frontmatter from body
+    fm_match = re.match(r"^---\s*\n(.*?)\n---\s*\n(.*)$", text, re.DOTALL)
+    if not fm_match:
+        raise ValueError(f"No YAML frontmatter found in {path}")
+
+    fm_raw = fm_match.group(1)
+    body = fm_match.group(2)
+
+    # Parse YAML frontmatter
+    fm = yaml.safe_load(fm_raw) or {}
+    attrs: dict[str, str] = {}
+    for attr in self.frontmatter_attrs:
+        if attr in fm:
+            attrs[attr] = str(fm[attr])
+
+    # Parse ## sections from body
+    section_pattern = re.compile(r"^## (.+)$", re.MULTILINE)
+    sections = {}
+    matches = list(section_pattern.finditer(body))
+    for i, m in enumerate(matches):
+        header = m.group(1).strip().lower().replace(" ", "_")
+        start = m.end()
+        end = matches[i + 1].start() if i + 1 < len(matches) else len(body)
+        content = body[start:end].strip()
+        if content == "(empty)":
+            content = ""
+        sections[header] = content
+
+    for attr in self.section_attrs:
+        if attr in sections:
+            attrs[attr] = sections[attr]
+
+    return attrs
+
+
+
+ +
+ + + +
+ +
+ +
+ + +
+ + +

+ verify_documents(kc, directory) + +

+ + +
+ +

Check consistency between KC elements and markdown files on disk.

+

Verifies:

+
    +
  • Every element with a URI has a corresponding file.
  • +
  • Every .md file in the directory has a corresponding element.
  • +
  • Attribute values in files match the KC (via decompile).
  • +
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ directory + + str or Path + +
+

Root directory containing the markdown files.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[str] + +
+

Discrepancy messages. Empty list means everything is consistent.

+
+
+ + +
+ Source code in knowledgecomplex/codecs/markdown.py +
159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
def verify_documents(
+    kc: "KnowledgeComplex",
+    directory: str | Path,
+) -> list[str]:
+    """Check consistency between KC elements and markdown files on disk.
+
+    Verifies:
+
+    - Every element with a URI has a corresponding file.
+    - Every ``.md`` file in the directory has a corresponding element.
+    - Attribute values in files match the KC (via decompile).
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    directory : str or Path
+        Root directory containing the markdown files.
+
+    Returns
+    -------
+    list[str]
+        Discrepancy messages. Empty list means everything is consistent.
+    """
+    directory = Path(directory)
+    issues: list[str] = []
+
+    # Collect URIs from KC elements
+    uri_to_id: dict[str, str] = {}
+    for eid in kc.element_ids():
+        elem = kc.element(eid)
+        if elem.uri:
+            uri_to_id[elem.uri] = eid
+            fpath = Path(elem.uri.replace("file://", ""))
+            if not fpath.exists():
+                issues.append(f"MISSING FILE: {eid} -> {fpath}")
+
+    # Check for orphan files (in directory but not in KC)
+    for md_file in sorted(directory.rglob("*.md")):
+        file_uri = f"file://{md_file}"
+        if file_uri not in uri_to_id:
+            issues.append(f"ORPHAN FILE: {md_file} (no element in KC)")
+
+    # Check attribute consistency
+    for uri, eid in sorted(uri_to_id.items()):
+        fpath = Path(uri.replace("file://", ""))
+        if not fpath.exists():
+            continue
+        elem = kc.element(eid)
+        try:
+            codec = kc._resolve_codec(elem.type)
+            file_attrs = codec.decompile(uri)
+            kc_attrs = elem.attrs
+            for key in file_attrs:
+                if key in kc_attrs and file_attrs[key] != kc_attrs[key]:
+                    issues.append(
+                        f"MISMATCH: {eid}.{key} — "
+                        f"KC='{kc_attrs[key][:40]}' vs file='{file_attrs[key][:40]}'"
+                    )
+        except Exception as e:
+            issues.append(f"ERROR reading {eid}: {e}")
+
+    return issues
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/diff/index.html b/site/api/diff/index.html new file mode 100644 index 0000000..9bf45ba --- /dev/null +++ b/site/api/diff/index.html @@ -0,0 +1,3165 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Diffs & Sequences - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+ +
+ + + + + + + + +

Diffs & Sequences

+ +
+ + + + +
+ +

knowledgecomplex.diff — Complex diffs and sequences for time-varying complexes.

+

A ComplexDiff records element additions and removals. It can be applied +to a KnowledgeComplex to mutate it, exported to a SPARQL UPDATE string +for interoperability with RDF-native systems (e.g. flexo MMS), or imported +from a SPARQL UPDATE string received from a remote system.

+

A ComplexSequence wraps a base complex and an ordered list of diffs, +representing a time series of complex states. Element ID sets at each step +are computed by applying diffs cumulatively.

+

Example::

+
diff = ComplexDiff()
+diff.add_vertex("eve", type="Person")
+diff.add_edge("e-ae", type="Link", vertices={"alice", "eve"})
+diff.remove("old-edge")
+
+diff.apply(kc)                  # mutate the complex
+sparql = diff.to_sparql(kc)     # export as SPARQL UPDATE
+
+# Import a diff from a remote system
+remote_diff = ComplexDiff.from_sparql(sparql, kc)
+remote_diff.apply(kc2)
+
+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ ComplexDiff + + +

+ + +
+ + + +

A set of element additions and removals that transform a complex.

+

Build a diff by chaining add_vertex, add_edge, add_face, +and remove calls. Then apply it to a KnowledgeComplex via +:meth:apply, or export it to a SPARQL UPDATE string via :meth:to_sparql.

+ + + + + + + + +
+ Source code in knowledgecomplex/diff.py +
 39
+ 40
+ 41
+ 42
+ 43
+ 44
+ 45
+ 46
+ 47
+ 48
+ 49
+ 50
+ 51
+ 52
+ 53
+ 54
+ 55
+ 56
+ 57
+ 58
+ 59
+ 60
+ 61
+ 62
+ 63
+ 64
+ 65
+ 66
+ 67
+ 68
+ 69
+ 70
+ 71
+ 72
+ 73
+ 74
+ 75
+ 76
+ 77
+ 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
+221
+222
+223
+224
+225
+226
+227
+228
+229
+230
+231
+232
+233
+234
+235
+236
+237
+238
+239
+240
+241
+242
+243
+244
+245
+246
+247
+248
+249
+250
+251
+252
+253
+254
+255
+256
+257
+258
+259
+260
+261
+262
+263
+264
+265
+266
+267
+268
+269
+270
+271
+272
+273
+274
+275
+276
+277
+278
+279
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
+290
+291
+292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
+313
+314
+315
+316
+317
+318
+319
+320
+321
+322
+323
+324
+325
+326
+327
+328
+329
+330
+331
+332
+333
+334
+335
+336
+337
+338
+339
+340
+341
+342
+343
+344
+345
class ComplexDiff:
+    """A set of element additions and removals that transform a complex.
+
+    Build a diff by chaining ``add_vertex``, ``add_edge``, ``add_face``,
+    and ``remove`` calls.  Then apply it to a ``KnowledgeComplex`` via
+    :meth:`apply`, or export it to a SPARQL UPDATE string via :meth:`to_sparql`.
+    """
+
+    def __init__(self) -> None:
+        self._additions: list[dict[str, Any]] = []
+        self._removals: list[str] = []
+
+    @property
+    def additions(self) -> list[dict[str, Any]]:
+        """Element additions: list of dicts with id, type, kind, boundary, attrs."""
+        return list(self._additions)
+
+    @property
+    def removals(self) -> list[str]:
+        """Element removals: list of element IDs."""
+        return list(self._removals)
+
+    # ── Builder methods (chainable) ────────────────────────────────────────
+
+    def add_vertex(
+        self, id: str, type: str, uri: str | None = None, **attrs: Any
+    ) -> "ComplexDiff":
+        """Record a vertex addition."""
+        self._additions.append({
+            "id": id, "type": type, "kind": "vertex",
+            "boundary": None, "uri": uri, "attrs": attrs,
+        })
+        return self
+
+    def add_edge(
+        self, id: str, type: str, vertices: set[str] | list[str],
+        uri: str | None = None, **attrs: Any,
+    ) -> "ComplexDiff":
+        """Record an edge addition."""
+        self._additions.append({
+            "id": id, "type": type, "kind": "edge",
+            "boundary": list(vertices), "uri": uri, "attrs": attrs,
+        })
+        return self
+
+    def add_face(
+        self, id: str, type: str, boundary: list[str],
+        uri: str | None = None, **attrs: Any,
+    ) -> "ComplexDiff":
+        """Record a face addition."""
+        self._additions.append({
+            "id": id, "type": type, "kind": "face",
+            "boundary": list(boundary), "uri": uri, "attrs": attrs,
+        })
+        return self
+
+    def remove(self, id: str) -> "ComplexDiff":
+        """Record an element removal."""
+        self._removals.append(id)
+        return self
+
+    # ── Apply ──────────────────────────────────────────────────────────────
+
+    def apply(self, kc: "KnowledgeComplex", validate: bool = True) -> None:
+        """Apply this diff to a KnowledgeComplex.
+
+        Removals are processed first (highest dimension first to avoid
+        boundary-closure violations), then additions.
+
+        Parameters
+        ----------
+        kc : KnowledgeComplex
+        validate : bool
+            If True (default), run SHACL validation after all changes.
+            Raises ``ValidationError`` on failure (changes are NOT rolled back).
+        """
+        # Sort removals: higher-dim first to avoid intermediate violations
+        # Determine dimension of each removal from the live complex
+        dim_order = {"face": 2, "edge": 1, "vertex": 0}
+        removals_with_dim = []
+        for rid in self._removals:
+            try:
+                elem = kc.element(rid)
+                kind = kc._schema._types.get(elem.type, {}).get("kind", "vertex")
+                removals_with_dim.append((dim_order.get(kind, 0), rid))
+            except ValueError:
+                pass  # already removed or doesn't exist — skip
+        removals_with_dim.sort(key=lambda x: -x[0])  # highest dim first
+
+        for _, rid in removals_with_dim:
+            kc.remove_element(rid)
+
+        # Process additions sorted by dimension (vertices first, edges, then faces)
+        dim_order = {"vertex": 0, "edge": 1, "face": 2}
+        sorted_additions = sorted(
+            self._additions, key=lambda a: dim_order.get(a["kind"], 0)
+        )
+        for add in sorted_additions:
+            kind = add["kind"]
+            if kind == "vertex":
+                kc.add_vertex(
+                    add["id"], type=add["type"], uri=add["uri"], **add["attrs"]
+                )
+            elif kind == "edge":
+                kc.add_edge(
+                    add["id"], type=add["type"], vertices=add["boundary"],
+                    uri=add["uri"], **add["attrs"],
+                )
+            elif kind == "face":
+                kc.add_face(
+                    add["id"], type=add["type"], boundary=add["boundary"],
+                    uri=add["uri"], **add["attrs"],
+                )
+
+    # ── SPARQL export ──────────────────────────────────────────────────────
+
+    def to_sparql(self, kc: "KnowledgeComplex") -> str:
+        """Export this diff as a SPARQL UPDATE string.
+
+        Generates ``DELETE DATA`` blocks for removals and ``INSERT DATA``
+        blocks for additions, using the KC's namespace for IRI construction.
+
+        Parameters
+        ----------
+        kc : KnowledgeComplex
+            Used to read existing triples for removals and to resolve namespaces.
+
+        Returns
+        -------
+        str
+            A SPARQL UPDATE string.
+        """
+        ns = kc._schema._base_iri
+        parts = []
+
+        # DELETE DATA for removals
+        if self._removals:
+            delete_triples = []
+            for rid in self._removals:
+                iri = URIRef(f"{ns}{rid}")
+                # Collect all triples involving this element
+                for s, p, o in kc._instance_graph.triples((iri, None, None)):
+                    delete_triples.append(f"  <{s}> <{p}> {_sparql_obj(o)} .")
+                for s, p, o in kc._instance_graph.triples((None, None, iri)):
+                    delete_triples.append(f"  <{s}> <{p}> <{o}> .")
+            if delete_triples:
+                parts.append(
+                    "DELETE DATA {\n" + "\n".join(delete_triples) + "\n}"
+                )
+
+        # INSERT DATA for additions
+        if self._additions:
+            insert_triples = []
+            for add in self._additions:
+                iri = f"<{ns}{add['id']}>"
+                type_iri = f"<{ns}{add['type']}>"
+                insert_triples.append(f"  {iri} <{RDF.type}> {type_iri} .")
+
+                if add.get("boundary"):
+                    for bid in add["boundary"]:
+                        b_iri = f"<{ns}{bid}>"
+                        insert_triples.append(f"  {iri} <{_KC.boundedBy}> {b_iri} .")
+
+                if add.get("uri"):
+                    insert_triples.append(
+                        f'  {iri} <{_KC.uri}> "{add["uri"]}"^^<{XSD.anyURI}> .'
+                    )
+
+                for attr_name, attr_value in add.get("attrs", {}).items():
+                    attr_iri = f"<{ns}{attr_name}>"
+                    if isinstance(attr_value, (list, tuple)):
+                        for v in attr_value:
+                            insert_triples.append(f'  {iri} {attr_iri} "{v}" .')
+                    else:
+                        insert_triples.append(f'  {iri} {attr_iri} "{attr_value}" .')
+
+                # Add to complex
+                complex_iri = f"<{ns}_complex>"
+                insert_triples.append(
+                    f"  {complex_iri} <{_KC.hasElement}> {iri} ."
+                )
+
+            parts.append(
+                "INSERT DATA {\n" + "\n".join(insert_triples) + "\n}"
+            )
+
+        return " ;\n".join(parts)
+
+    # ── SPARQL import ──────────────────────────────────────────────────────
+
+    @classmethod
+    def from_sparql(cls, sparql: str, kc: "KnowledgeComplex") -> "ComplexDiff":
+        """Parse a SPARQL UPDATE string into a ComplexDiff.
+
+        Extracts ``INSERT DATA`` and ``DELETE DATA`` blocks, parses their
+        triple content, and reconstructs element additions and removals.
+
+        Parameters
+        ----------
+        sparql : str
+            SPARQL UPDATE string (as produced by :meth:`to_sparql`).
+        kc : KnowledgeComplex
+            Used to resolve namespaces and determine element kinds.
+
+        Returns
+        -------
+        ComplexDiff
+        """
+        ns = kc._schema._base_iri
+        diff = cls()
+
+        # Extract DELETE DATA blocks → removals
+        for match in re.finditer(
+            r"DELETE\s+DATA\s*\{(.*?)\}", sparql, re.DOTALL | re.IGNORECASE
+        ):
+            block = match.group(1)
+            removed_ids = set()
+            for triple_match in re.finditer(r"<([^>]+)>\s+<[^>]+>\s+", block):
+                subj = triple_match.group(1)
+                if subj.startswith(ns) and not subj.endswith("_complex"):
+                    removed_ids.add(subj[len(ns):])
+            for rid in sorted(removed_ids):
+                diff.remove(rid)
+
+        # Extract INSERT DATA blocks → additions
+        for match in re.finditer(
+            r"INSERT\s+DATA\s*\{(.*?)\}", sparql, re.DOTALL | re.IGNORECASE
+        ):
+            block = match.group(1)
+            # Parse triples to reconstruct elements
+            g = Graph()
+            # Convert to N-Triples-like format for parsing
+            nt_lines = []
+            for line in block.strip().split("\n"):
+                line = line.strip()
+                if line:
+                    nt_lines.append(line)
+            nt_data = "\n".join(nt_lines)
+            try:
+                g.parse(data=nt_data, format="nt")
+            except Exception:
+                continue
+
+            # Find elements (subjects with rdf:type in model namespace)
+            has_element = _KC.hasElement
+            bounded_by = _KC.boundedBy
+            kc_uri = _KC.uri
+
+            for subj in set(g.subjects(RDF.type, None)):
+                subj_str = str(subj)
+                if not subj_str.startswith(ns):
+                    continue
+                elem_id = subj_str[len(ns):]
+
+                # Get type
+                type_iri = g.value(subj, RDF.type)
+                if type_iri is None:
+                    continue
+                type_str = str(type_iri)
+                if not type_str.startswith(ns):
+                    continue
+                type_name = type_str[len(ns):]
+
+                # Determine kind from schema
+                kind = kc._schema._types.get(type_name, {}).get("kind", "vertex")
+
+                # Get boundary
+                boundary = []
+                for _, _, o in g.triples((subj, bounded_by, None)):
+                    bid = str(o)
+                    if bid.startswith(ns):
+                        boundary.append(bid[len(ns):])
+
+                # Get uri
+                uri_val = g.value(subj, kc_uri)
+                uri = str(uri_val) if uri_val else None
+
+                # Get model attributes
+                attrs = {}
+                for _, p, o in g.triples((subj, None, None)):
+                    p_str = str(p)
+                    if (p_str.startswith(ns) and p_str != str(type_iri)
+                            and p != RDF.type and p != bounded_by
+                            and p != kc_uri and p != has_element):
+                        attr_name = p_str[len(ns):]
+                        attrs[attr_name] = str(o)
+
+                if kind == "vertex":
+                    diff.add_vertex(elem_id, type=type_name, uri=uri, **attrs)
+                elif kind == "edge":
+                    diff.add_edge(
+                        elem_id, type=type_name, vertices=boundary,
+                        uri=uri, **attrs,
+                    )
+                elif kind == "face":
+                    diff.add_face(
+                        elem_id, type=type_name, boundary=boundary,
+                        uri=uri, **attrs,
+                    )
+
+        return diff
+
+    def __repr__(self) -> str:
+        return (
+            f"ComplexDiff(+{len(self._additions)} additions, "
+            f"-{len(self._removals)} removals)"
+        )
+
+
+ + + +
+ + + + + + + +
+ + + +

+ additions + + + property + + +

+ + +
+ +

Element additions: list of dicts with id, type, kind, boundary, attrs.

+ +
+ +
+ +
+ + + +

+ removals + + + property + + +

+ + +
+ +

Element removals: list of element IDs.

+ +
+ +
+ + + + +
+ + +

+ add_vertex(id, type, uri=None, **attrs) + +

+ + +
+ +

Record a vertex addition.

+ + +
+ Source code in knowledgecomplex/diff.py +
63
+64
+65
+66
+67
+68
+69
+70
+71
def add_vertex(
+    self, id: str, type: str, uri: str | None = None, **attrs: Any
+) -> "ComplexDiff":
+    """Record a vertex addition."""
+    self._additions.append({
+        "id": id, "type": type, "kind": "vertex",
+        "boundary": None, "uri": uri, "attrs": attrs,
+    })
+    return self
+
+
+
+ +
+ +
+ + +

+ add_edge(id, type, vertices, uri=None, **attrs) + +

+ + +
+ +

Record an edge addition.

+ + +
+ Source code in knowledgecomplex/diff.py +
73
+74
+75
+76
+77
+78
+79
+80
+81
+82
def add_edge(
+    self, id: str, type: str, vertices: set[str] | list[str],
+    uri: str | None = None, **attrs: Any,
+) -> "ComplexDiff":
+    """Record an edge addition."""
+    self._additions.append({
+        "id": id, "type": type, "kind": "edge",
+        "boundary": list(vertices), "uri": uri, "attrs": attrs,
+    })
+    return self
+
+
+
+ +
+ +
+ + +

+ add_face(id, type, boundary, uri=None, **attrs) + +

+ + +
+ +

Record a face addition.

+ + +
+ Source code in knowledgecomplex/diff.py +
84
+85
+86
+87
+88
+89
+90
+91
+92
+93
def add_face(
+    self, id: str, type: str, boundary: list[str],
+    uri: str | None = None, **attrs: Any,
+) -> "ComplexDiff":
+    """Record a face addition."""
+    self._additions.append({
+        "id": id, "type": type, "kind": "face",
+        "boundary": list(boundary), "uri": uri, "attrs": attrs,
+    })
+    return self
+
+
+
+ +
+ +
+ + +

+ remove(id) + +

+ + +
+ +

Record an element removal.

+ + +
+ Source code in knowledgecomplex/diff.py +
95
+96
+97
+98
def remove(self, id: str) -> "ComplexDiff":
+    """Record an element removal."""
+    self._removals.append(id)
+    return self
+
+
+
+ +
+ +
+ + +

+ apply(kc, validate=True) + +

+ + +
+ +

Apply this diff to a KnowledgeComplex.

+

Removals are processed first (highest dimension first to avoid +boundary-closure violations), then additions.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ validate + + bool + +
+

If True (default), run SHACL validation after all changes. +Raises ValidationError on failure (changes are NOT rolled back).

+
+
+ True +
+ + +
+ Source code in knowledgecomplex/diff.py +
102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
def apply(self, kc: "KnowledgeComplex", validate: bool = True) -> None:
+    """Apply this diff to a KnowledgeComplex.
+
+    Removals are processed first (highest dimension first to avoid
+    boundary-closure violations), then additions.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    validate : bool
+        If True (default), run SHACL validation after all changes.
+        Raises ``ValidationError`` on failure (changes are NOT rolled back).
+    """
+    # Sort removals: higher-dim first to avoid intermediate violations
+    # Determine dimension of each removal from the live complex
+    dim_order = {"face": 2, "edge": 1, "vertex": 0}
+    removals_with_dim = []
+    for rid in self._removals:
+        try:
+            elem = kc.element(rid)
+            kind = kc._schema._types.get(elem.type, {}).get("kind", "vertex")
+            removals_with_dim.append((dim_order.get(kind, 0), rid))
+        except ValueError:
+            pass  # already removed or doesn't exist — skip
+    removals_with_dim.sort(key=lambda x: -x[0])  # highest dim first
+
+    for _, rid in removals_with_dim:
+        kc.remove_element(rid)
+
+    # Process additions sorted by dimension (vertices first, edges, then faces)
+    dim_order = {"vertex": 0, "edge": 1, "face": 2}
+    sorted_additions = sorted(
+        self._additions, key=lambda a: dim_order.get(a["kind"], 0)
+    )
+    for add in sorted_additions:
+        kind = add["kind"]
+        if kind == "vertex":
+            kc.add_vertex(
+                add["id"], type=add["type"], uri=add["uri"], **add["attrs"]
+            )
+        elif kind == "edge":
+            kc.add_edge(
+                add["id"], type=add["type"], vertices=add["boundary"],
+                uri=add["uri"], **add["attrs"],
+            )
+        elif kind == "face":
+            kc.add_face(
+                add["id"], type=add["type"], boundary=add["boundary"],
+                uri=add["uri"], **add["attrs"],
+            )
+
+
+
+ +
+ +
+ + +

+ to_sparql(kc) + +

+ + +
+ +

Export this diff as a SPARQL UPDATE string.

+

Generates DELETE DATA blocks for removals and INSERT DATA +blocks for additions, using the KC's namespace for IRI construction.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+

Used to read existing triples for removals and to resolve namespaces.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ str + +
+

A SPARQL UPDATE string.

+
+
+ + +
+ Source code in knowledgecomplex/diff.py +
155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
+221
+222
+223
+224
+225
def to_sparql(self, kc: "KnowledgeComplex") -> str:
+    """Export this diff as a SPARQL UPDATE string.
+
+    Generates ``DELETE DATA`` blocks for removals and ``INSERT DATA``
+    blocks for additions, using the KC's namespace for IRI construction.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+        Used to read existing triples for removals and to resolve namespaces.
+
+    Returns
+    -------
+    str
+        A SPARQL UPDATE string.
+    """
+    ns = kc._schema._base_iri
+    parts = []
+
+    # DELETE DATA for removals
+    if self._removals:
+        delete_triples = []
+        for rid in self._removals:
+            iri = URIRef(f"{ns}{rid}")
+            # Collect all triples involving this element
+            for s, p, o in kc._instance_graph.triples((iri, None, None)):
+                delete_triples.append(f"  <{s}> <{p}> {_sparql_obj(o)} .")
+            for s, p, o in kc._instance_graph.triples((None, None, iri)):
+                delete_triples.append(f"  <{s}> <{p}> <{o}> .")
+        if delete_triples:
+            parts.append(
+                "DELETE DATA {\n" + "\n".join(delete_triples) + "\n}"
+            )
+
+    # INSERT DATA for additions
+    if self._additions:
+        insert_triples = []
+        for add in self._additions:
+            iri = f"<{ns}{add['id']}>"
+            type_iri = f"<{ns}{add['type']}>"
+            insert_triples.append(f"  {iri} <{RDF.type}> {type_iri} .")
+
+            if add.get("boundary"):
+                for bid in add["boundary"]:
+                    b_iri = f"<{ns}{bid}>"
+                    insert_triples.append(f"  {iri} <{_KC.boundedBy}> {b_iri} .")
+
+            if add.get("uri"):
+                insert_triples.append(
+                    f'  {iri} <{_KC.uri}> "{add["uri"]}"^^<{XSD.anyURI}> .'
+                )
+
+            for attr_name, attr_value in add.get("attrs", {}).items():
+                attr_iri = f"<{ns}{attr_name}>"
+                if isinstance(attr_value, (list, tuple)):
+                    for v in attr_value:
+                        insert_triples.append(f'  {iri} {attr_iri} "{v}" .')
+                else:
+                    insert_triples.append(f'  {iri} {attr_iri} "{attr_value}" .')
+
+            # Add to complex
+            complex_iri = f"<{ns}_complex>"
+            insert_triples.append(
+                f"  {complex_iri} <{_KC.hasElement}> {iri} ."
+            )
+
+        parts.append(
+            "INSERT DATA {\n" + "\n".join(insert_triples) + "\n}"
+        )
+
+    return " ;\n".join(parts)
+
+
+
+ +
+ +
+ + +

+ from_sparql(sparql, kc) + + + classmethod + + +

+ + +
+ +

Parse a SPARQL UPDATE string into a ComplexDiff.

+

Extracts INSERT DATA and DELETE DATA blocks, parses their +triple content, and reconstructs element additions and removals.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ sparql + + str + +
+

SPARQL UPDATE string (as produced by :meth:to_sparql).

+
+
+ required +
+ kc + + KnowledgeComplex + +
+

Used to resolve namespaces and determine element kinds.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ ComplexDiff + +
+ +
+
+ + +
+ Source code in knowledgecomplex/diff.py +
229
+230
+231
+232
+233
+234
+235
+236
+237
+238
+239
+240
+241
+242
+243
+244
+245
+246
+247
+248
+249
+250
+251
+252
+253
+254
+255
+256
+257
+258
+259
+260
+261
+262
+263
+264
+265
+266
+267
+268
+269
+270
+271
+272
+273
+274
+275
+276
+277
+278
+279
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
+290
+291
+292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
+313
+314
+315
+316
+317
+318
+319
+320
+321
+322
+323
+324
+325
+326
+327
+328
+329
+330
+331
+332
+333
+334
+335
+336
+337
+338
+339
@classmethod
+def from_sparql(cls, sparql: str, kc: "KnowledgeComplex") -> "ComplexDiff":
+    """Parse a SPARQL UPDATE string into a ComplexDiff.
+
+    Extracts ``INSERT DATA`` and ``DELETE DATA`` blocks, parses their
+    triple content, and reconstructs element additions and removals.
+
+    Parameters
+    ----------
+    sparql : str
+        SPARQL UPDATE string (as produced by :meth:`to_sparql`).
+    kc : KnowledgeComplex
+        Used to resolve namespaces and determine element kinds.
+
+    Returns
+    -------
+    ComplexDiff
+    """
+    ns = kc._schema._base_iri
+    diff = cls()
+
+    # Extract DELETE DATA blocks → removals
+    for match in re.finditer(
+        r"DELETE\s+DATA\s*\{(.*?)\}", sparql, re.DOTALL | re.IGNORECASE
+    ):
+        block = match.group(1)
+        removed_ids = set()
+        for triple_match in re.finditer(r"<([^>]+)>\s+<[^>]+>\s+", block):
+            subj = triple_match.group(1)
+            if subj.startswith(ns) and not subj.endswith("_complex"):
+                removed_ids.add(subj[len(ns):])
+        for rid in sorted(removed_ids):
+            diff.remove(rid)
+
+    # Extract INSERT DATA blocks → additions
+    for match in re.finditer(
+        r"INSERT\s+DATA\s*\{(.*?)\}", sparql, re.DOTALL | re.IGNORECASE
+    ):
+        block = match.group(1)
+        # Parse triples to reconstruct elements
+        g = Graph()
+        # Convert to N-Triples-like format for parsing
+        nt_lines = []
+        for line in block.strip().split("\n"):
+            line = line.strip()
+            if line:
+                nt_lines.append(line)
+        nt_data = "\n".join(nt_lines)
+        try:
+            g.parse(data=nt_data, format="nt")
+        except Exception:
+            continue
+
+        # Find elements (subjects with rdf:type in model namespace)
+        has_element = _KC.hasElement
+        bounded_by = _KC.boundedBy
+        kc_uri = _KC.uri
+
+        for subj in set(g.subjects(RDF.type, None)):
+            subj_str = str(subj)
+            if not subj_str.startswith(ns):
+                continue
+            elem_id = subj_str[len(ns):]
+
+            # Get type
+            type_iri = g.value(subj, RDF.type)
+            if type_iri is None:
+                continue
+            type_str = str(type_iri)
+            if not type_str.startswith(ns):
+                continue
+            type_name = type_str[len(ns):]
+
+            # Determine kind from schema
+            kind = kc._schema._types.get(type_name, {}).get("kind", "vertex")
+
+            # Get boundary
+            boundary = []
+            for _, _, o in g.triples((subj, bounded_by, None)):
+                bid = str(o)
+                if bid.startswith(ns):
+                    boundary.append(bid[len(ns):])
+
+            # Get uri
+            uri_val = g.value(subj, kc_uri)
+            uri = str(uri_val) if uri_val else None
+
+            # Get model attributes
+            attrs = {}
+            for _, p, o in g.triples((subj, None, None)):
+                p_str = str(p)
+                if (p_str.startswith(ns) and p_str != str(type_iri)
+                        and p != RDF.type and p != bounded_by
+                        and p != kc_uri and p != has_element):
+                    attr_name = p_str[len(ns):]
+                    attrs[attr_name] = str(o)
+
+            if kind == "vertex":
+                diff.add_vertex(elem_id, type=type_name, uri=uri, **attrs)
+            elif kind == "edge":
+                diff.add_edge(
+                    elem_id, type=type_name, vertices=boundary,
+                    uri=uri, **attrs,
+                )
+            elif kind == "face":
+                diff.add_face(
+                    elem_id, type=type_name, boundary=boundary,
+                    uri=uri, **attrs,
+                )
+
+    return diff
+
+
+
+ +
+ + + +
+ +
+ +
+ +
+ + + +

+ ComplexSequence + + +

+ + +
+ + + +

A base complex + ordered list of diffs, representing a time series.

+

Computes element ID sets at each step by applying diffs cumulatively +to the base complex's element set. This is a lightweight representation +that does not reconstruct full KnowledgeComplex instances at each step.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+

The base complex (state at step -1, before any diffs).

+
+
+ required +
+ diffs + + list[ComplexDiff] + +
+

Ordered sequence of diffs to apply.

+
+
+ required +
+ + + + + + + + +
+ Source code in knowledgecomplex/diff.py +
359
+360
+361
+362
+363
+364
+365
+366
+367
+368
+369
+370
+371
+372
+373
+374
+375
+376
+377
+378
+379
+380
+381
+382
+383
+384
+385
+386
+387
+388
+389
+390
+391
+392
+393
+394
+395
+396
+397
+398
+399
+400
+401
+402
+403
+404
+405
+406
+407
+408
+409
+410
+411
+412
+413
+414
+415
+416
+417
+418
+419
+420
+421
+422
+423
+424
+425
+426
+427
+428
+429
class ComplexSequence:
+    """A base complex + ordered list of diffs, representing a time series.
+
+    Computes element ID sets at each step by applying diffs cumulatively
+    to the base complex's element set.  This is a lightweight representation
+    that does not reconstruct full ``KnowledgeComplex`` instances at each step.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+        The base complex (state at step -1, before any diffs).
+    diffs : list[ComplexDiff]
+        Ordered sequence of diffs to apply.
+    """
+
+    def __init__(
+        self,
+        kc: "KnowledgeComplex",
+        diffs: list[ComplexDiff],
+    ) -> None:
+        self._kc = kc
+        self._diffs = list(diffs)
+        # Pre-compute element ID sets at each step
+        self._steps: list[frozenset[str]] = []
+        current = set(kc.element_ids())
+        for diff in diffs:
+            for rid in diff.removals:
+                current.discard(rid)
+            for add in diff.additions:
+                current.add(add["id"])
+            self._steps.append(frozenset(current))
+
+    @property
+    def complex(self) -> "KnowledgeComplex":
+        """The base KnowledgeComplex."""
+        return self._kc
+
+    @property
+    def diffs(self) -> list[ComplexDiff]:
+        """The ordered list of diffs."""
+        return list(self._diffs)
+
+    def __len__(self) -> int:
+        return len(self._steps)
+
+    def __getitem__(self, index: int) -> set[str]:
+        """Element IDs present at step ``index``."""
+        return set(self._steps[index])
+
+    def __iter__(self):
+        for step in self._steps:
+            yield set(step)
+
+    def new_at(self, index: int) -> set[str]:
+        """Elements added at step ``index`` (not present in previous step)."""
+        current = self._steps[index]
+        if index == 0:
+            base = frozenset(self._kc.element_ids())
+            return set(current - base)
+        return set(current - self._steps[index - 1])
+
+    def removed_at(self, index: int) -> set[str]:
+        """Elements removed at step ``index`` (present in previous, absent now)."""
+        current = self._steps[index]
+        if index == 0:
+            base = frozenset(self._kc.element_ids())
+            return set(base - current)
+        return set(self._steps[index - 1] - current)
+
+    def __repr__(self) -> str:
+        return f"ComplexSequence({len(self._steps)} steps)"
+
+
+ + + +
+ + + + + + + +
+ + + +

+ complex + + + property + + +

+ + +
+ +

The base KnowledgeComplex.

+ +
+ +
+ +
+ + + +

+ diffs + + + property + + +

+ + +
+ +

The ordered list of diffs.

+ +
+ +
+ + + + +
+ + +

+ __getitem__(index) + +

+ + +
+ +

Element IDs present at step index.

+ + +
+ Source code in knowledgecomplex/diff.py +
404
+405
+406
def __getitem__(self, index: int) -> set[str]:
+    """Element IDs present at step ``index``."""
+    return set(self._steps[index])
+
+
+
+ +
+ +
+ + +

+ new_at(index) + +

+ + +
+ +

Elements added at step index (not present in previous step).

+ + +
+ Source code in knowledgecomplex/diff.py +
412
+413
+414
+415
+416
+417
+418
def new_at(self, index: int) -> set[str]:
+    """Elements added at step ``index`` (not present in previous step)."""
+    current = self._steps[index]
+    if index == 0:
+        base = frozenset(self._kc.element_ids())
+        return set(current - base)
+    return set(current - self._steps[index - 1])
+
+
+
+ +
+ +
+ + +

+ removed_at(index) + +

+ + +
+ +

Elements removed at step index (present in previous, absent now).

+ + +
+ Source code in knowledgecomplex/diff.py +
420
+421
+422
+423
+424
+425
+426
def removed_at(self, index: int) -> set[str]:
+    """Elements removed at step ``index`` (present in previous, absent now)."""
+    current = self._steps[index]
+    if index == 0:
+        base = frozenset(self._kc.element_ids())
+        return set(base - current)
+    return set(self._steps[index - 1] - current)
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/exceptions/index.html b/site/api/exceptions/index.html new file mode 100644 index 0000000..06184ce --- /dev/null +++ b/site/api/exceptions/index.html @@ -0,0 +1,1089 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + Exceptions - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + + + + + + + +

Exceptions

+ + +
+ + + + +
+ +

knowledgecomplex.exceptions — Public exception types.

+

These are the only knowledgecomplex types that cross the API boundary on failure.

+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ ValidationError + + +

+ + +
+

+ Bases: Exception

+ + + +

Raised when a SHACL validation check fails on write.

+ + +

Attributes:

+ + + + + + + + + + + + + + + +
NameTypeDescription
report + str + +
+

Human-readable SHACL validation report text.

+
+
+ + + + + + + + +
+ Source code in knowledgecomplex/exceptions.py +
 8
+ 9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
class ValidationError(Exception):
+    """
+    Raised when a SHACL validation check fails on write.
+
+    Attributes
+    ----------
+    report : str
+        Human-readable SHACL validation report text.
+    """
+
+    def __init__(self, message: str, report: str) -> None:
+        super().__init__(message)
+        self.report = report
+
+    def __str__(self) -> str:
+        return f"{super().__str__()}\n\nSHACL Report:\n{self.report}"
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ UnknownQueryError + + +

+ + +
+

+ Bases: Exception

+ + + +

Raised when KnowledgeComplex.query() is called with an unregistered template name.

+ + + + + + + + +
+ Source code in knowledgecomplex/exceptions.py +
26
+27
+28
+29
+30
class UnknownQueryError(Exception):
+    """
+    Raised when KnowledgeComplex.query() is called with an unregistered template name.
+    """
+    pass
+
+
+ +
+ +
+ +
+ + + +

+ SchemaError + + +

+ + +
+

+ Bases: Exception

+ + + +

Raised when a SchemaBuilder method is called with invalid arguments, +e.g. referencing an undefined type.

+ + + + + + + + +
+ Source code in knowledgecomplex/exceptions.py +
33
+34
+35
+36
+37
+38
class SchemaError(Exception):
+    """
+    Raised when a SchemaBuilder method is called with invalid arguments,
+    e.g. referencing an undefined type.
+    """
+    pass
+
+
+ +
+ +
+ + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/filtration/index.html b/site/api/filtration/index.html new file mode 100644 index 0000000..e0386d6 --- /dev/null +++ b/site/api/filtration/index.html @@ -0,0 +1,2449 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Filtrations - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+ +
+ + + +
+ +
+ + + + + + + + +

Filtrations

+ +
+ + + + +
+ +

knowledgecomplex.filtration — Filtrations over knowledge complexes.

+

A filtration F = (C₀, C₁, …, Cₘ) is a nested sequence of subcomplexes +where each Cₚ is a valid simplicial complex (closed under boundary) and +Cₚ₋₁ ⊆ Cₚ. Filtrations are semantics-agnostic — they could represent +temporal evolution, thematic layers, trust levels, or any ordering.

+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ Filtration + + +

+ + +
+ + + +

An indexed sequence of nested subcomplexes over a KnowledgeComplex.

+

Each step is a valid subcomplex (closed under boundary) and each step +contains all elements from the previous step (monotone nesting).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+

The parent complex that this filtration is defined over.

+
+
+ required +
+ + +
+ Example +
+
+
+

filt = Filtration(kc) +filt.append({"v1"}) +filt.append({"v1", "v2", "e12"}) +filt.append({"v1", "v2", "v3", "e12", "e23", "e13", "f123"}) +len(filt) +3 +filt.birth("e12") +1

+
+
+
+
+ + + + + + + +
+ Source code in knowledgecomplex/filtration.py +
 18
+ 19
+ 20
+ 21
+ 22
+ 23
+ 24
+ 25
+ 26
+ 27
+ 28
+ 29
+ 30
+ 31
+ 32
+ 33
+ 34
+ 35
+ 36
+ 37
+ 38
+ 39
+ 40
+ 41
+ 42
+ 43
+ 44
+ 45
+ 46
+ 47
+ 48
+ 49
+ 50
+ 51
+ 52
+ 53
+ 54
+ 55
+ 56
+ 57
+ 58
+ 59
+ 60
+ 61
+ 62
+ 63
+ 64
+ 65
+ 66
+ 67
+ 68
+ 69
+ 70
+ 71
+ 72
+ 73
+ 74
+ 75
+ 76
+ 77
+ 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
+217
+218
+219
+220
+221
+222
+223
+224
+225
+226
class Filtration:
+    """
+    An indexed sequence of nested subcomplexes over a KnowledgeComplex.
+
+    Each step is a valid subcomplex (closed under boundary) and each step
+    contains all elements from the previous step (monotone nesting).
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+        The parent complex that this filtration is defined over.
+
+    Example
+    -------
+    >>> filt = Filtration(kc)
+    >>> filt.append({"v1"})
+    >>> filt.append({"v1", "v2", "e12"})
+    >>> filt.append({"v1", "v2", "v3", "e12", "e23", "e13", "f123"})
+    >>> len(filt)
+    3
+    >>> filt.birth("e12")
+    1
+    """
+
+    def __init__(self, kc: "KnowledgeComplex") -> None:
+        self._kc = kc
+        self._steps: list[frozenset[str]] = []
+
+    def __repr__(self) -> str:
+        return f"Filtration(steps={len(self._steps)}, complete={self.is_complete})"
+
+    @property
+    def complex(self) -> "KnowledgeComplex":
+        """The parent KnowledgeComplex."""
+        return self._kc
+
+    @property
+    def length(self) -> int:
+        """Number of steps in the filtration."""
+        return len(self._steps)
+
+    @property
+    def is_complete(self) -> bool:
+        """True if the last step contains all elements in the complex."""
+        if not self._steps:
+            return False
+        all_ids = set(self._kc.element_ids())
+        return set(self._steps[-1]) == all_ids
+
+    def append(self, ids: set[str]) -> "Filtration":
+        """
+        Append a subcomplex to the filtration.
+
+        Parameters
+        ----------
+        ids : set[str]
+            Element IDs forming the next step. Must be a valid subcomplex
+            and a superset of the previous step.
+
+        Returns
+        -------
+        Filtration (self, for chaining)
+
+        Raises
+        ------
+        ValueError
+            If ids is not a valid subcomplex or violates monotonicity.
+        """
+        ids_set = set(ids)
+
+        if not self._kc.is_subcomplex(ids_set):
+            raise ValueError(
+                "Cannot append: the given element set is not a valid subcomplex "
+                "(not closed under boundary)"
+            )
+
+        if self._steps and not ids_set >= set(self._steps[-1]):
+            raise ValueError(
+                "Cannot append: monotone nesting violated — new step must be "
+                "a superset of the previous step"
+            )
+
+        self._steps.append(frozenset(ids_set))
+        return self
+
+    def append_closure(self, ids: set[str]) -> "Filtration":
+        """
+        Append the closure of a set of elements, unioned with the previous step.
+
+        Takes the closure of ids (ensuring a valid subcomplex), unions it
+        with the previous step (ensuring monotonicity), and appends.
+
+        Parameters
+        ----------
+        ids : set[str]
+            Element IDs to close over.
+
+        Returns
+        -------
+        Filtration (self, for chaining)
+        """
+        closed = self._kc.closure(ids)
+        if self._steps:
+            closed = closed | set(self._steps[-1])
+        self._steps.append(frozenset(closed))
+        return self
+
+    @classmethod
+    def from_function(
+        cls,
+        kc: "KnowledgeComplex",
+        fn: Callable[[str], int | float],
+    ) -> "Filtration":
+        """
+        Build a filtration by grouping elements by a function value.
+
+        Calls fn(id) for every element in the complex, groups by return
+        value, sorts groups, and builds closure at each cumulative step.
+
+        Parameters
+        ----------
+        kc : KnowledgeComplex
+            The parent complex.
+        fn : Callable[[str], int | float]
+            Function mapping element IDs to filtration values.
+
+        Returns
+        -------
+        Filtration
+        """
+        all_ids = kc.element_ids()
+        groups: dict[int | float, list[str]] = defaultdict(list)
+        for eid in all_ids:
+            groups[fn(eid)].append(eid)
+
+        filt = cls(kc)
+        accumulated: set[str] = set()
+        for key in sorted(groups.keys()):
+            accumulated = accumulated | set(groups[key])
+            closed = kc.closure(accumulated)
+            filt._steps.append(frozenset(closed))
+
+        return filt
+
+    def __getitem__(self, index: int) -> set[str]:
+        return set(self._steps[index])
+
+    def __len__(self) -> int:
+        return len(self._steps)
+
+    def __iter__(self) -> Iterator[set[str]]:
+        for step in self._steps:
+            yield set(step)
+
+    def birth(self, id: str) -> int:
+        """
+        Return the index of the first step containing this element.
+
+        Parameters
+        ----------
+        id : str
+            Element identifier.
+
+        Returns
+        -------
+        int
+
+        Raises
+        ------
+        ValueError
+            If the element does not appear in any step.
+        """
+        for i, step in enumerate(self._steps):
+            if id in step:
+                return i
+        raise ValueError(f"Element '{id}' not found in any filtration step")
+
+    def new_at(self, index: int) -> set[str]:
+        """
+        Return elements added at step index (Cₚ \\ Cₚ₋₁).
+
+        Parameters
+        ----------
+        index : int
+            Step index.
+
+        Returns
+        -------
+        set[str]
+        """
+        current = set(self._steps[index])
+        if index == 0:
+            return current
+        return current - set(self._steps[index - 1])
+
+    def elements_at(self, index: int) -> set[str]:
+        """
+        Return all elements at step index (same as self[index]).
+
+        Parameters
+        ----------
+        index : int
+            Step index.
+
+        Returns
+        -------
+        set[str]
+        """
+        return set(self._steps[index])
+
+
+ + + +
+ + + + + + + +
+ + + +

+ complex + + + property + + +

+ + +
+ +

The parent KnowledgeComplex.

+ +
+ +
+ +
+ + + +

+ length + + + property + + +

+ + +
+ +

Number of steps in the filtration.

+ +
+ +
+ +
+ + + +

+ is_complete + + + property + + +

+ + +
+ +

True if the last step contains all elements in the complex.

+ +
+ +
+ + + + +
+ + +

+ append(ids) + +

+ + +
+ +

Append a subcomplex to the filtration.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ ids + + set[str] + +
+

Element IDs forming the next step. Must be a valid subcomplex +and a superset of the previous step.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Filtration (self, for chaining) + +
+ +
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If ids is not a valid subcomplex or violates monotonicity.

+
+
+ + +
+ Source code in knowledgecomplex/filtration.py +
 67
+ 68
+ 69
+ 70
+ 71
+ 72
+ 73
+ 74
+ 75
+ 76
+ 77
+ 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
def append(self, ids: set[str]) -> "Filtration":
+    """
+    Append a subcomplex to the filtration.
+
+    Parameters
+    ----------
+    ids : set[str]
+        Element IDs forming the next step. Must be a valid subcomplex
+        and a superset of the previous step.
+
+    Returns
+    -------
+    Filtration (self, for chaining)
+
+    Raises
+    ------
+    ValueError
+        If ids is not a valid subcomplex or violates monotonicity.
+    """
+    ids_set = set(ids)
+
+    if not self._kc.is_subcomplex(ids_set):
+        raise ValueError(
+            "Cannot append: the given element set is not a valid subcomplex "
+            "(not closed under boundary)"
+        )
+
+    if self._steps and not ids_set >= set(self._steps[-1]):
+        raise ValueError(
+            "Cannot append: monotone nesting violated — new step must be "
+            "a superset of the previous step"
+        )
+
+    self._steps.append(frozenset(ids_set))
+    return self
+
+
+
+ +
+ +
+ + +

+ append_closure(ids) + +

+ + +
+ +

Append the closure of a set of elements, unioned with the previous step.

+

Takes the closure of ids (ensuring a valid subcomplex), unions it +with the previous step (ensuring monotonicity), and appends.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ ids + + set[str] + +
+

Element IDs to close over.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Filtration (self, for chaining) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/filtration.py +
103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
def append_closure(self, ids: set[str]) -> "Filtration":
+    """
+    Append the closure of a set of elements, unioned with the previous step.
+
+    Takes the closure of ids (ensuring a valid subcomplex), unions it
+    with the previous step (ensuring monotonicity), and appends.
+
+    Parameters
+    ----------
+    ids : set[str]
+        Element IDs to close over.
+
+    Returns
+    -------
+    Filtration (self, for chaining)
+    """
+    closed = self._kc.closure(ids)
+    if self._steps:
+        closed = closed | set(self._steps[-1])
+    self._steps.append(frozenset(closed))
+    return self
+
+
+
+ +
+ +
+ + +

+ from_function(kc, fn) + + + classmethod + + +

+ + +
+ +

Build a filtration by grouping elements by a function value.

+

Calls fn(id) for every element in the complex, groups by return +value, sorts groups, and builds closure at each cumulative step.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+

The parent complex.

+
+
+ required +
+ fn + + Callable[[str], int | float] + +
+

Function mapping element IDs to filtration values.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Filtration + +
+ +
+
+ + +
+ Source code in knowledgecomplex/filtration.py +
125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
@classmethod
+def from_function(
+    cls,
+    kc: "KnowledgeComplex",
+    fn: Callable[[str], int | float],
+) -> "Filtration":
+    """
+    Build a filtration by grouping elements by a function value.
+
+    Calls fn(id) for every element in the complex, groups by return
+    value, sorts groups, and builds closure at each cumulative step.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+        The parent complex.
+    fn : Callable[[str], int | float]
+        Function mapping element IDs to filtration values.
+
+    Returns
+    -------
+    Filtration
+    """
+    all_ids = kc.element_ids()
+    groups: dict[int | float, list[str]] = defaultdict(list)
+    for eid in all_ids:
+        groups[fn(eid)].append(eid)
+
+    filt = cls(kc)
+    accumulated: set[str] = set()
+    for key in sorted(groups.keys()):
+        accumulated = accumulated | set(groups[key])
+        closed = kc.closure(accumulated)
+        filt._steps.append(frozenset(closed))
+
+    return filt
+
+
+
+ +
+ +
+ + +

+ birth(id) + +

+ + +
+ +

Return the index of the first step containing this element.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ int + +
+ +
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If the element does not appear in any step.

+
+
+ + +
+ Source code in knowledgecomplex/filtration.py +
172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
def birth(self, id: str) -> int:
+    """
+    Return the index of the first step containing this element.
+
+    Parameters
+    ----------
+    id : str
+        Element identifier.
+
+    Returns
+    -------
+    int
+
+    Raises
+    ------
+    ValueError
+        If the element does not appear in any step.
+    """
+    for i, step in enumerate(self._steps):
+        if id in step:
+            return i
+    raise ValueError(f"Element '{id}' not found in any filtration step")
+
+
+
+ +
+ +
+ + +

+ new_at(index) + +

+ + +
+ +

Return elements added at step index (Cₚ \ Cₚ₋₁).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ index + + int + +
+

Step index.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/filtration.py +
195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
def new_at(self, index: int) -> set[str]:
+    """
+    Return elements added at step index (Cₚ \\ Cₚ₋₁).
+
+    Parameters
+    ----------
+    index : int
+        Step index.
+
+    Returns
+    -------
+    set[str]
+    """
+    current = set(self._steps[index])
+    if index == 0:
+        return current
+    return current - set(self._steps[index - 1])
+
+
+
+ +
+ +
+ + +

+ elements_at(index) + +

+ + +
+ +

Return all elements at step index (same as self[index]).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ index + + int + +
+

Step index.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/filtration.py +
213
+214
+215
+216
+217
+218
+219
+220
+221
+222
+223
+224
+225
+226
def elements_at(self, index: int) -> set[str]:
+    """
+    Return all elements at step index (same as self[index]).
+
+    Parameters
+    ----------
+    index : int
+        Step index.
+
+    Returns
+    -------
+    set[str]
+    """
+    return set(self._steps[index])
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/graph/index.html b/site/api/graph/index.html new file mode 100644 index 0000000..21c5502 --- /dev/null +++ b/site/api/graph/index.html @@ -0,0 +1,7511 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Graph - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+ +
+ + + + + + + + +

Graph

+ + +
+ + + + +
+ +

knowledgecomplex.graph — KnowledgeComplex instance I/O.

+

Public API. Never exposes rdflib, pyshacl, or owlrl objects.

+

A KnowledgeComplex corresponds to a kc:Complex individual in the RDF graph. +Each add_vertex / add_edge / add_face call asserts the new element AND its +kc:hasElement membership in the complex. SHACL validation on every write +enforces both per-element constraints (EdgeShape, FaceShape) and +boundary-closure (ComplexShape): if a simplex is in the complex, all its +boundary elements must be too.

+

This enforces the "slice rule": at every point during construction, the +elements added so far must form a valid complex. Concretely, an element's +boundary elements must already be members before it can be added. This is +a partial ordering — types can be interleaved as long as each element's +boundary predecessors are present (e.g., add v1, v2, edge(v1,v2), v3, +edge(v2,v3), ...). The simplest strategy is to add all vertices, then all +edges, then all faces, but this is not required.

+

SPARQL queries are encapsulated as named templates. The framework provides +generic queries in knowledgecomplex/queries/; domain models can supply +additional query directories via the query_dirs parameter.

+

The optional uri parameter on add_vertex / add_edge / add_face allows +callers to attach a file URI to any element via the kc:uri property. This +is particularly useful for domain applications where each element corresponds +to an actual document file (e.g. file:///path/to/doc.md).

+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ Element + + +

+ + +
+ + + +

Lightweight proxy for an element in a KnowledgeComplex.

+

Provides read-only access to element properties and compile/decompile +methods that delegate to the codec registered for this element's type. +Properties read live from the instance graph on each access.

+ + + + + + + + +
+ Source code in knowledgecomplex/graph.py +
 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
class Element:
+    """
+    Lightweight proxy for an element in a KnowledgeComplex.
+
+    Provides read-only access to element properties and compile/decompile
+    methods that delegate to the codec registered for this element's type.
+    Properties read live from the instance graph on each access.
+    """
+
+    def __init__(self, kc: "KnowledgeComplex", id: str) -> None:
+        self._kc = kc
+        self._id = id
+        self._iri = URIRef(f"{kc._schema._base_iri}{id}")
+
+    def __repr__(self) -> str:
+        try:
+            t = self.type
+        except ValueError:
+            t = "?"
+        return f"Element({self._id!r}, type={t!r})"
+
+    @property
+    def id(self) -> str:
+        return self._id
+
+    @property
+    def type(self) -> str:
+        ns_str = self._kc._schema._base_iri
+        for _, _, o in self._kc._instance_graph.triples((self._iri, RDF.type, None)):
+            type_str = str(o)
+            if type_str.startswith(ns_str):
+                return type_str[len(ns_str):]
+        raise ValueError(f"Element '{self._id}' has no user type")
+
+    @property
+    def uri(self) -> str | None:
+        obj = self._kc._instance_graph.value(self._iri, _KC.uri)
+        return str(obj) if obj is not None else None
+
+    @property
+    def attrs(self) -> dict[str, Any]:
+        ns_str = self._kc._schema._base_iri
+        attrs: dict[str, Any] = {}
+        for _, p, o in self._kc._instance_graph.triples((self._iri, None, None)):
+            p_str = str(p)
+            if p_str.startswith(ns_str):
+                attr_name = p_str[len(ns_str):]
+                attrs[attr_name] = str(o)
+        return attrs
+
+    def compile(self) -> None:
+        """Write this element's record to the artifact at its URI."""
+        codec = self._kc._resolve_codec(self.type)
+        uri = self.uri
+        if uri is None:
+            raise ValueError(f"Element '{self._id}' has no kc:uri — cannot compile")
+        element_dict = {"id": self._id, "type": self.type, "uri": uri, **self.attrs}
+        codec.compile(element_dict)
+
+    def decompile(self) -> None:
+        """Read the artifact at this element's URI and update attributes."""
+        codec = self._kc._resolve_codec(self.type)
+        uri = self.uri
+        if uri is None:
+            raise ValueError(f"Element '{self._id}' has no kc:uri — cannot decompile")
+        new_attrs = codec.decompile(uri)
+
+        # Remove existing model-namespace attribute triples
+        ns_str = self._kc._schema._base_iri
+        to_remove = []
+        for s, p, o in self._kc._instance_graph.triples((self._iri, None, None)):
+            if str(p).startswith(ns_str):
+                to_remove.append((s, p, o))
+        for triple in to_remove:
+            self._kc._instance_graph.remove(triple)
+
+        # Add new attribute triples
+        for attr_name, attr_value in new_attrs.items():
+            attr_iri = self._kc._ns[attr_name]
+            if isinstance(attr_value, (list, tuple)):
+                for v in attr_value:
+                    self._kc._instance_graph.add((self._iri, attr_iri, Literal(v)))
+            else:
+                self._kc._instance_graph.add((self._iri, attr_iri, Literal(attr_value)))
+
+        # Re-validate
+        self._kc._validate(self._id)
+
+
+ + + +
+ + + + + + + + + + +
+ + +

+ compile() + +

+ + +
+ +

Write this element's record to the artifact at its URI.

+ + +
+ Source code in knowledgecomplex/graph.py +
135
+136
+137
+138
+139
+140
+141
+142
def compile(self) -> None:
+    """Write this element's record to the artifact at its URI."""
+    codec = self._kc._resolve_codec(self.type)
+    uri = self.uri
+    if uri is None:
+        raise ValueError(f"Element '{self._id}' has no kc:uri — cannot compile")
+    element_dict = {"id": self._id, "type": self.type, "uri": uri, **self.attrs}
+    codec.compile(element_dict)
+
+
+
+ +
+ +
+ + +

+ decompile() + +

+ + +
+ +

Read the artifact at this element's URI and update attributes.

+ + +
+ Source code in knowledgecomplex/graph.py +
144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
def decompile(self) -> None:
+    """Read the artifact at this element's URI and update attributes."""
+    codec = self._kc._resolve_codec(self.type)
+    uri = self.uri
+    if uri is None:
+        raise ValueError(f"Element '{self._id}' has no kc:uri — cannot decompile")
+    new_attrs = codec.decompile(uri)
+
+    # Remove existing model-namespace attribute triples
+    ns_str = self._kc._schema._base_iri
+    to_remove = []
+    for s, p, o in self._kc._instance_graph.triples((self._iri, None, None)):
+        if str(p).startswith(ns_str):
+            to_remove.append((s, p, o))
+    for triple in to_remove:
+        self._kc._instance_graph.remove(triple)
+
+    # Add new attribute triples
+    for attr_name, attr_value in new_attrs.items():
+        attr_iri = self._kc._ns[attr_name]
+        if isinstance(attr_value, (list, tuple)):
+            for v in attr_value:
+                self._kc._instance_graph.add((self._iri, attr_iri, Literal(v)))
+        else:
+            self._kc._instance_graph.add((self._iri, attr_iri, Literal(attr_value)))
+
+    # Re-validate
+    self._kc._validate(self._id)
+
+
+
+ +
+ + + +
+ +
+ +
+ +
+ + + +

+ KnowledgeComplex + + +

+ + +
+ + + +

Manage a knowledge complex instance: add elements, validate, query.

+

Maps to a kc:Complex individual in the RDF graph. Each element added +via add_vertex / add_edge / add_face becomes a kc:hasElement member of +this complex. Boundary-closure is enforced by ComplexShape on every write +(the "slice rule": every prefix of the insertion sequence is a valid complex). +An element's boundary elements must be added before the element itself.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ schema + + SchemaBuilder + +
+

A fully configured schema. The merged OWL + SHACL is loaded into +the internal graph at construction time.

+
+
+ required +
+ query_dirs + + list[Path] + +
+

Additional directories containing .sparql query templates +(e.g. from domain models). Merged with the framework's built-in queries.

+
+
+ None +
+ + +
+ Example +
+
+
+

from knowledgecomplex import SchemaBuilder, KnowledgeComplex, vocab +sb = SchemaBuilder(namespace="aaa") +sb.add_vertex_type("spec") +sb.add_vertex_type("guidance") +sb.add_edge_type("verification", +... attributes={"status": vocab("passing", "failing", "pending")}) +kc = KnowledgeComplex(schema=sb) +kc.add_vertex("spec-001", type="spec", uri="file:///docs/spec-001.md") +kc.add_vertex("guidance-001", type="guidance") +kc.add_edge("ver-001", type="verification", +... vertices={"spec-001", "guidance-001"}, status="passing")

+
+
+
+
+ + + + + + + +
+ Source code in knowledgecomplex/graph.py +
 174
+ 175
+ 176
+ 177
+ 178
+ 179
+ 180
+ 181
+ 182
+ 183
+ 184
+ 185
+ 186
+ 187
+ 188
+ 189
+ 190
+ 191
+ 192
+ 193
+ 194
+ 195
+ 196
+ 197
+ 198
+ 199
+ 200
+ 201
+ 202
+ 203
+ 204
+ 205
+ 206
+ 207
+ 208
+ 209
+ 210
+ 211
+ 212
+ 213
+ 214
+ 215
+ 216
+ 217
+ 218
+ 219
+ 220
+ 221
+ 222
+ 223
+ 224
+ 225
+ 226
+ 227
+ 228
+ 229
+ 230
+ 231
+ 232
+ 233
+ 234
+ 235
+ 236
+ 237
+ 238
+ 239
+ 240
+ 241
+ 242
+ 243
+ 244
+ 245
+ 246
+ 247
+ 248
+ 249
+ 250
+ 251
+ 252
+ 253
+ 254
+ 255
+ 256
+ 257
+ 258
+ 259
+ 260
+ 261
+ 262
+ 263
+ 264
+ 265
+ 266
+ 267
+ 268
+ 269
+ 270
+ 271
+ 272
+ 273
+ 274
+ 275
+ 276
+ 277
+ 278
+ 279
+ 280
+ 281
+ 282
+ 283
+ 284
+ 285
+ 286
+ 287
+ 288
+ 289
+ 290
+ 291
+ 292
+ 293
+ 294
+ 295
+ 296
+ 297
+ 298
+ 299
+ 300
+ 301
+ 302
+ 303
+ 304
+ 305
+ 306
+ 307
+ 308
+ 309
+ 310
+ 311
+ 312
+ 313
+ 314
+ 315
+ 316
+ 317
+ 318
+ 319
+ 320
+ 321
+ 322
+ 323
+ 324
+ 325
+ 326
+ 327
+ 328
+ 329
+ 330
+ 331
+ 332
+ 333
+ 334
+ 335
+ 336
+ 337
+ 338
+ 339
+ 340
+ 341
+ 342
+ 343
+ 344
+ 345
+ 346
+ 347
+ 348
+ 349
+ 350
+ 351
+ 352
+ 353
+ 354
+ 355
+ 356
+ 357
+ 358
+ 359
+ 360
+ 361
+ 362
+ 363
+ 364
+ 365
+ 366
+ 367
+ 368
+ 369
+ 370
+ 371
+ 372
+ 373
+ 374
+ 375
+ 376
+ 377
+ 378
+ 379
+ 380
+ 381
+ 382
+ 383
+ 384
+ 385
+ 386
+ 387
+ 388
+ 389
+ 390
+ 391
+ 392
+ 393
+ 394
+ 395
+ 396
+ 397
+ 398
+ 399
+ 400
+ 401
+ 402
+ 403
+ 404
+ 405
+ 406
+ 407
+ 408
+ 409
+ 410
+ 411
+ 412
+ 413
+ 414
+ 415
+ 416
+ 417
+ 418
+ 419
+ 420
+ 421
+ 422
+ 423
+ 424
+ 425
+ 426
+ 427
+ 428
+ 429
+ 430
+ 431
+ 432
+ 433
+ 434
+ 435
+ 436
+ 437
+ 438
+ 439
+ 440
+ 441
+ 442
+ 443
+ 444
+ 445
+ 446
+ 447
+ 448
+ 449
+ 450
+ 451
+ 452
+ 453
+ 454
+ 455
+ 456
+ 457
+ 458
+ 459
+ 460
+ 461
+ 462
+ 463
+ 464
+ 465
+ 466
+ 467
+ 468
+ 469
+ 470
+ 471
+ 472
+ 473
+ 474
+ 475
+ 476
+ 477
+ 478
+ 479
+ 480
+ 481
+ 482
+ 483
+ 484
+ 485
+ 486
+ 487
+ 488
+ 489
+ 490
+ 491
+ 492
+ 493
+ 494
+ 495
+ 496
+ 497
+ 498
+ 499
+ 500
+ 501
+ 502
+ 503
+ 504
+ 505
+ 506
+ 507
+ 508
+ 509
+ 510
+ 511
+ 512
+ 513
+ 514
+ 515
+ 516
+ 517
+ 518
+ 519
+ 520
+ 521
+ 522
+ 523
+ 524
+ 525
+ 526
+ 527
+ 528
+ 529
+ 530
+ 531
+ 532
+ 533
+ 534
+ 535
+ 536
+ 537
+ 538
+ 539
+ 540
+ 541
+ 542
+ 543
+ 544
+ 545
+ 546
+ 547
+ 548
+ 549
+ 550
+ 551
+ 552
+ 553
+ 554
+ 555
+ 556
+ 557
+ 558
+ 559
+ 560
+ 561
+ 562
+ 563
+ 564
+ 565
+ 566
+ 567
+ 568
+ 569
+ 570
+ 571
+ 572
+ 573
+ 574
+ 575
+ 576
+ 577
+ 578
+ 579
+ 580
+ 581
+ 582
+ 583
+ 584
+ 585
+ 586
+ 587
+ 588
+ 589
+ 590
+ 591
+ 592
+ 593
+ 594
+ 595
+ 596
+ 597
+ 598
+ 599
+ 600
+ 601
+ 602
+ 603
+ 604
+ 605
+ 606
+ 607
+ 608
+ 609
+ 610
+ 611
+ 612
+ 613
+ 614
+ 615
+ 616
+ 617
+ 618
+ 619
+ 620
+ 621
+ 622
+ 623
+ 624
+ 625
+ 626
+ 627
+ 628
+ 629
+ 630
+ 631
+ 632
+ 633
+ 634
+ 635
+ 636
+ 637
+ 638
+ 639
+ 640
+ 641
+ 642
+ 643
+ 644
+ 645
+ 646
+ 647
+ 648
+ 649
+ 650
+ 651
+ 652
+ 653
+ 654
+ 655
+ 656
+ 657
+ 658
+ 659
+ 660
+ 661
+ 662
+ 663
+ 664
+ 665
+ 666
+ 667
+ 668
+ 669
+ 670
+ 671
+ 672
+ 673
+ 674
+ 675
+ 676
+ 677
+ 678
+ 679
+ 680
+ 681
+ 682
+ 683
+ 684
+ 685
+ 686
+ 687
+ 688
+ 689
+ 690
+ 691
+ 692
+ 693
+ 694
+ 695
+ 696
+ 697
+ 698
+ 699
+ 700
+ 701
+ 702
+ 703
+ 704
+ 705
+ 706
+ 707
+ 708
+ 709
+ 710
+ 711
+ 712
+ 713
+ 714
+ 715
+ 716
+ 717
+ 718
+ 719
+ 720
+ 721
+ 722
+ 723
+ 724
+ 725
+ 726
+ 727
+ 728
+ 729
+ 730
+ 731
+ 732
+ 733
+ 734
+ 735
+ 736
+ 737
+ 738
+ 739
+ 740
+ 741
+ 742
+ 743
+ 744
+ 745
+ 746
+ 747
+ 748
+ 749
+ 750
+ 751
+ 752
+ 753
+ 754
+ 755
+ 756
+ 757
+ 758
+ 759
+ 760
+ 761
+ 762
+ 763
+ 764
+ 765
+ 766
+ 767
+ 768
+ 769
+ 770
+ 771
+ 772
+ 773
+ 774
+ 775
+ 776
+ 777
+ 778
+ 779
+ 780
+ 781
+ 782
+ 783
+ 784
+ 785
+ 786
+ 787
+ 788
+ 789
+ 790
+ 791
+ 792
+ 793
+ 794
+ 795
+ 796
+ 797
+ 798
+ 799
+ 800
+ 801
+ 802
+ 803
+ 804
+ 805
+ 806
+ 807
+ 808
+ 809
+ 810
+ 811
+ 812
+ 813
+ 814
+ 815
+ 816
+ 817
+ 818
+ 819
+ 820
+ 821
+ 822
+ 823
+ 824
+ 825
+ 826
+ 827
+ 828
+ 829
+ 830
+ 831
+ 832
+ 833
+ 834
+ 835
+ 836
+ 837
+ 838
+ 839
+ 840
+ 841
+ 842
+ 843
+ 844
+ 845
+ 846
+ 847
+ 848
+ 849
+ 850
+ 851
+ 852
+ 853
+ 854
+ 855
+ 856
+ 857
+ 858
+ 859
+ 860
+ 861
+ 862
+ 863
+ 864
+ 865
+ 866
+ 867
+ 868
+ 869
+ 870
+ 871
+ 872
+ 873
+ 874
+ 875
+ 876
+ 877
+ 878
+ 879
+ 880
+ 881
+ 882
+ 883
+ 884
+ 885
+ 886
+ 887
+ 888
+ 889
+ 890
+ 891
+ 892
+ 893
+ 894
+ 895
+ 896
+ 897
+ 898
+ 899
+ 900
+ 901
+ 902
+ 903
+ 904
+ 905
+ 906
+ 907
+ 908
+ 909
+ 910
+ 911
+ 912
+ 913
+ 914
+ 915
+ 916
+ 917
+ 918
+ 919
+ 920
+ 921
+ 922
+ 923
+ 924
+ 925
+ 926
+ 927
+ 928
+ 929
+ 930
+ 931
+ 932
+ 933
+ 934
+ 935
+ 936
+ 937
+ 938
+ 939
+ 940
+ 941
+ 942
+ 943
+ 944
+ 945
+ 946
+ 947
+ 948
+ 949
+ 950
+ 951
+ 952
+ 953
+ 954
+ 955
+ 956
+ 957
+ 958
+ 959
+ 960
+ 961
+ 962
+ 963
+ 964
+ 965
+ 966
+ 967
+ 968
+ 969
+ 970
+ 971
+ 972
+ 973
+ 974
+ 975
+ 976
+ 977
+ 978
+ 979
+ 980
+ 981
+ 982
+ 983
+ 984
+ 985
+ 986
+ 987
+ 988
+ 989
+ 990
+ 991
+ 992
+ 993
+ 994
+ 995
+ 996
+ 997
+ 998
+ 999
+1000
+1001
+1002
+1003
+1004
+1005
+1006
+1007
+1008
+1009
+1010
+1011
+1012
+1013
+1014
+1015
+1016
+1017
+1018
+1019
+1020
+1021
+1022
+1023
+1024
+1025
+1026
+1027
+1028
+1029
+1030
+1031
+1032
+1033
+1034
+1035
+1036
+1037
+1038
+1039
+1040
+1041
+1042
+1043
+1044
+1045
+1046
+1047
+1048
+1049
+1050
+1051
+1052
+1053
+1054
+1055
+1056
+1057
+1058
+1059
+1060
+1061
+1062
+1063
+1064
+1065
+1066
+1067
+1068
+1069
+1070
+1071
+1072
+1073
+1074
+1075
+1076
+1077
+1078
+1079
+1080
+1081
+1082
+1083
+1084
+1085
+1086
+1087
+1088
+1089
+1090
+1091
+1092
+1093
+1094
+1095
+1096
+1097
+1098
+1099
+1100
+1101
+1102
+1103
+1104
+1105
+1106
+1107
class KnowledgeComplex:
    """
    Manage a knowledge complex instance: add elements, validate, query.

    Maps to a kc:Complex individual in the RDF graph. Each element added
    via add_vertex / add_edge / add_face becomes a kc:hasElement member of
    this complex. Boundary-closure is enforced by ComplexShape on every write
    (the "slice rule": every prefix of the insertion sequence is a valid complex).
    An element's boundary elements must be added before the element itself.

    Parameters
    ----------
    schema : SchemaBuilder
        A fully configured schema. The merged OWL + SHACL is loaded into
        the internal graph at construction time.
    query_dirs : list[Path], optional
        Additional directories containing .sparql query templates
        (e.g. from domain models). Merged with the framework's built-in queries.

    Example
    -------
    >>> from knowledgecomplex import SchemaBuilder, KnowledgeComplex, vocab
    >>> sb = SchemaBuilder(namespace="aaa")
    >>> sb.add_vertex_type("spec")
    >>> sb.add_vertex_type("guidance")
    >>> sb.add_edge_type("verification",
    ...     attributes={"status": vocab("passing", "failing", "pending")})
    >>> kc = KnowledgeComplex(schema=sb)
    >>> kc.add_vertex("spec-001", type="spec", uri="file:///docs/spec-001.md")
    >>> kc.add_vertex("guidance-001", type="guidance")
    >>> kc.add_edge("ver-001", type="verification",
    ...             vertices={"spec-001", "guidance-001"}, status="passing")
    """

    def __init__(
        self,
        schema: SchemaBuilder,
        query_dirs: list[Path] | None = None,
    ) -> None:
        self._schema = schema
        self._query_dirs_raw = query_dirs or []
        self._query_templates = _load_query_templates(extra_dirs=query_dirs)
        self._instance_graph: Any = None  # rdflib.Graph, populated in _init_graph()
        self._complex_iri: Any = None     # URIRef for the kc:Complex individual
        self._ns = schema._ns
        self._codecs: dict[str, Codec] = {}
        self._defer_verification = False
        self._init_graph()

    def __repr__(self) -> str:
        n = len(self.element_ids())
        return f"KnowledgeComplex(namespace={self._schema._namespace!r}, elements={n})"

    def _init_graph(self) -> None:
        """
        Initialize the instance graph and create the kc:Complex individual.

        Parses the merged OWL from schema into the instance graph.
        Creates a kc:Complex individual to serve as the container for all
        elements added via add_vertex / add_edge / add_face.
        Stores the ontology + shapes graphs separately for pyshacl calls.
        """
        # Parse merged OWL into instance graph (TBox + ABox in one graph)
        self._instance_graph = Graph()
        self._instance_graph.parse(data=self._schema.dump_owl(), format="turtle")

        # Separate graphs for pyshacl validation
        self._ont_graph = Graph()
        self._ont_graph.parse(data=self._schema.dump_owl(), format="turtle")
        self._shacl_graph = Graph()
        self._shacl_graph.parse(data=self._schema.dump_shacl(), format="turtle")

        # Create kc:Complex individual
        self._complex_iri = URIRef(f"{self._schema._base_iri}_complex")
        self._instance_graph.add((self._complex_iri, RDF.type, _KC.Complex))

        # Bind prefixes for SPARQL queries and serialization
        self._instance_graph.bind("kc", _KC)
        self._instance_graph.bind(self._schema._namespace, self._ns)
        self._instance_graph.bind("rdfs", RDFS)
        self._instance_graph.bind("rdf", RDF)
        self._instance_graph.bind("owl", OWL)
        self._instance_graph.bind("xsd", XSD)

    def _run_shacl(self) -> tuple[bool, str]:
        """
        Run pyshacl once over the current instance/ontology/shapes graphs.

        Single point of truth for the validation configuration, shared by
        :meth:`_validate`, :meth:`verify`, and :meth:`audit` so the three
        paths cannot drift apart.

        Returns
        -------
        tuple[bool, str]
            (conforms, human-readable results text).
        """
        conforms, _, results_text = pyshacl.validate(
            data_graph=self._instance_graph,
            shacl_graph=self._shacl_graph,
            ont_graph=self._ont_graph,
            inference="rdfs",
            abort_on_first=False,
        )
        return conforms, results_text

    def _validate(self, focus_node_id: str | None = None) -> None:
        """
        Run pyshacl against the current instance graph.

        Skipped when deferred_verification() context manager is active.

        Parameters
        ----------
        focus_node_id : str, optional
            If provided, used in the error message to identify which element
            triggered the failure.

        Raises
        ------
        ValidationError
            If verification fails. report attribute contains human-readable text.
        """
        if self._defer_verification:
            return

        conforms, results_text = self._run_shacl()
        if not conforms:
            msg = "SHACL validation failed"
            if focus_node_id:
                msg += f" (after adding '{focus_node_id}')"
            raise ValidationError(msg, report=results_text)

    def verify(self) -> None:
        """
        Run SHACL verification on the current instance graph.

        Checks all topological and ontological constraints. Raises on failure.
        Use :meth:`audit` for a non-throwing alternative.

        Raises
        ------
        ValidationError
            If any SHACL constraint is violated.
        """
        # Bypass the deferral flag — verify() is an explicit user request
        conforms, results_text = self._run_shacl()
        if not conforms:
            raise ValidationError("SHACL verification failed", report=results_text)

    def audit(self) -> "AuditReport":
        """
        Run SHACL verification and return a structured report.

        Unlike :meth:`verify`, this never raises — it returns an
        :class:`~knowledgecomplex.audit.AuditReport` with ``conforms``,
        ``violations``, and ``text`` fields.

        Returns
        -------
        AuditReport
        """
        from knowledgecomplex.audit import _build_report
        conforms, results_text = self._run_shacl()
        return _build_report(conforms, results_text, self._schema._namespace)

    def deferred_verification(self) -> "_DeferredVerification":
        """
        Context manager that suppresses per-write SHACL verification.

        Inside the context, ``add_vertex``, ``add_edge``, and ``add_face``
        skip SHACL checks. On exit, a single verification pass runs over
        the entire graph. If verification fails, ``ValidationError`` is raised.

        This is much faster for bulk construction — one SHACL pass instead
        of one per element.

        Example
        -------
        >>> with kc.deferred_verification():
        ...     kc.add_vertex("v1", type="Node")
        ...     kc.add_vertex("v2", type="Node")
        ...     kc.add_edge("e1", type="Link", vertices={"v1", "v2"})
        """
        return _DeferredVerification(self)

    def _assert_element(
        self,
        id: str,
        type: str,
        boundary_ids: list[str] | None,
        attributes: dict[str, Any],
        uri: str | None = None,
    ) -> None:
        """Common logic for add_vertex, add_edge, add_face."""
        # Step 0: Python-side type guard
        if type not in self._schema._types:
            raise ValidationError(
                f"Unregistered type: '{type}'",
                report=f"Type '{type}' is not registered in the schema. "
                       f"Registered types: {sorted(self._schema._types.keys())}",
            )

        type_iri = self._ns[type]
        id_iri = URIRef(f"{self._schema._base_iri}{id}")

        # Track triples added for rollback on validation failure
        added_triples = []

        def add(s, p, o):
            self._instance_graph.add((s, p, o))
            added_triples.append((s, p, o))

        # Assert type
        add(id_iri, RDF.type, type_iri)

        # Assert boundary relations
        if boundary_ids:
            for bid in boundary_ids:
                b_iri = URIRef(f"{self._schema._base_iri}{bid}")
                add(id_iri, _KC.boundedBy, b_iri)

        # Assert kc:uri (superstructure attribute — uses kc: namespace, not model namespace)
        if uri is not None:
            add(id_iri, _KC.uri, Literal(uri, datatype=XSD.anyURI))

        # Assert attributes (in model namespace)
        for attr_name, attr_value in attributes.items():
            if isinstance(attr_value, (list, tuple)):
                for v in attr_value:
                    add(id_iri, self._ns[attr_name], Literal(v))
            else:
                add(id_iri, self._ns[attr_name], Literal(attr_value))

        # Add to complex
        add(self._complex_iri, _KC.hasElement, id_iri)

        # Validate — rollback on failure
        try:
            self._validate(id)
        except ValidationError:
            for s, p, o in added_triples:
                self._instance_graph.remove((s, p, o))
            raise

    def add_vertex(
        self,
        id: str,
        type: str,
        uri: str | None = None,
        **attributes: Any,
    ) -> None:
        """
        Assert a vertex individual and add it to the complex.

        Asserts the vertex as an individual of the given type (subclass of
        KC:Vertex), then asserts kc:hasElement on the complex. Validates via
        SHACL. Vertices have empty boundary (k=0), so boundary-closure is
        trivially satisfied.

        Parameters
        ----------
        id : str
            Local identifier for the vertex.
        type : str
            Vertex type name (must be a registered subclass of KC:Vertex).
        uri : str, optional
            Source file URI for this element (e.g. "file:///path/to/doc.md").
            Stored as kc:uri (xsd:anyURI). At-most-one per element.
        **attributes : Any
            Additional attribute values for the vertex.

        Raises
        ------
        ValidationError
            If SHACL validation fails after assertion.
        """
        self._assert_element(id, type, boundary_ids=None, attributes=attributes, uri=uri)

    def add_edge(
        self,
        id: str,
        type: str,
        vertices: set[str] | list[str],
        uri: str | None = None,
        **attributes: Any,
    ) -> None:
        """
        Assert an edge individual, link to boundary vertices, and add to complex.

        Asserts the edge as an individual of the given type (subclass of
        KC:Edge), links it to exactly 2 boundary vertices via kc:boundedBy,
        then asserts kc:hasElement on the complex. Validates via SHACL including:
        - EdgeShape: exactly 2 distinct boundary vertices
        - ComplexShape: boundary vertices must already be members of the complex

        Parameters
        ----------
        id : str
            Local identifier for the edge.
        type : str
            Edge type name (must be a registered subclass of KC:Edge).
        vertices : set[str] | list[str]
            Exactly 2 vertex IDs forming the boundary of this edge.
            Unordered (edges are unoriented).
        uri : str, optional
            Source file URI for this element.
        **attributes : Any
            Attribute values (e.g. status="passing").

        Raises
        ------
        ValueError
            If len(vertices) != 2.
        ValidationError
            If SHACL validation fails after assertion.
        """
        if len(vertices) != 2:
            raise ValueError(f"add_edge requires exactly 2 vertices; got {len(vertices)}")
        self._assert_element(id, type, boundary_ids=list(vertices), attributes=attributes, uri=uri)

    def add_face(
        self,
        id: str,
        type: str,
        boundary: list[str],
        uri: str | None = None,
        **attributes: Any,
    ) -> None:
        """
        Assert a face individual, link to boundary edges, and add to complex.

        Asserts the face as an individual of the given type (subclass of
        KC:Face), links it to exactly 3 boundary edges via kc:boundedBy,
        then asserts kc:hasElement on the complex. Validates via SHACL including:
        - FaceShape: exactly 3 boundary edges forming a closed triangle
        - ComplexShape: boundary edges must already be members of the complex

        Parameters
        ----------
        id : str
            Local identifier for the face.
        type : str
            Face type name (must be a registered subclass of KC:Face).
        boundary : list[str]
            Exactly 3 edge IDs forming the boundary of this face.
        uri : str, optional
            Source file URI for this element.
        **attributes : Any
            Attribute values.

        Raises
        ------
        ValueError
            If len(boundary) != 3.
        ValidationError
            If SHACL validation fails after assertion.
        """
        if len(boundary) != 3:
            raise ValueError(f"add_face requires exactly 3 boundary edges; got {len(boundary)}")
        self._assert_element(id, type, boundary_ids=boundary, attributes=attributes, uri=uri)

    def remove_element(self, id: str) -> None:
        """Remove an element and all its triples from the complex.

        Removes the element's type assertion, boundary relations (both
        directions), attributes, kc:uri, and kc:hasElement membership.

        No validation is performed after removal — the caller is responsible
        for ensuring the resulting complex is valid (e.g. removing faces
        before their boundary edges).

        Parameters
        ----------
        id : str
            Element identifier to remove.

        Raises
        ------
        ValueError
            If no element with that ID exists.
        """
        iri = URIRef(f"{self._schema._base_iri}{id}")
        if (iri, RDF.type, None) not in self._instance_graph:
            raise ValueError(f"No element with id '{id}' in the complex")

        # Remove all triples where element is subject
        for s, p, o in list(self._instance_graph.triples((iri, None, None))):
            self._instance_graph.remove((s, p, o))

        # Remove all triples where element is object (coboundary, hasElement)
        for s, p, o in list(self._instance_graph.triples((None, None, iri))):
            self._instance_graph.remove((s, p, o))

    def _render_template(self, template_name: str, kwargs: dict[str, Any]) -> str:
        """
        Look up a registered template and substitute {placeholder} tokens.

        Shared by :meth:`query` and :meth:`query_ids` so lookup and
        substitution behavior cannot diverge.

        NOTE(review): substitution is plain string replacement with no
        escaping — callers must not pass untrusted values (SPARQL-injection
        risk).

        Raises
        ------
        UnknownQueryError
            If template_name is not registered.
        """
        if template_name not in self._query_templates:
            raise UnknownQueryError(
                f"No query template named '{template_name}'. "
                f"Available: {sorted(self._query_templates)}"
            )
        sparql = self._query_templates[template_name]
        for key, value in kwargs.items():
            sparql = sparql.replace(f"{{{key}}}", str(value))
        return sparql

    def _sparql_ns(self) -> dict[str, Any]:
        """Namespace bindings supplied to every SPARQL query (queries may not declare all prefixes)."""
        return {
            "kc": _KC,
            "rdf": RDF,
            "rdfs": RDFS,
            "owl": OWL,
            "xsd": XSD,
            self._schema._namespace: self._ns,
        }

    def query(self, template_name: str, **kwargs: Any) -> pd.DataFrame:
        """
        Execute a named SPARQL template and return results as a DataFrame.

        Parameters
        ----------
        template_name : str
            Name of a registered query template (filename stem from
            framework or model query directories).
        **kwargs : Any
            Substitution values for {placeholder} tokens in the template.

        Returns
        -------
        pd.DataFrame
            One row per SPARQL result binding.

        Raises
        ------
        UnknownQueryError
            If template_name is not registered.
        """
        sparql = self._render_template(template_name, kwargs)
        results = self._instance_graph.query(sparql, initNs=self._sparql_ns())

        columns = [str(v) for v in results.vars]
        rows = []
        for row in results:
            rows.append([str(val) if val is not None else None for val in row])
        return pd.DataFrame(rows, columns=columns)

    def query_ids(self, template_name: str, **kwargs: Any) -> set[str]:
        """Execute a named SPARQL template and return the first column as element IDs.

        Like :meth:`query` but returns a ``set[str]`` of element IDs
        (namespace prefix stripped) instead of a DataFrame.  Useful for
        obtaining subcomplexes from parameterized queries.

        Parameters
        ----------
        template_name : str
            Name of a registered query template.
        **kwargs : Any
            Substitution values for ``{placeholder}`` tokens in the template.

        Returns
        -------
        set[str]

        Raises
        ------
        UnknownQueryError
            If template_name is not registered.
        """
        sparql = self._render_template(template_name, kwargs)
        return self._ids_from_query(sparql)

    def dump_graph(self) -> str:
        """Return the instance graph as a Turtle string."""
        return self._instance_graph.serialize(format="turtle")

    def export(self, path: str | Path) -> Path:
        """
        Export the schema, queries, and instance graph to a directory.

        Writes ontology.ttl, shapes.ttl, queries/*.sparql, and instance.ttl.

        Parameters
        ----------
        path : str | Path
            Target directory. Created if it does not exist.

        Returns
        -------
        Path
            The export directory.
        """
        p = Path(path)
        self._schema.export(p, query_dirs=self._query_dirs_raw)
        # Turtle is UTF-8 by specification — don't depend on the platform
        # default encoding (which may not round-trip non-ASCII IRIs/literals).
        (p / "instance.ttl").write_text(self.dump_graph(), encoding="utf-8")
        return p

    @classmethod
    def load(cls, path: str | Path) -> "KnowledgeComplex":
        """
        Load a knowledge complex from a directory.

        Reads ontology.ttl and shapes.ttl to reconstruct the schema,
        queries/*.sparql for query templates, and instance.ttl (if present)
        for the instance graph.

        Parameters
        ----------
        path : str | Path
            Directory containing at minimum ontology.ttl and shapes.ttl.

        Returns
        -------
        KnowledgeComplex
        """
        p = Path(path)
        schema = SchemaBuilder.load(p)
        query_dir = p / "queries"
        query_dirs = [query_dir] if query_dir.exists() else []
        kc = cls(schema=schema, query_dirs=query_dirs)
        instance_file = p / "instance.ttl"
        if instance_file.exists():
            kc._instance_graph.parse(str(instance_file), format="turtle")
        return kc

    # --- Element handles and listing ---

    def element(self, id: str) -> Element:
        """
        Get an Element handle for the given element ID.

        Parameters
        ----------
        id : str
            Local identifier of the element.

        Returns
        -------
        Element

        Raises
        ------
        ValueError
            If no element with that ID exists in the graph.
        """
        iri = URIRef(f"{self._schema._base_iri}{id}")
        if (iri, RDF.type, None) not in self._instance_graph:
            raise ValueError(f"No element with id '{id}' in the complex")
        return Element(self, id)

    def element_ids(self, type: str | None = None) -> list[str]:
        """
        List element IDs, optionally filtered by type (includes subtypes).

        Parameters
        ----------
        type : str, optional
            Filter to elements of this type or any subtype.

        Returns
        -------
        list[str]
        """
        ns_str = self._schema._base_iri
        if type is not None:
            if type not in self._schema._types:
                raise SchemaError(f"Type '{type}' is not registered")
            type_iri = self._ns[type]
            # Use SPARQL with subClassOf* to include subtypes.
            # Reference the predicate through _KC (not a hardcoded IRI string)
            # so it stays consistent with the rest of this module.
            sparql = f"""
            SELECT ?elem WHERE {{
                ?elem a/rdfs:subClassOf* <{type_iri}> .
                <{self._complex_iri}> <{_KC.hasElement}> ?elem .
            }}
            """
            results = self._instance_graph.query(
                sparql, initNs={"rdfs": RDFS, "rdf": RDF}
            )
            ids = []
            for row in results:
                elem_str = str(row[0])
                if elem_str.startswith(ns_str):
                    ids.append(elem_str[len(ns_str):])
            return sorted(ids)
        else:
            # All elements in the complex
            ids = []
            for _, _, o in self._instance_graph.triples(
                (self._complex_iri, _KC.hasElement, None)
            ):
                elem_str = str(o)
                if elem_str.startswith(ns_str):
                    ids.append(elem_str[len(ns_str):])
            return sorted(ids)

    def elements(self, type: str | None = None) -> list[Element]:
        """
        List Element handles, optionally filtered by type (includes subtypes).

        Parameters
        ----------
        type : str, optional
            Filter to elements of this type or any subtype.

        Returns
        -------
        list[Element]
        """
        return [Element(self, id) for id in self.element_ids(type=type)]

    def is_subcomplex(self, ids: set[str]) -> bool:
        """
        Check whether a set of element IDs forms a valid subcomplex.

        A set is a valid subcomplex iff it is closed under the boundary
        operator: for every element in the set, all its boundary elements
        are also in the set.

        Parameters
        ----------
        ids : set[str]
            Element identifiers to check.

        Returns
        -------
        bool
        """
        if not ids:
            return True
        return set(ids) == self.closure(ids)

    # --- Topological query helpers ---

    def _iri(self, id: str) -> str:
        """Return the full IRI string for an element ID."""
        return f"{self._schema._base_iri}{id}"

    def _type_filter_clause(self, var: str, type: str | None) -> str:
        """Return a SPARQL clause filtering ?var by type, or empty string."""
        if type is None:
            return ""
        if type not in self._schema._types:
            raise SchemaError(f"Type '{type}' is not registered")
        type_iri = self._ns[type]
        return f"?{var} a/rdfs:subClassOf* <{type_iri}> ."

    def _ids_from_query(self, sparql: str) -> set[str]:
        """Execute SPARQL and return the first column as a set of element IDs."""
        ns_str = self._schema._base_iri
        results = self._instance_graph.query(sparql, initNs=self._sparql_ns())
        ids: set[str] = set()
        for row in results:
            val = str(row[0])
            if val.startswith(ns_str):
                ids.add(val[len(ns_str):])
        return ids

    # --- Topological query methods ---

    def boundary(self, id: str, *, type: str | None = None) -> set[str]:
        """Return ∂(id): the direct faces of element id via kc:boundedBy.

        For a vertex, returns the empty set.
        For an edge, returns its 2 boundary vertices.
        For a face, returns its 3 boundary edges.

        Parameters
        ----------
        id : str
            Element identifier.
        type : str, optional
            Filter results to this type (including subtypes).

        Returns
        -------
        set[str]
        """
        sparql = (
            self._query_templates["boundary"]
            .replace("{simplex}", f"<{self._iri(id)}>")
            .replace("{type_filter}", self._type_filter_clause("boundary", type))
        )
        return self._ids_from_query(sparql)

    def coboundary(self, id: str, *, type: str | None = None) -> set[str]:
        """Return the cofaces of id: all simplices whose boundary contains id.

        Computes {τ ∈ K : id ∈ ∂(τ)} — the set of (k+1)-simplices that
        have id as a boundary element.  This is the combinatorial coface
        relation, not the algebraic coboundary operator δ on cochains.

        Parameters
        ----------
        id : str
            Element identifier.
        type : str, optional
            Filter results to this type (including subtypes).

        Returns
        -------
        set[str]
        """
        tf = self._type_filter_clause("coboundary", type)
        sparql = f"""\
PREFIX kc: <https://example.org/kc#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?coboundary WHERE {{
    ?coboundary kc:boundedBy <{self._iri(id)}> .
    {tf}
}}"""
        return self._ids_from_query(sparql)

    def star(self, id: str, *, type: str | None = None) -> set[str]:
        """Return St(id): all simplices containing id as a face (transitive coboundary + self).

        Parameters
        ----------
        id : str
            Element identifier.
        type : str, optional
            Filter results to this type (including subtypes).

        Returns
        -------
        set[str]
        """
        sparql = (
            self._query_templates["star"]
            .replace("{simplex}", f"<{self._iri(id)}>")
            .replace("{type_filter}", self._type_filter_clause("star", type))
        )
        return self._ids_from_query(sparql)

    def closure(self, ids: str | set[str], *, type: str | None = None) -> set[str]:
        """Return Cl(ids): the smallest subcomplex containing ids.

        Accepts a single ID or a set of IDs. When given a set, returns the
        union of closures.

        Parameters
        ----------
        ids : str or set[str]
            Element identifier(s).
        type : str, optional
            Filter results to this type (including subtypes).

        Returns
        -------
        set[str]
        """
        if isinstance(ids, str):
            sparql = (
                self._query_templates["closure"]
                .replace("{simplex}", f"<{self._iri(ids)}>")
                .replace("{type_filter}", self._type_filter_clause("closure", type))
            )
            return self._ids_from_query(sparql)
        # Cl(∅) = ∅; also avoids emitting a malformed empty VALUES clause.
        if not ids:
            return set()
        # Set input: use VALUES clause
        values = " ".join(f"(<{self._iri(i)}>)" for i in ids)
        tf = self._type_filter_clause("closure", type)
        sparql = f"""\
PREFIX kc: <https://example.org/kc#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?closure WHERE {{
    VALUES (?sigma) {{ {values} }}
    ?sigma kc:boundedBy* ?closure .
    {tf}
}}"""
        return self._ids_from_query(sparql)

    def closed_star(self, id: str, *, type: str | None = None) -> set[str]:
        """Return Cl(St(id)): the closure of the star.

        Always a valid subcomplex.

        Parameters
        ----------
        id : str
            Element identifier.
        type : str, optional
            Filter results to this type (including subtypes).

        Returns
        -------
        set[str]
        """
        return self.closure(self.star(id), type=type)

    def link(self, id: str, *, type: str | None = None) -> set[str]:
        """Return Lk(id): Cl(St(id)) \\ St(id).

        The link is the set of simplices in the closed star that do not
        themselves contain id as a face.

        Parameters
        ----------
        id : str
            Element identifier.
        type : str, optional
            Filter results to this type (including subtypes).

        Returns
        -------
        set[str]
        """
        result = self.closed_star(id) - self.star(id)
        if type is not None:
            typed = set(self.element_ids(type=type))
            result &= typed
        return result

    def skeleton(self, k: int) -> set[str]:
        """Return sk_k(K): all elements of dimension <= k.

        k=0: vertices only
        k=1: vertices and edges
        k=2: vertices, edges, and faces (everything)

        Parameters
        ----------
        k : int
            Maximum dimension (0, 1, or 2).

        Returns
        -------
        set[str]

        Raises
        ------
        ValueError
            If k < 0 or k > 2.
        """
        if k < 0 or k > 2:
            raise ValueError(f"skeleton dimension must be 0, 1, or 2; got {k}")
        dim_classes_map = {
            0: [_KC.Vertex],
            1: [_KC.Vertex, _KC.Edge],
            2: [_KC.Vertex, _KC.Edge, _KC.Face],
        }
        classes = dim_classes_map[k]
        unions = " UNION ".join(
            f"{{ ?elem a/rdfs:subClassOf* <{c}> }}" for c in classes
        )
        sparql = (
            self._query_templates["skeleton"]
            .replace("{complex}", f"<{self._complex_iri}>")
            .replace("{dim_classes}", unions)
        )
        return self._ids_from_query(sparql)

    def degree(self, id: str) -> int:
        """Return deg(id): the number of edges incident to vertex id.

        Parameters
        ----------
        id : str
            Vertex identifier.

        Returns
        -------
        int
        """
        sparql = (
            self._query_templates["degree"]
            .replace("{simplex}", f"<{self._iri(id)}>")
        )
        results = self._instance_graph.query(sparql, initNs=self._sparql_ns())
        # The template yields a single aggregate row; no rows means degree 0.
        for row in results:
            return int(row[0])
        return 0

    # --- Codec registration and resolution ---

    def register_codec(self, type_name: str, codec: Codec) -> None:
        """
        Register a codec for the given type.

        Parameters
        ----------
        type_name : str
            Must be a registered type in the schema.
        codec : Codec
            Object implementing compile() and decompile().
        """
        if type_name not in self._schema._types:
            raise SchemaError(f"Type '{type_name}' is not registered")
        if not isinstance(codec, Codec):
            raise TypeError(
                f"Expected a Codec instance, got {type(codec).__name__}"
            )
        self._codecs[type_name] = codec

    def _resolve_codec(self, type_name: str) -> Codec:
        """Walk type hierarchy to find the nearest registered codec."""
        current: str | None = type_name
        while current is not None:
            if current in self._codecs:
                return self._codecs[current]
            current = self._schema._types.get(current, {}).get("parent")
        raise SchemaError(
            f"No codec registered for type '{type_name}' or any of its ancestors"
        )

    def decompile_uri(self, type_name: str, uri: str) -> dict:
        """
        Decompile an artifact at a URI without adding it to the graph.

        Parameters
        ----------
        type_name : str
            The element type (used to resolve the codec).
        uri : str
            URI of the artifact to read.

        Returns
        -------
        dict
            Attribute key-value pairs.
        """
        if type_name not in self._schema._types:
            raise SchemaError(f"Type '{type_name}' is not registered")
        codec = self._resolve_codec(type_name)
        return codec.decompile(uri)
+
+ + + +
+ + + + + + + + + + +
+ + +

+ verify() + +

+ + +
+ +

Run SHACL verification on the current instance graph.

+

Checks all topological and ontological constraints. Raises on failure. +Use :meth:audit for a non-throwing alternative.

+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValidationError + +
+

If any SHACL constraint is violated.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
291
+292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
def verify(self) -> None:
+    """
+    Run SHACL verification on the current instance graph.
+
+    Checks all topological and ontological constraints. Raises on failure.
+    Use :meth:`audit` for a non-throwing alternative.
+
+    Raises
+    ------
+    ValidationError
+        If any SHACL constraint is violated.
+    """
+    # Bypass the deferral flag — verify() is an explicit user request
+    conforms, _, results_text = pyshacl.validate(
+        data_graph=self._instance_graph,
+        shacl_graph=self._shacl_graph,
+        ont_graph=self._ont_graph,
+        inference="rdfs",
+        abort_on_first=False,
+    )
+    if not conforms:
+        raise ValidationError("SHACL verification failed", report=results_text)
+
+
+
+ +
+ +
+ + +

+ audit() + +

+ + +
+ +

Run SHACL verification and return a structured report.

+

Unlike :meth:verify, this never raises — it returns an +:class:~knowledgecomplex.audit.AuditReport with conforms, +violations, and text fields.

+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ AuditReport + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
314
+315
+316
+317
+318
+319
+320
+321
+322
+323
+324
+325
+326
+327
+328
+329
+330
+331
+332
+333
+334
def audit(self) -> "AuditReport":
+    """
+    Run SHACL verification and return a structured report.
+
+    Unlike :meth:`verify`, this never raises — it returns an
+    :class:`~knowledgecomplex.audit.AuditReport` with ``conforms``,
+    ``violations``, and ``text`` fields.
+
+    Returns
+    -------
+    AuditReport
+    """
+    from knowledgecomplex.audit import _build_report
+    conforms, _, results_text = pyshacl.validate(
+        data_graph=self._instance_graph,
+        shacl_graph=self._shacl_graph,
+        ont_graph=self._ont_graph,
+        inference="rdfs",
+        abort_on_first=False,
+    )
+    return _build_report(conforms, results_text, self._schema._namespace)
+
+
+
+ +
+ +
+ + +

+ deferred_verification() + +

+ + +
+ +

Context manager that suppresses per-write SHACL verification.

+

Inside the context, add_vertex, add_edge, and add_face +skip SHACL checks. On exit, a single verification pass runs over +the entire graph. If verification fails, ValidationError is raised.

+

This is much faster for bulk construction — one SHACL pass instead +of one per element.

+ + +
+ Example +
+
+
+

with kc.deferred_verification(): +... kc.add_vertex("v1", type="Node") +... kc.add_vertex("v2", type="Node") +... kc.add_edge("e1", type="Link", vertices={"v1", "v2"})

+
+
+
+
+ +
+ Source code in knowledgecomplex/graph.py +
336
+337
+338
+339
+340
+341
+342
+343
+344
+345
+346
+347
+348
+349
+350
+351
+352
+353
+354
def deferred_verification(self) -> "_DeferredVerification":
+    """
+    Context manager that suppresses per-write SHACL verification.
+
+    Inside the context, ``add_vertex``, ``add_edge``, and ``add_face``
+    skip SHACL checks. On exit, a single verification pass runs over
+    the entire graph. If verification fails, ``ValidationError`` is raised.
+
+    This is much faster for bulk construction — one SHACL pass instead
+    of one per element.
+
+    Example
+    -------
+    >>> with kc.deferred_verification():
+    ...     kc.add_vertex("v1", type="Node")
+    ...     kc.add_vertex("v2", type="Node")
+    ...     kc.add_edge("e1", type="Link", vertices={"v1", "v2"})
+    """
+    return _DeferredVerification(self)
+
+
+
+ +
+ +
+ + +

+ add_vertex(id, type, uri=None, **attributes) + +

+ + +
+ +

Assert a vertex individual and add it to the complex.

+

Asserts the vertex as an individual of the given type (subclass of +KC:Vertex), then asserts kc:hasElement on the complex. Validates via +SHACL. Vertices have empty boundary (k=0), so boundary-closure is +trivially satisfied.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Local identifier for the vertex.

+
+
+ required +
+ type + + str + +
+

Vertex type name (must be a registered subclass of KC:Vertex).

+
+
+ required +
+ uri + + str + +
+

Source file URI for this element (e.g. "file:///path/to/doc.md"). +Stored as kc:uri (xsd:anyURI). At-most-one per element.

+
+
+ None +
+ **attributes + + Any + +
+

Additional attribute values for the vertex.

+
+
+ {} +
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValidationError + +
+

If SHACL validation fails after assertion.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
415
+416
+417
+418
+419
+420
+421
+422
+423
+424
+425
+426
+427
+428
+429
+430
+431
+432
+433
+434
+435
+436
+437
+438
+439
+440
+441
+442
+443
+444
+445
+446
+447
def add_vertex(
+    self,
+    id: str,
+    type: str,
+    uri: str | None = None,
+    **attributes: Any,
+) -> None:
+    """
+    Assert a vertex individual and add it to the complex.
+
+    Asserts the vertex as an individual of the given type (subclass of
+    KC:Vertex), then asserts kc:hasElement on the complex. Validates via
+    SHACL. Vertices have empty boundary (k=0), so boundary-closure is
+    trivially satisfied.
+
+    Parameters
+    ----------
+    id : str
+        Local identifier for the vertex.
+    type : str
+        Vertex type name (must be a registered subclass of KC:Vertex).
+    uri : str, optional
+        Source file URI for this element (e.g. "file:///path/to/doc.md").
+        Stored as kc:uri (xsd:anyURI). At-most-one per element.
+    **attributes : Any
+        Additional attribute values for the vertex.
+
+    Raises
+    ------
+    ValidationError
+        If SHACL validation fails after assertion.
+    """
+    self._assert_element(id, type, boundary_ids=None, attributes=attributes, uri=uri)
+
+
+
+ +
+ +
+ + +

+ add_edge(id, type, vertices, uri=None, **attributes) + +

+ + +
+ +

Assert an edge individual, link to boundary vertices, and add to complex.

+

Asserts the edge as an individual of the given type (subclass of +KC:Edge), links it to exactly 2 boundary vertices via kc:boundedBy, +then asserts kc:hasElement on the complex. Validates via SHACL including: +- EdgeShape: exactly 2 distinct boundary vertices +- ComplexShape: boundary vertices must already be members of the complex

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Local identifier for the edge.

+
+
+ required +
+ type + + str + +
+

Edge type name (must be a registered subclass of KC:Edge).

+
+
+ required +
+ vertices + + set[str] | list[str] + +
+

Exactly 2 vertex IDs forming the boundary of this edge. +Unordered (edges are unoriented).

+
+
+ required +
+ uri + + str + +
+

Source file URI for this element.

+
+
+ None +
+ **attributes + + Any + +
+

Attribute values (e.g. status="passing").

+
+
+ {} +
+ + +

Raises:

+ + + + + + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If len(vertices) != 2.

+
+
+ ValidationError + +
+

If SHACL validation fails after assertion.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
449
+450
+451
+452
+453
+454
+455
+456
+457
+458
+459
+460
+461
+462
+463
+464
+465
+466
+467
+468
+469
+470
+471
+472
+473
+474
+475
+476
+477
+478
+479
+480
+481
+482
+483
+484
+485
+486
+487
+488
+489
def add_edge(
+    self,
+    id: str,
+    type: str,
+    vertices: set[str] | list[str],
+    uri: str | None = None,
+    **attributes: Any,
+) -> None:
+    """
+    Assert an edge individual, link to boundary vertices, and add to complex.
+
+    Asserts the edge as an individual of the given type (subclass of
+    KC:Edge), links it to exactly 2 boundary vertices via kc:boundedBy,
+    then asserts kc:hasElement on the complex. Validates via SHACL including:
+    - EdgeShape: exactly 2 distinct boundary vertices
+    - ComplexShape: boundary vertices must already be members of the complex
+
+    Parameters
+    ----------
+    id : str
+        Local identifier for the edge.
+    type : str
+        Edge type name (must be a registered subclass of KC:Edge).
+    vertices : set[str] | list[str]
+        Exactly 2 vertex IDs forming the boundary of this edge.
+        Unordered (edges are unoriented).
+    uri : str, optional
+        Source file URI for this element.
+    **attributes : Any
+        Attribute values (e.g. status="passing").
+
+    Raises
+    ------
+    ValueError
+        If len(vertices) != 2.
+    ValidationError
+        If SHACL validation fails after assertion.
+    """
+    if len(vertices) != 2:
+        raise ValueError(f"add_edge requires exactly 2 vertices; got {len(vertices)}")
+    self._assert_element(id, type, boundary_ids=list(vertices), attributes=attributes, uri=uri)
+
+
+
+ +
+ +
+ + +

+ add_face(id, type, boundary, uri=None, **attributes) + +

+ + +
+ +

Assert a face individual, link to boundary edges, and add to complex.

+

Asserts the face as an individual of the given type (subclass of +KC:Face), links it to exactly 3 boundary edges via kc:boundedBy, +then asserts kc:hasElement on the complex. Validates via SHACL including: +- FaceShape: exactly 3 boundary edges forming a closed triangle +- ComplexShape: boundary edges must already be members of the complex

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Local identifier for the face.

+
+
+ required +
+ type + + str + +
+

Face type name (must be a registered subclass of KC:Face).

+
+
+ required +
+ boundary + + list[str] + +
+

Exactly 3 edge IDs forming the boundary of this face.

+
+
+ required +
+ uri + + str + +
+

Source file URI for this element.

+
+
+ None +
+ **attributes + + Any + +
+

Attribute values.

+
+
+ {} +
+ + +

Raises:

+ + + + + + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If len(boundary) != 3.

+
+
+ ValidationError + +
+

If SHACL validation fails after assertion.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
491
+492
+493
+494
+495
+496
+497
+498
+499
+500
+501
+502
+503
+504
+505
+506
+507
+508
+509
+510
+511
+512
+513
+514
+515
+516
+517
+518
+519
+520
+521
+522
+523
+524
+525
+526
+527
+528
+529
+530
def add_face(
+    self,
+    id: str,
+    type: str,
+    boundary: list[str],
+    uri: str | None = None,
+    **attributes: Any,
+) -> None:
+    """
+    Assert a face individual, link to boundary edges, and add to complex.
+
+    Asserts the face as an individual of the given type (subclass of
+    KC:Face), links it to exactly 3 boundary edges via kc:boundedBy,
+    then asserts kc:hasElement on the complex. Validates via SHACL including:
+    - FaceShape: exactly 3 boundary edges forming a closed triangle
+    - ComplexShape: boundary edges must already be members of the complex
+
+    Parameters
+    ----------
+    id : str
+        Local identifier for the face.
+    type : str
+        Face type name (must be a registered subclass of KC:Face).
+    boundary : list[str]
+        Exactly 3 edge IDs forming the boundary of this face.
+    uri : str, optional
+        Source file URI for this element.
+    **attributes : Any
+        Attribute values.
+
+    Raises
+    ------
+    ValueError
+        If len(boundary) != 3.
+    ValidationError
+        If SHACL validation fails after assertion.
+    """
+    if len(boundary) != 3:
+        raise ValueError(f"add_face requires exactly 3 boundary edges; got {len(boundary)}")
+    self._assert_element(id, type, boundary_ids=boundary, attributes=attributes, uri=uri)
+
+
+
+ +
+ +
+ + +

+ remove_element(id) + +

+ + +
+ +

Remove an element and all its triples from the complex.

+

Removes the element's type assertion, boundary relations (both +directions), attributes, kc:uri, and kc:hasElement membership.

+

No validation is performed after removal — the caller is responsible +for ensuring the resulting complex is valid (e.g. removing faces +before their boundary edges).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier to remove.

+
+
+ required +
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If no element with that ID exists.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
532
+533
+534
+535
+536
+537
+538
+539
+540
+541
+542
+543
+544
+545
+546
+547
+548
+549
+550
+551
+552
+553
+554
+555
+556
+557
+558
+559
+560
+561
+562
def remove_element(self, id: str) -> None:
+    """Remove an element and all its triples from the complex.
+
+    Removes the element's type assertion, boundary relations (both
+    directions), attributes, kc:uri, and kc:hasElement membership.
+
+    No validation is performed after removal — the caller is responsible
+    for ensuring the resulting complex is valid (e.g. removing faces
+    before their boundary edges).
+
+    Parameters
+    ----------
+    id : str
+        Element identifier to remove.
+
+    Raises
+    ------
+    ValueError
+        If no element with that ID exists.
+    """
+    iri = URIRef(f"{self._schema._base_iri}{id}")
+    if (iri, RDF.type, None) not in self._instance_graph:
+        raise ValueError(f"No element with id '{id}' in the complex")
+
+    # Remove all triples where element is subject
+    for s, p, o in list(self._instance_graph.triples((iri, None, None))):
+        self._instance_graph.remove((s, p, o))
+
+    # Remove all triples where element is object (coboundary, hasElement)
+    for s, p, o in list(self._instance_graph.triples((None, None, iri))):
+        self._instance_graph.remove((s, p, o))
+
+
+
+ +
+ +
+ + +

+ query(template_name, **kwargs) + +

+ + +
+ +

Execute a named SPARQL template and return results as a DataFrame.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ template_name + + str + +
+

Name of a registered query template (filename stem from +framework or model query directories).

+
+
+ required +
+ **kwargs + + Any + +
+

Substitution values for {placeholder} tokens in the template.

+
+
+ {} +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ DataFrame + +
+

One row per SPARQL result binding.

+
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ UnknownQueryError + +
+

If template_name is not registered.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
564
+565
+566
+567
+568
+569
+570
+571
+572
+573
+574
+575
+576
+577
+578
+579
+580
+581
+582
+583
+584
+585
+586
+587
+588
+589
+590
+591
+592
+593
+594
+595
+596
+597
+598
+599
+600
+601
+602
+603
+604
+605
+606
+607
+608
+609
+610
+611
+612
def query(self, template_name: str, **kwargs: Any) -> pd.DataFrame:
+    """
+    Execute a named SPARQL template and return results as a DataFrame.
+
+    Parameters
+    ----------
+    template_name : str
+        Name of a registered query template (filename stem from
+        framework or model query directories).
+    **kwargs : Any
+        Substitution values for {placeholder} tokens in the template.
+
+    Returns
+    -------
+    pd.DataFrame
+        One row per SPARQL result binding.
+
+    Raises
+    ------
+    UnknownQueryError
+        If template_name is not registered.
+    """
+    if template_name not in self._query_templates:
+        raise UnknownQueryError(
+            f"No query template named '{template_name}'. "
+            f"Available: {sorted(self._query_templates)}"
+        )
+    sparql = self._query_templates[template_name]
+
+    # Substitute {placeholder} tokens with kwargs values
+    for key, value in kwargs.items():
+        sparql = sparql.replace(f"{{{key}}}", str(value))
+
+    # Provide namespace bindings for queries that may not declare all prefixes
+    init_ns = {
+        "kc": _KC,
+        "rdf": RDF,
+        "rdfs": RDFS,
+        "owl": OWL,
+        "xsd": XSD,
+        self._schema._namespace: self._ns,
+    }
+    results = self._instance_graph.query(sparql, initNs=init_ns)
+
+    columns = [str(v) for v in results.vars]
+    rows = []
+    for row in results:
+        rows.append([str(val) if val is not None else None for val in row])
+    return pd.DataFrame(rows, columns=columns)
+
+
+
+ +
+ +
+ + +

+ query_ids(template_name, **kwargs) + +

+ + +
+ +

Execute a named SPARQL template and return the first column as element IDs.

+

Like :meth:query but returns a set[str] of element IDs +(namespace prefix stripped) instead of a DataFrame. Useful for +obtaining subcomplexes from parameterized queries.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ template_name + + str + +
+

Name of a registered query template.

+
+
+ required +
+ **kwargs + + Any + +
+

Substitution values for {placeholder} tokens in the template.

+
+
+ {} +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ UnknownQueryError + +
+

If template_name is not registered.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
614
+615
+616
+617
+618
+619
+620
+621
+622
+623
+624
+625
+626
+627
+628
+629
+630
+631
+632
+633
+634
+635
+636
+637
+638
+639
+640
+641
+642
+643
+644
+645
def query_ids(self, template_name: str, **kwargs: Any) -> set[str]:
+    """Execute a named SPARQL template and return the first column as element IDs.
+
+    Like :meth:`query` but returns a ``set[str]`` of element IDs
+    (namespace prefix stripped) instead of a DataFrame.  Useful for
+    obtaining subcomplexes from parameterized queries.
+
+    Parameters
+    ----------
+    template_name : str
+        Name of a registered query template.
+    **kwargs : Any
+        Substitution values for ``{placeholder}`` tokens in the template.
+
+    Returns
+    -------
+    set[str]
+
+    Raises
+    ------
+    UnknownQueryError
+        If template_name is not registered.
+    """
+    if template_name not in self._query_templates:
+        raise UnknownQueryError(
+            f"No query template named '{template_name}'. "
+            f"Available: {sorted(self._query_templates)}"
+        )
+    sparql = self._query_templates[template_name]
+    for key, value in kwargs.items():
+        sparql = sparql.replace(f"{{{key}}}", str(value))
+    return self._ids_from_query(sparql)
+
+
+
+ +
+ +
+ + +

+ dump_graph() + +

+ + +
+ +

Return the instance graph as a Turtle string.

+ + +
+ Source code in knowledgecomplex/graph.py +
647
+648
+649
def dump_graph(self) -> str:
+    """Return the instance graph as a Turtle string."""
+    return self._instance_graph.serialize(format="turtle")
+
+
+
+ +
+ +
+ + +

+ export(path) + +

+ + +
+ +

Export the schema, queries, and instance graph to a directory.

+

Writes ontology.ttl, shapes.ttl, queries/*.sparql, and instance.ttl.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ path + + str | Path + +
+

Target directory. Created if it does not exist.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Path + +
+

The export directory.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
651
+652
+653
+654
+655
+656
+657
+658
+659
+660
+661
+662
+663
+664
+665
+666
+667
+668
+669
+670
def export(self, path: str | Path) -> Path:
+    """
+    Export the schema, queries, and instance graph to a directory.
+
+    Writes ontology.ttl, shapes.ttl, queries/*.sparql, and instance.ttl.
+
+    Parameters
+    ----------
+    path : str | Path
+        Target directory. Created if it does not exist.
+
+    Returns
+    -------
+    Path
+        The export directory.
+    """
+    p = Path(path)
+    self._schema.export(p, query_dirs=self._query_dirs_raw)
+    (p / "instance.ttl").write_text(self.dump_graph())
+    return p
+
+
+
+ +
+ +
+ + +

+ load(path) + + + classmethod + + +

+ + +
+ +

Load a knowledge complex from a directory.

+

Reads ontology.ttl and shapes.ttl to reconstruct the schema, +queries/*.sparql for query templates, and instance.ttl (if present) +for the instance graph.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ path + + str | Path + +
+

Directory containing at minimum ontology.ttl and shapes.ttl.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ KnowledgeComplex + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
672
+673
+674
+675
+676
+677
+678
+679
+680
+681
+682
+683
+684
+685
+686
+687
+688
+689
+690
+691
+692
+693
+694
+695
+696
+697
+698
@classmethod
+def load(cls, path: str | Path) -> "KnowledgeComplex":
+    """
+    Load a knowledge complex from a directory.
+
+    Reads ontology.ttl and shapes.ttl to reconstruct the schema,
+    queries/*.sparql for query templates, and instance.ttl (if present)
+    for the instance graph.
+
+    Parameters
+    ----------
+    path : str | Path
+        Directory containing at minimum ontology.ttl and shapes.ttl.
+
+    Returns
+    -------
+    KnowledgeComplex
+    """
+    p = Path(path)
+    schema = SchemaBuilder.load(p)
+    query_dir = p / "queries"
+    query_dirs = [query_dir] if query_dir.exists() else []
+    kc = cls(schema=schema, query_dirs=query_dirs)
+    instance_file = p / "instance.ttl"
+    if instance_file.exists():
+        kc._instance_graph.parse(str(instance_file), format="turtle")
+    return kc
+
+
+
+ +
+ +
+ + +

+ element(id) + +

+ + +
+ +

Get an Element handle for the given element ID.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Local identifier of the element.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Element + +
+ +
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If no element with that ID exists in the graph.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
702
+703
+704
+705
+706
+707
+708
+709
+710
+711
+712
+713
+714
+715
+716
+717
+718
+719
+720
+721
+722
+723
def element(self, id: str) -> Element:
+    """
+    Get an Element handle for the given element ID.
+
+    Parameters
+    ----------
+    id : str
+        Local identifier of the element.
+
+    Returns
+    -------
+    Element
+
+    Raises
+    ------
+    ValueError
+        If no element with that ID exists in the graph.
+    """
+    iri = URIRef(f"{self._schema._base_iri}{id}")
+    if (iri, RDF.type, None) not in self._instance_graph:
+        raise ValueError(f"No element with id '{id}' in the complex")
+    return Element(self, id)
+
+
+
+ +
+ +
+ + +

+ element_ids(type=None) + +

+ + +
+ +

List element IDs, optionally filtered by type (includes subtypes).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type + + str + +
+

Filter to elements of this type or any subtype.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
725
+726
+727
+728
+729
+730
+731
+732
+733
+734
+735
+736
+737
+738
+739
+740
+741
+742
+743
+744
+745
+746
+747
+748
+749
+750
+751
+752
+753
+754
+755
+756
+757
+758
+759
+760
+761
+762
+763
+764
+765
+766
+767
+768
def element_ids(self, type: str | None = None) -> list[str]:
+    """
+    List element IDs, optionally filtered by type (includes subtypes).
+
+    Parameters
+    ----------
+    type : str, optional
+        Filter to elements of this type or any subtype.
+
+    Returns
+    -------
+    list[str]
+    """
+    ns_str = self._schema._base_iri
+    if type is not None:
+        if type not in self._schema._types:
+            raise SchemaError(f"Type '{type}' is not registered")
+        type_iri = self._ns[type]
+        # Use SPARQL with subClassOf* to include subtypes
+        sparql = f"""
+        SELECT ?elem WHERE {{
+            ?elem a/rdfs:subClassOf* <{type_iri}> .
+            <{self._complex_iri}> <https://example.org/kc#hasElement> ?elem .
+        }}
+        """
+        results = self._instance_graph.query(
+            sparql, initNs={"rdfs": RDFS, "rdf": RDF}
+        )
+        ids = []
+        for row in results:
+            elem_str = str(row[0])
+            if elem_str.startswith(ns_str):
+                ids.append(elem_str[len(ns_str):])
+        return sorted(ids)
+    else:
+        # All elements in the complex
+        ids = []
+        for _, _, o in self._instance_graph.triples(
+            (self._complex_iri, _KC.hasElement, None)
+        ):
+            elem_str = str(o)
+            if elem_str.startswith(ns_str):
+                ids.append(elem_str[len(ns_str):])
+        return sorted(ids)
+
+
+
+ +
+ +
+ + +

+ elements(type=None) + +

+ + +
+ +

List Element handles, optionally filtered by type (includes subtypes).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type + + str + +
+

Filter to elements of this type or any subtype.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[Element] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
770
+771
+772
+773
+774
+775
+776
+777
+778
+779
+780
+781
+782
+783
def elements(self, type: str | None = None) -> list[Element]:
+    """
+    List Element handles, optionally filtered by type (includes subtypes).
+
+    Parameters
+    ----------
+    type : str, optional
+        Filter to elements of this type or any subtype.
+
+    Returns
+    -------
+    list[Element]
+    """
+    return [Element(self, id) for id in self.element_ids(type=type)]
+
+
+
+ +
+ +
+ + +

+ is_subcomplex(ids) + +

+ + +
+ +

Check whether a set of element IDs forms a valid subcomplex.

+

A set is a valid subcomplex iff it is closed under the boundary +operator: for every element in the set, all its boundary elements +are also in the set.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ ids + + set[str] + +
+

Element identifiers to check.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ bool + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
785
+786
+787
+788
+789
+790
+791
+792
+793
+794
+795
+796
+797
+798
+799
+800
+801
+802
+803
+804
def is_subcomplex(self, ids: set[str]) -> bool:
+    """
+    Check whether a set of element IDs forms a valid subcomplex.
+
+    A set is a valid subcomplex iff it is closed under the boundary
+    operator: for every element in the set, all its boundary elements
+    are also in the set.
+
+    Parameters
+    ----------
+    ids : set[str]
+        Element identifiers to check.
+
+    Returns
+    -------
+    bool
+    """
+    if not ids:
+        return True
+    return set(ids) == self.closure(ids)
+
+
+
+ +
+ +
+ + +

+ boundary(id, *, type=None) + +

+ + +
+ +

Return ∂(id): the direct faces of element id via kc:boundedBy.

+

For a vertex, returns the empty set. +For an edge, returns its 2 boundary vertices. +For a face, returns its 3 boundary edges.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier.

+
+
+ required +
+ type + + str + +
+

Filter results to this type (including subtypes).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
839
+840
+841
+842
+843
+844
+845
+846
+847
+848
+849
+850
+851
+852
+853
+854
+855
+856
+857
+858
+859
+860
+861
+862
def boundary(self, id: str, *, type: str | None = None) -> set[str]:
+    """Return ∂(id): the direct faces of element id via kc:boundedBy.
+
+    For a vertex, returns the empty set.
+    For an edge, returns its 2 boundary vertices.
+    For a face, returns its 3 boundary edges.
+
+    Parameters
+    ----------
+    id : str
+        Element identifier.
+    type : str, optional
+        Filter results to this type (including subtypes).
+
+    Returns
+    -------
+    set[str]
+    """
+    sparql = (
+        self._query_templates["boundary"]
+        .replace("{simplex}", f"<{self._iri(id)}>")
+        .replace("{type_filter}", self._type_filter_clause("boundary", type))
+    )
+    return self._ids_from_query(sparql)
+
+
+
+ +
+ +
+ + +

+ coboundary(id, *, type=None) + +

+ + +
+ +

Return the cofaces of id: all simplices whose boundary contains id.

+

Computes {τ ∈ K : id ∈ ∂(τ)} — the set of (k+1)-simplices that +have id as a boundary element. This is the combinatorial coface +relation, not the algebraic coboundary operator δ on cochains.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier.

+
+
+ required +
+ type + + str + +
+

Filter results to this type (including subtypes).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
864
+865
+866
+867
+868
+869
+870
+871
+872
+873
+874
+875
+876
+877
+878
+879
+880
+881
+882
+883
+884
+885
+886
+887
+888
+889
+890
    def coboundary(self, id: str, *, type: str | None = None) -> set[str]:
+        """Return the cofaces of id: all simplices whose boundary contains id.
+
+        Computes {τ ∈ K : id ∈ ∂(τ)} — the set of (k+1)-simplices that
+        have id as a boundary element.  This is the combinatorial coface
+        relation, not the algebraic coboundary operator δ on cochains.
+
+        Parameters
+        ----------
+        id : str
+            Element identifier.
+        type : str, optional
+            Filter results to this type (including subtypes).
+
+        Returns
+        -------
+        set[str]
+        """
+        tf = self._type_filter_clause("coboundary", type)
+        sparql = f"""\
+PREFIX kc: <https://example.org/kc#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+SELECT ?coboundary WHERE {{
+    ?coboundary kc:boundedBy <{self._iri(id)}> .
+    {tf}
+}}"""
+        return self._ids_from_query(sparql)
+
+
+
+ +
+ +
+ + +

+ star(id, *, type=None) + +

+ + +
+ +

Return St(id): all simplices containing id as a face (transitive coboundary + self).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier.

+
+
+ required +
+ type + + str + +
+

Filter results to this type (including subtypes).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
892
+893
+894
+895
+896
+897
+898
+899
+900
+901
+902
+903
+904
+905
+906
+907
+908
+909
+910
+911
def star(self, id: str, *, type: str | None = None) -> set[str]:
+    """Return St(id): all simplices containing id as a face (transitive coboundary + self).
+
+    Parameters
+    ----------
+    id : str
+        Element identifier.
+    type : str, optional
+        Filter results to this type (including subtypes).
+
+    Returns
+    -------
+    set[str]
+    """
+    sparql = (
+        self._query_templates["star"]
+        .replace("{simplex}", f"<{self._iri(id)}>")
+        .replace("{type_filter}", self._type_filter_clause("star", type))
+    )
+    return self._ids_from_query(sparql)
+
+
+
+ +
+ +
+ + +

+ closure(ids, *, type=None) + +

+ + +
+ +

Return Cl(ids): the smallest subcomplex containing ids.

+

Accepts a single ID or a set of IDs. When given a set, returns the +union of closures.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ ids + + str or set[str] + +
+

Element identifier(s).

+
+
+ required +
+ type + + str + +
+

Filter results to this type (including subtypes).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
913
+914
+915
+916
+917
+918
+919
+920
+921
+922
+923
+924
+925
+926
+927
+928
+929
+930
+931
+932
+933
+934
+935
+936
+937
+938
+939
+940
+941
+942
+943
+944
+945
+946
+947
+948
    def closure(self, ids: str | set[str], *, type: str | None = None) -> set[str]:
+        """Return Cl(ids): the smallest subcomplex containing ids.
+
+        Accepts a single ID or a set of IDs. When given a set, returns the
+        union of closures.
+
+        Parameters
+        ----------
+        ids : str or set[str]
+            Element identifier(s).
+        type : str, optional
+            Filter results to this type (including subtypes).
+
+        Returns
+        -------
+        set[str]
+        """
+        if isinstance(ids, str):
+            sparql = (
+                self._query_templates["closure"]
+                .replace("{simplex}", f"<{self._iri(ids)}>")
+                .replace("{type_filter}", self._type_filter_clause("closure", type))
+            )
+            return self._ids_from_query(sparql)
+        # Set input: use VALUES clause
+        values = " ".join(f"(<{self._iri(i)}>)" for i in ids)
+        tf = self._type_filter_clause("closure", type)
+        sparql = f"""\
+PREFIX kc: <https://example.org/kc#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+SELECT ?closure WHERE {{
+    VALUES (?sigma) {{ {values} }}
+    ?sigma kc:boundedBy* ?closure .
+    {tf}
+}}"""
+        return self._ids_from_query(sparql)
+
+
+
+ +
+ +
+ + +

+ closed_star(id, *, type=None) + +

+ + +
+ +

Return Cl(St(id)): the closure of the star.

+

Always a valid subcomplex.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier.

+
+
+ required +
+ type + + str + +
+

Filter results to this type (including subtypes).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
950
+951
+952
+953
+954
+955
+956
+957
+958
+959
+960
+961
+962
+963
+964
+965
+966
def closed_star(self, id: str, *, type: str | None = None) -> set[str]:
+    """Return Cl(St(id)): the closure of the star.
+
+    Always a valid subcomplex.
+
+    Parameters
+    ----------
+    id : str
+        Element identifier.
+    type : str, optional
+        Filter results to this type (including subtypes).
+
+    Returns
+    -------
+    set[str]
+    """
+    return self.closure(self.star(id), type=type)
+
+
+
+ +
+ +
+ + + + + +
+ +

Return Lk(id): Cl(St(id)) \ St(id).

+

The link is the set of simplices in the closed star that do not +themselves contain id as a face.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Element identifier.

+
+
+ required +
+ type + + str + +
+

Filter results to this type (including subtypes).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
968
+969
+970
+971
+972
+973
+974
+975
+976
+977
+978
+979
+980
+981
+982
+983
+984
+985
+986
+987
+988
+989
def link(self, id: str, *, type: str | None = None) -> set[str]:
+    """Return Lk(id): Cl(St(id)) \\ St(id).
+
+    The link is the set of simplices in the closed star that do not
+    themselves contain id as a face.
+
+    Parameters
+    ----------
+    id : str
+        Element identifier.
+    type : str, optional
+        Filter results to this type (including subtypes).
+
+    Returns
+    -------
+    set[str]
+    """
+    result = self.closed_star(id) - self.star(id)
+    if type is not None:
+        typed = set(self.element_ids(type=type))
+        result &= typed
+    return result
+
+
+
+ +
+ +
+ + +

+ skeleton(k) + +

+ + +
+ +

Return sk_k(K): all elements of dimension <= k.

+

k=0: vertices only +k=1: vertices and edges +k=2: vertices, edges, and faces (everything)

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ k + + int + +
+

Maximum dimension (0, 1, or 2).

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ set[str] + +
+ +
+
+ + +

Raises:

+ + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

If k < 0 or k > 2.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
 991
+ 992
+ 993
+ 994
+ 995
+ 996
+ 997
+ 998
+ 999
+1000
+1001
+1002
+1003
+1004
+1005
+1006
+1007
+1008
+1009
+1010
+1011
+1012
+1013
+1014
+1015
+1016
+1017
+1018
+1019
+1020
+1021
+1022
+1023
+1024
+1025
+1026
+1027
+1028
def skeleton(self, k: int) -> set[str]:
+    """Return sk_k(K): all elements of dimension <= k.
+
+    k=0: vertices only
+    k=1: vertices and edges
+    k=2: vertices, edges, and faces (everything)
+
+    Parameters
+    ----------
+    k : int
+        Maximum dimension (0, 1, or 2).
+
+    Returns
+    -------
+    set[str]
+
+    Raises
+    ------
+    ValueError
+        If k < 0 or k > 2.
+    """
+    if k < 0 or k > 2:
+        raise ValueError(f"skeleton dimension must be 0, 1, or 2; got {k}")
+    dim_classes_map = {
+        0: [_KC.Vertex],
+        1: [_KC.Vertex, _KC.Edge],
+        2: [_KC.Vertex, _KC.Edge, _KC.Face],
+    }
+    classes = dim_classes_map[k]
+    unions = " UNION ".join(
+        f"{{ ?elem a/rdfs:subClassOf* <{c}> }}" for c in classes
+    )
+    sparql = (
+        self._query_templates["skeleton"]
+        .replace("{complex}", f"<{self._complex_iri}>")
+        .replace("{dim_classes}", unions)
+    )
+    return self._ids_from_query(sparql)
+
+
+
+ +
+ +
+ + +

+ degree(id) + +

+ + +
+ +

Return deg(id): the number of edges incident to vertex id.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ id + + str + +
+

Vertex identifier.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ int + +
+ +
+
+ + +
+ Source code in knowledgecomplex/graph.py +
1030
+1031
+1032
+1033
+1034
+1035
+1036
+1037
+1038
+1039
+1040
+1041
+1042
+1043
+1044
+1045
+1046
+1047
+1048
+1049
+1050
+1051
+1052
+1053
+1054
def degree(self, id: str) -> int:
+    """Return deg(id): the number of edges incident to vertex id.
+
+    Parameters
+    ----------
+    id : str
+        Vertex identifier.
+
+    Returns
+    -------
+    int
+    """
+    sparql = (
+        self._query_templates["degree"]
+        .replace("{simplex}", f"<{self._iri(id)}>")
+    )
+    init_ns = {
+        "kc": _KC, "rdf": RDF, "rdfs": RDFS,
+        "owl": OWL, "xsd": XSD,
+        self._schema._namespace: self._ns,
+    }
+    results = self._instance_graph.query(sparql, initNs=init_ns)
+    for row in results:
+        return int(row[0])
+    return 0
+
+
+
+ +
+ +
+ + +

+ register_codec(type_name, codec) + +

+ + +
+ +

Register a codec for the given type.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type_name + + str + +
+

Must be a registered type in the schema.

+
+
+ required +
+ codec + + Codec + +
+

Object implementing compile() and decompile().

+
+
+ required +
+ + +
+ Source code in knowledgecomplex/graph.py +
1058
+1059
+1060
+1061
+1062
+1063
+1064
+1065
+1066
+1067
+1068
+1069
+1070
+1071
+1072
+1073
+1074
+1075
def register_codec(self, type_name: str, codec: Codec) -> None:
+    """
+    Register a codec for the given type.
+
+    Parameters
+    ----------
+    type_name : str
+        Must be a registered type in the schema.
+    codec : Codec
+        Object implementing compile() and decompile().
+    """
+    if type_name not in self._schema._types:
+        raise SchemaError(f"Type '{type_name}' is not registered")
+    if not isinstance(codec, Codec):
+        raise TypeError(
+            f"Expected a Codec instance, got {type(codec).__name__}"
+        )
+    self._codecs[type_name] = codec
+
+
+
+ +
+ +
+ + +

+ decompile_uri(type_name, uri) + +

+ + +
+ +

Decompile an artifact at a URI without adding it to the graph.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type_name + + str + +
+

The element type (used to resolve the codec).

+
+
+ required +
+ uri + + str + +
+

URI of the artifact to read.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict + +
+

Attribute key-value pairs.

+
+
+ + +
+ Source code in knowledgecomplex/graph.py +
1088
+1089
+1090
+1091
+1092
+1093
+1094
+1095
+1096
+1097
+1098
+1099
+1100
+1101
+1102
+1103
+1104
+1105
+1106
+1107
def decompile_uri(self, type_name: str, uri: str) -> dict:
+    """
+    Decompile an artifact at a URI without adding it to the graph.
+
+    Parameters
+    ----------
+    type_name : str
+        The element type (used to resolve the codec).
+    uri : str
+        URI of the artifact to read.
+
+    Returns
+    -------
+    dict
+        Attribute key-value pairs.
+    """
+    if type_name not in self._schema._types:
+        raise SchemaError(f"Type '{type_name}' is not registered")
+    codec = self._resolve_codec(type_name)
+    return codec.decompile(uri)
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/io/index.html b/site/api/io/index.html new file mode 100644 index 0000000..7272d3b --- /dev/null +++ b/site/api/io/index.html @@ -0,0 +1,1411 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + File I/O - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+
+
+ + + +
+
+
+ + + +
+ +
+ + + + + + + + +

File I/O

+ +
+ + + + +
+ +

File-based import/export utilities for KnowledgeComplex.

+ + +

Functions:

+ + + + + + + + + + + + + + + + + + + + + +
NameDescription
save_graph Serialize the instance graph to a file. +
+ +
+
load_graph Parse a file into the instance graph +
+ +
+
dump_graph Return the instance graph as a string in a given format. +
+ +
+
+ + +
+ Design notes +
    +
  • Turtle (.ttl) is the default format — human-readable and consistent + with the ontology/shapes patterns used throughout the codebase.
  • +
  • All load functions are additive: they call graph.parse() which + adds triples to the existing graph. Load into a fresh + KnowledgeComplex for a clean restore.
  • +
  • No TriG (.trig) or N-Quads (.nq) — KnowledgeComplex uses a + plain rdflib.Graph, not a ConjunctiveGraph.
  • +
+

Adapted from discourse_graph.io (multi-agent-dg).

+
+ + + + + + + + + +
+ + + + + + + + + + +
+ + +

+ save_graph(kc, path, format='turtle') + +

+ + +
+ +

Serialize kc._instance_graph to a file.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + 'KnowledgeComplex' + +
+

The KnowledgeComplex whose instance graph is serialized.

+
+
+ required +
+ path + + 'Path | str' + +
+

Destination file path. The file is created or overwritten.

+
+
+ required +
+ format + + str + +
+

rdflib serialization format string. Defaults to "turtle". +Other useful values: "json-ld", "ntriples", "n3", +"xml" (RDF/XML).

+
+
+ 'turtle' +
+ + +
+ Source code in knowledgecomplex/io.py +
73
+74
+75
+76
+77
+78
+79
+80
+81
+82
+83
+84
+85
+86
+87
+88
+89
+90
+91
+92
def save_graph(
+    kc: "KnowledgeComplex",
+    path: "Path | str",
+    format: str = "turtle",
+) -> None:
+    """Serialize ``kc._instance_graph`` to a file.
+
+    Parameters
+    ----------
+    kc :
+        The ``KnowledgeComplex`` whose instance graph is serialized.
+    path :
+        Destination file path.  The file is created or overwritten.
+    format :
+        rdflib serialization format string.  Defaults to ``"turtle"``.
+        Other useful values: ``"json-ld"``, ``"ntriples"``, ``"n3"``,
+        ``"xml"`` (RDF/XML).
+    """
+    path = Path(path)
+    kc._instance_graph.serialize(destination=str(path), format=format)
+
+
+
+ +
+ +
+ + +

+ load_graph(kc, path, format=None, validate=False) + +

+ + +
+ +

Parse a file into kc._instance_graph (additive).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + 'KnowledgeComplex' + +
+

The KnowledgeComplex whose instance graph receives the +parsed triples.

+
+
+ required +
+ path + + 'Path | str' + +
+

Source file path.

+
+
+ required +
+ format + + Optional[str] + +
+

rdflib serialization format string. When None, auto-detected +from the file extension using the built-in registry.

+
+
+ None +
+ validate + + bool + +
+

If True, run SHACL validation after parsing. On failure the +newly added triples are rolled back and ValidationError is +raised. Defaults to False because the data may have been +exported from a validated KnowledgeComplex and re-validating +is expensive.

+
+
+ False +
+ + +
+ Notes +

This operation is additive: existing triples in the instance graph +are retained. For a clean restore, load into a freshly constructed +KnowledgeComplex.

+

The instance graph includes TBox (ontology) triples. Loading a file +that was saved from a KC with the same schema is harmless — rdflib +deduplicates triples. Loading data from a different schema will merge +ontologies; the caller is responsible for schema compatibility.

+
+ +
+ Source code in knowledgecomplex/io.py +
 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
def load_graph(
+    kc: "KnowledgeComplex",
+    path: "Path | str",
+    format: Optional[str] = None,
+    validate: bool = False,
+) -> None:
+    """Parse a file into ``kc._instance_graph`` (additive).
+
+    Parameters
+    ----------
+    kc :
+        The ``KnowledgeComplex`` whose instance graph receives the
+        parsed triples.
+    path :
+        Source file path.
+    format :
+        rdflib serialization format string.  When ``None``, auto-detected
+        from the file extension using the built-in registry.
+    validate :
+        If ``True``, run SHACL validation after parsing.  On failure the
+        newly added triples are rolled back and ``ValidationError`` is
+        raised.  Defaults to ``False`` because the data may have been
+        exported from a validated ``KnowledgeComplex`` and re-validating
+        is expensive.
+
+    Notes
+    -----
+    This operation is **additive**: existing triples in the instance graph
+    are retained.  For a clean restore, load into a freshly constructed
+    ``KnowledgeComplex``.
+
+    The instance graph includes TBox (ontology) triples.  Loading a file
+    that was saved from a KC with the same schema is harmless — rdflib
+    deduplicates triples.  Loading data from a different schema will merge
+    ontologies; the caller is responsible for schema compatibility.
+    """
+    path = Path(path)
+    fmt = _detect_format(path, format)
+
+    if validate:
+        before = set(kc._instance_graph)
+
+    kc._instance_graph.parse(source=str(path), format=fmt)
+
+    if validate:
+        try:
+            kc._validate()
+        except ValidationError:
+            added = set(kc._instance_graph) - before
+            for triple in added:
+                kc._instance_graph.remove(triple)
+            raise
+
+
+
+ +
+ +
+ + +

+ dump_graph(kc, format='turtle') + +

+ + +
+ +

Return the instance graph as a string in the requested format.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + 'KnowledgeComplex' + +
+

The KnowledgeComplex whose instance graph is serialized.

+
+
+ required +
+ format + + str + +
+

rdflib serialization format string. Defaults to "turtle".

+
+
+ 'turtle' +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ str + +
+

The serialized graph.

+
+
+ + +
+ Source code in knowledgecomplex/io.py +
149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
def dump_graph(
+    kc: "KnowledgeComplex",
+    format: str = "turtle",
+) -> str:
+    """Return the instance graph as a string in the requested format.
+
+    Parameters
+    ----------
+    kc :
+        The ``KnowledgeComplex`` whose instance graph is serialized.
+    format :
+        rdflib serialization format string.  Defaults to ``"turtle"``.
+
+    Returns
+    -------
+    str
+        The serialized graph.
+    """
+    return kc._instance_graph.serialize(format=format)
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/schema/index.html b/site/api/schema/index.html new file mode 100644 index 0000000..5730d80 --- /dev/null +++ b/site/api/schema/index.html @@ -0,0 +1,6762 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Schema - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+ +
+ + + + + + + + +

Schema

+ + +
+ + + + +
+ +

knowledgecomplex.schema — SchemaBuilder and vocab/text descriptors.

+

Public API. Never exposes rdflib, pyshacl, or owlrl objects.

+

Internal structure mirrors the 2x2 responsibility map: + {topological, ontological} x {OWL, SHACL}

+

The core ontology defines KC:Element as the base class for all simplices, +with KC:Vertex (k=0), KC:Edge (k=1), KC:Face (k=2) as subclasses. +add_vertex_type / add_edge_type / add_face_type each declare a user type +as a subclass of the appropriate simplex class and write to both internal +OWL and SHACL graphs.

+

dump_owl() and dump_shacl() return merged (core + user) Turtle strings.

+ + + + + + + + + + +
+ + + + + + + + + +
+ + + +

+ Codec + + +

+ + +
+

+ Bases: Protocol

+ + + +

Bidirectional bridge between element records and artifacts at URIs.

+

A codec pairs compile (map → territory) and decompile (territory → map) +for a given element type. Registered on KnowledgeComplex instances via +register_codec(), and inherited by child types.

+ + + + + + + + +
+ Source code in knowledgecomplex/schema.py +
39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
@runtime_checkable
+class Codec(Protocol):
+    """
+    Bidirectional bridge between element records and artifacts at URIs.
+
+    A codec pairs compile (map → territory) and decompile (territory → map)
+    for a given element type. Registered on KnowledgeComplex instances via
+    register_codec(), and inherited by child types.
+    """
+
+    def compile(self, element: dict) -> None:
+        """
+        Write an element record to the artifact at its URI.
+
+        Parameters
+        ----------
+        element : dict
+            Keys: id, type, uri, plus all attribute key-value pairs.
+        """
+        ...
+
+    def decompile(self, uri: str) -> dict:
+        """
+        Read the artifact at a URI and return an attribute dict.
+
+        Parameters
+        ----------
+        uri : str
+            The URI of the artifact to read.
+
+        Returns
+        -------
+        dict
+            Attribute key-value pairs suitable for add_vertex/add_edge/add_face kwargs.
+        """
+        ...
+
+
+ + + +
+ + + + + + + + + + +
+ + +

+ compile(element) + +

+ + +
+ +

Write an element record to the artifact at its URI.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ element + + dict + +
+

Keys: id, type, uri, plus all attribute key-value pairs.

+
+
+ required +
+ + +
+ Source code in knowledgecomplex/schema.py +
49
+50
+51
+52
+53
+54
+55
+56
+57
+58
def compile(self, element: dict) -> None:
+    """
+    Write an element record to the artifact at its URI.
+
+    Parameters
+    ----------
+    element : dict
+        Keys: id, type, uri, plus all attribute key-value pairs.
+    """
+    ...
+
+
+
+ +
+ +
+ + +

+ decompile(uri) + +

+ + +
+ +

Read the artifact at a URI and return an attribute dict.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ uri + + str + +
+

The URI of the artifact to read.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict + +
+

Attribute key-value pairs suitable for add_vertex/add_edge/add_face kwargs.

+
+
+ + +
+ Source code in knowledgecomplex/schema.py +
60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
def decompile(self, uri: str) -> dict:
+    """
+    Read the artifact at a URI and return an attribute dict.
+
+    Parameters
+    ----------
+    uri : str
+        The URI of the artifact to read.
+
+    Returns
+    -------
+    dict
+        Attribute key-value pairs suitable for add_vertex/add_edge/add_face kwargs.
+    """
+    ...
+
+
+
+ +
+ + + +
+ +
+ +
+ +
+ + + +

+ VocabDescriptor + + + + dataclass + + +

+ + +
+ + + +

Returned by vocab(). Carries the allowed string values for an attribute. +Generates both an OWL rdfs:comment annotation and a SHACL sh:in constraint +when passed to add_*_type().

+ + + + + + + + +
+ Source code in knowledgecomplex/schema.py +
77
+78
+79
+80
+81
+82
+83
+84
+85
+86
+87
+88
+89
@dataclass(frozen=True)
+class VocabDescriptor:
+    """
+    Returned by vocab(). Carries the allowed string values for an attribute.
+    Generates both an OWL rdfs:comment annotation and a SHACL sh:in constraint
+    when passed to add_*_type().
+    """
+    values: tuple[str, ...]
+    multiple: bool = False
+
+    def __repr__(self) -> str:
+        suffix = ", multiple=True" if self.multiple else ""
+        return f"vocab({', '.join(repr(v) for v in self.values)}{suffix})"
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ TextDescriptor + + + + dataclass + + +

+ + +
+ + + +

Returned by text(). Marks an attribute as a free-text string (no controlled vocabulary). +Generates an OWL DatatypeProperty with xsd:string range and a SHACL property shape +with sh:datatype xsd:string but no sh:in constraint.

+ + + + + + + + +
+ Source code in knowledgecomplex/schema.py +
120
+121
+122
+123
+124
+125
+126
+127
+128
+129
+130
+131
+132
+133
+134
+135
+136
@dataclass(frozen=True)
+class TextDescriptor:
+    """
+    Returned by text(). Marks an attribute as a free-text string (no controlled vocabulary).
+    Generates an OWL DatatypeProperty with xsd:string range and a SHACL property shape
+    with sh:datatype xsd:string but no sh:in constraint.
+    """
+    required: bool = True
+    multiple: bool = False
+
+    def __repr__(self) -> str:
+        parts = []
+        if not self.required:
+            parts.append("required=False")
+        if self.multiple:
+            parts.append("multiple=True")
+        return f"text({', '.join(parts)})"
+
+
+ + + +
+ + + + + + + + + + + + +
+ +
+ +
+ +
+ + + +

+ SchemaBuilder + + +

+ + +
+ + + +

Author a knowledge complex schema: vertex types, edge types, face types.

+

Each add_*_type call declares a new OWL subclass of the appropriate +KC:Element subclass (Vertex, Edge, or Face) and creates a corresponding +SHACL node shape. Both OWL and SHACL graphs are maintained internally. +dump_owl() / dump_shacl() return the full merged Turtle strings.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ namespace + + str + +
+

Short namespace token for user-defined classes and properties. +Used to build IRI prefix: https://example.org/{namespace}#

+
+
+ required +
+ + +
+ Example +
+
+
+

sb = SchemaBuilder(namespace="aaa") +sb.add_vertex_type("spec") +sb.add_edge_type("verification", +... attributes={"status": vocab("passing", "failing", "pending")}) +sb.add_face_type("assurance") +owl_ttl = sb.dump_owl() +shacl_ttl = sb.dump_shacl()

+
+
+
+
+ + + + + + + +
+ Source code in knowledgecomplex/schema.py +
 165
+ 166
+ 167
+ 168
+ 169
+ 170
+ 171
+ 172
+ 173
+ 174
+ 175
+ 176
+ 177
+ 178
+ 179
+ 180
+ 181
+ 182
+ 183
+ 184
+ 185
+ 186
+ 187
+ 188
+ 189
+ 190
+ 191
+ 192
+ 193
+ 194
+ 195
+ 196
+ 197
+ 198
+ 199
+ 200
+ 201
+ 202
+ 203
+ 204
+ 205
+ 206
+ 207
+ 208
+ 209
+ 210
+ 211
+ 212
+ 213
+ 214
+ 215
+ 216
+ 217
+ 218
+ 219
+ 220
+ 221
+ 222
+ 223
+ 224
+ 225
+ 226
+ 227
+ 228
+ 229
+ 230
+ 231
+ 232
+ 233
+ 234
+ 235
+ 236
+ 237
+ 238
+ 239
+ 240
+ 241
+ 242
+ 243
+ 244
+ 245
+ 246
+ 247
+ 248
+ 249
+ 250
+ 251
+ 252
+ 253
+ 254
+ 255
+ 256
+ 257
+ 258
+ 259
+ 260
+ 261
+ 262
+ 263
+ 264
+ 265
+ 266
+ 267
+ 268
+ 269
+ 270
+ 271
+ 272
+ 273
+ 274
+ 275
+ 276
+ 277
+ 278
+ 279
+ 280
+ 281
+ 282
+ 283
+ 284
+ 285
+ 286
+ 287
+ 288
+ 289
+ 290
+ 291
+ 292
+ 293
+ 294
+ 295
+ 296
+ 297
+ 298
+ 299
+ 300
+ 301
+ 302
+ 303
+ 304
+ 305
+ 306
+ 307
+ 308
+ 309
+ 310
+ 311
+ 312
+ 313
+ 314
+ 315
+ 316
+ 317
+ 318
+ 319
+ 320
+ 321
+ 322
+ 323
+ 324
+ 325
+ 326
+ 327
+ 328
+ 329
+ 330
+ 331
+ 332
+ 333
+ 334
+ 335
+ 336
+ 337
+ 338
+ 339
+ 340
+ 341
+ 342
+ 343
+ 344
+ 345
+ 346
+ 347
+ 348
+ 349
+ 350
+ 351
+ 352
+ 353
+ 354
+ 355
+ 356
+ 357
+ 358
+ 359
+ 360
+ 361
+ 362
+ 363
+ 364
+ 365
+ 366
+ 367
+ 368
+ 369
+ 370
+ 371
+ 372
+ 373
+ 374
+ 375
+ 376
+ 377
+ 378
+ 379
+ 380
+ 381
+ 382
+ 383
+ 384
+ 385
+ 386
+ 387
+ 388
+ 389
+ 390
+ 391
+ 392
+ 393
+ 394
+ 395
+ 396
+ 397
+ 398
+ 399
+ 400
+ 401
+ 402
+ 403
+ 404
+ 405
+ 406
+ 407
+ 408
+ 409
+ 410
+ 411
+ 412
+ 413
+ 414
+ 415
+ 416
+ 417
+ 418
+ 419
+ 420
+ 421
+ 422
+ 423
+ 424
+ 425
+ 426
+ 427
+ 428
+ 429
+ 430
+ 431
+ 432
+ 433
+ 434
+ 435
+ 436
+ 437
+ 438
+ 439
+ 440
+ 441
+ 442
+ 443
+ 444
+ 445
+ 446
+ 447
+ 448
+ 449
+ 450
+ 451
+ 452
+ 453
+ 454
+ 455
+ 456
+ 457
+ 458
+ 459
+ 460
+ 461
+ 462
+ 463
+ 464
+ 465
+ 466
+ 467
+ 468
+ 469
+ 470
+ 471
+ 472
+ 473
+ 474
+ 475
+ 476
+ 477
+ 478
+ 479
+ 480
+ 481
+ 482
+ 483
+ 484
+ 485
+ 486
+ 487
+ 488
+ 489
+ 490
+ 491
+ 492
+ 493
+ 494
+ 495
+ 496
+ 497
+ 498
+ 499
+ 500
+ 501
+ 502
+ 503
+ 504
+ 505
+ 506
+ 507
+ 508
+ 509
+ 510
+ 511
+ 512
+ 513
+ 514
+ 515
+ 516
+ 517
+ 518
+ 519
+ 520
+ 521
+ 522
+ 523
+ 524
+ 525
+ 526
+ 527
+ 528
+ 529
+ 530
+ 531
+ 532
+ 533
+ 534
+ 535
+ 536
+ 537
+ 538
+ 539
+ 540
+ 541
+ 542
+ 543
+ 544
+ 545
+ 546
+ 547
+ 548
+ 549
+ 550
+ 551
+ 552
+ 553
+ 554
+ 555
+ 556
+ 557
+ 558
+ 559
+ 560
+ 561
+ 562
+ 563
+ 564
+ 565
+ 566
+ 567
+ 568
+ 569
+ 570
+ 571
+ 572
+ 573
+ 574
+ 575
+ 576
+ 577
+ 578
+ 579
+ 580
+ 581
+ 582
+ 583
+ 584
+ 585
+ 586
+ 587
+ 588
+ 589
+ 590
+ 591
+ 592
+ 593
+ 594
+ 595
+ 596
+ 597
+ 598
+ 599
+ 600
+ 601
+ 602
+ 603
+ 604
+ 605
+ 606
+ 607
+ 608
+ 609
+ 610
+ 611
+ 612
+ 613
+ 614
+ 615
+ 616
+ 617
+ 618
+ 619
+ 620
+ 621
+ 622
+ 623
+ 624
+ 625
+ 626
+ 627
+ 628
+ 629
+ 630
+ 631
+ 632
+ 633
+ 634
+ 635
+ 636
+ 637
+ 638
+ 639
+ 640
+ 641
+ 642
+ 643
+ 644
+ 645
+ 646
+ 647
+ 648
+ 649
+ 650
+ 651
+ 652
+ 653
+ 654
+ 655
+ 656
+ 657
+ 658
+ 659
+ 660
+ 661
+ 662
+ 663
+ 664
+ 665
+ 666
+ 667
+ 668
+ 669
+ 670
+ 671
+ 672
+ 673
+ 674
+ 675
+ 676
+ 677
+ 678
+ 679
+ 680
+ 681
+ 682
+ 683
+ 684
+ 685
+ 686
+ 687
+ 688
+ 689
+ 690
+ 691
+ 692
+ 693
+ 694
+ 695
+ 696
+ 697
+ 698
+ 699
+ 700
+ 701
+ 702
+ 703
+ 704
+ 705
+ 706
+ 707
+ 708
+ 709
+ 710
+ 711
+ 712
+ 713
+ 714
+ 715
+ 716
+ 717
+ 718
+ 719
+ 720
+ 721
+ 722
+ 723
+ 724
+ 725
+ 726
+ 727
+ 728
+ 729
+ 730
+ 731
+ 732
+ 733
+ 734
+ 735
+ 736
+ 737
+ 738
+ 739
+ 740
+ 741
+ 742
+ 743
+ 744
+ 745
+ 746
+ 747
+ 748
+ 749
+ 750
+ 751
+ 752
+ 753
+ 754
+ 755
+ 756
+ 757
+ 758
+ 759
+ 760
+ 761
+ 762
+ 763
+ 764
+ 765
+ 766
+ 767
+ 768
+ 769
+ 770
+ 771
+ 772
+ 773
+ 774
+ 775
+ 776
+ 777
+ 778
+ 779
+ 780
+ 781
+ 782
+ 783
+ 784
+ 785
+ 786
+ 787
+ 788
+ 789
+ 790
+ 791
+ 792
+ 793
+ 794
+ 795
+ 796
+ 797
+ 798
+ 799
+ 800
+ 801
+ 802
+ 803
+ 804
+ 805
+ 806
+ 807
+ 808
+ 809
+ 810
+ 811
+ 812
+ 813
+ 814
+ 815
+ 816
+ 817
+ 818
+ 819
+ 820
+ 821
+ 822
+ 823
+ 824
+ 825
+ 826
+ 827
+ 828
+ 829
+ 830
+ 831
+ 832
+ 833
+ 834
+ 835
+ 836
+ 837
+ 838
+ 839
+ 840
+ 841
+ 842
+ 843
+ 844
+ 845
+ 846
+ 847
+ 848
+ 849
+ 850
+ 851
+ 852
+ 853
+ 854
+ 855
+ 856
+ 857
+ 858
+ 859
+ 860
+ 861
+ 862
+ 863
+ 864
+ 865
+ 866
+ 867
+ 868
+ 869
+ 870
+ 871
+ 872
+ 873
+ 874
+ 875
+ 876
+ 877
+ 878
+ 879
+ 880
+ 881
+ 882
+ 883
+ 884
+ 885
+ 886
+ 887
+ 888
+ 889
+ 890
+ 891
+ 892
+ 893
+ 894
+ 895
+ 896
+ 897
+ 898
+ 899
+ 900
+ 901
+ 902
+ 903
+ 904
+ 905
+ 906
+ 907
+ 908
+ 909
+ 910
+ 911
+ 912
+ 913
+ 914
+ 915
+ 916
+ 917
+ 918
+ 919
+ 920
+ 921
+ 922
+ 923
+ 924
+ 925
+ 926
+ 927
+ 928
+ 929
+ 930
+ 931
+ 932
+ 933
+ 934
+ 935
+ 936
+ 937
+ 938
+ 939
+ 940
+ 941
+ 942
+ 943
+ 944
+ 945
+ 946
+ 947
+ 948
+ 949
+ 950
+ 951
+ 952
+ 953
+ 954
+ 955
+ 956
+ 957
+ 958
+ 959
+ 960
+ 961
+ 962
+ 963
+ 964
+ 965
+ 966
+ 967
+ 968
+ 969
+ 970
+ 971
+ 972
+ 973
+ 974
+ 975
+ 976
+ 977
+ 978
+ 979
+ 980
+ 981
+ 982
+ 983
+ 984
+ 985
+ 986
+ 987
+ 988
+ 989
+ 990
+ 991
+ 992
+ 993
+ 994
+ 995
+ 996
+ 997
+ 998
+ 999
+1000
+1001
+1002
+1003
+1004
+1005
+1006
+1007
+1008
+1009
+1010
+1011
+1012
+1013
+1014
+1015
+1016
+1017
+1018
+1019
+1020
+1021
+1022
+1023
+1024
+1025
+1026
+1027
+1028
+1029
+1030
+1031
+1032
+1033
+1034
+1035
+1036
+1037
+1038
+1039
+1040
+1041
+1042
+1043
+1044
+1045
+1046
+1047
+1048
+1049
+1050
+1051
+1052
+1053
+1054
+1055
+1056
+1057
+1058
+1059
+1060
+1061
+1062
+1063
+1064
+1065
+1066
+1067
+1068
+1069
+1070
+1071
+1072
+1073
+1074
+1075
+1076
+1077
+1078
+1079
+1080
+1081
+1082
+1083
+1084
+1085
+1086
+1087
+1088
+1089
+1090
+1091
+1092
+1093
+1094
+1095
+1096
+1097
+1098
+1099
+1100
+1101
+1102
+1103
+1104
+1105
+1106
+1107
+1108
+1109
+1110
+1111
+1112
+1113
+1114
+1115
+1116
+1117
+1118
+1119
+1120
+1121
+1122
+1123
+1124
+1125
+1126
+1127
+1128
+1129
+1130
+1131
+1132
+1133
+1134
+1135
+1136
+1137
+1138
+1139
+1140
+1141
+1142
class SchemaBuilder:
+    """
+    Author a knowledge complex schema: vertex types, edge types, face types.
+
+    Each add_*_type call declares a new OWL subclass of the appropriate
+    KC:Element subclass (Vertex, Edge, or Face) and creates a corresponding
+    SHACL node shape. Both OWL and SHACL graphs are maintained internally.
+    dump_owl() / dump_shacl() return the full merged Turtle strings.
+
+    Parameters
+    ----------
+    namespace : str
+        Short namespace token for user-defined classes and properties.
+        Used to build IRI prefix: https://example.org/{namespace}#
+
+    Example
+    -------
+    >>> sb = SchemaBuilder(namespace="aaa")
+    >>> sb.add_vertex_type("spec")
+    >>> sb.add_edge_type("verification",
+    ...     attributes={"status": vocab("passing", "failing", "pending")})
+    >>> sb.add_face_type("assurance")
+    >>> owl_ttl = sb.dump_owl()
+    >>> shacl_ttl = sb.dump_shacl()
+    """
+
    def __init__(self, namespace: str) -> None:
        """Set up IRI prefixes and empty registries, then load the core graphs."""
        self._namespace = namespace
        self._base_iri = f"https://example.org/{namespace}#"
        # Internal namespace objects
        self._ns = Namespace(self._base_iri)  # user classes/properties
        self._nss = Namespace(f"https://example.org/{namespace}/shape#")  # SHACL shapes
        # Internal graphs — never exposed publicly
        self._owl_graph: Any = None   # rdflib.Graph, populated in _init_graphs()
        self._shacl_graph: Any = None # rdflib.Graph, populated in _init_graphs()
        self._types: dict[str, dict] = {}  # registry: name -> {kind, attributes}
        self._attr_domains: dict[str, URIRef | None] = {}  # attr name → first domain or None if shared
        self._queries: dict[str, str] = {}  # name -> SPARQL template string
        self._init_graphs()
+
+    def __repr__(self) -> str:
+        return f"SchemaBuilder(namespace={self._namespace!r}, types={len(self._types)})"
+
+    def _init_graphs(self) -> None:
+        """Load core OWL and SHACL static resources into internal graphs."""
+        self._owl_graph = Graph()
+        self._owl_graph.parse(str(_CORE_OWL), format="turtle")
+
+        self._shacl_graph = Graph()
+        self._shacl_graph.parse(str(_CORE_SHAPES), format="turtle")
+
+        # Bind prefixes on both graphs
+        for g in (self._owl_graph, self._shacl_graph):
+            g.bind("kc", _KC)
+            g.bind("kcs", _KCS)
+            g.bind("sh", _SH)
+            g.bind("owl", OWL)
+            g.bind("rdfs", RDFS)
+            g.bind("rdf", RDF)
+            g.bind("xsd", XSD)
+            g.bind(self._namespace, self._ns)
+            g.bind(f"{self._namespace}s", self._nss)
+
    def _set_owl_domain(self, attr_iri: URIRef, attr_name: str, type_iri: URIRef) -> None:
        """Set rdfs:domain for a property, removing it if shared across types.

        When a property appears on multiple types, setting multiple rdfs:domain
        values causes RDFS inference to classify any individual with that property
        as a member of ALL domain types — leading to spurious SHACL cross-type
        violations. If the property already has a domain for a different type,
        we remove all domain assertions (SHACL shapes handle per-type enforcement).

        State: ``self._attr_domains`` maps attr name to the sole domain IRI,
        or ``None`` once the property has been seen on two different types.
        """
        if attr_name not in self._attr_domains:
            # First time seeing this attribute — set domain
            self._attr_domains[attr_name] = type_iri
            self._owl_graph.add((attr_iri, RDFS.domain, type_iri))
        elif self._attr_domains[attr_name] is not None and self._attr_domains[attr_name] != type_iri:
            # Shared across types — remove existing domain
            # (None object wildcard: drops every rdfs:domain triple for attr_iri)
            self._owl_graph.remove((attr_iri, RDFS.domain, None))
            self._attr_domains[attr_name] = None
        # else: already None (shared) or same type — no action needed
+
    def _add_vocab_attr_to_graphs(
        self,
        type_iri: URIRef,
        shape_iri: URIRef,
        attr_name: str,
        vocab_desc: VocabDescriptor,
        required: bool,
    ) -> None:
        """Add a vocab attribute's OWL property and SHACL property shape (with sh:in).

        Parameters
        ----------
        type_iri : URIRef
            IRI of the owning class (used for rdfs:domain bookkeeping).
        shape_iri : URIRef
            Node shape the new anonymous property shape is attached to.
        attr_name : str
            Attribute name; resolved to an IRI in the user namespace.
        vocab_desc : VocabDescriptor
            Supplies the allowed string values and the ``multiple`` flag.
        required : bool
            True -> sh:minCount 1, False -> sh:minCount 0.
        """
        attr_iri = self._ns[attr_name]

        # OWL: declare data property
        self._owl_graph.add((attr_iri, RDF.type, OWL.DatatypeProperty))
        self._set_owl_domain(attr_iri, attr_name, type_iri)
        self._owl_graph.add((attr_iri, RDFS.range, XSD.string))
        # Human-readable hint only; enforcement is the SHACL sh:in list below.
        self._owl_graph.add((attr_iri, RDFS.comment,
                             Literal(f"Allowed values: {', '.join(vocab_desc.values)}")))

        # SHACL: create property shape
        prop_shape = BNode()
        self._shacl_graph.add((shape_iri, _SH.property, prop_shape))
        self._shacl_graph.add((prop_shape, _SH.path, attr_iri))
        self._shacl_graph.add((prop_shape, _SH.datatype, XSD.string))
        self._shacl_graph.add((prop_shape, _SH.minCount, Literal(1 if required else 0)))
        # Single-valued unless the descriptor explicitly allows multiple values.
        if not vocab_desc.multiple:
            self._shacl_graph.add((prop_shape, _SH.maxCount, Literal(1)))

        # sh:in list — an RDF collection of the allowed literal values
        list_node = BNode()
        self._shacl_graph.add((prop_shape, _SH["in"], list_node))
        Collection(self._shacl_graph, list_node,
                   [Literal(v) for v in vocab_desc.values])
+
+    def _add_text_attr_to_graphs(
+        self,
+        type_iri: URIRef,
+        shape_iri: URIRef,
+        attr_name: str,
+        text_desc: TextDescriptor,
+    ) -> None:
+        """Add a free-text attribute's OWL property and SHACL property shape (no sh:in)."""
+        attr_iri = self._ns[attr_name]
+
+        # OWL: declare data property
+        self._owl_graph.add((attr_iri, RDF.type, OWL.DatatypeProperty))
+        self._set_owl_domain(attr_iri, attr_name, type_iri)
+        self._owl_graph.add((attr_iri, RDFS.range, XSD.string))
+
+        # SHACL: create property shape
+        prop_shape = BNode()
+        self._shacl_graph.add((shape_iri, _SH.property, prop_shape))
+        self._shacl_graph.add((prop_shape, _SH.path, attr_iri))
+        self._shacl_graph.add((prop_shape, _SH.datatype, XSD.string))
+        self._shacl_graph.add((prop_shape, _SH.minCount,
+                               Literal(1 if text_desc.required else 0)))
+        if not text_desc.multiple:
+            self._shacl_graph.add((prop_shape, _SH.maxCount, Literal(1)))
+
+    def _add_attr_to_graphs(
+        self,
+        type_iri: URIRef,
+        shape_iri: URIRef,
+        attr_name: str,
+        descriptor: VocabDescriptor | TextDescriptor,
+        required: bool | None = None,
+    ) -> None:
+        """Dispatch to the appropriate attr handler based on descriptor type."""
+        if isinstance(descriptor, TextDescriptor):
+            self._add_text_attr_to_graphs(type_iri, shape_iri, attr_name, descriptor)
+        elif isinstance(descriptor, VocabDescriptor):
+            if required is None:
+                required = True
+            self._add_vocab_attr_to_graphs(type_iri, shape_iri, attr_name, descriptor, required)
+        else:
+            raise TypeError(f"Unknown attribute descriptor: {type(descriptor)}")
+
+    def _dispatch_attr(
+        self,
+        type_iri: URIRef,
+        shape_iri: URIRef,
+        attr_name: str,
+        attr_spec: VocabDescriptor | TextDescriptor | dict,
+    ) -> None:
+        """Route an attribute spec to the correct graph-writing method."""
+        if isinstance(attr_spec, (VocabDescriptor, TextDescriptor)):
+            self._add_attr_to_graphs(type_iri, shape_iri, attr_name, attr_spec)
+        elif isinstance(attr_spec, dict):
+            if "vocab" in attr_spec:
+                vd = attr_spec["vocab"]
+                req = attr_spec.get("required", True)
+                self._add_attr_to_graphs(type_iri, shape_iri, attr_name, vd, required=req)
+            elif "text" in attr_spec:
+                td = attr_spec["text"]
+                self._add_attr_to_graphs(type_iri, shape_iri, attr_name, td)
+            else:
+                raise TypeError(f"Attribute dict must have 'vocab' or 'text' key: {attr_spec}")
+        else:
+            raise TypeError(f"Unknown attribute spec type: {type(attr_spec)}")
+
+    def _validate_parent(self, parent: str | None, expected_kind: str) -> None:
+        """Validate parent type exists and has the correct kind."""
+        from knowledgecomplex.exceptions import SchemaError
+        if parent is None:
+            return
+        if parent not in self._types:
+            raise SchemaError(f"Parent type '{parent}' is not registered")
+        if self._types[parent]["kind"] != expected_kind:
+            raise SchemaError(
+                f"Parent type '{parent}' is kind '{self._types[parent]['kind']}', "
+                f"expected '{expected_kind}'"
+            )
+
+    def _collect_inherited_attributes(self, type_name: str) -> dict:
+        """Walk the parent chain and collect all inherited attributes."""
+        inherited = {}
+        current = self._types[type_name].get("parent")
+        while current is not None:
+            parent_attrs = self._types[current].get("attributes", {})
+            # Earlier ancestors are overridden by closer ancestors
+            for k, v in parent_attrs.items():
+                if k not in inherited:
+                    inherited[k] = v
+            current = self._types[current].get("parent")
+        return inherited
+
+    def _validate_bind(
+        self,
+        bind: dict[str, str],
+        all_attributes: dict,
+    ) -> None:
+        """Validate that bind keys exist in all_attributes and values are legal."""
+        from knowledgecomplex.exceptions import SchemaError
+        for attr_name, bound_value in bind.items():
+            if attr_name not in all_attributes:
+                raise SchemaError(
+                    f"Cannot bind '{attr_name}': attribute not found on this type or its ancestors"
+                )
+            descriptor = all_attributes[attr_name]
+            # Unwrap dict-style descriptors
+            if isinstance(descriptor, dict):
+                descriptor = descriptor.get("vocab") or descriptor.get("text")
+            if isinstance(descriptor, VocabDescriptor):
+                if bound_value not in descriptor.values:
+                    raise SchemaError(
+                        f"Cannot bind '{attr_name}' to '{bound_value}': "
+                        f"not in allowed values {descriptor.values}"
+                    )
+
+    def _apply_bind(self, shape_iri: URIRef, bind: dict[str, str]) -> None:
+        """Add sh:hasValue + sh:minCount 1 constraints for bound attributes."""
+        for attr_name, bound_value in bind.items():
+            attr_iri = self._ns[attr_name]
+            prop_shape = BNode()
+            self._shacl_graph.add((shape_iri, _SH.property, prop_shape))
+            self._shacl_graph.add((prop_shape, _SH.path, attr_iri))
+            self._shacl_graph.add((prop_shape, _SH.hasValue, Literal(bound_value)))
+            self._shacl_graph.add((prop_shape, _SH.minCount, Literal(1)))
+
+    def add_vertex_type(
+        self,
+        name: str,
+        attributes: dict[str, VocabDescriptor | TextDescriptor | Any] | None = None,
+        parent: str | None = None,
+        bind: dict[str, str] | None = None,
+    ) -> "SchemaBuilder":
+        """
+        Declare a new vertex type (OWL subclass of KC:Vertex + SHACL node shape).
+
+        Parameters
+        ----------
+        name : str
+            Class name within the user namespace.
+        attributes : dict, optional
+            Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor,
+            or dict with "vocab"/"text" key and optional "required" flag).
+        parent : str, optional
+            Name of a registered vertex type to inherit from.
+        bind : dict, optional
+            Mapping of attribute names to fixed string values (sh:hasValue).
+
+        Returns
+        -------
+        SchemaBuilder (self, for chaining)
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if name in self._types:
+            raise SchemaError(f"Type '{name}' is already registered")
+        self._validate_parent(parent, "vertex")
+        attributes = attributes or {}
+        bind = bind or {}
+
+        self._types[name] = {
+            "kind": "vertex",
+            "attributes": dict(attributes),
+            "parent": parent,
+            "bind": dict(bind),
+        }
+
+        # Validate bind against all attributes (own + inherited)
+        if bind:
+            inherited = self._collect_inherited_attributes(name)
+            all_attrs = {**inherited, **attributes}
+            self._validate_bind(bind, all_attrs)
+
+        type_iri = self._ns[name]
+        shape_iri = self._nss[f"{name}Shape"]
+
+        # OWL
+        superclass = self._ns[parent] if parent else _KC.Vertex
+        self._owl_graph.add((type_iri, RDF.type, OWL.Class))
+        self._owl_graph.add((type_iri, RDFS.subClassOf, superclass))
+
+        # SHACL
+        self._shacl_graph.add((shape_iri, RDF.type, _SH.NodeShape))
+        self._shacl_graph.add((shape_iri, _SH.targetClass, type_iri))
+
+        for attr_name, attr_spec in attributes.items():
+            self._dispatch_attr(type_iri, shape_iri, attr_name, attr_spec)
+
+        if bind:
+            self._apply_bind(shape_iri, bind)
+
+        return self
+
+    def add_edge_type(
+        self,
+        name: str,
+        attributes: dict[str, VocabDescriptor | TextDescriptor | Any] | None = None,
+        parent: str | None = None,
+        bind: dict[str, str] | None = None,
+    ) -> "SchemaBuilder":
+        """
+        Declare a new edge type (OWL subclass of KC:Edge + SHACL property shapes).
+
+        Parameters
+        ----------
+        name : str
+            Class name within the user namespace.
+        attributes : dict, optional
+            Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor,
+            or dict with "vocab"/"text" key and optional "required" flag).
+        parent : str, optional
+            Name of a registered edge type to inherit from.
+        bind : dict, optional
+            Mapping of attribute names to fixed string values (sh:hasValue).
+
+        Returns
+        -------
+        SchemaBuilder (self, for chaining)
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if name in self._types:
+            raise SchemaError(f"Type '{name}' is already registered")
+        self._validate_parent(parent, "edge")
+        attributes = attributes or {}
+        bind = bind or {}
+
+        self._types[name] = {
+            "kind": "edge",
+            "attributes": dict(attributes),
+            "parent": parent,
+            "bind": dict(bind),
+        }
+
+        if bind:
+            inherited = self._collect_inherited_attributes(name)
+            all_attrs = {**inherited, **attributes}
+            self._validate_bind(bind, all_attrs)
+
+        type_iri = self._ns[name]
+        shape_iri = self._nss[f"{name}Shape"]
+
+        # OWL
+        superclass = self._ns[parent] if parent else _KC.Edge
+        self._owl_graph.add((type_iri, RDF.type, OWL.Class))
+        self._owl_graph.add((type_iri, RDFS.subClassOf, superclass))
+
+        # SHACL
+        self._shacl_graph.add((shape_iri, RDF.type, _SH.NodeShape))
+        self._shacl_graph.add((shape_iri, _SH.targetClass, type_iri))
+
+        for attr_name, attr_spec in attributes.items():
+            self._dispatch_attr(type_iri, shape_iri, attr_name, attr_spec)
+
+        if bind:
+            self._apply_bind(shape_iri, bind)
+
+        return self
+
+    def add_face_type(
+        self,
+        name: str,
+        attributes: dict[str, Any] | None = None,
+        parent: str | None = None,
+        bind: dict[str, str] | None = None,
+    ) -> "SchemaBuilder":
+        """
+        Declare a new face type (OWL subclass of KC:Face + SHACL property shapes).
+
+        Attributes with ``required=False`` generate sh:minCount 0 constraints.
+
+        Parameters
+        ----------
+        name : str
+            Class name within the user namespace.
+        attributes : dict, optional
+            Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor,
+            or dict with "vocab"/"text" key and optional "required" flag).
+        parent : str, optional
+            Name of a registered face type to inherit from.
+        bind : dict, optional
+            Mapping of attribute names to fixed string values (sh:hasValue).
+
+        Returns
+        -------
+        SchemaBuilder (self, for chaining)
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if name in self._types:
+            raise SchemaError(f"Type '{name}' is already registered")
+        self._validate_parent(parent, "face")
+        attributes = attributes or {}
+        bind = bind or {}
+
+        self._types[name] = {
+            "kind": "face",
+            "attributes": dict(attributes),
+            "parent": parent,
+            "bind": dict(bind),
+        }
+
+        if bind:
+            inherited = self._collect_inherited_attributes(name)
+            all_attrs = {**inherited, **attributes}
+            self._validate_bind(bind, all_attrs)
+
+        type_iri = self._ns[name]
+        shape_iri = self._nss[f"{name}Shape"]
+
+        # OWL
+        superclass = self._ns[parent] if parent else _KC.Face
+        self._owl_graph.add((type_iri, RDF.type, OWL.Class))
+        self._owl_graph.add((type_iri, RDFS.subClassOf, superclass))
+
+        # SHACL
+        self._shacl_graph.add((shape_iri, RDF.type, _SH.NodeShape))
+        self._shacl_graph.add((shape_iri, _SH.targetClass, type_iri))
+
+        for attr_name, attr_spec in attributes.items():
+            self._dispatch_attr(type_iri, shape_iri, attr_name, attr_spec)
+
+        if bind:
+            self._apply_bind(shape_iri, bind)
+
+        return self
+
+    def describe_type(self, name: str) -> dict:
+        """
+        Inspect a registered type's attributes, parent, and bindings.
+
+        Parameters
+        ----------
+        name : str
+            The registered type name.
+
+        Returns
+        -------
+        dict
+            Keys: name, kind, parent, own_attributes, inherited_attributes,
+            all_attributes, bound.
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if name not in self._types:
+            raise SchemaError(f"Type '{name}' is not registered")
+
+        info = self._types[name]
+        own_attrs = dict(info.get("attributes", {}))
+        inherited_attrs = self._collect_inherited_attributes(name)
+        # Collect bindings from ancestors
+        inherited_bind = {}
+        current = info.get("parent")
+        while current is not None:
+            parent_bind = self._types[current].get("bind", {})
+            for k, v in parent_bind.items():
+                if k not in inherited_bind:
+                    inherited_bind[k] = v
+            current = self._types[current].get("parent")
+        own_bind = dict(info.get("bind", {}))
+        all_bind = {**inherited_bind, **own_bind}
+
+        all_attrs = {**inherited_attrs, **own_attrs}
+        return {
+            "name": name,
+            "kind": info["kind"],
+            "parent": info.get("parent"),
+            "own_attributes": own_attrs,
+            "inherited_attributes": inherited_attrs,
+            "all_attributes": all_attrs,
+            "bound": all_bind,
+        }
+
+    def type_names(self, kind: str | None = None) -> list[str]:
+        """
+        List registered type names, optionally filtered by kind.
+
+        Parameters
+        ----------
+        kind : str, optional
+            Filter by "vertex", "edge", or "face".
+
+        Returns
+        -------
+        list[str]
+        """
+        if kind is None:
+            return list(self._types.keys())
+        return [n for n, info in self._types.items() if info["kind"] == kind]
+
+    def promote_to_attribute(
+        self,
+        type: str,
+        attribute: str,
+        vocab: VocabDescriptor | None = None,
+        text: TextDescriptor | None = None,
+        required: bool = True,
+    ) -> "SchemaBuilder":
+        """
+        Atomically promote a discovered pattern to a first-class typed attribute.
+
+        Updates both OWL property definition and SHACL shape constraint for the named type.
+        After calling this, dump_owl() and dump_shacl() both reflect the updated attribute.
+
+        Parameters
+        ----------
+        type : str
+            The type name (must have been registered via add_*_type).
+        attribute : str
+            Attribute name to add or upgrade.
+        vocab : VocabDescriptor, optional
+            Controlled vocabulary for the attribute.
+        text : TextDescriptor, optional
+            Free-text descriptor for the attribute.
+        required : bool
+            If True, generates sh:minCount 1. Overrides the descriptor's own required flag.
+
+        Returns
+        -------
+        SchemaBuilder (self, for chaining)
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if type not in self._types:
+            raise SchemaError(f"Type '{type}' is not registered")
+        if vocab is None and text is None:
+            raise SchemaError("promote_to_attribute requires either vocab or text descriptor")
+
+        type_iri = self._ns[type]
+        shape_iri = self._nss[f"{type}Shape"]
+        attr_iri = self._ns[attribute]
+
+        # Remove existing OWL triples for this attribute (if upgrading)
+        for p in (RDFS.domain, RDFS.range, RDFS.comment):
+            self._owl_graph.remove((attr_iri, p, None))
+        self._owl_graph.remove((attr_iri, RDF.type, OWL.DatatypeProperty))
+
+        # Remove existing SHACL property shape for this attribute (if upgrading)
+        for prop_node in list(self._shacl_graph.objects(shape_iri, _SH.property)):
+            if (prop_node, _SH.path, attr_iri) in self._shacl_graph:
+                # Remove the sh:in list
+                list_head = self._shacl_graph.value(prop_node, _SH["in"])
+                if list_head is not None:
+                    Collection(self._shacl_graph, list_head).clear()
+                    self._shacl_graph.remove((prop_node, _SH["in"], list_head))
+                # Remove all triples about this property shape
+                for p, o in list(self._shacl_graph.predicate_objects(prop_node)):
+                    self._shacl_graph.remove((prop_node, p, o))
+                self._shacl_graph.remove((shape_iri, _SH.property, prop_node))
+
+        # Re-add with new settings
+        if vocab is not None:
+            self._add_attr_to_graphs(type_iri, shape_iri, attribute, vocab, required=required)
+        else:
+            # Override the text descriptor's required flag with the promote call's value
+            effective = TextDescriptor(required=required, multiple=text.multiple)
+            self._add_attr_to_graphs(type_iri, shape_iri, attribute, effective)
+
+        # Update type registry
+        if "attributes" not in self._types[type]:
+            self._types[type]["attributes"] = {}
+        if vocab is not None:
+            self._types[type]["attributes"][attribute] = {
+                "vocab": vocab, "required": required
+            }
+        else:
+            self._types[type]["attributes"][attribute] = text
+
+        return self
+
+    def add_sparql_constraint(
+        self,
+        type_name: str,
+        sparql: str,
+        message: str,
+    ) -> "SchemaBuilder":
+        """
+        Attach a sh:sparql constraint to the SHACL shape for type_name.
+
+        The sparql argument must be a SPARQL SELECT query that returns $this
+        for each violating focus node. pyshacl evaluates this and reports the
+        message for each returned row.
+
+        Parameters
+        ----------
+        type_name : str
+            The type name (must have been registered via add_*_type).
+        sparql : str
+            SPARQL SELECT query. Must bind $this to each violating node.
+        message : str
+            Human-readable message reported when the constraint is violated.
+
+        Returns
+        -------
+        SchemaBuilder (self, for chaining)
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if type_name not in self._types:
+            raise SchemaError(f"Type '{type_name}' is not registered")
+        shape_iri = self._nss[f"{type_name}Shape"]
+        constraint = BNode()
+        self._shacl_graph.add((shape_iri, _SH.sparql, constraint))
+        self._shacl_graph.add((constraint, RDF.type, _SH.SPARQLConstraint))
+        self._shacl_graph.add((constraint, _SH.select, Literal(sparql)))
+        self._shacl_graph.add((constraint, _SH.message, Literal(message)))
+        return self
+
    # --- Topological query registration and constraint escalation ---

    _TOPO_PATTERNS: dict[str, tuple[str, str]] = {
        # operation -> (graph_pattern_template, result_variable)
        # {simplex_iri} is replaced by the target IRI,
        # {type_filter} by a type constraint or "".
        # Patterns are phrased over kc:boundedBy; `*` is SPARQL's
        # zero-or-more property-path closure (so it includes the element itself).
        "boundary": (
            # elements the focus simplex is directly bounded by
            "{simplex_iri} kc:boundedBy ?result . {type_filter}",
            "result",
        ),
        "coboundary": (
            # elements directly bounded by the focus simplex
            "?result kc:boundedBy {simplex_iri} . {type_filter}",
            "result",
        ),
        "star": (
            # everything reaching the focus via boundedBy* (focus included)
            "?result kc:boundedBy* {simplex_iri} . {type_filter}",
            "result",
        ),
        "closure": (
            # everything reachable from the focus via boundedBy* (focus included)
            "{simplex_iri} kc:boundedBy* ?result . {type_filter}",
            "result",
        ),
        "link": (
            # closed_star minus star: elements reachable from star's closure
            # but not in the star itself
            "?star_elem kc:boundedBy* {simplex_iri} . "
            "?star_elem kc:boundedBy* ?result . "
            "FILTER NOT EXISTS {{ ?result kc:boundedBy* {simplex_iri} }} "
            "{type_filter}",
            "result",
        ),
        "degree": (
            # direct cofaces only; NOTE: no {type_filter} slot in this
            # template, so a target_type filter has no effect for "degree"
            "?result kc:boundedBy {simplex_iri} .",
            "result",
        ),
    }
+
+    def _build_topo_sparql(
+        self,
+        operation: str,
+        *,
+        simplex_iri: str = "{simplex}",
+        target_type: str | None = None,
+    ) -> str:
+        """Build a complete SPARQL SELECT from a topological operation.
+
+        Parameters
+        ----------
+        operation :
+            One of: boundary, coboundary, star, closure, link, degree.
+        simplex_iri :
+            IRI or placeholder for the focus element.
+        target_type :
+            Optional type name to filter results.
+
+        Returns
+        -------
+        str
+            A complete SPARQL SELECT query string.
+        """
+        from knowledgecomplex.exceptions import SchemaError
+        if operation not in self._TOPO_PATTERNS:
+            raise SchemaError(
+                f"Unknown topological operation '{operation}'. "
+                f"Valid: {sorted(self._TOPO_PATTERNS)}"
+            )
+        pattern_tmpl, result_var = self._TOPO_PATTERNS[operation]
+
+        if target_type is not None:
+            if target_type not in self._types:
+                raise SchemaError(f"Type '{target_type}' is not registered")
+            type_iri = self._ns[target_type]
+            tf = f"?{result_var} a/rdfs:subClassOf* <{type_iri}> ."
+        else:
+            tf = ""
+
+        pattern = (
+            pattern_tmpl
+            .replace("{simplex_iri}", simplex_iri)
+            .replace("{type_filter}", tf)
+        )
+
+        return (
+            f"PREFIX kc: <https://example.org/kc#>\n"
+            f"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
+            f"SELECT ?{result_var} WHERE {{\n"
+            f"    {pattern}\n"
+            f"}}\n"
+        )
+
+    def add_query(
+        self,
+        name: str,
+        operation: str,
+        *,
+        target_type: str | None = None,
+    ) -> "SchemaBuilder":
+        """Register a named topological query template on this schema.
+
+        The query is generated from a topological operation and optional type
+        filter, then stored internally. It is exported as a ``.sparql`` file
+        by :meth:`export` and automatically loaded by
+        :class:`~knowledgecomplex.graph.KnowledgeComplex` at runtime.
+
+        Parameters
+        ----------
+        name : str
+            Query template name (becomes the filename stem, e.g. ``"spec_coboundary"``
+            exports as ``queries/spec_coboundary.sparql``).
+        operation : str
+            Topological operation: ``"boundary"``, ``"coboundary"``, ``"star"``,
+            ``"closure"``, ``"link"``, or ``"degree"``.
+        target_type : str, optional
+            Filter results to this type (including subtypes via OWL class hierarchy).
+
+        Returns
+        -------
+        SchemaBuilder (self, for chaining)
+
+        Example
+        -------
+        >>> sb.add_query("spec_coboundary", "coboundary", target_type="verification")
+        """
+        sparql = self._build_topo_sparql(
+            operation, simplex_iri="{simplex}", target_type=target_type,
+        )
+        self._queries[name] = sparql
+        return self
+
    def add_topological_constraint(
        self,
        type_name: str,
        operation: str,
        *,
        target_type: str | None = None,
        predicate: str = "min_count",
        min_count: int = 1,
        max_count: int | None = None,
        message: str | None = None,
    ) -> "SchemaBuilder":
        """Escalate a topological query to a SHACL constraint.

        Generates a ``sh:sparql`` constraint that, for each focus node of
        *type_name*, evaluates a topological operation and checks a cardinality
        predicate. Delegates to :meth:`add_sparql_constraint`.

        Parameters
        ----------
        type_name : str
            The type to constrain (must be registered).
        operation : str
            Topological operation: ``"boundary"``, ``"coboundary"``, ``"star"``,
            ``"closure"``, ``"link"``, or ``"degree"``.
        target_type : str, optional
            Filter the topological result to this type.
        predicate : str
            ``"min_count"`` — at least *min_count* results (default).
            ``"max_count"`` — at most *max_count* results.
            ``"exact_count"`` — exactly *min_count* results.
        min_count : int
            Minimum count (used by ``"min_count"`` and ``"exact_count"``).
        max_count : int, optional
            Maximum count (used by ``"max_count"``).
        message : str, optional
            Custom violation message. Auto-generated if not provided.

        Returns
        -------
        SchemaBuilder (self, for chaining)

        Raises
        ------
        SchemaError
            On unknown type/operation/predicate, or when ``predicate ==
            "max_count"`` without a ``max_count`` value.

        Example
        -------
        >>> sb.add_topological_constraint(
        ...     "spec", "coboundary",
        ...     target_type="verification",
        ...     predicate="min_count", min_count=1,
        ...     message="Every spec must have at least one verification edge",
        ... )
        """
        from knowledgecomplex.exceptions import SchemaError
        if type_name not in self._types:
            raise SchemaError(f"Type '{type_name}' is not registered")
        if operation not in self._TOPO_PATTERNS:
            raise SchemaError(
                f"Unknown topological operation '{operation}'. "
                f"Valid: {sorted(self._TOPO_PATTERNS)}"
            )

        pattern_tmpl, result_var = self._TOPO_PATTERNS[operation]

        # Optional filter restricting results to target_type (and its
        # subclasses via rdfs:subClassOf*). Empty string disables it.
        if target_type is not None:
            if target_type not in self._types:
                raise SchemaError(f"Type '{target_type}' is not registered")
            type_iri = self._ns[target_type]
            tf = f"?{result_var} a/rdfs:subClassOf* <{type_iri}> ."
        else:
            tf = ""

        # Unlike _build_topo_sparql, the focus is pyshacl's $this variable,
        # bound per focus node during validation.
        pattern = (
            pattern_tmpl
            .replace("{simplex_iri}", "$this")
            .replace("{type_filter}", tf)
        )

        # Build the HAVING clause based on predicate
        # (the query must select VIOLATORS, so each clause is the negation
        # of the requested cardinality condition)
        if predicate == "min_count":
            having = f"HAVING (COUNT(DISTINCT ?{result_var}) < {min_count})"
        elif predicate == "max_count":
            if max_count is None:
                raise SchemaError("max_count predicate requires max_count parameter")
            having = f"HAVING (COUNT(DISTINCT ?{result_var}) > {max_count})"
        elif predicate == "exact_count":
            having = f"HAVING (COUNT(DISTINCT ?{result_var}) != {min_count})"
        else:
            raise SchemaError(
                f"Unknown predicate '{predicate}'. "
                f"Valid: min_count, max_count, exact_count"
            )

        # Wrap pattern in OPTIONAL so GROUP BY produces a row even when
        # there are zero matches (otherwise HAVING never fires for empty results)
        sparql = (
            f"PREFIX kc: <https://example.org/kc#>\n"
            f"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            f"SELECT $this WHERE {{\n"
            f"    OPTIONAL {{ {pattern} }}\n"
            f"}}\n"
            f"GROUP BY $this\n"
            f"{having}\n"
        )

        if message is None:
            target_desc = f" of type '{target_type}'" if target_type else ""
            message = (
                f"Topological constraint violated: {operation}{target_desc} "
                f"on '{type_name}' failed {predicate} check "
                f"(min={min_count}, max={max_count})"
            )

        return self.add_sparql_constraint(type_name, sparql, message)
+
+    def dump_owl(self) -> str:
+        """Return merged OWL graph (core + user schema) as a Turtle string."""
+        return self._owl_graph.serialize(format="turtle")
+
+    def dump_shacl(self) -> str:
+        """Return merged SHACL graph (core shapes + user shapes) as a Turtle string."""
+        return self._shacl_graph.serialize(format="turtle")
+
+    def export(
+        self,
+        path: str | Path,
+        query_dirs: list[Path] | None = None,
+    ) -> Path:
+        """
+        Export the schema to a directory as standard semantic web files.
+
+        Writes ontology.ttl (OWL) and shapes.ttl (SHACL). If query_dirs are
+        provided, copies all .sparql files into a queries/ subdirectory.
+
+        Parameters
+        ----------
+        path : str | Path
+            Target directory. Created if it does not exist.
+        query_dirs : list[Path], optional
+            Directories containing .sparql query templates to include.
+
+        Returns
+        -------
+        Path
+            The export directory.
+        """
+        p = Path(path)
+        p.mkdir(parents=True, exist_ok=True)
+        (p / "ontology.ttl").write_text(self.dump_owl())
+        (p / "shapes.ttl").write_text(self.dump_shacl())
+        # Write schema-generated query templates and copy external query dirs
+        if self._queries or query_dirs:
+            qdir = p / "queries"
+            qdir.mkdir(exist_ok=True)
+            for name, sparql_text in self._queries.items():
+                (qdir / f"{name}.sparql").write_text(sparql_text)
+            if query_dirs:
+                for d in query_dirs:
+                    for sparql_file in d.glob("*.sparql"):
+                        shutil.copy2(sparql_file, qdir / sparql_file.name)
+        return p
+
+    @classmethod
+    def load(cls, path: str | Path) -> "SchemaBuilder":
+        """
+        Load a schema from a directory containing ontology.ttl and shapes.ttl.
+
+        Reconstructs the type registry by inspecting OWL subclass triples.
+
+        Parameters
+        ----------
+        path : str | Path
+            Directory containing ontology.ttl and shapes.ttl.
+
+        Returns
+        -------
+        SchemaBuilder
+        """
+        p = Path(path)
+
+        owl_graph = Graph()
+        owl_graph.parse(str(p / "ontology.ttl"), format="turtle")
+
+        shacl_graph = Graph()
+        shacl_graph.parse(str(p / "shapes.ttl"), format="turtle")
+
+        # Discover model namespace: find a namespace binding that is not
+        # one of the well-known prefixes (kc, kcs, sh, owl, rdfs, rdf, xsd)
+        well_known = {
+            str(_KC), str(_KCS), str(_SH),
+            str(OWL), str(RDFS), str(RDF), str(XSD),
+        }
+        namespace = None
+        ns_obj = None
+        for prefix, uri in owl_graph.namespaces():
+            uri_str = str(uri)
+            if prefix and uri_str not in well_known and uri_str.startswith("https://example.org/"):
+                # Skip shape namespaces (ending with /shape#)
+                if "/shape#" in uri_str:
+                    continue
+                namespace = prefix
+                ns_obj = Namespace(uri_str)
+                break
+
+        if namespace is None:
+            raise ValueError(
+                f"Could not detect model namespace in {p / 'ontology.ttl'}. "
+                "Expected a namespace binding like 'aaa: <https://example.org/aaa#>'."
+            )
+
+        # Build instance without calling __init__
+        sb = object.__new__(cls)
+        sb._namespace = namespace
+        sb._base_iri = str(ns_obj)
+        sb._ns = ns_obj
+        sb._nss = Namespace(f"https://example.org/{namespace}/shape#")
+        sb._owl_graph = owl_graph
+        sb._shacl_graph = shacl_graph
+        sb._attr_domains = {}
+        sb._queries = {}
+
+        # Reconstruct _types registry from OWL subclass triples
+        sb._types = {}
+        kind_map = {
+            _KC.Vertex: "vertex",
+            _KC.Edge: "edge",
+            _KC.Face: "face",
+        }
+        for kc_class, kind in kind_map.items():
+            for type_iri in owl_graph.subjects(RDFS.subClassOf, kc_class):
+                # Extract local name from IRI
+                local_name = str(type_iri).replace(sb._base_iri, "")
+                if local_name:
+                    sb._types[local_name] = {"kind": kind}
+
+        return sb
+
+
+ + + +
+ + + + + + + + + + +
+ + +

+ add_vertex_type(name, attributes=None, parent=None, bind=None) + +

+ + +
+ +

Declare a new vertex type (OWL subclass of KC:Vertex + SHACL node shape).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ name + + str + +
+

Class name within the user namespace.

+
+
+ required +
+ attributes + + dict + +
+

Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor, +or dict with "vocab"/"text" key and optional "required" flag).

+
+
+ None +
+ parent + + str + +
+

Name of a registered vertex type to inherit from.

+
+
+ None +
+ bind + + dict + +
+

Mapping of attribute names to fixed string values (sh:hasValue).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
405
+406
+407
+408
+409
+410
+411
+412
+413
+414
+415
+416
+417
+418
+419
+420
+421
+422
+423
+424
+425
+426
+427
+428
+429
+430
+431
+432
+433
+434
+435
+436
+437
+438
+439
+440
+441
+442
+443
+444
+445
+446
+447
+448
+449
+450
+451
+452
+453
+454
+455
+456
+457
+458
+459
+460
+461
+462
+463
+464
+465
+466
+467
+468
+469
def add_vertex_type(
+    self,
+    name: str,
+    attributes: dict[str, VocabDescriptor | TextDescriptor | Any] | None = None,
+    parent: str | None = None,
+    bind: dict[str, str] | None = None,
+) -> "SchemaBuilder":
+    """
+    Declare a new vertex type (OWL subclass of KC:Vertex + SHACL node shape).
+
+    Parameters
+    ----------
+    name : str
+        Class name within the user namespace.
+    attributes : dict, optional
+        Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor,
+        or dict with "vocab"/"text" key and optional "required" flag).
+    parent : str, optional
+        Name of a registered vertex type to inherit from.
+    bind : dict, optional
+        Mapping of attribute names to fixed string values (sh:hasValue).
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if name in self._types:
+        raise SchemaError(f"Type '{name}' is already registered")
+    self._validate_parent(parent, "vertex")
+    attributes = attributes or {}
+    bind = bind or {}
+
+    self._types[name] = {
+        "kind": "vertex",
+        "attributes": dict(attributes),
+        "parent": parent,
+        "bind": dict(bind),
+    }
+
+    # Validate bind against all attributes (own + inherited)
+    if bind:
+        inherited = self._collect_inherited_attributes(name)
+        all_attrs = {**inherited, **attributes}
+        self._validate_bind(bind, all_attrs)
+
+    type_iri = self._ns[name]
+    shape_iri = self._nss[f"{name}Shape"]
+
+    # OWL
+    superclass = self._ns[parent] if parent else _KC.Vertex
+    self._owl_graph.add((type_iri, RDF.type, OWL.Class))
+    self._owl_graph.add((type_iri, RDFS.subClassOf, superclass))
+
+    # SHACL
+    self._shacl_graph.add((shape_iri, RDF.type, _SH.NodeShape))
+    self._shacl_graph.add((shape_iri, _SH.targetClass, type_iri))
+
+    for attr_name, attr_spec in attributes.items():
+        self._dispatch_attr(type_iri, shape_iri, attr_name, attr_spec)
+
+    if bind:
+        self._apply_bind(shape_iri, bind)
+
+    return self
+
+
+
+ +
+ +
+ + +

+ add_edge_type(name, attributes=None, parent=None, bind=None) + +

+ + +
+ +

Declare a new edge type (OWL subclass of KC:Edge + SHACL property shapes).

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ name + + str + +
+

Class name within the user namespace.

+
+
+ required +
+ attributes + + dict + +
+

Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor, +or dict with "vocab"/"text" key and optional "required" flag).

+
+
+ None +
+ parent + + str + +
+

Name of a registered edge type to inherit from.

+
+
+ None +
+ bind + + dict + +
+

Mapping of attribute names to fixed string values (sh:hasValue).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
471
+472
+473
+474
+475
+476
+477
+478
+479
+480
+481
+482
+483
+484
+485
+486
+487
+488
+489
+490
+491
+492
+493
+494
+495
+496
+497
+498
+499
+500
+501
+502
+503
+504
+505
+506
+507
+508
+509
+510
+511
+512
+513
+514
+515
+516
+517
+518
+519
+520
+521
+522
+523
+524
+525
+526
+527
+528
+529
+530
+531
+532
+533
+534
def add_edge_type(
+    self,
+    name: str,
+    attributes: dict[str, VocabDescriptor | TextDescriptor | Any] | None = None,
+    parent: str | None = None,
+    bind: dict[str, str] | None = None,
+) -> "SchemaBuilder":
+    """
+    Declare a new edge type (OWL subclass of KC:Edge + SHACL property shapes).
+
+    Parameters
+    ----------
+    name : str
+        Class name within the user namespace.
+    attributes : dict, optional
+        Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor,
+        or dict with "vocab"/"text" key and optional "required" flag).
+    parent : str, optional
+        Name of a registered edge type to inherit from.
+    bind : dict, optional
+        Mapping of attribute names to fixed string values (sh:hasValue).
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if name in self._types:
+        raise SchemaError(f"Type '{name}' is already registered")
+    self._validate_parent(parent, "edge")
+    attributes = attributes or {}
+    bind = bind or {}
+
+    self._types[name] = {
+        "kind": "edge",
+        "attributes": dict(attributes),
+        "parent": parent,
+        "bind": dict(bind),
+    }
+
+    if bind:
+        inherited = self._collect_inherited_attributes(name)
+        all_attrs = {**inherited, **attributes}
+        self._validate_bind(bind, all_attrs)
+
+    type_iri = self._ns[name]
+    shape_iri = self._nss[f"{name}Shape"]
+
+    # OWL
+    superclass = self._ns[parent] if parent else _KC.Edge
+    self._owl_graph.add((type_iri, RDF.type, OWL.Class))
+    self._owl_graph.add((type_iri, RDFS.subClassOf, superclass))
+
+    # SHACL
+    self._shacl_graph.add((shape_iri, RDF.type, _SH.NodeShape))
+    self._shacl_graph.add((shape_iri, _SH.targetClass, type_iri))
+
+    for attr_name, attr_spec in attributes.items():
+        self._dispatch_attr(type_iri, shape_iri, attr_name, attr_spec)
+
+    if bind:
+        self._apply_bind(shape_iri, bind)
+
+    return self
+
+
+
+ +
+ +
+ + +

+ add_face_type(name, attributes=None, parent=None, bind=None) + +

+ + +
+ +

Declare a new face type (OWL subclass of KC:Face + SHACL property shapes).

+

Attributes with required=False generate sh:minCount 0 constraints.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ name + + str + +
+

Class name within the user namespace.

+
+
+ required +
+ attributes + + dict + +
+

Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor, +or dict with "vocab"/"text" key and optional "required" flag).

+
+
+ None +
+ parent + + str + +
+

Name of a registered face type to inherit from.

+
+
+ None +
+ bind + + dict + +
+

Mapping of attribute names to fixed string values (sh:hasValue).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
536
+537
+538
+539
+540
+541
+542
+543
+544
+545
+546
+547
+548
+549
+550
+551
+552
+553
+554
+555
+556
+557
+558
+559
+560
+561
+562
+563
+564
+565
+566
+567
+568
+569
+570
+571
+572
+573
+574
+575
+576
+577
+578
+579
+580
+581
+582
+583
+584
+585
+586
+587
+588
+589
+590
+591
+592
+593
+594
+595
+596
+597
+598
+599
+600
+601
def add_face_type(
+    self,
+    name: str,
+    attributes: dict[str, Any] | None = None,
+    parent: str | None = None,
+    bind: dict[str, str] | None = None,
+) -> "SchemaBuilder":
+    """
+    Declare a new face type (OWL subclass of KC:Face + SHACL property shapes).
+
+    Attributes with ``required=False`` generate sh:minCount 0 constraints.
+
+    Parameters
+    ----------
+    name : str
+        Class name within the user namespace.
+    attributes : dict, optional
+        Mapping of attribute name to descriptor (VocabDescriptor, TextDescriptor,
+        or dict with "vocab"/"text" key and optional "required" flag).
+    parent : str, optional
+        Name of a registered face type to inherit from.
+    bind : dict, optional
+        Mapping of attribute names to fixed string values (sh:hasValue).
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if name in self._types:
+        raise SchemaError(f"Type '{name}' is already registered")
+    self._validate_parent(parent, "face")
+    attributes = attributes or {}
+    bind = bind or {}
+
+    self._types[name] = {
+        "kind": "face",
+        "attributes": dict(attributes),
+        "parent": parent,
+        "bind": dict(bind),
+    }
+
+    if bind:
+        inherited = self._collect_inherited_attributes(name)
+        all_attrs = {**inherited, **attributes}
+        self._validate_bind(bind, all_attrs)
+
+    type_iri = self._ns[name]
+    shape_iri = self._nss[f"{name}Shape"]
+
+    # OWL
+    superclass = self._ns[parent] if parent else _KC.Face
+    self._owl_graph.add((type_iri, RDF.type, OWL.Class))
+    self._owl_graph.add((type_iri, RDFS.subClassOf, superclass))
+
+    # SHACL
+    self._shacl_graph.add((shape_iri, RDF.type, _SH.NodeShape))
+    self._shacl_graph.add((shape_iri, _SH.targetClass, type_iri))
+
+    for attr_name, attr_spec in attributes.items():
+        self._dispatch_attr(type_iri, shape_iri, attr_name, attr_spec)
+
+    if bind:
+        self._apply_bind(shape_iri, bind)
+
+    return self
+
+
+
+ +
+ +
+ + +

+ describe_type(name) + +

+ + +
+ +

Inspect a registered type's attributes, parent, and bindings.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ name + + str + +
+

The registered type name.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict + +
+

Keys: name, kind, parent, own_attributes, inherited_attributes, +all_attributes, bound.

+
+
+ + +
+ Source code in knowledgecomplex/schema.py +
603
+604
+605
+606
+607
+608
+609
+610
+611
+612
+613
+614
+615
+616
+617
+618
+619
+620
+621
+622
+623
+624
+625
+626
+627
+628
+629
+630
+631
+632
+633
+634
+635
+636
+637
+638
+639
+640
+641
+642
+643
+644
+645
+646
def describe_type(self, name: str) -> dict:
+    """
+    Inspect a registered type's attributes, parent, and bindings.
+
+    Parameters
+    ----------
+    name : str
+        The registered type name.
+
+    Returns
+    -------
+    dict
+        Keys: name, kind, parent, own_attributes, inherited_attributes,
+        all_attributes, bound.
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if name not in self._types:
+        raise SchemaError(f"Type '{name}' is not registered")
+
+    info = self._types[name]
+    own_attrs = dict(info.get("attributes", {}))
+    inherited_attrs = self._collect_inherited_attributes(name)
+    # Collect bindings from ancestors
+    inherited_bind = {}
+    current = info.get("parent")
+    while current is not None:
+        parent_bind = self._types[current].get("bind", {})
+        for k, v in parent_bind.items():
+            if k not in inherited_bind:
+                inherited_bind[k] = v
+        current = self._types[current].get("parent")
+    own_bind = dict(info.get("bind", {}))
+    all_bind = {**inherited_bind, **own_bind}
+
+    all_attrs = {**inherited_attrs, **own_attrs}
+    return {
+        "name": name,
+        "kind": info["kind"],
+        "parent": info.get("parent"),
+        "own_attributes": own_attrs,
+        "inherited_attributes": inherited_attrs,
+        "all_attributes": all_attrs,
+        "bound": all_bind,
+    }
+
+
+
+ +
+ +
+ + +

+ type_names(kind=None) + +

+ + +
+ +

List registered type names, optionally filtered by kind.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kind + + str + +
+

Filter by "vertex", "edge", or "face".

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ list[str] + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
648
+649
+650
+651
+652
+653
+654
+655
+656
+657
+658
+659
+660
+661
+662
+663
def type_names(self, kind: str | None = None) -> list[str]:
+    """
+    List registered type names, optionally filtered by kind.
+
+    Parameters
+    ----------
+    kind : str, optional
+        Filter by "vertex", "edge", or "face".
+
+    Returns
+    -------
+    list[str]
+    """
+    if kind is None:
+        return list(self._types.keys())
+    return [n for n, info in self._types.items() if info["kind"] == kind]
+
+
+
+ +
+ +
+ + +

+ promote_to_attribute(type, attribute, vocab=None, text=None, required=True) + +

+ + +
+ +

Atomically promote a discovered pattern to a first-class typed attribute.

+

Updates both OWL property definition and SHACL shape constraint for the named type. +After calling this, dump_owl() and dump_shacl() both reflect the updated attribute.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type + + str + +
+

The type name (must have been registered via add_*_type).

+
+
+ required +
+ attribute + + str + +
+

Attribute name to add or upgrade.

+
+
+ required +
+ vocab + + VocabDescriptor + +
+

Controlled vocabulary for the attribute.

+
+
+ None +
+ text + + TextDescriptor + +
+

Free-text descriptor for the attribute.

+
+
+ None +
+ required + + bool + +
+

If True, generates sh:minCount 1. Overrides the descriptor's own required flag.

+
+
+ True +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
665
+666
+667
+668
+669
+670
+671
+672
+673
+674
+675
+676
+677
+678
+679
+680
+681
+682
+683
+684
+685
+686
+687
+688
+689
+690
+691
+692
+693
+694
+695
+696
+697
+698
+699
+700
+701
+702
+703
+704
+705
+706
+707
+708
+709
+710
+711
+712
+713
+714
+715
+716
+717
+718
+719
+720
+721
+722
+723
+724
+725
+726
+727
+728
+729
+730
+731
+732
+733
+734
+735
+736
+737
+738
+739
+740
+741
+742
def promote_to_attribute(
+    self,
+    type: str,
+    attribute: str,
+    vocab: VocabDescriptor | None = None,
+    text: TextDescriptor | None = None,
+    required: bool = True,
+) -> "SchemaBuilder":
+    """
+    Atomically promote a discovered pattern to a first-class typed attribute.
+
+    Updates both OWL property definition and SHACL shape constraint for the named type.
+    After calling this, dump_owl() and dump_shacl() both reflect the updated attribute.
+
+    Parameters
+    ----------
+    type : str
+        The type name (must have been registered via add_*_type).
+    attribute : str
+        Attribute name to add or upgrade.
+    vocab : VocabDescriptor, optional
+        Controlled vocabulary for the attribute.
+    text : TextDescriptor, optional
+        Free-text descriptor for the attribute.
+    required : bool
+        If True, generates sh:minCount 1. Overrides the descriptor's own required flag.
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if type not in self._types:
+        raise SchemaError(f"Type '{type}' is not registered")
+    if vocab is None and text is None:
+        raise SchemaError("promote_to_attribute requires either vocab or text descriptor")
+
+    type_iri = self._ns[type]
+    shape_iri = self._nss[f"{type}Shape"]
+    attr_iri = self._ns[attribute]
+
+    # Remove existing OWL triples for this attribute (if upgrading)
+    for p in (RDFS.domain, RDFS.range, RDFS.comment):
+        self._owl_graph.remove((attr_iri, p, None))
+    self._owl_graph.remove((attr_iri, RDF.type, OWL.DatatypeProperty))
+
+    # Remove existing SHACL property shape for this attribute (if upgrading)
+    for prop_node in list(self._shacl_graph.objects(shape_iri, _SH.property)):
+        if (prop_node, _SH.path, attr_iri) in self._shacl_graph:
+            # Remove the sh:in list
+            list_head = self._shacl_graph.value(prop_node, _SH["in"])
+            if list_head is not None:
+                Collection(self._shacl_graph, list_head).clear()
+                self._shacl_graph.remove((prop_node, _SH["in"], list_head))
+            # Remove all triples about this property shape
+            for p, o in list(self._shacl_graph.predicate_objects(prop_node)):
+                self._shacl_graph.remove((prop_node, p, o))
+            self._shacl_graph.remove((shape_iri, _SH.property, prop_node))
+
+    # Re-add with new settings
+    if vocab is not None:
+        self._add_attr_to_graphs(type_iri, shape_iri, attribute, vocab, required=required)
+    else:
+        # Override the text descriptor's required flag with the promote call's value
+        effective = TextDescriptor(required=required, multiple=text.multiple)
+        self._add_attr_to_graphs(type_iri, shape_iri, attribute, effective)
+
+    # Update type registry
+    if "attributes" not in self._types[type]:
+        self._types[type]["attributes"] = {}
+    if vocab is not None:
+        self._types[type]["attributes"][attribute] = {
+            "vocab": vocab, "required": required
+        }
+    else:
+        self._types[type]["attributes"][attribute] = text
+
+    return self
+
+
+
+ +
+ +
+ + +

+ add_sparql_constraint(type_name, sparql, message) + +

+ + +
+ +

Attach a sh:sparql constraint to the SHACL shape for type_name.

+

The sparql argument must be a SPARQL SELECT query that returns $this +for each violating focus node. pyshacl evaluates this and reports the +message for each returned row.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type_name + + str + +
+

The type name (must have been registered via add_*_type).

+
+
+ required +
+ sparql + + str + +
+

SPARQL SELECT query. Must bind $this to each violating node.

+
+
+ required +
+ message + + str + +
+

Human-readable message reported when the constraint is violated.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
744
+745
+746
+747
+748
+749
+750
+751
+752
+753
+754
+755
+756
+757
+758
+759
+760
+761
+762
+763
+764
+765
+766
+767
+768
+769
+770
+771
+772
+773
+774
+775
+776
+777
+778
+779
def add_sparql_constraint(
+    self,
+    type_name: str,
+    sparql: str,
+    message: str,
+) -> "SchemaBuilder":
+    """
+    Attach a sh:sparql constraint to the SHACL shape for type_name.
+
+    The sparql argument must be a SPARQL SELECT query that returns $this
+    for each violating focus node. pyshacl evaluates this and reports the
+    message for each returned row.
+
+    Parameters
+    ----------
+    type_name : str
+        The type name (must have been registered via add_*_type).
+    sparql : str
+        SPARQL SELECT query. Must bind $this to each violating node.
+    message : str
+        Human-readable message reported when the constraint is violated.
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if type_name not in self._types:
+        raise SchemaError(f"Type '{type_name}' is not registered")
+    shape_iri = self._nss[f"{type_name}Shape"]
+    constraint = BNode()
+    self._shacl_graph.add((shape_iri, _SH.sparql, constraint))
+    self._shacl_graph.add((constraint, RDF.type, _SH.SPARQLConstraint))
+    self._shacl_graph.add((constraint, _SH.select, Literal(sparql)))
+    self._shacl_graph.add((constraint, _SH.message, Literal(message)))
+    return self
+
+
+
+ +
+ +
+ + +

+ add_query(name, operation, *, target_type=None) + +

+ + +
+ +

Register a named topological query template on this schema.

+

The query is generated from a topological operation and optional type +filter, then stored internally. It is exported as a .sparql file +by :meth:export and automatically loaded by +:class:~knowledgecomplex.graph.KnowledgeComplex at runtime.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ name + + str + +
+

Query template name (becomes the filename stem, e.g. "spec_coboundary" +exports as queries/spec_coboundary.sparql).

+
+
+ required +
+ operation + + str + +
+

Topological operation: "boundary", "coboundary", "star", +"closure", "link", or "degree".

+
+
+ required +
+ target_type + + str + +
+

Filter results to this type (including subtypes via OWL class hierarchy).

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Example +
+
+
+

sb.add_query("spec_coboundary", "coboundary", target_type="verification")

+
+
+
+
+ +
+ Source code in knowledgecomplex/schema.py +
871
+872
+873
+874
+875
+876
+877
+878
+879
+880
+881
+882
+883
+884
+885
+886
+887
+888
+889
+890
+891
+892
+893
+894
+895
+896
+897
+898
+899
+900
+901
+902
+903
+904
+905
+906
+907
+908
def add_query(
+    self,
+    name: str,
+    operation: str,
+    *,
+    target_type: str | None = None,
+) -> "SchemaBuilder":
+    """Register a named topological query template on this schema.
+
+    The query is generated from a topological operation and optional type
+    filter, then stored internally. It is exported as a ``.sparql`` file
+    by :meth:`export` and automatically loaded by
+    :class:`~knowledgecomplex.graph.KnowledgeComplex` at runtime.
+
+    Parameters
+    ----------
+    name : str
+        Query template name (becomes the filename stem, e.g. ``"spec_coboundary"``
+        exports as ``queries/spec_coboundary.sparql``).
+    operation : str
+        Topological operation: ``"boundary"``, ``"coboundary"``, ``"star"``,
+        ``"closure"``, ``"link"``, or ``"degree"``.
+    target_type : str, optional
+        Filter results to this type (including subtypes via OWL class hierarchy).
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+
+    Example
+    -------
+    >>> sb.add_query("spec_coboundary", "coboundary", target_type="verification")
+    """
+    sparql = self._build_topo_sparql(
+        operation, simplex_iri="{simplex}", target_type=target_type,
+    )
+    self._queries[name] = sparql
+    return self
+
+
+
+ +
+ +
+ + +

+ add_topological_constraint(type_name, operation, *, target_type=None, predicate='min_count', min_count=1, max_count=None, message=None) + +

+ + +
+ +

Escalate a topological query to a SHACL constraint.

+

Generates a sh:sparql constraint that, for each focus node of +type_name, evaluates a topological operation and checks a cardinality +predicate. Delegates to :meth:add_sparql_constraint.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ type_name + + str + +
+

The type to constrain (must be registered).

+
+
+ required +
+ operation + + str + +
+

Topological operation: "boundary", "coboundary", "star", +"closure", "link", or "degree".

+
+
+ required +
+ target_type + + str + +
+

Filter the topological result to this type.

+
+
+ None +
+ predicate + + str + +
+

"min_count" — at least min_count results (default). +"max_count" — at most max_count results. +"exact_count" — exactly min_count results.

+
+
+ 'min_count' +
+ min_count + + int + +
+

Minimum count (used by "min_count" and "exact_count").

+
+
+ 1 +
+ max_count + + int + +
+

Maximum count (used by "max_count").

+
+
+ None +
+ message + + str + +
+

Custom violation message. Auto-generated if not provided.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder (self, for chaining) + +
+ +
+
+ + +
+ Example +
+
+
+

sb.add_topological_constraint( +... "spec", "coboundary", +... target_type="verification", +... predicate="min_count", min_count=1, +... message="Every spec must have at least one verification edge", +... )

+
+
+
+
+ +
+ Source code in knowledgecomplex/schema.py +
 910
+ 911
+ 912
+ 913
+ 914
+ 915
+ 916
+ 917
+ 918
+ 919
+ 920
+ 921
+ 922
+ 923
+ 924
+ 925
+ 926
+ 927
+ 928
+ 929
+ 930
+ 931
+ 932
+ 933
+ 934
+ 935
+ 936
+ 937
+ 938
+ 939
+ 940
+ 941
+ 942
+ 943
+ 944
+ 945
+ 946
+ 947
+ 948
+ 949
+ 950
+ 951
+ 952
+ 953
+ 954
+ 955
+ 956
+ 957
+ 958
+ 959
+ 960
+ 961
+ 962
+ 963
+ 964
+ 965
+ 966
+ 967
+ 968
+ 969
+ 970
+ 971
+ 972
+ 973
+ 974
+ 975
+ 976
+ 977
+ 978
+ 979
+ 980
+ 981
+ 982
+ 983
+ 984
+ 985
+ 986
+ 987
+ 988
+ 989
+ 990
+ 991
+ 992
+ 993
+ 994
+ 995
+ 996
+ 997
+ 998
+ 999
+1000
+1001
+1002
+1003
+1004
+1005
+1006
+1007
+1008
+1009
+1010
+1011
+1012
+1013
+1014
+1015
+1016
+1017
+1018
+1019
+1020
def add_topological_constraint(
+    self,
+    type_name: str,
+    operation: str,
+    *,
+    target_type: str | None = None,
+    predicate: str = "min_count",
+    min_count: int = 1,
+    max_count: int | None = None,
+    message: str | None = None,
+) -> "SchemaBuilder":
+    """Escalate a topological query to a SHACL constraint.
+
+    Generates a ``sh:sparql`` constraint that, for each focus node of
+    *type_name*, evaluates a topological operation and checks a cardinality
+    predicate. Delegates to :meth:`add_sparql_constraint`.
+
+    Parameters
+    ----------
+    type_name : str
+        The type to constrain (must be registered).
+    operation : str
+        Topological operation: ``"boundary"``, ``"coboundary"``, ``"star"``,
+        ``"closure"``, ``"link"``, or ``"degree"``.
+    target_type : str, optional
+        Filter the topological result to this type.
+    predicate : str
+        ``"min_count"`` — at least *min_count* results (default).
+        ``"max_count"`` — at most *max_count* results.
+        ``"exact_count"`` — exactly *min_count* results.
+    min_count : int
+        Minimum count (used by ``"min_count"`` and ``"exact_count"``).
+    max_count : int, optional
+        Maximum count (used by ``"max_count"``).
+    message : str, optional
+        Custom violation message. Auto-generated if not provided.
+
+    Returns
+    -------
+    SchemaBuilder (self, for chaining)
+
+    Example
+    -------
+    >>> sb.add_topological_constraint(
+    ...     "spec", "coboundary",
+    ...     target_type="verification",
+    ...     predicate="min_count", min_count=1,
+    ...     message="Every spec must have at least one verification edge",
+    ... )
+    """
+    from knowledgecomplex.exceptions import SchemaError
+    if type_name not in self._types:
+        raise SchemaError(f"Type '{type_name}' is not registered")
+    if operation not in self._TOPO_PATTERNS:
+        raise SchemaError(
+            f"Unknown topological operation '{operation}'. "
+            f"Valid: {sorted(self._TOPO_PATTERNS)}"
+        )
+
+    pattern_tmpl, result_var = self._TOPO_PATTERNS[operation]
+
+    if target_type is not None:
+        if target_type not in self._types:
+            raise SchemaError(f"Type '{target_type}' is not registered")
+        type_iri = self._ns[target_type]
+        tf = f"?{result_var} a/rdfs:subClassOf* <{type_iri}> ."
+    else:
+        tf = ""
+
+    pattern = (
+        pattern_tmpl
+        .replace("{simplex_iri}", "$this")
+        .replace("{type_filter}", tf)
+    )
+
+    # Build the HAVING clause based on predicate
+    if predicate == "min_count":
+        having = f"HAVING (COUNT(DISTINCT ?{result_var}) < {min_count})"
+    elif predicate == "max_count":
+        if max_count is None:
+            raise SchemaError("max_count predicate requires max_count parameter")
+        having = f"HAVING (COUNT(DISTINCT ?{result_var}) > {max_count})"
+    elif predicate == "exact_count":
+        having = f"HAVING (COUNT(DISTINCT ?{result_var}) != {min_count})"
+    else:
+        raise SchemaError(
+            f"Unknown predicate '{predicate}'. "
+            f"Valid: min_count, max_count, exact_count"
+        )
+
+    # Wrap pattern in OPTIONAL so GROUP BY produces a row even when
+    # there are zero matches (otherwise HAVING never fires for empty results)
+    sparql = (
+        f"PREFIX kc: <https://example.org/kc#>\n"
+        f"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
+        f"SELECT $this WHERE {{\n"
+        f"    OPTIONAL {{ {pattern} }}\n"
+        f"}}\n"
+        f"GROUP BY $this\n"
+        f"{having}\n"
+    )
+
+    if message is None:
+        target_desc = f" of type '{target_type}'" if target_type else ""
+        message = (
+            f"Topological constraint violated: {operation}{target_desc} "
+            f"on '{type_name}' failed {predicate} check "
+            f"(min={min_count}, max={max_count})"
+        )
+
+    return self.add_sparql_constraint(type_name, sparql, message)
+
+
+
+ +
+ +
+ + +

+ dump_owl() + +

+ + +
+ +

Return merged OWL graph (core + user schema) as a Turtle string.

+ + +
+ Source code in knowledgecomplex/schema.py +
1022
+1023
+1024
def dump_owl(self) -> str:
+    """Return merged OWL graph (core + user schema) as a Turtle string."""
+    return self._owl_graph.serialize(format="turtle")
+
+
+
+ +
+ +
+ + +

+ dump_shacl() + +

+ + +
+ +

Return merged SHACL graph (core shapes + user shapes) as a Turtle string.

+ + +
+ Source code in knowledgecomplex/schema.py +
1026
+1027
+1028
def dump_shacl(self) -> str:
+    """Return merged SHACL graph (core shapes + user shapes) as a Turtle string."""
+    return self._shacl_graph.serialize(format="turtle")
+
+
+
+ +
+ +
+ + +

+ export(path, query_dirs=None) + +

+ + +
+ +

Export the schema to a directory as standard semantic web files.

+

Writes ontology.ttl (OWL) and shapes.ttl (SHACL). If query_dirs are +provided, copies all .sparql files into a queries/ subdirectory.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ path + + str | Path + +
+

Target directory. Created if it does not exist.

+
+
+ required +
+ query_dirs + + list[Path] + +
+

Directories containing .sparql query templates to include.

+
+
+ None +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Path + +
+

The export directory.

+
+
+ + +
+ Source code in knowledgecomplex/schema.py +
1030
+1031
+1032
+1033
+1034
+1035
+1036
+1037
+1038
+1039
+1040
+1041
+1042
+1043
+1044
+1045
+1046
+1047
+1048
+1049
+1050
+1051
+1052
+1053
+1054
+1055
+1056
+1057
+1058
+1059
+1060
+1061
+1062
+1063
+1064
+1065
+1066
+1067
def export(
+    self,
+    path: str | Path,
+    query_dirs: list[Path] | None = None,
+) -> Path:
+    """
+    Export the schema to a directory as standard semantic web files.
+
+    Writes ontology.ttl (OWL) and shapes.ttl (SHACL). If query_dirs are
+    provided, copies all .sparql files into a queries/ subdirectory.
+
+    Parameters
+    ----------
+    path : str | Path
+        Target directory. Created if it does not exist.
+    query_dirs : list[Path], optional
+        Directories containing .sparql query templates to include.
+
+    Returns
+    -------
+    Path
+        The export directory.
+    """
+    p = Path(path)
+    p.mkdir(parents=True, exist_ok=True)
+    (p / "ontology.ttl").write_text(self.dump_owl())
+    (p / "shapes.ttl").write_text(self.dump_shacl())
+    # Write schema-generated query templates and copy external query dirs
+    if self._queries or query_dirs:
+        qdir = p / "queries"
+        qdir.mkdir(exist_ok=True)
+        for name, sparql_text in self._queries.items():
+            (qdir / f"{name}.sparql").write_text(sparql_text)
+        if query_dirs:
+            for d in query_dirs:
+                for sparql_file in d.glob("*.sparql"):
+                    shutil.copy2(sparql_file, qdir / sparql_file.name)
+    return p
+
+
+
+ +
+ +
+ + +

+ load(path) + + + classmethod + + +

+ + +
+ +

Load a schema from a directory containing ontology.ttl and shapes.ttl.

+

Reconstructs the type registry by inspecting OWL subclass triples.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ path + + str | Path + +
+

Directory containing ontology.ttl and shapes.ttl.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ SchemaBuilder + +
+ +
+
+ + +
+ Source code in knowledgecomplex/schema.py +
1069
+1070
+1071
+1072
+1073
+1074
+1075
+1076
+1077
+1078
+1079
+1080
+1081
+1082
+1083
+1084
+1085
+1086
+1087
+1088
+1089
+1090
+1091
+1092
+1093
+1094
+1095
+1096
+1097
+1098
+1099
+1100
+1101
+1102
+1103
+1104
+1105
+1106
+1107
+1108
+1109
+1110
+1111
+1112
+1113
+1114
+1115
+1116
+1117
+1118
+1119
+1120
+1121
+1122
+1123
+1124
+1125
+1126
+1127
+1128
+1129
+1130
+1131
+1132
+1133
+1134
+1135
+1136
+1137
+1138
+1139
+1140
+1141
+1142
@classmethod
+def load(cls, path: str | Path) -> "SchemaBuilder":
+    """
+    Load a schema from a directory containing ontology.ttl and shapes.ttl.
+
+    Reconstructs the type registry by inspecting OWL subclass triples.
+
+    Parameters
+    ----------
+    path : str | Path
+        Directory containing ontology.ttl and shapes.ttl.
+
+    Returns
+    -------
+    SchemaBuilder
+    """
+    p = Path(path)
+
+    owl_graph = Graph()
+    owl_graph.parse(str(p / "ontology.ttl"), format="turtle")
+
+    shacl_graph = Graph()
+    shacl_graph.parse(str(p / "shapes.ttl"), format="turtle")
+
+    # Discover model namespace: find a namespace binding that is not
+    # one of the well-known prefixes (kc, kcs, sh, owl, rdfs, rdf, xsd)
+    well_known = {
+        str(_KC), str(_KCS), str(_SH),
+        str(OWL), str(RDFS), str(RDF), str(XSD),
+    }
+    namespace = None
+    ns_obj = None
+    for prefix, uri in owl_graph.namespaces():
+        uri_str = str(uri)
+        if prefix and uri_str not in well_known and uri_str.startswith("https://example.org/"):
+            # Skip shape namespaces (ending with /shape#)
+            if "/shape#" in uri_str:
+                continue
+            namespace = prefix
+            ns_obj = Namespace(uri_str)
+            break
+
+    if namespace is None:
+        raise ValueError(
+            f"Could not detect model namespace in {p / 'ontology.ttl'}. "
+            "Expected a namespace binding like 'aaa: <https://example.org/aaa#>'."
+        )
+
+    # Build instance without calling __init__
+    sb = object.__new__(cls)
+    sb._namespace = namespace
+    sb._base_iri = str(ns_obj)
+    sb._ns = ns_obj
+    sb._nss = Namespace(f"https://example.org/{namespace}/shape#")
+    sb._owl_graph = owl_graph
+    sb._shacl_graph = shacl_graph
+    sb._attr_domains = {}
+    sb._queries = {}
+
+    # Reconstruct _types registry from OWL subclass triples
+    sb._types = {}
+    kind_map = {
+        _KC.Vertex: "vertex",
+        _KC.Edge: "edge",
+        _KC.Face: "face",
+    }
+    for kc_class, kind in kind_map.items():
+        for type_iri in owl_graph.subjects(RDFS.subClassOf, kc_class):
+            # Extract local name from IRI
+            local_name = str(type_iri).replace(sb._base_iri, "")
+            if local_name:
+                sb._types[local_name] = {"kind": kind}
+
+    return sb
+
+
+
+ +
+ + + +
+ +
+ +
+ + +
+ + +

+ vocab(*values, multiple=False) + +

+ + +
+ +

Declare a controlled vocabulary for an attribute.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ *values + + str + +
+

The allowed string values.

+
+
+ () +
+ multiple + + bool + +
+

If True, allows multiple values (no sh:maxCount). +If False (default), generates sh:maxCount 1.

+
+
+ False +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ VocabDescriptor + +
+ +
+
+ + +
+ Example +
+
+
+

vocab("adjacent", "opposite") +vocab('adjacent', 'opposite') +vocab("a", "b", "c", multiple=True) +vocab('a', 'b', 'c', multiple=True)

+
+
+
+
+ +
+ Source code in knowledgecomplex/schema.py +
 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
def vocab(*values: str, multiple: bool = False) -> VocabDescriptor:
+    """
+    Declare a controlled vocabulary for an attribute.
+
+    Parameters
+    ----------
+    *values : str
+        The allowed string values.
+    multiple : bool
+        If True, allows multiple values (no sh:maxCount).
+        If False (default), generates sh:maxCount 1.
+
+    Returns
+    -------
+    VocabDescriptor
+
+    Example
+    -------
+    >>> vocab("adjacent", "opposite")
+    vocab('adjacent', 'opposite')
+    >>> vocab("a", "b", "c", multiple=True)
+    vocab('a', 'b', 'c', multiple=True)
+    """
+    if not values:
+        raise ValueError("vocab() requires at least one value")
+    return VocabDescriptor(values=tuple(values), multiple=multiple)
+
+
+
+ +
+ +
+ + +

+ text(*, required=True, multiple=False) + +

+ + +
+ +

Declare a free-text string attribute.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ required + + bool + +
+

If True (default), generates sh:minCount 1.

+
+
+ True +
+ multiple + + bool + +
+

If True, allows multiple values (no sh:maxCount). +If False (default), generates sh:maxCount 1.

+
+
+ False +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ TextDescriptor + +
+ +
+
+ + +
+ Example +
+
+
+

text() +text() +text(required=False, multiple=True) +text(required=False, multiple=True)

+
+
+
+
+ +
+ Source code in knowledgecomplex/schema.py +
139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
def text(*, required: bool = True, multiple: bool = False) -> TextDescriptor:
+    """
+    Declare a free-text string attribute.
+
+    Parameters
+    ----------
+    required : bool
+        If True (default), generates sh:minCount 1.
+    multiple : bool
+        If True, allows multiple values (no sh:maxCount).
+        If False (default), generates sh:maxCount 1.
+
+    Returns
+    -------
+    TextDescriptor
+
+    Example
+    -------
+    >>> text()
+    text()
+    >>> text(required=False, multiple=True)
+    text(required=False, multiple=True)
+    """
+    return TextDescriptor(required=required, multiple=multiple)
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/api/viz/index.html b/site/api/viz/index.html new file mode 100644 index 0000000..29e03b7 --- /dev/null +++ b/site/api/viz/index.html @@ -0,0 +1,3275 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + Visualization - knowledgecomplex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + Skip to content + + +
+
+ +
+ + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + + + + + +
+ +
+ + + + + + + + +

Visualization

+ +
+ + + + +
+ +

knowledgecomplex.viz — NetworkX export and visualization helpers.

+

Two complementary views of a knowledge complex are provided:

+

Hasse diagram (plot_hasse, plot_hasse_star, plot_hasse_skeleton) + Every element (vertex, edge, face) becomes a graph node. Directed edges + represent the boundary operator, pointing from each element to its boundary + elements (higher dimension → lower dimension). Faces have out-degree 3 + and in-degree 0; edges have out-degree 2; vertices have out-degree 0. + Nodes are colored by type and sized by dimension.

+

Geometric realization (plot_geometric, plot_geometric_interactive) + Only KC vertices become points in 3D space. KC edges become line segments + connecting their two boundary vertices. KC faces become filled, + semi-transparent triangular patches spanning their three boundary vertices. + This is the classical geometric realization of the abstract simplicial + complex — the view a topologist would draw.

+

to_networkx exports a DiGraph that backs the Hasse plots. +verify_networkx validates that a DiGraph satisfies simplicial complex +cardinality and closure invariants at runtime.

+

Requires optional dependencies::

+
pip install knowledgecomplex[viz]                # matplotlib + networkx
+pip install knowledgecomplex[viz-interactive]     # + plotly for interactive 3D
+
+ + + + + + + + + + +
+ + + + + + + + + + +
+ + +

+ to_networkx(kc) + +

+ + +
+ +

Convert a KnowledgeComplex to a directed networkx DiGraph.

+

Every element (vertex, edge, face) becomes a node. Directed edges +represent the boundary operator kc:boundedBy, pointing from each +element to its boundary elements (higher dimension → lower dimension).

+

In the resulting DiGraph:

+
    +
  • Face nodes have out-degree 3 (→ 3 boundary edges) and in-degree 0.
  • +
  • Edge nodes have out-degree 2 (→ 2 boundary vertices).
  • +
  • Vertex nodes have out-degree 0 (empty boundary).
  • +
+

Each node carries attributes:

+
    +
  • type: element type name (e.g. "Node", "Link")
  • +
  • kind: "vertex", "edge", or "face"
  • +
  • dim: 0, 1, or 2
  • +
  • uri: file URI if present, else None
  • +
  • All model-namespace attributes from the element
  • +
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ DiGraph + +
+ +
+
+ + +
+ Source code in knowledgecomplex/viz.py +
 78
+ 79
+ 80
+ 81
+ 82
+ 83
+ 84
+ 85
+ 86
+ 87
+ 88
+ 89
+ 90
+ 91
+ 92
+ 93
+ 94
+ 95
+ 96
+ 97
+ 98
+ 99
+100
+101
+102
+103
+104
+105
+106
+107
+108
+109
+110
+111
+112
+113
+114
+115
+116
+117
+118
+119
+120
+121
+122
+123
+124
+125
+126
+127
+128
def to_networkx(kc: "KnowledgeComplex") -> Any:
+    """Convert a KnowledgeComplex to a directed networkx DiGraph.
+
+    Every element (vertex, edge, face) becomes a node.  Directed edges
+    represent the boundary operator ``kc:boundedBy``, pointing **from each
+    element to its boundary elements** (higher dimension → lower dimension).
+
+    In the resulting DiGraph:
+
+    - **Face** nodes have out-degree 3 (→ 3 boundary edges) and in-degree 0.
+    - **Edge** nodes have out-degree 2 (→ 2 boundary vertices).
+    - **Vertex** nodes have out-degree 0 (empty boundary).
+
+    Each node carries attributes:
+
+    - ``type``: element type name (e.g. ``"Node"``, ``"Link"``)
+    - ``kind``: ``"vertex"``, ``"edge"``, or ``"face"``
+    - ``dim``: 0, 1, or 2
+    - ``uri``: file URI if present, else ``None``
+    - All model-namespace attributes from the element
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    networkx.DiGraph
+    """
+    nx = _require_nx()
+    G = nx.DiGraph(name=kc._schema._namespace)
+
+    for elem_id in kc.element_ids():
+        elem = kc.element(elem_id)
+        type_name = elem.type
+        kind = kc._schema._types.get(type_name, {}).get("kind", "vertex")
+        attrs = {
+            "type": type_name,
+            "kind": kind,
+            "dim": _DIM_BY_KIND.get(kind, 0),
+            "uri": elem.uri,
+            **elem.attrs,
+        }
+        G.add_node(elem_id, **attrs)
+
+    # Directed boundary edges: element → boundary element (high dim → low dim)
+    for elem_id in kc.element_ids():
+        for boundary_id in kc.boundary(elem_id):
+            G.add_edge(elem_id, boundary_id)
+
+    return G
+
+
+
+ +
+ +
+ + +

+ verify_networkx(G) + +

+ + +
+ +

Validate that a DiGraph satisfies simplicial complex invariants.

+

Checks cardinality constraints and boundary closure:

+
    +
  • Every node has kind and dim attributes.
  • +
  • Vertices (dim=0): out-degree = 0.
  • +
  • Edges (dim=1): out-degree = 2, both targets are vertices (dim=0).
  • +
  • Faces (dim=2): out-degree = 3, all targets are edges (dim=1).
  • +
  • Closed-triangle: for each face, the 3 boundary edges share exactly + 3 distinct vertices (forming a closed triangle, not an open fan).
  • +
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ G + + DiGraph + +
+

A DiGraph produced by :func:to_networkx.

+
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ bool + +
+

True if all invariants hold.

+
+
+ + +

Raises:

+ + + + + + + + + + + + + + + + + +
TypeDescription
+ ValueError + +
+

On the first invariant violation, with a descriptive message.

+
+
+ TypeError + +
+

If G is not a DiGraph.

+
+
+ + +
+ Source code in knowledgecomplex/viz.py +
134
+135
+136
+137
+138
+139
+140
+141
+142
+143
+144
+145
+146
+147
+148
+149
+150
+151
+152
+153
+154
+155
+156
+157
+158
+159
+160
+161
+162
+163
+164
+165
+166
+167
+168
+169
+170
+171
+172
+173
+174
+175
+176
+177
+178
+179
+180
+181
+182
+183
+184
+185
+186
+187
+188
+189
+190
+191
+192
+193
+194
+195
+196
+197
+198
+199
+200
+201
+202
+203
+204
+205
+206
+207
+208
+209
+210
+211
+212
+213
+214
+215
+216
def verify_networkx(G: Any) -> bool:
+    """Validate that a DiGraph satisfies simplicial complex invariants.
+
+    Checks cardinality constraints and boundary closure:
+
+    - Every node has ``kind`` and ``dim`` attributes.
+    - **Vertices** (dim=0): out-degree = 0.
+    - **Edges** (dim=1): out-degree = 2, both targets are vertices (dim=0).
+    - **Faces** (dim=2): out-degree = 3, all targets are edges (dim=1).
+    - **Closed-triangle**: for each face, the 3 boundary edges share exactly
+      3 distinct vertices (forming a closed triangle, not an open fan).
+
+    Parameters
+    ----------
+    G : networkx.DiGraph
+        A DiGraph produced by :func:`to_networkx`.
+
+    Returns
+    -------
+    bool
+        ``True`` if all invariants hold.
+
+    Raises
+    ------
+    ValueError
+        On the first invariant violation, with a descriptive message.
+    TypeError
+        If *G* is not a ``DiGraph``.
+    """
+    nx = _require_nx()
+    if not isinstance(G, nx.DiGraph):
+        raise TypeError(f"Expected nx.DiGraph, got {type(G).__name__}")
+
+    for node in G.nodes:
+        data = G.nodes[node]
+        if "kind" not in data or "dim" not in data:
+            raise ValueError(f"Node '{node}' missing 'kind' or 'dim' attribute")
+
+        dim = data["dim"]
+        out_deg = G.out_degree(node)
+        successors = list(G.successors(node))
+
+        if dim == 0:  # vertex
+            if out_deg != 0:
+                raise ValueError(
+                    f"Vertex '{node}' has out-degree {out_deg}, expected 0"
+                )
+
+        elif dim == 1:  # edge
+            if out_deg != 2:
+                raise ValueError(
+                    f"Edge '{node}' has out-degree {out_deg}, expected 2"
+                )
+            for s in successors:
+                if G.nodes[s].get("dim") != 0:
+                    raise ValueError(
+                        f"Edge '{node}' boundary target '{s}' is not a vertex "
+                        f"(dim={G.nodes[s].get('dim')})"
+                    )
+
+        elif dim == 2:  # face
+            if out_deg != 3:
+                raise ValueError(
+                    f"Face '{node}' has out-degree {out_deg}, expected 3"
+                )
+            for s in successors:
+                if G.nodes[s].get("dim") != 1:
+                    raise ValueError(
+                        f"Face '{node}' boundary target '{s}' is not an edge "
+                        f"(dim={G.nodes[s].get('dim')})"
+                    )
+            # Closed-triangle: 3 edges must share exactly 3 distinct vertices
+            face_vertices = set()
+            for edge_node in successors:
+                for v in G.successors(edge_node):
+                    face_vertices.add(v)
+            if len(face_vertices) != 3:
+                raise ValueError(
+                    f"Face '{node}' boundary edges span {len(face_vertices)} "
+                    f"distinct vertices, expected 3 (closed triangle)"
+                )
+
+    return True
+
+
+
+ +
+ +
+ + +

+ type_color_map(kc) + +

+ + +
+ +

Build a type-name to hex-color mapping from the schema's type registry.

+

Uses matplotlib's tab10 colormap (or tab20 if > 10 types) +for distinct, visually separable colors.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ dict[str, str] + +
+

Mapping from type name to hex color string.

+
+
+ + +
+ Source code in knowledgecomplex/viz.py +
222
+223
+224
+225
+226
+227
+228
+229
+230
+231
+232
+233
+234
+235
+236
+237
+238
+239
+240
+241
+242
+243
+244
+245
+246
+247
def type_color_map(kc: "KnowledgeComplex") -> dict[str, str]:
+    """Build a type-name to hex-color mapping from the schema's type registry.
+
+    Uses matplotlib's ``tab10`` colormap (or ``tab20`` if > 10 types)
+    for distinct, visually separable colors.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    dict[str, str]
+        Mapping from type name to hex color string.
+    """
+    _, plt = _require_mpl()
+    import matplotlib.colors as mcolors
+
+    type_names = sorted(kc._schema._types.keys())
+    cmap_name = "tab10" if len(type_names) <= 10 else "tab20"
+    cmap = plt.get_cmap(cmap_name)
+
+    colors = {}
+    for i, name in enumerate(type_names):
+        colors[name] = mcolors.to_hex(cmap(i % cmap.N))
+    return colors
+
+
+
+ +
+ +
+ + +

+ plot_hasse(kc, *, ax=None, figsize=(10, 8), with_labels=True, node_size_by_dim=True) + +

+ + +
+ +

Plot the Hasse diagram of the complex with type-based color coding.

+

Every element (vertex, edge, face) is drawn as a node. Directed arrows +represent the boundary operator, pointing from each element to its +boundary elements (higher dimension → lower dimension). Nodes are colored +by type and sized by dimension (vertices largest, faces smallest).

+

This is not a geometric picture of the complex — it is the partially +ordered set of simplices. For a geometric view where vertices are points, +edges are line segments, and faces are filled triangles, see +:func:plot_geometric.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ ax + + matplotlib Axes + +
+

Axes to draw on. Created if not provided.

+
+
+ None +
+ figsize + + tuple + +
+

Figure size if creating a new figure.

+
+
+ (10, 8) +
+ with_labels + + bool + +
+

Show element ID labels on nodes.

+
+
+ True +
+ node_size_by_dim + + bool + +
+

Scale node size by dimension (vertex=large, face=small).

+
+
+ True +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ (fig, ax) + +
+

The matplotlib Figure and Axes.

+
+
+ + +
+ Source code in knowledgecomplex/viz.py +
278
+279
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
+290
+291
+292
+293
+294
+295
+296
+297
+298
+299
+300
+301
+302
+303
+304
+305
+306
+307
+308
+309
+310
+311
+312
+313
+314
+315
+316
+317
+318
+319
+320
+321
+322
+323
+324
+325
+326
+327
+328
+329
+330
+331
+332
+333
+334
+335
+336
+337
+338
+339
+340
+341
+342
+343
+344
+345
+346
+347
+348
+349
+350
+351
+352
+353
+354
+355
+356
+357
+358
+359
+360
+361
+362
+363
+364
def plot_hasse(
+    kc: "KnowledgeComplex",
+    *,
+    ax: Any = None,
+    figsize: tuple[float, float] = (10, 8),
+    with_labels: bool = True,
+    node_size_by_dim: bool = True,
+) -> tuple[Any, Any]:
+    """Plot the Hasse diagram of the complex with type-based color coding.
+
+    Every element (vertex, edge, face) is drawn as a node.  Directed arrows
+    represent the boundary operator, pointing from each element to its
+    boundary elements (higher dimension → lower dimension).  Nodes are colored
+    by type and sized by dimension (vertices largest, faces smallest).
+
+    This is **not** a geometric picture of the complex — it is the partially
+    ordered set of simplices.  For a geometric view where vertices are points,
+    edges are line segments, and faces are filled triangles, see
+    :func:`plot_geometric`.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    ax : matplotlib Axes, optional
+        Axes to draw on. Created if not provided.
+    figsize : tuple
+        Figure size if creating a new figure.
+    with_labels : bool
+        Show element ID labels on nodes.
+    node_size_by_dim : bool
+        Scale node size by dimension (vertex=large, face=small).
+
+    Returns
+    -------
+    (fig, ax)
+        The matplotlib Figure and Axes.
+    """
+    nx = _require_nx()
+    _, plt = _require_mpl()
+
+    G = to_networkx(kc)
+    fig, ax = _prepare_ax(ax, figsize)
+    pos = _layout(G)
+    colors = type_color_map(kc)
+
+    if len(G) == 0:
+        ax.set_title("Empty complex")
+        ax.axis("off")
+        return fig, ax
+
+    node_colors = [colors.get(G.nodes[n].get("type", ""), "#999999") for n in G]
+    if node_size_by_dim:
+        node_sizes = [_SIZE_BY_DIM.get(G.nodes[n].get("dim", 0), 200) for n in G]
+    else:
+        node_sizes = 300
+
+    nx.draw_networkx_edges(
+        G, pos, ax=ax, edge_color="#cccccc", width=1.5,
+        arrows=True, arrowstyle="-|>", arrowsize=12,
+        connectionstyle="arc3,rad=0.05",
+    )
+    nx.draw_networkx_nodes(
+        G, pos, ax=ax,
+        node_color=node_colors,
+        node_size=node_sizes,
+        edgecolors="#333333",
+        linewidths=0.5,
+    )
+    if with_labels:
+        nx.draw_networkx_labels(G, pos, ax=ax, font_size=8)
+
+    # Legend
+    from matplotlib.lines import Line2D
+    legend_handles = []
+    for type_name in sorted(colors):
+        kind = kc._schema._types.get(type_name, {}).get("kind", "?")
+        legend_handles.append(
+            Line2D([0], [0], marker="o", color="w",
+                   markerfacecolor=colors[type_name], markersize=10,
+                   label=f"{type_name} ({kind})")
+        )
+    if legend_handles:
+        ax.legend(handles=legend_handles, loc="best", fontsize=8)
+
+    ax.set_title(f"Hasse Diagram: {kc._schema._namespace}")
+    ax.axis("off")
+    return fig, ax
+
+
+
+ +
+ +
+ + +

+ plot_hasse_star(kc, id, *, ax=None, figsize=(10, 8), with_labels=True) + +

+ + +
+ +

Plot the Hasse diagram with the star of an element highlighted.

+

Elements in St(id) are drawn in full color with directed arrows; +all other elements are dimmed to light gray. This is the Hasse-diagram +view — see :func:plot_hasse for details on what that means.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ id + + str + +
+

Element whose star to highlight.

+
+
+ required +
+ ax + + matplotlib Axes + +
+ +
+
+ None +
+ figsize + + tuple + +
+ +
+
+ (10, 8) +
+ with_labels + + bool + +
+ +
+
+ True +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ (fig, ax) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/viz.py +
367
+368
+369
+370
+371
+372
+373
+374
+375
+376
+377
+378
+379
+380
+381
+382
+383
+384
+385
+386
+387
+388
+389
+390
+391
+392
+393
+394
+395
+396
+397
+398
+399
+400
+401
+402
+403
+404
+405
+406
+407
+408
+409
+410
+411
+412
+413
+414
+415
+416
+417
+418
+419
+420
+421
+422
+423
+424
+425
+426
+427
+428
+429
+430
+431
+432
+433
+434
+435
+436
+437
+438
+439
+440
def plot_hasse_star(
+    kc: "KnowledgeComplex",
+    id: str,
+    *,
+    ax: Any = None,
+    figsize: tuple[float, float] = (10, 8),
+    with_labels: bool = True,
+) -> tuple[Any, Any]:
+    """Plot the Hasse diagram with the star of an element highlighted.
+
+    Elements in ``St(id)`` are drawn in full color with directed arrows;
+    all other elements are dimmed to light gray.  This is the Hasse-diagram
+    view — see :func:`plot_hasse` for details on what that means.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    id : str
+        Element whose star to highlight.
+    ax : matplotlib Axes, optional
+    figsize : tuple
+    with_labels : bool
+
+    Returns
+    -------
+    (fig, ax)
+    """
+    nx = _require_nx()
+    _, plt = _require_mpl()
+
+    G = to_networkx(kc)
+    fig, ax = _prepare_ax(ax, figsize)
+    pos = _layout(G)
+    colors = type_color_map(kc)
+    star_ids = kc.star(id)
+
+    highlighted = [n for n in G if n in star_ids]
+    dimmed = [n for n in G if n not in star_ids]
+
+    if dimmed:
+        nx.draw_networkx_nodes(
+            G, pos, nodelist=dimmed, ax=ax,
+            node_color="#dddddd", node_size=150,
+            edgecolors="#cccccc", linewidths=0.5,
+        )
+
+    star_edges = [(u, v) for u, v in G.edges() if u in star_ids and v in star_ids]
+    dim_edges = [(u, v) for u, v in G.edges() if (u, v) not in set(star_edges)]
+    if dim_edges:
+        nx.draw_networkx_edges(
+            G, pos, edgelist=dim_edges, ax=ax, edge_color="#eeeeee", width=1.0,
+            arrows=True, arrowstyle="-|>", arrowsize=8,
+        )
+    if star_edges:
+        nx.draw_networkx_edges(
+            G, pos, edgelist=star_edges, ax=ax, edge_color="#666666", width=2.0,
+            arrows=True, arrowstyle="-|>", arrowsize=14,
+        )
+
+    if highlighted:
+        h_colors = [colors.get(G.nodes[n].get("type", ""), "#999999") for n in highlighted]
+        h_sizes = [_SIZE_BY_DIM.get(G.nodes[n].get("dim", 0), 200) for n in highlighted]
+        nx.draw_networkx_nodes(
+            G, pos, nodelist=highlighted, ax=ax,
+            node_color=h_colors, node_size=h_sizes,
+            edgecolors="#333333", linewidths=1.0,
+        )
+
+    if with_labels:
+        nx.draw_networkx_labels(G, pos, ax=ax, font_size=8)
+
+    ax.set_title(f"Hasse Star({id})")
+    ax.axis("off")
+    return fig, ax
+
+
+
+ +
+ +
+ + +

+ plot_hasse_skeleton(kc, k, *, ax=None, figsize=(10, 8), with_labels=True) + +

+ + +
+ +

Plot the Hasse diagram of the k-skeleton only.

+

Shows only elements of dimension ≤ k, with directed boundary arrows. +This is the Hasse-diagram view — see :func:plot_hasse for details.

+

k=0: vertices only, k=1: vertices + edges, k=2: everything.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ k + + int + +
+

Maximum dimension (0, 1, or 2).

+
+
+ required +
+ ax + + matplotlib Axes + +
+ +
+
+ None +
+ figsize + + tuple + +
+ +
+
+ (10, 8) +
+ with_labels + + bool + +
+ +
+
+ True +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ (fig, ax) + +
+ +
+
+ + +
+ Source code in knowledgecomplex/viz.py +
443
+444
+445
+446
+447
+448
+449
+450
+451
+452
+453
+454
+455
+456
+457
+458
+459
+460
+461
+462
+463
+464
+465
+466
+467
+468
+469
+470
+471
+472
+473
+474
+475
+476
+477
+478
+479
+480
+481
+482
+483
+484
+485
+486
+487
+488
+489
+490
+491
+492
+493
+494
+495
+496
+497
+498
+499
+500
+501
+502
+503
+504
+505
+506
def plot_hasse_skeleton(
+    kc: "KnowledgeComplex",
+    k: int,
+    *,
+    ax: Any = None,
+    figsize: tuple[float, float] = (10, 8),
+    with_labels: bool = True,
+) -> tuple[Any, Any]:
+    """Plot the Hasse diagram of the k-skeleton only.
+
+    Shows only elements of dimension ≤ k, with directed boundary arrows.
+    This is the Hasse-diagram view — see :func:`plot_hasse` for details.
+
+    k=0: vertices only, k=1: vertices + edges, k=2: everything.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    k : int
+        Maximum dimension (0, 1, or 2).
+    ax : matplotlib Axes, optional
+    figsize : tuple
+    with_labels : bool
+
+    Returns
+    -------
+    (fig, ax)
+    """
+    nx = _require_nx()
+    _, plt = _require_mpl()
+
+    G = to_networkx(kc)
+    skel_ids = kc.skeleton(k)
+    subG = G.subgraph(skel_ids).copy()
+
+    fig, ax = _prepare_ax(ax, figsize)
+    pos = _layout(subG)
+    colors = type_color_map(kc)
+
+    if len(subG) == 0:
+        ax.set_title(f"Hasse Skeleton({k}) — empty")
+        ax.axis("off")
+        return fig, ax
+
+    node_colors = [colors.get(subG.nodes[n].get("type", ""), "#999999") for n in subG]
+    node_sizes = [_SIZE_BY_DIM.get(subG.nodes[n].get("dim", 0), 200) for n in subG]
+
+    nx.draw_networkx_edges(
+        subG, pos, ax=ax, edge_color="#cccccc", width=1.5,
+        arrows=True, arrowstyle="-|>", arrowsize=12,
+    )
+    nx.draw_networkx_nodes(
+        subG, pos, ax=ax,
+        node_color=node_colors,
+        node_size=node_sizes,
+        edgecolors="#333333",
+        linewidths=0.5,
+    )
+    if with_labels:
+        nx.draw_networkx_labels(subG, pos, ax=ax, font_size=8)
+
+    ax.set_title(f"Hasse Skeleton({k})")
+    ax.axis("off")
+    return fig, ax
+
+
+
+ +
+ +
+ + +

+ plot_complex(kc, **kwargs) + +

+ + +
+ +

Deprecated: use :func:plot_hasse instead.

+ + +
+ Source code in knowledgecomplex/viz.py +
512
+513
+514
+515
def plot_complex(kc, **kwargs):
+    """Deprecated: use :func:`plot_hasse` instead."""
+    warnings.warn("plot_complex is deprecated, use plot_hasse", DeprecationWarning, stacklevel=2)
+    return plot_hasse(kc, **kwargs)
+
+
+
+ +
+ +
+ + +

+ plot_star(kc, id, **kwargs) + +

+ + +
+ +

Deprecated: use :func:plot_hasse_star instead.

+ + +
+ Source code in knowledgecomplex/viz.py +
518
+519
+520
+521
def plot_star(kc, id, **kwargs):
+    """Deprecated: use :func:`plot_hasse_star` instead."""
+    warnings.warn("plot_star is deprecated, use plot_hasse_star", DeprecationWarning, stacklevel=2)
+    return plot_hasse_star(kc, id, **kwargs)
+
+
+
+ +
+ +
+ + +

+ plot_skeleton(kc, k, **kwargs) + +

+ + +
+ +

Deprecated: use :func:plot_hasse_skeleton instead.

+ + +
+ Source code in knowledgecomplex/viz.py +
524
+525
+526
+527
def plot_skeleton(kc, k, **kwargs):
+    """Deprecated: use :func:`plot_hasse_skeleton` instead."""
+    warnings.warn("plot_skeleton is deprecated, use plot_hasse_skeleton", DeprecationWarning, stacklevel=2)
+    return plot_hasse_skeleton(kc, k, **kwargs)
+
+
+
+ +
+ +
+ + +

+ plot_geometric(kc, *, ax=None, figsize=(10, 8), with_labels=True) + +

+ + +
+ +

Plot the geometric realization of the complex in 3D.

+

KC vertices become points in 3D space (positioned by force-directed +layout). KC edges become line segments connecting their two boundary +vertices. KC faces become filled, semi-transparent triangular patches +spanning their three boundary vertices.

+

This is the classical geometric realization — the view a topologist +would draw. For the Hasse diagram where every element is a node and +boundary relations are directed edges, see :func:plot_hasse.

+ + +

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ ax + + matplotlib Axes3D + +
+

A 3D axes to draw on. Created if not provided.

+
+
+ None +
+ figsize + + tuple + +
+

Figure size if creating a new figure.

+
+
+ (10, 8) +
+ with_labels + + bool + +
+

Show vertex ID labels.

+
+
+ True +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ (fig, ax) + +
+

The matplotlib Figure and Axes3D.

+
+
+ + +
+ Source code in knowledgecomplex/viz.py +
577
+578
+579
+580
+581
+582
+583
+584
+585
+586
+587
+588
+589
+590
+591
+592
+593
+594
+595
+596
+597
+598
+599
+600
+601
+602
+603
+604
+605
+606
+607
+608
+609
+610
+611
+612
+613
+614
+615
+616
+617
+618
+619
+620
+621
+622
+623
+624
+625
+626
+627
+628
+629
+630
+631
+632
+633
+634
+635
+636
+637
+638
+639
+640
+641
+642
+643
+644
+645
+646
+647
+648
+649
+650
+651
+652
+653
+654
+655
+656
+657
+658
+659
+660
+661
+662
+663
+664
+665
+666
+667
+668
+669
+670
+671
+672
+673
+674
+675
+676
+677
+678
+679
+680
+681
+682
def plot_geometric(
+    kc: "KnowledgeComplex",
+    *,
+    ax: Any = None,
+    figsize: tuple[float, float] = (10, 8),
+    with_labels: bool = True,
+) -> tuple[Any, Any]:
+    """Plot the geometric realization of the complex in 3D.
+
+    KC vertices become points in 3D space (positioned by force-directed
+    layout).  KC edges become line segments connecting their two boundary
+    vertices.  KC faces become filled, semi-transparent triangular patches
+    spanning their three boundary vertices.
+
+    This is the classical geometric realization — the view a topologist
+    would draw.  For the Hasse diagram where every element is a node and
+    boundary relations are directed edges, see :func:`plot_hasse`.
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+    ax : matplotlib Axes3D, optional
+        A 3D axes to draw on.  Created if not provided.
+    figsize : tuple
+        Figure size if creating a new figure.
+    with_labels : bool
+        Show vertex ID labels.
+
+    Returns
+    -------
+    (fig, ax)
+        The matplotlib Figure and Axes3D.
+    """
+    _, plt = _require_mpl()
+    from mpl_toolkits.mplot3d.art3d import Poly3DCollection
+
+    colors = type_color_map(kc)
+    pos = _vertex_positions_3d(kc)
+
+    if ax is None:
+        fig = plt.figure(figsize=figsize)
+        ax = fig.add_subplot(111, projection="3d")
+    else:
+        fig = ax.get_figure()
+
+    if not pos:
+        ax.set_title("Empty complex")
+        return fig, ax
+
+    # Draw faces as filled triangles
+    face_ids = kc.skeleton(2) - kc.skeleton(1)
+    for fid in face_ids:
+        verts = _face_vertices(kc, fid)
+        if len(verts) == 3 and all(v in pos for v in verts):
+            tri = [pos[v] for v in verts]
+            face_type = kc.element(fid).type
+            color = colors.get(face_type, "#999999")
+            poly = Poly3DCollection([tri], alpha=0.25, facecolor=color,
+                                    edgecolor=color, linewidths=0.5)
+            ax.add_collection3d(poly)
+
+    # Draw edges as line segments
+    edge_ids = kc.skeleton(1) - kc.skeleton(0)
+    for eid in edge_ids:
+        boundary = list(kc.boundary(eid))
+        if len(boundary) == 2 and all(v in pos for v in boundary):
+            p0, p1 = pos[boundary[0]], pos[boundary[1]]
+            edge_type = kc.element(eid).type
+            color = colors.get(edge_type, "#999999")
+            ax.plot3D(
+                [p0[0], p1[0]], [p0[1], p1[1]], [p0[2], p1[2]],
+                color=color, linewidth=2,
+            )
+
+    # Draw vertices as scatter points
+    vertex_ids = list(kc.skeleton(0))
+    for vid in vertex_ids:
+        if vid in pos:
+            x, y, z = pos[vid]
+            vtype = kc.element(vid).type
+            color = colors.get(vtype, "#999999")
+            ax.scatter3D(x, y, z, color=color, s=80, edgecolors="#333333",
+                         linewidths=0.5, zorder=5, depthshade=False)
+
+    # Labels
+    if with_labels:
+        for vid in vertex_ids:
+            if vid in pos:
+                x, y, z = pos[vid]
+                ax.text(x, y, z, f"  {vid}", fontsize=7)
+
+    # Legend
+    from matplotlib.lines import Line2D
+    legend_handles = []
+    for type_name in sorted(colors):
+        kind = kc._schema._types.get(type_name, {}).get("kind", "?")
+        legend_handles.append(
+            Line2D([0], [0], marker="o", color="w",
+                   markerfacecolor=colors[type_name], markersize=8,
+                   label=f"{type_name} ({kind})")
+        )
+    if legend_handles:
+        ax.legend(handles=legend_handles, loc="best", fontsize=7)
+
+    ax.set_title(f"Geometric Realization: {kc._schema._namespace}")
+    return fig, ax
+
+
+
+ +
+ +
+ + +

+ plot_geometric_interactive(kc) + +

+ + +
+ +

Plot an interactive 3D geometric realization of the complex.

+

Same geometry as :func:plot_geometric — KC vertices are points, KC edges +are line segments, KC faces are filled triangles — but rendered with +Plotly for interactive rotation, zoom, and hover inspection.

+

Requires plotly::

+
pip install knowledgecomplex[viz-interactive]
+
+ + +

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
+ kc + + KnowledgeComplex + +
+ +
+
+ required +
+ + +

Returns:

+ + + + + + + + + + + + + +
TypeDescription
+ Figure + +
+

Call .show() to display or .write_html("file.html") to save.

+
+
+ + +
+ Source code in knowledgecomplex/viz.py +
688
+689
+690
+691
+692
+693
+694
+695
+696
+697
+698
+699
+700
+701
+702
+703
+704
+705
+706
+707
+708
+709
+710
+711
+712
+713
+714
+715
+716
+717
+718
+719
+720
+721
+722
+723
+724
+725
+726
+727
+728
+729
+730
+731
+732
+733
+734
+735
+736
+737
+738
+739
+740
+741
+742
+743
+744
+745
+746
+747
+748
+749
+750
+751
+752
+753
+754
+755
+756
+757
+758
+759
+760
+761
+762
+763
+764
+765
+766
+767
+768
+769
+770
+771
+772
+773
+774
+775
+776
+777
+778
+779
+780
+781
+782
+783
+784
+785
+786
def plot_geometric_interactive(
+    kc: "KnowledgeComplex",
+) -> Any:
+    """Plot an interactive 3D geometric realization of the complex.
+
+    Same geometry as :func:`plot_geometric` — KC vertices are points, KC edges
+    are line segments, KC faces are filled triangles — but rendered with
+    Plotly for interactive rotation, zoom, and hover inspection.
+
+    Requires plotly::
+
+        pip install knowledgecomplex[viz-interactive]
+
+    Parameters
+    ----------
+    kc : KnowledgeComplex
+
+    Returns
+    -------
+    plotly.graph_objects.Figure
+        Call ``.show()`` to display or ``.write_html("file.html")`` to save.
+    """
+    go = _require_plotly()
+
+    colors = type_color_map(kc)
+    pos = _vertex_positions_3d(kc)
+    fig = go.Figure()
+
+    if not pos:
+        fig.update_layout(title="Empty complex")
+        return fig
+
+    # Faces as Mesh3d triangles
+    face_ids = kc.skeleton(2) - kc.skeleton(1)
+    for fid in face_ids:
+        verts = _face_vertices(kc, fid)
+        if len(verts) == 3 and all(v in pos for v in verts):
+            xs = [pos[v][0] for v in verts]
+            ys = [pos[v][1] for v in verts]
+            zs = [pos[v][2] for v in verts]
+            face_type = kc.element(fid).type
+            color = colors.get(face_type, "#999999")
+            fig.add_trace(go.Mesh3d(
+                x=xs, y=ys, z=zs,
+                i=[0], j=[1], k=[2],
+                color=color, opacity=0.3,
+                hoverinfo="text",
+                hovertext=f"{fid} ({face_type})",
+                showlegend=False,
+            ))
+
+    # Edges as line segments
+    edge_ids = kc.skeleton(1) - kc.skeleton(0)
+    for eid in edge_ids:
+        boundary = list(kc.boundary(eid))
+        if len(boundary) == 2 and all(v in pos for v in boundary):
+            p0, p1 = pos[boundary[0]], pos[boundary[1]]
+            edge_type = kc.element(eid).type
+            color = colors.get(edge_type, "#999999")
+            fig.add_trace(go.Scatter3d(
+                x=[p0[0], p1[0]], y=[p0[1], p1[1]], z=[p0[2], p1[2]],
+                mode="lines",
+                line=dict(color=color, width=4),
+                hoverinfo="text",
+                hovertext=f"{eid} ({edge_type})",
+                showlegend=False,
+            ))
+
+    # Vertices as markers
+    vertex_ids = [v for v in kc.skeleton(0) if v in pos]
+    xs = [pos[v][0] for v in vertex_ids]
+    ys = [pos[v][1] for v in vertex_ids]
+    zs = [pos[v][2] for v in vertex_ids]
+    vtypes = [kc.element(v).type for v in vertex_ids]
+    vcolors = [colors.get(t, "#999999") for t in vtypes]
+    hover = [f"{vid} ({vt})" for vid, vt in zip(vertex_ids, vtypes)]
+
+    fig.add_trace(go.Scatter3d(
+        x=xs, y=ys, z=zs,
+        mode="markers+text",
+        marker=dict(size=6, color=vcolors, line=dict(width=1, color="#333333")),
+        text=vertex_ids,
+        textposition="top center",
+        textfont=dict(size=8),
+        hoverinfo="text",
+        hovertext=hover,
+        showlegend=False,
+    ))
+
+    fig.update_layout(
+        title=f"Geometric Realization: {kc._schema._namespace}",
+        scene=dict(
+            xaxis=dict(showgrid=False, zeroline=False, showticklabels=False, title=""),
+            yaxis=dict(showgrid=False, zeroline=False, showticklabels=False, title=""),
+            zaxis=dict(showgrid=False, zeroline=False, showticklabels=False, title=""),
+        ),
+        showlegend=False,
+    )
+    return fig
+
+
+
+ +
+ + + +
+ +
+ +
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/site/assets/_mkdocstrings.css b/site/assets/_mkdocstrings.css new file mode 100644 index 0000000..854048c --- /dev/null +++ b/site/assets/_mkdocstrings.css @@ -0,0 +1,237 @@ + +/* Avoid breaking parameter names, etc. in table cells. */ +.doc-contents td code { + word-break: normal !important; +} + +/* No line break before first paragraph of descriptions. */ +.doc-md-description, +.doc-md-description>p:first-child { + display: inline; +} + +/* No text transformation from Material for MkDocs for H5 headings. */ +.md-typeset h5 .doc-object-name { + text-transform: none; +} + +/* Max width for docstring sections tables. */ +.doc .md-typeset__table, +.doc .md-typeset__table table { + display: table !important; + width: 100%; +} + +.doc .md-typeset__table tr { + display: table-row; +} + +/* Defaults in Spacy table style. */ +.doc-param-default, +.doc-type_param-default { + float: right; +} + +/* Parameter headings must be inline, not blocks. */ +.doc-heading-parameter, +.doc-heading-type_parameter { + display: inline; +} + +/* Default font size for parameter headings. */ +.md-typeset .doc-heading-parameter { + font-size: inherit; +} + +/* Prefer space on the right, not the left of parameter permalinks. */ +.doc-heading-parameter .headerlink, +.doc-heading-type_parameter .headerlink { + margin-left: 0 !important; + margin-right: 0.2rem; +} + +/* Backward-compatibility: docstring section titles in bold. */ +.doc-section-title { + font-weight: bold; +} + +/* Backlinks crumb separator. 
*/ +.doc-backlink-crumb { + display: inline-flex; + gap: .2rem; + white-space: nowrap; + align-items: center; + vertical-align: middle; +} +.doc-backlink-crumb:not(:first-child)::before { + background-color: var(--md-default-fg-color--lighter); + content: ""; + display: inline; + height: 1rem; + --md-path-icon: url('data:image/svg+xml;charset=utf-8,'); + -webkit-mask-image: var(--md-path-icon); + mask-image: var(--md-path-icon); + width: 1rem; +} +.doc-backlink-crumb.last { + font-weight: bold; +} + +/* Symbols in Navigation and ToC. */ +:root, :host, +[data-md-color-scheme="default"] { + --doc-symbol-parameter-fg-color: #df50af; + --doc-symbol-type_parameter-fg-color: #df50af; + --doc-symbol-attribute-fg-color: #953800; + --doc-symbol-function-fg-color: #8250df; + --doc-symbol-method-fg-color: #8250df; + --doc-symbol-class-fg-color: #0550ae; + --doc-symbol-type_alias-fg-color: #0550ae; + --doc-symbol-module-fg-color: #5cad0f; + + --doc-symbol-parameter-bg-color: #df50af1a; + --doc-symbol-type_parameter-bg-color: #df50af1a; + --doc-symbol-attribute-bg-color: #9538001a; + --doc-symbol-function-bg-color: #8250df1a; + --doc-symbol-method-bg-color: #8250df1a; + --doc-symbol-class-bg-color: #0550ae1a; + --doc-symbol-type_alias-bg-color: #0550ae1a; + --doc-symbol-module-bg-color: #5cad0f1a; +} + +[data-md-color-scheme="slate"] { + --doc-symbol-parameter-fg-color: #ffa8cc; + --doc-symbol-type_parameter-fg-color: #ffa8cc; + --doc-symbol-attribute-fg-color: #ffa657; + --doc-symbol-function-fg-color: #d2a8ff; + --doc-symbol-method-fg-color: #d2a8ff; + --doc-symbol-class-fg-color: #79c0ff; + --doc-symbol-type_alias-fg-color: #79c0ff; + --doc-symbol-module-fg-color: #baff79; + + --doc-symbol-parameter-bg-color: #ffa8cc1a; + --doc-symbol-type_parameter-bg-color: #ffa8cc1a; + --doc-symbol-attribute-bg-color: #ffa6571a; + --doc-symbol-function-bg-color: #d2a8ff1a; + --doc-symbol-method-bg-color: #d2a8ff1a; + --doc-symbol-class-bg-color: #79c0ff1a; + 
--doc-symbol-type_alias-bg-color: #79c0ff1a; + --doc-symbol-module-bg-color: #baff791a; +} + +code.doc-symbol { + border-radius: .1rem; + font-size: .85em; + padding: 0 .3em; + font-weight: bold; +} + +code.doc-symbol-parameter, +a code.doc-symbol-parameter { + color: var(--doc-symbol-parameter-fg-color); + background-color: var(--doc-symbol-parameter-bg-color); +} + +code.doc-symbol-parameter::after { + content: "param"; +} + +code.doc-symbol-type_parameter, +a code.doc-symbol-type_parameter { + color: var(--doc-symbol-type_parameter-fg-color); + background-color: var(--doc-symbol-type_parameter-bg-color); +} + +code.doc-symbol-type_parameter::after { + content: "type-param"; +} + +code.doc-symbol-attribute, +a code.doc-symbol-attribute { + color: var(--doc-symbol-attribute-fg-color); + background-color: var(--doc-symbol-attribute-bg-color); +} + +code.doc-symbol-attribute::after { + content: "attr"; +} + +code.doc-symbol-function, +a code.doc-symbol-function { + color: var(--doc-symbol-function-fg-color); + background-color: var(--doc-symbol-function-bg-color); +} + +code.doc-symbol-function::after { + content: "func"; +} + +code.doc-symbol-method, +a code.doc-symbol-method { + color: var(--doc-symbol-method-fg-color); + background-color: var(--doc-symbol-method-bg-color); +} + +code.doc-symbol-method::after { + content: "meth"; +} + +code.doc-symbol-class, +a code.doc-symbol-class { + color: var(--doc-symbol-class-fg-color); + background-color: var(--doc-symbol-class-bg-color); +} + +code.doc-symbol-class::after { + content: "class"; +} + + +code.doc-symbol-type_alias, +a code.doc-symbol-type_alias { + color: var(--doc-symbol-type_alias-fg-color); + background-color: var(--doc-symbol-type_alias-bg-color); +} + +code.doc-symbol-type_alias::after { + content: "type"; +} + +code.doc-symbol-module, +a code.doc-symbol-module { + color: var(--doc-symbol-module-fg-color); + background-color: var(--doc-symbol-module-bg-color); +} + +code.doc-symbol-module::after { + 
content: "mod"; +} + +.doc-signature .autorefs { + color: inherit; + border-bottom: 1px dotted currentcolor; +} + +/* Source code blocks (admonitions). */ +:root { + --md-admonition-icon--mkdocstrings-source: url('data:image/svg+xml;charset=utf-8,') +} +.md-typeset .admonition.mkdocstrings-source, +.md-typeset details.mkdocstrings-source { + border: none; + padding: 0; +} +.md-typeset .admonition.mkdocstrings-source:focus-within, +.md-typeset details.mkdocstrings-source:focus-within { + box-shadow: none; +} +.md-typeset .mkdocstrings-source > .admonition-title, +.md-typeset .mkdocstrings-source > summary { + background-color: inherit; +} +.md-typeset .mkdocstrings-source > .admonition-title::before, +.md-typeset .mkdocstrings-source > summary::before { + background-color: var(--md-default-fg-color); + -webkit-mask-image: var(--md-admonition-icon--mkdocstrings-source); + mask-image: var(--md-admonition-icon--mkdocstrings-source); +} diff --git a/site/assets/images/favicon.png b/site/assets/images/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..1cf13b9f9d978896599290a74f77d5dbe7d1655c GIT binary patch literal 1870 zcmV-U2eJ5xP)Gc)JR9QMau)O=X#!i9;T z37kk-upj^(fsR36MHs_+1RCI)NNu9}lD0S{B^g8PN?Ww(5|~L#Ng*g{WsqleV}|#l zz8@ri&cTzw_h33bHI+12+kK6WN$h#n5cD8OQt`5kw6p~9H3()bUQ8OS4Q4HTQ=1Ol z_JAocz`fLbT2^{`8n~UAo=#AUOf=SOq4pYkt;XbC&f#7lb$*7=$na!mWCQ`dBQsO0 zLFBSPj*N?#u5&pf2t4XjEGH|=pPQ8xh7tpx;US5Cx_Ju;!O`ya-yF`)b%TEt5>eP1ZX~}sjjA%FJF?h7cX8=b!DZl<6%Cv z*G0uvvU+vmnpLZ2paivG-(cd*y3$hCIcsZcYOGh{$&)A6*XX&kXZd3G8m)G$Zz-LV z^GF3VAW^Mdv!)4OM8EgqRiz~*Cji;uzl2uC9^=8I84vNp;ltJ|q-*uQwGp2ma6cY7 z;`%`!9UXO@fr&Ebapfs34OmS9^u6$)bJxrucutf>`dKPKT%%*d3XlFVKunp9 zasduxjrjs>f8V=D|J=XNZp;_Zy^WgQ$9WDjgY=z@stwiEBm9u5*|34&1Na8BMjjgf3+SHcr`5~>oz1Y?SW^=K z^bTyO6>Gar#P_W2gEMwq)ot3; zREHn~U&Dp0l6YT0&k-wLwYjb?5zGK`W6S2v+K>AM(95m2C20L|3m~rN8dprPr@t)5lsk9Hu*W z?pS990s;Ez=+Rj{x7p``4>+c0G5^pYnB1^!TL=(?HLHZ+HicG{~4F1d^5Awl_2!1jICM-!9eoLhbbT^;yHcefyTAaqRcY 
zmuctDopPT!%k+}x%lZRKnzykr2}}XfG_ne?nRQO~?%hkzo;@RN{P6o`&mMUWBYMTe z6i8ChtjX&gXl`nvrU>jah)2iNM%JdjqoaeaU%yVn!^70x-flljp6Q5tK}5}&X8&&G zX3fpb3E(!rH=zVI_9Gjl45w@{(ITqngWFe7@9{mX;tO25Z_8 zQHEpI+FkTU#4xu>RkN>b3Tnc3UpWzPXWm#o55GKF09j^Mh~)K7{QqbO_~(@CVq! zS<8954|P8mXN2MRs86xZ&Q4EfM@JB94b=(YGuk)s&^jiSF=t3*oNK3`rD{H`yQ?d; ztE=laAUoZx5?RC8*WKOj`%LXEkgDd>&^Q4M^z`%u0rg-It=hLCVsq!Z%^6eB-OvOT zFZ28TN&cRmgU}Elrnk43)!>Z1FCPL2K$7}gwzIc48NX}#!A1BpJP?#v5wkNprhV** z?Cpalt1oH&{r!o3eSKc&ap)iz2BTn_VV`4>9M^b3;(YY}4>#ML6{~(4mH+?%07*qo IM6N<$f(jP3KmY&$ literal 0 HcmV?d00001 diff --git a/site/assets/javascripts/bundle.79ae519e.min.js b/site/assets/javascripts/bundle.79ae519e.min.js new file mode 100644 index 0000000..3df3e5e --- /dev/null +++ b/site/assets/javascripts/bundle.79ae519e.min.js @@ -0,0 +1,16 @@ +"use strict";(()=>{var Zi=Object.create;var _r=Object.defineProperty;var ea=Object.getOwnPropertyDescriptor;var ta=Object.getOwnPropertyNames,Bt=Object.getOwnPropertySymbols,ra=Object.getPrototypeOf,Ar=Object.prototype.hasOwnProperty,bo=Object.prototype.propertyIsEnumerable;var ho=(e,t,r)=>t in e?_r(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,P=(e,t)=>{for(var r in t||(t={}))Ar.call(t,r)&&ho(e,r,t[r]);if(Bt)for(var r of Bt(t))bo.call(t,r)&&ho(e,r,t[r]);return e};var vo=(e,t)=>{var r={};for(var o in e)Ar.call(e,o)&&t.indexOf(o)<0&&(r[o]=e[o]);if(e!=null&&Bt)for(var o of Bt(e))t.indexOf(o)<0&&bo.call(e,o)&&(r[o]=e[o]);return r};var Cr=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var oa=(e,t,r,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of ta(t))!Ar.call(e,n)&&n!==r&&_r(e,n,{get:()=>t[n],enumerable:!(o=ea(t,n))||o.enumerable});return e};var $t=(e,t,r)=>(r=e!=null?Zi(ra(e)):{},oa(t||!e||!e.__esModule?_r(r,"default",{value:e,enumerable:!0}):r,e));var go=(e,t,r)=>new Promise((o,n)=>{var 
i=c=>{try{a(r.next(c))}catch(p){n(p)}},s=c=>{try{a(r.throw(c))}catch(p){n(p)}},a=c=>c.done?o(c.value):Promise.resolve(c.value).then(i,s);a((r=r.apply(e,t)).next())});var xo=Cr((kr,yo)=>{(function(e,t){typeof kr=="object"&&typeof yo!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(kr,(function(){"use strict";function e(r){var o=!0,n=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(k){return!!(k&&k!==document&&k.nodeName!=="HTML"&&k.nodeName!=="BODY"&&"classList"in k&&"contains"in k.classList)}function c(k){var ut=k.type,je=k.tagName;return!!(je==="INPUT"&&s[ut]&&!k.readOnly||je==="TEXTAREA"&&!k.readOnly||k.isContentEditable)}function p(k){k.classList.contains("focus-visible")||(k.classList.add("focus-visible"),k.setAttribute("data-focus-visible-added",""))}function l(k){k.hasAttribute("data-focus-visible-added")&&(k.classList.remove("focus-visible"),k.removeAttribute("data-focus-visible-added"))}function f(k){k.metaKey||k.altKey||k.ctrlKey||(a(r.activeElement)&&p(r.activeElement),o=!0)}function u(k){o=!1}function d(k){a(k.target)&&(o||c(k.target))&&p(k.target)}function v(k){a(k.target)&&(k.target.classList.contains("focus-visible")||k.target.hasAttribute("data-focus-visible-added"))&&(n=!0,window.clearTimeout(i),i=window.setTimeout(function(){n=!1},100),l(k.target))}function S(k){document.visibilityState==="hidden"&&(n&&(o=!0),X())}function X(){document.addEventListener("mousemove",ee),document.addEventListener("mousedown",ee),document.addEventListener("mouseup",ee),document.addEventListener("pointermove",ee),document.addEventListener("pointerdown",ee),document.addEventListener("pointerup",ee),document.addEventListener("touchmove",ee),document.addEventListener("touchstart",ee),document.addEventListener("touchend",ee)}function 
re(){document.removeEventListener("mousemove",ee),document.removeEventListener("mousedown",ee),document.removeEventListener("mouseup",ee),document.removeEventListener("pointermove",ee),document.removeEventListener("pointerdown",ee),document.removeEventListener("pointerup",ee),document.removeEventListener("touchmove",ee),document.removeEventListener("touchstart",ee),document.removeEventListener("touchend",ee)}function ee(k){k.target.nodeName&&k.target.nodeName.toLowerCase()==="html"||(o=!1,re())}document.addEventListener("keydown",f,!0),document.addEventListener("mousedown",u,!0),document.addEventListener("pointerdown",u,!0),document.addEventListener("touchstart",u,!0),document.addEventListener("visibilitychange",S,!0),X(),r.addEventListener("focus",d,!0),r.addEventListener("blur",v,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)}))});var ro=Cr((jy,Rn)=>{"use strict";/*! + * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */var qa=/["'&<>]/;Rn.exports=Ka;function Ka(e){var t=""+e,r=qa.exec(t);if(!r)return t;var o,n="",i=0,s=0;for(i=r.index;i{/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */(function(t,r){typeof Nt=="object"&&typeof io=="object"?io.exports=r():typeof define=="function"&&define.amd?define([],r):typeof Nt=="object"?Nt.ClipboardJS=r():t.ClipboardJS=r()})(Nt,function(){return(function(){var e={686:(function(o,n,i){"use strict";i.d(n,{default:function(){return Xi}});var s=i(279),a=i.n(s),c=i(370),p=i.n(c),l=i(817),f=i.n(l);function u(q){try{return document.execCommand(q)}catch(C){return!1}}var d=function(C){var _=f()(C);return u("cut"),_},v=d;function S(q){var C=document.documentElement.getAttribute("dir")==="rtl",_=document.createElement("textarea");_.style.fontSize="12pt",_.style.border="0",_.style.padding="0",_.style.margin="0",_.style.position="absolute",_.style[C?"right":"left"]="-9999px";var D=window.pageYOffset||document.documentElement.scrollTop;return _.style.top="".concat(D,"px"),_.setAttribute("readonly",""),_.value=q,_}var X=function(C,_){var D=S(C);_.container.appendChild(D);var N=f()(D);return u("copy"),D.remove(),N},re=function(C){var _=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},D="";return typeof C=="string"?D=X(C,_):C instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(C==null?void 0:C.type)?D=X(C.value,_):(D=f()(C),u("copy")),D},ee=re;function k(q){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?k=function(_){return typeof _}:k=function(_){return _&&typeof Symbol=="function"&&_.constructor===Symbol&&_!==Symbol.prototype?"symbol":typeof _},k(q)}var ut=function(){var C=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},_=C.action,D=_===void 0?"copy":_,N=C.container,G=C.target,We=C.text;if(D!=="copy"&&D!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(G!==void 0)if(G&&k(G)==="object"&&G.nodeType===1){if(D==="copy"&&G.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. 
Please use "readonly" instead of "disabled" attribute');if(D==="cut"&&(G.hasAttribute("readonly")||G.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(We)return ee(We,{container:N});if(G)return D==="cut"?v(G):ee(G,{container:N})},je=ut;function R(q){"@babel/helpers - typeof";return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?R=function(_){return typeof _}:R=function(_){return _&&typeof Symbol=="function"&&_.constructor===Symbol&&_!==Symbol.prototype?"symbol":typeof _},R(q)}function se(q,C){if(!(q instanceof C))throw new TypeError("Cannot call a class as a function")}function ce(q,C){for(var _=0;_0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof N.action=="function"?N.action:this.defaultAction,this.target=typeof N.target=="function"?N.target:this.defaultTarget,this.text=typeof N.text=="function"?N.text:this.defaultText,this.container=R(N.container)==="object"?N.container:document.body}},{key:"listenClick",value:function(N){var G=this;this.listener=p()(N,"click",function(We){return G.onClick(We)})}},{key:"onClick",value:function(N){var G=N.delegateTarget||N.currentTarget,We=this.action(G)||"copy",Yt=je({action:We,container:this.container,target:this.target(G),text:this.text(G)});this.emit(Yt?"success":"error",{action:We,text:Yt,trigger:G,clearSelection:function(){G&&G.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(N){return Mr("action",N)}},{key:"defaultTarget",value:function(N){var G=Mr("target",N);if(G)return document.querySelector(G)}},{key:"defaultText",value:function(N){return Mr("text",N)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(N){var G=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return ee(N,G)}},{key:"cut",value:function(N){return 
v(N)}},{key:"isSupported",value:function(){var N=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],G=typeof N=="string"?[N]:N,We=!!document.queryCommandSupported;return G.forEach(function(Yt){We=We&&!!document.queryCommandSupported(Yt)}),We}}]),_})(a()),Xi=Ji}),828:(function(o){var n=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==n;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}o.exports=s}),438:(function(o,n,i){var s=i(828);function a(l,f,u,d,v){var S=p.apply(this,arguments);return l.addEventListener(u,S,v),{destroy:function(){l.removeEventListener(u,S,v)}}}function c(l,f,u,d,v){return typeof l.addEventListener=="function"?a.apply(null,arguments):typeof u=="function"?a.bind(null,document).apply(null,arguments):(typeof l=="string"&&(l=document.querySelectorAll(l)),Array.prototype.map.call(l,function(S){return a(S,f,u,d,v)}))}function p(l,f,u,d){return function(v){v.delegateTarget=s(v.target,f),v.delegateTarget&&d.call(l,v)}}o.exports=c}),879:(function(o,n){n.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},n.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||n.node(i[0]))},n.string=function(i){return typeof i=="string"||i instanceof String},n.fn=function(i){var s=Object.prototype.toString.call(i);return s==="[object Function]"}}),370:(function(o,n,i){var s=i(879),a=i(438);function c(u,d,v){if(!u&&!d&&!v)throw new Error("Missing required arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(v))throw new TypeError("Third argument must be a Function");if(s.node(u))return p(u,d,v);if(s.nodeList(u))return l(u,d,v);if(s.string(u))return f(u,d,v);throw new 
TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function p(u,d,v){return u.addEventListener(d,v),{destroy:function(){u.removeEventListener(d,v)}}}function l(u,d,v){return Array.prototype.forEach.call(u,function(S){S.addEventListener(d,v)}),{destroy:function(){Array.prototype.forEach.call(u,function(S){S.removeEventListener(d,v)})}}}function f(u,d,v){return a(document.body,u,d,v)}o.exports=c}),817:(function(o){function n(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),p=document.createRange();p.selectNodeContents(i),c.removeAllRanges(),c.addRange(p),s=c.toString()}return s}o.exports=n}),279:(function(o){function n(){}n.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function p(){c.off(i,p),s.apply(a,arguments)}return p._=s,this.on(i,p,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,p=a.length;for(c;c0&&i[i.length-1])&&(p[0]===6||p[0]===2)){r=0;continue}if(p[0]===3&&(!i||p[1]>i[0]&&p[1]=e.length&&(e=void 0),{value:e&&e[o++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function K(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var o=r.call(e),n,i=[],s;try{for(;(t===void 0||t-- >0)&&!(n=o.next()).done;)i.push(n.value)}catch(a){s={error:a}}finally{try{n&&!n.done&&(r=o.return)&&r.call(o)}finally{if(s)throw s.error}}return i}function B(e,t,r){if(r||arguments.length===2)for(var o=0,n=t.length,i;o1||c(d,S)})},v&&(n[d]=v(n[d])))}function c(d,v){try{p(o[d](v))}catch(S){u(i[0][3],S)}}function p(d){d.value instanceof 
dt?Promise.resolve(d.value.v).then(l,f):u(i[0][2],d)}function l(d){c("next",d)}function f(d){c("throw",d)}function u(d,v){d(v),i.shift(),i.length&&c(i[0][0],i[0][1])}}function To(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof Oe=="function"?Oe(e):e[Symbol.iterator](),r={},o("next"),o("throw"),o("return"),r[Symbol.asyncIterator]=function(){return this},r);function o(i){r[i]=e[i]&&function(s){return new Promise(function(a,c){s=e[i](s),n(a,c,s.done,s.value)})}}function n(i,s,a,c){Promise.resolve(c).then(function(p){i({value:p,done:a})},s)}}function I(e){return typeof e=="function"}function yt(e){var t=function(o){Error.call(o),o.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var Jt=yt(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: +`+r.map(function(o,n){return n+1+") "+o.toString()}).join(` + `):"",this.name="UnsubscriptionError",this.errors=r}});function Ze(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var qe=(function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,o,n,i;if(!this.closed){this.closed=!0;var s=this._parentage;if(s)if(this._parentage=null,Array.isArray(s))try{for(var a=Oe(s),c=a.next();!c.done;c=a.next()){var p=c.value;p.remove(this)}}catch(S){t={error:S}}finally{try{c&&!c.done&&(r=a.return)&&r.call(a)}finally{if(t)throw t.error}}else s.remove(this);var l=this.initialTeardown;if(I(l))try{l()}catch(S){i=S instanceof Jt?S.errors:[S]}var f=this._finalizers;if(f){this._finalizers=null;try{for(var u=Oe(f),d=u.next();!d.done;d=u.next()){var v=d.value;try{So(v)}catch(S){i=i!=null?i:[],S instanceof Jt?i=B(B([],K(i)),K(S.errors)):i.push(S)}}}catch(S){o={error:S}}finally{try{d&&!d.done&&(n=u.return)&&n.call(u)}finally{if(o)throw 
o.error}}}if(i)throw new Jt(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)So(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&Ze(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&Ze(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=(function(){var t=new e;return t.closed=!0,t})(),e})();var $r=qe.EMPTY;function Xt(e){return e instanceof qe||e&&"closed"in e&&I(e.remove)&&I(e.add)&&I(e.unsubscribe)}function So(e){I(e)?e():e.unsubscribe()}var De={onUnhandledError:null,onStoppedNotification:null,Promise:void 0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var xt={setTimeout:function(e,t){for(var r=[],o=2;o0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var o=this,n=this,i=n.hasError,s=n.isStopped,a=n.observers;return i||s?$r:(this.currentObservers=null,a.push(r),new qe(function(){o.currentObservers=null,Ze(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var o=this,n=o.hasError,i=o.thrownError,s=o.isStopped;n?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new F;return r.source=this,r},t.create=function(r,o){return new Ho(r,o)},t})(F);var Ho=(function(e){ie(t,e);function t(r,o){var n=e.call(this)||this;return n.destination=r,n.source=o,n}return t.prototype.next=function(r){var 
o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.next)===null||n===void 0||n.call(o,r)},t.prototype.error=function(r){var o,n;(n=(o=this.destination)===null||o===void 0?void 0:o.error)===null||n===void 0||n.call(o,r)},t.prototype.complete=function(){var r,o;(o=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||o===void 0||o.call(r)},t.prototype._subscribe=function(r){var o,n;return(n=(o=this.source)===null||o===void 0?void 0:o.subscribe(r))!==null&&n!==void 0?n:$r},t})(T);var jr=(function(e){ie(t,e);function t(r){var o=e.call(this)||this;return o._value=r,o}return Object.defineProperty(t.prototype,"value",{get:function(){return this.getValue()},enumerable:!1,configurable:!0}),t.prototype._subscribe=function(r){var o=e.prototype._subscribe.call(this,r);return!o.closed&&r.next(this._value),o},t.prototype.getValue=function(){var r=this,o=r.hasError,n=r.thrownError,i=r._value;if(o)throw n;return this._throwIfClosed(),i},t.prototype.next=function(r){e.prototype.next.call(this,this._value=r)},t})(T);var Rt={now:function(){return(Rt.delegate||Date).now()},delegate:void 0};var It=(function(e){ie(t,e);function t(r,o,n){r===void 0&&(r=1/0),o===void 0&&(o=1/0),n===void 0&&(n=Rt);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=o,i._timestampProvider=n,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=o===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,o),i}return t.prototype.next=function(r){var o=this,n=o.isStopped,i=o._buffer,s=o._infiniteTimeWindow,a=o._timestampProvider,c=o._windowTime;n||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var o=this._innerSubscribe(r),n=this,i=n._infiniteTimeWindow,s=n._buffer,a=s.slice(),c=0;c0?e.prototype.schedule.call(this,r,o):(this.delay=o,this.state=r,this.scheduler.flush(this),this)},t.prototype.execute=function(r,o){return 
o>0||this.closed?e.prototype.execute.call(this,r,o):this._execute(r,o)},t.prototype.requestAsyncId=function(r,o,n){return n===void 0&&(n=0),n!=null&&n>0||n==null&&this.delay>0?e.prototype.requestAsyncId.call(this,r,o,n):(r.flush(this),0)},t})(St);var Ro=(function(e){ie(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t})(Ot);var Dr=new Ro(Po);var Io=(function(e){ie(t,e);function t(r,o){var n=e.call(this,r,o)||this;return n.scheduler=r,n.work=o,n}return t.prototype.requestAsyncId=function(r,o,n){return n===void 0&&(n=0),n!==null&&n>0?e.prototype.requestAsyncId.call(this,r,o,n):(r.actions.push(this),r._scheduled||(r._scheduled=Tt.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,o,n){var i;if(n===void 0&&(n=0),n!=null?n>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,o,n);var s=r.actions;o!=null&&o===r._scheduled&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==o&&(Tt.cancelAnimationFrame(o),r._scheduled=void 0)},t})(St);var Fo=(function(e){ie(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var o;r?o=r.id:(o=this._scheduled,this._scheduled=void 0);var n=this.actions,i;r=r||n.shift();do if(i=r.execute(r.state,r.delay))break;while((r=n[0])&&r.id===o&&n.shift());if(this._active=!1,i){for(;(r=n[0])&&r.id===o&&n.shift();)r.unsubscribe();throw i}},t})(Ot);var ye=new Fo(Io);var y=new F(function(e){return e.complete()});function tr(e){return e&&I(e.schedule)}function Vr(e){return e[e.length-1]}function pt(e){return I(Vr(e))?e.pop():void 0}function Fe(e){return tr(Vr(e))?e.pop():void 0}function rr(e,t){return typeof Vr(e)=="number"?e.pop():t}var Lt=(function(e){return e&&typeof e.length=="number"&&typeof e!="function"});function or(e){return I(e==null?void 0:e.then)}function nr(e){return I(e[wt])}function ir(e){return Symbol.asyncIterator&&I(e==null?void 0:e[Symbol.asyncIterator])}function ar(e){return new 
TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function fa(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var sr=fa();function cr(e){return I(e==null?void 0:e[sr])}function pr(e){return wo(this,arguments,function(){var r,o,n,i;return Gt(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,dt(r.read())];case 3:return o=s.sent(),n=o.value,i=o.done,i?[4,dt(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,dt(n)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function lr(e){return I(e==null?void 0:e.getReader)}function U(e){if(e instanceof F)return e;if(e!=null){if(nr(e))return ua(e);if(Lt(e))return da(e);if(or(e))return ha(e);if(ir(e))return jo(e);if(cr(e))return ba(e);if(lr(e))return va(e)}throw ar(e)}function ua(e){return new F(function(t){var r=e[wt]();if(I(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function da(e){return new F(function(t){for(var r=0;r=2;return function(o){return o.pipe(e?g(function(n,i){return e(n,i,o)}):be,Ee(1),r?Qe(t):tn(function(){return new fr}))}}function Yr(e){return e<=0?function(){return y}:E(function(t,r){var o=[];t.subscribe(w(r,function(n){o.push(n),e=2,!0))}function le(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new T}:t,o=e.resetOnError,n=o===void 0?!0:o,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(p){var l,f,u,d=0,v=!1,S=!1,X=function(){f==null||f.unsubscribe(),f=void 0},re=function(){X(),l=u=void 0,v=S=!1},ee=function(){var k=l;re(),k==null||k.unsubscribe()};return E(function(k,ut){d++,!S&&!v&&X();var 
je=u=u!=null?u:r();ut.add(function(){d--,d===0&&!S&&!v&&(f=Br(ee,c))}),je.subscribe(ut),!l&&d>0&&(l=new bt({next:function(R){return je.next(R)},error:function(R){S=!0,X(),f=Br(re,n,R),je.error(R)},complete:function(){v=!0,X(),f=Br(re,s),je.complete()}}),U(k).subscribe(l))})(p)}}function Br(e,t){for(var r=[],o=2;oe.next(document)),e}function M(e,t=document){return Array.from(t.querySelectorAll(e))}function j(e,t=document){let r=ue(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function ue(e,t=document){return t.querySelector(e)||void 0}function Ne(){var e,t,r,o;return(o=(r=(t=(e=document.activeElement)==null?void 0:e.shadowRoot)==null?void 0:t.activeElement)!=null?r:document.activeElement)!=null?o:void 0}var Ra=L(h(document.body,"focusin"),h(document.body,"focusout")).pipe(Ae(1),Q(void 0),m(()=>Ne()||document.body),Z(1));function Ye(e){return Ra.pipe(m(t=>e.contains(t)),Y())}function it(e,t){return H(()=>L(h(e,"mouseenter").pipe(m(()=>!0)),h(e,"mouseleave").pipe(m(()=>!1))).pipe(t?jt(r=>He(+!r*t)):be,Q(e.matches(":hover"))))}function sn(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)sn(e,r)}function x(e,t,...r){let o=document.createElement(e);if(t)for(let n of Object.keys(t))typeof t[n]!="undefined"&&(typeof t[n]!="boolean"?o.setAttribute(n,t[n]):o.setAttribute(n,""));for(let n of r)sn(o,n);return o}function br(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function _t(e){let t=x("script",{src:e});return H(()=>(document.head.appendChild(t),L(h(t,"load"),h(t,"error").pipe(b(()=>Nr(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),A(()=>document.head.removeChild(t)),Ee(1))))}var cn=new T,Ia=H(()=>typeof ResizeObserver=="undefined"?_t("https://unpkg.com/resize-observer-polyfill"):$(void 0)).pipe(m(()=>new 
ResizeObserver(e=>e.forEach(t=>cn.next(t)))),b(e=>L(tt,$(e)).pipe(A(()=>e.disconnect()))),Z(1));function de(e){return{width:e.offsetWidth,height:e.offsetHeight}}function Le(e){let t=e;for(;t.clientWidth===0&&t.parentElement;)t=t.parentElement;return Ia.pipe(O(r=>r.observe(t)),b(r=>cn.pipe(g(o=>o.target===t),A(()=>r.unobserve(t)))),m(()=>de(e)),Q(de(e)))}function At(e){return{width:e.scrollWidth,height:e.scrollHeight}}function vr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}function pn(e){let t=[],r=e.parentElement;for(;r;)(e.clientWidth>r.clientWidth||e.clientHeight>r.clientHeight)&&t.push(r),r=(e=r).parentElement;return t.length===0&&t.push(document.documentElement),t}function Be(e){return{x:e.offsetLeft,y:e.offsetTop}}function ln(e){let t=e.getBoundingClientRect();return{x:t.x+window.scrollX,y:t.y+window.scrollY}}function mn(e){return L(h(window,"load"),h(window,"resize")).pipe($e(0,ye),m(()=>Be(e)),Q(Be(e)))}function gr(e){return{x:e.scrollLeft,y:e.scrollTop}}function Ge(e){return L(h(e,"scroll"),h(window,"scroll"),h(window,"resize")).pipe($e(0,ye),m(()=>gr(e)),Q(gr(e)))}var fn=new T,Fa=H(()=>$(new IntersectionObserver(e=>{for(let t of e)fn.next(t)},{threshold:0}))).pipe(b(e=>L(tt,$(e)).pipe(A(()=>e.disconnect()))),Z(1));function mt(e){return Fa.pipe(O(t=>t.observe(e)),b(t=>fn.pipe(g(({target:r})=>r===e),A(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function un(e,t=16){return Ge(e).pipe(m(({y:r})=>{let o=de(e),n=At(e);return r>=n.height-o.height-t}),Y())}var yr={drawer:j("[data-md-toggle=drawer]"),search:j("[data-md-toggle=search]")};function dn(e){return yr[e].checked}function at(e,t){yr[e].checked!==t&&yr[e].click()}function Je(e){let t=yr[e];return h(t,"change").pipe(m(()=>t.checked),Q(t.checked))}function ja(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case 
HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function Ua(){return L(h(window,"compositionstart").pipe(m(()=>!0)),h(window,"compositionend").pipe(m(()=>!1))).pipe(Q(!1))}function hn(){let e=h(window,"keydown").pipe(g(t=>!(t.metaKey||t.ctrlKey)),m(t=>({mode:dn("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),g(({mode:t,type:r})=>{if(t==="global"){let o=Ne();if(typeof o!="undefined")return!ja(o,r)}return!0}),le());return Ua().pipe(b(t=>t?y:e))}function we(){return new URL(location.href)}function st(e,t=!1){if(V("navigation.instant")&&!t){let r=x("a",{href:e.href});document.body.appendChild(r),r.click(),r.remove()}else location.href=e.href}function bn(){return new T}function vn(){return location.hash.slice(1)}function gn(e){let t=x("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function Zr(e){return L(h(window,"hashchange"),e).pipe(m(vn),Q(vn()),g(t=>t.length>0),Z(1))}function yn(e){return Zr(e).pipe(m(t=>ue(`[id="${t}"]`)),g(t=>typeof t!="undefined"))}function Wt(e){let t=matchMedia(e);return ur(r=>t.addListener(()=>r(t.matches))).pipe(Q(t.matches))}function xn(){let e=matchMedia("print");return L(h(window,"beforeprint").pipe(m(()=>!0)),h(window,"afterprint").pipe(m(()=>!1))).pipe(Q(e.matches))}function eo(e,t){return e.pipe(b(r=>r?t():y))}function to(e,t){return new F(r=>{let o=new XMLHttpRequest;return o.open("GET",`${e}`),o.responseType="blob",o.addEventListener("load",()=>{o.status>=200&&o.status<300?(r.next(o.response),r.complete()):r.error(new Error(o.statusText))}),o.addEventListener("error",()=>{r.error(new Error("Network error"))}),o.addEventListener("abort",()=>{r.complete()}),typeof(t==null?void 0:t.progress$)!="undefined"&&(o.addEventListener("progress",n=>{var i;if(n.lengthComputable)t.progress$.next(n.loaded/n.total*100);else{let 
s=(i=o.getResponseHeader("Content-Length"))!=null?i:0;t.progress$.next(n.loaded/+s*100)}}),t.progress$.next(5)),o.send(),()=>o.abort()})}function ze(e,t){return to(e,t).pipe(b(r=>r.text()),m(r=>JSON.parse(r)),Z(1))}function xr(e,t){let r=new DOMParser;return to(e,t).pipe(b(o=>o.text()),m(o=>r.parseFromString(o,"text/html")),Z(1))}function En(e,t){let r=new DOMParser;return to(e,t).pipe(b(o=>o.text()),m(o=>r.parseFromString(o,"text/xml")),Z(1))}function wn(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function Tn(){return L(h(window,"scroll",{passive:!0}),h(window,"resize",{passive:!0})).pipe(m(wn),Q(wn()))}function Sn(){return{width:innerWidth,height:innerHeight}}function On(){return h(window,"resize",{passive:!0}).pipe(m(Sn),Q(Sn()))}function Ln(){return z([Tn(),On()]).pipe(m(([e,t])=>({offset:e,size:t})),Z(1))}function Er(e,{viewport$:t,header$:r}){let o=t.pipe(ne("size")),n=z([o,r]).pipe(m(()=>Be(e)));return z([r,t,n]).pipe(m(([{height:i},{offset:s,size:a},{x:c,y:p}])=>({offset:{x:s.x-c,y:s.y-p+i},size:a})))}function Wa(e){return h(e,"message",t=>t.data)}function Da(e){let t=new T;return t.subscribe(r=>e.postMessage(r)),t}function Mn(e,t=new Worker(e)){let r=Wa(t),o=Da(t),n=new T;n.subscribe(o);let i=o.pipe(oe(),ae(!0));return n.pipe(oe(),Ve(r.pipe(W(i))),le())}var Va=j("#__config"),Ct=JSON.parse(Va.textContent);Ct.base=`${new URL(Ct.base,we())}`;function Te(){return Ct}function V(e){return Ct.features.includes(e)}function Me(e,t){return typeof t!="undefined"?Ct.translations[e].replace("#",t.toString()):Ct.translations[e]}function Ce(e,t=document){return j(`[data-md-component=${e}]`,t)}function me(e,t=document){return M(`[data-md-component=${e}]`,t)}function Na(e){let t=j(".md-typeset > :first-child",e);return h(t,"click",{once:!0}).pipe(m(()=>j(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function _n(e){if(!V("announce.dismiss")||!e.childElementCount)return y;if(!e.hidden){let 
t=j(".md-typeset",e);__md_hash(t.innerHTML)===__md_get("__announce")&&(e.hidden=!0)}return H(()=>{let t=new T;return t.subscribe(({hash:r})=>{e.hidden=!0,__md_set("__announce",r)}),Na(e).pipe(O(r=>t.next(r)),A(()=>t.complete()),m(r=>P({ref:e},r)))})}function za(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function An(e,t){let r=new T;return r.subscribe(({hidden:o})=>{e.hidden=o}),za(e,t).pipe(O(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))}function Dt(e,t){return t==="inline"?x("div",{class:"md-tooltip md-tooltip--inline",id:e,role:"tooltip"},x("div",{class:"md-tooltip__inner md-typeset"})):x("div",{class:"md-tooltip",id:e,role:"tooltip"},x("div",{class:"md-tooltip__inner md-typeset"}))}function wr(...e){return x("div",{class:"md-tooltip2",role:"dialog"},x("div",{class:"md-tooltip2__inner md-typeset"},e))}function Cn(...e){return x("div",{class:"md-tooltip2",role:"tooltip"},x("div",{class:"md-tooltip2__inner md-typeset"},e))}function kn(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return x("aside",{class:"md-annotation",tabIndex:0},Dt(t),x("a",{href:r,class:"md-annotation__index",tabIndex:-1},x("span",{"data-md-annotation-id":e})))}else return x("aside",{class:"md-annotation",tabIndex:0},Dt(t),x("span",{class:"md-annotation__index",tabIndex:-1},x("span",{"data-md-annotation-id":e})))}function Hn(e){return x("button",{class:"md-code__button",title:Me("clipboard.copy"),"data-clipboard-target":`#${e} > code`,"data-md-type":"copy"})}function $n(){return x("button",{class:"md-code__button",title:"Toggle line selection","data-md-type":"select"})}function Pn(){return x("nav",{class:"md-code__nav"})}var In=$t(ro());function oo(e,t){let r=t&2,o=t&1,n=Object.keys(e.terms).filter(c=>!e.terms[c]).reduce((c,p)=>[...c,x("del",null,(0,In.default)(p))," "],[]).slice(0,-1),i=Te(),s=new URL(e.location,i.base);V("search.highlight")&&s.searchParams.set("h",Object.entries(e.terms).filter(([,c])=>c).reduce((c,[p])=>`${c} 
${p}`.trim(),""));let{tags:a}=Te();return x("a",{href:`${s}`,class:"md-search-result__link",tabIndex:-1},x("article",{class:"md-search-result__article md-typeset","data-md-score":e.score.toFixed(2)},r>0&&x("div",{class:"md-search-result__icon md-icon"}),r>0&&x("h1",null,e.title),r<=0&&x("h2",null,e.title),o>0&&e.text.length>0&&e.text,e.tags&&x("nav",{class:"md-tags"},e.tags.map(c=>{let p=a?c in a?`md-tag-icon md-tag--${a[c]}`:"md-tag-icon":"";return x("span",{class:`md-tag ${p}`},c)})),o>0&&n.length>0&&x("p",{class:"md-search-result__terms"},Me("search.result.term.missing"),": ",...n)))}function Fn(e){let t=e[0].score,r=[...e],o=Te(),n=r.findIndex(l=>!`${new URL(l.location,o.base)}`.includes("#")),[i]=r.splice(n,1),s=r.findIndex(l=>l.scoreoo(l,1)),...c.length?[x("details",{class:"md-search-result__more"},x("summary",{tabIndex:-1},x("div",null,c.length>0&&c.length===1?Me("search.result.more.one"):Me("search.result.more.other",c.length))),...c.map(l=>oo(l,1)))]:[]];return x("li",{class:"md-search-result__item"},p)}function jn(e){return x("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>x("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?br(r):r)))}function no(e){let t=`tabbed-control tabbed-control--${e}`;return x("div",{class:t,hidden:!0},x("button",{class:"tabbed-button",tabIndex:-1,"aria-hidden":"true"}))}function Un(e){return x("div",{class:"md-typeset__scrollwrap"},x("div",{class:"md-typeset__table"},e))}function Qa(e){var o;let t=Te(),r=new URL(`../${e.version}/`,t.base);return x("li",{class:"md-version__item"},x("a",{href:`${r}`,class:"md-version__link"},e.title,((o=t.version)==null?void 0:o.alias)&&e.aliases.length>0&&x("span",{class:"md-version__alias"},e.aliases[0])))}function Wn(e,t){var o;let r=Te();return e=e.filter(n=>{var i;return!((i=n.properties)!=null&&i.hidden)}),x("div",{class:"md-version"},x("button",{class:"md-version__current","aria-label":Me("select.version")},t.title,((o=r.version)==null?void 
0:o.alias)&&t.aliases.length>0&&x("span",{class:"md-version__alias"},t.aliases[0])),x("ul",{class:"md-version__list"},e.map(Qa)))}var Ya=0;function Ba(e,t=250){let r=z([Ye(e),it(e,t)]).pipe(m(([n,i])=>n||i),Y()),o=H(()=>pn(e)).pipe(J(Ge),gt(1),Pe(r),m(()=>ln(e)));return r.pipe(Re(n=>n),b(()=>z([r,o])),m(([n,i])=>({active:n,offset:i})),le())}function Vt(e,t,r=250){let{content$:o,viewport$:n}=t,i=`__tooltip2_${Ya++}`;return H(()=>{let s=new T,a=new jr(!1);s.pipe(oe(),ae(!1)).subscribe(a);let c=a.pipe(jt(l=>He(+!l*250,Dr)),Y(),b(l=>l?o:y),O(l=>l.id=i),le());z([s.pipe(m(({active:l})=>l)),c.pipe(b(l=>it(l,250)),Q(!1))]).pipe(m(l=>l.some(f=>f))).subscribe(a);let p=a.pipe(g(l=>l),te(c,n),m(([l,f,{size:u}])=>{let d=e.getBoundingClientRect(),v=d.width/2;if(f.role==="tooltip")return{x:v,y:8+d.height};if(d.y>=u.height/2){let{height:S}=de(f);return{x:v,y:-16-S}}else return{x:v,y:16+d.height}}));return z([c,s,p]).subscribe(([l,{offset:f},u])=>{l.style.setProperty("--md-tooltip-host-x",`${f.x}px`),l.style.setProperty("--md-tooltip-host-y",`${f.y}px`),l.style.setProperty("--md-tooltip-x",`${u.x}px`),l.style.setProperty("--md-tooltip-y",`${u.y}px`),l.classList.toggle("md-tooltip2--top",u.y<0),l.classList.toggle("md-tooltip2--bottom",u.y>=0)}),a.pipe(g(l=>l),te(c,(l,f)=>f),g(l=>l.role==="tooltip")).subscribe(l=>{let f=de(j(":scope > *",l));l.style.setProperty("--md-tooltip-width",`${f.width}px`),l.style.setProperty("--md-tooltip-tail","0px")}),a.pipe(Y(),xe(ye),te(c)).subscribe(([l,f])=>{f.classList.toggle("md-tooltip2--active",l)}),z([a.pipe(g(l=>l)),c]).subscribe(([l,f])=>{f.role==="dialog"?(e.setAttribute("aria-controls",i),e.setAttribute("aria-haspopup","dialog")):e.setAttribute("aria-describedby",i)}),a.pipe(g(l=>!l)).subscribe(()=>{e.removeAttribute("aria-controls"),e.removeAttribute("aria-describedby"),e.removeAttribute("aria-haspopup")}),Ba(e,r).pipe(O(l=>s.next(l)),A(()=>s.complete()),m(l=>P({ref:e},l)))})}function Xe(e,{viewport$:t},r=document.body){return 
Vt(e,{content$:new F(o=>{let n=e.title,i=Cn(n);return o.next(i),e.removeAttribute("title"),r.append(i),()=>{i.remove(),e.setAttribute("title",n)}}),viewport$:t},0)}function Ga(e,t){let r=H(()=>z([mn(e),Ge(t)])).pipe(m(([{x:o,y:n},i])=>{let{width:s,height:a}=de(e);return{x:o-i.x+s/2,y:n-i.y+a/2}}));return Ye(e).pipe(b(o=>r.pipe(m(n=>({active:o,offset:n})),Ee(+!o||1/0))))}function Dn(e,t,{target$:r}){let[o,n]=Array.from(e.children);return H(()=>{let i=new T,s=i.pipe(oe(),ae(!0));return i.subscribe({next({offset:a}){e.style.setProperty("--md-tooltip-x",`${a.x}px`),e.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),mt(e).pipe(W(s)).subscribe(a=>{e.toggleAttribute("data-md-visible",a)}),L(i.pipe(g(({active:a})=>a)),i.pipe(Ae(250),g(({active:a})=>!a))).subscribe({next({active:a}){a?e.prepend(o):o.remove()},complete(){e.prepend(o)}}),i.pipe($e(16,ye)).subscribe(({active:a})=>{o.classList.toggle("md-tooltip--active",a)}),i.pipe(gt(125,ye),g(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?e.style.setProperty("--md-tooltip-0",`${-a}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),h(n,"click").pipe(W(s),g(a=>!(a.metaKey||a.ctrlKey))).subscribe(a=>{a.stopPropagation(),a.preventDefault()}),h(n,"mousedown").pipe(W(s),te(i)).subscribe(([a,{active:c}])=>{var p;if(a.button!==0||a.metaKey||a.ctrlKey)a.preventDefault();else if(c){a.preventDefault();let l=e.parentElement.closest(".md-annotation");l instanceof HTMLElement?l.focus():(p=Ne())==null||p.blur()}}),r.pipe(W(s),g(a=>a===o),nt(125)).subscribe(()=>e.focus()),Ga(e,t).pipe(O(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function Ja(e){let t=Te();if(e.tagName!=="CODE")return[e];let r=[".c",".c1",".cm"];if(t.annotate&&typeof t.annotate=="object"){let o=e.closest("[class|=language]");if(o)for(let n of 
Array.from(o.classList)){if(!n.startsWith("language-"))continue;let[,i]=n.split("-");i in t.annotate&&r.push(...t.annotate[i])}}return M(r.join(", "),e)}/* Xa: walk text nodes (roots supplied by Ja — defined elsewhere) and split out "(n)" annotation markers, optionally suffixed with "!", into standalone text nodes; returns the marker nodes. */function Xa(e){let t=[];for(let r of Ja(e)){let o=[],n=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=n.nextNode();i;i=n.nextNode())o.push(i);for(let i of o){let s;for(;s=/(\(\d+\))(!)?/.exec(i.textContent);){let[,a,c]=s;if(typeof c=="undefined"){let p=i.splitText(s.index);i=p.splitText(a.length),t.push(p)}else{i.textContent=a,t.push(i);break}}}}return t}/* Vn: move all child nodes of e into t. */function Vn(e,t){t.append(...Array.from(e.childNodes))}/* Tr: wire "(n)" markers inside code block t to the matching li:nth-child(n) items of list e; on the print$ signal, toggles between inline annotations and a visible "md-annotation-list". kn/Dn/j/ue/M are file-local helpers not in view. */function Tr(e,t,{target$:r,print$:o}){let n=t.closest("[id]"),i=n==null?void 0:n.id,s=new Map;for(let a of Xa(t)){let[,c]=a.textContent.match(/\((\d+)\)/);ue(`:scope > li:nth-child(${c})`,e)&&(s.set(c,kn(c,i)),a.replaceWith(s.get(c)))}return s.size===0?y:H(()=>{let a=new T,c=a.pipe(oe(),ae(!0)),p=[];for(let[l,f]of s)p.push([j(".md-typeset",f),j(`:scope > li:nth-child(${l})`,e)]);return o.pipe(W(c)).subscribe(l=>{e.hidden=!l,e.classList.toggle("md-annotation-list",l);for(let[f,u]of p)l?Vn(f,u):Vn(u,f)}),L(...[...s].map(([,l])=>Dn(l,t,{target$:r}))).pipe(A(()=>a.complete()),le())})}/* Nn: next sibling <ol> of e, skipping empty <p> wrappers; undefined if none. */function Nn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return Nn(t)}}/* zn: mount annotations for code block e when a following <ol> exists, else the empty observable y. */function zn(e,t){return H(()=>{let r=Nn(e);return typeof r!="undefined"?Tr(r,e,t):y})}var Kn=$t(ao());/* Za: counter for generated code-block ids (see __code_${p} in Yn). qn: observable that emits true on keydown and false on keyup/contextmenu — i.e. "a key is currently held". NOTE(review): operator aliases (m, Q, Z, ...) are minified RxJS; semantics inferred, confirm against the unminified theme source. */var Za=0,qn=L(h(window,"keydown").pipe(m(()=>!0)),L(h(window,"keyup"),h(window,"contextmenu")).pipe(m(()=>!1))).pipe(Q(!1),Z(1));/* Qn: byte-for-byte duplicate of Nn above — next sibling <ol>, skipping empty <p> wrappers. */function Qn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return Qn(t)}}/* es: watch element size and report whether content width exceeds the visible width ({scrollable}). */function es(e){return Le(e).pipe(m(({width:t})=>({scrollable:At(e).width>t})),ne("scrollable"))}/* Yn: code-block component. Makes the block focusable (tabindex) when scrollable on hover-capable devices; mounts annotations when a following list exists; when per-line span[id]s are present, implements line selection and .hll highlighting synced to a "#__codelineno-<id>-<from>:<to>" hash (click, shift-style range via qn, history.replaceState + synthetic hashchange); adds a copy button when clipboard is supported; defers mounting under content.lazy. */function Yn(e,t){let{matches:r}=matchMedia("(hover)"),o=H(()=>{let n=new T,i=n.pipe(Yr(1));n.subscribe(({scrollable:d})=>{d&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")});let 
s=[],a=e.closest("pre"),c=a.closest("[id]"),p=c?c.id:Za++;a.id=`__code_${p}`;let l=[],f=e.closest(".highlight");if(f instanceof HTMLElement){let d=Qn(f);if(typeof d!="undefined"&&(f.classList.contains("annotate")||V("content.code.annotate"))){let v=Tr(d,e,t);l.push(Le(f).pipe(W(i),m(({width:S,height:X})=>S&&X),Y(),b(S=>S?v:y)))}}let u=M(":scope > span[id]",e);if(u.length&&(e.classList.add("md-code__content"),e.closest(".select")||V("content.code.select")&&!e.closest(".no-select"))){let d=+u[0].id.split("-").pop(),v=$n();s.push(v),V("content.tooltips")&&l.push(Xe(v,{viewport$}));let S=h(v,"click").pipe(Ut(R=>!R,!1),O(()=>v.blur()),le());S.subscribe(R=>{v.classList.toggle("md-code__button--active",R)});let X=fe(u).pipe(J(R=>it(R).pipe(m(se=>[R,se]))));S.pipe(b(R=>R?X:y)).subscribe(([R,se])=>{let ce=ue(".hll.select",R);if(ce&&!se)ce.replaceWith(...Array.from(ce.childNodes));else if(!ce&&se){let he=document.createElement("span");he.className="hll select",he.append(...Array.from(R.childNodes).slice(1)),R.append(he)}});let re=fe(u).pipe(J(R=>h(R,"mousedown").pipe(O(se=>se.preventDefault()),m(()=>R)))),ee=S.pipe(b(R=>R?re:y),te(qn),m(([R,se])=>{var he;let ce=u.indexOf(R)+d;if(se===!1)return[ce,ce];{let Se=M(".hll",e).map(Ue=>u.indexOf(Ue.parentElement)+d);return(he=window.getSelection())==null||he.removeAllRanges(),[Math.min(ce,...Se),Math.max(ce,...Se)]}})),k=Zr(y).pipe(g(R=>R.startsWith(`__codelineno-${p}-`)));k.subscribe(R=>{let[,,se]=R.split("-"),ce=se.split(":").map(Se=>+Se-d+1);ce.length===1&&ce.push(ce[0]);for(let Se of M(".hll:not(.select)",e))Se.replaceWith(...Array.from(Se.childNodes));let he=u.slice(ce[0]-1,ce[1]);for(let Se of he){let Ue=document.createElement("span");Ue.className="hll",Ue.append(...Array.from(Se.childNodes).slice(1)),Se.append(Ue)}}),k.pipe(Ee(1),xe(pe)).subscribe(R=>{if(R.includes(":")){let se=document.getElementById(R.split(":")[0]);se&&setTimeout(()=>{let 
ce=se,he=-64;for(;ce!==document.body;)he+=ce.offsetTop,ce=ce.offsetParent;window.scrollTo({top:he})},1)}});let je=fe(M('a[href^="#__codelineno"]',f)).pipe(J(R=>h(R,"click").pipe(O(se=>se.preventDefault()),m(()=>R)))).pipe(W(i),te(qn),m(([R,se])=>{let he=+j(`[id="${R.hash.slice(1)}"]`).parentElement.id.split("-").pop();if(se===!1)return[he,he];{let Se=M(".hll",e).map(Ue=>+Ue.parentElement.id.split("-").pop());return[Math.min(he,...Se),Math.max(he,...Se)]}}));L(ee,je).subscribe(R=>{let se=`#__codelineno-${p}-`;R[0]===R[1]?se+=R[0]:se+=`${R[0]}:${R[1]}`,history.replaceState({},"",se),window.dispatchEvent(new HashChangeEvent("hashchange",{newURL:window.location.origin+window.location.pathname+se,oldURL:window.location.href}))})}if(Kn.default.isSupported()&&(e.closest(".copy")||V("content.code.copy")&&!e.closest(".no-copy"))){let d=Hn(a.id);s.push(d),V("content.tooltips")&&l.push(Xe(d,{viewport$}))}if(s.length){let d=Pn();d.append(...s),a.insertBefore(d,e)}return es(e).pipe(O(d=>n.next(d)),A(()=>n.complete()),m(d=>P({ref:e},d)),Ve(L(...l).pipe(W(i))))});return V("content.lazy")?mt(e).pipe(g(n=>n),Ee(1),b(()=>o)):o}/* ts: <details> intents — emit "open"+reveal when the current target anchor sits inside this closed <details>; emit open/close when the print signal toggles. */function ts(e,{target$:t,print$:r}){let o=!0;return L(t.pipe(m(n=>n.closest("details:not([open])")),g(n=>e===n),m(()=>({action:"open",reveal:!0}))),r.pipe(g(n=>n||!o),O(()=>o=e.open),m(n=>({action:n?"open":"close"}))))}/* Bn: mount <details>: apply open/close actions via toggleAttribute and scroll the element into view when revealed. */function Bn(e,t){return H(()=>{let r=new T;return r.subscribe(({action:o,reveal:n})=>{e.toggleAttribute("open",o==="open"),n&&e.scrollIntoView()}),ts(e,t).pipe(O(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}/* Gn: counter used by os to suffix ids with $preview_N so cloned preview content stays unique. */var Gn=0;/* rs: clone heading e as an <h3> and collect following siblings up to (excluding) the next heading. */function rs(e){let t=document.createElement("h3");t.innerHTML=e.innerHTML;let r=[t],o=e.nextElementSibling;for(;o&&!(o instanceof HTMLHeadingElement);)r.push(o),o=o.nextElementSibling;return r}/* os: resolve relative href/src attributes in e against base URL t, and rename id/for/name attributes with a "$preview_N" suffix to avoid collisions with the host page. */function os(e,t){for(let r of M("[href], [src]",e))for(let o of["href","src"]){let n=r.getAttribute(o);if(n&&!/^(?:[a-z]+:)?\/\//i.test(n)){r[o]=new URL(r.getAttribute(o),t).toString();break}}for(let r of M("[name^=__], 
[for]",e))for(let o of["id","for","name"]){let n=r.getAttribute(o);n&&r.setAttribute(o,`${n}$preview_${Gn}`)}return Gn++,$(e)}/* Jn: instant link preview. For an internal anchor (gated on the navigation.instant.preview feature or a data-preview attribute) that appears in the sitemap, on hover/focus fetch the target page, rewrite it via os, extract the hash-targeted section (or the page h1) via rs, and attach it as tooltip content. */function Jn(e,t){let{sitemap$:r}=t;if(!(e instanceof HTMLAnchorElement))return y;if(!(V("navigation.instant.preview")||e.hasAttribute("data-preview")))return y;e.removeAttribute("title");let o=z([Ye(e),it(e)]).pipe(m(([i,s])=>i||s),Y(),g(i=>i));return rt([r,o]).pipe(b(([i])=>{let s=new URL(e.href);return s.search=s.hash="",i.has(`${s}`)?$(s):y}),b(i=>xr(i).pipe(b(s=>os(s,i)))),b(i=>{let s=e.hash?`article [id="${e.hash.slice(1)}"]`:"article h1",a=ue(s,i);return typeof a=="undefined"?y:$(rs(a))})).pipe(b(i=>{let s=new F(a=>{let c=wr(...i);return a.next(c),document.body.append(c),()=>c.remove()});return Vt(e,P({content$:s},t))}))}/* Xn: Mermaid theme stylesheet as a string (literal continues beyond this chunk), mapping diagram elements to the theme's --md-mermaid-* custom properties. */var Xn=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.flowchartTitleText{fill:var(--md-mermaid-label-fg-color)}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel p,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel p{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color)}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}g #flowchart-circleEnd,g #flowchart-circleStart,g #flowchart-crossEnd,g #flowchart-crossStart,g #flowchart-pointEnd,g 
#flowchart-pointStart{stroke:none}.classDiagramTitleText{fill:var(--md-mermaid-label-fg-color)}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs marker.marker.composition.class path,defs marker.marker.dependency.class path,defs marker.marker.extension.class path{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs marker.marker.aggregation.class path{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}.statediagramTitleText{fill:var(--md-mermaid-label-fg-color)}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel,.nodeLabel p{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}a .nodeLabel{text-decoration:underline}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] 
rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs #statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}[id^=entity] path,[id^=entity] rect{fill:var(--md-default-bg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs .marker.oneOrMore.er *,defs .marker.onlyOne.er *,defs .marker.zeroOrMore.er *,defs .marker.zeroOrOne.er *{stroke:var(--md-mermaid-edge-color)!important}text:not([class]):last-child{fill:var(--md-mermaid-label-fg-color)}.actor{fill:var(--md-mermaid-sequence-actor-bg-color);stroke:var(--md-mermaid-sequence-actor-border-color)}text.actor>tspan{fill:var(--md-mermaid-sequence-actor-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-mermaid-sequence-actor-line-color)}.actor-man circle,.actor-man line{fill:var(--md-mermaid-sequence-actorman-bg-color);stroke:var(--md-mermaid-sequence-actorman-line-color)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-sequence-message-line-color)}.note{fill:var(--md-mermaid-sequence-note-bg-color);stroke:var(--md-mermaid-sequence-note-border-color)}.loopText,.loopText>tspan,.messageText,.noteText>tspan{stroke:none;font-family:var(--md-mermaid-font-family)!important}.messageText{fill:var(--md-mermaid-sequence-message-fg-color)}.loopText,.loopText>tspan{fill:var(--md-mermaid-sequence-loop-fg-color)}.noteText>tspan{fill:var(--md-mermaid-sequence-note-fg-color)}#arrowhead
path{fill:var(--md-mermaid-sequence-message-line-color);stroke:none}.loopLine{fill:var(--md-mermaid-sequence-loop-bg-color);stroke:var(--md-mermaid-sequence-loop-border-color)}.labelBox{fill:var(--md-mermaid-sequence-label-bg-color);stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-sequence-label-fg-color);font-family:var(--md-mermaid-font-family)}.sequenceNumber{fill:var(--md-mermaid-sequence-number-fg-color)}rect.rect{fill:var(--md-mermaid-sequence-box-bg-color);stroke:none}rect.rect+text.text{fill:var(--md-mermaid-sequence-box-fg-color)}defs #sequencenumber{fill:var(--md-mermaid-sequence-number-bg-color)!important}";var so,is=0;function as(){return typeof mermaid=="undefined"||mermaid instanceof Element?_t("https://unpkg.com/mermaid@11/dist/mermaid.min.js"):$(void 0)}function Zn(e){return e.classList.remove("mermaid"),so||(so=as().pipe(O(()=>mermaid.initialize({startOnLoad:!1,themeCSS:Xn,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),Z(1))),so.subscribe(()=>go(null,null,function*(){e.classList.add("mermaid");let t=`__mermaid_${is++}`,r=x("div",{class:"mermaid"}),o=e.textContent,{svg:n,fn:i}=yield mermaid.render(t,o),s=r.attachShadow({mode:"closed"});s.innerHTML=n,e.replaceWith(r),i==null||i(s)})),so.pipe(m(()=>({ref:e})))}var ei=x("table");function ti(e){return e.replaceWith(ei),ei.replaceWith(Un(e)),$({ref:e})}function ss(e){let t=e.find(r=>r.checked)||e[0];return L(...e.map(r=>h(r,"change").pipe(m(()=>j(`label[for="${r.id}"]`))))).pipe(Q(j(`label[for="${t.id}"]`)),m(r=>({active:r})))}function ri(e,{viewport$:t,target$:r}){let o=j(".tabbed-labels",e),n=M(":scope > input",e),i=no("prev");e.append(i);let s=no("next");return e.append(s),H(()=>{let a=new T,c=a.pipe(oe(),ae(!0));z([a,Le(e),mt(e)]).pipe(W(c),$e(1,ye)).subscribe({next([{active:p},l]){let f=Be(p),{width:u}=de(p);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let 
d=gr(o);(f.xd.x+l.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),z([Ge(o),Le(o)]).pipe(W(c)).subscribe(([p,l])=>{let f=At(o);i.hidden=p.x<16,s.hidden=p.x>f.width-l.width-16}),L(h(i,"click").pipe(m(()=>-1)),h(s,"click").pipe(m(()=>1))).pipe(W(c)).subscribe(p=>{let{width:l}=de(o);o.scrollBy({left:l*p,behavior:"smooth"})}),r.pipe(W(c),g(p=>n.includes(p))).subscribe(p=>p.click()),o.classList.add("tabbed-labels--linked");for(let p of n){let l=j(`label[for="${p.id}"]`);l.replaceChildren(x("a",{href:`#${l.htmlFor}`,tabIndex:-1},...Array.from(l.childNodes))),h(l.firstElementChild,"click").pipe(W(c),g(f=>!(f.metaKey||f.ctrlKey)),O(f=>{f.preventDefault(),f.stopPropagation()})).subscribe(()=>{history.replaceState({},"",`#${l.htmlFor}`),l.click()})}return V("content.tabs.link")&&a.pipe(Ie(1),te(t)).subscribe(([{active:p},{offset:l}])=>{let f=p.innerText.trim();if(p.hasAttribute("data-md-switching"))p.removeAttribute("data-md-switching");else{let u=e.offsetTop-l.y;for(let v of M("[data-tabs]"))for(let S of M(":scope > input",v)){let X=j(`label[for="${S.id}"]`);if(X!==p&&X.innerText.trim()===f){X.setAttribute("data-md-switching",""),S.click();break}}window.scrollTo({top:e.offsetTop-u});let d=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...d])])}}),a.pipe(W(c)).subscribe(()=>{for(let p of M("audio, video",e))p.offsetWidth&&p.autoplay?p.play().catch(()=>{}):p.pause()}),ss(n).pipe(O(p=>a.next(p)),A(()=>a.complete()),m(p=>P({ref:e},p)))}).pipe(et(pe))}function oi(e,t){let{viewport$:r,target$:o,print$:n}=t;return L(...M(".annotate:not(.highlight)",e).map(i=>zn(i,{target$:o,print$:n})),...M("pre:not(.mermaid) > 
code",e).map(i=>Yn(i,{target$:o,print$:n})),...M("a",e).map(i=>Jn(i,t)),...M("pre.mermaid",e).map(i=>Zn(i)),...M("table:not([class])",e).map(i=>ti(i)),...M("details",e).map(i=>Bn(i,{target$:o,print$:n})),...M("[data-tabs]",e).map(i=>ri(i,{viewport$:r,target$:o})),...M("[title]:not([data-preview])",e).filter(()=>V("content.tooltips")).map(i=>Xe(i,{viewport$:r})),...M(".footnote-ref",e).filter(()=>V("content.footnote.tooltips")).map(i=>Vt(i,{content$:new F(s=>{let a=new URL(i.href).hash.slice(1),c=Array.from(document.getElementById(a).cloneNode(!0).children),p=wr(...c);return s.next(p),document.body.append(p),()=>p.remove()}),viewport$:r})))}function cs(e,{alert$:t}){return t.pipe(b(r=>L($(!0),$(!1).pipe(nt(2e3))).pipe(m(o=>({message:r,active:o})))))}function ni(e,t){let r=j(".md-typeset",e);return H(()=>{let o=new T;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),cs(e,t).pipe(O(n=>o.next(n)),A(()=>o.complete()),m(n=>P({ref:e},n)))})}var ps=0;function ls(e,t){document.body.append(e);let{width:r}=de(e);e.style.setProperty("--md-tooltip-width",`${r}px`),e.remove();let o=vr(t),n=typeof o!="undefined"?Ge(o):$({x:0,y:0}),i=L(Ye(t),it(t)).pipe(Y());return z([i,n]).pipe(m(([s,a])=>{let{x:c,y:p}=Be(t),l=de(t),f=t.closest("table");return f&&t.parentElement&&(c+=f.offsetLeft+t.parentElement.offsetLeft,p+=f.offsetTop+t.parentElement.offsetTop),{active:s,offset:{x:c-a.x+l.width/2-r/2,y:p-a.y+l.height+8}}}))}function ii(e){let t=e.title;if(!t.length)return y;let r=`__tooltip_${ps++}`,o=Dt(r,"inline"),n=j(".md-typeset",o);return n.innerHTML=t,H(()=>{let i=new T;return 
i.subscribe({next({offset:s}){o.style.setProperty("--md-tooltip-x",`${s.x}px`),o.style.setProperty("--md-tooltip-y",`${s.y}px`)},complete(){o.style.removeProperty("--md-tooltip-x"),o.style.removeProperty("--md-tooltip-y")}}),L(i.pipe(g(({active:s})=>s)),i.pipe(Ae(250),g(({active:s})=>!s))).subscribe({next({active:s}){s?(e.insertAdjacentElement("afterend",o),e.setAttribute("aria-describedby",r),e.removeAttribute("title")):(o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t))},complete(){o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t)}}),i.pipe($e(16,ye)).subscribe(({active:s})=>{o.classList.toggle("md-tooltip--active",s)}),i.pipe(gt(125,ye),g(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:s})=>s)).subscribe({next(s){s?o.style.setProperty("--md-tooltip-0",`${-s}px`):o.style.removeProperty("--md-tooltip-0")},complete(){o.style.removeProperty("--md-tooltip-0")}}),ls(o,e).pipe(O(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))}).pipe(et(pe))}function ms({viewport$:e}){if(!V("header.autohide"))return $(!1);let t=e.pipe(m(({offset:{y:n}})=>n),ot(2,1),m(([n,i])=>[nMath.abs(i-n.y)>100),m(([,[n]])=>n),Y()),o=Je("search");return z([e,o]).pipe(m(([{offset:n},i])=>n.y>400&&!i),Y(),b(n=>n?r:$(!1)),Q(!1))}function ai(e,t){return H(()=>z([Le(e),ms(t)])).pipe(m(([{height:r},o])=>({height:r,hidden:o})),Y((r,o)=>r.height===o.height&&r.hidden===o.hidden),Z(1))}function si(e,{header$:t,main$:r}){return H(()=>{let o=new T,n=o.pipe(oe(),ae(!0));o.pipe(ne("active"),Pe(t)).subscribe(([{active:s},{hidden:a}])=>{e.classList.toggle("md-header--shadow",s&&!a),e.hidden=a});let i=fe(M("[title]",e)).pipe(g(()=>V("content.tooltips")),J(s=>ii(s)));return r.subscribe(o),t.pipe(W(n),m(s=>P({ref:e},s)),Ve(i.pipe(W(n))))})}function fs(e,{viewport$:t,header$:r}){return Er(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:o}})=>{let{height:n}=de(e);return{active:n>0&&o>=n}}),ne("active"))}function ci(e,t){return H(()=>{let 
r=new T;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=ue(".md-content h1");return typeof o=="undefined"?y:fs(o,t).pipe(O(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))})}function pi(e,{viewport$:t,header$:r}){let o=r.pipe(m(({height:i})=>i),Y()),n=o.pipe(b(()=>Le(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),ne("bottom"))));return z([o,n,t]).pipe(m(([i,{top:s,bottom:a},{offset:{y:c},size:{height:p}}])=>(p=Math.max(0,p-Math.max(0,s-c,i)-Math.max(0,p+c-a)),{offset:s-i,height:p,active:s-i<=c})),Y((i,s)=>i.offset===s.offset&&i.height===s.height&&i.active===s.active))}function us(e){let t=__md_get("__palette")||{index:e.findIndex(o=>matchMedia(o.getAttribute("data-md-color-media")).matches)},r=Math.max(0,Math.min(t.index,e.length-1));return $(...e).pipe(J(o=>h(o,"change").pipe(m(()=>o))),Q(e[r]),m(o=>({index:e.indexOf(o),color:{media:o.getAttribute("data-md-color-media"),scheme:o.getAttribute("data-md-color-scheme"),primary:o.getAttribute("data-md-color-primary"),accent:o.getAttribute("data-md-color-accent")}})),Z(1))}function li(e){let t=M("input",e),r=x("meta",{name:"theme-color"});document.head.appendChild(r);let o=x("meta",{name:"color-scheme"});document.head.appendChild(o);let n=Wt("(prefers-color-scheme: light)");return H(()=>{let i=new T;return i.subscribe(s=>{if(document.body.setAttribute("data-md-color-switching",""),s.color.media==="(prefers-color-scheme)"){let a=matchMedia("(prefers-color-scheme: light)"),c=document.querySelector(a.matches?"[data-md-color-media='(prefers-color-scheme: light)']":"[data-md-color-media='(prefers-color-scheme: dark)']");s.color.scheme=c.getAttribute("data-md-color-scheme"),s.color.primary=c.getAttribute("data-md-color-primary"),s.color.accent=c.getAttribute("data-md-color-accent")}for(let[a,c]of Object.entries(s.color))document.body.setAttribute(`data-md-color-${a}`,c);for(let 
a=0;as.key==="Enter"),te(i,(s,a)=>a)).subscribe(({index:s})=>{s=(s+1)%t.length,t[s].click(),t[s].focus()}),i.pipe(m(()=>{let s=Ce("header"),a=window.getComputedStyle(s);return o.content=a.colorScheme,a.backgroundColor.match(/\d+/g).map(c=>(+c).toString(16).padStart(2,"0")).join("")})).subscribe(s=>r.content=`#${s}`),i.pipe(xe(pe)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")}),us(t).pipe(W(n.pipe(Ie(1))),vt(),O(s=>i.next(s)),A(()=>i.complete()),m(s=>P({ref:e},s)))})}function mi(e,{progress$:t}){return H(()=>{let r=new T;return r.subscribe(({value:o})=>{e.style.setProperty("--md-progress-value",`${o}`)}),t.pipe(O(o=>r.next({value:o})),A(()=>r.complete()),m(o=>({ref:e,value:o})))})}function fi(e,t){return e.protocol=t.protocol,e.hostname=t.hostname,e}function ds(e,t){let r=new Map;for(let o of M("url",e)){let n=j("loc",o),i=[fi(new URL(n.textContent),t)];r.set(`${i[0]}`,i);for(let s of M("[rel=alternate]",o)){let a=s.getAttribute("href");a!=null&&i.push(fi(new URL(a),t))}}return r}function kt(e){return En(new URL("sitemap.xml",e)).pipe(m(t=>ds(t,new URL(e))),ve(()=>$(new Map)),le())}function ui({document$:e}){let t=new Map;e.pipe(b(()=>M("link[rel=alternate]")),m(r=>new URL(r.href)),g(r=>!t.has(r.toString())),J(r=>kt(r).pipe(m(o=>[r,o]),ve(()=>y)))).subscribe(([r,o])=>{t.set(r.toString().replace(/\/$/,""),o)}),h(document.body,"click").pipe(g(r=>!r.metaKey&&!r.ctrlKey),b(r=>{if(r.target instanceof Element){let o=r.target.closest("a");if(o&&!o.target){let n=[...t].find(([f])=>o.href.startsWith(`${f}/`));if(typeof n=="undefined")return y;let[i,s]=n,a=we();if(a.href.startsWith(i))return y;let c=Te(),p=a.href.replace(c.base,"");p=`${i}/${p}`;let l=s.has(p.split("#")[0])?new URL(p,c.base):new URL(i);return r.preventDefault(),$(l)}}return y})).subscribe(r=>st(r,!0))}var co=$t(ao());function hs(e){e.setAttribute("data-md-copying","");let t=e.closest("[data-copy]"),r=t?t.getAttribute("data-copy"):e.innerText;return 
e.removeAttribute("data-md-copying"),r.trimEnd()}function di({alert$:e}){co.default.isSupported()&&new F(t=>{new co.default("[data-clipboard-target], [data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||hs(j(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(O(t=>{t.trigger.focus()}),m(()=>Me("clipboard.copied"))).subscribe(e)}function hi(e,t){if(!(e.target instanceof Element))return y;let r=e.target.closest("a");if(r===null)return y;if(r.target||e.metaKey||e.ctrlKey)return y;let o=new URL(r.href);return o.search=o.hash="",t.has(`${o}`)?(e.preventDefault(),$(r)):y}function bi(e){let t=new Map;for(let r of M(":scope > *",e.head))t.set(r.outerHTML,r);return t}function vi(e){for(let t of M("[href], [src]",e))for(let r of["href","src"]){let o=t.getAttribute(r);if(o&&!/^(?:[a-z]+:)?\/\//i.test(o)){t[r]=t[r];break}}return $(e)}function bs(e){for(let o of["[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...V("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let n=ue(o),i=ue(o,e);typeof n!="undefined"&&typeof i!="undefined"&&n.replaceWith(i)}let t=bi(document);for(let[o,n]of bi(e))t.has(o)?t.delete(o):document.head.appendChild(n);for(let o of t.values()){let n=o.getAttribute("name");n!=="theme-color"&&n!=="color-scheme"&&o.remove()}let r=Ce("container");return Ke(M("script",r)).pipe(b(o=>{let n=e.createElement("script");if(o.src){for(let i of o.getAttributeNames())n.setAttribute(i,o.getAttribute(i));return o.replaceWith(n),new F(i=>{n.onload=()=>i.complete()})}else return n.textContent=o.textContent,o.replaceWith(n),y}),oe(),ae(document))}function gi({sitemap$:e,location$:t,viewport$:r,progress$:o}){if(location.protocol==="file:")return y;$(document).subscribe(vi);let n=h(document.body,"click").pipe(Pe(e),b(([a,c])=>hi(a,c)),m(({href:a})=>new 
URL(a)),le()),i=h(window,"popstate").pipe(m(we),le());n.pipe(te(r)).subscribe(([a,{offset:c}])=>{history.replaceState(c,""),history.pushState(null,"",a)}),L(n,i).subscribe(t);let s=t.pipe(ne("pathname"),b(a=>xr(a,{progress$:o}).pipe(ve(()=>(st(a,!0),y)))),b(vi),b(bs),le());return L(s.pipe(te(t,(a,c)=>c)),s.pipe(b(()=>t),ne("hash")),t.pipe(Y((a,c)=>a.pathname===c.pathname&&a.hash===c.hash),b(()=>n),O(()=>history.back()))).subscribe(a=>{var c,p;history.state!==null||!a.hash?window.scrollTo(0,(p=(c=history.state)==null?void 0:c.y)!=null?p:0):(history.scrollRestoration="auto",gn(a.hash),history.scrollRestoration="manual")}),t.subscribe(()=>{history.scrollRestoration="manual"}),h(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),r.pipe(ne("offset"),Ae(100)).subscribe(({offset:a})=>{history.replaceState(a,"")}),V("navigation.instant.prefetch")&&L(h(document.body,"mousemove"),h(document.body,"focusin")).pipe(Pe(e),b(([a,c])=>hi(a,c)),Ae(25),Qr(({href:a})=>a),hr(a=>{let c=document.createElement("link");return c.rel="prefetch",c.href=a.toString(),document.head.appendChild(c),h(c,"load").pipe(m(()=>c),Ee(1))})).subscribe(a=>a.remove()),s}var yi=$t(ro());function xi(e){let t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,s)=>`${i}${s}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").replace(/&/g,"&").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return s=>(0,yi.default)(s).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function zt(e){return e.type===1}function Sr(e){return e.type===3}function Ei(e,t){let r=Mn(e);return L($(location.protocol!=="file:"),Je("search")).pipe(Re(o=>o),b(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:V("search.suggest")}}})),r}function wi(e){var 
l;let{selectedVersionSitemap:t,selectedVersionBaseURL:r,currentLocation:o,currentBaseURL:n}=e,i=(l=po(n))==null?void 0:l.pathname;if(i===void 0)return;let s=ys(o.pathname,i);if(s===void 0)return;let a=Es(t.keys());if(!t.has(a))return;let c=po(s,a);if(!c||!t.has(c.href))return;let p=po(s,r);if(p)return p.hash=o.hash,p.search=o.search,p}function po(e,t){try{return new URL(e,t)}catch(r){return}}function ys(e,t){if(e.startsWith(t))return e.slice(t.length)}function xs(e,t){let r=Math.min(e.length,t.length),o;for(o=0;oy)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:s,aliases:a})=>s===i||a.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),b(n=>h(document.body,"click").pipe(g(i=>!i.metaKey&&!i.ctrlKey),te(o),b(([i,s])=>{if(i.target instanceof Element){let a=i.target.closest("a");if(a&&!a.target&&n.has(a.href)){let c=a.href;return!i.target.closest(".md-version")&&n.get(c)===s?y:(i.preventDefault(),$(new URL(c)))}}return y}),b(i=>kt(i).pipe(m(s=>{var a;return(a=wi({selectedVersionSitemap:s,selectedVersionBaseURL:i,currentLocation:we(),currentBaseURL:t.base}))!=null?a:i})))))).subscribe(n=>st(n,!0)),z([r,o]).subscribe(([n,i])=>{j(".md-header__topic").appendChild(Wn(n,i))}),e.pipe(b(()=>o)).subscribe(n=>{var a;let i=new URL(t.base),s=__md_get("__outdated",sessionStorage,i);if(s===null){s=!0;let c=((a=t.version)==null?void 0:a.default)||"latest";Array.isArray(c)||(c=[c]);e:for(let p of c)for(let l of n.aliases.concat(n.version))if(new RegExp(p,"i").test(l)){s=!1;break e}__md_set("__outdated",s,sessionStorage,i)}if(s)for(let c of me("outdated"))c.hidden=!1})}function ws(e,{worker$:t}){let{searchParams:r}=we();r.has("q")&&(at("search",!0),e.value=r.get("q"),e.focus(),Je("search").pipe(Re(i=>!i)).subscribe(()=>{let i=we();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=Ye(e),n=L(t.pipe(Re(zt)),h(e,"keyup"),o).pipe(m(()=>e.value),Y());return 
z([n,o]).pipe(m(([i,s])=>({value:i,focus:s})),Z(1))}function Si(e,{worker$:t}){let r=new T,o=r.pipe(oe(),ae(!0));z([t.pipe(Re(zt)),r],(i,s)=>s).pipe(ne("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(ne("focus")).subscribe(({focus:i})=>{i&&at("search",i)}),h(e.form,"reset").pipe(W(o)).subscribe(()=>e.focus());let n=j("header [for=__search]");return h(n,"click").subscribe(()=>e.focus()),ws(e,{worker$:t}).pipe(O(i=>r.next(i)),A(()=>r.complete()),m(i=>P({ref:e},i)),Z(1))}function Oi(e,{worker$:t,query$:r}){let o=new T,n=un(e.parentElement).pipe(g(Boolean)),i=e.parentElement,s=j(":scope > :first-child",e),a=j(":scope > :last-child",e);Je("search").subscribe(l=>{a.setAttribute("role",l?"list":"presentation"),a.hidden=!l}),o.pipe(te(r),Gr(t.pipe(Re(zt)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:s.textContent=f.length?Me("search.result.none"):Me("search.result.placeholder");break;case 1:s.textContent=Me("search.result.one");break;default:let u=br(l.length);s.textContent=Me("search.result.other",u)}});let c=o.pipe(O(()=>a.innerHTML=""),b(({items:l})=>L($(...l.slice(0,10)),$(...l.slice(10)).pipe(ot(4),Xr(n),b(([f])=>f)))),m(Fn),le());return c.subscribe(l=>a.appendChild(l)),c.pipe(J(l=>{let f=ue("details",l);return typeof f=="undefined"?y:h(f,"toggle").pipe(W(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(g(Sr),m(({data:l})=>l)).pipe(O(l=>o.next(l)),A(()=>o.complete()),m(l=>P({ref:e},l)))}function Ts(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=we();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function Li(e,t){let r=new T,o=r.pipe(oe(),ae(!0));return r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),h(e,"click").pipe(W(o)).subscribe(n=>n.preventDefault()),Ts(e,t).pipe(O(n=>r.next(n)),A(()=>r.complete()),m(n=>P({ref:e},n)))}function Mi(e,{worker$:t,keyboard$:r}){let o=new 
T,n=Ce("search-query"),i=L(h(n,"keydown"),h(n,"focus")).pipe(xe(pe),m(()=>n.value),Y());return o.pipe(Pe(i),m(([{suggest:a},c])=>{let p=c.split(/([\s-]+)/);if(a!=null&&a.length&&p[p.length-1]){let l=a[a.length-1];l.startsWith(p[p.length-1])&&(p[p.length-1]=l)}else p.length=0;return p})).subscribe(a=>e.innerHTML=a.join("").replace(/\s/g," ")),r.pipe(g(({mode:a})=>a==="search")).subscribe(a=>{a.type==="ArrowRight"&&e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText)}),t.pipe(g(Sr),m(({data:a})=>a)).pipe(O(a=>o.next(a)),A(()=>o.complete()),m(()=>({ref:e})))}function _i(e,{index$:t,keyboard$:r}){let o=Te();try{let n=Ei(o.search,t),i=Ce("search-query",e),s=Ce("search-result",e);h(e,"click").pipe(g(({target:c})=>c instanceof Element&&!!c.closest("a"))).subscribe(()=>at("search",!1)),r.pipe(g(({mode:c})=>c==="search")).subscribe(c=>{let p=Ne();switch(c.type){case"Enter":if(p===i){let l=new Map;for(let f of M(":first-child [href]",s)){let u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,d])=>d-u);f.click()}c.claim()}break;case"Escape":case"Tab":at("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof p=="undefined")i.focus();else{let l=[i,...M(":not(details) > [href], summary, details[open] [href]",s)],f=Math.max(0,(Math.max(0,l.indexOf(p))+l.length+(c.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}c.claim();break;default:i!==Ne()&&i.focus()}}),r.pipe(g(({mode:c})=>c==="global")).subscribe(c=>{switch(c.type){case"f":case"s":case"/":i.focus(),i.select(),c.claim();break}});let a=Si(i,{worker$:n});return L(a,Oi(s,{worker$:n,query$:a})).pipe(Ve(...me("search-share",e).map(c=>Li(c,{query$:a})),...me("search-suggest",e).map(c=>Mi(c,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,tt}}function Ai(e,{index$:t,location$:r}){return z([t,r.pipe(Q(we()),g(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>xi(o.config)(n.searchParams.get("h"))),m(o=>{var s;let n=new 
Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let a=i.nextNode();a;a=i.nextNode())if((s=a.parentElement)!=null&&s.offsetHeight){let c=a.textContent,p=o(c);p.length>c.length&&n.set(a,p)}for(let[a,c]of n){let{childNodes:p}=x("span",null,c);a.replaceWith(...Array.from(p))}return{ref:e,nodes:n}}))}function Ss(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return z([r,t]).pipe(m(([{offset:i,height:s},{offset:{y:a}}])=>(s=s+Math.min(n,Math.max(0,a-i))-n,{height:s,locked:a>=i+n})),Y((i,s)=>i.height===s.height&&i.locked===s.locked))}function lo(e,o){var n=o,{header$:t}=n,r=vo(n,["header$"]);let i=j(".md-sidebar__scrollwrap",e),{y:s}=Be(i);return H(()=>{let a=new T,c=a.pipe(oe(),ae(!0)),p=a.pipe($e(0,ye));return p.pipe(te(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*s}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),p.pipe(Re()).subscribe(()=>{for(let l of M(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=de(f);f.scrollTo({top:u-d/2})}}}),fe(M("label[tabindex]",e)).pipe(J(l=>h(l,"click").pipe(xe(pe),m(()=>l),W(c)))).subscribe(l=>{let f=j(`[id="${l.htmlFor}"]`);j(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),V("content.tooltips")&&fe(M("abbr[title]",e)).pipe(J(l=>Xe(l,{viewport$})),W(c)).subscribe(),Ss(e,r).pipe(O(l=>a.next(l)),A(()=>a.complete()),m(l=>P({ref:e},l)))})}function Ci(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return rt(ze(`${r}/releases/latest`).pipe(ve(()=>y),m(o=>({version:o.tag_name})),Qe({})),ze(r).pipe(ve(()=>y),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),Qe({}))).pipe(m(([o,n])=>P(P({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return ze(r).pipe(m(o=>({repositories:o.public_repos})),Qe({}))}}function ki(e,t){let 
r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return rt(ze(`${r}/releases/permalink/latest`).pipe(ve(()=>y),m(({tag_name:o})=>({version:o})),Qe({})),ze(r).pipe(ve(()=>y),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),Qe({}))).pipe(m(([o,n])=>P(P({},o),n)))}function Hi(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return Ci(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return ki(r,o)}return y}var Os;function Ls(e){return Os||(Os=H(()=>{let t=__md_get("__source",sessionStorage);if(t)return $(t);if(me("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return y}return Hi(e.href).pipe(O(o=>__md_set("__source",o,sessionStorage)))}).pipe(ve(()=>y),g(t=>Object.keys(t).length>0),m(t=>({facts:t})),Z(1)))}function $i(e){let t=j(":scope > :last-child",e);return H(()=>{let r=new T;return r.subscribe(({facts:o})=>{t.appendChild(jn(o)),t.classList.add("md-source__repository--active")}),Ls(e).pipe(O(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function Ms(e,{viewport$:t,header$:r}){return Le(document.body).pipe(b(()=>Er(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),ne("hidden"))}function Pi(e,t){return H(()=>{let r=new T;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(V("navigation.tabs.sticky")?$({hidden:!1}):Ms(e,t)).pipe(O(o=>r.next(o)),A(()=>r.complete()),m(o=>P({ref:e},o)))})}function _s(e,{viewport$:t,header$:r}){let o=new Map,n=M(".md-nav__link",e);for(let a of n){let c=decodeURIComponent(a.hash.substring(1)),p=ue(`[id="${c}"]`);typeof p!="undefined"&&o.set(a,p)}let i=r.pipe(ne("height"),m(({height:a})=>{let c=Ce("main"),p=j(":scope > :first-child",c);return a+.8*(p.offsetTop-c.offsetTop)}),le());return Le(document.body).pipe(ne("height"),b(a=>H(()=>{let c=[];return $([...o].reduce((p,[l,f])=>{for(;c.length&&o.get(c[c.length-1]).tagName>=f.tagName;)c.pop();let 
u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let d=f.offsetParent;for(;d;d=d.offsetParent)u+=d.offsetTop;return p.set([...c=[...c,l]].reverse(),u)},new Map))}).pipe(m(c=>new Map([...c].sort(([,p],[,l])=>p-l))),Pe(i),b(([c,p])=>t.pipe(Ut(([l,f],{offset:{y:u},size:d})=>{let v=u+d.height>=Math.floor(a.height);for(;f.length;){let[,S]=f[0];if(S-p=u&&!v)f=[l.pop(),...f];else break}return[l,f]},[[],[...c]]),Y((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([a,c])=>({prev:a.map(([p])=>p),next:c.map(([p])=>p)})),Q({prev:[],next:[]}),ot(2,1),m(([a,c])=>a.prev.length{let i=new T,s=i.pipe(oe(),ae(!0));if(i.subscribe(({prev:a,next:c})=>{for(let[p]of c)p.classList.remove("md-nav__link--passed"),p.classList.remove("md-nav__link--active");for(let[p,[l]]of a.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",p===a.length-1)}),V("toc.follow")){let a=L(t.pipe(Ae(1),m(()=>{})),t.pipe(Ae(250),m(()=>"smooth")));i.pipe(g(({prev:c})=>c.length>0),Pe(o.pipe(xe(pe))),te(a)).subscribe(([[{prev:c}],p])=>{let[l]=c[c.length-1];if(l.offsetHeight){let f=vr(l);if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:d}=de(f);f.scrollTo({top:u-d/2,behavior:p})}}})}return V("navigation.tracking")&&t.pipe(W(s),ne("offset"),Ae(250),Ie(1),W(n.pipe(Ie(1))),vt({delay:250}),te(i)).subscribe(([,{prev:a}])=>{let c=we(),p=a[a.length-1];if(p&&p.length){let[l]=p,{hash:f}=new URL(l.href);c.hash!==f&&(c.hash=f,history.replaceState({},"",`${c}`))}else c.hash="",history.replaceState({},"",`${c}`)}),_s(e,{viewport$:t,header$:r}).pipe(O(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))})}function As(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:s}})=>s),ot(2,1),m(([s,a])=>s>a&&a>0),Y()),i=r.pipe(m(({active:s})=>s));return z([i,n]).pipe(m(([s,a])=>!(s&&a)),Y(),W(o.pipe(Ie(1))),ae(!0),vt({delay:250}),m(s=>({hidden:s})))}function Ii(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new T,s=i.pipe(oe(),ae(!0));return 
i.subscribe({next({hidden:a}){e.hidden=a,a?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(W(s),ne("height")).subscribe(({height:a})=>{e.style.top=`${a+16}px`}),h(e,"click").subscribe(a=>{a.preventDefault(),window.scrollTo({top:0})}),As(e,{viewport$:t,main$:o,target$:n}).pipe(O(a=>i.next(a)),A(()=>i.complete()),m(a=>P({ref:e},a)))}function Fi({document$:e,viewport$:t}){e.pipe(b(()=>M(".md-ellipsis")),J(r=>mt(r).pipe(W(e.pipe(Ie(1))),g(o=>o),m(()=>r),Ee(1))),g(r=>r.offsetWidth{let o=r.innerText,n=r.closest("a")||r;return n.title=o,V("content.tooltips")?Xe(n,{viewport$:t}).pipe(W(e.pipe(Ie(1))),A(()=>n.removeAttribute("title"))):y})).subscribe(),V("content.tooltips")&&e.pipe(b(()=>M(".md-status")),J(r=>Xe(r,{viewport$:t}))).subscribe()}function ji({document$:e,tablet$:t}){e.pipe(b(()=>M(".md-toggle--indeterminate")),O(r=>{r.indeterminate=!0,r.checked=!1}),J(r=>h(r,"change").pipe(Jr(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),te(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function Cs(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function Ui({document$:e}){e.pipe(b(()=>M("[data-md-scrollfix]")),O(t=>t.removeAttribute("data-md-scrollfix")),g(Cs),J(t=>h(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function Wi({viewport$:e,tablet$:t}){z([Je("search"),t]).pipe(m(([r,o])=>r&&!o),b(r=>$(r).pipe(nt(r?400:100))),te(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return 
t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function ks(){return location.protocol==="file:"?_t(`${new URL("search/search_index.js",Or.base)}`).pipe(m(()=>__index),Z(1)):ze(new URL("search/search_index.json",Or.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var ct=an(),Kt=bn(),Ht=yn(Kt),mo=hn(),ke=Ln(),Lr=Wt("(min-width: 60em)"),Vi=Wt("(min-width: 76.25em)"),Ni=xn(),Or=Te(),zi=document.forms.namedItem("search")?ks():tt,fo=new T;di({alert$:fo});ui({document$:ct});var uo=new T,qi=kt(Or.base);V("navigation.instant")&&gi({sitemap$:qi,location$:Kt,viewport$:ke,progress$:uo}).subscribe(ct);var Di;((Di=Or.version)==null?void 0:Di.provider)==="mike"&&Ti({document$:ct});L(Kt,Ht).pipe(nt(125)).subscribe(()=>{at("drawer",!1),at("search",!1)});mo.pipe(g(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=ue("link[rel=prev]");typeof t!="undefined"&&st(t);break;case"n":case".":let r=ue("link[rel=next]");typeof r!="undefined"&&st(r);break;case"Enter":let o=Ne();o instanceof HTMLLabelElement&&o.click()}});Fi({viewport$:ke,document$:ct});ji({document$:ct,tablet$:Lr});Ui({document$:ct});Wi({viewport$:ke,tablet$:Lr});var 
ft=ai(Ce("header"),{viewport$:ke}),qt=ct.pipe(m(()=>Ce("main")),b(e=>pi(e,{viewport$:ke,header$:ft})),Z(1)),Hs=L(...me("consent").map(e=>An(e,{target$:Ht})),...me("dialog").map(e=>ni(e,{alert$:fo})),...me("palette").map(e=>li(e)),...me("progress").map(e=>mi(e,{progress$:uo})),...me("search").map(e=>_i(e,{index$:zi,keyboard$:mo})),...me("source").map(e=>$i(e))),$s=H(()=>L(...me("announce").map(e=>_n(e)),...me("content").map(e=>oi(e,{sitemap$:qi,viewport$:ke,target$:Ht,print$:Ni})),...me("content").map(e=>V("search.highlight")?Ai(e,{index$:zi,location$:Kt}):y),...me("header").map(e=>si(e,{viewport$:ke,header$:ft,main$:qt})),...me("header-title").map(e=>ci(e,{viewport$:ke,header$:ft})),...me("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?eo(Vi,()=>lo(e,{viewport$:ke,header$:ft,main$:qt})):eo(Lr,()=>lo(e,{viewport$:ke,header$:ft,main$:qt}))),...me("tabs").map(e=>Pi(e,{viewport$:ke,header$:ft})),...me("toc").map(e=>Ri(e,{viewport$:ke,header$:ft,main$:qt,target$:Ht})),...me("top").map(e=>Ii(e,{viewport$:ke,header$:ft,main$:qt,target$:Ht})))),Ki=ct.pipe(b(()=>$s),Ve(Hs),Z(1));Ki.subscribe();window.document$=ct;window.location$=Kt;window.target$=Ht;window.keyboard$=mo;window.viewport$=ke;window.tablet$=Lr;window.screen$=Vi;window.print$=Ni;window.alert$=fo;window.progress$=uo;window.component$=Ki;})(); +//# sourceMappingURL=bundle.79ae519e.min.js.map + diff --git a/site/assets/javascripts/bundle.79ae519e.min.js.map b/site/assets/javascripts/bundle.79ae519e.min.js.map new file mode 100644 index 0000000..5cf0289 --- /dev/null +++ b/site/assets/javascripts/bundle.79ae519e.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/escape-html/index.js", "node_modules/clipboard/dist/clipboard.js", "src/templates/assets/javascripts/bundle.ts", "node_modules/tslib/tslib.es6.mjs", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", 
"node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/BehaviorSubject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/QueueAction.ts", "node_modules/rxjs/src/internal/scheduler/QueueScheduler.ts", "node_modules/rxjs/src/internal/scheduler/queue.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", 
"node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", 
"node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounce.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinct.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/exhaustMap.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", 
"node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", "src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", 
"src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", "src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", "src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip2/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", "src/templates/assets/javascripts/components/content/link/index.ts", "src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", 
"src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/alternate/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/findurl/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", "src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", 
"src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", "src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. 
Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n 
document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. 
For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. 
So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to 
escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = 
/*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 
'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. 
https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. 
Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
\"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if 
(self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} 
useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || 
exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener 
to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName === 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from 
https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) 
{\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*\n * Copyright (c) 2016-2025 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n fetchSitemap,\n setupAlternate,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new 
URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * ------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 60em)\")\nconst screen$ = watchMedia(\"(min-width: 76.25em)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up language selector */\nsetupAlternate({ document$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject()\n\n/* Set up sitemap for instant navigation and previews */\nconst sitemap$ = fetchSitemap(config.base)\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ sitemap$, location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n 
const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ viewport$, document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n .map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { sitemap$, viewport$, target$, print$ })),\n\n /* Search highlighting */\n 
...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator 
subject */\nwindow.component$ = component$ /* Component observable */\n", "/******************************************************************************\nCopyright (c) Microsoft Corporation.\n\nPermission to use, copy, modify, and/or distribute this software for any\npurpose with or without fee is hereby granted.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\nPERFORMANCE OF THIS SOFTWARE.\n***************************************************************************** */\n/* global Reflect, Promise, SuppressedError, Symbol, Iterator */\n\nvar extendStatics = function(d, b) {\n extendStatics = Object.setPrototypeOf ||\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\n return extendStatics(d, b);\n};\n\nexport function __extends(d, b) {\n if (typeof b !== \"function\" && b !== null)\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\n extendStatics(d, b);\n function __() { this.constructor = d; }\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\n}\n\nexport var __assign = function() {\n __assign = Object.assign || function __assign(t) {\n for (var s, i = 1, n = arguments.length; i < n; i++) {\n s = arguments[i];\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\n }\n return t;\n }\n return __assign.apply(this, arguments);\n}\n\nexport function __rest(s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n}\n\nexport function __decorate(decorators, target, key, desc) {\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\n return c > 3 && r && Object.defineProperty(target, key, r), r;\n}\n\nexport function __param(paramIndex, decorator) {\n return function (target, key) { decorator(target, key, paramIndex); }\n}\n\nexport function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {\n function accept(f) { if (f !== void 0 && typeof f !== \"function\") throw new TypeError(\"Function expected\"); return f; }\n var kind = contextIn.kind, key = kind === \"getter\" ? \"get\" : kind === \"setter\" ? \"set\" : \"value\";\n var target = !descriptorIn && ctor ? contextIn[\"static\"] ? ctor : ctor.prototype : null;\n var descriptor = descriptorIn || (target ? 
Object.getOwnPropertyDescriptor(target, contextIn.name) : {});\n var _, done = false;\n for (var i = decorators.length - 1; i >= 0; i--) {\n var context = {};\n for (var p in contextIn) context[p] = p === \"access\" ? {} : contextIn[p];\n for (var p in contextIn.access) context.access[p] = contextIn.access[p];\n context.addInitializer = function (f) { if (done) throw new TypeError(\"Cannot add initializers after decoration has completed\"); extraInitializers.push(accept(f || null)); };\n var result = (0, decorators[i])(kind === \"accessor\" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);\n if (kind === \"accessor\") {\n if (result === void 0) continue;\n if (result === null || typeof result !== \"object\") throw new TypeError(\"Object expected\");\n if (_ = accept(result.get)) descriptor.get = _;\n if (_ = accept(result.set)) descriptor.set = _;\n if (_ = accept(result.init)) initializers.unshift(_);\n }\n else if (_ = accept(result)) {\n if (kind === \"field\") initializers.unshift(_);\n else descriptor[key] = _;\n }\n }\n if (target) Object.defineProperty(target, contextIn.name, descriptor);\n done = true;\n};\n\nexport function __runInitializers(thisArg, initializers, value) {\n var useValue = arguments.length > 2;\n for (var i = 0; i < initializers.length; i++) {\n value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);\n }\n return useValue ? value : void 0;\n};\n\nexport function __propKey(x) {\n return typeof x === \"symbol\" ? x : \"\".concat(x);\n};\n\nexport function __setFunctionName(f, name, prefix) {\n if (typeof name === \"symbol\") name = name.description ? \"[\".concat(name.description, \"]\") : \"\";\n return Object.defineProperty(f, \"name\", { configurable: true, value: prefix ? 
\"\".concat(prefix, \" \", name) : name });\n};\n\nexport function __metadata(metadataKey, metadataValue) {\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\n}\n\nexport function __awaiter(thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n}\n\nexport function __generator(thisArg, body) {\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === \"function\" ? Iterator : Object).prototype);\n return g.next = verb(0), g[\"throw\"] = verb(1), g[\"return\"] = verb(2), typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\n function verb(n) { return function (v) { return step([n, v]); }; }\n function step(op) {\n if (f) throw new TypeError(\"Generator is already executing.\");\n while (g && (g = 0, op[0] && (_ = 0)), _) try {\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\n if (y = 0, t) op = [op[0] & 2, t.value];\n switch (op[0]) {\n case 0: case 1: t = op; break;\n case 4: _.label++; return { value: op[1], done: false };\n case 5: _.label++; y = op[1]; op = [0]; continue;\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\n default:\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\n if (t[2]) _.ops.pop();\n _.trys.pop(); continue;\n }\n op = body.call(thisArg, _);\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\n }\n}\n\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n});\n\nexport function __exportStar(m, o) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\n}\n\nexport function __values(o) {\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\n if (m) return m.call(o);\n if (o && typeof o.length === \"number\") return {\n next: function () {\n if (o && i >= o.length) o = void 0;\n return { value: o && o[i++], done: !o };\n }\n };\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\n}\n\nexport function __read(o, n) {\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\n if (!m) return o;\n var i = m.call(o), r, ar = [], e;\n try {\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\n }\n catch (error) { e = { error: error }; }\n finally {\n try {\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\n }\n finally { if (e) throw e.error; }\n }\n return ar;\n}\n\n/** @deprecated */\nexport function __spread() {\n for (var ar = [], i = 0; i < arguments.length; i++)\n ar = ar.concat(__read(arguments[i]));\n return ar;\n}\n\n/** @deprecated */\nexport function __spreadArrays() {\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\n r[k] = a[j];\n return r;\n}\n\nexport function __spreadArray(to, from, pack) {\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\n if (ar || !(i in from)) {\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\n ar[i] = from[i];\n }\n }\n return to.concat(ar || Array.prototype.slice.call(from));\n}\n\nexport function __await(v) {\n return this instanceof __await ? (this.v = v, this) : new __await(v);\n}\n\nexport function __asyncGenerator(thisArg, _arguments, generator) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\n return i = Object.create((typeof AsyncIterator === \"function\" ? 
AsyncIterator : Object).prototype), verb(\"next\"), verb(\"throw\"), verb(\"return\", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i;\n function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; }\n function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } }\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\n function fulfill(value) { resume(\"next\", value); }\n function reject(value) { resume(\"throw\", value); }\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\n}\n\nexport function __asyncDelegator(o) {\n var i, p;\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; }\n}\n\nexport function __asyncValues(o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n}\n\nexport function __makeTemplateObject(cooked, raw) {\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\n return cooked;\n};\n\nvar __setModuleDefault = Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n};\n\nexport function __importStar(mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n}\n\nexport function __importDefault(mod) {\n return (mod && mod.__esModule) ? mod : { default: mod };\n}\n\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n}\n\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n}\n\nexport function __classPrivateFieldIn(state, receiver) {\n if (receiver === null || (typeof receiver !== \"object\" && typeof receiver !== \"function\")) throw new TypeError(\"Cannot use 'in' operator on non-object\");\n return typeof state === \"function\" ? receiver === state : state.has(receiver);\n}\n\nexport function __addDisposableResource(env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n}\n\nvar _SuppressedError = typeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n};\n\nexport function __disposeResources(env) {\n function fail(e) {\n env.error = env.hasError ? 
new _SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n}\n\nexport default {\n __extends,\n __assign,\n __rest,\n __decorate,\n __param,\n __metadata,\n __awaiter,\n __generator,\n __createBinding,\n __exportStar,\n __values,\n __read,\n __spread,\n __spreadArrays,\n __spreadArray,\n __await,\n __asyncGenerator,\n __asyncDelegator,\n __asyncValues,\n __makeTemplateObject,\n __importStar,\n __importDefault,\n __classPrivateFieldGet,\n __classPrivateFieldSet,\n __classPrivateFieldIn,\n __addDisposableResource,\n __disposeResources,\n};\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. 
The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? 
`${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n */\nexport class Subscription implements SubscriptionLike {\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. 
Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. 
If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? 
[]).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? [_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. 
This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. 
For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. 
It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. 
Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n 
});\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. 
Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. 
Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. 
This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param value The `next` value.\n */\n next(value: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param err The `error` exception.\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. 
Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((e?: 
any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as ((value: T) => void) | undefined,\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent.\n * @param subscriber The stopped subscriber.\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". 
Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): 
UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @param subscribe The function that is called when the Observable is\n * initially subscribed to. 
This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @param subscribe the subscriber function to be passed to the Observable constructor\n * @return A new observable.\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @param operator the operator defining the operation to take on the observable\n * @return A new observable with the Operator applied.\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. 
Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. 
Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. 
Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * }\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' 
after 2.5s\n * ```\n *\n * @param observerOrNext Either an {@link Observer} with some or all callback methods,\n * or the `next` handler that is called for each value emitted from the subscribed Observable.\n * @param error A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param complete A handler for a terminal event resulting from successful completion.\n * @return A subscription reference to the registered handlers.\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. 
You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next A handler for each value emitted by the observable.\n * @return A promise that either resolves on observable completion or\n * rejects with the handled error.\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @return This instance of the observable.\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction\n ): 
Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n *\n * @return The Observable result of all the operators having been called\n * in the order they were passed in.\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? 
Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. 
Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. 
This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. (DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? 
function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. 
Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected 
_checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return Observable that this Subject casts to.\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { Subject } from './Subject';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\n\n/**\n * A variant of Subject that requires an initial value and emits its current\n * value whenever it is subscribed to.\n */\nexport class BehaviorSubject extends Subject {\n constructor(private _value: T) {\n super();\n }\n\n get value(): T {\n return this.getValue();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n const subscription = super._subscribe(subscriber);\n !subscription.closed && subscriber.next(this._value);\n return subscription;\n }\n\n getValue(): T {\n const { hasError, thrownError, _value } = this;\n if (hasError) {\n throw thrownError;\n }\n this._throwIfClosed();\n return _value;\n }\n\n next(value: T): void {\n super.next((this._value = value));\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. 
When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. `ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param _bufferSize The size of the buffer to replay on subscription\n * @param _windowTime The amount of time the buffered items will stay buffered\n * @param _timestampProvider An object with a `now()` method that provides the current timestamp. 
This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 
1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. 
May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param state Some contextual data that the `work` function uses when called by the\n * Scheduler.\n * @param delay Time to wait before executing the work, where the time unit is implicit\n * and defined by the Scheduler.\n * @return A subscription in order to be able to unsubscribe the scheduled work.\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n },\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // 
@ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. `setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? 
this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false && this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... 
*/\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. 
Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param work A function representing a task, or some unit of work to be\n * executed by the Scheduler.\n * @param delay Time to wait before executing the work, where the time unit is\n * implicit and defined by the Scheduler itself.\n * @param state Some contextual data that the `work` function uses when called\n * by the Scheduler.\n * @return A subscription in order to be able to unsubscribe the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @internal\n 
*/\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. 
It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { Subscription } from '../Subscription';\nimport { QueueScheduler } from './QueueScheduler';\nimport { SchedulerAction } from '../types';\nimport { TimerHandle } from './timerHandle';\n\nexport class QueueAction extends AsyncAction {\n constructor(protected scheduler: QueueScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (delay > 0) {\n return super.schedule(state, delay);\n }\n this.delay = delay;\n this.state = state;\n this.scheduler.flush(this);\n return this;\n }\n\n public execute(state: T, delay: number): any {\n return delay > 0 || this.closed ? 
super.execute(state, delay) : this._execute(state, delay);\n }\n\n protected requestAsyncId(scheduler: QueueScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n\n if ((delay != null && delay > 0) || (delay == null && this.delay > 0)) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n\n // Otherwise flush the scheduler starting with this action.\n scheduler.flush(this);\n\n // HACK: In the past, this was returning `void`. However, `void` isn't a valid\n // `TimerHandle`, and generally the return value here isn't really used. So the\n // compromise is to return `0` which is both \"falsy\" and a valid `TimerHandle`,\n // as opposed to refactoring every other instanceo of `requestAsyncId`.\n return 0;\n }\n}\n", "import { AsyncScheduler } from './AsyncScheduler';\n\nexport class QueueScheduler extends AsyncScheduler {\n}\n", "import { QueueAction } from './QueueAction';\nimport { QueueScheduler } from './QueueScheduler';\n\n/**\n *\n * Queue Scheduler\n *\n * Put every next task on a queue, instead of executing it immediately\n *\n * `queue` scheduler, when used with delay, behaves the same as {@link asyncScheduler} scheduler.\n *\n * When used without delay, it schedules given task synchronously - executes it right when\n * it is scheduled. 
However when called recursively, that is when inside the scheduled task,\n * another task is scheduled with queue scheduler, instead of executing immediately as well,\n * that task will be put on a queue and wait for current one to finish.\n *\n * This means that when you execute task with `queue` scheduler, you are sure it will end\n * before any other task scheduled with that scheduler will start.\n *\n * ## Examples\n * Schedule recursively first, then do something\n * ```ts\n * import { queueScheduler } from 'rxjs';\n *\n * queueScheduler.schedule(() => {\n * queueScheduler.schedule(() => console.log('second')); // will not happen now, but will be put on a queue\n *\n * console.log('first');\n * });\n *\n * // Logs:\n * // \"first\"\n * // \"second\"\n * ```\n *\n * Reschedule itself recursively\n * ```ts\n * import { queueScheduler } from 'rxjs';\n *\n * queueScheduler.schedule(function(state) {\n * if (state !== 0) {\n * console.log('before', state);\n * this.schedule(state - 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * console.log('after', state);\n * }\n * }, 0, 3);\n *\n * // In scheduler that runs recursively, you would expect:\n * // \"before\", 3\n * // \"before\", 2\n * // \"before\", 1\n * // \"after\", 1\n * // \"after\", 2\n * // \"after\", 3\n *\n * // But with queue it logs:\n * // \"before\", 3\n * // \"after\", 3\n * // \"before\", 2\n * // \"after\", 2\n * // \"before\", 1\n * // \"after\", 1\n * ```\n */\n\nexport const queueScheduler = new QueueScheduler(QueueAction);\n\n/**\n * @deprecated Renamed to {@link queueScheduler}. 
Will be removed in v8.\n */\nexport const queue = queueScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && id === scheduler._scheduled && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. 
cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n let flushId;\n if (action) {\n flushId = action.id;\n } else {\n flushId = this._scheduled;\n this._scheduled = undefined;\n }\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? 
of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! 
: defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an