|
| 1 | +#!/usr/bin/env python3 |
| 2 | +# pac_lab.py |
| 3 | +# A compact lab to test PAC residuals, global balance projection, and emergent lattice reconfiguration. |
| 4 | + |
| 5 | +from __future__ import annotations |
| 6 | +import argparse, dataclasses, json, math, os, sys, time |
| 7 | +from typing import Dict, List, Tuple |
| 8 | +import numpy as np |
| 9 | + |
| 10 | +# Optional SciPy for fast Poisson solve (lattice mode). Fallback to Jacobi if unavailable. |
try:
    from scipy.fft import fft2, ifft2
except Exception:
    # Any import failure (missing SciPy, broken install) disables the FFT path.
    SCIPY_OK = False
else:
    SCIPY_OK = True
| 16 | + |
| 17 | + |
| 18 | +# --------------------------- |
| 19 | +# Utils |
| 20 | +# --------------------------- |
| 21 | + |
def mk_run_dir(tag: str = "") -> str:
    """Create (if needed) and return a timestamped output directory under runs/."""
    stamp = time.strftime("%Y%m%d_%H%M%S")
    suffix = f"_{tag}" if tag else ""
    path = f"runs/{stamp}{suffix}"
    os.makedirs(path, exist_ok=True)
    return path
| 27 | + |
def seed_all(seed: int = 42):
    """Seed NumPy's legacy global RNG so runs are reproducible."""
    np.random.seed(seed)
| 30 | + |
def save_csv(path: str, header: List[str], rows: List[List[float]]):
    """Write a header line plus one comma-joined line per row to `path`."""
    lines = [",".join(header)]
    lines.extend(",".join(str(cell) for cell in row) for row in rows)
    with open(path, "w") as out:
        out.write("\n".join(lines) + "\n")
| 36 | + |
def pretty(obj) -> str:
    """Render `obj` as stable, human-readable JSON (sorted keys, 2-space indent)."""
    return json.dumps(obj, sort_keys=True, indent=2)
| 39 | + |
| 40 | + |
| 41 | +# --------------------------- |
| 42 | +# PART A: Graph PAC (discrete) |
| 43 | +# --------------------------- |
| 44 | + |
@dataclasses.dataclass
class GraphPAC:
    """
    Discrete PAC (parent/child conservation) on a directed acyclic graph (DAG)
    or general directed graph.

    Nodes are 0..N-1. A node is "balanced" when its value equals the
    alpha-weighted sum of its children's values; residuals measure how far
    each node is from that balance.

    Attributes:
        f: node values (information/energy payloads), shape (N,).
        edges: directed (parent, child) pairs.
        alphas: ownership weight for each (parent, child) edge, in [0, 1].
    """
    f: np.ndarray  # shape (N,)
    edges: List[Tuple[int, int]]  # (p, c)
    alphas: Dict[Tuple[int, int], float]

    def N(self) -> int:
        """Number of nodes."""
        return self.f.shape[0]

    def build_A(self) -> np.ndarray:
        """Balance operator: A @ v = residuals.

        A[v,v] = 1 and A[v,u] = -alpha_{v->u} for each child u in D(v).
        """
        N = self.N()
        A = np.eye(N, dtype=float)
        # For each parent row, subtract the weighted child contributions.
        for (p, c) in self.edges:
            A[p, c] -= self.alphas[(p, c)]
        return A

    def residuals(self, f: np.ndarray | None = None) -> np.ndarray:
        """r(v) = f(v) - sum_{u in D(v)} alpha_{v->u} f(u).

        Args:
            f: values to evaluate; defaults to self.f.
        Returns:
            Residual vector (float). Leaves keep r(v) = f(v).
        """
        if f is None:
            f = self.f
        # Explicit float copy: keeps the caller's array unmutated and avoids
        # the in-place casting error that np.copy(f) would hit for int input.
        r = np.array(f, dtype=float)
        for (p, c) in self.edges:
            r[p] -= self.alphas[(p, c)] * f[c]
        return r

    def project_pinv(self, f: np.ndarray | None = None, rtol: float = 1e-12) -> Tuple[np.ndarray, float]:
        """
        Minimal-norm global correction using the Moore-Penrose pseudoinverse.

        Args:
            f: values to correct; defaults to self.f.
            rtol: singular-value cutoff, forwarded to np.linalg.pinv as rcond.
        Returns:
            (f_corrected, post_residual_norm).
        """
        if f is None:
            f = self.f
        A = self.build_A()
        r = A @ f
        pinv = np.linalg.pinv(A, rcond=rtol)
        delta = -pinv @ r
        f_corr = f + delta
        post = np.linalg.norm(A @ f_corr)
        return f_corr, float(post)

    def project_gauss_seidel(self, iters: int = 200, tol: float = 1e-9) -> Tuple[np.ndarray, List[float]]:
        """
        Iterative local projection: for each parent p, rescale its children so
        that f(p) = sum alpha_{p->u} f(u). Works best if children do not
        overlap too much (a shared child is rescaled by each parent in turn).

        Args:
            iters: maximum number of full sweeps.
            tol: stop once the global residual norm drops below this.
        Returns:
            (f_corrected, residual_norm_trace) with one trace entry per sweep.
        """
        f = self.f.copy()
        N = self.N()
        # Precompute children list per parent.
        children: Dict[int, List[int]] = {i: [] for i in range(N)}
        for (p, c) in self.edges:
            children[p].append(c)

        trace: List[float] = []
        A = self.build_A()
        for _ in range(iters):
            # One Gauss-Seidel-style sweep; later parents see earlier updates.
            for p in range(N):
                if not children[p]:  # leaf: nothing to balance
                    continue
                denom = sum(self.alphas[(p, c)] for c in children[p])
                if denom == 0.0:
                    continue
                target = f[p]
                weighted_sum = sum(self.alphas[(p, c)] * f[c] for c in children[p])
                if weighted_sum == 0.0:
                    # Children currently cancel out: seed them with a common
                    # base so their weighted sum matches the parent's value.
                    base = target / (denom + 1e-12)
                    for c in children[p]:
                        f[c] = base
                else:
                    # Rescale children uniformly so their weighted sum hits target.
                    ratio = target / (weighted_sum + 1e-12)
                    for c in children[p]:
                        f[c] *= ratio
            rn = float(np.linalg.norm(A @ f))
            trace.append(rn)
            if rn < tol:
                break
        return f, trace

    def run_perturb_resolve(self, node: int, eps: float = 1.0, mode: str = "pinv") -> Dict:
        """
        Perturb one node by eps, re-project to conservation, and report.

        Args:
            node: index of the node to perturb.
            eps: perturbation added to f[node].
            mode: "pinv" for exact projection; anything else uses Gauss-Seidel.
        Returns:
            Diagnostics dict (pre/post residual norms, GS trace, values).
        """
        f0 = self.f.copy()
        try:
            self.f[node] += eps
            r_before = self.residuals()
            if mode == "pinv":
                f_corr, _ = self.project_pinv()
                traj = None
            else:
                f_corr, traj = self.project_gauss_seidel()

            r_after = self.residuals(f_corr)
            return dict(
                mode=mode,
                node=node,
                eps=float(eps),
                pre_residual=float(np.linalg.norm(r_before)),
                post_residual=float(np.linalg.norm(r_after)),
                traj=traj,
                f0=f0.tolist(),
                f_corr=f_corr.tolist()
            )
        finally:
            # Always restore the pre-perturbation state, even if a solve fails.
            self.f = f0
| 162 | + |
| 163 | + |
def demo_graph_small(seed: int = 7) -> GraphPAC:
    """
    Build a small graph with one shared child to test weighted ownership.
    P(0) -> A(1), B(2)
    A(1) -> X(3), A2(4)
    B(2) -> X(3), B2(5), B3(6)
    Ownership: alpha_{A->X}=0.6, alpha_{B->X}=0.4; every parent's outgoing
    alphas are normalized to sum to 1.0.
    """
    rng = np.random.default_rng(seed)
    node_count = 7
    values = rng.uniform(10, 50, size=node_count).astype(float)
    edge_list = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 3), (2, 5), (2, 6)]

    # Per-parent ownership weights, each parent's weights summing to 1.0.
    weights: Dict[Tuple[int, int], float] = {
        (0, 1): 0.5,  # P -> A
        (0, 2): 0.5,  # P -> B
        (1, 3): 0.6,  # A -> X
        (1, 4): 0.4,  # A -> A2
    }
    # B -> X, B2, B3: normalize the raw shares explicitly.
    raw = [0.4, 0.35, 0.25]
    total = sum(raw)
    for child, share in zip((3, 5, 6), raw):
        weights[(2, child)] = share / total

    return GraphPAC(f=values, edges=edge_list, alphas=weights)
| 193 | + |
| 194 | + |
| 195 | +# --------------------------- |
| 196 | +# PART B: Lattice “emergent gravity” demo |
| 197 | +# --------------------------- |
| 198 | + |
@dataclasses.dataclass
class LatticeModel:
    """
    2D periodic lattice of a scalar field f.
    Local residual r = f - alpha * (sum of 4 neighbors).
    Solve Poisson: Laplacian(psi) = -r, then update f by f += eta * psi + noise.
    """
    N: int = 128           # grid side length; the field is N x N
    alpha: float = 0.25    # neighbor weight; at 0.25 the residual has zero mean
    eta: float = 0.2       # relaxation rate applied to the Poisson solution
    noise: float = 0.005   # std-dev of per-step additive Gaussian noise
    seed: int = 1          # seed for NumPy's *global* RNG (see __post_init__)
    use_fft: bool = True   # prefer the spectral Poisson solve when SciPy exists

    def __post_init__(self):
        # NOTE: seeds NumPy's global RNG, so constructing a model affects any
        # other np.random users in the same process.
        seed_all(self.seed)
        self.f = np.random.normal(loc=0.0, scale=1.0, size=(self.N, self.N))
        if SCIPY_OK and self.use_fft:
            # Precompute the spectral symbol of the periodic Laplacian,
            # -(kx^2 + ky^2), on the FFT frequency grid.
            kx = np.fft.fftfreq(self.N) * 2 * np.pi
            ky = np.fft.fftfreq(self.N) * 2 * np.pi
            self.KX, self.KY = np.meshgrid(kx, ky, indexing='ij')
            self.lap_symbol = -(self.KX**2 + self.KY**2)
            self.lap_symbol[0,0] = 1.0 # avoid division by 0 (we'll zero mean)

    def residual(self) -> np.ndarray:
        """r = f - alpha * (4-neighbor sum), with periodic wrap via np.roll."""
        up = np.roll(self.f, -1, axis=0)
        down = np.roll(self.f, 1, axis=0)
        left = np.roll(self.f, -1, axis=1)
        right = np.roll(self.f, 1, axis=1)
        child = self.alpha * (up + down + left + right)
        return self.f - child

    def solve_poisson(self, rhs: np.ndarray) -> np.ndarray:
        """Solve Laplacian(psi) = rhs on the periodic grid.

        FFT path: divide by the precomputed Laplacian symbol, forcing the
        zero-frequency (mean) mode of psi to 0. Fallback: 200 fixed Jacobi
        sweeps.
        NOTE(review): the periodic problem is only solvable when rhs has zero
        mean; the FFT path drops the mean mode explicitly, but the Jacobi path
        does not subtract it — presumably relying on alpha=0.25 making the
        residual mean-free. Confirm before running with other alpha values.
        """
        # Laplace psi = rhs, periodic BCs
        if SCIPY_OK and self.use_fft:
            rhs_hat = fft2(rhs)
            psi_hat = rhs_hat.copy()
            psi_hat[0,0] = 0.0
            psi_hat /= self.lap_symbol
            psi = np.real(ifft2(psi_hat))
            return psi
        # Jacobi fallback (slower)
        psi = np.zeros_like(rhs)
        for _ in range(200):
            psi = 0.25 * (np.roll(psi,1,0) + np.roll(psi,-1,0) +
                          np.roll(psi,1,1) + np.roll(psi,-1,1) - rhs)
        return psi

    @staticmethod
    def entropy_proxy(field: np.ndarray, bins: int = 100) -> float:
        """Histogram-based Shannon-entropy proxy of the field values.

        NOTE(review): the 1/bins factor stands in for the true bin width
        (which depends on the data range), so this is a relative proxy,
        not a calibrated differential entropy.
        """
        hist, _ = np.histogram(field.flatten(), bins=bins, density=True)
        hist = hist + 1e-12  # avoid log(0) for empty bins
        return float(-np.sum(hist * np.log(hist)) * (1.0 / bins))

    def step(self) -> Dict[str, float]:
        """One update: solve Laplacian(psi) = -r, relax f toward balance,
        add noise, and return diagnostics (residual norm, entropy proxy,
        fraction of cells more than 1 std above the mean)."""
        r = self.residual()
        psi = self.solve_poisson(-r)
        self.f += self.eta * psi
        if self.noise > 0:
            self.f += self.noise * np.random.normal(size=self.f.shape)
        # diagnostics
        ent = self.entropy_proxy(self.f)
        rn = float(np.linalg.norm(r))
        thr = np.mean(self.f) + 1.0 * np.std(self.f)
        cluster_frac = float(np.mean(self.f > thr))
        return dict(residual_norm=rn, entropy=ent, cluster_frac=cluster_frac)

    def run(self, steps: int = 200) -> List[Dict[str, float]]:
        """Run `steps` updates, printing a progress line every 20 steps;
        returns the per-step diagnostics dicts (each tagged with its step t)."""
        logs = []
        for t in range(steps):
            d = self.step()
            d["t"] = t
            logs.append(d)
            if t % 20 == 0:
                print(f"[t={t:03d}] residual={d['residual_norm']:.4e} entropy={d['entropy']:.4f} clusters={d['cluster_frac']:.4f}")
        return logs
| 275 | + |
| 276 | + |
| 277 | +# --------------------------- |
| 278 | +# CLI Entrypoints |
| 279 | +# --------------------------- |
| 280 | + |
def run_graph(args):
    """Run the discrete-graph demo: project, perturb/resolve, and save logs."""
    seed_all(args.seed)
    graph = demo_graph_small(seed=args.seed)
    out_dir = mk_run_dir("graph")
    print("Graph initial f:", np.array2string(graph.f, precision=3))

    # Residual before any correction.
    print("Initial residual norm:", np.linalg.norm(graph.residuals()))

    # Exact (pseudoinverse) projection.
    f_corr, post = graph.project_pinv()
    print("PINV post-residual:", post)

    # Iterative (Gauss-Seidel) projection.
    f_it, traj = graph.project_gauss_seidel(iters=args.iters, tol=args.tol)
    gs_post = float(np.linalg.norm(graph.build_A() @ f_it))
    print("GS post-residual:", gs_post)

    # Perturb one node and re-resolve with both projection modes.
    summary_keys = ("node", "eps", "pre_residual", "post_residual")
    report = graph.run_perturb_resolve(node=args.perturb_node, eps=args.eps, mode="pinv")
    print("Perturb/resolve (PINV):", pretty({k: report[k] for k in summary_keys}))

    report2 = graph.run_perturb_resolve(node=args.perturb_node, eps=args.eps, mode="gs")
    print("Perturb/resolve (GS):", pretty({k: report2[k] for k in summary_keys}))

    # Persist the GS convergence trace and run metadata.
    save_csv(os.path.join(out_dir, "gs_trace.csv"),
             ["iter", "residual_norm"],
             [[i, rn] for i, rn in enumerate(traj)])

    meta = dict(
        seed=args.seed, iters=args.iters, tol=args.tol,
        initial_f=graph.f.tolist(),
        pinv_post=post,
        gs_post=gs_post,
        perturb_pin=report, perturb_gs=report2
    )
    with open(os.path.join(out_dir, "meta.json"), "w") as fh:
        json.dump(meta, fh, indent=2)
    print("Saved outputs to", out_dir)
| 322 | + |
| 323 | + |
def run_lattice(args):
    """Run the lattice emergence demo and persist metrics, field, and config."""
    model = LatticeModel(N=args.N, alpha=args.alpha, eta=args.eta,
                         noise=args.noise, seed=args.seed, use_fft=not args.no_fft)
    out_dir = mk_run_dir("lattice")
    logs = model.run(steps=args.steps)
    rows = [[entry["t"], entry["residual_norm"], entry["entropy"], entry["cluster_frac"]]
            for entry in logs]
    save_csv(os.path.join(out_dir, "lattice_metrics.csv"),
             ["t", "residual_norm", "entropy", "cluster_frac"], rows)
    # Save final field as .npy for quick visualization elsewhere
    np.save(os.path.join(out_dir, "final_field.npy"), model.f)
    config = dataclasses.asdict(model) | {"steps": args.steps}
    with open(os.path.join(out_dir, "config.json"), "w") as fh:
        json.dump(config, fh, indent=2)
    print("Saved outputs to", out_dir)
    print("Tip: visualize final_field.npy with matplotlib imshow for clustering.")
| 338 | + |
| 339 | + |
def main():
    """Parse CLI arguments and dispatch to the selected demo."""
    parser = argparse.ArgumentParser(description="PAC Lab: Graph projection & Lattice emergence")
    sub = parser.add_subparsers(dest="mode", required=True)

    graph_cmd = sub.add_parser("graph", help="Discrete PAC on a small graph")
    graph_cmd.add_argument("--seed", type=int, default=7)
    graph_cmd.add_argument("--iters", type=int, default=400)
    graph_cmd.add_argument("--tol", type=float, default=1e-9)
    graph_cmd.add_argument("--perturb-node", type=int, default=3, help="Node index to perturb")
    graph_cmd.add_argument("--eps", type=float, default=4.0, help="Perturbation amount")
    graph_cmd.set_defaults(func=run_graph)

    lattice_cmd = sub.add_parser("lattice", help="Emergent lattice reconfiguration")
    lattice_cmd.add_argument("--N", type=int, default=128)
    lattice_cmd.add_argument("--steps", type=int, default=200)
    lattice_cmd.add_argument("--alpha", type=float, default=0.25, help="Neighbor weight")
    lattice_cmd.add_argument("--eta", type=float, default=0.2, help="Reconfiguration rate")
    lattice_cmd.add_argument("--noise", type=float, default=0.005, help="Thermal/noise term")
    lattice_cmd.add_argument("--seed", type=int, default=1)
    lattice_cmd.add_argument("--no-fft", action="store_true", help="Disable FFT Poisson solver (use Jacobi)")
    lattice_cmd.set_defaults(func=run_lattice)

    args = parser.parse_args()
    os.makedirs("runs", exist_ok=True)
    args.func(args)


if __name__ == "__main__":
    main()
0 commit comments