diff --git a/app/demo/index.html b/app/demo/index.html
index f6fc1bf..2de17a3 100644
--- a/app/demo/index.html
+++ b/app/demo/index.html
@@ -167,6 +167,9 @@
}
.card {
flex: 0 0 260px;
+ /* Override flex-item default min-width: auto so long content in
+ .card-path can't force the card wider than its flex-basis. */
+ min-width: 0;
scroll-snap-align: start;
border: 1px solid var(--hair);
background: var(--panel);
@@ -261,6 +264,8 @@
font-size: 11px;
color: var(--muted);
margin-bottom: 8px;
+ /* Long sklearn paths would otherwise overflow and force the card wider. */
+ word-break: break-all;
}
.card-desc {
font-size: 13px;
@@ -398,24 +403,30 @@ function normalize(points) {
let mx = 0, my = 0, mz = 0;
for (const p of points) { mx += p[0]; my += p[1]; mz += p[2]; }
mx /= n; my /= n; mz /= n;
- let maxAbs = 1e-9;
- for (const p of points) {
+
+ // Percentile-based scale: take the 95th percentile of per-point
+ // max-coord magnitude. Robust to outliers in the distribution tails
+ // (e.g. gaussian_quantiles / classification have long radial tails
+ // that, under pure max-abs normalization, shrink the visible bulk
+ // to a fraction of the viewport). p95 keeps datasets visually uniform.
+ const perPoint = new Float64Array(n);
+ for (let i = 0; i < n; i++) {
+ const p = points[i];
const a = Math.abs(p[0] - mx);
const b = Math.abs(p[1] - my);
const c = Math.abs(p[2] - mz);
- if (a > maxAbs) maxAbs = a;
- if (b > maxAbs) maxAbs = b;
- if (c > maxAbs) maxAbs = c;
+ perPoint[i] = a > b ? (a > c ? a : c) : (b > c ? b : c);
}
+ const sorted = Array.from(perPoint).sort((a, b) => a - b);
+ const scale = Math.max(sorted[Math.floor(n * 0.95)], 1e-9);
+
const out = new Float32Array(n * 3);
for (let i = 0; i < n; i++) {
- out[i*3] = (points[i][0] - mx) / maxAbs;
- out[i*3+1] = (points[i][1] - my) / maxAbs;
- out[i*3+2] = (points[i][2] - mz) / maxAbs;
+ out[i*3] = (points[i][0] - mx) / scale;
+ out[i*3+1] = (points[i][1] - my) / scale;
+ out[i*3+2] = (points[i][2] - mz) / scale;
}
- // scaleFactor converts raw-unit jitter to normalized-unit jitter
- // so a slider value in raw coords lands correctly after normalize().
- return { positions: out, scaleFactor: 1 / maxAbs };
+ return { positions: out };
}
@@ -440,7 +451,7 @@ function buildColors(labels, kind) {
}
function createScene(container, dataset) {
- const { positions: basePositions, scaleFactor } = normalize(dataset.points);
+ const { positions: basePositions } = normalize(dataset.points);
const colors = buildColors(dataset.labels, dataset.kind);
// mutable copy — the render loop writes jittered positions here.
@@ -482,7 +493,6 @@ function createScene(container, dataset) {
return {
scene, camera, renderer, controls, container, geometry,
basePositions,
- scaleFactor,
// trajectory state is populated by buildTrajectories() once at init
trajectories: null,
numFrames: 0,
@@ -543,6 +553,17 @@ async function main() {
const order = Object.entries(data);
let selectedId = null;
+ // A single ResizeObserver keeps every viz canvas sized to its real,
+ // post-layout container width. It uniformly handles the initial layout,
+ // late width changes inside the horizontal scroll container, and window resize.
+ const vizToScene = new WeakMap();
+ const sizeObserver = new ResizeObserver((entries) => {
+ for (const entry of entries) {
+ const s = vizToScene.get(entry.target);
+ if (s) sizeScene(s);
+ }
+ });
+
order.forEach(([id, ds], i) => {
const card = document.createElement('div');
card.className = 'card';
@@ -566,10 +587,13 @@ async function main() {
const viz = card.querySelector('.viz');
const s = createScene(viz, ds);
- sizeScene(s);
// Precompute at the max frame count; toggling only changes cycle length.
buildTrajectories(s, MAX_FRAMES);
scenes.push(s);
+ // ResizeObserver delivers an initial callback on observe() with the real
+ // laid-out width, so no synchronous sizeScene() call is needed here —
+ // that call used to race the not-yet-settled layout during this forEach.
+ vizToScene.set(viz, s);
+ sizeObserver.observe(viz);
// Stop auto-rotate once the user interacts.
s.controls.addEventListener('start', () => { s.controls.autoRotate = false; });
@@ -675,12 +699,8 @@ async function main() {
alert(`Would continue with generator:\n${ds.path}\n\n(demo — no flow dispatched yet)`);
});
- // Resize handling.
- let resizeTimer = null;
- window.addEventListener('resize', () => {
- clearTimeout(resizeTimer);
- resizeTimer = setTimeout(() => scenes.forEach(sizeScene), 80);
- });
+ // Resize is handled by the ResizeObserver above — it fires per-card on
+ // any width change (including the initial layout settle).
// Render loop. Each scene walks through its cumulative trajectory,
// interpolating between consecutive frames; wraps from frame N-1 back to 0
diff --git a/app/demo/main.py b/app/demo/main.py
index d6b6d94..ad3d763 100644
--- a/app/demo/main.py
+++ b/app/demo/main.py
@@ -3,23 +3,48 @@ from pathlib import Path
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
-from sklearn.datasets import make_blobs, make_s_curve, make_swiss_roll
+from sklearn.datasets import (
+ make_blobs,
+ make_classification,
+ make_gaussian_quantiles,
+ make_s_curve,
+ make_swiss_roll,
+)
app = FastAPI()
HERE = Path(__file__).parent
+N = 5000
+SEED = 0
+
@lru_cache(maxsize=1)
def _datasets():
- s, sl = make_s_curve(n_samples=5000, noise=0.03, random_state=0)
- sr, srl = make_swiss_roll(n_samples=5000, noise=0.15, random_state=0)
+ s, sl = make_s_curve(n_samples=N, noise=0.03, random_state=SEED)
+ sr, srl = make_swiss_roll(n_samples=N, noise=0.15, random_state=SEED)
+ srh, srhl = make_swiss_roll(n_samples=N, noise=0.15, hole=True, random_state=SEED)
b, bl = make_blobs(
- n_samples=5000, n_features=3, centers=5, cluster_std=1.0, random_state=0
+ n_samples=N, n_features=3, centers=5, cluster_std=1.0, random_state=SEED
+ )
+ gq, gql = make_gaussian_quantiles(
+ n_samples=N, n_features=3, n_classes=4, random_state=SEED
+ )
+ cls, clsl = make_classification(
+ n_samples=N,
+ n_features=3,
+ n_informative=3,
+ n_redundant=0,
+ n_repeated=0,
+ n_classes=4,
+ n_clusters_per_class=2,
+ class_sep=1.5,
+ random_state=SEED,
)
return {
"s_curve": {
"name": "S-Curve",
"path": "sklearn.datasets.make_s_curve",
+ "kwargs": {},
"description": (
"A 2-D manifold warped into R³. Continuous label encodes position "
"along the curve — a good test of whether a reducer unrolls the "
@@ -32,6 +57,7 @@ def _datasets():
"swiss_roll": {
"name": "Swiss Roll",
"path": "sklearn.datasets.make_swiss_roll",
+ "kwargs": {},
"description": (
"A rolled-up plane. The canonical hard case for linear methods: "
"PCA collapses the spiral, non-linear methods should recover the "
@@ -41,9 +67,23 @@ def _datasets():
"points": sr.tolist(),
"labels": srl.tolist(),
},
+ "swiss_roll_hole": {
+ "name": "Swiss Roll (hole)",
+ "path": "sklearn.datasets.make_swiss_roll",
+ "kwargs": {"hole": True},
+ "description": (
+ "Swiss roll with a rectangular hole punched through. Same manifold, "
+ "non-trivial topology — a faithful unroll should preserve the hole "
+ "rather than smearing it closed."
+ ),
+ "kind": "continuous",
+ "points": srh.tolist(),
+ "labels": srhl.tolist(),
+ },
"blobs": {
"name": "Gaussian Blobs",
"path": "sklearn.datasets.make_blobs",
+ "kwargs": {"centers": 5, "cluster_std": 1.0},
"description": (
"Five isotropic Gaussian clusters in R³. Discrete class labels. "
"Tests whether a reducer preserves cluster separation when "
@@ -53,6 +93,39 @@ def _datasets():
"points": b.tolist(),
"labels": bl.tolist(),
},
+ "gaussian_quantiles": {
+ "name": "Gaussian Quantiles",
+ "path": "sklearn.datasets.make_gaussian_quantiles",
+ "kwargs": {"n_classes": 4},
+ "description": (
+ "Concentric Gaussian shells in R³; class = which shell. Classes "
+ "are linearly inseparable by construction — PCA collapses them, "
+ "kernel and manifold methods have a chance."
+ ),
+ "kind": "categorical",
+ "points": gq.tolist(),
+ "labels": gql.tolist(),
+ },
+ "classification": {
+ "name": "Hypercube Clusters",
+ "path": "sklearn.datasets.make_classification",
+ "kwargs": {
+ "n_informative": 3,
+ "n_redundant": 0,
+ "n_repeated": 0,
+ "n_classes": 4,
+ "n_clusters_per_class": 2,
+ "class_sep": 1.5,
+ },
+ "description": (
+ "Four classes, two sub-clusters each, placed at hypercube vertices "
+ "with informative noise. A denser discrete test than blobs — "
+ "within-class bimodality stresses cluster-preserving reducers."
+ ),
+ "kind": "categorical",
+ "points": cls.tolist(),
+ "labels": clsl.tolist(),
+ },
}