rcnn/backend/apps/coaster/physics.py
munsel 42197bfbc9 add coaster naming, challenge detail panel, and fix GeoJSON id bug
- Coaster editor: name input in top bar, saved/loaded with coaster data
- CoasterListPanel: show coaster name prominently alongside creator username
- ChallengesListPanel: drill-in detail view with center map, plan coaster,
  and accept challenge buttons; coaster count shown in list and detail
- AllCoastersPanel: coaster count visible in challenge entries
- Backend: add coaster_count to ChallengeMapSerializer and ChallengeDetailSerializer
- Fix: ChallengeLayer and ChallengesListPanel were reading f.properties.id
  (always undefined) instead of f.id — GeoFeatureModelSerializer puts the pk
  at the GeoJSON Feature level, not in properties
- Types: remove id from ChallengeMapProperties to reflect actual data shape

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-23 01:43:10 +02:00

444 lines
16 KiB
Python
Raw Permalink Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
Physics-Based Roller Coaster Rail Generator
============================================
Refactored from the project root for backend use.
Removed: build123d, ocp_vscode, sample_wire(), __main__ block.
All core mathematics is pure numpy.
Public API
----------
generate_rails(points_m, **kwargs) -> (rail_1_pts, rail_2_pts, diagnostics, profile)
Takes a (n, 3) numpy array of centreline points in metres (local frame)
and returns two (m, 3) numpy arrays of rail points in metres, plus a
diagnostics dict of ride-quality metrics and a profile dict of
downsampled charting arrays.
"""
import numpy as np
GRAVITY = 9.81 # m/s²
# ── Utilities ──────────────────────────────────────────────────────────────────
def normalize(v):
    """Return *v* scaled to unit length; near-zero input collapses to zero."""
    length = np.linalg.norm(v)
    if length >= 1e-12:
        return v / length
    # Degenerate vector: preserve the caller's shape (sequence in, zeros out;
    # scalar in, scalar zero out).
    return np.zeros_like(v) if hasattr(v, "__len__") else 0
def arc_length(points):
    """Return cumulative arc length along *points* (shape (n, 3)); s[0] == 0.

    Vectorised with np.diff/np.cumsum instead of a per-point Python loop —
    this runs once per spline sample set (thousands of points), so the
    single-pass form matters.
    """
    points = np.asarray(points, dtype=float)
    s = np.zeros(len(points))
    if len(points) > 1:
        seg = np.linalg.norm(np.diff(points, axis=0), axis=1)
        s[1:] = np.cumsum(seg)
    return s
def _fit_spline_and_sample(pts: np.ndarray, n: int):
    """
    Fit a cubic B-spline through *pts* (n_ctrl × 3, in metres) and sample *n*
    evenly-spaced points along it.

    Returns (positions, arc_lengths, unit_tangents, curvatures, curvature_vectors)
    where derivatives are evaluated **analytically** from the spline — not via
    finite differences on a piecewise-linear path. This avoids the curvature
    spikes that occur at polyline vertices and are the primary source of jitter
    in the binormal / rail computation.

    The spline is clamped at both ends: the tangent direction at the first and
    last sampled points matches the direction of the first and last input
    segments respectively.

    Raises ValueError if the input path has (numerically) zero length.
    """
    from scipy.interpolate import make_interp_spline
    s_raw = arc_length(pts)
    if s_raw[-1] < 1e-9:
        raise ValueError("Path has zero length")
    # Normalise to [0, 1] and remove any duplicate parameter values
    # (coincident control points would make the interpolation system singular).
    u_raw = s_raw / s_raw[-1]
    mask = np.concatenate([[True], np.diff(u_raw) > 1e-12])
    pts_c = pts[mask]
    u_c = u_raw[mask]
    k_order = min(3, len(pts_c) - 1)  # cubic when ≥ 4 pts, else lower
    # Clamped boundary conditions: fix dr/du at both endpoints so the spline
    # leaves / arrives in the direction of the first / last input segment.
    if k_order == 3 and len(pts_c) >= 4:
        tang_start = (pts_c[1] - pts_c[0]) / (u_c[1] - u_c[0])
        tang_end = (pts_c[-1] - pts_c[-2]) / (u_c[-1] - u_c[-2])
        bc_type = ([(1, tang_start)], [(1, tang_end)])
    else:
        # Too few points for a clamped cubic; let scipy use its default ends.
        bc_type = None
    bsp = make_interp_spline(u_c, pts_c, k=k_order, bc_type=bc_type)
    u_new = np.linspace(0, 1, n)
    r0 = bsp(u_new)      # positions, shape (n, 3)
    r1 = bsp(u_new, 1)   # dr/du, shape (n, 3)
    r2 = bsp(u_new, 2)   # d²r/du², shape (n, 3)
    s_new = arc_length(r0)
    # Unit tangent T = r1 / |r1| (guarded against a zero-length derivative).
    r1_mag = np.linalg.norm(r1, axis=1, keepdims=True)
    r1_mag = np.where(r1_mag < 1e-12, 1e-12, r1_mag)
    T = r1 / r1_mag
    # Curvature vector (w.r.t. arc-length parameter s):
    #   κ_vec = (r2 - (r2 · T) T) / |r1|²
    # i.e. the component of r2 perpendicular to T, rescaled from the
    # u-parameterisation to arc length.
    r2_along_T = np.sum(r2 * T, axis=1, keepdims=True)
    k_vector = (r2 - r2_along_T * T) / (r1_mag ** 2)
    k = np.linalg.norm(k_vector, axis=1)
    return r0, s_new, T, k, k_vector
# ── Differential geometry ──────────────────────────────────────────────────────
def compute_geometry(points, s):
    """Return T, k, k_vector, dk along the path.

    T        : (n, 3) unit tangents (central differences; the two endpoints
               copy their nearest interior neighbour)
    k        : (n,)   curvature magnitudes |k_vector|
    k_vector : (n, 3) curvature vectors (non-uniform central second difference)
    dk       : (n,)   dk/ds, consumed by junction detection

    Vectorised replacement for the original per-point Python loops —
    identical mathematics, single-pass numpy ops (noticeable at the
    thousands-of-samples internal resolution).
    """
    points = np.asarray(points, dtype=float)
    s = np.asarray(s, dtype=float)
    # Unit tangent from the central difference over [i-1, i+1]; rows where
    # the parameter step is degenerate (ds ≈ 0) are left as zero vectors,
    # matching the original guard.
    T = np.zeros_like(points)
    ds_c = s[2:] - s[:-2]
    ok_t = ds_c > 1e-12
    T[1:-1][ok_t] = (points[2:] - points[:-2])[ok_t] / ds_c[ok_t, None]
    T[0], T[-1] = T[1], T[-2]
    lengths = np.linalg.norm(T, axis=1, keepdims=True)
    safe_len = np.where(lengths < 1e-12, 1.0, lengths)
    T = np.where(lengths < 1e-12, 0.0, T / safe_len)
    # Curvature: non-uniform central second difference
    #   k_vec[i] = 2 * ((p[i+1]-p[i])/ds2 - (p[i]-p[i-1])/ds1) / (ds1 + ds2)
    # Degenerate steps are masked out (row stays zero), with the divisors
    # replaced by 1.0 there so no divide-by-zero warnings fire.
    k_vector = np.zeros_like(points)
    ds1 = s[1:-1] - s[:-2]
    ds2 = s[2:] - s[1:-1]
    ok_k = (ds1 > 1e-12) & (ds2 > 1e-12)
    safe1 = np.where(ok_k, ds1, 1.0)[:, None]
    safe2 = np.where(ok_k, ds2, 1.0)[:, None]
    denom = np.where(ok_k, ds1 + ds2, 1.0)[:, None]
    second = 2 * ((points[2:] - points[1:-1]) / safe2
                  - (points[1:-1] - points[:-2]) / safe1) / denom
    k_vector[1:-1][ok_k] = second[ok_k]
    k_vector[0], k_vector[-1] = k_vector[1], k_vector[-2]
    k = np.linalg.norm(k_vector, axis=1)
    dk = np.gradient(k, s)
    return T, k, k_vector, dk
# ── Junction detection ─────────────────────────────────────────────────────────
def detect_junctions(k, dk, threshold_percentile=88.0, min_separation=50):
    """Locate curvature-discontinuity candidates along the path.

    A severity score (|dk| plus twice the magnitude of its own gradient) is
    thresholded at *threshold_percentile*; strict local maxima above the
    threshold become candidates, which are then greedily merged — most
    severe first — so no two survivors lie within *min_separation* samples
    of each other. Returns a sorted list of sample indices. (``k`` is part
    of the call signature but does not enter the scoring.)
    """
    severity = np.abs(dk) + 2 * np.abs(np.gradient(dk))
    cutoff = np.percentile(severity, threshold_percentile)
    # Strict local maxima above the cutoff, ignoring a 10-sample margin
    # at each end of the path.
    peaks = [
        (i, severity[i])
        for i in range(10, len(severity) - 10)
        if severity[i] > cutoff
        and severity[i] > severity[i - 1]
        and severity[i] > severity[i + 1]
    ]
    if not peaks:
        return peaks
    # Admit peaks in decreasing severity order, dropping any that crowd
    # one already kept.
    peaks.sort(key=lambda item: item[1], reverse=True)
    kept = [peaks[0][0]]
    for idx, _ in peaks[1:]:
        if all(abs(idx - prev) > min_separation for prev in kept):
            kept.append(idx)
    return sorted(kept)
# ── G2-continuous transitions ──────────────────────────────────────────────────
def quintic_hermite_blend(t, p0, v0, a0, p1, v1, a1):
    """Quintic Hermite interpolation at parameter *t* ∈ [0, 1].

    Blends position / first derivative / second derivative at both endpoints
    with the standard C² quintic basis, so chained segments join with
    matching curvature (G2).
    """
    t2, t3, t4, t5 = (t ** e for e in range(2, 6))
    basis = (
        1 - 10 * t3 + 15 * t4 - 6 * t5,             # h0 -> p0
        t - 6 * t3 + 8 * t4 - 3 * t5,               # h1 -> v0
        0.5 * t2 - 1.5 * t3 + 1.5 * t4 - 0.5 * t5,  # h2 -> a0
        10 * t3 - 15 * t4 + 6 * t5,                 # h3 -> p1
        -4 * t3 + 7 * t4 - 3 * t5,                  # h4 -> v1
        0.5 * t3 - t4 + 0.5 * t5,                   # h5 -> a1
    )
    return sum(w * q for w, q in zip(basis, (p0, v0, a0, p1, v1, a1)))
def create_g2_transition(p_start, T_start, k_vector_start, p_end, T_end, k_vector_end, n_points=100):
    """Build a G2-continuous quintic Hermite blend between two path states.

    Endpoint tangents and curvature vectors are scaled by the chord length L
    (and L²) to convert from arc-length derivatives to the unit-parameter
    derivatives the Hermite basis expects.

    Vectorised: the six basis polynomials (same coefficients as
    quintic_hermite_blend) are evaluated for all samples at once and combined
    with one matrix product instead of a per-point Python loop.
    Returns an (n_points, 3) array from p_start to p_end inclusive.
    """
    L = np.linalg.norm(p_end - p_start)
    t = np.linspace(0, 1, n_points)
    t2, t3, t4, t5 = t**2, t**3, t**4, t**5
    H = np.stack([
        1 - 10*t3 + 15*t4 - 6*t5,
        t - 6*t3 + 8*t4 - 3*t5,
        0.5*t2 - 1.5*t3 + 1.5*t4 - 0.5*t5,
        10*t3 - 15*t4 + 6*t5,
        -4*t3 + 7*t4 - 3*t5,
        0.5*t3 - t4 + 0.5*t5,
    ], axis=1)  # (n_points, 6)
    ctrl = np.stack([
        np.asarray(p_start, dtype=float),
        np.asarray(T_start, dtype=float) * L,
        np.asarray(k_vector_start, dtype=float) * L**2,
        np.asarray(p_end, dtype=float),
        np.asarray(T_end, dtype=float) * L,
        np.asarray(k_vector_end, dtype=float) * L**2,
    ])  # (6, 3)
    return H @ ctrl
def replace_junctions_with_g2_transitions(points, s, junctions, window_length=0.025):
    """Splice a G2-continuous quintic blend over each detected junction.

    For every junction index, a window of roughly *window_length* (in the
    same units as *s*) of arc length centred on the junction is replaced by
    a quintic Hermite transition matching position, tangent and curvature
    vector at the window edges. Overlapping windows are skipped in junction
    order (first accepted wins). Returns a new point array; *points* itself
    is not modified.
    """
    if not junctions:
        return points
    T, k, k_vector, dk = compute_geometry(points, s)
    pts_new = points.copy()
    windows = []
    for jidx in junctions:
        hw = window_length / 2
        # Convert the arc-length window [s_j - hw, s_j + hw] to index bounds.
        si = np.searchsorted(s, s[jidx] - hw)
        ei = np.searchsorted(s, s[jidx] + hw)
        # Keep a 5-sample margin at each end of the path, and require at
        # least 10 samples so the blend has room to act.
        si = max(5, si)
        ei = min(len(points) - 5, ei)
        if ei - si >= 10:
            windows.append((jidx, si, ei))
    replaced = []
    for jidx, si, ei in windows:
        # Skip any window that overlaps one already replaced.
        if any(not (ei < rs or re < si) for rs, re in replaced):
            continue
        n_t = ei - si + 1
        transition = create_g2_transition(
            pts_new[si], T[si], k_vector[si],
            pts_new[ei], T[ei], k_vector[ei],
            n_points=n_t,
        )
        pts_new[si:ei + 1] = transition
        replaced.append((si, ei))
    return pts_new
# ── Physics simulation ─────────────────────────────────────────────────────────
def compute_velocity_profile(points, s, mass, v0, friction_coeff, g_scaled,
                             acceleration_strips=None):
    """Integrate the car's speed along the track from an energy balance.

    Each step applies the height drop (gravity), a rolling-friction loss
    proportional to distance travelled, and any acceleration strips whose
    fractional arc-length span covers the current sample. Speed is floored
    at zero — the car never rolls backwards in this model.
    """
    n = len(points)
    s_total = s[-1] if s[-1] > 0 else 1.0
    strips = acceleration_strips or []
    v = np.zeros(n)
    v[0] = v0
    for i in range(1, n):
        step = np.linalg.norm(points[i] - points[i - 1])
        drop = points[i - 1][2] - points[i][2]  # positive when descending
        friction_work = friction_coeff * mass * g_scaled * step
        # Sum the boost from every strip active at this arc-length fraction.
        frac = s[i] / s_total
        boost = sum(
            strip['accel_ms2']
            for strip in strips
            if strip['start_frac'] <= frac <= strip['end_frac']
        )
        v_sq = (v[i - 1] ** 2
                + 2 * g_scaled * drop
                - 2 * friction_work / mass
                + 2 * boost * step)
        v[i] = np.sqrt(v_sq) if v_sq > 0 else 0.0
    return v
def compute_hanging_binormals(points, velocities, T, k, k_vector, g_scaled):
    """Apparent-gravity "down" direction at every sample.

    The felt force is gravity plus the centrifugal reaction (magnitude v²·k,
    pointing away from the curve centre); its component perpendicular to the
    tangent is the direction a free-hanging car would point. Consecutive
    binormals are sign-aligned to avoid frame flips, and the first one is
    forced to have negative z (downward).
    """
    def unit(vec):
        # Local copy of normalize(): near-zero vectors collapse to zero.
        length = np.linalg.norm(vec)
        return vec / length if length >= 1e-12 else np.zeros_like(vec)

    n = len(points)
    gravity = np.array([0.0, 0.0, -g_scaled])
    B = np.zeros((n, 3))

    def felt_down(i):
        tang = unit(T[i])
        if k[i] < 1e-9:
            # Effectively straight: only gravity acts.
            force = gravity
        else:
            radius = 1 / k[i]
            inward = unit(k_vector[i])
            force = gravity + (velocities[i] ** 2 / radius) * (-inward)
        # Project out the along-track component, keep the perpendicular part.
        return unit(force - np.dot(force, tang) * tang)

    B[0] = felt_down(0)
    if B[0][2] > 0:
        B[0] = -B[0]
    for i in range(1, n):
        candidate = felt_down(i)
        # Maintain continuity with the previous sample's orientation.
        if np.dot(candidate, B[i - 1]) < 0:
            candidate = -candidate
        B[i] = candidate
    return B
def smooth_binormals(B, tangents, iterations=5):
    """Laplacian-style smoothing of the binormal field.

    Each pass replaces every interior binormal (two samples kept fixed at
    each end) with a [1, 2, 1]/4 weighted average of its neighbourhood,
    re-projected into the plane perpendicular to the local tangent and
    renormalised. Returns a new array; *B* is not modified.
    """
    smoothed = B.copy()
    n = len(B)
    for _pass in range(iterations):
        step = smoothed.copy()
        for i in range(2, n - 2):
            blended = 0.25 * (smoothed[i - 1] + 2 * smoothed[i] + smoothed[i + 1])
            # Inline unit-vector handling, mirroring normalize(): degenerate
            # vectors collapse to zero.
            t_hat = tangents[i]
            t_len = np.linalg.norm(t_hat)
            t_hat = t_hat / t_len if t_len >= 1e-12 else np.zeros_like(t_hat)
            flat = blended - np.dot(blended, t_hat) * t_hat
            f_len = np.linalg.norm(flat)
            step[i] = flat / f_len if f_len >= 1e-12 else np.zeros_like(flat)
        smoothed = step
    return smoothed
# ── Diagnostics ────────────────────────────────────────────────────────────────
def compute_diagnostics(points, velocities, k, B) -> dict:
    """Return a dict of ride-quality metrics (no side effects).

    Keys
    ----
    height_range_m        : [min z, max z] of the path
    velocity_range_ms     : [min, max] speed in m/s
    g_force_range         : [min, max] centripetal g-force, or None when no
                            sample has usable curvature
    stall_at_pct          : % of track where speed first falls below
                            0.01 m/s, or None if the car never stalls
    binormal_variation_max: largest point-to-point change of the binormal
    binormal_downward_pct : % of samples whose binormal points downward
    """
    velocities = np.asarray(velocities)
    k = np.asarray(k)
    # Centripetal g-force v²·k / g, vectorised. Usable samples need real
    # curvature (k > 1e-6) and a radius above 2 mm (k < 500) — tighter kinks
    # would swamp the range with nonsense values.
    usable = (k > 1e-6) & (k < 500.0)  # k < 500 ⇔ R = 1/k > 0.002 m
    g_forces = velocities[usable] ** 2 * k[usable] / GRAVITY
    stall_idx = np.where(velocities < 0.01)[0]
    binormal_changes = np.linalg.norm(np.diff(B, axis=0), axis=1)
    down_pct = 100 * np.sum(B[:, 2] < 0) / len(B)
    return {
        "height_range_m": [float(np.min(points[:, 2])), float(np.max(points[:, 2]))],
        "velocity_range_ms": [float(np.min(velocities)), float(np.max(velocities))],
        "g_force_range": [float(np.min(g_forces)), float(np.max(g_forces))] if len(g_forces) else None,
        "stall_at_pct": float(100 * stall_idx[0] / len(points)) if len(stall_idx) else None,
        "binormal_variation_max": float(np.max(binormal_changes)),
        "binormal_downward_pct": float(down_pct),
    }
# ── Profile arrays ────────────────────────────────────────────────────────────
def build_profile_arrays(s, velocities, k, downsample_factor, B, T) -> dict:
    """Return downsampled per-point arrays for frontend charting.

    Produces fractional arc-length positions plus velocity, tangential
    acceleration, centripetal g-force and bank angle at each kept sample,
    along with the total track length and an estimated ride duration.
    """
    dvds = np.gradient(velocities, s)
    accel = velocities * dvds  # dv/dt = v * dv/ds (m/s²)
    gf = (velocities ** 2 * k) / GRAVITY
    idx = np.arange(0, len(s), downsample_factor)
    s_ds = s[idx]
    s_frac = (s_ds / s_ds[-1]).tolist() if s_ds[-1] > 0 else s_ds.tolist()
    # Total arc length and ride duration (integrate dt = ds/v).
    # Floor at 1 m/s so that isolated transient-zero points (coaster just
    # barely crests a hill, sqrt(max(v2,0)) == 0 for one step) each
    # contribute at most ~2 s instead of ~1400 s to the integral.
    total_length_m = float(s[-1])
    safe_v = np.maximum(velocities, 1.0)
    total_duration_s = float(np.trapz(1.0 / safe_v, s))
    # Bank / roll angle: signed angle between apparent-down (B) and true vertical,
    # measured around the forward tangent.
    #   right = T × [0,0,1]   (local right when looking forward)
    #   roll  = atan2(dot(B, right), -B_z)
    # 0° = flat, +90° = banked right, -90° = banked left.
    right = np.cross(T, np.array([0.0, 0.0, 1.0]))
    r_norm = np.linalg.norm(right, axis=1, keepdims=True)
    # Guard against vertical tangents, where the cross product degenerates.
    r_norm = np.where(r_norm < 1e-9, 1.0, r_norm)
    right = right / r_norm
    roll_rad = np.arctan2(
        np.einsum('ij,ij->i', B, right),
        -B[:, 2],
    )
    roll_deg = np.degrees(roll_rad)
    return {
        's_frac': s_frac,
        'velocity_ms': velocities[idx].tolist(),
        'accel_ms2': accel[idx].tolist(),
        'g_force': gf[idx].tolist(),
        'roll_deg': roll_deg[idx].tolist(),
        'total_length_m': total_length_m,
        'total_duration_s': total_duration_s,
    }
# ── Public API ─────────────────────────────────────────────────────────────────
def generate_rails(
    points_m: np.ndarray,
    rail_spacing: float = 0.6,
    mass: float = 1000.0,
    initial_velocity: float = 1.0,
    friction_coeff: float = 0.02,
    junction_window_length: float = 5.0,
    junction_threshold: float = 88.0,
    binormal_smooth_iterations: int = 5,
    downsample_factor: int = 10,
    internal_steps: int = 5000,
    acceleration_strips: list | None = None,
) -> tuple:
    """
    Generate physics-based roller coaster rails from a centreline path.

    Parameters
    ----------
    points_m : np.ndarray shape (n, 3)
        Centreline waypoints in **metres**, local coordinate frame.
    rail_spacing : float
        Half-distance from centreline to each rail in metres (total width = 2×, default 0.6 → 1.2 m gauge).
    mass : float
        Mass of the coaster car in kg, used for friction losses.
    initial_velocity : float
        Speed at the start of the track in m/s.
    friction_coeff : float
        Rolling resistance coefficient (steel-on-steel ≈ 0.02).
    junction_window_length : float
        Arc-length of G2 transition windows in metres.
    junction_threshold : float
        Percentile threshold for curvature-discontinuity detection (85–95).
    binormal_smooth_iterations : int
        Number of binormal smoothing passes.
    downsample_factor : int
        Output rail points = internal_steps / downsample_factor.
    internal_steps : int
        Number of points used internally for the simulation.
    acceleration_strips : list | None
        Optional list of dicts with 'start_frac', 'end_frac' and 'accel_ms2'
        keys describing booster sections of the track.

    Returns
    -------
    rail_1_pts, rail_2_pts : np.ndarray shape (m, 3)
        Left and right rail positions in metres (same local frame as input).
    diagnostics : dict
        Ride-quality metrics.
    profile : dict
        Downsampled per-point arrays plus track totals for charting.

    Raises
    ------
    ValueError
        If fewer than 2 path points are supplied, or the path has zero length.
    """
    if len(points_m) < 2:
        raise ValueError("Need at least 2 path points")
    # Scale resolution with path length: 1 000 steps per km (≈ 1 per metre).
    # The caller-supplied internal_steps acts as a lower bound.
    approx_length = float(arc_length(points_m)[-1])
    internal_steps = max(internal_steps, int(approx_length))
    # Fit a cubic B-spline through the control points and evaluate positions
    # plus analytical tangents/curvatures at internal_steps evenly-spaced samples.
    # This eliminates the curvature spikes that arise from computing finite
    # differences on a piecewise-linear polyline approximation.
    pts, s, T, k, k_vector = _fit_spline_and_sample(points_m, internal_steps)
    dk = np.gradient(k, s)
    # Junction detection + G2 smoothing (handles any remaining curvature
    # discontinuities, e.g. from anchors placed very close together).
    avg_spacing = s[-1] / len(pts)
    min_sep = int((junction_window_length * 1.2) / avg_spacing)
    junctions = detect_junctions(k, dk, threshold_percentile=junction_threshold, min_separation=min_sep)
    if junctions:
        pts = replace_junctions_with_g2_transitions(pts, s, junctions, window_length=junction_window_length)
        s = arc_length(pts)
        # Recompute geometry via finite differences only after the path has been
        # modified; for the common (no-junction) path the spline values are used.
        T, k, k_vector, dk = compute_geometry(pts, s)
    # Physics
    velocities = compute_velocity_profile(pts, s, mass, initial_velocity, friction_coeff, GRAVITY,
                                          acceleration_strips=acceleration_strips)
    B = compute_hanging_binormals(pts, velocities, T, k, k_vector, GRAVITY)
    if binormal_smooth_iterations > 0:
        B = smooth_binormals(B, T, iterations=binormal_smooth_iterations)
    diag = compute_diagnostics(pts, velocities, k, B)
    profile = build_profile_arrays(s, velocities, k, downsample_factor, B, T)
    # Rail positions: offset perpendicular to tangent within the binormal plane
    crosses = np.cross(B, T)  # (n, 3)
    rail_1 = pts + rail_spacing * crosses
    rail_2 = pts - rail_spacing * crosses
    # Downsample
    rail_1 = rail_1[::downsample_factor]
    rail_2 = rail_2[::downsample_factor]
    return rail_1, rail_2, diag, profile