Initial commit
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
commit
d93412cd0d
52
.env.example
Normal file
52
.env.example
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
# Django
|
||||||
|
SECRET_KEY=change-me-to-a-long-random-string
|
||||||
|
DEBUG=True
|
||||||
|
ALLOWED_HOSTS=localhost,127.0.0.1
|
||||||
|
|
||||||
|
# Database (PostGIS)
|
||||||
|
POSTGRES_DB=splatmap
|
||||||
|
POSTGRES_USER=splatmap
|
||||||
|
POSTGRES_PASSWORD=splatmap
|
||||||
|
DATABASE_URL=postgis://splatmap:splatmap@db:5432/splatmap
|
||||||
|
|
||||||
|
# Redis / Celery
|
||||||
|
REDIS_URL=redis://redis:6379/0
|
||||||
|
CELERY_BROKER_URL=redis://redis:6379/0
|
||||||
|
CELERY_RESULT_BACKEND=django-db
|
||||||
|
|
||||||
|
# Authentik OIDC
|
||||||
|
OIDC_RP_CLIENT_ID=splatmap-backend
|
||||||
|
OIDC_RP_CLIENT_SECRET=
|
||||||
|
OIDC_OP_BASE_URL=https://auth.yourdomain.com/application/o/splatmap
|
||||||
|
|
||||||
|
# Wasabi S3
|
||||||
|
AWS_ACCESS_KEY_ID=
|
||||||
|
AWS_SECRET_ACCESS_KEY=
|
||||||
|
AWS_STORAGE_BUCKET_NAME=splatmap
|
||||||
|
AWS_S3_ENDPOINT_URL=https://s3.wasabisys.com
|
||||||
|
AWS_S3_REGION_NAME=us-east-1
|
||||||
|
|
||||||
|
# Firebase (FCM)
|
||||||
|
FIREBASE_CREDENTIALS_FILE=/app/secrets/firebase-credentials.json
|
||||||
|
|
||||||
|
# RunPod
|
||||||
|
RUNPOD_API_KEY=
|
||||||
|
RUNPOD_ENDPOINT_ID=
|
||||||
|
|
||||||
|
# Webhook secret — must match what is configured in RunPod endpoint settings
|
||||||
|
WEBHOOK_SECRET=change-me-to-a-random-secret
|
||||||
|
|
||||||
|
# Public URL of this API (used in RunPod callback payload)
|
||||||
|
API_BASE_URL=http://localhost:8000
|
||||||
|
|
||||||
|
# Cloudflare CDN prefix in front of Wasabi bucket (leave empty in dev)
|
||||||
|
CDN_BASE_URL=
|
||||||
|
|
||||||
|
# Sentry (production only)
|
||||||
|
SENTRY_DSN=
|
||||||
|
|
||||||
|
# Frontend (Vite) — VITE_ prefix exposes these to the browser bundle
|
||||||
|
VITE_OIDC_AUTHORITY=http://localhost:9000/application/o/splatmap
|
||||||
|
VITE_OIDC_CLIENT_ID=splatmap-web
|
||||||
|
VITE_API_BASE_URL=/api/v1
|
||||||
|
VITE_CESIUM_ION_TOKEN=
|
||||||
45
.gitignore
vendored
Normal file
45
.gitignore
vendored
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
# Environment
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
!.env.example
|
||||||
|
|
||||||
|
# Python
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*.pyo
|
||||||
|
*.pyd
|
||||||
|
*.egg-info/
|
||||||
|
*.egg
|
||||||
|
.eggs/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
.pytest_cache/
|
||||||
|
.mypy_cache/
|
||||||
|
.ruff_cache/
|
||||||
|
htmlcov/
|
||||||
|
.coverage
|
||||||
|
coverage.xml
|
||||||
|
|
||||||
|
# Django
|
||||||
|
staticfiles/
|
||||||
|
media/
|
||||||
|
backend/secrets/
|
||||||
|
|
||||||
|
# Node / Vite
|
||||||
|
node_modules/
|
||||||
|
web/dist/
|
||||||
|
web/.vite/
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
*.override.yml
|
||||||
|
|
||||||
|
# OS
|
||||||
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# Editor
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
158
README.md
Normal file
158
README.md
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
# SplatMap
|
||||||
|
|
||||||
|
A mobile + web app where users record videos of real-world locations, which are processed into 3D Gaussian Splats and surfaced on a map. At street-level zoom the map transitions into a live 3D splat rendering of that location. Users can create public challenges to send others to specific regions for recording.
|
||||||
|
|
||||||
|
## Stack
|
||||||
|
|
||||||
|
- **Mobile** — React Native + Vision Camera + Mapbox
|
||||||
|
- **Web map** — Cesium.js + gaussian-splats-3d
|
||||||
|
- **Backend** — Django + GeoDjango + PostGIS
|
||||||
|
- **Queue** — Celery + Redis
|
||||||
|
- **Splatting pipeline** — COLMAP + gsplat on RunPod
|
||||||
|
- **Storage** — Wasabi (S3-compatible)
|
||||||
|
- **Auth** — Authentik (OIDC)
|
||||||
|
- **Notifications** — Firebase Cloud Messaging
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- Docker + Docker Compose
|
||||||
|
- Node.js 20+ (for the web frontend)
|
||||||
|
- A `.env` file (see below)
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
### 1. Environment
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
|
||||||
|
Open `.env` and set at minimum:
|
||||||
|
|
||||||
|
```
|
||||||
|
SECRET_KEY=<long random string>
|
||||||
|
POSTGRES_PASSWORD=<choose a password>
|
||||||
|
VITE_CESIUM_ION_TOKEN=<from cesium.com/ion — free tier required>
|
||||||
|
```
|
||||||
|
|
||||||
|
All other values can stay as defaults for local development.
|
||||||
|
|
||||||
|
> **Cesium Ion token** — register a free account at https://cesium.com/ion/ and create a token
|
||||||
|
> with default asset access. Required even in development for imagery and terrain.
|
||||||
|
|
||||||
|
### 2. Start the backend
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose up --build
|
||||||
|
docker compose exec web python manage.py migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Start the web frontend
|
||||||
|
|
||||||
|
The frontend is not containerised — run it locally alongside Docker.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd web
|
||||||
|
npm install
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
Frontend: http://localhost:5173 (proxies `/api` → Django at :8000)
|
||||||
|
|
||||||
|
### 4. Create a superuser
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose exec web python manage.py createsuperuser
|
||||||
|
```
|
||||||
|
|
||||||
|
Admin panel: http://localhost:8000/admin
|
||||||
|
|
||||||
|
## Common commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start all services
|
||||||
|
docker compose up
|
||||||
|
|
||||||
|
# Start in background
|
||||||
|
docker compose up -d
|
||||||
|
|
||||||
|
# Rebuild after dependency changes
|
||||||
|
docker compose up --build
|
||||||
|
|
||||||
|
# Run migrations
|
||||||
|
docker compose exec web python manage.py migrate
|
||||||
|
|
||||||
|
# Make new migrations after model changes
|
||||||
|
docker compose exec web python manage.py makemigrations
|
||||||
|
|
||||||
|
# Open a Django shell
|
||||||
|
docker compose exec web python manage.py shell
|
||||||
|
|
||||||
|
# Open a psql shell
|
||||||
|
docker compose exec db psql -U splatmap splatmap
|
||||||
|
|
||||||
|
# View logs for a specific service
|
||||||
|
docker compose logs -f web
|
||||||
|
docker compose logs -f celery
|
||||||
|
|
||||||
|
# Stop all services
|
||||||
|
docker compose down
|
||||||
|
|
||||||
|
# Stop and remove volumes (wipes the database)
|
||||||
|
docker compose down -v
|
||||||
|
```
|
||||||
|
|
||||||
|
## Project structure
|
||||||
|
|
||||||
|
```
|
||||||
|
rcnn/
|
||||||
|
├── docker-compose.yml
|
||||||
|
├── .env.example
|
||||||
|
├── web/ ← Vite + React frontend
|
||||||
|
│ ├── package.json
|
||||||
|
│ ├── vite.config.ts
|
||||||
|
│ └── src/
|
||||||
|
│ ├── cesium/ ← Cesium viewer + camera hooks
|
||||||
|
│ ├── splat/ ← Gaussian splat layer + renderer
|
||||||
|
│ ├── challenges/ ← Challenge layer + panel + creator
|
||||||
|
│ ├── api/ ← Typed API wrappers
|
||||||
|
│ ├── store/ ← Zustand state slices
|
||||||
|
│ ├── auth/ ← Authentik OIDC
|
||||||
|
│ └── ui/ ← Shared UI components
|
||||||
|
└── backend/
|
||||||
|
├── Dockerfile
|
||||||
|
├── manage.py
|
||||||
|
├── requirements/
|
||||||
|
│ ├── base.txt
|
||||||
|
│ ├── development.txt
|
||||||
|
│ └── production.txt
|
||||||
|
├── config/
|
||||||
|
│ ├── settings/
|
||||||
|
│ │ ├── base.py
|
||||||
|
│ │ ├── development.py
|
||||||
|
│ │ └── production.py
|
||||||
|
│ ├── urls.py
|
||||||
|
│ ├── api_urls.py
|
||||||
|
│ └── celery.py
|
||||||
|
└── apps/
|
||||||
|
├── users/
|
||||||
|
├── splats/
|
||||||
|
├── challenges/
|
||||||
|
└── jobs/
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
Base URL: `http://localhost:8000/api/v1/`
|
||||||
|
|
||||||
|
All endpoints require a Bearer token from Authentik. In development you can test unauthenticated endpoints directly, or pass a token via:
|
||||||
|
|
||||||
|
```
|
||||||
|
Authorization: Bearer <token>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose exec web pytest
|
||||||
|
```
|
||||||
27
backend/Dockerfile
Normal file
27
backend/Dockerfile
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
FROM python:3.12-slim

# Geo stack (GDAL/GEOS/PROJ) required by GeoDjango at runtime.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gdal-bin \
    libgdal-dev \
    libgeos-dev \
    libproj-dev \
    binutils \
    && rm -rf /var/lib/apt/lists/*

# Pin library paths so GeoDjango's ctypes lookup does not have to guess.
ENV GDAL_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/libgdal.so
ENV GEOS_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/libgeos_c.so

WORKDIR /app

# Requirements flavour is selectable at build time (base/development/production).
ARG REQUIREMENTS=base
COPY requirements/ requirements/
RUN pip install --no-cache-dir -r requirements/${REQUIREMENTS}.txt

COPY . .

# Best effort — collectstatic may need settings/env not present at build time.
RUN python manage.py collectstatic --noinput 2>/dev/null || true

EXPOSE 8000

CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000", "--workers", "4"]
|
||||||
0
backend/apps/__init__.py
Normal file
0
backend/apps/__init__.py
Normal file
0
backend/apps/challenges/__init__.py
Normal file
0
backend/apps/challenges/__init__.py
Normal file
6
backend/apps/challenges/apps.py
Normal file
6
backend/apps/challenges/apps.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengesConfig(AppConfig):
    """App registry entry for the challenges app."""

    # Implicit PKs are 64-bit; UUID PKs are declared explicitly on the models.
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.challenges"
|
||||||
45
backend/apps/challenges/migrations/0001_initial.py
Normal file
45
backend/apps/challenges/migrations/0001_initial.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.contrib.gis.db.models.fields
|
||||||
|
import uuid
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Initial schema: Challenge and ChallengeParticipant tables.

    Relations (creator, user, submitted_splat FKs) are added separately in
    0002 to break the circular dependency with the splats app.
    """

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Challenge",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("title", models.CharField(max_length=255)),
                ("description", models.TextField(blank=True, default="")),
                ("status", models.CharField(choices=[("active", "Active"), ("closed", "Closed")], db_index=True, default="active", max_length=20)),
                ("region", django.contrib.gis.db.models.fields.PolygonField(geography=True, srid=4326)),
                ("region_centroid", django.contrib.gis.db.models.fields.PointField(geography=True, srid=4326)),
                ("max_submissions", models.PositiveIntegerField(blank=True, null=True)),
                ("submission_count", models.PositiveIntegerField(default=0)),
                ("expires_at", models.DateTimeField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
            ],
            options={"db_table": "challenges"},
        ),
        migrations.CreateModel(
            name="ChallengeParticipant",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("joined_at", models.DateTimeField(auto_now_add=True)),
            ],
            options={"db_table": "challenge_participants"},
        ),
    ]
|
||||||
51
backend/apps/challenges/migrations/0002_initial.py
Normal file
51
backend/apps/challenges/migrations/0002_initial.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Second half of the initial schema: FKs, indexes and uniqueness."""

    initial = True

    dependencies = [
        ("challenges", "0001_initial"),
        ("splats", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name="challenge",
            name="creator",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="created_challenges",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddField(
            model_name="challengeparticipant",
            name="challenge",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="participants",
                to="challenges.challenge",
            ),
        ),
        migrations.AddField(
            model_name="challengeparticipant",
            name="submitted_splat",
            field=models.OneToOneField(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="challenge_participation",
                to="splats.splat",
            ),
        ),
        migrations.AddField(
            model_name="challengeparticipant",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="challenge_participations",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddIndex(
            model_name="challenge",
            index=models.Index(fields=["status", "created_at"], name="challenges_status_7602d2_idx"),
        ),
        migrations.AddIndex(
            model_name="challengeparticipant",
            index=models.Index(fields=["user", "joined_at"], name="challenge_p_user_id_c70b4e_idx"),
        ),
        migrations.AlterUniqueTogether(
            name="challengeparticipant",
            unique_together={("challenge", "user")},
        ),
    ]
|
||||||
0
backend/apps/challenges/migrations/__init__.py
Normal file
0
backend/apps/challenges/migrations/__init__.py
Normal file
85
backend/apps/challenges/models.py
Normal file
85
backend/apps/challenges/models.py
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
import uuid
|
||||||
|
from django.contrib.gis.db import models
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
|
||||||
|
class Challenge(models.Model):
    """A public call-to-action asking users to record a splat inside a region."""

    class Status(models.TextChoices):
        ACTIVE = "active", "Active"
        CLOSED = "closed", "Closed"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    creator = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="created_challenges",
    )

    title = models.CharField(max_length=255)
    description = models.TextField(blank=True, default="")
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.ACTIVE,
        db_index=True,
    )

    # The polygon users must be physically inside to submit a recording;
    # drawn by the creator on the Cesium map.
    region = models.PolygonField(geography=True)
    # Centroid derived from `region` (kept in sync by save(), never
    # user-supplied) — stored separately for cheap proximity queries and
    # map pin placement.
    region_centroid = models.PointField(geography=True)

    # Optional cap on accepted submissions (null = unlimited).
    max_submissions = models.PositiveIntegerField(null=True, blank=True)
    # Denormalised count of published splats linked to this challenge.
    # Incremented by a Celery task when a splat transitions to
    # is_published=True; avoids a COUNT(*) on every map tile request.
    submission_count = models.PositiveIntegerField(default=0)

    expires_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def save(self, *args, **kwargs):
        """Persist the row, refreshing the derived centroid beforehand."""
        if self.region:
            self.region_centroid = self.region.centroid
        super().save(*args, **kwargs)

    def __str__(self):
        return self.title

    class Meta:
        db_table = "challenges"
        indexes = [
            models.Index(fields=["status", "created_at"]),
        ]
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeParticipant(models.Model):
    """
    A user who has bookmarked / accepted a challenge.

    Used to target FCM notifications (e.g. new submission, challenge expiring
    soon). Participation is recorded automatically when a user submits a splat
    for a challenge, or manually when they tap "I'll do this" in the app.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    challenge = models.ForeignKey(
        Challenge,
        on_delete=models.CASCADE,
        related_name="participants",
    )
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="challenge_participations",
    )
    # The splat this user submitted for the challenge, if any.
    submitted_splat = models.OneToOneField(
        "splats.Splat",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="challenge_participation",
    )
    joined_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"{self.user_id} → {self.challenge_id}"

    class Meta:
        db_table = "challenge_participants"
        unique_together = [("challenge", "user")]
        indexes = [
            models.Index(fields=["user", "joined_at"]),
        ]
|
||||||
102
backend/apps/challenges/serializers.py
Normal file
102
backend/apps/challenges/serializers.py
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
from rest_framework import serializers
|
||||||
|
from rest_framework_gis.fields import GeometryField
|
||||||
|
from rest_framework_gis.serializers import GeoFeatureModelSerializer
|
||||||
|
|
||||||
|
from apps.utils.storage import preview_url
|
||||||
|
from .models import Challenge, ChallengeParticipant
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeCreateSerializer(serializers.ModelSerializer):
    """Input serializer for creating a challenge from a GeoJSON region."""

    # Accepts GeoJSON geometry from the client, rounded to 6 decimal places.
    region = GeometryField(precision=6)

    class Meta:
        model = Challenge
        fields = ["title", "description", "region", "max_submissions", "expires_at"]

    def validate_region(self, value):
        """Accept only a plain Polygon (no MultiPolygon, Point, etc.)."""
        if value.geom_type == "Polygon":
            return value
        raise serializers.ValidationError("region must be a GeoJSON Polygon.")
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeUpdateSerializer(serializers.ModelSerializer):
    """Editable subset of Challenge; region is not changeable via this serializer."""

    class Meta:
        model = Challenge
        fields = ["title", "description", "expires_at", "status"]
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeMapSerializer(GeoFeatureModelSerializer):
    """
    GeoJSON FeatureCollection using region_centroid as geometry.

    Used for the map pin list — does not include the full region polygon.
    """

    class Meta:
        model = Challenge
        geo_field = "region_centroid"
        fields = [
            "id",
            "title",
            "status",
            "submission_count",
            "max_submissions",
            "expires_at",
            "created_at",
        ]
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeSplatPreviewSerializer(serializers.Serializer):
    """Minimal splat representation (id, preview image, timestamp)."""

    id = serializers.UUIDField()
    preview_url = serializers.SerializerMethodField()
    created_at = serializers.DateTimeField()

    def get_preview_url(self, obj):
        # Resolve the stored object key into a fetchable URL.
        return preview_url(obj.preview_key)
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeDetailSerializer(serializers.ModelSerializer):
    """Full challenge payload: geometry, creator, participation state, previews."""

    region = serializers.SerializerMethodField()
    region_centroid = serializers.SerializerMethodField()
    creator_username = serializers.CharField(source="creator.username", read_only=True)
    participant_count = serializers.SerializerMethodField()
    is_participating = serializers.SerializerMethodField()
    preview_splats = serializers.SerializerMethodField()

    class Meta:
        model = Challenge
        fields = [
            "id", "title", "description", "status",
            "creator_username",
            "region", "region_centroid",
            "max_submissions", "submission_count",
            "participant_count", "is_participating",
            "preview_splats",
            "expires_at", "created_at", "updated_at",
        ]

    def get_region(self, obj):
        # GeoJSON string, or None when the geometry is absent.
        return obj.region.geojson if obj.region else None

    def get_region_centroid(self, obj):
        return obj.region_centroid.geojson if obj.region_centroid else None

    def get_participant_count(self, obj):
        return obj.participants.count()

    def get_is_participating(self, obj):
        """Whether the requesting (authenticated) user has joined this challenge."""
        request = self.context.get("request")
        if not request or not request.user.is_authenticated:
            return False
        return obj.participants.filter(user=request.user).exists()

    def get_preview_splats(self, obj):
        """Up to five most recent published splats for this challenge."""
        from apps.splats.models import Splat

        recent = (
            Splat.objects
            .filter(challenge=obj, is_published=True)
            .order_by("-created_at")[:5]
        )
        return ChallengeSplatPreviewSerializer(recent, many=True).data
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeParticipantSerializer(serializers.ModelSerializer):
    """Read-only representation of a participation row."""

    class Meta:
        model = ChallengeParticipant
        fields = ["id", "joined_at"]
        read_only_fields = fields
|
||||||
34
backend/apps/challenges/tasks.py
Normal file
34
backend/apps/challenges/tasks.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
import logging
|
||||||
|
from celery import shared_task
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(queue="default")
def notify_challenge_expiring(challenge_id: str):
    """
    Send an FCM notification to all participants of a challenge that is
    expiring soon. Scheduled by celery-beat.

    Skips participants who have already submitted a splat, and no-ops
    silently when the challenge no longer exists or is not active.
    """
    from apps.challenges.models import Challenge, ChallengeParticipant
    from apps.utils.fcm import send_notification

    try:
        challenge = Challenge.objects.get(pk=challenge_id, status=Challenge.Status.ACTIVE)
    except Challenge.DoesNotExist:
        return

    participants = ChallengeParticipant.objects.filter(
        challenge=challenge,
        submitted_splat__isnull=True,  # only those who haven't submitted yet
    ).select_related("user")

    sent = 0
    for participant in participants:
        # NOTE(review): assumes send_notification tolerates a missing/None
        # fcm_token — confirm against apps.utils.fcm.
        send_notification(
            participant.user.fcm_token,
            title="Challenge expiring soon!",
            body=f'"{challenge.title}" is closing soon. Don\'t miss your chance!',
            data={"challenge_id": str(challenge.id), "type": "challenge_expiring"},
        )
        sent += 1

    # Count while iterating — calling participants.count() here would issue
    # a second COUNT(*) query after the rows were already fetched.
    logger.info("Sent expiry notifications for challenge %s to %d participants", challenge_id, sent)
|
||||||
14
backend/apps/challenges/urls.py
Normal file
14
backend/apps/challenges/urls.py
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
from django.urls import path
|
||||||
|
from .views import (
|
||||||
|
ChallengeListCreateView,
|
||||||
|
ChallengeDetailView,
|
||||||
|
ChallengeParticipateView,
|
||||||
|
ChallengeSplatsView,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mounted under /api/v1/challenges/ by the project-level URL conf.
urlpatterns = [
    path("", ChallengeListCreateView.as_view(), name="challenge-list-create"),
    path("<uuid:pk>/", ChallengeDetailView.as_view(), name="challenge-detail"),
    path(
        "<uuid:pk>/participate/",
        ChallengeParticipateView.as_view(),
        name="challenge-participate",
    ),
    path(
        "<uuid:pk>/splats/",
        ChallengeSplatsView.as_view(),
        name="challenge-splats",
    ),
]
|
||||||
170
backend/apps/challenges/views.py
Normal file
170
backend/apps/challenges/views.py
Normal file
@ -0,0 +1,170 @@
|
|||||||
|
from django.contrib.gis.geos import Point, Polygon
|
||||||
|
from django.contrib.gis.measure import D
|
||||||
|
from rest_framework import generics, status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from apps.common.permissions import IsCreator
|
||||||
|
from apps.utils.fcm import send_notification
|
||||||
|
from .models import Challenge, ChallengeParticipant
|
||||||
|
from .serializers import (
|
||||||
|
ChallengeCreateSerializer,
|
||||||
|
ChallengeUpdateSerializer,
|
||||||
|
ChallengeDetailSerializer,
|
||||||
|
ChallengeMapSerializer,
|
||||||
|
ChallengeParticipantSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_bbox(bbox_str):
    """Parse 'minLon,minLat,maxLon,maxLat' into a (Polygon, error) pair.

    Exactly one element is None: the SRID-4326 polygon on success, or a
    human-readable error message on any parse failure.
    """
    try:
        min_lon, min_lat, max_lon, max_lat = (float(part) for part in bbox_str.split(","))
        box = Polygon.from_bbox((min_lon, min_lat, max_lon, max_lat))
        box.srid = 4326
    except Exception:
        return None, "bbox must be four comma-separated floats: minLon,minLat,maxLon,maxLat"
    return box, None
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeListCreateView(APIView):
    """List challenges as GeoJSON map pins, or create a new challenge."""

    def get(self, request):
        """Filter by ?status= (default 'active'), optional ?bbox= and ?near=."""
        challenges = Challenge.objects.all()

        challenges = challenges.filter(
            status=request.query_params.get("status", "active")
        )

        bbox_str = request.query_params.get("bbox")
        if bbox_str:
            bbox, error = _parse_bbox(bbox_str)
            if error:
                return Response(
                    {"error": "invalid_bbox", "detail": error},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            challenges = challenges.filter(region_centroid__within=bbox)

        near_str = request.query_params.get("near")
        if near_str:
            try:
                lat, lon, radius_m = (float(part) for part in near_str.split(","))
            except Exception:
                return Response(
                    {"error": "invalid_near", "detail": "near must be lat,lon,radius_m"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            center = Point(lon, lat, srid=4326)
            challenges = challenges.filter(
                region_centroid__distance_lte=(center, D(m=radius_m))
            )

        return Response(ChallengeMapSerializer(challenges, many=True).data)

    def post(self, request):
        """Create a challenge owned by the requesting user; return full detail."""
        serializer = ChallengeCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        challenge = serializer.save(creator=request.user)
        return Response(
            ChallengeDetailSerializer(challenge, context={"request": request}).data,
            status=status.HTTP_201_CREATED,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeDetailView(APIView):
    """Retrieve, update (creator only) or soft-close (creator only) a challenge."""

    def _get_challenge(self, pk):
        """Fetch the challenge with creator/participants preloaded, or None."""
        try:
            return (
                Challenge.objects
                .select_related("creator")
                .prefetch_related("participants")
                .get(pk=pk)
            )
        except Challenge.DoesNotExist:
            return None

    def _is_creator(self, request, challenge):
        """Object-level check: only the creator may modify a challenge."""
        return IsCreator().has_object_permission(request, self, challenge)

    def get(self, request, pk):
        challenge = self._get_challenge(pk)
        if challenge is None:
            return Response(status=status.HTTP_404_NOT_FOUND)
        return Response(ChallengeDetailSerializer(challenge, context={"request": request}).data)

    def patch(self, request, pk):
        challenge = self._get_challenge(pk)
        if challenge is None:
            return Response(status=status.HTTP_404_NOT_FOUND)
        if not self._is_creator(request, challenge):
            return Response(status=status.HTTP_403_FORBIDDEN)

        serializer = ChallengeUpdateSerializer(challenge, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()

        return Response(ChallengeDetailSerializer(challenge, context={"request": request}).data)

    def delete(self, request, pk):
        """Soft delete: mark the challenge closed rather than removing the row."""
        challenge = self._get_challenge(pk)
        if challenge is None:
            return Response(status=status.HTTP_404_NOT_FOUND)
        if not self._is_creator(request, challenge):
            return Response(status=status.HTTP_403_FORBIDDEN)

        challenge.status = Challenge.Status.CLOSED
        challenge.save(update_fields=["status", "updated_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeParticipateView(APIView):
    """Join (POST) or leave (DELETE) a challenge as a participant."""

    def post(self, request, pk):
        """Idempotently join: 201 on first join, 200 if already a participant."""
        try:
            challenge = Challenge.objects.select_related("creator").get(pk=pk)
        except Challenge.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)

        if challenge.status != Challenge.Status.ACTIVE:
            return Response(
                {"error": "challenge_closed", "detail": "This challenge is no longer active."},
                status=status.HTTP_403_FORBIDDEN,
            )

        participant, created = ChallengeParticipant.objects.get_or_create(
            challenge=challenge,
            user=request.user,
        )

        if created:
            # First join only — notify the challenge creator.
            send_notification(
                challenge.creator.fcm_token,
                title="Someone accepted your challenge!",
                body=f'A user is heading out for "{challenge.title}".',
                data={"challenge_id": str(challenge.id), "type": "challenge_accepted"},
            )

        return Response(
            ChallengeParticipantSerializer(participant).data,
            status=status.HTTP_201_CREATED if created else status.HTTP_200_OK,
        )

    def delete(self, request, pk):
        """Leave a challenge; forbidden once a splat has been submitted."""
        try:
            participant = ChallengeParticipant.objects.get(
                challenge_id=pk,
                user=request.user,
            )
        except ChallengeParticipant.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)

        if participant.submitted_splat_id:
            return Response(
                {"error": "cannot_leave", "detail": "You cannot remove yourself after submitting a splat."},
                status=status.HTTP_403_FORBIDDEN,
            )

        participant.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
class ChallengeSplatsView(generics.ListAPIView):
    """Published splats submitted for a given challenge, newest first."""

    # Class-scope import keeps module load free of a splats->challenges cycle.
    from apps.splats.serializers import SplatMapSerializer

    serializer_class = SplatMapSerializer

    def get_queryset(self):
        from apps.splats.models import Splat

        return (
            Splat.objects
            .filter(challenge_id=self.kwargs["pk"], is_published=True)
            .order_by("-created_at")
        )
|
||||||
0
backend/apps/common/__init__.py
Normal file
0
backend/apps/common/__init__.py
Normal file
35
backend/apps/common/permissions.py
Normal file
35
backend/apps/common/permissions.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
from rest_framework.permissions import BasePermission, SAFE_METHODS
|
||||||
|
|
||||||
|
|
||||||
|
class IsOwner(BasePermission):
    """Object-level: allow access only to the object's owner."""

    def has_object_permission(self, request, view, obj):
        # Grant access iff the requester is the stored owner.
        return request.user == obj.owner
|
||||||
|
|
||||||
|
|
||||||
|
class IsCreator(BasePermission):
    """Object-level: allow access only to the object's creator."""

    def has_object_permission(self, request, view, obj):
        # Grant access iff the requester created the object.
        return request.user == obj.creator
|
||||||
|
|
||||||
|
|
||||||
|
class IsOwnerOrReadOnly(BasePermission):
    """Read access for anyone; write access only for the object's owner."""

    def has_object_permission(self, request, view, obj):
        is_read_only = request.method in SAFE_METHODS
        return is_read_only or obj.owner == request.user
|
||||||
|
|
||||||
|
|
||||||
|
class WebhookPermission(BasePermission):
    """
    Validates the X-Webhook-Secret header against settings.WEBHOOK_SECRET.
    Used on the RunPod callback endpoint.
    """

    def has_permission(self, request, view):
        import hmac

        from django.conf import settings

        expected = settings.WEBHOOK_SECRET
        # Fail closed: an unset/empty configured secret denies everything,
        # matching the original `bool(settings.WEBHOOK_SECRET) and ...` guard.
        if not expected:
            return False

        provided = request.headers.get("X-Webhook-Secret", "")
        # compare_digest is constant-time; a plain `==` leaks how many
        # leading characters of the secret match via response timing.
        return hmac.compare_digest(str(provided), str(expected))
|
||||||
0
backend/apps/jobs/__init__.py
Normal file
0
backend/apps/jobs/__init__.py
Normal file
6
backend/apps/jobs/apps.py
Normal file
6
backend/apps/jobs/apps.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class JobsConfig(AppConfig):
    """App config for the splat-processing job pipeline (apps.jobs)."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.jobs"
|
||||||
36
backend/apps/jobs/migrations/0001_initial.py
Normal file
36
backend/apps/jobs/migrations/0001_initial.py
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated by Django — creates the splat_jobs table.
    # Relational fields (splat, submitted_by) are added in 0002/0003.
    # Do not hand-edit; regenerate with `makemigrations` instead.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='SplatJob',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('status', models.CharField(choices=[('queued', 'Queued'), ('running', 'Running'), ('succeeded', 'Succeeded'), ('failed', 'Failed')], db_index=True, default='queued', max_length=20)),
                ('current_step', models.CharField(blank=True, choices=[('extracting_frames', 'Extracting frames'), ('running_colmap', 'Running COLMAP'), ('training_gsplat', 'Training gsplat'), ('exporting', 'Exporting .ksplat'), ('quality_check', 'Quality check')], default='', max_length=30)),
                ('progress', models.PositiveSmallIntegerField(default=0)),
                ('runpod_job_id', models.CharField(blank=True, db_index=True, default='', max_length=255)),
                ('celery_task_id', models.CharField(blank=True, default='', max_length=255)),
                ('retry_count', models.PositiveSmallIntegerField(default=0)),
                ('error_message', models.TextField(blank=True, default='')),
                ('pipeline_logs', models.JSONField(blank=True, default=dict)),
                ('colmap_points', models.PositiveIntegerField(blank=True, null=True)),
                ('queued_at', models.DateTimeField(auto_now_add=True)),
                ('started_at', models.DateTimeField(blank=True, null=True)),
                ('finished_at', models.DateTimeField(blank=True, null=True)),
            ],
            options={
                'db_table': 'splat_jobs',
            },
        ),
    ]
|
||||||
22
backend/apps/jobs/migrations/0002_initial.py
Normal file
22
backend/apps/jobs/migrations/0002_initial.py
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated by Django — links SplatJob 1:1 to its Splat.
    # Do not hand-edit; regenerate with `makemigrations` instead.

    initial = True

    dependencies = [
        ('jobs', '0001_initial'),
        ('splats', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='splatjob',
            name='splat',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='job', to='splats.splat'),
        ),
    ]
|
||||||
27
backend/apps/jobs/migrations/0003_initial.py
Normal file
27
backend/apps/jobs/migrations/0003_initial.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated by Django — adds the submitted_by FK and the
    # (status, queued_at) composite index to splat_jobs.
    # Do not hand-edit; regenerate with `makemigrations` instead.

    initial = True

    dependencies = [
        ('jobs', '0002_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='splatjob',
            name='submitted_by',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='splat_jobs', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddIndex(
            model_name='splatjob',
            index=models.Index(fields=['status', 'queued_at'], name='splat_jobs_status_73c52b_idx'),
        ),
    ]
|
||||||
0
backend/apps/jobs/migrations/__init__.py
Normal file
0
backend/apps/jobs/migrations/__init__.py
Normal file
61
backend/apps/jobs/models.py
Normal file
61
backend/apps/jobs/models.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import uuid
|
||||||
|
from django.db import models
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
|
||||||
|
class SplatJob(models.Model):
    """Tracks one RunPod processing run for a Splat, from queue to completion.

    Created when a splat's video upload is confirmed; updated by the Celery
    dispatch task and by incoming RunPod webhooks (see apps.jobs.views).
    """

    class Status(models.TextChoices):
        QUEUED = "queued", "Queued"
        RUNNING = "running", "Running"
        SUCCEEDED = "succeeded", "Succeeded"
        FAILED = "failed", "Failed"

    class Step(models.TextChoices):
        # Pipeline stages, listed in execution order.
        EXTRACTING_FRAMES = "extracting_frames", "Extracting frames"
        RUNNING_COLMAP = "running_colmap", "Running COLMAP"
        TRAINING_GSPLAT = "training_gsplat", "Training gsplat"
        EXPORTING = "exporting", "Exporting .ksplat"
        QUALITY_CHECK = "quality_check", "Quality check"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # Exactly one job per splat; job is deleted with its splat.
    splat = models.OneToOneField("splats.Splat", on_delete=models.CASCADE, related_name="job")
    # Nullable so job history survives user deletion.
    submitted_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, related_name="splat_jobs"
    )

    status = models.CharField(max_length=20, choices=Status.choices, default=Status.QUEUED, db_index=True)
    current_step = models.CharField(max_length=30, choices=Step.choices, blank=True, default="")

    # 0–100 overall progress, updated by the RunPod webhook on each step transition
    progress = models.PositiveSmallIntegerField(default=0)

    # RunPod serverless job ID — used to poll status and match incoming webhooks
    runpod_job_id = models.CharField(max_length=255, blank=True, default="", db_index=True)
    celery_task_id = models.CharField(max_length=255, blank=True, default="")

    # Number of times this job has been requeued after a transient failure
    retry_count = models.PositiveSmallIntegerField(default=0)

    error_message = models.TextField(blank=True, default="")

    # Structured log output from each pipeline step, keyed by Step value.
    # Populated by the RunPod webhook on each step completion.
    # Example: {"extracting_frames": {"frames": 1350, "duration_s": 8.2}, ...}
    pipeline_logs = models.JSONField(default=dict, blank=True)

    # COLMAP sparse reconstruction quality signal.
    # Low point count (< ~500) usually means the splat will be poor quality.
    colmap_points = models.PositiveIntegerField(null=True, blank=True)

    queued_at = models.DateTimeField(auto_now_add=True)
    started_at = models.DateTimeField(null=True, blank=True)
    finished_at = models.DateTimeField(null=True, blank=True)

    def __str__(self):
        return f"Job {self.id} for splat {self.splat_id} [{self.status}]"

    class Meta:
        db_table = "splat_jobs"
        indexes = [
            # Supports queue scans ordered by enqueue time per status.
            models.Index(fields=["status", "queued_at"]),
        ]
|
||||||
41
backend/apps/jobs/serializers.py
Normal file
41
backend/apps/jobs/serializers.py
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
from rest_framework import serializers
|
||||||
|
from .models import SplatJob
|
||||||
|
|
||||||
|
|
||||||
|
class SplatJobSerializer(serializers.ModelSerializer):
    """Read-only job status representation (returned by the job detail view)."""

    class Meta:
        model = SplatJob
        fields = [
            "id", "status", "current_step", "progress",
            "retry_count", "error_message",
            "colmap_points",
            "queued_at", "started_at", "finished_at",
        ]
        # Clients can never write job state through the API.
        read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class WebhookInputSerializer(serializers.Serializer):
    """Validates the payload sent by RunPod to POST /jobs/webhook/."""

    class OutputSerializer(serializers.Serializer):
        # Pipeline artifact keys/metrics. All fields optional: on
        # "step_complete" callbacks only a subset is expected to be present.
        splat_key = serializers.CharField(required=False, default="")
        preview_key = serializers.CharField(required=False, default="")
        splat_file_size = serializers.IntegerField(required=False, allow_null=True)
        colmap_points = serializers.IntegerField(required=False, allow_null=True)
        quality_score = serializers.FloatField(required=False, allow_null=True)
        frame_count = serializers.IntegerField(required=False, allow_null=True)
        # GeoJSON point [lon, lat]
        location = serializers.ListField(child=serializers.FloatField(), required=False, allow_null=True)
        altitude = serializers.FloatField(required=False, allow_null=True)
        heading = serializers.FloatField(required=False, allow_null=True)
        # GeoJSON polygon for coverage
        coverage = serializers.JSONField(required=False, allow_null=True)

    STATUS_CHOICES = ["succeeded", "failed", "step_complete"]

    job_id = serializers.CharField()  # RunPod job ID
    status = serializers.ChoiceField(choices=STATUS_CHOICES)
    step = serializers.CharField(required=False, default="")
    progress = serializers.IntegerField(min_value=0, max_value=100, required=False, default=0)
    output = OutputSerializer(required=False, default=dict)
    error = serializers.CharField(required=False, default="")
|
||||||
70
backend/apps/jobs/tasks.py
Normal file
70
backend/apps/jobs/tasks.py
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from celery import shared_task
|
||||||
|
from django.conf import settings
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(bind=True, max_retries=3, default_retry_delay=60, queue="splat_jobs")
def dispatch_splat_job(self, splat_job_id: str):
    """
    Submit a splatting job to the RunPod serverless endpoint.
    Stores the RunPod job ID on the SplatJob record so incoming
    webhooks can be matched back to it.

    Retries up to 3 times (60s apart) on HTTP/network failure; a missing
    SplatJob row or missing storage config returns without retrying.
    """
    from apps.jobs.models import SplatJob
    from apps.splats.models import Splat
    from apps.utils.storage import generate_presigned_get_url

    try:
        job = SplatJob.objects.select_related("splat").get(id=splat_job_id)
    except SplatJob.DoesNotExist:
        # Nothing to retry — the row is gone (or the ID was wrong).
        logger.error("SplatJob %s not found", splat_job_id)
        return

    splat = job.splat

    # Generate a time-limited download URL for RunPod to fetch the video
    video_url = generate_presigned_get_url(splat.video_key, expires_in=7200)
    if video_url is None:
        # Development — no real storage, bail out gracefully
        logger.info("Skipping RunPod dispatch in dev (no S3 storage): job %s", splat_job_id)
        return

    webhook_url = f"{settings.API_BASE_URL}/api/v1/jobs/webhook/"

    payload = {
        "input": {
            "video_url": video_url,
            "splat_id": str(splat.id),
            "job_id": str(job.id),
            "webhook_url": webhook_url,
            # Shared secret echoed back in the X-Webhook-Secret header;
            # validated by WebhookPermission on the callback endpoint.
            "webhook_secret": settings.WEBHOOK_SECRET,
        }
    }

    try:
        response = requests.post(
            f"https://api.runpod.io/v2/{settings.RUNPOD_ENDPOINT_ID}/run",
            json=payload,
            headers={"Authorization": f"Bearer {settings.RUNPOD_API_KEY}"},
            timeout=15,
        )
        response.raise_for_status()
    except requests.RequestException as exc:
        # Transient network/HTTP failure — let Celery requeue this task.
        logger.exception("RunPod dispatch failed for job %s", splat_job_id)
        raise self.retry(exc=exc)

    # NOTE(review): assumes the RunPod /run response always contains "id";
    # a 2xx body without it would raise KeyError here — confirm API contract.
    runpod_job_id = response.json()["id"]

    # .update() avoids racing with webhook writes to other columns.
    SplatJob.objects.filter(pk=job.pk).update(
        runpod_job_id=runpod_job_id,
        status=SplatJob.Status.RUNNING,
        started_at=timezone.now(),
    )
    Splat.objects.filter(pk=splat.pk).update(status=Splat.Status.PROCESSING)

    logger.info("Dispatched RunPod job %s for splat %s", runpod_job_id, splat.id)
|
||||||
7
backend/apps/jobs/urls.py
Normal file
7
backend/apps/jobs/urls.py
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
from django.urls import path
|
||||||
|
from .views import JobDetailView, JobWebhookView
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path("<uuid:pk>/", JobDetailView.as_view(), name="job-detail"),
|
||||||
|
path("webhook/", JobWebhookView.as_view(), name="job-webhook"),
|
||||||
|
]
|
||||||
217
backend/apps/jobs/views.py
Normal file
217
backend/apps/jobs/views.py
Normal file
@ -0,0 +1,217 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
from django.contrib.gis.geos import Point, GEOSGeometry
|
||||||
|
from django.utils import timezone
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from apps.common.permissions import WebhookPermission
|
||||||
|
from .models import SplatJob
|
||||||
|
from .serializers import SplatJobSerializer, WebhookInputSerializer
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class JobDetailView(APIView):
    """Returns processing-job status to the owning user; 404 to all others."""

    def get(self, request, pk):
        job = SplatJob.objects.select_related("splat__owner").filter(pk=pk).first()
        # Missing jobs and other users' jobs both answer 404, so job IDs
        # cannot be probed for existence.
        if job is None or job.splat.owner != request.user:
            return Response(status=status.HTTP_404_NOT_FOUND)
        return Response(SplatJobSerializer(job).data)
|
||||||
|
|
||||||
|
|
||||||
|
class JobWebhookView(APIView):
    """Receives pipeline status callbacks from RunPod.

    Unauthenticated (RunPod is not an API user); protected instead by the
    shared-secret header checked in WebhookPermission.
    """

    authentication_classes = []
    permission_classes = [WebhookPermission]

    def post(self, request):
        serializer = WebhookInputSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        payload = serializer.validated_data

        try:
            job = SplatJob.objects.select_related(
                "splat__owner", "splat__challenge__creator"
            ).get(runpod_job_id=payload["job_id"])
        except SplatJob.DoesNotExist:
            logger.warning("Webhook received for unknown RunPod job ID: %s", payload["job_id"])
            return Response(status=status.HTTP_404_NOT_FOUND)

        # Route the callback to the matching handler; unknown statuses are
        # validated out by the serializer's ChoiceField.
        handlers = {
            "step_complete": _handle_step_complete,
            "succeeded": _handle_succeeded,
            "failed": _handle_failed,
        }
        handler = handlers.get(payload["status"])
        if handler is not None:
            handler(job, payload)

        return Response(status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Internal webhook handlers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _handle_step_complete(job, payload):
    """Record a per-step progress update reported by the pipeline."""
    step = payload.get("step", "")
    # Copy-and-extend the existing logs; missing progress keeps the old value.
    merged_logs = {**job.pipeline_logs, step: payload.get("output", {})}
    SplatJob.objects.filter(pk=job.pk).update(
        current_step=step,
        progress=payload.get("progress", job.progress),
        pipeline_logs=merged_logs,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_succeeded(job, payload):
    """Finalize a successful pipeline run.

    Copies artifact keys and geo fields onto the Splat, applies the quality
    gate (publishes only if all thresholds pass), marks the job SUCCEEDED,
    notifies the owner, and — for published challenge submissions — updates
    the parent challenge.
    """
    from apps.splats.models import Splat
    from apps.utils.fcm import send_notification
    from django.conf import settings

    output = payload.get("output", {})
    splat = job.splat

    # Update spatial fields
    splat_updates = {
        "splat_key": output.get("splat_key", ""),
        "preview_key": output.get("preview_key", ""),
        "splat_file_size": output.get("splat_file_size"),
        "quality_score": output.get("quality_score"),
        "frame_count": output.get("frame_count"),
    }

    location_coords = output.get("location")
    if location_coords:
        # Serializer documents location as [lon, lat]; Point takes (x=lon, y=lat).
        splat_updates["location"] = Point(location_coords[0], location_coords[1], srid=4326)

    # NOTE(review): altitude/heading are assigned unconditionally, so a payload
    # omitting them clobbers any existing values with None — confirm intended.
    splat_updates["altitude"] = output.get("altitude")
    splat_updates["heading"] = output.get("heading")

    coverage_geojson = output.get("coverage")
    if coverage_geojson:
        try:
            import json
            splat_updates["coverage"] = GEOSGeometry(json.dumps(coverage_geojson), srid=4326)
        except Exception:
            # Best-effort: a bad polygon should not fail the whole webhook.
            logger.exception("Failed to parse coverage GeoJSON for splat %s", splat.id)

    # Quality gate
    thresholds = settings.SPLAT_QUALITY_THRESHOLDS
    # Missing/None metrics count as zero, i.e. they fail the gate.
    colmap_points = output.get("colmap_points") or 0
    quality_score = output.get("quality_score") or 0.0
    frame_count = output.get("frame_count") or 0

    passed = (
        colmap_points >= thresholds["min_colmap_points"]
        and quality_score >= thresholds["min_quality_score"]
        and frame_count >= thresholds["min_frame_count"]
    )

    # Status is READY either way; the gate only controls map visibility.
    splat_updates["status"] = Splat.Status.READY
    splat_updates["is_published"] = passed

    Splat.objects.filter(pk=splat.pk).update(**splat_updates)

    SplatJob.objects.filter(pk=job.pk).update(
        status=SplatJob.Status.SUCCEEDED,
        progress=100,
        current_step=SplatJob.Step.QUALITY_CHECK,
        colmap_points=colmap_points,
        pipeline_logs={**job.pipeline_logs, "quality_gate": {"passed": passed}},
        finished_at=timezone.now(),
    )

    # Notify splat owner
    if passed:
        send_notification(
            splat.owner.fcm_token,
            title="Your splat is ready!",
            body="Your recording has been processed and is now visible on the map.",
            data={"splat_id": str(splat.id), "type": "splat_ready"},
        )
    else:
        send_notification(
            splat.owner.fcm_token,
            title="Splat processing complete",
            body="Your recording was processed but did not meet quality thresholds.",
            data={"splat_id": str(splat.id), "type": "splat_quality_failed"},
        )

    # If this splat is linked to a challenge, update submission count and notify
    if passed and splat.challenge_id:
        _handle_challenge_submission(splat)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_failed(job, payload):
    """Mark the job and its splat FAILED and notify the owner via FCM."""
    from apps.splats.models import Splat
    from apps.utils.fcm import send_notification

    error_message = payload.get("error", "Unknown pipeline error")

    SplatJob.objects.filter(pk=job.pk).update(
        status=SplatJob.Status.FAILED,
        error_message=error_message,
        finished_at=timezone.now(),
    )
    Splat.objects.filter(pk=job.splat_id).update(status=Splat.Status.FAILED)

    send_notification(
        job.splat.owner.fcm_token,
        title="Recording failed",
        body="There was a problem processing your recording. Please try again.",
        data={"splat_id": str(job.splat_id), "type": "splat_failed"},
    )

    logger.error("Splat job %s failed: %s", job.id, error_message)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_challenge_submission(splat):
    """
    Increment challenge submission count and dispatch FCM notifications
    to the challenge creator and all other participants.

    Closes the challenge when the (fresh) count reaches max_submissions.
    """
    from django.db.models import F

    from apps.challenges.models import Challenge, ChallengeParticipant
    from apps.utils.fcm import send_notification

    # Atomic DB-side increment. The previous read-then-write pattern
    # (SELECT submission_count, then UPDATE with count + 1) could lose
    # increments when two webhooks for the same challenge raced.
    Challenge.objects.filter(pk=splat.challenge_id).update(
        submission_count=F("submission_count") + 1
    )

    # Re-fetch to check max_submissions against the post-increment value
    challenge = Challenge.objects.get(pk=splat.challenge_id)
    if challenge.max_submissions and challenge.submission_count >= challenge.max_submissions:
        Challenge.objects.filter(pk=challenge.pk).update(status=Challenge.Status.CLOSED)

    # Notify creator
    send_notification(
        challenge.creator.fcm_token,
        title="New submission to your challenge!",
        body=f'Someone submitted a splat for "{challenge.title}".',
        data={"challenge_id": str(challenge.id), "splat_id": str(splat.id), "type": "challenge_submission"},
    )

    # Notify other participants (excluding the submitter)
    participants = (
        ChallengeParticipant.objects.filter(challenge=challenge)
        .exclude(user=splat.owner)
        .select_related("user")
    )
    for participant in participants:
        send_notification(
            participant.user.fcm_token,
            title="New splat on a challenge you joined",
            body=f'A new recording was submitted for "{challenge.title}".',
            data={"challenge_id": str(challenge.id), "type": "challenge_new_splat"},
        )
|
||||||
0
backend/apps/splats/__init__.py
Normal file
0
backend/apps/splats/__init__.py
Normal file
6
backend/apps/splats/apps.py
Normal file
6
backend/apps/splats/apps.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class SplatsConfig(AppConfig):
    """App config for Gaussian-splat recordings (apps.splats)."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.splats"
|
||||||
43
backend/apps/splats/migrations/0001_initial.py
Normal file
43
backend/apps/splats/migrations/0001_initial.py
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.contrib.gis.db.models.fields
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated by Django — creates the splats table with PostGIS
    # geography fields; the owner FK and indexes are added in 0002.
    # Do not hand-edit; regenerate with `makemigrations` instead.

    initial = True

    dependencies = [
        ('challenges', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Splat',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('status', models.CharField(choices=[('pending', 'Pending'), ('processing', 'Processing'), ('ready', 'Ready'), ('failed', 'Failed')], db_index=True, default='pending', max_length=20)),
                ('is_published', models.BooleanField(db_index=True, default=False)),
                ('location', django.contrib.gis.db.models.fields.PointField(blank=True, geography=True, null=True, srid=4326)),
                ('coverage', django.contrib.gis.db.models.fields.PolygonField(blank=True, geography=True, null=True, srid=4326)),
                ('heading', models.FloatField(blank=True, null=True)),
                ('altitude', models.FloatField(blank=True, null=True)),
                ('video_key', models.CharField(blank=True, default='', max_length=500)),
                ('splat_key', models.CharField(blank=True, default='', max_length=500)),
                ('preview_key', models.CharField(blank=True, default='', max_length=500)),
                ('splat_file_size', models.PositiveBigIntegerField(blank=True, null=True)),
                ('quality_score', models.FloatField(blank=True, null=True)),
                ('frame_count', models.PositiveIntegerField(blank=True, null=True)),
                ('capture_metadata', models.JSONField(blank=True, default=dict)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('challenge', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='splats', to='challenges.challenge')),
            ],
            options={
                'db_table': 'splats',
            },
        ),
    ]
|
||||||
35
backend/apps/splats/migrations/0002_initial.py
Normal file
35
backend/apps/splats/migrations/0002_initial.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated by Django — adds the owner FK and query indexes
    # to the splats table.
    # Do not hand-edit; regenerate with `makemigrations` instead.

    initial = True

    dependencies = [
        ('splats', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='splat',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='splats', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddIndex(
            model_name='splat',
            index=models.Index(fields=['owner', 'status'], name='splats_owner_i_939cfa_idx'),
        ),
        migrations.AddIndex(
            model_name='splat',
            index=models.Index(fields=['challenge', 'is_published'], name='splats_challen_c6ea3d_idx'),
        ),
        migrations.AddIndex(
            model_name='splat',
            index=models.Index(fields=['created_at'], name='splats_created_695e22_idx'),
        ),
    ]
|
||||||
0
backend/apps/splats/migrations/__init__.py
Normal file
0
backend/apps/splats/migrations/__init__.py
Normal file
83
backend/apps/splats/models.py
Normal file
83
backend/apps/splats/models.py
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
import uuid
|
||||||
|
from django.contrib.gis.db import models
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
|
||||||
|
class Splat(models.Model):
    """A user-captured Gaussian-splat recording and its processed artifacts.

    Lifecycle: PENDING (awaiting upload) -> PROCESSING (RunPod pipeline)
    -> READY or FAILED; map visibility is gated separately by is_published.
    """

    class Status(models.TextChoices):
        PENDING = "pending", "Pending"        # created, awaiting video upload confirmation
        PROCESSING = "processing", "Processing"  # RunPod job running
        READY = "ready", "Ready"              # pipeline done, quality check passed
        FAILED = "failed", "Failed"           # pipeline error or quality check failed

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="splats"
    )
    # Optional link to a challenge; splat outlives a deleted challenge.
    challenge = models.ForeignKey(
        "challenges.Challenge",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="splats",
    )

    status = models.CharField(max_length=20, choices=Status.choices, default=Status.PENDING, db_index=True)

    # Visible on the map only after pipeline succeeds and quality gate passes
    is_published = models.BooleanField(default=False, db_index=True)

    # --- Geo fields ---
    # Anchor point (centroid) used for map pin and proximity queries
    location = models.PointField(geography=True, null=True, blank=True)
    # Footprint polygon of the splat's coverage on the ground
    coverage = models.PolygonField(geography=True, null=True, blank=True)
    # Compass bearing of the camera at recording start (0–360°), used to orient
    # the splat when rendering it on the map
    heading = models.FloatField(null=True, blank=True)
    # Elevation above sea level in metres, for Cesium 3D positioning
    altitude = models.FloatField(null=True, blank=True)

    # --- Wasabi storage keys ---
    # Set at Splat creation so the presigned upload URL can be generated immediately.
    # splat_key and preview_key are populated by the pipeline on completion.
    video_key = models.CharField(max_length=500, blank=True, default="")
    splat_key = models.CharField(max_length=500, blank=True, default="")
    preview_key = models.CharField(max_length=500, blank=True, default="")
    splat_file_size = models.PositiveBigIntegerField(null=True, blank=True)  # bytes

    # Pipeline output quality signals
    quality_score = models.FloatField(null=True, blank=True)  # 0.0–1.0
    frame_count = models.PositiveIntegerField(null=True, blank=True)

    # Per-frame GPS/IMU data from Vision Camera, passed as-is to the pipeline.
    # Expected shape:
    # {
    #   "fps": 30,
    #   "duration_seconds": 45.2,
    #   "device_model": "iPhone 15 Pro",
    #   "frames": [
    #     {
    #       "timestamp": 0.033,
    #       "lat": 52.520008, "lon": 13.404954,
    #       "altitude_m": 34.2,
    #       "heading_deg": 178.3, "pitch_deg": -12.5, "roll_deg": 2.1,
    #       "accuracy_m": 3.0
    #     }, ...
    #   ]
    # }
    capture_metadata = models.JSONField(default=dict, blank=True)

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return f"Splat {self.id} [{self.status}] by {self.owner_id}"

    class Meta:
        db_table = "splats"
        indexes = [
            # "my splats" lists filtered by status
            models.Index(fields=["owner", "status"]),
            # challenge galleries (published only)
            models.Index(fields=["challenge", "is_published"]),
            models.Index(fields=["created_at"]),
        ]
|
||||||
109
backend/apps/splats/serializers.py
Normal file
109
backend/apps/splats/serializers.py
Normal file
@ -0,0 +1,109 @@
|
|||||||
|
from django.conf import settings
|
||||||
|
from django.utils import timezone
|
||||||
|
from rest_framework import serializers
|
||||||
|
from rest_framework_gis.serializers import GeoFeatureModelSerializer
|
||||||
|
|
||||||
|
from apps.utils.storage import generate_presigned_put_url, preview_url
|
||||||
|
from .models import Splat
|
||||||
|
|
||||||
|
|
||||||
|
class SplatJobSummarySerializer(serializers.Serializer):
    """Inlined job status — used in SplatMineSerializer.

    Plain Serializer (not ModelSerializer): it is only ever instantiated with
    an existing job object for read-only output, never for input validation.
    """

    id = serializers.UUIDField()
    status = serializers.CharField()
    current_step = serializers.CharField()
    progress = serializers.IntegerField()
    error_message = serializers.CharField()
    queued_at = serializers.DateTimeField()
    started_at = serializers.DateTimeField()
    finished_at = serializers.DateTimeField()
||||||
|
|
||||||
|
|
||||||
|
class SplatCreateSerializer(serializers.ModelSerializer):
    """Validates the POST /splats/create/ payload (challenge link + capture metadata)."""

    challenge_id = serializers.UUIDField(required=False, allow_null=True)
    capture_metadata = serializers.JSONField()

    class Meta:
        model = Splat
        fields = ["challenge_id", "capture_metadata"]

    def validate_capture_metadata(self, value):
        """Require a JSON object carrying at least MIN_CAPTURE_FRAMES frames.

        JSONField accepts any JSON value (list, string, number, ...), so guard
        the shape explicitly: previously a non-dict payload raised
        AttributeError on .get() and surfaced as a 500 instead of a 400.
        """
        if not isinstance(value, dict):
            raise serializers.ValidationError("capture_metadata must be a JSON object.")
        frames = value.get("frames", [])
        if not isinstance(frames, list):
            raise serializers.ValidationError('capture_metadata "frames" must be a list.')
        min_frames = settings.MIN_CAPTURE_FRAMES
        if len(frames) < min_frames:
            raise serializers.ValidationError(
                f"capture_metadata must contain at least {min_frames} frames; "
                f"got {len(frames)}."
            )
        return value
||||||
|
|
||||||
|
|
||||||
|
class SplatCreateResponseSerializer(serializers.Serializer):
    """Shape of the POST /splats/ response — not a ModelSerializer."""

    id = serializers.UUIDField()
    status = serializers.CharField()
    # Nullable: the presign helper returns None when S3 storage is not
    # configured (local development).
    upload_url = serializers.CharField(allow_null=True)
    upload_expires_at = serializers.DateTimeField()
||||||
|
|
||||||
|
|
||||||
|
class SplatMapSerializer(GeoFeatureModelSerializer):
    """GeoJSON FeatureCollection for the map tile endpoint.

    `location` becomes the feature geometry; the remaining fields land in
    each feature's `properties`.
    """

    preview_url = serializers.SerializerMethodField()

    class Meta:
        model = Splat
        geo_field = "location"
        fields = ["id", "heading", "altitude", "preview_url", "splat_file_size", "created_at"]

    def get_preview_url(self, obj):
        # CDN URL in production, presigned URL otherwise; None if no preview yet.
        return preview_url(obj.preview_key)
||||||
|
|
||||||
|
|
||||||
|
class SplatDetailSerializer(serializers.ModelSerializer):
    """Full read-only representation for GET /splats/<pk>/."""

    coverage = serializers.SerializerMethodField()
    preview_url = serializers.SerializerMethodField()
    owner_username = serializers.CharField(source="owner.username", read_only=True)
    challenge_id = serializers.UUIDField(read_only=True)

    class Meta:
        model = Splat
        fields = [
            "id", "owner_username", "challenge_id", "status", "is_published",
            "location", "coverage", "heading", "altitude",
            "preview_url", "splat_file_size",
            "quality_score", "frame_count",
            "created_at", "updated_at",
        ]

    def get_coverage(self, obj):
        """Coverage geometry as a GeoJSON string, or None when unset."""
        if obj.coverage:
            return obj.coverage.geojson
        return None

    def get_preview_url(self, obj):
        # CDN URL in production, presigned URL otherwise; None if no preview yet.
        return preview_url(obj.preview_key)
||||||
|
|
||||||
|
|
||||||
|
class SplatMineSerializer(serializers.ModelSerializer):
    """Owner-facing listing for GET /splats/mine/ — includes inlined job status."""

    job = serializers.SerializerMethodField()
    preview_url = serializers.SerializerMethodField()

    class Meta:
        model = Splat
        fields = [
            "id", "status", "is_published",
            "challenge_id", "preview_url",
            "quality_score", "frame_count",
            "created_at", "updated_at",
            "job",
        ]

    def get_job(self, obj):
        """Return the summarized job, or None when no job exists yet.

        Accessing a reverse one-to-one that has no row raises the
        descriptor-specific RelatedObjectDoesNotExist, hence the attribute
        lookup via Splat.job rather than a generic ObjectDoesNotExist.
        """
        try:
            j = obj.job
        except Splat.job.RelatedObjectDoesNotExist:
            return None
        return SplatJobSummarySerializer(j).data

    def get_preview_url(self, obj):
        # CDN URL in production, presigned URL otherwise; None if no preview yet.
        return preview_url(obj.preview_key)
||||||
0
backend/apps/splats/tasks.py
Normal file
0
backend/apps/splats/tasks.py
Normal file
18
backend/apps/splats/urls.py
Normal file
18
backend/apps/splats/urls.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from django.urls import path
|
||||||
|
from .views import (
|
||||||
|
SplatCreateView,
|
||||||
|
SplatConfirmUploadView,
|
||||||
|
SplatDetailView,
|
||||||
|
SplatDownloadURLView,
|
||||||
|
SplatMapView,
|
||||||
|
SplatMineView,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Splat endpoints: map listing, creation/upload flow, owner listing, detail
# and download. UUID converter keeps the pk routes from shadowing the
# literal-prefix routes.
urlpatterns = [
    path("", SplatMapView.as_view(), name="splat-map"),
    path("create/", SplatCreateView.as_view(), name="splat-create"),
    path("mine/", SplatMineView.as_view(), name="splat-mine"),
    path("<uuid:pk>/", SplatDetailView.as_view(), name="splat-detail"),
    path("<uuid:pk>/confirm-upload/", SplatConfirmUploadView.as_view(), name="splat-confirm-upload"),
    path("<uuid:pk>/download-url/", SplatDownloadURLView.as_view(), name="splat-download-url"),
]
|
||||||
195
backend/apps/splats/views.py
Normal file
195
backend/apps/splats/views.py
Normal file
@ -0,0 +1,195 @@
|
|||||||
|
import uuid
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.contrib.gis.geos import Point, Polygon
|
||||||
|
from django.utils import timezone
|
||||||
|
from rest_framework import generics, status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from apps.common.permissions import IsOwner
|
||||||
|
from apps.utils.storage import generate_presigned_put_url, generate_presigned_get_url, object_exists
|
||||||
|
from .models import Splat
|
||||||
|
from .serializers import (
|
||||||
|
SplatCreateSerializer,
|
||||||
|
SplatCreateResponseSerializer,
|
||||||
|
SplatDetailSerializer,
|
||||||
|
SplatMapSerializer,
|
||||||
|
SplatMineSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_bbox(bbox_str):
    """Parse 'minLon,minLat,maxLon,maxLat' into a GEOS Polygon (SRID 4326).

    Returns a (polygon, None) pair on success, or (None, error_message) on
    any validation failure — callers turn the message into a 400 response.
    """
    try:
        parts = [float(x) for x in bbox_str.split(",")]
    except (ValueError, AttributeError):
        return None, "bbox must be four comma-separated floats: minLon,minLat,maxLon,maxLat"

    if len(parts) != 4:
        return None, "bbox must have exactly 4 values"

    min_lon, min_lat, max_lon, max_lat = parts

    # Reject inverted boxes: Polygon.from_bbox would silently build a
    # degenerate polygon and the map query would return nothing, while the
    # size check below passes trivially on negative extents.
    if min_lon >= max_lon or min_lat >= max_lat:
        return None, "bbox min values must be strictly less than max values"

    if (max_lon - min_lon) > settings.MAX_BBOX_DEGREES or (max_lat - min_lat) > settings.MAX_BBOX_DEGREES:
        return None, (
            f"bbox too large. Each side must be ≤ {settings.MAX_BBOX_DEGREES}° "
            f"(≈{int(settings.MAX_BBOX_DEGREES * 111)} km). Zoom in and try again."
        )

    poly = Polygon.from_bbox((min_lon, min_lat, max_lon, max_lat))
    poly.srid = 4326
    return poly, None
||||||
|
|
||||||
|
|
||||||
|
class SplatCreateView(APIView):
    """POST /splats/create/ — register a splat and return a presigned upload URL.

    Validates the payload, optionally enforces challenge geofence/limits, then
    creates the Splat row up front so the client can PUT the video directly to
    storage before confirming.
    """

    def post(self, request):
        serializer = SplatCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data

        challenge = None
        challenge_id = data.get("challenge_id")
        if challenge_id:
            # Local import avoids a circular dependency at module load time.
            from apps.challenges.models import Challenge

            try:
                challenge = Challenge.objects.get(pk=challenge_id, status=Challenge.Status.ACTIVE)
            except Challenge.DoesNotExist:
                return Response(
                    {"error": "challenge_not_found", "detail": "Challenge does not exist or is closed."},
                    status=status.HTTP_404_NOT_FOUND,
                )

            # Geofence check: first frame GPS must be inside the challenge
            # region. The serializer guarantees frames is non-empty, but each
            # frame's shape comes straight from the client — guard the keys so
            # a malformed frame yields a 400 instead of a KeyError/500.
            frames = data["capture_metadata"].get("frames", [])
            first = frames[0]
            if not isinstance(first, dict) or "lat" not in first or "lon" not in first:
                return Response(
                    {"error": "invalid_capture_metadata", "detail": "First frame must contain lat and lon."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            user_point = Point(first["lon"], first["lat"], srid=4326)

            # The region polygon is already loaded on `challenge`; evaluating
            # containment with GEOS avoids the extra DB round-trip the old
            # `Challenge.objects.filter(..., region__contains=...)` query made.
            if not challenge.region.contains(user_point):
                return Response(
                    {
                        "error": "outside_challenge_region",
                        "detail": "Your location is not within the challenge region.",
                    },
                    status=status.HTTP_403_FORBIDDEN,
                )

            if challenge.max_submissions and challenge.submission_count >= challenge.max_submissions:
                return Response(
                    {"error": "submission_limit_reached", "detail": "This challenge has reached its submission limit."},
                    status=status.HTTP_403_FORBIDDEN,
                )

        # Pre-generate the id so the storage key is known before the row exists.
        splat_id = uuid.uuid4()
        video_key = f"videos/{splat_id}/raw.mp4"

        splat = Splat.objects.create(
            id=splat_id,
            owner=request.user,
            challenge=challenge,
            capture_metadata=data["capture_metadata"],
            video_key=video_key,
        )

        # Single source of truth for the TTL (was a hard-coded 3600 drifting
        # from settings.AWS_PRESIGNED_EXPIRY).
        expires_in = settings.AWS_PRESIGNED_EXPIRY
        upload_url = generate_presigned_put_url(video_key, content_type="video/mp4", expires_in=expires_in)
        expires_at = timezone.now() + timedelta(seconds=expires_in)

        response_data = {
            "id": splat.id,
            "status": splat.status,
            "upload_url": upload_url,
            "upload_expires_at": expires_at,
        }
        return Response(SplatCreateResponseSerializer(response_data).data, status=status.HTTP_201_CREATED)
||||||
|
|
||||||
|
|
||||||
|
class SplatConfirmUploadView(APIView):
    """POST /splats/<pk>/confirm-upload/ — client signals the video upload finished.

    Verifies the object actually landed in storage, then creates a SplatJob
    and queues the processing pipeline. Returns 202 with the job id.
    """

    def post(self, request, pk):
        # Scoped to the requesting owner: other users' splats 404 rather than 403.
        try:
            splat = Splat.objects.select_related("owner").get(pk=pk, owner=request.user)
        except Splat.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)

        if splat.status != Splat.Status.PENDING:
            return Response(
                {"error": "upload_already_confirmed", "detail": "This splat has already been submitted for processing."},
                status=status.HTTP_409_CONFLICT,
            )

        # NOTE(review): the status check above and the job creation below are
        # not atomic — two concurrent confirms could both pass the check and
        # create two jobs. Confirm whether SplatJob.splat is one-to-one (making
        # the second insert fail) or guard with select_for_update().
        if not object_exists(splat.video_key):
            return Response(
                {"error": "video_not_found_in_storage", "detail": "Video file was not found. Please upload the video first."},
                status=status.HTTP_409_CONFLICT,
            )

        # Lazy import avoids circular dependency at module load time
        from apps.jobs.models import SplatJob
        from apps.jobs.tasks import dispatch_splat_job

        job = SplatJob.objects.create(splat=splat, submitted_by=request.user)
        dispatch_splat_job.delay(str(job.id))

        return Response({"job_id": str(job.id), "status": job.status}, status=status.HTTP_202_ACCEPTED)
||||||
|
|
||||||
|
|
||||||
|
class SplatMapView(APIView):
    """GET /splats/?bbox=...[&challenge_id=...] — published splats in a map window."""

    def get(self, request):
        bbox_str = request.query_params.get("bbox")
        if not bbox_str:
            return Response(
                {"error": "missing_parameter", "detail": "bbox is required."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        bbox, error = _parse_bbox(bbox_str)
        if error:
            return Response({"error": "invalid_bbox", "detail": error}, status=status.HTTP_400_BAD_REQUEST)

        qs = Splat.objects.filter(is_published=True, location__within=bbox).select_related("owner")

        challenge_id = request.query_params.get("challenge_id")
        if challenge_id:
            # Validate before handing to the ORM: a malformed UUID previously
            # raised from the queryset filter and surfaced as a 500.
            try:
                challenge_uuid = uuid.UUID(challenge_id)
            except ValueError:
                return Response(
                    {"error": "invalid_parameter", "detail": "challenge_id must be a valid UUID."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            qs = qs.filter(challenge_id=challenge_uuid)

        serializer = SplatMapSerializer(qs, many=True)
        return Response(serializer.data)
||||||
|
|
||||||
|
|
||||||
|
class SplatDetailView(generics.RetrieveAPIView):
    """GET /splats/<pk>/ — detail for a published splat (unpublished ones 404)."""

    serializer_class = SplatDetailSerializer
    queryset = Splat.objects.filter(is_published=True).select_related("owner")
    lookup_field = "pk"
||||||
|
|
||||||
|
|
||||||
|
class SplatDownloadURLView(APIView):
    """GET /splats/<pk>/download-url/ — short-lived presigned URL for the splat file."""

    def get(self, request, pk):
        splat = Splat.objects.filter(pk=pk, is_published=True).first()
        if splat is None:
            return Response(status=status.HTTP_404_NOT_FOUND)

        # The pipeline fills splat_key on completion; until then there is
        # nothing to download.
        if not splat.splat_key:
            payload = {"error": "splat_not_ready", "detail": "Splat file is not available yet."}
            return Response(payload, status=status.HTTP_404_NOT_FOUND)

        ttl_seconds = 3600
        download_url = generate_presigned_get_url(splat.splat_key, expires_in=ttl_seconds)
        valid_until = timezone.now() + timedelta(seconds=ttl_seconds)

        return Response({"url": download_url, "expires_at": valid_until})
||||||
|
|
||||||
|
|
||||||
|
class SplatMineView(generics.ListAPIView):
    """GET /splats/mine/ — the authenticated user's own splats, newest first."""

    serializer_class = SplatMineSerializer

    def get_queryset(self):
        owned = Splat.objects.filter(owner=self.request.user)
        # select_related("job") keeps the inlined job summary from issuing
        # one query per row.
        return owned.select_related("job").order_by("-created_at")
||||||
0
backend/apps/users/__init__.py
Normal file
0
backend/apps/users/__init__.py
Normal file
6
backend/apps/users/apps.py
Normal file
6
backend/apps/users/apps.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class UsersConfig(AppConfig):
    """App config for the custom user model app."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.users"
|
||||||
47
backend/apps/users/migrations/0001_initial.py
Normal file
47
backend/apps/users/migrations/0001_initial.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
# Generated by Django 5.1.4 on 2026-04-06 03:08
|
||||||
|
|
||||||
|
import django.contrib.auth.models
|
||||||
|
import django.contrib.auth.validators
|
||||||
|
import django.utils.timezone
|
||||||
|
import uuid
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated initial schema for the custom users.User model (UUID pk,
    # OIDC subject, FCM token). Schema changes belong in new migrations, not
    # edits to this file.

    initial = True

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('oidc_sub', models.CharField(blank=True, max_length=255, null=True, unique=True)),
                ('fcm_token', models.TextField(blank=True, default='')),
                ('avatar_url', models.URLField(blank=True, default='')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.permission', verbose_name='user permissions')),
            ],
            options={
                'db_table': 'users',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
|
||||||
0
backend/apps/users/migrations/__init__.py
Normal file
0
backend/apps/users/migrations/__init__.py
Normal file
18
backend/apps/users/models.py
Normal file
18
backend/apps/users/models.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
import uuid
|
||||||
|
from django.contrib.auth.models import AbstractUser
|
||||||
|
from django.db import models
|
||||||
|
|
||||||
|
|
||||||
|
class User(AbstractUser):
    """Custom user: UUID primary key plus OIDC and push-notification fields."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # sub claim from Authentik OIDC token — used to match incoming JWT to a User row.
    # Nullable + unique: multiple NULLs are allowed, so accounts without an
    # OIDC link (e.g. locally created superusers) can coexist.
    oidc_sub = models.CharField(max_length=255, unique=True, null=True, blank=True)
    # Latest FCM device token (set via PUT /users/me/fcm-token/); empty string
    # means no registered device.
    fcm_token = models.TextField(blank=True, default="")
    avatar_url = models.URLField(blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.username

    class Meta:
        db_table = "users"
|
||||||
19
backend/apps/users/serializers.py
Normal file
19
backend/apps/users/serializers.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from rest_framework import serializers
|
||||||
|
from .models import User
|
||||||
|
|
||||||
|
|
||||||
|
class UserSerializer(serializers.ModelSerializer):
    """Read-only public representation of a user."""

    class Meta:
        model = User
        fields = ["id", "username", "avatar_url", "created_at"]
        # Everything read-only: writes go through UserUpdateSerializer.
        read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class UserUpdateSerializer(serializers.ModelSerializer):
    """Restricted update payload — only the avatar is user-editable."""

    class Meta:
        model = User
        fields = ["avatar_url"]
|
||||||
|
|
||||||
|
|
||||||
|
class FCMTokenSerializer(serializers.Serializer):
    """Payload for registering a device's FCM push token."""

    token = serializers.CharField(max_length=512)
|
||||||
0
backend/apps/users/tasks.py
Normal file
0
backend/apps/users/tasks.py
Normal file
7
backend/apps/users/urls.py
Normal file
7
backend/apps/users/urls.py
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
from django.urls import path
|
||||||
|
from .views import MeView, FCMTokenView
|
||||||
|
|
||||||
|
# Endpoints for the authenticated user's own account.
urlpatterns = [
    path("me/", MeView.as_view(), name="user-me"),
    path("me/fcm-token/", FCMTokenView.as_view(), name="user-fcm-token"),
]
|
||||||
25
backend/apps/users/views.py
Normal file
25
backend/apps/users/views.py
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
from rest_framework import generics, status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from .models import User
|
||||||
|
from .serializers import UserSerializer, UserUpdateSerializer, FCMTokenSerializer
|
||||||
|
|
||||||
|
|
||||||
|
class MeView(generics.RetrieveUpdateAPIView):
    """GET/PATCH /users/me/ — the authenticated user's own profile."""

    def get_serializer_class(self):
        # Writes use the restricted serializer; reads the full read-only one.
        # NOTE(review): only PATCH is special-cased — a PUT falls through to
        # the all-read-only UserSerializer; confirm PUT is meant to be a no-op.
        if self.request.method == "PATCH":
            return UserUpdateSerializer
        return UserSerializer

    def get_object(self):
        return self.request.user
||||||
|
|
||||||
|
|
||||||
|
class FCMTokenView(APIView):
    """PUT /users/me/fcm-token/ — store the caller's FCM device token."""

    def put(self, request):
        serializer = FCMTokenSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        user = request.user
        user.fcm_token = serializer.validated_data["token"]
        # Targeted save: only the token column is written.
        user.save(update_fields=["fcm_token"])

        return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
0
backend/apps/utils/__init__.py
Normal file
0
backend/apps/utils/__init__.py
Normal file
55
backend/apps/utils/fcm.py
Normal file
55
backend/apps/utils/fcm.py
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
import logging
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
_app = None
|
||||||
|
|
||||||
|
|
||||||
|
def _get_app():
    """Return the lazily-initialised firebase_admin app, or None if unconfigured.

    Cached in module-level _app after the first success. A failed
    initialisation is logged and returns None but is NOT cached, so every
    subsequent call retries.
    NOTE(review): no lock around initialisation — two threads could race into
    initialize_app; confirm whether the serving model makes that reachable.
    """
    global _app
    if _app is not None:
        return _app

    credentials_file = settings.FIREBASE_CREDENTIALS_FILE
    if not credentials_file:
        # Firebase is deliberately optional (e.g. local development).
        return None

    # Imported lazily so this module can be imported even where the
    # firebase_admin package / credentials are absent.
    import firebase_admin
    from firebase_admin import credentials

    try:
        cred = credentials.Certificate(credentials_file)
        _app = firebase_admin.initialize_app(cred)
    except Exception:
        logger.exception("Failed to initialise Firebase app")
        return None

    return _app
|
||||||
|
|
||||||
|
|
||||||
|
def send_notification(fcm_token, *, title, body, data=None):
    """
    Send a single FCM push notification.
    Silently no-ops if Firebase is not configured (e.g. in development).
    `data` values must all be strings.

    Failures are logged (with a truncated token for privacy) and never raised
    to the caller — a dropped push must not fail the triggering operation.
    """
    if not fcm_token:
        return

    app = _get_app()
    if app is None:
        logger.debug("FCM not configured — skipping notification: %s", title)
        return

    # Lazy import mirrors _get_app: firebase_admin only needed when configured.
    from firebase_admin import messaging

    message = messaging.Message(
        notification=messaging.Notification(title=title, body=body),
        # FCM requires string values in the data payload.
        data={k: str(v) for k, v in (data or {}).items()},
        token=fcm_token,
    )
    try:
        messaging.send(message)
    except Exception:
        logger.exception("Failed to send FCM notification to token %s", fcm_token[:10])
||||||
77
backend/apps/utils/storage.py
Normal file
77
backend/apps/utils/storage.py
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
import logging

import boto3
from botocore.exceptions import ClientError
from django.conf import settings
|
||||||
|
|
||||||
|
|
||||||
|
def _is_s3_storage():
    """True when the configured default storage backend is S3 (i.e. not local dev).

    NOTE(review): Django 5.1 removed DEFAULT_FILE_STORAGE in favour of the
    STORAGES dict; this reads the settings attribute directly so it still
    works, but confirm default FileField storage agrees with it on the
    deployed Django version.
    """
    return "S3Boto3Storage" in settings.DEFAULT_FILE_STORAGE
|
||||||
|
|
||||||
|
|
||||||
|
def _get_client():
    """Build a boto3 S3 client pointed at the configured Wasabi endpoint."""
    return boto3.client(
        "s3",
        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name=settings.AWS_S3_REGION_NAME,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def generate_presigned_put_url(key, content_type="video/mp4", expires_in=3600):
    """
    Return a presigned PUT URL the client can use to upload directly to Wasabi.
    In development (local filesystem storage) returns None — callers should handle this.

    The ContentType baked into the signature must match the client's upload
    header or the PUT will be rejected.
    """
    if not _is_s3_storage():
        return None
    client = _get_client()
    return client.generate_presigned_url(
        "put_object",
        Params={
            "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
            "Key": key,
            "ContentType": content_type,
        },
        ExpiresIn=expires_in,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def generate_presigned_get_url(key, expires_in=3600):
    """Return a presigned GET URL for downloading a private Wasabi object.

    Returns None under local filesystem storage (development).
    """
    if not _is_s3_storage():
        return None
    client = _get_client()
    return client.generate_presigned_url(
        "get_object",
        Params={
            "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
            "Key": key,
        },
        ExpiresIn=expires_in,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def object_exists(key):
    """Return True if the object exists in the Wasabi bucket.

    Only a 404-family error from HEAD is treated as "object missing". Other
    ClientErrors (bad credentials, permissions, transport) previously read as
    "missing" too, making a misconfigured bucket look like a lost upload —
    they are now logged, though the boolean contract is preserved (any
    failure still returns False).
    """
    if not _is_s3_storage():
        # In dev, assume upload happened (no real Wasabi)
        return True
    client = _get_client()
    try:
        client.head_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=key)
        return True
    except ClientError as exc:
        code = exc.response.get("Error", {}).get("Code", "")
        if code not in ("404", "NoSuchKey", "NotFound"):
            logging.getLogger(__name__).warning(
                "head_object for key %s failed with non-404 error code %s", key, code
            )
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def preview_url(key):
    """
    Return the public URL for a preview image.
    Uses Cloudflare CDN in production; falls back to a presigned URL.
    """
    if not key:
        return None

    cdn_prefix = settings.CDN_BASE_URL
    if not cdn_prefix:
        # No CDN configured: hand out a long-lived (24h) presigned URL instead.
        return generate_presigned_get_url(key, expires_in=86400)

    return f"{cdn_prefix.rstrip('/')}/{key}"
|
||||||
3
backend/config/__init__.py
Normal file
3
backend/config/__init__.py
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
from .celery import app as celery_app
|
||||||
|
|
||||||
|
__all__ = ["celery_app"]
|
||||||
8
backend/config/api_urls.py
Normal file
8
backend/config/api_urls.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
from django.urls import path, include
|
||||||
|
|
||||||
|
# API routing: one include per domain app.
urlpatterns = [
    path("users/", include("apps.users.urls")),
    path("splats/", include("apps.splats.urls")),
    path("challenges/", include("apps.challenges.urls")),
    path("jobs/", include("apps.jobs.urls")),
]
|
||||||
8
backend/config/celery.py
Normal file
8
backend/config/celery.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
import os
|
||||||
|
from celery import Celery
|
||||||
|
|
||||||
|
# Must be set before Celery loads Django settings. Defaults to the dev module
# so a bare `celery -A config` works locally.
# NOTE(review): production workers must export DJANGO_SETTINGS_MODULE
# explicitly or they will boot with development settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development")

app = Celery("splatmap")
# Read CELERY_*-prefixed keys from Django settings (namespace strips the prefix).
app.config_from_object("django.conf:settings", namespace="CELERY")
# Discover tasks.py modules across all INSTALLED_APPS.
app.autodiscover_tasks()
|
||||||
0
backend/config/settings/__init__.py
Normal file
0
backend/config/settings/__init__.py
Normal file
165
backend/config/settings/base.py
Normal file
165
backend/config/settings/base.py
Normal file
@ -0,0 +1,165 @@
|
|||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
BASE_DIR = Path(__file__).resolve().parent.parent.parent

# Fail fast: no insecure fallback — a missing SECRET_KEY aborts startup.
SECRET_KEY = os.environ["SECRET_KEY"]

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.gis",
    # Third-party
    "rest_framework",
    "rest_framework_gis",
    "corsheaders",
    "mozilla_django_oidc",
    "django_celery_results",
    "django_celery_beat",
    "storages",
    # Apps
    "apps.users",
    "apps.splats",
    "apps.challenges",
    "apps.jobs",
]

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    # CORS middleware sits above CommonMiddleware so headers reach all responses.
    "corsheaders.middleware.CorsMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]

ROOT_URLCONF = "config.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

WSGI_APPLICATION = "config.wsgi.application"

# PostGIS backend — required by the geometry fields and spatial lookups.
DATABASES = {
    "default": {
        "ENGINE": "django.contrib.gis.db.backends.postgis",
        "NAME": os.environ.get("POSTGRES_DB", "splatmap"),
        "USER": os.environ.get("POSTGRES_USER", "splatmap"),
        "PASSWORD": os.environ.get("POSTGRES_PASSWORD", "splatmap"),
        "HOST": os.environ.get("POSTGRES_HOST", "db"),
        "PORT": os.environ.get("POSTGRES_PORT", "5432"),
    }
}

AUTH_USER_MODEL = "users.User"

AUTHENTICATION_BACKENDS = [
    "mozilla_django_oidc.auth.OIDCAuthenticationBackend",
]

# Authentik OIDC — all provider endpoints are derived from one base URL.
_OIDC_BASE = os.environ.get("OIDC_OP_BASE_URL", "")
OIDC_RP_CLIENT_ID = os.environ.get("OIDC_RP_CLIENT_ID", "")
OIDC_RP_CLIENT_SECRET = os.environ.get("OIDC_RP_CLIENT_SECRET", "")
OIDC_OP_AUTHORIZATION_ENDPOINT = f"{_OIDC_BASE}/authorize/"
OIDC_OP_TOKEN_ENDPOINT = f"{_OIDC_BASE}/token/"
OIDC_OP_USER_ENDPOINT = f"{_OIDC_BASE}/userinfo/"
OIDC_OP_JWKS_ENDPOINT = f"{_OIDC_BASE}/jwks/"
OIDC_RP_SIGN_ALGO = "RS256"
OIDC_STORE_ACCESS_TOKEN = True
OIDC_STORE_ID_TOKEN = True

REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": [
        "mozilla_django_oidc.contrib.drf.OIDCAuthentication",
    ],
    # Authenticated-by-default: views opt out explicitly if ever needed.
    "DEFAULT_PERMISSION_CLASSES": [
        "rest_framework.permissions.IsAuthenticated",
    ],
    "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.CursorPagination",
    "PAGE_SIZE": 50,
}

# Celery
CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://redis:6379/0")
CELERY_RESULT_BACKEND = "django-db"
CELERY_CACHE_BACKEND = "django-cache"
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
# Pipeline jobs get their own queue so they don't starve light tasks.
CELERY_TASK_ROUTES = {
    "apps.jobs.tasks.*": {"queue": "splat_jobs"},
    "apps.challenges.tasks.*": {"queue": "default"},
}

# Wasabi / S3
# NOTE(review): DEFAULT_FILE_STORAGE was removed from Django core in 5.1 in
# favour of the STORAGES dict. apps.utils.storage reads this attribute
# directly so its helpers keep working, but confirm default FileField storage
# resolves to S3 on the Django version actually deployed.
DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_STORAGE_BUCKET_NAME", "splatmap")
AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "https://s3.wasabisys.com")
AWS_S3_REGION_NAME = os.environ.get("AWS_S3_REGION_NAME", "us-east-1")
AWS_S3_FILE_OVERWRITE = False
AWS_DEFAULT_ACL = "private"
AWS_S3_SIGNATURE_VERSION = "s3v4"
AWS_PRESIGNED_EXPIRY = 3600  # seconds

# Firebase
FIREBASE_CREDENTIALS_FILE = os.environ.get("FIREBASE_CREDENTIALS_FILE", "")

# RunPod
RUNPOD_API_KEY = os.environ.get("RUNPOD_API_KEY", "")
RUNPOD_ENDPOINT_ID = os.environ.get("RUNPOD_ENDPOINT_ID", "")

# Webhook authentication secret — sent by RunPod in X-Webhook-Secret header
WEBHOOK_SECRET = os.environ.get("WEBHOOK_SECRET", "")

# Public base URL of this API — sent to RunPod so it can call back
API_BASE_URL = os.environ.get("API_BASE_URL", "http://localhost:8000")

# Cloudflare CDN prefix in front of Wasabi — used for preview image URLs
CDN_BASE_URL = os.environ.get("CDN_BASE_URL", "")

# Minimum number of frames required in capture_metadata to attempt reconstruction
MIN_CAPTURE_FRAMES = 100

# Thresholds for the quality gate after splatting pipeline completes.
# A splat only gets is_published=True if it passes all three.
SPLAT_QUALITY_THRESHOLDS = {
    "min_colmap_points": 500,
    "min_quality_score": 0.3,
    "min_frame_count": 100,
}

# Maximum side length in degrees for map tile bbox queries (~111 km per degree)
MAX_BBOX_DEGREES = 1.0

LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = False
USE_TZ = True

STATIC_URL = "/static/"
STATIC_ROOT = BASE_DIR / "staticfiles"

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
||||||
21
backend/config/settings/development.py
Normal file
21
backend/config/settings/development.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from .base import *
|
||||||
|
|
||||||
|
DEBUG = True
|
||||||
|
|
||||||
|
ALLOWED_HOSTS = ["*"]
|
||||||
|
|
||||||
|
INSTALLED_APPS += ["debug_toolbar"]
|
||||||
|
|
||||||
|
MIDDLEWARE = ["debug_toolbar.middleware.DebugToolbarMiddleware"] + MIDDLEWARE
|
||||||
|
|
||||||
|
INTERNAL_IPS = ["127.0.0.1"]
|
||||||
|
|
||||||
|
CORS_ALLOW_ALL_ORIGINS = True
|
||||||
|
|
||||||
|
# Use local filesystem in development to avoid needing real Wasabi credentials
|
||||||
|
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
|
||||||
|
MEDIA_URL = "/media/"
|
||||||
|
MEDIA_ROOT = BASE_DIR / "media"
|
||||||
|
|
||||||
|
# Log Celery tasks to console
|
||||||
|
CELERY_TASK_ALWAYS_EAGER = False # set True to run tasks synchronously for debugging
|
||||||
20
backend/config/settings/production.py
Normal file
20
backend/config/settings/production.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
from .base import *
|
||||||
|
import sentry_sdk
|
||||||
|
|
||||||
|
DEBUG = False
|
||||||
|
|
||||||
|
ALLOWED_HOSTS = os.environ["ALLOWED_HOSTS"].split(",")
|
||||||
|
|
||||||
|
CORS_ALLOWED_ORIGINS = os.environ.get("CORS_ALLOWED_ORIGINS", "").split(",")
|
||||||
|
|
||||||
|
MIDDLEWARE = ["whitenoise.middleware.WhiteNoiseMiddleware"] + MIDDLEWARE
|
||||||
|
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||||
|
|
||||||
|
SECURE_HSTS_SECONDS = 31536000
|
||||||
|
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
|
||||||
|
SECURE_SSL_REDIRECT = True
|
||||||
|
SESSION_COOKIE_SECURE = True
|
||||||
|
CSRF_COOKIE_SECURE = True
|
||||||
|
|
||||||
|
if dsn := os.environ.get("SENTRY_DSN"):
|
||||||
|
sentry_sdk.init(dsn=dsn, traces_sample_rate=0.2)
|
||||||
16
backend/config/urls.py
Normal file
16
backend/config/urls.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
from django.contrib import admin
|
||||||
|
from django.urls import path, include
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path("admin/", admin.site.urls),
|
||||||
|
path("oidc/", include("mozilla_django_oidc.urls")),
|
||||||
|
path("api/v1/", include("config.api_urls")),
|
||||||
|
]
|
||||||
|
|
||||||
|
if settings.DEBUG:
|
||||||
|
import debug_toolbar
|
||||||
|
from django.conf.urls.static import static
|
||||||
|
|
||||||
|
urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
|
||||||
|
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||||
6
backend/config/wsgi.py
Normal file
6
backend/config/wsgi.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
import os
|
||||||
|
from django.core.wsgi import get_wsgi_application
|
||||||
|
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
|
||||||
|
|
||||||
|
application = get_wsgi_application()
|
||||||
16
backend/manage.py
Normal file
16
backend/manage.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.development")
|
||||||
|
try:
|
||||||
|
from django.core.management import execute_from_command_line
|
||||||
|
except ImportError as exc:
|
||||||
|
raise ImportError("Couldn't import Django.") from exc
|
||||||
|
execute_from_command_line(sys.argv)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
16
backend/requirements/base.txt
Normal file
16
backend/requirements/base.txt
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
Django==5.1.4
|
||||||
|
djangorestframework==3.15.2
|
||||||
|
djangorestframework-gis==1.1
|
||||||
|
django-cors-headers==4.6.0
|
||||||
|
psycopg2-binary==2.9.10
|
||||||
|
celery[redis]==5.4.0
|
||||||
|
django-celery-results==2.5.1
|
||||||
|
django-celery-beat==2.7.0
|
||||||
|
redis==5.2.1
|
||||||
|
boto3==1.35.86
|
||||||
|
django-storages==1.14.4
|
||||||
|
mozilla-django-oidc==4.0.1
|
||||||
|
PyJWT==2.10.1
|
||||||
|
cryptography==44.0.0
|
||||||
|
requests==2.32.3
|
||||||
|
firebase-admin==6.6.0
|
||||||
6
backend/requirements/development.txt
Normal file
6
backend/requirements/development.txt
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
-r base.txt
|
||||||
|
django-debug-toolbar==4.4.6
|
||||||
|
ipython==8.30.0
|
||||||
|
factory-boy==3.3.1
|
||||||
|
pytest-django==4.9.0
|
||||||
|
pytest-celery==1.1.3
|
||||||
4
backend/requirements/production.txt
Normal file
4
backend/requirements/production.txt
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
-r base.txt
|
||||||
|
gunicorn==23.0.0
|
||||||
|
whitenoise==6.8.2
|
||||||
|
sentry-sdk[django]==2.19.2
|
||||||
85
docker-compose.yml
Normal file
85
docker-compose.yml
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
services:
|
||||||
|
db:
|
||||||
|
image: postgis/postgis:16-3.4
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB:-splatmap}
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-splatmap}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-splatmap}
|
||||||
|
volumes:
|
||||||
|
- postgres_data:/var/lib/postgresql/data
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-splatmap}"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: redis:7-alpine
|
||||||
|
ports:
|
||||||
|
- "6379:6379"
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "redis-cli", "ping"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
|
web:
|
||||||
|
build:
|
||||||
|
context: ./backend
|
||||||
|
args:
|
||||||
|
REQUIREMENTS: development
|
||||||
|
command: python manage.py runserver 0.0.0.0:8000
|
||||||
|
volumes:
|
||||||
|
- ./backend:/app
|
||||||
|
ports:
|
||||||
|
- "8000:8000"
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
DJANGO_SETTINGS_MODULE: config.settings.development
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
|
||||||
|
celery:
|
||||||
|
build:
|
||||||
|
context: ./backend
|
||||||
|
args:
|
||||||
|
REQUIREMENTS: development
|
||||||
|
command: celery -A config worker -l info -Q default,splat_jobs
|
||||||
|
volumes:
|
||||||
|
- ./backend:/app
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
DJANGO_SETTINGS_MODULE: config.settings.development
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
|
||||||
|
celery-beat:
|
||||||
|
build:
|
||||||
|
context: ./backend
|
||||||
|
args:
|
||||||
|
REQUIREMENTS: development
|
||||||
|
command: celery -A config beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
|
||||||
|
volumes:
|
||||||
|
- ./backend:/app
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
environment:
|
||||||
|
DJANGO_SETTINGS_MODULE: config.settings.development
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres_data:
|
||||||
26
web/index.html
Normal file
26
web/index.html
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
<title>SplatMap</title>
|
||||||
|
<style>
|
||||||
|
/* Cesium viewer requires the host element to have explicit dimensions.
|
||||||
|
Set the entire page to full viewport with no scroll. */
|
||||||
|
*, *::before, *::after { box-sizing: border-box; }
|
||||||
|
html, body, #root {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
overflow: hidden;
|
||||||
|
background: #000;
|
||||||
|
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="root"></div>
|
||||||
|
<script type="module" src="/src/main.tsx"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
2935
web/package-lock.json
generated
Normal file
2935
web/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
32
web/package.json
Normal file
32
web/package.json
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"name": "splatmap-web",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite",
|
||||||
|
"build": "tsc -b && vite build",
|
||||||
|
"preview": "vite preview"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mkkellogg/gaussian-splats-3d": "^0.4.6",
|
||||||
|
"axios": "^1.7.9",
|
||||||
|
"cesium": "^1.124.0",
|
||||||
|
"oidc-client-ts": "^3.1.0",
|
||||||
|
"react": "^19.0.0",
|
||||||
|
"react-dom": "^19.0.0",
|
||||||
|
"react-router-dom": "^7.1.1",
|
||||||
|
"three": "^0.171.0",
|
||||||
|
"zustand": "^5.0.3"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/geojson": "^7946.0.16",
|
||||||
|
"@types/react": "^19.0.2",
|
||||||
|
"@types/react-dom": "^19.0.2",
|
||||||
|
"@types/three": "^0.171.0",
|
||||||
|
"@vitejs/plugin-react": "^4.3.4",
|
||||||
|
"typescript": "^5.7.2",
|
||||||
|
"vite": "^5.4.11",
|
||||||
|
"vite-plugin-cesium": "^1.2.22"
|
||||||
|
}
|
||||||
|
}
|
||||||
14
web/public/auth/silent-callback.html
Normal file
14
web/public/auth/silent-callback.html
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html>
|
||||||
|
<head><title>Silent renew</title></head>
|
||||||
|
<body>
|
||||||
|
<script>
|
||||||
|
// oidc-client-ts exposes a global when loaded as a classic script.
|
||||||
|
// We dynamically import it from the bundled app's chunk path.
|
||||||
|
// The simplest approach: post a message back to the opener.
|
||||||
|
if (window.opener) {
|
||||||
|
window.opener.postMessage(window.location.href, window.location.origin)
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
39
web/src/App.tsx
Normal file
39
web/src/App.tsx
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
import { BrowserRouter, Routes, Route } from 'react-router-dom'
|
||||||
|
import { AuthProvider } from './auth/AuthProvider'
|
||||||
|
import { CallbackPage } from './auth/CallbackPage'
|
||||||
|
import { CesiumViewer } from './cesium/CesiumViewer'
|
||||||
|
import { SplatLayer } from './splat/SplatLayer'
|
||||||
|
import { SplatRenderer } from './splat/SplatRenderer'
|
||||||
|
import { ChallengeLayer } from './challenges/ChallengeLayer'
|
||||||
|
import { ChallengePanel } from './challenges/ChallengePanel'
|
||||||
|
import { ChallengeCreator } from './challenges/ChallengeCreator'
|
||||||
|
import { MapOverlay } from './ui/MapOverlay'
|
||||||
|
|
||||||
|
function MapPage() {
|
||||||
|
return (
|
||||||
|
<CesiumViewer>
|
||||||
|
{/* Imperative Cesium layers — render no DOM, manage entities */}
|
||||||
|
<SplatLayer />
|
||||||
|
<ChallengeLayer />
|
||||||
|
{/* Three.js splat overlay — portalled canvas above Cesium */}
|
||||||
|
<SplatRenderer />
|
||||||
|
{/* React UI — z-indexed above Cesium canvas */}
|
||||||
|
<MapOverlay />
|
||||||
|
<ChallengePanel />
|
||||||
|
<ChallengeCreator />
|
||||||
|
</CesiumViewer>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function App() {
|
||||||
|
return (
|
||||||
|
<BrowserRouter>
|
||||||
|
<AuthProvider>
|
||||||
|
<Routes>
|
||||||
|
<Route path="/auth/callback" element={<CallbackPage />} />
|
||||||
|
<Route path="/" element={<MapPage />} />
|
||||||
|
</Routes>
|
||||||
|
</AuthProvider>
|
||||||
|
</BrowserRouter>
|
||||||
|
)
|
||||||
|
}
|
||||||
61
web/src/api/challenges.ts
Normal file
61
web/src/api/challenges.ts
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import { apiClient } from './client'
|
||||||
|
import { bboxToString } from '../types/geo'
|
||||||
|
import type { BBox } from '../types/geo'
|
||||||
|
import type {
|
||||||
|
ChallengeCreateBody,
|
||||||
|
ChallengeDetail,
|
||||||
|
ChallengeMapProperties,
|
||||||
|
ChallengeParticipant,
|
||||||
|
SplatMapProperties,
|
||||||
|
} from '../types/api'
|
||||||
|
|
||||||
|
export async function fetchChallenges(
|
||||||
|
options: { bbox?: BBox; status?: string; near?: { lat: number; lon: number; radiusM: number } } = {},
|
||||||
|
): Promise<GeoJSON.FeatureCollection<GeoJSON.Point, ChallengeMapProperties>> {
|
||||||
|
const params: Record<string, string> = {}
|
||||||
|
if (options.bbox) params.bbox = bboxToString(options.bbox)
|
||||||
|
if (options.status) params.status = options.status
|
||||||
|
if (options.near) {
|
||||||
|
params.near = `${options.near.lat},${options.near.lon},${options.near.radiusM}`
|
||||||
|
}
|
||||||
|
const { data } = await apiClient.get('/challenges/', { params })
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchChallengeDetail(id: string): Promise<ChallengeDetail> {
|
||||||
|
const { data } = await apiClient.get(`/challenges/${id}/`)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createChallenge(body: ChallengeCreateBody): Promise<ChallengeDetail> {
|
||||||
|
const { data } = await apiClient.post('/challenges/', body)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updateChallenge(
|
||||||
|
id: string,
|
||||||
|
body: Partial<Pick<ChallengeDetail, 'title' | 'description' | 'expires_at' | 'status'>>,
|
||||||
|
): Promise<ChallengeDetail> {
|
||||||
|
const { data } = await apiClient.patch(`/challenges/${id}/`, body)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function closeChallenge(id: string): Promise<void> {
|
||||||
|
await apiClient.delete(`/challenges/${id}/`)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function participateInChallenge(id: string): Promise<ChallengeParticipant> {
|
||||||
|
const { data } = await apiClient.post(`/challenges/${id}/participate/`)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function leaveChallenge(id: string): Promise<void> {
|
||||||
|
await apiClient.delete(`/challenges/${id}/participate/`)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchChallengeSplats(
|
||||||
|
id: string,
|
||||||
|
): Promise<GeoJSON.FeatureCollection<GeoJSON.Point, SplatMapProperties>> {
|
||||||
|
const { data } = await apiClient.get(`/challenges/${id}/splats/`)
|
||||||
|
return data
|
||||||
|
}
|
||||||
14
web/src/api/client.ts
Normal file
14
web/src/api/client.ts
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
import axios from 'axios'
|
||||||
|
import { userManager } from '../auth/userManager'
|
||||||
|
|
||||||
|
export const apiClient = axios.create({
|
||||||
|
baseURL: import.meta.env.VITE_API_BASE_URL ?? '/api/v1',
|
||||||
|
})
|
||||||
|
|
||||||
|
apiClient.interceptors.request.use(async (config) => {
|
||||||
|
const user = await userManager.getUser()
|
||||||
|
if (user?.access_token) {
|
||||||
|
config.headers.Authorization = `Bearer ${user.access_token}`
|
||||||
|
}
|
||||||
|
return config
|
||||||
|
})
|
||||||
24
web/src/api/splats.ts
Normal file
24
web/src/api/splats.ts
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
import { apiClient } from './client'
|
||||||
|
import { bboxToString } from '../types/geo'
|
||||||
|
import type { BBox } from '../types/geo'
|
||||||
|
import type { SplatDetail, SplatDownloadUrl, SplatMapProperties } from '../types/api'
|
||||||
|
|
||||||
|
export async function fetchSplats(
|
||||||
|
bbox: BBox,
|
||||||
|
challengeId?: string,
|
||||||
|
): Promise<GeoJSON.FeatureCollection<GeoJSON.Point, SplatMapProperties>> {
|
||||||
|
const params: Record<string, string> = { bbox: bboxToString(bbox) }
|
||||||
|
if (challengeId) params.challenge_id = challengeId
|
||||||
|
const { data } = await apiClient.get('/splats/', { params })
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchSplatDetail(id: string): Promise<SplatDetail> {
|
||||||
|
const { data } = await apiClient.get(`/splats/${id}/`)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchSplatDownloadUrl(id: string): Promise<SplatDownloadUrl> {
|
||||||
|
const { data } = await apiClient.get(`/splats/${id}/download-url/`)
|
||||||
|
return data
|
||||||
|
}
|
||||||
61
web/src/auth/AuthProvider.tsx
Normal file
61
web/src/auth/AuthProvider.tsx
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import { useEffect } from 'react'
|
||||||
|
import { useLocation } from 'react-router-dom'
|
||||||
|
import { useAuthStore } from '../store/authStore'
|
||||||
|
import { userManager } from './userManager'
|
||||||
|
|
||||||
|
const DEV_SKIP_AUTH = import.meta.env.VITE_DEV_SKIP_AUTH === 'true'
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
children: React.ReactNode
|
||||||
|
}
|
||||||
|
|
||||||
|
export function AuthProvider({ children }: Props) {
|
||||||
|
const { setUser, setLoading } = useAuthStore()
|
||||||
|
const location = useLocation()
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
// Dev bypass: skip OIDC entirely when Authentik is not running locally.
|
||||||
|
// API calls will return 401 but the map and UI will render.
|
||||||
|
if (DEV_SKIP_AUTH) {
|
||||||
|
setLoading(false)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
let cancelled = false
|
||||||
|
|
||||||
|
async function init() {
|
||||||
|
setLoading(true)
|
||||||
|
try {
|
||||||
|
const user = await userManager.getUser()
|
||||||
|
if (cancelled) return
|
||||||
|
|
||||||
|
if (user && !user.expired) {
|
||||||
|
setUser(user)
|
||||||
|
} else if (location.pathname !== '/auth/callback') {
|
||||||
|
await userManager.signinRedirect({
|
||||||
|
state: { returnTo: location.pathname },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
if (!cancelled) setLoading(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
init()
|
||||||
|
|
||||||
|
const onUserLoaded = userManager.events.addUserLoaded((user) => {
|
||||||
|
if (!cancelled) setUser(user)
|
||||||
|
})
|
||||||
|
const onUserUnloaded = userManager.events.addUserUnloaded(() => {
|
||||||
|
if (!cancelled) setUser(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
cancelled = true
|
||||||
|
userManager.events.removeUserLoaded(onUserLoaded as never)
|
||||||
|
userManager.events.removeUserUnloaded(onUserUnloaded as never)
|
||||||
|
}
|
||||||
|
}, []) // eslint-disable-line react-hooks/exhaustive-deps
|
||||||
|
|
||||||
|
return <>{children}</>
|
||||||
|
}
|
||||||
30
web/src/auth/CallbackPage.tsx
Normal file
30
web/src/auth/CallbackPage.tsx
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
import { useEffect } from 'react'
|
||||||
|
import { useNavigate } from 'react-router-dom'
|
||||||
|
import { userManager } from './userManager'
|
||||||
|
import { useAuthStore } from '../store/authStore'
|
||||||
|
|
||||||
|
export function CallbackPage() {
|
||||||
|
const navigate = useNavigate()
|
||||||
|
const { setUser } = useAuthStore()
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
userManager
|
||||||
|
.signinRedirectCallback()
|
||||||
|
.then((user) => {
|
||||||
|
setUser(user)
|
||||||
|
const state = user.state as { returnTo?: string } | undefined
|
||||||
|
navigate(state?.returnTo ?? '/', { replace: true })
|
||||||
|
})
|
||||||
|
.catch(() => {
|
||||||
|
// If the callback fails (e.g. page refreshed on the callback URL),
|
||||||
|
// kick off a fresh login instead of showing a blank screen.
|
||||||
|
userManager.signinRedirect()
|
||||||
|
})
|
||||||
|
}, []) // eslint-disable-line react-hooks/exhaustive-deps
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center', justifyContent: 'center', height: '100%', color: '#fff' }}>
|
||||||
|
Signing in…
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
12
web/src/auth/userManager.ts
Normal file
12
web/src/auth/userManager.ts
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
import { UserManager, WebStorageStateStore } from 'oidc-client-ts'
|
||||||
|
|
||||||
|
export const userManager = new UserManager({
|
||||||
|
authority: import.meta.env.VITE_OIDC_AUTHORITY,
|
||||||
|
client_id: import.meta.env.VITE_OIDC_CLIENT_ID,
|
||||||
|
redirect_uri: `${window.location.origin}/auth/callback`,
|
||||||
|
silent_redirect_uri: `${window.location.origin}/auth/silent-callback.html`,
|
||||||
|
scope: 'openid profile email',
|
||||||
|
response_type: 'code',
|
||||||
|
userStore: new WebStorageStateStore({ store: window.localStorage }),
|
||||||
|
automaticSilentRenew: true,
|
||||||
|
})
|
||||||
68
web/src/cesium/CesiumViewer.tsx
Normal file
68
web/src/cesium/CesiumViewer.tsx
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
import { useEffect, useRef, useState } from 'react'
|
||||||
|
import * as Cesium from 'cesium'
|
||||||
|
import 'cesium/Build/Cesium/Widgets/widgets.css'
|
||||||
|
import { CesiumContext } from './cesiumContext'
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
children?: React.ReactNode
|
||||||
|
}
|
||||||
|
|
||||||
|
export function CesiumViewer({ children }: Props) {
|
||||||
|
const containerRef = useRef<HTMLDivElement>(null)
|
||||||
|
const [viewer, setViewer] = useState<Cesium.Viewer | null>(null)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
// Guard: only create if the container is mounted and no viewer yet
|
||||||
|
if (!containerRef.current || viewer) return
|
||||||
|
|
||||||
|
Cesium.Ion.defaultAccessToken = import.meta.env.VITE_CESIUM_ION_TOKEN ?? ''
|
||||||
|
|
||||||
|
const v = new Cesium.Viewer(containerRef.current, {
|
||||||
|
terrainProvider: new Cesium.EllipsoidTerrainProvider(),
|
||||||
|
homeButton: false,
|
||||||
|
baseLayerPicker: false,
|
||||||
|
navigationHelpButton: false,
|
||||||
|
animation: false,
|
||||||
|
timeline: false,
|
||||||
|
geocoder: false,
|
||||||
|
sceneModePicker: false,
|
||||||
|
fullscreenButton: false,
|
||||||
|
infoBox: false,
|
||||||
|
selectionIndicator: false,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Async: upgrade to world terrain after initial load
|
||||||
|
Cesium.createWorldTerrainAsync()
|
||||||
|
.then((tp) => {
|
||||||
|
if (!v.isDestroyed()) v.terrainProvider = tp
|
||||||
|
})
|
||||||
|
.catch(() => {/* non-fatal: fall back to ellipsoid */})
|
||||||
|
|
||||||
|
// Hide Cesium's own credit container — we'll add our own if needed
|
||||||
|
const creditContainer = v.cesiumWidget.creditContainer as HTMLElement
|
||||||
|
creditContainer.style.display = 'none'
|
||||||
|
|
||||||
|
setViewer(v)
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
if (!v.isDestroyed()) v.destroy()
|
||||||
|
setViewer(null)
|
||||||
|
}
|
||||||
|
}, []) // eslint-disable-line react-hooks/exhaustive-deps
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
{/* Cesium mounts itself into this div and fills it completely */}
|
||||||
|
<div
|
||||||
|
ref={containerRef}
|
||||||
|
style={{ position: 'fixed', inset: 0 }}
|
||||||
|
/>
|
||||||
|
{/* Provide viewer to all children; only render children once viewer is ready */}
|
||||||
|
{viewer && (
|
||||||
|
<CesiumContext.Provider value={viewer}>
|
||||||
|
{children}
|
||||||
|
</CesiumContext.Provider>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
)
|
||||||
|
}
|
||||||
14
web/src/cesium/cesiumContext.ts
Normal file
14
web/src/cesium/cesiumContext.ts
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
import { createContext, useContext } from 'react'
|
||||||
|
import type * as CesiumType from 'cesium'
|
||||||
|
|
||||||
|
type Viewer = InstanceType<typeof CesiumType.Viewer>
|
||||||
|
|
||||||
|
export const CesiumContext = createContext<Viewer | null>(null)
|
||||||
|
|
||||||
|
export function useCesiumViewer(): Viewer {
|
||||||
|
const viewer = useContext(CesiumContext)
|
||||||
|
if (!viewer) {
|
||||||
|
throw new Error('useCesiumViewer must be used inside <CesiumViewer>')
|
||||||
|
}
|
||||||
|
return viewer
|
||||||
|
}
|
||||||
63
web/src/cesium/geoUtils.ts
Normal file
63
web/src/cesium/geoUtils.ts
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
import * as Cesium from 'cesium'
|
||||||
|
import * as THREE from 'three'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build a Three.js Matrix4 that positions and orients a local scene
|
||||||
|
* at the given geographic coordinate.
|
||||||
|
*
|
||||||
|
* The returned matrix transforms from local ENU space (metres from the
|
||||||
|
* anchor point, X=East, Y=North, Z=Up) to Cesium ECEF space (metres
|
||||||
|
* from Earth centre). Apply it to a Three.js Object3D.matrixWorld and
|
||||||
|
* set matrixAutoUpdate = false.
|
||||||
|
*
|
||||||
|
* @param lon Longitude in degrees
|
||||||
|
* @param lat Latitude in degrees
|
||||||
|
* @param alt Altitude in metres above WGS-84 ellipsoid
|
||||||
|
* @param headingDeg Clockwise heading in degrees (0 = North, 90 = East)
|
||||||
|
*/
|
||||||
|
export function buildSplatWorldMatrix(
|
||||||
|
lon: number,
|
||||||
|
lat: number,
|
||||||
|
alt: number,
|
||||||
|
headingDeg: number,
|
||||||
|
): THREE.Matrix4 {
|
||||||
|
const position = Cesium.Cartesian3.fromDegrees(lon, lat, alt)
|
||||||
|
|
||||||
|
// 4×4 column-major matrix: local ENU → ECEF
|
||||||
|
const enuToEcef = Cesium.Transforms.eastNorthUpToFixedFrame(position)
|
||||||
|
|
||||||
|
// Apply a rotation around local Up (Z in ENU) for heading.
|
||||||
|
// Cesium heading is clockwise from North, which is –Z rotation in ENU.
|
||||||
|
const headingRad = Cesium.Math.toRadians(-headingDeg)
|
||||||
|
const headingRotation = Cesium.Matrix4.fromRotationTranslation(
|
||||||
|
Cesium.Matrix3.fromRotationZ(headingRad),
|
||||||
|
)
|
||||||
|
const finalCesiumMatrix = new Cesium.Matrix4()
|
||||||
|
Cesium.Matrix4.multiply(enuToEcef, headingRotation, finalCesiumMatrix)
|
||||||
|
|
||||||
|
// Cesium Matrix4 is a Float64Array in column-major order.
|
||||||
|
// Three.js Matrix4 uses Float32Array, also column-major.
|
||||||
|
// Direct cast works since both use the same element layout.
|
||||||
|
const threeMatrix = new THREE.Matrix4()
|
||||||
|
threeMatrix.set(
|
||||||
|
finalCesiumMatrix[0], finalCesiumMatrix[4], finalCesiumMatrix[8], finalCesiumMatrix[12],
|
||||||
|
finalCesiumMatrix[1], finalCesiumMatrix[5], finalCesiumMatrix[9], finalCesiumMatrix[13],
|
||||||
|
finalCesiumMatrix[2], finalCesiumMatrix[6], finalCesiumMatrix[10], finalCesiumMatrix[14],
|
||||||
|
finalCesiumMatrix[3], finalCesiumMatrix[7], finalCesiumMatrix[11], finalCesiumMatrix[15],
|
||||||
|
)
|
||||||
|
return threeMatrix
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert a Cesium Rectangle (radians) to a bbox tuple (degrees).
|
||||||
|
*/
|
||||||
|
export function rectangleToBbox(
|
||||||
|
rect: Cesium.Rectangle,
|
||||||
|
): [number, number, number, number] {
|
||||||
|
return [
|
||||||
|
Cesium.Math.toDegrees(rect.west),
|
||||||
|
Cesium.Math.toDegrees(rect.south),
|
||||||
|
Cesium.Math.toDegrees(rect.east),
|
||||||
|
Cesium.Math.toDegrees(rect.north),
|
||||||
|
]
|
||||||
|
}
|
||||||
37
web/src/cesium/useCesiumCamera.ts
Normal file
37
web/src/cesium/useCesiumCamera.ts
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
import { useEffect } from 'react'
|
||||||
|
import { useCesiumViewer } from './cesiumContext'
|
||||||
|
import { useMapStore } from '../store/mapStore'
|
||||||
|
import { rectangleToBbox } from './geoUtils'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Attaches a scene.preUpdate listener that writes camera height and
|
||||||
|
* the current view bbox to mapStore on every frame.
|
||||||
|
*
|
||||||
|
* Throttled so the store update fires at most once per 200 ms to avoid
|
||||||
|
* triggering expensive API queries on every rendered frame.
|
||||||
|
*/
|
||||||
|
export function useCesiumCamera() {
|
||||||
|
const viewer = useCesiumViewer()
|
||||||
|
const setCameraState = useMapStore((s) => s.setCameraState)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
let lastFired = 0
|
||||||
|
const THROTTLE_MS = 200
|
||||||
|
|
||||||
|
const removeListener = viewer.scene.preUpdate.addEventListener(() => {
|
||||||
|
const now = Date.now()
|
||||||
|
if (now - lastFired < THROTTLE_MS) return
|
||||||
|
lastFired = now
|
||||||
|
|
||||||
|
const height = viewer.camera.positionCartographic.height
|
||||||
|
const rect = viewer.camera.computeViewRectangle()
|
||||||
|
if (rect) {
|
||||||
|
setCameraState(height, rectangleToBbox(rect))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
removeListener()
|
||||||
|
}
|
||||||
|
}, [viewer, setCameraState])
|
||||||
|
}
|
||||||
76
web/src/challenges/ChallengeCreator.module.css
Normal file
76
web/src/challenges/ChallengeCreator.module.css
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
.overlay {
|
||||||
|
position: fixed;
|
||||||
|
inset: 0;
|
||||||
|
background: rgba(0,0,0,0.6);
|
||||||
|
z-index: 50;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
padding: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal {
|
||||||
|
background: #1a1a2e;
|
||||||
|
border: 1px solid rgba(255,255,255,0.12);
|
||||||
|
border-radius: 12px;
|
||||||
|
padding: 28px;
|
||||||
|
width: 100%;
|
||||||
|
max-width: 480px;
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
.heading { margin: 0 0 6px; font-size: 20px; }
|
||||||
|
.sub { margin: 0 0 20px; color: rgba(255,255,255,0.55); font-size: 14px; }
|
||||||
|
|
||||||
|
.form { display: flex; flex-direction: column; gap: 16px; }
|
||||||
|
|
||||||
|
.label {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 6px;
|
||||||
|
font-size: 14px;
|
||||||
|
color: rgba(255,255,255,0.7);
|
||||||
|
}
|
||||||
|
|
||||||
|
.input {
|
||||||
|
padding: 9px 12px;
|
||||||
|
background: rgba(255,255,255,0.07);
|
||||||
|
border: 1px solid rgba(255,255,255,0.15);
|
||||||
|
border-radius: 6px;
|
||||||
|
color: #fff;
|
||||||
|
font-size: 15px;
|
||||||
|
outline: none;
|
||||||
|
resize: vertical;
|
||||||
|
}
|
||||||
|
.input:focus { border-color: #6366f1; }
|
||||||
|
.input::placeholder { color: rgba(255,255,255,0.3); }
|
||||||
|
|
||||||
|
.row { display: grid; grid-template-columns: 1fr 1fr; gap: 12px; }
|
||||||
|
|
||||||
|
.error { color: #f87171; font-size: 14px; margin: 0; }
|
||||||
|
|
||||||
|
.actions { display: flex; gap: 10px; justify-content: flex-end; }
|
||||||
|
|
||||||
|
.cancelBtn {
|
||||||
|
padding: 10px 18px;
|
||||||
|
background: none;
|
||||||
|
border: 1px solid rgba(255,255,255,0.2);
|
||||||
|
border-radius: 8px;
|
||||||
|
color: rgba(255,255,255,0.7);
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
.cancelBtn:hover { border-color: rgba(255,255,255,0.4); color: #fff; }
|
||||||
|
|
||||||
|
.submitBtn {
|
||||||
|
padding: 10px 20px;
|
||||||
|
background: #6366f1;
|
||||||
|
border: none;
|
||||||
|
border-radius: 8px;
|
||||||
|
color: #fff;
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
.submitBtn:hover:not(:disabled) { background: #4f46e5; }
|
||||||
|
.submitBtn:disabled { opacity: 0.5; cursor: default; }
|
||||||
118
web/src/challenges/ChallengeCreator.tsx
Normal file
118
web/src/challenges/ChallengeCreator.tsx
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
import { useState } from 'react'
|
||||||
|
import { useChallengeStore } from '../store/challengeStore'
|
||||||
|
import { createChallenge } from '../api/challenges'
|
||||||
|
import styles from './ChallengeCreator.module.css'
|
||||||
|
|
||||||
|
export function ChallengeCreator() {
|
||||||
|
const { draftPolygon, setDraftPolygon, setSelectedChallengeId } = useChallengeStore()
|
||||||
|
const [title, setTitle] = useState('')
|
||||||
|
const [description, setDescription] = useState('')
|
||||||
|
const [maxSubmissions, setMaxSubmissions] = useState('')
|
||||||
|
const [expiresAt, setExpiresAt] = useState('')
|
||||||
|
const [saving, setSaving] = useState(false)
|
||||||
|
const [error, setError] = useState('')
|
||||||
|
|
||||||
|
if (!draftPolygon) return null
|
||||||
|
|
||||||
|
async function handleSubmit(e: React.FormEvent) {
|
||||||
|
e.preventDefault()
|
||||||
|
if (!title.trim() || !draftPolygon) return
|
||||||
|
|
||||||
|
setSaving(true)
|
||||||
|
setError('')
|
||||||
|
try {
|
||||||
|
const challenge = await createChallenge({
|
||||||
|
title: title.trim(),
|
||||||
|
description: description.trim() || undefined,
|
||||||
|
region: draftPolygon,
|
||||||
|
max_submissions: maxSubmissions ? parseInt(maxSubmissions, 10) : null,
|
||||||
|
expires_at: expiresAt || null,
|
||||||
|
})
|
||||||
|
setDraftPolygon(null)
|
||||||
|
setSelectedChallengeId(challenge.id)
|
||||||
|
setTitle('')
|
||||||
|
setDescription('')
|
||||||
|
setMaxSubmissions('')
|
||||||
|
setExpiresAt('')
|
||||||
|
} catch {
|
||||||
|
setError('Failed to create challenge. Please try again.')
|
||||||
|
} finally {
|
||||||
|
setSaving(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={styles.overlay}>
|
||||||
|
<div className={styles.modal}>
|
||||||
|
<h2 className={styles.heading}>Create Challenge</h2>
|
||||||
|
<p className={styles.sub}>
|
||||||
|
Your polygon has been captured. Fill in the details below.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<form onSubmit={handleSubmit} className={styles.form}>
|
||||||
|
<label className={styles.label}>
|
||||||
|
Title *
|
||||||
|
<input
|
||||||
|
className={styles.input}
|
||||||
|
value={title}
|
||||||
|
onChange={(e) => setTitle(e.target.value)}
|
||||||
|
placeholder="e.g. Capture the old library"
|
||||||
|
required
|
||||||
|
maxLength={255}
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<label className={styles.label}>
|
||||||
|
Description
|
||||||
|
<textarea
|
||||||
|
className={styles.input}
|
||||||
|
value={description}
|
||||||
|
onChange={(e) => setDescription(e.target.value)}
|
||||||
|
placeholder="What should people capture here?"
|
||||||
|
rows={3}
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<div className={styles.row}>
|
||||||
|
<label className={styles.label}>
|
||||||
|
Max submissions
|
||||||
|
<input
|
||||||
|
className={styles.input}
|
||||||
|
type="number"
|
||||||
|
min={1}
|
||||||
|
value={maxSubmissions}
|
||||||
|
onChange={(e) => setMaxSubmissions(e.target.value)}
|
||||||
|
placeholder="Unlimited"
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<label className={styles.label}>
|
||||||
|
Expires at
|
||||||
|
<input
|
||||||
|
className={styles.input}
|
||||||
|
type="datetime-local"
|
||||||
|
value={expiresAt}
|
||||||
|
onChange={(e) => setExpiresAt(e.target.value)}
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{error && <p className={styles.error}>{error}</p>}
|
||||||
|
|
||||||
|
<div className={styles.actions}>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
className={styles.cancelBtn}
|
||||||
|
onClick={() => setDraftPolygon(null)}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
<button type="submit" className={styles.submitBtn} disabled={saving}>
|
||||||
|
{saving ? 'Creating…' : 'Create challenge'}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
142
web/src/challenges/ChallengeLayer.tsx
Normal file
142
web/src/challenges/ChallengeLayer.tsx
Normal file
@ -0,0 +1,142 @@
|
|||||||
|
import { useEffect, useRef } from 'react'
|
||||||
|
import * as Cesium from 'cesium'
|
||||||
|
import { useCesiumViewer } from '../cesium/cesiumContext'
|
||||||
|
import { useMapStore } from '../store/mapStore'
|
||||||
|
import { useChallengeStore } from '../store/challengeStore'
|
||||||
|
import { usePolygonDraw } from './usePolygonDraw'
|
||||||
|
import { fetchChallenges } from '../api/challenges'
|
||||||
|
import type { BBox } from '../types/geo'
|
||||||
|
import type { ChallengeMapProperties } from '../types/api'
|
||||||
|
|
||||||
|
const CHALLENGE_VISIBLE_HEIGHT = 200_000
|
||||||
|
|
||||||
|
export function ChallengeLayer() {
|
||||||
|
const viewer = useCesiumViewer()
|
||||||
|
usePolygonDraw()
|
||||||
|
|
||||||
|
const { bbox, cameraHeight, setLoadedChallenges } = useMapStore()
|
||||||
|
const { selectedChallengeId, setSelectedChallengeId } = useChallengeStore()
|
||||||
|
|
||||||
|
const entityMapRef = useRef<Map<string, Cesium.Entity>>(new Map())
|
||||||
|
const regionEntityRef = useRef<Cesium.Entity | null>(null)
|
||||||
|
const lastBboxRef = useRef<BBox | null>(null)
|
||||||
|
|
||||||
|
// Fetch and render challenge pins
|
||||||
|
useEffect(() => {
|
||||||
|
if (!bbox || cameraHeight > CHALLENGE_VISIBLE_HEIGHT) {
|
||||||
|
entityMapRef.current.forEach((e) => viewer.entities.remove(e))
|
||||||
|
entityMapRef.current.clear()
|
||||||
|
setLoadedChallenges([])
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const last = lastBboxRef.current
|
||||||
|
if (last) {
|
||||||
|
const delta = Math.max(
|
||||||
|
Math.abs(bbox[0] - last[0]), Math.abs(bbox[1] - last[1]),
|
||||||
|
Math.abs(bbox[2] - last[2]), Math.abs(bbox[3] - last[3]),
|
||||||
|
)
|
||||||
|
if (delta < 0.05) return
|
||||||
|
}
|
||||||
|
lastBboxRef.current = bbox
|
||||||
|
|
||||||
|
fetchChallenges({ bbox }).then((fc) => {
|
||||||
|
const incoming = new Set(fc.features.map((f: GeoJSON.Feature<GeoJSON.Point, ChallengeMapProperties>) => f.properties.id))
|
||||||
|
|
||||||
|
entityMapRef.current.forEach((entity, id) => {
|
||||||
|
if (!incoming.has(id)) {
|
||||||
|
viewer.entities.remove(entity)
|
||||||
|
entityMapRef.current.delete(id)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
fc.features.forEach((feature: GeoJSON.Feature<GeoJSON.Point, ChallengeMapProperties>) => {
|
||||||
|
const id = feature.properties.id
|
||||||
|
if (entityMapRef.current.has(id)) return
|
||||||
|
|
||||||
|
const [lon, lat] = feature.geometry.coordinates
|
||||||
|
|
||||||
|
const entity = viewer.entities.add({
|
||||||
|
id: `challenge-${id}`,
|
||||||
|
position: Cesium.Cartesian3.fromDegrees(lon, lat),
|
||||||
|
billboard: {
|
||||||
|
image: createChallengePinSvg(),
|
||||||
|
width: 36,
|
||||||
|
height: 36,
|
||||||
|
verticalOrigin: Cesium.VerticalOrigin.BOTTOM,
|
||||||
|
disableDepthTestDistance: Number.POSITIVE_INFINITY,
|
||||||
|
},
|
||||||
|
properties: { challengeId: id },
|
||||||
|
})
|
||||||
|
entityMapRef.current.set(id, entity)
|
||||||
|
})
|
||||||
|
|
||||||
|
setLoadedChallenges(fc.features)
|
||||||
|
}).catch(console.error)
|
||||||
|
}, [bbox, cameraHeight]) // eslint-disable-line react-hooks/exhaustive-deps
|
||||||
|
|
||||||
|
// Show region polygon for selected challenge
|
||||||
|
useEffect(() => {
|
||||||
|
if (regionEntityRef.current) {
|
||||||
|
viewer.entities.remove(regionEntityRef.current)
|
||||||
|
regionEntityRef.current = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!selectedChallengeId) return
|
||||||
|
|
||||||
|
// We need the full detail to get the region polygon — ChallengePanel fetches
|
||||||
|
// it; we read from the DOM or re-fetch. For simplicity, re-fetch here.
|
||||||
|
import('../api/challenges').then(({ fetchChallengeDetail }) =>
|
||||||
|
fetchChallengeDetail(selectedChallengeId),
|
||||||
|
).then((detail) => {
|
||||||
|
if (!detail.region) return
|
||||||
|
const coords = detail.region.coordinates[0]
|
||||||
|
const positions = coords.map((c) =>
|
||||||
|
Cesium.Cartesian3.fromDegrees(c[0], c[1]),
|
||||||
|
)
|
||||||
|
|
||||||
|
regionEntityRef.current = viewer.entities.add({
|
||||||
|
polygon: {
|
||||||
|
hierarchy: new Cesium.PolygonHierarchy(positions),
|
||||||
|
material: Cesium.Color.YELLOW.withAlpha(0.15),
|
||||||
|
outline: true,
|
||||||
|
outlineColor: Cesium.Color.YELLOW,
|
||||||
|
outlineWidth: 2,
|
||||||
|
heightReference: Cesium.HeightReference.CLAMP_TO_GROUND,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}).catch(console.error)
|
||||||
|
}, [selectedChallengeId, viewer])
|
||||||
|
|
||||||
|
// Wire up entity selection
|
||||||
|
useEffect(() => {
|
||||||
|
const remove = viewer.selectedEntityChanged.addEventListener((entity) => {
|
||||||
|
if (!entity) return
|
||||||
|
const challengeId = entity.properties?.challengeId?.getValue()
|
||||||
|
if (challengeId) setSelectedChallengeId(challengeId)
|
||||||
|
})
|
||||||
|
return () => remove()
|
||||||
|
}, [viewer, setSelectedChallengeId])
|
||||||
|
|
||||||
|
// Cleanup on unmount
|
||||||
|
useEffect(() => {
|
||||||
|
return () => {
|
||||||
|
entityMapRef.current.forEach((e) => viewer.entities.remove(e))
|
||||||
|
entityMapRef.current.clear()
|
||||||
|
if (regionEntityRef.current) viewer.entities.remove(regionEntityRef.current)
|
||||||
|
}
|
||||||
|
}, [viewer])
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
function createChallengePinSvg(): string {
|
||||||
|
const svg = `
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="36" height="36" viewBox="0 0 36 36">
|
||||||
|
<circle cx="18" cy="16" r="12" fill="#f59e0b" stroke="#fff" stroke-width="2"/>
|
||||||
|
<polygon points="18,32 11,21 25,21" fill="#f59e0b"/>
|
||||||
|
<text x="18" y="20" text-anchor="middle" fill="#fff" font-size="11" font-weight="bold">!</text>
|
||||||
|
</svg>
|
||||||
|
`
|
||||||
|
return `data:image/svg+xml;base64,${btoa(svg)}`
|
||||||
|
}
|
||||||
55
web/src/challenges/ChallengePanel.module.css
Normal file
55
web/src/challenges/ChallengePanel.module.css
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
.loading { color: rgba(255,255,255,0.5); }
|
||||||
|
|
||||||
|
.content { display: flex; flex-direction: column; gap: 16px; }
|
||||||
|
|
||||||
|
.description { margin: 0; line-height: 1.5; color: rgba(255,255,255,0.8); }
|
||||||
|
|
||||||
|
.meta {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: auto 1fr;
|
||||||
|
gap: 6px 16px;
|
||||||
|
margin: 0;
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
.meta dt { color: rgba(255,255,255,0.5); }
|
||||||
|
.meta dd { margin: 0; }
|
||||||
|
|
||||||
|
.participateBtn {
|
||||||
|
padding: 10px 20px;
|
||||||
|
background: #f59e0b;
|
||||||
|
color: #000;
|
||||||
|
border: none;
|
||||||
|
border-radius: 8px;
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 15px;
|
||||||
|
}
|
||||||
|
.participateBtn:hover { background: #fbbf24; }
|
||||||
|
|
||||||
|
.participating { color: #6ee7b7; font-size: 14px; margin: 0; }
|
||||||
|
|
||||||
|
.previews h3 { margin: 0 0 12px; font-size: 15px; font-weight: 600; }
|
||||||
|
|
||||||
|
.previewGrid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(3, 1fr);
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.previewCard {
|
||||||
|
aspect-ratio: 1;
|
||||||
|
border-radius: 6px;
|
||||||
|
overflow: hidden;
|
||||||
|
background: rgba(255,255,255,0.05);
|
||||||
|
}
|
||||||
|
.previewCard img { width: 100%; height: 100%; object-fit: cover; }
|
||||||
|
|
||||||
|
.previewPlaceholder {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
color: rgba(255,255,255,0.3);
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
91
web/src/challenges/ChallengePanel.tsx
Normal file
91
web/src/challenges/ChallengePanel.tsx
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
import { useEffect, useState } from 'react'
|
||||||
|
import { Panel } from '../ui/Panel'
|
||||||
|
import { useChallengeStore } from '../store/challengeStore'
|
||||||
|
import { fetchChallengeDetail, participateInChallenge } from '../api/challenges'
|
||||||
|
import type { ChallengeDetail } from '../types/api'
|
||||||
|
import styles from './ChallengePanel.module.css'
|
||||||
|
|
||||||
|
export function ChallengePanel() {
|
||||||
|
const { selectedChallengeId, setSelectedChallengeId } = useChallengeStore()
|
||||||
|
const [detail, setDetail] = useState<ChallengeDetail | null>(null)
|
||||||
|
const [participating, setParticipating] = useState(false)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!selectedChallengeId) {
|
||||||
|
setDetail(null)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fetchChallengeDetail(selectedChallengeId)
|
||||||
|
.then((d) => {
|
||||||
|
setDetail(d)
|
||||||
|
setParticipating(d.is_participating)
|
||||||
|
})
|
||||||
|
.catch(console.error)
|
||||||
|
}, [selectedChallengeId])
|
||||||
|
|
||||||
|
async function handleParticipate() {
|
||||||
|
if (!selectedChallengeId) return
|
||||||
|
await participateInChallenge(selectedChallengeId)
|
||||||
|
setParticipating(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Panel
|
||||||
|
open={!!selectedChallengeId}
|
||||||
|
onClose={() => setSelectedChallengeId(null)}
|
||||||
|
title={detail?.title ?? 'Challenge'}
|
||||||
|
>
|
||||||
|
{!detail ? (
|
||||||
|
<p className={styles.loading}>Loading…</p>
|
||||||
|
) : (
|
||||||
|
<div className={styles.content}>
|
||||||
|
{detail.description && (
|
||||||
|
<p className={styles.description}>{detail.description}</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<dl className={styles.meta}>
|
||||||
|
<dt>Created by</dt>
|
||||||
|
<dd>{detail.creator_username}</dd>
|
||||||
|
<dt>Submissions</dt>
|
||||||
|
<dd>
|
||||||
|
{detail.submission_count}
|
||||||
|
{detail.max_submissions ? ` / ${detail.max_submissions}` : ''}
|
||||||
|
</dd>
|
||||||
|
{detail.expires_at && (
|
||||||
|
<>
|
||||||
|
<dt>Expires</dt>
|
||||||
|
<dd>{new Date(detail.expires_at).toLocaleDateString()}</dd>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</dl>
|
||||||
|
|
||||||
|
{detail.status === 'active' && !participating && (
|
||||||
|
<button className={styles.participateBtn} onClick={handleParticipate}>
|
||||||
|
Accept challenge
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
{participating && (
|
||||||
|
<p className={styles.participating}>You've accepted this challenge</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{detail.preview_splats.length > 0 && (
|
||||||
|
<section className={styles.previews}>
|
||||||
|
<h3>Submissions</h3>
|
||||||
|
<div className={styles.previewGrid}>
|
||||||
|
{detail.preview_splats.map((s) => (
|
||||||
|
<div key={s.id} className={styles.previewCard}>
|
||||||
|
{s.preview_url ? (
|
||||||
|
<img src={s.preview_url} alt="Splat preview" />
|
||||||
|
) : (
|
||||||
|
<div className={styles.previewPlaceholder}>3D</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</Panel>
|
||||||
|
)
|
||||||
|
}
|
||||||
96
web/src/challenges/usePolygonDraw.ts
Normal file
96
web/src/challenges/usePolygonDraw.ts
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
import { useEffect, useRef } from 'react'
|
||||||
|
import * as Cesium from 'cesium'
|
||||||
|
import { useCesiumViewer } from '../cesium/cesiumContext'
|
||||||
|
import { useChallengeStore } from '../store/challengeStore'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When drawingMode is true, intercepts Cesium mouse events to let the user
|
||||||
|
* draw a polygon by clicking vertices on the globe.
|
||||||
|
*
|
||||||
|
* LEFT_CLICK → add vertex
|
||||||
|
* RIGHT_CLICK → close polygon and write GeoJSON Polygon to challengeStore
|
||||||
|
*/
|
||||||
|
export function usePolygonDraw() {
|
||||||
|
const viewer = useCesiumViewer()
|
||||||
|
const { drawingMode, setDrawingMode, setDraftPolygon } = useChallengeStore()
|
||||||
|
const verticesRef = useRef<Cesium.Cartesian3[]>([])
|
||||||
|
const previewEntityRef = useRef<Cesium.Entity | null>(null)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!drawingMode) {
|
||||||
|
cleanup()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
verticesRef.current = []
|
||||||
|
|
||||||
|
const handler = new Cesium.ScreenSpaceEventHandler(viewer.scene.canvas)
|
||||||
|
|
||||||
|
handler.setInputAction((event: { position: Cesium.Cartesian2 }) => {
|
||||||
|
const ray = viewer.camera.getPickRay(event.position)
|
||||||
|
if (!ray) return
|
||||||
|
const intersection = viewer.scene.globe.pick(ray, viewer.scene)
|
||||||
|
if (!intersection) return
|
||||||
|
|
||||||
|
verticesRef.current.push(intersection.clone())
|
||||||
|
updatePreview()
|
||||||
|
}, Cesium.ScreenSpaceEventType.LEFT_CLICK)
|
||||||
|
|
||||||
|
handler.setInputAction(() => {
|
||||||
|
const verts = verticesRef.current
|
||||||
|
if (verts.length < 3) return
|
||||||
|
|
||||||
|
// Convert Cartesian3 vertices to [lon, lat] degree pairs
|
||||||
|
const coords: [number, number][] = verts.map((v) => {
|
||||||
|
const carto = Cesium.Cartographic.fromCartesian(v)
|
||||||
|
return [
|
||||||
|
Cesium.Math.toDegrees(carto.longitude),
|
||||||
|
Cesium.Math.toDegrees(carto.latitude),
|
||||||
|
]
|
||||||
|
})
|
||||||
|
// Close the ring
|
||||||
|
coords.push(coords[0])
|
||||||
|
|
||||||
|
const polygon: GeoJSON.Polygon = {
|
||||||
|
type: 'Polygon',
|
||||||
|
coordinates: [coords],
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup()
|
||||||
|
setDraftPolygon(polygon)
|
||||||
|
setDrawingMode(false)
|
||||||
|
}, Cesium.ScreenSpaceEventType.RIGHT_CLICK)
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
handler.destroy()
|
||||||
|
}
|
||||||
|
|
||||||
|
function cleanup() {
|
||||||
|
verticesRef.current = []
|
||||||
|
if (previewEntityRef.current) {
|
||||||
|
viewer.entities.remove(previewEntityRef.current)
|
||||||
|
previewEntityRef.current = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function updatePreview() {
|
||||||
|
if (previewEntityRef.current) {
|
||||||
|
viewer.entities.remove(previewEntityRef.current)
|
||||||
|
}
|
||||||
|
|
||||||
|
const verts = verticesRef.current
|
||||||
|
if (verts.length < 2) return
|
||||||
|
|
||||||
|
previewEntityRef.current = viewer.entities.add({
|
||||||
|
polyline: {
|
||||||
|
positions: [...verts, verts[0]], // close the preview ring
|
||||||
|
width: 2,
|
||||||
|
material: new Cesium.ColorMaterialProperty(
|
||||||
|
Cesium.Color.YELLOW.withAlpha(0.9),
|
||||||
|
),
|
||||||
|
clampToGround: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}, [drawingMode, viewer, setDrawingMode, setDraftPolygon])
|
||||||
|
}
|
||||||
13
web/src/env.d.ts
vendored
Normal file
13
web/src/env.d.ts
vendored
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
/// <reference types="vite/client" />

// Typed view of the Vite environment variables this app reads via
// import.meta.env.* (only VITE_-prefixed vars reach the browser bundle).
interface ImportMetaEnv {
  // OIDC issuer/authority URL (Authentik application endpoint).
  readonly VITE_OIDC_AUTHORITY: string
  // OIDC public client ID registered with the identity provider.
  readonly VITE_OIDC_CLIENT_ID: string
  // Base URL of the backend API.
  readonly VITE_API_BASE_URL: string
  // Cesium Ion access token for terrain/imagery assets.
  readonly VITE_CESIUM_ION_TOKEN: string
  // Optional dev flag — presumably bypasses the auth flow when set;
  // TODO confirm exact semantics in the auth bootstrap code.
  readonly VITE_DEV_SKIP_AUTH?: string
}

// Augment ImportMeta so import.meta.env is fully typed.
interface ImportMeta {
  readonly env: ImportMetaEnv
}
|
||||||
7
web/src/main.tsx
Normal file
7
web/src/main.tsx
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
import { createRoot } from 'react-dom/client'
import App from './App'

// Application entry point: mount <App /> into #root (asserted non-null —
// the element is expected to exist in index.html).
//
// StrictMode is intentionally disabled.
// Cesium's Viewer cannot survive React's double-invocation of useEffect
// in StrictMode — it tries to mount on an already-destroyed canvas.
createRoot(document.getElementById('root')!).render(<App />)
|
||||||
117
web/src/splat/SplatLayer.tsx
Normal file
117
web/src/splat/SplatLayer.tsx
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
import { useEffect, useRef } from 'react'
|
||||||
|
import * as Cesium from 'cesium'
|
||||||
|
import { useCesiumViewer } from '../cesium/cesiumContext'
|
||||||
|
import { useCesiumCamera } from '../cesium/useCesiumCamera'
|
||||||
|
import { useMapStore } from '../store/mapStore'
|
||||||
|
import { fetchSplats } from '../api/splats'
|
||||||
|
import type { BBox } from '../types/geo'
|
||||||
|
import type { SplatMapProperties } from '../types/api'
|
||||||
|
|
||||||
|
// Show splat pins when camera is below this altitude (metres)
|
||||||
|
const SPLAT_VISIBLE_HEIGHT = 50_000
|
||||||
|
|
||||||
|
export function SplatLayer() {
|
||||||
|
const viewer = useCesiumViewer()
|
||||||
|
useCesiumCamera()
|
||||||
|
|
||||||
|
const { bbox, cameraHeight, setLoadedSplats, setActiveSplatId } = useMapStore()
|
||||||
|
const entityMapRef = useRef<Map<string, Cesium.Entity>>(new Map())
|
||||||
|
const lastBboxRef = useRef<BBox | null>(null)
|
||||||
|
|
||||||
|
// Fetch and sync splat entities whenever the bbox changes meaningfully
|
||||||
|
useEffect(() => {
|
||||||
|
if (!bbox || cameraHeight > SPLAT_VISIBLE_HEIGHT) {
|
||||||
|
// Clear all splat pins when zoomed out
|
||||||
|
entityMapRef.current.forEach((e) => viewer.entities.remove(e))
|
||||||
|
entityMapRef.current.clear()
|
||||||
|
setLoadedSplats([])
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Avoid re-fetching if bbox moved less than 0.01° (noise reduction)
|
||||||
|
const last = lastBboxRef.current
|
||||||
|
if (last) {
|
||||||
|
const delta = Math.max(
|
||||||
|
Math.abs(bbox[0] - last[0]),
|
||||||
|
Math.abs(bbox[1] - last[1]),
|
||||||
|
Math.abs(bbox[2] - last[2]),
|
||||||
|
Math.abs(bbox[3] - last[3]),
|
||||||
|
)
|
||||||
|
if (delta < 0.01) return
|
||||||
|
}
|
||||||
|
lastBboxRef.current = bbox
|
||||||
|
|
||||||
|
fetchSplats(bbox).then((fc) => {
|
||||||
|
const incoming = new Set(fc.features.map((f: GeoJSON.Feature<GeoJSON.Point, SplatMapProperties>) => f.properties.id))
|
||||||
|
|
||||||
|
// Remove entities that are no longer in view
|
||||||
|
entityMapRef.current.forEach((entity, id) => {
|
||||||
|
if (!incoming.has(id)) {
|
||||||
|
viewer.entities.remove(entity)
|
||||||
|
entityMapRef.current.delete(id)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Add new entities
|
||||||
|
fc.features.forEach((feature: GeoJSON.Feature<GeoJSON.Point, SplatMapProperties>) => {
|
||||||
|
const id = feature.properties.id
|
||||||
|
if (entityMapRef.current.has(id)) return
|
||||||
|
|
||||||
|
const [lon, lat] = feature.geometry.coordinates
|
||||||
|
const alt = feature.properties.altitude ?? 0
|
||||||
|
|
||||||
|
const entity = viewer.entities.add({
|
||||||
|
id: `splat-${id}`,
|
||||||
|
position: Cesium.Cartesian3.fromDegrees(lon, lat, alt),
|
||||||
|
billboard: {
|
||||||
|
image: createSplatPinSvg(),
|
||||||
|
width: 32,
|
||||||
|
height: 32,
|
||||||
|
verticalOrigin: Cesium.VerticalOrigin.BOTTOM,
|
||||||
|
disableDepthTestDistance: Number.POSITIVE_INFINITY,
|
||||||
|
},
|
||||||
|
properties: { splatId: id },
|
||||||
|
})
|
||||||
|
entityMapRef.current.set(id, entity)
|
||||||
|
})
|
||||||
|
|
||||||
|
setLoadedSplats(fc.features)
|
||||||
|
}).catch(console.error)
|
||||||
|
}, [bbox, cameraHeight]) // eslint-disable-line react-hooks/exhaustive-deps
|
||||||
|
|
||||||
|
// Wire up entity selection → activeSplatId in store
|
||||||
|
useEffect(() => {
|
||||||
|
const remove = viewer.selectedEntityChanged.addEventListener((entity) => {
|
||||||
|
if (!entity) {
|
||||||
|
setActiveSplatId(null)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const splatId = entity.properties?.splatId?.getValue()
|
||||||
|
if (splatId) {
|
||||||
|
setActiveSplatId(splatId)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return () => remove()
|
||||||
|
}, [viewer, setActiveSplatId])
|
||||||
|
|
||||||
|
// Clean up all entities on unmount
|
||||||
|
useEffect(() => {
|
||||||
|
return () => {
|
||||||
|
entityMapRef.current.forEach((e) => viewer.entities.remove(e))
|
||||||
|
entityMapRef.current.clear()
|
||||||
|
}
|
||||||
|
}, [viewer])
|
||||||
|
|
||||||
|
return null // no DOM — everything is imperative Cesium entities
|
||||||
|
}
|
||||||
|
|
||||||
|
function createSplatPinSvg(): string {
|
||||||
|
const svg = `
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 32 32">
|
||||||
|
<circle cx="16" cy="14" r="10" fill="#6366f1" stroke="#fff" stroke-width="2"/>
|
||||||
|
<polygon points="16,28 10,18 22,18" fill="#6366f1"/>
|
||||||
|
<text x="16" y="18" text-anchor="middle" fill="#fff" font-size="10" font-weight="bold">3D</text>
|
||||||
|
</svg>
|
||||||
|
`
|
||||||
|
return `data:image/svg+xml;base64,${btoa(svg)}`
|
||||||
|
}
|
||||||
161
web/src/splat/SplatRenderer.tsx
Normal file
161
web/src/splat/SplatRenderer.tsx
Normal file
@ -0,0 +1,161 @@
|
|||||||
|
import { useEffect, useRef } from 'react'
|
||||||
|
import { createPortal } from 'react-dom'
|
||||||
|
import * as THREE from 'three'
|
||||||
|
import { useCesiumViewer } from '../cesium/cesiumContext'
|
||||||
|
import { useMapStore } from '../store/mapStore'
|
||||||
|
import { useSplatStore } from '../store/splatStore'
|
||||||
|
import { syncSplatCamera } from './useSplatCamera'
|
||||||
|
import { getSplatDownloadUrl } from './splatLoader'
|
||||||
|
import { buildSplatWorldMatrix } from '../cesium/geoUtils'
|
||||||
|
import { fetchSplatDetail } from '../api/splats'
|
||||||
|
|
||||||
|
// Only render the splat when the camera is below this altitude
|
||||||
|
const RENDER_HEIGHT = 500
|
||||||
|
|
||||||
|
/**
 * Renders the active Gaussian splat on a transparent overlay canvas that
 * sits on top of the Cesium canvas.
 *
 * Lifecycle: when activeSplatId changes, the splat detail + presigned URL
 * are fetched, the @mkkellogg/gaussian-splats-3d Viewer is created on the
 * overlay canvas, and the loaded scene is geo-anchored via an ECEF matrix.
 * Rendering is driven from Cesium's postRender event (selfDrivenMode: false)
 * so the two renderers stay frame-synchronized.
 */
export function SplatRenderer() {
  const viewer = useCesiumViewer()
  const canvasRef = useRef<HTMLCanvasElement>(null)
  // Holds the gaussian-splats-3d Viewer instance; typed as unknown because
  // the library is only imported dynamically.
  const splatViewerRef = useRef<unknown>(null)
  // NOTE(review): "camerRef" looks like a typo for "cameraRef" — harmless,
  // but worth renaming in a follow-up.
  const camerRef = useRef<THREE.PerspectiveCamera>(new THREE.PerspectiveCamera())
  const activeSplatId = useMapStore((s) => s.activeSplatId)
  const cameraHeight = useMapStore((s) => s.cameraHeight)
  const { setSplatDetail, splatCache } = useSplatStore()

  // Keep canvas dimensions in sync with Cesium canvas
  useEffect(() => {
    const cesiumCanvas = viewer.canvas
    const overlayCanvas = canvasRef.current
    if (!overlayCanvas) return

    // Copy the Cesium canvas's pixel dimensions and keep the Three camera's
    // aspect ratio in step.
    function syncSize() {
      overlayCanvas!.width = cesiumCanvas.width
      overlayCanvas!.height = cesiumCanvas.height
      camerRef.current.aspect = cesiumCanvas.width / cesiumCanvas.height
      camerRef.current.updateProjectionMatrix()
    }

    syncSize()
    const observer = new ResizeObserver(syncSize)
    observer.observe(cesiumCanvas)
    return () => observer.disconnect()
  }, [viewer])

  // Load / unload splat when activeSplatId changes
  useEffect(() => {
    if (!activeSplatId) {
      disposeSplatViewer()
      return
    }

    // Cancellation flag checked after every await so a stale load cannot
    // install itself after the selection has changed.
    let cancelled = false

    async function loadSplat() {
      if (!activeSplatId) return

      // Fetch detail if not cached
      let detail = splatCache.get(activeSplatId)
      if (!detail) {
        detail = await fetchSplatDetail(activeSplatId)
        if (cancelled) return
        setSplatDetail(activeSplatId, detail)
      }

      // Only published, geo-located splats can be rendered.
      if (!detail.location || !detail.is_published) return

      const url = await getSplatDownloadUrl(activeSplatId)
      if (cancelled) return

      // Dynamically import the library to keep initial bundle lean
      const { Viewer: GaussianViewer } = await import('@mkkellogg/gaussian-splats-3d')
      if (cancelled) return

      const canvas = canvasRef.current
      if (!canvas) return

      disposeSplatViewer()

      // NOTE(review): a new WebGLRenderer is created per load and is not
      // explicitly disposed here — presumably gViewer.dispose() releases it;
      // confirm against the gaussian-splats-3d docs to avoid leaking GL
      // contexts across repeated selections.
      const gViewer = new GaussianViewer({
        selfDrivenMode: false,
        useBuiltInControls: false,
        renderer: new THREE.WebGLRenderer({ canvas, alpha: true }),
        camera: camerRef.current,
      })

      // Geo-anchor the splat
      const [lon, lat] = detail.location.coordinates
      const alt = detail.altitude ?? 0
      const heading = detail.heading ?? 0
      const worldMatrix = buildSplatWorldMatrix(lon, lat, alt, heading)

      await gViewer.addSplatScene(url, {
        progressiveLoad: true,
        onProgress: () => { /* optional: update progress UI */ },
      })

      if (cancelled) {
        gViewer.dispose()
        return
      }

      // Apply geo-anchor transform to the loaded scene
      const scene = gViewer.splatMesh
      if (scene) {
        // Matrix is set directly (auto-update off) so Three never
        // recomputes it from position/rotation/scale.
        scene.matrixAutoUpdate = false
        scene.matrix.copy(worldMatrix)
        scene.matrixWorld.copy(worldMatrix)
      }

      splatViewerRef.current = gViewer
    }

    loadSplat().catch(console.error)

    return () => {
      cancelled = true
    }
  }, [activeSplatId]) // eslint-disable-line react-hooks/exhaustive-deps

  // Drive the splat render loop from Cesium's postRender event
  useEffect(() => {
    const remove = viewer.scene.postRender.addEventListener(() => {
      const gViewer = splatViewerRef.current as import('@mkkellogg/gaussian-splats-3d').Viewer | null
      // Skip rendering entirely when no splat is loaded or the camera is
      // too high for the splat to be meaningfully visible.
      if (!gViewer || cameraHeight > RENDER_HEIGHT) return

      const canvas = canvasRef.current
      if (!canvas) return

      syncSplatCamera(viewer, camerRef.current, canvas)
      gViewer.update()
      gViewer.render()
    })
    return () => remove()
  }, [viewer, cameraHeight])

  // Tear down the current splat viewer (if any) and blank the overlay.
  function disposeSplatViewer() {
    const gViewer = splatViewerRef.current as { dispose?: () => void } | null
    if (gViewer?.dispose) gViewer.dispose()
    splatViewerRef.current = null

    // Clear the overlay canvas
    // NOTE(review): this canvas has had a WebGL context created on it, so
    // getContext('2d') should return null and the clearRect be a no-op —
    // the optional chaining hides that. If a visible clear is required,
    // use the GL context (gl.clear) or resize the canvas instead; confirm.
    const canvas = canvasRef.current
    if (canvas) {
      const ctx = canvas.getContext('2d')
      ctx?.clearRect(0, 0, canvas.width, canvas.height)
    }
  }

  // Overlay canvas portal — sits above the Cesium canvas, no pointer events
  const overlayCanvas = (
    <canvas
      ref={canvasRef}
      style={{
        position: 'fixed',
        inset: 0,
        pointerEvents: 'none',
        zIndex: 10,
      }}
    />
  )

  return createPortal(overlayCanvas, document.body)
}
|
||||||
16
web/src/splat/splatLoader.ts
Normal file
16
web/src/splat/splatLoader.ts
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
import { fetchSplatDownloadUrl } from '../api/splats'
|
||||||
|
import { useSplatStore } from '../store/splatStore'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a valid presigned download URL for the given splat ID.
|
||||||
|
* Checks the cache first; fetches a fresh URL if missing or expired.
|
||||||
|
*/
|
||||||
|
export async function getSplatDownloadUrl(splatId: string): Promise<string> {
|
||||||
|
const store = useSplatStore.getState()
|
||||||
|
const cached = store.getValidDownloadUrl(splatId)
|
||||||
|
if (cached) return cached
|
||||||
|
|
||||||
|
const result = await fetchSplatDownloadUrl(splatId)
|
||||||
|
store.setDownloadUrl(splatId, result)
|
||||||
|
return result.url
|
||||||
|
}
|
||||||
41
web/src/splat/useSplatCamera.ts
Normal file
41
web/src/splat/useSplatCamera.ts
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
import * as Cesium from 'cesium'
|
||||||
|
import * as THREE from 'three'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Synchronise a Three.js PerspectiveCamera to the current Cesium camera.
|
||||||
|
*
|
||||||
|
* Both cameras work in ECEF space (metres from Earth centre).
|
||||||
|
* The splat scene's Object3D has its matrixWorld set to the ECEF transform
|
||||||
|
* of the capture point (see geoUtils.buildSplatWorldMatrix), so the camera
|
||||||
|
* and the scene are in the same coordinate space.
|
||||||
|
*
|
||||||
|
* Call this inside a Cesium scene.postRender listener, before splatViewer.render().
|
||||||
|
*/
|
||||||
|
export function syncSplatCamera(
|
||||||
|
cesiumViewer: Cesium.Viewer,
|
||||||
|
threeCamera: THREE.PerspectiveCamera,
|
||||||
|
canvas: HTMLCanvasElement,
|
||||||
|
) {
|
||||||
|
const cam = cesiumViewer.camera
|
||||||
|
|
||||||
|
// Position (ECEF, metres)
|
||||||
|
const pos = cam.positionWC
|
||||||
|
threeCamera.position.set(pos.x, pos.y, pos.z)
|
||||||
|
|
||||||
|
// LookAt target: position + direction
|
||||||
|
const dir = cam.directionWC
|
||||||
|
const target = new THREE.Vector3(pos.x + dir.x, pos.y + dir.y, pos.z + dir.z)
|
||||||
|
|
||||||
|
// Up vector
|
||||||
|
const up = cam.upWC
|
||||||
|
threeCamera.up.set(up.x, up.y, up.z)
|
||||||
|
threeCamera.lookAt(target)
|
||||||
|
|
||||||
|
// FOV and aspect
|
||||||
|
const frustum = cam.frustum as Cesium.PerspectiveFrustum
|
||||||
|
if (frustum.fovy != null) {
|
||||||
|
threeCamera.fov = Cesium.Math.toDegrees(frustum.fovy)
|
||||||
|
}
|
||||||
|
threeCamera.aspect = canvas.width / canvas.height
|
||||||
|
threeCamera.updateProjectionMatrix()
|
||||||
|
}
|
||||||
21
web/src/store/authStore.ts
Normal file
21
web/src/store/authStore.ts
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import { create } from 'zustand'
|
||||||
|
import type { User } from 'oidc-client-ts'
|
||||||
|
|
||||||
|
interface AuthState {
|
||||||
|
user: User | null
|
||||||
|
isLoading: boolean
|
||||||
|
setUser: (user: User | null) => void
|
||||||
|
setLoading: (loading: boolean) => void
|
||||||
|
logout: () => Promise<void>
|
||||||
|
}
|
||||||
|
|
||||||
|
export const useAuthStore = create<AuthState>((set) => ({
|
||||||
|
user: null,
|
||||||
|
isLoading: true,
|
||||||
|
setUser: (user) => set({ user }),
|
||||||
|
setLoading: (isLoading) => set({ isLoading }),
|
||||||
|
logout: async () => {
|
||||||
|
const { userManager } = await import('../auth/userManager')
|
||||||
|
await userManager.signoutRedirect()
|
||||||
|
},
|
||||||
|
}))
|
||||||
23
web/src/store/challengeStore.ts
Normal file
23
web/src/store/challengeStore.ts
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
import { create } from 'zustand'
|
||||||
|
|
||||||
|
interface ChallengeState {
|
||||||
|
selectedChallengeId: string | null
|
||||||
|
setSelectedChallengeId: (id: string | null) => void
|
||||||
|
|
||||||
|
drawingMode: boolean
|
||||||
|
setDrawingMode: (active: boolean) => void
|
||||||
|
|
||||||
|
draftPolygon: GeoJSON.Polygon | null
|
||||||
|
setDraftPolygon: (polygon: GeoJSON.Polygon | null) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export const useChallengeStore = create<ChallengeState>((set) => ({
|
||||||
|
selectedChallengeId: null,
|
||||||
|
setSelectedChallengeId: (selectedChallengeId) => set({ selectedChallengeId }),
|
||||||
|
|
||||||
|
drawingMode: false,
|
||||||
|
setDrawingMode: (drawingMode) => set({ drawingMode }),
|
||||||
|
|
||||||
|
draftPolygon: null,
|
||||||
|
setDraftPolygon: (draftPolygon) => set({ draftPolygon }),
|
||||||
|
}))
|
||||||
33
web/src/store/mapStore.ts
Normal file
33
web/src/store/mapStore.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import { create } from 'zustand'
|
||||||
|
import type { BBox } from '../types/geo'
|
||||||
|
import type { SplatMapProperties, ChallengeMapProperties } from '../types/api'
|
||||||
|
|
||||||
|
interface MapState {
|
||||||
|
cameraHeight: number
|
||||||
|
bbox: BBox | null
|
||||||
|
setCameraState: (height: number, bbox: BBox) => void
|
||||||
|
|
||||||
|
activeSplatId: string | null
|
||||||
|
setActiveSplatId: (id: string | null) => void
|
||||||
|
|
||||||
|
loadedSplats: GeoJSON.Feature<GeoJSON.Point, SplatMapProperties>[]
|
||||||
|
setLoadedSplats: (splats: GeoJSON.Feature<GeoJSON.Point, SplatMapProperties>[]) => void
|
||||||
|
|
||||||
|
loadedChallenges: GeoJSON.Feature<GeoJSON.Point, ChallengeMapProperties>[]
|
||||||
|
setLoadedChallenges: (challenges: GeoJSON.Feature<GeoJSON.Point, ChallengeMapProperties>[]) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export const useMapStore = create<MapState>((set) => ({
|
||||||
|
cameraHeight: 10_000_000, // start high (whole-earth view)
|
||||||
|
bbox: null,
|
||||||
|
setCameraState: (cameraHeight, bbox) => set({ cameraHeight, bbox }),
|
||||||
|
|
||||||
|
activeSplatId: null,
|
||||||
|
setActiveSplatId: (activeSplatId) => set({ activeSplatId }),
|
||||||
|
|
||||||
|
loadedSplats: [],
|
||||||
|
setLoadedSplats: (loadedSplats) => set({ loadedSplats }),
|
||||||
|
|
||||||
|
loadedChallenges: [],
|
||||||
|
setLoadedChallenges: (loadedChallenges) => set({ loadedChallenges }),
|
||||||
|
}))
|
||||||
38
web/src/store/splatStore.ts
Normal file
38
web/src/store/splatStore.ts
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
import { create } from 'zustand'
|
||||||
|
import type { SplatDetail, SplatDownloadUrl } from '../types/api'
|
||||||
|
|
||||||
|
interface CachedDownloadUrl extends SplatDownloadUrl {
|
||||||
|
cachedAt: number // Date.now()
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SplatState {
|
||||||
|
splatCache: Map<string, SplatDetail>
|
||||||
|
setSplatDetail: (id: string, detail: SplatDetail) => void
|
||||||
|
|
||||||
|
downloadUrlCache: Map<string, CachedDownloadUrl>
|
||||||
|
setDownloadUrl: (id: string, data: SplatDownloadUrl) => void
|
||||||
|
getValidDownloadUrl: (id: string) => string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export const useSplatStore = create<SplatState>((set, get) => ({
|
||||||
|
splatCache: new Map(),
|
||||||
|
setSplatDetail: (id, detail) =>
|
||||||
|
set((s) => ({ splatCache: new Map(s.splatCache).set(id, detail) })),
|
||||||
|
|
||||||
|
downloadUrlCache: new Map(),
|
||||||
|
setDownloadUrl: (id, data) =>
|
||||||
|
set((s) => ({
|
||||||
|
downloadUrlCache: new Map(s.downloadUrlCache).set(id, {
|
||||||
|
...data,
|
||||||
|
cachedAt: Date.now(),
|
||||||
|
}),
|
||||||
|
})),
|
||||||
|
getValidDownloadUrl: (id) => {
|
||||||
|
const entry = get().downloadUrlCache.get(id)
|
||||||
|
if (!entry) return null
|
||||||
|
// Expire 5 minutes before the server-reported expiry to avoid edge cases
|
||||||
|
const expiresAt = new Date(entry.expires_at).getTime() - 5 * 60 * 1000
|
||||||
|
if (Date.now() > expiresAt) return null
|
||||||
|
return entry.url
|
||||||
|
},
|
||||||
|
}))
|
||||||
123
web/src/types/api.ts
Normal file
123
web/src/types/api.ts
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
// TypeScript mirrors of the Django backend serializers.
// Keep in sync with apps/splats/serializers.py, apps/challenges/serializers.py, etc.

// Lifecycle of a splat: capture uploaded -> processing -> ready (or failed).
export type SplatStatus = 'pending' | 'processing' | 'ready' | 'failed'
export type ChallengeStatus = 'active' | 'closed'
export type JobStatus = 'queued' | 'running' | 'succeeded' | 'failed'

// ---------- Splats ----------

// Properties carried on each splat Feature in the map layer
// (the geometry itself is the GeoJSON Point).
export interface SplatMapProperties {
  id: string
  heading: number | null
  altitude: number | null
  preview_url: string | null
  splat_file_size: number | null
  created_at: string
}

// Full splat record as returned by the detail endpoint.
// Timestamps are strings as serialized by the backend — presumably ISO 8601;
// confirm against the Django serializer before parsing.
export interface SplatDetail {
  id: string
  owner_username: string
  challenge_id: string | null
  status: SplatStatus
  is_published: boolean
  location: GeoJSON.Point | null
  coverage: GeoJSON.Polygon | null
  heading: number | null
  altitude: number | null
  preview_url: string | null
  splat_file_size: number | null
  quality_score: number | null
  frame_count: number | null
  created_at: string
  updated_at: string
}

// Presigned URL for downloading a splat file; invalid after expires_at.
export interface SplatDownloadUrl {
  url: string
  expires_at: string
}

// Processing job attached to a splat (see MineSplat.job).
export interface SplatJob {
  id: string
  status: JobStatus
  current_step: string
  progress: number
  retry_count: number
  error_message: string
  queued_at: string
  started_at: string | null
  finished_at: string | null
}

// A splat owned by the current user, including its processing job if any.
export interface MineSplat {
  id: string
  status: SplatStatus
  is_published: boolean
  challenge_id: string | null
  preview_url: string | null
  quality_score: number | null
  frame_count: number | null
  created_at: string
  updated_at: string
  job: SplatJob | null
}

// ---------- Challenges ----------

// Properties carried on each challenge Feature in the map layer.
export interface ChallengeMapProperties {
  id: string
  title: string
  status: ChallengeStatus
  submission_count: number
  max_submissions: number | null
  expires_at: string | null
  created_at: string
}

// Minimal splat info shown as a thumbnail inside a challenge detail view.
export interface PreviewSplat {
  id: string
  preview_url: string | null
  created_at: string
}

// Full challenge record as returned by the detail endpoint.
export interface ChallengeDetail {
  id: string
  title: string
  description: string
  status: ChallengeStatus
  creator_username: string
  region: GeoJSON.Polygon
  region_centroid: GeoJSON.Point
  max_submissions: number | null
  submission_count: number
  participant_count: number
  is_participating: boolean
  preview_splats: PreviewSplat[]
  expires_at: string | null
  created_at: string
  updated_at: string
}

// Membership record created when a user joins a challenge.
export interface ChallengeParticipant {
  id: string
  joined_at: string
}

// Request body for creating a challenge; optional fields may be omitted.
export interface ChallengeCreateBody {
  title: string
  description?: string
  region: GeoJSON.Polygon
  max_submissions?: number | null
  expires_at?: string | null
}

// ---------- Users ----------

// Public profile of a user.
export interface UserProfile {
  id: string
  username: string
  avatar_url: string
  created_at: string
}
|
||||||
28
web/src/types/gaussian-splats-3d.d.ts
vendored
Normal file
28
web/src/types/gaussian-splats-3d.d.ts
vendored
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
// Type stub for @mkkellogg/gaussian-splats-3d — no official @types package exists.
// Only the surface used by this app is declared; extend as new APIs are adopted.
declare module '@mkkellogg/gaussian-splats-3d' {
  import type * as THREE from 'three'

  // Options accepted by the Viewer constructor.
  interface ViewerOptions {
    selfDrivenMode?: boolean
    useBuiltInControls?: boolean
    renderer?: THREE.WebGLRenderer
    camera?: THREE.PerspectiveCamera
  }

  // Options for loading a splat scene into the viewer.
  interface AddSplatSceneOptions {
    progressiveLoad?: boolean
    onProgress?: (progress: number) => void
    position?: [number, number, number]
    // NOTE(review): presumably a quaternion [x, y, z, w] — confirm against
    // the library's documentation before relying on component order.
    rotation?: [number, number, number, number]
    scale?: [number, number, number]
  }

  // Main viewer class from the library.
  export class Viewer {
    // Underlying Three.js object for the loaded splats; null until a scene loads.
    splatMesh: THREE.Object3D | null
    constructor(options?: ViewerOptions)
    // Resolves once the splat file at `url` has been loaded into the scene.
    addSplatScene(url: string, options?: AddSplatSceneOptions): Promise<void>
    update(): void
    render(): void
    dispose(): void
  }
}
|
||||||
17
web/src/types/geo.ts
Normal file
17
web/src/types/geo.ts
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
// Lightweight GeoJSON type helpers. The full GeoJSON namespace comes from
|
||||||
|
// TypeScript's built-in DOM lib — no extra @types package needed.
|
||||||
|
|
||||||
|
export type BBox = [minLon: number, minLat: number, maxLon: number, maxLat: number]
|
||||||
|
|
||||||
|
export function bboxToString(bbox: BBox): string {
|
||||||
|
return bbox.join(',')
|
||||||
|
}
|
||||||
|
|
||||||
|
export function bboxFromRectangle(west: number, south: number, east: number, north: number): BBox {
|
||||||
|
return [
|
||||||
|
(west * 180) / Math.PI,
|
||||||
|
(south * 180) / Math.PI,
|
||||||
|
(east * 180) / Math.PI,
|
||||||
|
(north * 180) / Math.PI,
|
||||||
|
]
|
||||||
|
}
|
||||||
46
web/src/ui/MapOverlay.module.css
Normal file
46
web/src/ui/MapOverlay.module.css
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
/* Toolbar column pinned to the top-left corner, floating above the map
   (z-index 20 sits above the splat overlay canvas at z-index 10). */
.toolbar {
  position: fixed;
  top: 16px;
  left: 16px;
  z-index: 20;
  display: flex;
  flex-direction: column;
  gap: 8px;
  align-items: flex-start;
}

/* Base style for toolbar buttons: dark translucent pill with a blur backdrop. */
.btn {
  padding: 9px 16px;
  background: rgba(15, 15, 20, 0.85);
  backdrop-filter: blur(8px);
  border: 1px solid rgba(255,255,255,0.15);
  border-radius: 8px;
  color: #fff;
  font-size: 14px;
  font-weight: 500;
  cursor: pointer;
  white-space: nowrap;
}
.btn:hover { background: rgba(40, 40, 55, 0.9); }

/* Applied while drawing mode is active (indigo highlight). */
.btn.active {
  background: rgba(99, 102, 241, 0.8);
  border-color: #6366f1;
}

/* Sign-out button: visually de-emphasised variant of .btn. */
.logout {
  margin-top: 4px;
  font-size: 13px;
  color: rgba(255,255,255,0.6);
}
.logout:hover { color: #fff; }

/* Amber callout shown under the toolbar while drawing a challenge region. */
.hint {
  margin: 0;
  padding: 8px 12px;
  background: rgba(245, 158, 11, 0.15);
  border: 1px solid rgba(245, 158, 11, 0.4);
  border-radius: 6px;
  color: #fbbf24;
  font-size: 13px;
}
|
||||||
43
web/src/ui/MapOverlay.tsx
Normal file
43
web/src/ui/MapOverlay.tsx
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
import { useChallengeStore } from '../store/challengeStore'
|
||||||
|
import { useAuthStore } from '../store/authStore'
|
||||||
|
import styles from './MapOverlay.module.css'
|
||||||
|
|
||||||
|
export function MapOverlay() {
|
||||||
|
const { drawingMode, setDrawingMode, setDraftPolygon } = useChallengeStore()
|
||||||
|
const { logout } = useAuthStore()
|
||||||
|
|
||||||
|
function handleDrawToggle() {
|
||||||
|
if (drawingMode) {
|
||||||
|
setDrawingMode(false)
|
||||||
|
setDraftPolygon(null)
|
||||||
|
} else {
|
||||||
|
setDrawingMode(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={styles.toolbar}>
|
||||||
|
<button
|
||||||
|
className={`${styles.btn} ${drawingMode ? styles.active : ''}`}
|
||||||
|
onClick={handleDrawToggle}
|
||||||
|
title={drawingMode ? 'Cancel drawing' : 'Create challenge (draw region)'}
|
||||||
|
>
|
||||||
|
{drawingMode ? '✕ Cancel' : '+ Challenge'}
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{drawingMode && (
|
||||||
|
<p className={styles.hint}>
|
||||||
|
Click to place vertices · Right-click to close polygon
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<button
|
||||||
|
className={`${styles.btn} ${styles.logout}`}
|
||||||
|
onClick={() => logout()}
|
||||||
|
title="Sign out"
|
||||||
|
>
|
||||||
|
Sign out
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user