diff --git a/src/zyra/api/routers/cli.py b/src/zyra/api/routers/cli.py index 4ff00f3a..a910aa99 100644 --- a/src/zyra/api/routers/cli.py +++ b/src/zyra/api/routers/cli.py @@ -16,10 +16,12 @@ from __future__ import annotations import argparse +import mimetypes +from pathlib import Path from typing import Any from fastapi import APIRouter, BackgroundTasks, HTTPException, Request -from fastapi.responses import HTMLResponse +from fastapi.responses import FileResponse, HTMLResponse from zyra.api.models.cli_request import ( CLIRunRequest, @@ -351,6 +353,262 @@ def list_cli_examples() -> dict[str, Any]: } ) + # 4b) Visualize: globe (webgl-sphere) with packaged Earth texture + examples.append( + { + "name": "visualize_globe_webgl", + "description": "Generate a WebGL globe bundle using the packaged Earth texture (writes index.html + assets).", + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "webgl-sphere", + "output": "api_examples/globe_webgl", + "texture": "pkg:zyra.assets.images/earth_vegetation.jpg", + "probe_data": "pkg:zyra.assets.samples/points.csv", + "title": "NASA Blue Marble", + "description": "Global imagery derived from NASA's Blue Marble dataset.", + "probe": True, + }, + }, + "preview": { + "type": "iframe", + "url": "/examples/view/globe_webgl/", + }, + } + ) + + # 4b-1) Visualize: globe (webgl-sphere) animating local drought risk frames + examples.append( + { + "name": "visualize_globe_webgl_drought", + "description": "Animate NOAA drought risk imagery on the Three.js globe using the local _work/drought frame stack with interactive playback controls.", + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "webgl-sphere", + "output": "api_examples/globe_webgl_drought", + "texture_pattern": "drought/DroughtRisk_Weekly_*.png", + "date_format": "%Y%m%d", + "animate": "time", + "auto_rotate": True, + "title": "Weekly Drought Risk", + "description": "Animated drought outlook sourced from ftp.nnvl.noaa.gov/SOS/DroughtRisk_Weekly.", + "probe": False, + }, + }, + "preview": { + "type": "iframe", + "url": "/examples/view/globe_webgl_drought/", + }, + "warning": "Requires drought frames synced under DATA_DIR/drought (default _work/drought). Run zyra acquire ftp … beforehand.", + } + ) + + # 4b-2) Visualize: Cesium globe animating local drought risk frames + examples.append( + { + "name": "visualize_globe_cesium_drought", + "description": "Animate the local drought risk frame stack on a CesiumJS globe with built-in timeline controls.", + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "cesium-globe", + "output": "api_examples/globe_cesium_drought", + "texture_pattern": "drought/DroughtRisk_Weekly_*.png", + "date_format": "%Y%m%d", + "animate": "time", + "auto_rotate": False, + "probe": False, + "title": "Weekly Drought Risk (Cesium)", + "description": "Interactive Cesium globe with timeline playback of NOAA drought risk imagery.", + }, + }, + "preview": { + "type": "iframe", + "url": "/examples/view/globe_cesium_drought/", + }, + "warning": "Requires drought frames synced under DATA_DIR/drought (default _work/drought). Run zyra acquire ftp … beforehand.", + } + ) + + # 4b-3) Visualize: globe (webgl-sphere) from pre-generated drought video + examples.append( + { + "name": "visualize_globe_webgl_drought_video", + "description": ( + "Render the weekly drought risk timeline by extracting frames from drought/drought.mp4. 
" + "This variant samples at the video cadence (30 fps) and anchors the first frame to 2024-12-05T00:00:00Z." + ), + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "webgl-sphere", + "output": "api_examples/globe_webgl_drought_video", + "video_source": "drought/drought.mp4", + "fps": 30.0, + "frame_cache": "drought/frame_cache", + "frames_meta": "drought/frames_meta.json", + "animate": "time", + "title": "Weekly Drought Risk (Video)", + "description": "WebGL globe backed by the pre-rendered drought MP4 (frames extracted automatically).", + "probe": False, + }, + }, + "preview": { + "type": "iframe", + "url": "/examples/view/globe_webgl_drought_video/", + }, + "warning": ( + "Requires ffmpeg/ffprobe in PATH, drought.mp4 under DATA_DIR/drought (default _work/drought), and frames_meta.json with the desired cadence. " + "Generate the video via `zyra visualize compose-video --fps 30 ...`, run `zyра transform metadata --frames _work/drought --datetime-format %Y%m%d --period-seconds 604800 --output _work/drought/frames_meta.json`, " + "then mirror this API call locally with " + "`zyra visualize globe --target webgl-sphere --output _work/drought_globe " + "--video-source _work/drought/drought.mp4 --fps 30 --frames-meta _work/drought/frames_meta.json " + "--animate time --title 'Weekly Drought Risk'`." + ), + } + ) + + # 4b-4) Visualize: Cesium globe from pre-generated drought video + examples.append( + { + "name": "visualize_globe_cesium_drought_video", + "description": ( + "Render the drought risk MP4 on a Cesium globe; timestamps track the 30 fps cadence starting at 2024-12-05T00:00:00Z." + ), + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "cesium-globe", + "output": "api_examples/globe_cesium_drought_video", + "video_source": "drought/drought.mp4", + "fps": 30.0, + "frame_cache": "drought/frame_cache", + "frames_meta": "drought/frames_meta.json", + "animate": "time", + "probe": False, + "title": "Weekly Drought Risk (Cesium Video)", + "description": "Cesium globe that extracts frames from a local drought MP4 and provides timeline playback.", + }, + }, + "preview": { + "type": "iframe", + "url": "/examples/view/globe_cesium_drought_video/", + }, + "warning": ( + "Requires ffmpeg/ffprobe in PATH plus drought.mp4 under DATA_DIR/drought. " + "Match the start timestamp to your video cadence and regenerate frames_meta.json with the correct period before running through the API." + ), + } + ) + + # 4c) Visualize: Cesium globe seeded with Age of the Seafloor probe data + examples.append( + { + "name": "visualize_globe_cesium", + "description": ( + "Build a CesiumJS globe with probe points derived from NOAA NCEI's Age of the Seafloor dataset. " + "Outputs a bundle under api_examples/ with an interactive iframe preview. Uses the ArcGIS tiled " + "seafloor age service for imagery (CESIUM_ION_TOKEN optional for terrain/fallback layers)." + ), + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "cesium-globe", + "output": "api_examples/globe_cesium_seafloor", + "title": "Age of the Seafloor (Cesium)", + "description": ( + "Color-coded plate ages in million years with probe sampling seeded from Science On a Sphere " + "metadata for the Age of the Seafloor collection." 
+ ), + "tile_url": "https://tiledimageservices.arcgis.com/P3ePLMYs2RVChkJx/ArcGIS/rest/services/Seafloor_Age_02_WM/ImageServer", + "tile_credit": "Tiles © Esri, NOAA NCEI", + "legend": "https://d3sik7mbbzunjo.cloudfront.net/land/sea_floor_age/colorbar_contour_en.png", + "probe_data": "pkg:zyra.assets.samples/age_of_seafloor_points.csv", + "probe_units": "Ma", + "auto_rotate": True, + "auto_rotate_speed": 0.4, + "lighting": False, + "width": 1280, + "height": 720, + }, + }, + "warning": ( + "Requires network access to fetch ArcGIS tiled imagery and the remote legend. " + "Provide `CESIUM_ION_TOKEN` if you want Cesium World Terrain or fallback base imagery." + ), + "preview": { + "type": "iframe", + "url": "/examples/view/globe_cesium_seafloor/", + }, + } + ) + + # 4d) Visualize: Cesium globe with NASA GIBS VIIRS true-color tiles + examples.append( + { + "name": "visualize_globe_cesium_viirs", + "description": ( + "Build a CesiumJS globe that streams NASA GIBS near real-time VIIRS SNPP " + "Corrected Reflectance (True Color) tiles." + ), + "request": { + "stage": "visualize", + "command": "globe", + "mode": "sync", + "args": { + "target": "cesium-globe", + "output": "api_examples/globe_viirs_gibs", + "title": "VIIRS True Color (Cesium)", + "description": ( + "NASA GIBS SNPP VIIRS corrected reflectance (True Color) tiles streamed from the " + "Web Map Tile Service." + ), + "tile_url": ( + "https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/" + "VIIRS_SNPP_CorrectedReflectance_TrueColor/default/" + "{time}/GoogleMapsCompatible_Level9/{TileMatrix}/{TileRow}/{TileCol}.jpg" + ), + "tile_type": "template", + "tile_scheme": "webmercator", + "tile_time_key": "time", + "tile_time_start": "2024-07-16", + "tile_time_end": "2024-07-18", + "tile_time_period": "1d", + "animate": "time", + "tile_min_level": 0, + "tile_max_level": 9, + "probe": False, + "auto_rotate": True, + "auto_rotate_speed": 0.25, + "lighting": False, + "width": 1280, + "height": 720, + }, + }, + "warning": ( + "Requires network access to fetch VIIRS tiles from NASA GIBS. Update `tile_time_values` " + "to browse different archive dates (uses the `best` collection)." + ), + "preview": { + "type": "iframe", + "url": "/examples/view/globe_viirs_gibs/", + }, + } + ) + # 5) Export: upload a local file to S3 (one-off) examples.append( { @@ -969,6 +1227,51 @@ def examples_page(request: Request) -> HTMLResponse: const runAsync = el('button', { textContent: 'Run (async)' }); const controls = el('div', { className: 'row' }, []); const warn = el('div', { className: 'small', textContent: '' }); + const setWarn = (msg) => { + warn.textContent = msg || ''; + warn.style.display = msg ? 'block' : 'none'; + }; + const previewWrap = el('div', { className: 'preview', style: 'margin-top:0.5rem' }); + let previewShown = false; + const setPreviewMessage = (msg) => { + if (!ex.preview) return; + previewShown = false; + previewWrap.textContent = msg; + }; + const refreshPreview = () => { + if (!ex.preview || !ex.preview.url) return; + previewShown = true; + const baseUrl = ex.preview.url; + const bust = baseUrl + (baseUrl.includes('?') ? 
'&' : '?') + 't=' + Date.now(); + previewWrap.innerHTML = ''; + previewWrap.appendChild( + el('div', { className: 'small' }, [ + el('a', { + href: bust, + target: '_blank', + rel: 'noopener', + textContent: 'Open preview in new tab', + }), + ]) + ); + if (ex.preview.type === 'iframe') { + previewWrap.appendChild( + el('iframe', { + src: bust, + loading: 'lazy', + style: 'width:100%;min-height:420px;border:1px solid #ddd;border-radius:6px;margin-top:0.5rem;', + }) + ); + } + }; + if (ex.preview) { + setPreviewMessage('Run to generate preview.'); + } + if (ex.warning) { + setWarn(ex.warning); + } else { + setWarn(''); + } // Add Dry-run toggle for pipeline run examples try { const body = ex.request || {}; @@ -983,7 +1286,7 @@ def examples_page(request: Request) -> HTMLResponse: obj.args = obj.args || {}; obj.args.dry_run = !!dry.checked; area.value = JSON.stringify(obj, null, 2); - warn.textContent = dry.checked ? '' : 'Warning: This will attempt network I/O and may require credentials.'; + setWarn(dry.checked ? '' : 'Warning: This will attempt network I/O and may require credentials.'); } catch {} }; dry.onchange = updateBody; @@ -992,19 +1295,32 @@ def examples_page(request: Request) -> HTMLResponse: } catch {} runSync.onclick = async () => { out.textContent = ''; status.textContent = 'Running…'; + if (ex.preview) setPreviewMessage('Generating preview…'); try { const body = JSON.parse(area.value); body.mode = 'sync'; const r = await fetch('/cli/run', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(body)}); - const t = await r.text(); + const respText = await r.text(); status.textContent = 'HTTP ' + r.status; - out.textContent = t; + let parsed = null; + try { parsed = JSON.parse(respText); } catch {} + out.textContent = parsed ? JSON.stringify(parsed, null, 2) : respText; + const succeeded = parsed + ? 
(parsed.status === 'success' || parsed.exit_code === 0) + : r.ok; + if (succeeded) { + refreshPreview(); + } else if (ex.preview) { + setPreviewMessage('Preview unavailable (command failed).'); + } } catch (e) { status.textContent = 'Error'; out.textContent = String(e); + if (ex.preview) setPreviewMessage('Preview unavailable (command failed).'); } }; runAsync.onclick = async () => { out.textContent = ''; status.textContent = 'Submitting…'; + if (ex.preview) setPreviewMessage('Waiting for async job…'); try { const body = JSON.parse(area.value); body.mode = 'async'; @@ -1025,22 +1341,36 @@ def examples_page(request: Request) -> HTMLResponse: out.textContent = JSON.stringify(jjs, null, 2); if (['succeeded','failed','canceled'].includes(String(jjs.status))) { clearInterval(timer); + if (ex.preview) { + if (String(jjs.status) === 'succeeded') { + refreshPreview(); + } else { + setPreviewMessage('Preview unavailable (job did not succeed).'); + } + } } if (tries > 30) { clearInterval(timer); status.textContent += ' (timeout)'; + if (ex.preview && !previewShown) { + setPreviewMessage('Preview unavailable (job timeout).'); + } } } catch (e) { clearInterval(timer); status.textContent = 'Polling error'; + if (ex.preview) setPreviewMessage('Preview unavailable (polling error).'); } }, 1000); + } else if (ex.preview) { + setPreviewMessage('Preview unavailable (job id missing).'); } } catch (e) { status.textContent = 'Error'; out.textContent = String(e); + if (ex.preview) setPreviewMessage('Preview unavailable (command failed).'); } }; - const card = el('div', { className: 'example' }, [ + const children = [ el('h3', { textContent: ex.name || ('Example #' + (idx+1)) }), el('div', { className: 'small', textContent: ex.description || '' }), area, @@ -1048,7 +1378,11 @@ def examples_page(request: Request) -> HTMLResponse: warn, el('div', { className: 'row' }, [ runSync, runAsync, status ]), out, - ]); + ]; + if (ex.preview) { + children.push(previewWrap); + } + const card = el('div', { className: 'example' }, children); wrap.appendChild(card); }); } catch (e) { @@ -1060,3 +1394,35 @@ def examples_page(request: Request) -> HTMLResponse: """ return HTMLResponse(content=html) + + +@router.get("/examples/view/{bundle}/{asset_path:path}", include_in_schema=False) +def examples_view(bundle: str, asset_path: str = "") -> FileResponse: + """Serve generated example bundles under ``api_examples``. + + Limits access to the ``api_examples`` directory within ``DATA_DIR`` (or ``_work`` + when unset) and resolves directory requests to ``index.html``. 
+ """ + + base_root = Path(env("DATA_DIR", "_work") or "_work").expanduser() + examples_root = (base_root / "api_examples").resolve() + if not examples_root.exists(): + raise HTTPException(status_code=404, detail="Not found") + + relative = Path(bundle) + if asset_path: + relative = relative / asset_path + if not asset_path or asset_path.endswith("/"): + relative = relative / "index.html" + + target = (examples_root / relative).resolve() + try: + target.relative_to(examples_root) + except ValueError as exc: # pragma: no cover - defensive + raise HTTPException(status_code=404, detail="Not found") from exc + + if not target.exists() or not target.is_file(): + raise HTTPException(status_code=404, detail="Not found") + + media_type = mimetypes.guess_type(str(target))[0] or "application/octet-stream" + return FileResponse(target, media_type=media_type) diff --git a/src/zyra/api/workers/executor.py b/src/zyra/api/workers/executor.py index edfa902d..772a7269 100644 --- a/src/zyra/api/workers/executor.py +++ b/src/zyra/api/workers/executor.py @@ -267,14 +267,25 @@ def _args_dict_to_argv(stage: str, command: str, args: dict[str, Any]) -> list[s argv.append(str(path)) # Remaining keys become long flags + negation_flags: dict[tuple[str, str], dict[str, str]] = { + ("visualize", "globe"): { + "probe": "--no-probe", + "lighting": "--no-lighting", + "auto_rotate": "--no-auto-rotate", + } + } + for key, value in norm_args.items(): if value is None: continue - flag = f"--{_to_kebab(key)}" if isinstance(value, bool): + neg_flag = negation_flags.get((stage, command), {}).get(key) if value: - argv.append(flag) + argv.append(f"--{_to_kebab(key)}") + elif neg_flag: + argv.append(neg_flag) continue + flag = f"--{_to_kebab(key)}" if isinstance(value, (list, tuple)): for item in value: argv.extend([flag, str(item)]) diff --git a/src/zyra/assets/samples/__init__.py b/src/zyra/assets/samples/__init__.py new file mode 100644 index 00000000..028239fc --- /dev/null +++ b/src/zyra/assets/samples/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Packaged sample datasets for quick-start demos.""" diff --git a/src/zyra/assets/samples/age_of_seafloor_points.csv b/src/zyra/assets/samples/age_of_seafloor_points.csv new file mode 100644 index 00000000..723bc44c --- /dev/null +++ b/src/zyra/assets/samples/age_of_seafloor_points.csv @@ -0,0 +1,219 @@ +lat,lon,value,units +-60.00,-180.00,180.0,Ma +-60.00,-158.82,180.0,Ma +-60.00,-137.65,110.5,Ma +-60.00,-116.47,58.5,Ma +-60.00,-95.29,180.0,Ma +-60.00,-74.12,180.0,Ma +-60.00,-52.94,58.7,Ma +-60.00,-31.76,110.3,Ma +-60.00,-10.59,180.0,Ma +-60.00,10.59,180.0,Ma +-60.00,31.76,56.4,Ma +-60.00,52.94,30.2,Ma +-60.00,74.12,157.3,Ma +-60.00,95.29,180.0,Ma +-60.00,116.47,180.0,Ma +-60.00,137.65,180.0,Ma +-60.00,158.82,180.0,Ma +-60.00,180.00,180.0,Ma +-55.00,-150.00,180.0,Ma +-55.00,-141.11,136.5,Ma +-55.00,-132.22,83.2,Ma +-55.00,-123.33,29.8,Ma +-55.00,-114.44,62.0,Ma +-55.00,-105.56,115.3,Ma +-55.00,-96.67,168.7,Ma +-55.00,-27.73,125.8,Ma +-55.00,-21.82,161.2,Ma +-55.00,-15.91,180.0,Ma +-55.00,-10.00,180.0,Ma +-55.00,140.00,180.0,Ma +-55.00,145.57,180.0,Ma +-55.00,151.14,180.0,Ma +-55.00,156.71,180.0,Ma +-55.00,162.29,180.0,Ma +-55.00,167.86,180.0,Ma +-55.00,173.43,180.0,Ma +-55.00,179.00,180.0,Ma +-50.00,-180.00,180.0,Ma +-50.00,-158.82,180.0,Ma +-50.00,-137.65,121.4,Ma +-50.00,-116.47,40.6,Ma +-50.00,-95.29,167.7,Ma +-50.00,-10.59,180.0,Ma +-50.00,10.59,172.5,Ma +-50.00,20.00,116.1,Ma +-50.00,30.56,52.7,Ma +-50.00,31.76,45.5,Ma +-50.00,41.11,45.6,Ma 
+-50.00,51.67,23.8,Ma +-50.00,52.94,18.9,Ma +-50.00,62.22,74.6,Ma +-50.00,72.78,137.9,Ma +-50.00,74.12,145.9,Ma +-50.00,83.33,180.0,Ma +-50.00,93.89,180.0,Ma +-50.00,95.29,180.0,Ma +-50.00,104.44,180.0,Ma +-50.00,115.00,180.0,Ma +-50.00,116.47,180.0,Ma +-50.00,137.65,180.0,Ma +-50.00,158.82,180.0,Ma +-50.00,180.00,180.0,Ma +-43.12,-150.00,180.0,Ma +-43.12,-141.11,150.8,Ma +-43.12,-132.22,97.5,Ma +-43.12,-123.33,44.2,Ma +-43.12,-114.44,39.3,Ma +-43.12,-105.56,92.7,Ma +-43.12,-96.67,146.0,Ma +-42.14,156.71,180.0,Ma +-42.14,162.29,180.0,Ma +-42.14,167.86,180.0,Ma +-42.14,173.43,180.0,Ma +-42.14,179.00,180.0,Ma +-41.25,-27.73,97.0,Ma +-41.25,-21.82,132.4,Ma +-41.25,-15.91,167.9,Ma +-41.25,-10.00,180.0,Ma +-40.00,-180.00,180.0,Ma +-40.00,-158.82,180.0,Ma +-40.00,-137.65,134.2,Ma +-40.00,-116.47,20.8,Ma +-40.00,-95.29,147.9,Ma +-40.00,52.94,22.7,Ma +-40.00,74.12,132.3,Ma +-40.00,95.29,180.0,Ma +-40.00,158.82,180.0,Ma +-40.00,180.00,180.0,Ma +-37.50,41.11,52.1,Ma +-37.50,51.67,32.3,Ma +-37.50,62.22,57.3,Ma +-37.50,72.78,120.6,Ma +-37.50,83.33,180.0,Ma +-37.50,93.89,180.0,Ma +-37.50,104.44,180.0,Ma +-31.25,-150.00,180.0,Ma +-31.25,-141.11,167.4,Ma +-31.25,-132.22,114.0,Ma +-31.25,-123.33,60.7,Ma +-31.25,-114.44,14.5,Ma +-31.25,-105.56,67.8,Ma +-31.25,-96.67,121.2,Ma +-29.29,156.71,180.0,Ma +-29.29,162.29,180.0,Ma +-29.29,167.86,180.0,Ma +-29.29,173.43,180.0,Ma +-29.29,179.00,180.0,Ma +-27.50,-27.73,62.5,Ma +-27.50,-21.82,98.0,Ma +-25.00,41.11,59.9,Ma +-25.00,51.67,43.7,Ma +-25.00,62.22,37.1,Ma +-25.00,72.78,100.4,Ma +-25.00,83.33,163.8,Ma +-25.00,93.89,180.0,Ma +-25.00,104.44,180.0,Ma +-19.38,-150.00,180.0,Ma +-19.38,-141.11,180.0,Ma +-19.38,-132.22,131.9,Ma +-19.38,-123.33,78.5,Ma +-19.38,-114.44,25.2,Ma +-19.38,-105.56,41.7,Ma +-19.38,-96.67,95.0,Ma +-16.43,156.71,180.0,Ma +-16.43,162.29,180.0,Ma +-16.43,167.86,180.0,Ma +-16.43,173.43,180.0,Ma +-16.43,179.00,180.0,Ma +-13.75,-27.73,24.1,Ma +-13.75,-21.82,59.6,Ma +-12.50,41.11,68.4,Ma +-12.50,51.67,57.3,Ma +-12.50,62.22,14.8,Ma +-12.50,72.78,78.1,Ma +-12.50,83.33,141.4,Ma +-12.50,93.89,180.0,Ma +-12.50,104.44,180.0,Ma +-7.50,-150.00,180.0,Ma +-7.50,-141.11,180.0,Ma +-7.50,-132.22,150.1,Ma +-7.50,-123.33,96.7,Ma +-7.50,-114.44,43.4,Ma +-7.50,-105.56,15.2,Ma +-7.50,-96.67,68.5,Ma +-3.57,151.14,180.0,Ma +-3.57,156.71,180.0,Ma +-3.57,162.29,180.0,Ma +-3.57,167.86,180.0,Ma +-3.57,173.43,180.0,Ma +-3.57,179.00,180.0,Ma +0.00,-27.73,16.4,Ma +0.00,-21.82,19.1,Ma +0.00,41.11,77.1,Ma +0.00,51.67,72.2,Ma +0.00,62.22,8.8,Ma +0.00,72.78,54.5,Ma +0.00,83.33,117.8,Ma +0.00,93.89,180.0,Ma +0.00,104.44,180.0,Ma +0.00,115.00,180.0,Ma +4.38,-150.00,180.0,Ma +4.38,-141.11,180.0,Ma +4.38,-132.22,170.7,Ma +4.38,-123.33,117.4,Ma +4.38,-114.44,64.0,Ma +4.38,-105.56,10.7,Ma +4.38,-96.67,45.7,Ma +9.29,151.14,180.0,Ma +9.29,156.71,180.0,Ma +9.29,162.29,180.0,Ma +9.29,167.86,180.0,Ma +9.29,173.43,180.0,Ma +9.29,179.00,180.0,Ma +12.50,41.11,94.0,Ma +12.50,51.67,96.1,Ma +12.50,62.22,32.8,Ma +12.50,72.78,39.3,Ma +12.50,83.33,102.6,Ma +12.50,93.89,166.0,Ma +12.50,104.44,180.0,Ma +12.50,115.00,180.0,Ma +13.75,-27.73,56.8,Ma +13.75,-21.82,21.4,Ma +22.14,151.14,180.0,Ma +22.14,156.71,180.0,Ma +22.14,162.29,180.0,Ma +22.14,167.86,180.0,Ma +22.14,173.43,180.0,Ma +22.14,179.00,180.0,Ma +25.00,41.11,109.8,Ma +25.00,51.67,119.5,Ma +25.00,62.22,56.2,Ma +25.00,72.78,24.6,Ma +25.00,83.33,88.0,Ma +25.00,93.89,151.3,Ma +25.00,104.44,180.0,Ma +25.00,115.00,180.0,Ma +27.50,-45.45,180.0,Ma +27.50,-39.55,166.2,Ma +27.50,-33.64,130.7,Ma +27.50,-27.73,95.3,Ma +27.50,-21.82,59.8,Ma 
+35.00,151.14,180.0,Ma +35.00,156.71,180.0,Ma +35.00,162.29,180.0,Ma +35.00,167.86,180.0,Ma +35.00,173.43,180.0,Ma +35.00,179.00,180.0,Ma +41.25,-45.45,180.0,Ma +41.25,-39.55,180.0,Ma +41.25,-33.64,165.2,Ma +41.25,-27.73,129.7,Ma +41.25,-21.82,94.2,Ma +55.00,-45.45,180.0,Ma +55.00,-39.55,180.0,Ma +55.00,-33.64,180.0,Ma +55.00,-27.73,158.5,Ma +55.00,-21.82,123.0,Ma diff --git a/src/zyra/assets/samples/points.csv b/src/zyra/assets/samples/points.csv new file mode 100644 index 00000000..e68f2fc1 --- /dev/null +++ b/src/zyra/assets/samples/points.csv @@ -0,0 +1,19 @@ +lat,lon,popup +51.5074,-0.1278,Buckingham Palace (London) +48.8566,2.3522,Eiffel Tower (Paris) +40.7128,-74.0060,Statue of Liberty (New York) +34.0522,-118.2437,Griffith Observatory (Los Angeles) +37.7749,-122.4194,Golden Gate Bridge (San Francisco) +19.4326,-99.1332,Zocalo (Mexico City) +-23.5505,-46.6333,Ibirapuera Park (Sao Paulo) +-34.6037,-58.3816,Obelisco (Buenos Aires) +-1.2921,36.8219,Kenyatta International (Nairobi) +35.6895,139.6917,Shinjuku Station (Tokyo) +37.5665,126.9780,Gyeongbokgung (Seoul) +31.2304,121.4737,The Bund (Shanghai) +-33.8688,151.2093,Sydney Opera House (Sydney) +1.3521,103.8198,Marina Bay Sands (Singapore) +28.6139,77.2090,India Gate (New Delhi) +55.7558,37.6176,Red Square (Moscow) +64.1265,-21.8174,Hallgrimskirkja (Reykjavik) +47.6062,-122.3321,Pike Place Market (Seattle) diff --git a/src/zyra/connectors/backends/vimeo.py b/src/zyra/connectors/backends/vimeo.py index 5ddcba0f..c5ab6bc7 100644 --- a/src/zyra/connectors/backends/vimeo.py +++ b/src/zyra/connectors/backends/vimeo.py @@ -92,6 +92,73 @@ def fetch_bytes(video_id: str) -> bytes: # pragma: no cover - placeholder raise NotImplementedError("Ingest from Vimeo is not implemented yet") +def get_download_url( + video_id: str, + *, + token: str | None = None, + client_id: str | None = None, + client_secret: str | None = None, +) -> str | None: + """Return a progressive download URL for the given Vimeo video. + + Requires PyVimeo credentials (access token or client id/secret). When multiple + download renditions are available, the highest resolution progressive file is + returned. 
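+
+    A minimal usage sketch (the video id and token below are placeholders)::
+
+        url = get_download_url("123456789", token="<access-token>")
+        if url is None:
+            # no progressive download link was exposed for this video/account
+            ...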
+ """ + + client = _get_client(token=token, client_id=client_id, client_secret=client_secret) + video_id = str(video_id).strip("/") + if not video_id.startswith("videos/"): + video_uri = f"/videos/{video_id}" + else: + video_uri = f"/{video_id}" + try: + response = client.get(video_uri) + except Exception as exc: # pragma: no cover - network/SDK dependent + raise RuntimeError(_summarize_exception(exc, operation="get")) from exc + + data = None + if hasattr(response, "json"): + try: + data = response.json() + except Exception: # pragma: no cover - defensive + data = None + if not data: + data = getattr(response, "body", None) + if not isinstance(data, dict): + return None + + downloads = data.get("download") or [] + best_link = None + best_pixels = -1 + for entry in downloads: + if not isinstance(entry, dict): + continue + link = entry.get("link") + if not link: + continue + if entry.get("quality") == "source": + return str(link) + width = entry.get("width") or 0 + height = entry.get("height") or 0 + pixels = width * height + if pixels > best_pixels: + best_pixels = pixels + best_link = str(link) + if best_link: + return best_link + + files = data.get("files") or [] + progressive = [ + entry + for entry in files + if entry.get("quality") == "sd" or entry.get("quality") == "hd" + ] + if progressive: + return str(progressive[0].get("link_secure") or progressive[0].get("link")) + return None + + def upload_path( video_path: str, *, diff --git a/src/zyra/visualization/README.md b/src/zyra/visualization/README.md index 23ba3803..42dd81f3 100644 --- a/src/zyra/visualization/README.md +++ b/src/zyra/visualization/README.md @@ -8,6 +8,7 @@ Commands - `animate` — Render animations from frames or datasets. - `compose-video` — Compose image sequences into a video. - `interactive` — Generate interactive maps. +- `globe` — Build interactive WebGL or Cesium globe bundles. Common options (subset) - `--input` / `--inputs` — single or batch inputs @@ -21,3 +22,7 @@ Examples - Heatmap: `zyra visualize heatmap --input data.nc --var T --extent -180 180 -90 90 --output heatmap.png` - Vector: `zyra visualize vector --input data.nc --u U --v V --output wind.png` - Animation: `zyra visualize animate --inputs frames/*.png --fps 24 --output anim.mp4` +- Globe (WebGL): ``zyra visualize globe --target webgl-sphere --texture earth.jpg --legend legend.png --output webgl_globe`` +- Globe (Cesium): ``zyra visualize globe --target cesium-globe --tile-url https://tiledimageservices.arcgis.com/P3ePLMYs2RVChkJx/ArcGIS/rest/services/Seafloor_Age_02_WM/ImageServer --legend https://d3sik7mbbzunjo.cloudfront.net/land/sea_floor_age/colorbar_contour_en.png --output cesium_globe`` + +Globe bundles accept shared flags such as ``--title``/``--description`` for overlay copy, ``--legend`` for a title legend image, and ``--probe-gradient`` / ``--probe-lut`` / ``--probe-data`` so probe readouts can return color-decoded or dataset-backed values. Reusable color tables can be registered with ``--shared-gradient name=path`` when multiple layers or frame stacks should point at the same resource. Provide ``--video-source`` (local file, HTTP URL, or ``vimeo:ID``) together with ``--video-start`` and ``--video-fps`` to sample frames directly from video content—each extracted frame records an absolute timestamp based on the playback position. Textures and legends may reference local files, ``pkg:`` assets, or remote HTTP(S) URLs. 
Drag to rotate and use the mouse wheel or a touch pinch to zoom; ``--auto-rotate`` restores the legacy spin when desired. Use ``--lighting`` to opt back into shaded rendering—the viewer defaults to an unlit texture for clarity. The command writes an ``index.html`` alongside ``assets/`` that can be opened locally or published as static content. diff --git a/src/zyra/visualization/__init__.py b/src/zyra/visualization/__init__.py index 3e4a1e62..aa6ce0d6 100644 --- a/src/zyra/visualization/__init__.py +++ b/src/zyra/visualization/__init__.py @@ -36,570 +36,14 @@ "FIGURE_DPI", "MAP_STYLES", "apply_matplotlib_style", + "register_cli", ] -# ---- CLI registration --------------------------------------------------------------- - from typing import Any -from zyra.visualization.cli_animate import handle_animate -from zyra.visualization.cli_compose_video import handle_compose_video -from zyra.visualization.cli_contour import handle_contour -from zyra.visualization.cli_heatmap import handle_heatmap -from zyra.visualization.cli_interactive import handle_interactive -from zyra.visualization.cli_timeseries import handle_timeseries -from zyra.visualization.cli_vector import handle_vector +from .cli_register import register_cli as _register_cli def register_cli(subparsers: Any) -> None: - """Register visualization subcommands under a provided subparsers object. - - Adds: heatmap, contour, timeseries, vector, wind, animate, compose-video, interactive - Reuses existing CLI handlers where possible to avoid duplication. - """ - - # Removed duplicate per-command handlers in favor of dedicated modules - - # heatmap - p_hm = subparsers.add_parser( - "heatmap", - help="Visualization: render 2D heatmap", - description=( - "Render a heatmap from a 2D array or NetCDF variable with optional basemap, " - "styling, and geospatial extent." 
- ), - ) - p_hm.add_argument("--input", required=True, help="Path to .nc or .npy input") - p_hm.add_argument("--var", help="Variable name for NetCDF inputs") - p_hm.add_argument("--basemap", help="Path to background image") - p_hm.add_argument( - "--extent", - nargs=4, - type=float, - default=[-180, 180, -90, 90], - help="west east south north", - ) - p_hm.add_argument( - "--output", - help="Output PNG path (required when using --input; for --inputs use --output-dir)", - ) - p_hm.add_argument( - "--inputs", nargs="+", help="Multiple input paths for batch rendering" - ) - p_hm.add_argument( - "--output-dir", - dest="output_dir", - help="Directory to write outputs for --inputs", - ) - p_hm.add_argument("--width", type=int, default=1024) - p_hm.add_argument("--height", type=int, default=512) - p_hm.add_argument("--dpi", type=int, default=96) - p_hm.add_argument("--cmap", default="YlOrBr") - p_hm.add_argument("--colorbar", action="store_true") - p_hm.add_argument("--label") - p_hm.add_argument("--units") - p_hm.add_argument( - "--features", help="Comma-separated features: coastline,borders,gridlines" - ) - p_hm.add_argument( - "--xarray-engine", - dest="xarray_engine", - help="xarray engine for NetCDF inputs (e.g., netcdf4, h5netcdf, scipy)", - ) - p_hm.add_argument( - "--map-type", - choices=["image", "tile"], - default="image", - help="Basemap type: image (default) or tile", - ) - p_hm.add_argument( - "--tile-source", - help="Contextily tile source name or URL (when --map-type=tile)", - ) - p_hm.add_argument( - "--tile-zoom", - dest="tile_zoom", - type=int, - default=3, - help="Tile source zoom level", - ) - p_hm.add_argument("--timestamp", help="Overlay timestamp string") - p_hm.add_argument("--crs", help="Force input CRS (e.g., EPSG:3857)") - p_hm.add_argument( - "--reproject", - action="store_true", - help="Attempt reprojection to EPSG:4326 (limited support)", - ) - p_hm.add_argument( - "--timestamp-loc", - dest="timestamp_loc", - choices=["upper_left", "upper_right", "lower_left", "lower_right"], - default="lower_right", - help="Timestamp placement (axes-relative)", - ) - # Feature negations - p_hm.add_argument("--no-coastline", action="store_true") - p_hm.add_argument("--no-borders", action="store_true") - p_hm.add_argument("--no-gridlines", action="store_true") - p_hm.set_defaults(func=handle_heatmap) - - # contour - p_ct = subparsers.add_parser( - "contour", - help="Visualization: render contour/filled contours", - description=( - "Render contour or filled-contour images from a 2D array or NetCDF variable " - "with optional basemap and styling." 
- ), - ) - p_ct.add_argument("--input", help="Path to .nc or .npy input") - p_ct.add_argument("--inputs", nargs="+", help="Multiple inputs for batch rendering") - p_ct.add_argument( - "--output-dir", - dest="output_dir", - help="Directory to write outputs for --inputs", - ) - p_ct.add_argument("--var", help="Variable name for NetCDF inputs") - p_ct.add_argument("--basemap", help="Path to background image") - p_ct.add_argument( - "--extent", - nargs=4, - type=float, - default=[-180, 180, -90, 90], - help="west east south north", - ) - p_ct.add_argument( - "--output", - required=True, - help="Output PNG path (required for single --input; when using --inputs, prefer --output-dir)", - ) - p_ct.add_argument("--width", type=int, default=1024) - p_ct.add_argument("--height", type=int, default=512) - p_ct.add_argument("--dpi", type=int, default=96) - p_ct.add_argument("--cmap", default="YlOrBr") - p_ct.add_argument("--filled", action="store_true", help="Use filled contours") - p_ct.add_argument("--levels", default=10, help="Count or comma-separated levels") - p_ct.add_argument("--colorbar", action="store_true") - p_ct.add_argument("--label") - p_ct.add_argument("--units") - p_ct.add_argument( - "--features", help="Comma-separated features: coastline,borders,gridlines" - ) - p_ct.add_argument( - "--xarray-engine", - dest="xarray_engine", - help="xarray engine for NetCDF inputs (e.g., netcdf4, h5netcdf, scipy)", - ) - p_ct.add_argument("--map-type", choices=["image", "tile"], default="image") - p_ct.add_argument( - "--tile-source", help="Contextily tile source (when --map-type=tile)" - ) - p_ct.add_argument("--tile-zoom", dest="tile_zoom", type=int, default=3) - p_ct.add_argument("--timestamp", help="Overlay timestamp string") - p_ct.add_argument("--crs", help="Force input CRS (e.g., EPSG:3857)") - p_ct.add_argument("--reproject", action="store_true") - p_ct.add_argument( - "--timestamp-loc", - dest="timestamp_loc", - choices=["upper_left", "upper_right", "lower_left", "lower_right"], - default="lower_right", - help="Timestamp placement (axes-relative)", - ) - p_ct.add_argument("--no-coastline", action="store_true") - p_ct.add_argument("--no-borders", action="store_true") - p_ct.add_argument("--no-gridlines", action="store_true") - p_ct.set_defaults(func=handle_contour) - - # timeseries - p_ts = subparsers.add_parser( - "timeseries", - help="Visualization: render a time series from CSV or NetCDF", - description=( - "Plot a time series to a PNG image from CSV columns or a NetCDF variable, " - "with titles and axis labels." 
- ), - ) - p_ts.add_argument("--input", required=True, help="Path to .csv or .nc input") - p_ts.add_argument("--x", help="CSV: X column name (e.g., time)") - p_ts.add_argument("--y", help="CSV: Y column name (value)") - p_ts.add_argument("--var", help="NetCDF: variable name to plot") - p_ts.add_argument("--output", required=True, help="Output PNG path") - p_ts.add_argument("--width", type=int, default=1024) - p_ts.add_argument("--height", type=int, default=512) - p_ts.add_argument("--dpi", type=int, default=96) - p_ts.add_argument("--title") - p_ts.add_argument("--xlabel") - p_ts.add_argument("--ylabel") - p_ts.add_argument( - "--style", choices=["line", "marker", "line_marker"], default="line" - ) - p_ts.set_defaults(func=handle_timeseries) - - # vector - p_vector = subparsers.add_parser( - "vector", - help="Visualization: render vector fields (e.g., wind, currents)", - description=( - "Render vector fields from U/V arrays or NetCDF variables as quiver arrows or " - "streamlines with optional basemap." - ), - ) - p_vector.add_argument( - "--input", help="Path to .nc input (alternative to --u/--v .npy)" - ) - p_vector.add_argument( - "--inputs", nargs="+", help="Multiple inputs for batch rendering" - ) - p_vector.add_argument( - "--output-dir", - dest="output_dir", - help="Directory to write outputs for --inputs", - ) - p_vector.add_argument("--uvar", help="NetCDF: U variable name") - p_vector.add_argument("--vvar", help="NetCDF: V variable name") - p_vector.add_argument("--u", help="Path to U .npy file (alternative input)") - p_vector.add_argument("--v", help="Path to V .npy file (alternative input)") - p_vector.add_argument("--basemap", help="Path to background image") - p_vector.add_argument( - "--extent", - nargs=4, - type=float, - default=[-180, 180, -90, 90], - help="west east south north", - ) - p_vector.add_argument( - "--output", - required=True, - help="Output PNG path (required for single --input/--u/--v; when using --inputs, prefer --output-dir)", - ) - p_vector.add_argument("--width", type=int, default=1024) - p_vector.add_argument("--height", type=int, default=512) - p_vector.add_argument("--dpi", type=int, default=96) - p_vector.add_argument( - "--density", type=float, default=0.2, help="Arrow sampling density (0 bool: + if not isinstance(value, str): + return False + lower = value.lower() + return lower.startswith(("http://", "https://", "http:/", "https:/")) + + +def _normalize_remote_ref(value: str) -> str: + lower = value.lower() + if lower.startswith(("http://", "https://")): + return value + if lower.startswith(("http:/", "https:/")): + scheme, rest = value.split(":/", 1) + return f"{scheme}://{rest.lstrip('/')}" + return value + + +def _renderer_options(ns: Any) -> dict[str, Any]: + """Translate argparse namespace into renderer keyword options.""" + + def _coerce_bool(val: Any) -> bool: + if isinstance(val, bool): + return val + if isinstance(val, str): + v = val.strip().lower() + if v in {"false", "0", "no", "off"}: + return False + if v in {"true", "1", "yes", "on"}: + return True + return bool(val) + + def _parse_iso_timestamp(raw: str) -> tuple[datetime, bool, bool]: + value = str(raw).strip() + if not value: + raise ValueError("time value cannot be empty") + use_z = False + if value.endswith("Z"): + use_z = True + value = value[:-1] + try: + dt = datetime.fromisoformat(value) + except ValueError as exc: + raise ValueError(f"Invalid ISO-8601 timestamp '{raw}'") from exc + if dt.tzinfo is not None: + dt = dt.astimezone(timezone.utc).replace(tzinfo=None) + use_z = 
True + has_time = "T" in value + return dt, has_time, use_z + + def _format_iso_timestamp(dt: datetime, has_time: bool, use_z: bool) -> str: + if has_time: + base = dt.isoformat(timespec="seconds") + if use_z: + if base.endswith("+00:00"): + base = base[:-6] + return base + "Z" if not base.endswith("Z") else base + return base + return dt.date().isoformat() + + def _parse_time_period(spec: str | None) -> timedelta: + if spec is None: + return timedelta(days=1) + value = str(spec).strip().lower() + if not value: + raise ValueError("time period cannot be empty") + + def _seconds(multiplier: float) -> timedelta: + return timedelta(seconds=multiplier) + + if value.startswith("p") and value.endswith("d"): + try: + days = float(value[1:-1]) + except ValueError as exc: + raise ValueError(f"Invalid period '{spec}'") from exc + return timedelta(days=days) + if value.endswith("day") or value.endswith("days"): + try: + days = float(value.split("day")[0]) + except ValueError as exc: + raise ValueError(f"Invalid period '{spec}'") from exc + return timedelta(days=days) + unit_map = { + "d": 86400, + "h": 3600, + "m": 60, + "s": 1, + } + for suffix, seconds in unit_map.items(): + if value.endswith(suffix): + try: + magnitude = float(value[:-1]) + except ValueError as exc: + raise ValueError(f"Invalid period '{spec}'") from exc + return _seconds(magnitude * seconds) + try: + magnitude = float(value) + return _seconds(magnitude) + except ValueError as exc: + raise ValueError(f"Invalid period '{spec}'") from exc + + def _generate_time_series( + start_raw: str, + end_raw: str, + period_spec: str | None, + ) -> list[str]: + start_dt, start_has_time, start_use_z = _parse_iso_timestamp(start_raw) + end_dt, end_has_time, end_use_z = _parse_iso_timestamp(end_raw) + if start_dt > end_dt: + raise ValueError( + f"tile-time-start '{start_raw}' must be before or equal to tile-time-end '{end_raw}'" + ) + step = _parse_time_period(period_spec) + if step.total_seconds() <= 0: + raise ValueError("tile-time-period must be positive") + has_time = start_has_time or end_has_time + use_z = start_use_z or end_use_z + if not has_time: + start_dt = datetime.combine(start_dt.date(), datetime.min.time()) + end_dt = datetime.combine(end_dt.date(), datetime.min.time()) + values: list[str] = [] + current = start_dt + tolerance = timedelta(microseconds=500) + max_steps = 5000 + steps = 0 + while current <= end_dt + tolerance: + values.append(_format_iso_timestamp(current, has_time, use_z)) + current = current + step + steps += 1 + if steps > max_steps: + raise ValueError( + "Too many time steps generated; adjust tile-time-period or range." 
+ ) + final_value = _format_iso_timestamp(end_dt, has_time, use_z) + if final_value not in values: + values.append(final_value) + return values + + options: dict[str, Any] = { + "animate": ns.animate, + "probe_enabled": _coerce_bool(getattr(ns, "probe", True)), + } + if ns.width is not None: + options["width"] = ns.width + if ns.height is not None: + options["height"] = ns.height + if ns.texture: + options["texture"] = ns.texture + if ns.texture_pattern: + options["texture_pattern"] = ns.texture_pattern + if ns.frame_list: + options["frame_list"] = ns.frame_list + if ns.frame_cache: + options["frame_cache"] = ns.frame_cache + if getattr(ns, "date_format", None): + options["date_format"] = ns.date_format + if getattr(ns, "frame_duration", None) is not None: + options["frame_duration"] = ns.frame_duration + if getattr(ns, "show_controls", None) is not None: + options["show_controls"] = _coerce_bool(ns.show_controls) + if ns.title: + options["title"] = ns.title + if ns.description: + options["description"] = ns.description + if ns.probe_gradient: + options["probe_gradient"] = ns.probe_gradient + if ns.probe_lut: + options["probe_lut"] = ns.probe_lut + if options["probe_enabled"] and ns.probe_data: + options["probe_data"] = ns.probe_data + if ns.probe_units: + options["probe_units"] = ns.probe_units + if ns.probe_var: + options["probe_var"] = ns.probe_var + if getattr(ns, "video_source", None): + options["video_source"] = ns.video_source + if getattr(ns, "start", None): + options["video_start"] = ns.start + if getattr(ns, "end", None): + options["video_end"] = ns.end + if getattr(ns, "fps", None): + options["video_fps"] = ns.fps + if getattr(ns, "period_seconds", None) is not None: + options["period_seconds"] = ns.period_seconds + if getattr(ns, "frames_meta", None): + options["frames_meta"] = ns.frames_meta + legend = getattr(ns, "legend", None) + if legend: + options["legend"] = legend + if getattr(ns, "tile_url", None): + options["tile_url"] = ns.tile_url + if getattr(ns, "tile_type", None): + options["tile_type"] = ns.tile_type + if getattr(ns, "tile_scheme", None): + options["tile_scheme"] = ns.tile_scheme + if getattr(ns, "tile_min_level", None) is not None: + options["tile_min_level"] = ns.tile_min_level + if getattr(ns, "tile_max_level", None) is not None: + options["tile_max_level"] = ns.tile_max_level + if getattr(ns, "tile_credit", None): + options["tile_credit"] = ns.tile_credit + if getattr(ns, "tile_token", None): + options["tile_token"] = ns.tile_token + tile_param_entries = getattr(ns, "tile_param", None) + if tile_param_entries: + params: dict[str, str] = {} + for entry in tile_param_entries: + if "=" not in entry: + raise SystemExit(f"Invalid tile-param '{entry}', expected KEY=VALUE") + key, value = entry.split("=", 1) + key = key.strip() + if not key: + raise SystemExit(f"Invalid tile-param '{entry}', missing key") + params[key] = value.strip() + options["tile_params"] = params + if getattr(ns, "tile_time_key", None): + options["tile_time_key"] = ns.tile_time_key + tile_time_values = getattr(ns, "tile_time_values", None) + time_range_start = getattr(ns, "tile_time_start", None) + time_range_end = getattr(ns, "tile_time_end", None) + time_range_period = getattr(ns, "tile_time_period", None) + if (time_range_start and not time_range_end) or ( + time_range_end and not time_range_start + ): + raise SystemExit( + "tile-time-start and tile-time-end must both be provided together." 
+ ) + collected_time_values: list[str] = [] + if tile_time_values: + for value in tile_time_values: + value_str = str(value).strip() + if value_str: + collected_time_values.append(value_str) + if time_range_start and time_range_end: + try: + generated_values = _generate_time_series( + time_range_start, time_range_end, time_range_period + ) + except ValueError as exc: + raise SystemExit(str(exc)) from exc + collected_time_values.extend(generated_values) + options["tile_time_start"] = time_range_start + options["tile_time_end"] = time_range_end + if time_range_period: + options["tile_time_period"] = time_range_period + if collected_time_values: + dedup: dict[tuple[datetime, bool, bool], tuple[datetime, bool, bool]] = {} + for entry in collected_time_values: + dt, has_time, use_z = _parse_iso_timestamp(entry) + dedup[(dt, has_time, use_z)] = (dt, has_time, use_z) + sorted_entries = sorted(dedup.values(), key=lambda item: item[0]) + options["tile_time_values"] = [ + _format_iso_timestamp(dt, has_time, use_z) + for dt, has_time, use_z in sorted_entries + ] + shared_gradients = getattr(ns, "shared_gradient", None) + if isinstance(shared_gradients, dict) and shared_gradients: + options["shared_gradients"] = dict(shared_gradients) + if ns.time_key: + options["time_key"] = ns.time_key + if ns.time_format: + options["time_format"] = ns.time_format + if hasattr(ns, "lighting") and ns.lighting is not None: + options["lighting"] = _coerce_bool(ns.lighting) + if hasattr(ns, "auto_rotate") and ns.auto_rotate is not None: + options["auto_rotate"] = _coerce_bool(ns.auto_rotate) + if getattr(ns, "auto_rotate_speed", None) is not None: + options["auto_rotate_speed"] = ns.auto_rotate_speed + if ns.credential_file: + options["credential_file"] = ns.credential_file + if ns.auth: + options["auth"] = ns.auth + if getattr(ns, "verbose", False): + options["debug_overlay"] = True + creds_entries = getattr(ns, "credential", None) + if creds_entries: + try: + resolved = resolve_credentials( + creds_entries, + credential_file=getattr(ns, "credential_file", None), + namespace="visualize.globe", + ) + except CredentialResolutionError as exc: + raise SystemExit(str(exc)) from exc + options["credentials"] = dict(resolved.values) + options["credentials_masked"] = dict(resolved.masked) + return options + + +def _resolve_resource_option( + ns: Any, + attr: str, + label: str, + guards: list[object], +) -> None: + """Resolve packaged asset references (pkg:..., bare names) to filesystem paths.""" + + raw = getattr(ns, attr, None) + if not raw: + return + raw_str = str(raw) + if _is_remote_ref(raw_str): + setattr(ns, attr, _normalize_remote_ref(raw_str)) + return + resolved, guard = resolve_basemap_ref(raw) + if resolved is None: + raise SystemExit(f"{label} file not found: {raw}") + setattr(ns, attr, resolved) + if guard is not None: + guards.append(guard) + + +def _resolve_resource_options(ns: Any, guards: list[object]) -> None: + for attr, label in ( + ("texture", "Texture"), + ("legend", "Legend"), + ("probe_gradient", "Probe gradient"), + ("probe_lut", "Probe LUT"), + ("probe_data", "Probe data"), + ): + _resolve_resource_option(ns, attr, label, guards) + + +def _parse_shared_gradient_entry(entry: str) -> tuple[str, str]: + raw = str(entry).strip() + if not raw: + raise SystemExit("shared-gradient entry cannot be empty") + separators = ("=", "|", ":", ",") + name = None + value = None + for sep in separators: + if sep in raw: + name, value = raw.split(sep, 1) + break + if name is None or value is None: + raise SystemExit( + 
f"Invalid shared-gradient '{entry}'. Expected NAME=PATH or NAME|PATH." + ) + name = name.strip() + value = value.strip() + if not name: + raise SystemExit(f"Shared gradient missing name: '{entry}'") + if not value: + raise SystemExit(f"Shared gradient missing path: '{entry}'") + return name, value + + +def _resolve_shared_gradients(ns: Any, guards: list[object]) -> None: + raw_entries = getattr(ns, "shared_gradient", None) + if not raw_entries: + return + entries = [raw_entries] if isinstance(raw_entries, str) else list(raw_entries) + mapping: dict[str, str] = {} + for entry in entries: + name, raw_path = _parse_shared_gradient_entry(entry) + if _is_remote_ref(raw_path): + mapping[name] = _normalize_remote_ref(raw_path) + continue + resolved, guard = resolve_basemap_ref(raw_path) + if resolved is None: + raise SystemExit(f"Shared gradient file not found: {raw_path}") + mapping[name] = resolved + if guard is not None: + guards.append(guard) + ns.shared_gradient = mapping + + +def handle_globe(ns: Any) -> int: + """Handle ``visualize globe`` subcommand.""" + + if getattr(ns, "verbose", False): + os.environ["ZYRA_VERBOSITY"] = "debug" + elif getattr(ns, "quiet", False): + os.environ["ZYRA_VERBOSITY"] = "quiet" + if getattr(ns, "trace", False): + os.environ["ZYRA_SHELL_TRACE"] = "1" + + configure_logging_from_env() + + renderer_slugs = sorted(r.slug for r in available()) + if ns.target not in renderer_slugs: + raise SystemExit( + f"Unknown globe renderer '{ns.target}'. Available: {', '.join(renderer_slugs)}" + ) + + guards: list[object] = [] + try: + _resolve_resource_options(ns, guards) + _resolve_shared_gradients(ns, guards) + renderer = create(ns.target, **_renderer_options(ns)) + bundle = renderer.build(output_dir=Path(ns.output)) + finally: + for guard in guards: + with suppress(Exception): + guard.close() + + logging.info("Generated globe bundle at %s", bundle.index_html) + if bundle.assets: + logging.debug( + "Bundle assets: %s", + ", ".join( + str(path.relative_to(bundle.output_dir)) for path in bundle.assets + ), + ) + return 0 diff --git a/src/zyra/visualization/cli_register.py b/src/zyra/visualization/cli_register.py index 12d3411a..2dab214d 100644 --- a/src/zyra/visualization/cli_register.py +++ b/src/zyra/visualization/cli_register.py @@ -1,6 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations +import argparse from typing import Any # Import only lightweight CLI handler modules. Heavy managers are imported lazily @@ -8,6 +9,7 @@ from .cli_animate import handle_animate from .cli_compose_video import handle_compose_video from .cli_contour import handle_contour +from .cli_globe import handle_globe from .cli_heatmap import handle_heatmap from .cli_interactive import handle_interactive from .cli_timeseries import handle_timeseries @@ -352,6 +354,291 @@ def register_cli(subparsers: Any) -> None: ) p_cv.set_defaults(func=handle_compose_video) + # globe renderers (WebGL/Cesium) + from zyra.visualization import renderers as _globe_renderers + + renderer_slugs = sorted(r.slug for r in _globe_renderers.available()) + + p_glb = subparsers.add_parser( + "globe", + help="Interactive globe renderers (WebGL/Cesium)", + description=( + "Generate interactive globe bundles using modular renderers such as " + "webgl-sphere or cesium-globe." 
+ ), + ) + p_glb.add_argument( + "--target", + required=True, + choices=renderer_slugs, + help="Renderer backend to use (e.g., webgl-sphere, cesium-globe)", + ) + p_glb.add_argument( + "--output", + required=True, + help="Directory for the generated bundle (index.html + assets)", + ) + p_glb.add_argument("--texture", help="Primary texture image for the globe surface") + p_glb.add_argument( + "--pattern", + dest="texture_pattern", + help="Glob pattern for frame textures (animated sequences)", + ) + p_glb.add_argument( + "--texture-pattern", + dest="texture_pattern", + help="Glob pattern for frame textures (animated sequences)", + ) + p_glb.add_argument( + "--frame-list", + dest="frame_list", + help="Text file listing frame paths (optional timestamps)", + ) + p_glb.add_argument( + "--frame-cache", + dest="frame_cache", + help="Directory to stage frames extracted from remote sources", + ) + p_glb.add_argument( + "--video-source", + dest="video_source", + help="Video file or URI used to derive frame textures (supports Vimeo URIs)", + ) + p_glb.add_argument( + "--start", + dest="start", + help="ISO-8601 timestamp for the first frame of the video", + ) + p_glb.add_argument( + "--end", + dest="end", + help="ISO-8601 timestamp for the final frame (optional; defaults to start + duration)", + ) + p_glb.add_argument( + "--fps", + dest="fps", + type=float, + help="Sampling rate when extracting frames from video sources (frames per second)", + ) + p_glb.add_argument( + "--period-seconds", + dest="period_seconds", + type=float, + help="Override cadence between frames (seconds) for timeline metadata", + ) + p_glb.add_argument( + "--frames-meta", + dest="frames_meta", + help="Frames metadata JSON (from transform metadata/scan-frames)", + ) + p_glb.add_argument( + "--date-format", + dest="date_format", + help="strftime-style format used to parse timestamps from frame filenames", + ) + p_glb.add_argument( + "--frame-duration", + dest="frame_duration", + type=float, + help="Seconds per frame when animating sequences (default 0.25)", + ) + p_glb.add_argument( + "--show-controls", + dest="show_controls", + action=argparse.BooleanOptionalAction, + default=True, + help="Toggle playback controls overlay for animated sequences", + ) + p_glb.add_argument("--title", help="Optional overlay title for the globe viewer") + p_glb.add_argument( + "--description", + help="Optional descriptive text shown under the globe title", + ) + p_glb.add_argument("--width", type=int, help="Preferred viewport width in pixels") + p_glb.add_argument("--height", type=int, help="Preferred viewport height in pixels") + p_glb.add_argument( + "--animate", + choices=["none", "time"], + default="none", + help="Animation mode for multi-frame inputs", + ) + p_glb.add_argument( + "--auto-rotate", + action=argparse.BooleanOptionalAction, + default=False, + help="Enable continuous auto-rotation (default uses drag-to-rotate)", + ) + p_glb.add_argument( + "--auto-rotate-speed", + dest="auto_rotate_speed", + type=float, + help="Auto-rotation speed in degrees per second when enabled", + ) + p_glb.add_argument( + "--probe", + action=argparse.BooleanOptionalAction, + default=True, + help="Toggle probe UI in the generated viewer", + ) + p_glb.add_argument( + "--lighting", + action=argparse.BooleanOptionalAction, + default=False, + help="Enable lighting/shading for the globe (default off for unlit texture)", + ) + p_glb.add_argument( + "--probe-gradient", + dest="probe_gradient", + help="Gradient asset for imagery-backed probe decoding", + ) + 
p_glb.add_argument( + "--probe-lut", + dest="probe_lut", + help="Lookup table for categorical probe decoding", + ) + p_glb.add_argument( + "--probe-data", + dest="probe_data", + help="JSON/CSV probe dataset packaged with the bundle", + ) + p_glb.add_argument( + "--probe-var", + dest="probe_var", + help="Variable name used when sampling structured probe datasets", + ) + p_glb.add_argument( + "--probe-units", + dest="probe_units", + help="Units label rendered with probe values (e.g., Ma, °C)", + ) + p_glb.add_argument( + "--legend", + help="Legend image path (pkg:, local file, or http/https URL)", + ) + p_glb.add_argument( + "--tile-url", + dest="tile_url", + help=( + "ArcGIS MapServer/ImageServer endpoint or URL template providing tiled imagery " + "(overrides --texture when supplied)" + ), + ) + p_glb.add_argument( + "--tile-type", + dest="tile_type", + choices=["arcgis", "template"], + help="Provider type for --tile-url (default: arcgis)", + ) + p_glb.add_argument( + "--tile-scheme", + dest="tile_scheme", + choices=["webmercator", "geographic"], + help="Tiling scheme for --tile-url when using template sources (default: webmercator)", + ) + p_glb.add_argument( + "--tile-min-level", + dest="tile_min_level", + type=int, + help="Minimum level for tiled imagery providers", + ) + p_glb.add_argument( + "--tile-max-level", + dest="tile_max_level", + type=int, + help="Maximum level for tiled imagery providers", + ) + p_glb.add_argument( + "--tile-credit", + dest="tile_credit", + help="Attribution string shown for tiled imagery providers", + ) + p_glb.add_argument( + "--tile-token", + dest="tile_token", + help="Access token passed through to the tiled imagery provider", + ) + p_glb.add_argument( + "--tile-param", + dest="tile_param", + metavar="KEY=VALUE", + action="append", + help="Placeholder substitutions for template tile URLs (repeatable)", + ) + p_glb.add_argument( + "--tile-time-key", + dest="tile_time_key", + help="Placeholder key in the tile URL that should be driven by time (e.g., 'time')", + ) + p_glb.add_argument( + "--tile-time-value", + "--tile-time-values", + dest="tile_time_values", + action="append", + metavar="ISO_DATE", + help="ISO-8601 timestamps used to populate the time placeholder (repeatable)", + ) + p_glb.add_argument( + "--tile-time-start", + dest="tile_time_start", + help="Start of the time range (inclusive, ISO-8601)", + ) + p_glb.add_argument( + "--tile-time-end", + dest="tile_time_end", + help="End of the time range (inclusive, ISO-8601)", + ) + p_glb.add_argument( + "--tile-time-period", + dest="tile_time_period", + help="Step between time samples (e.g., '1d', '6h', '3600s'; default 1d)", + ) + p_glb.add_argument( + "--shared-gradient", + dest="shared_gradient", + action="append", + help=( + "Name-to-path mapping for reusable gradients (repeatable; format NAME=PATH " + "or NAME|PATH; supports local files, pkg: refs, or URLs)" + ), + ) + p_glb.add_argument( + "--time-key", + dest="time_key", + help="Metadata key that maps inputs to timestamps", + ) + p_glb.add_argument( + "--time-format", + dest="time_format", + help="Format string for rendering timestamps", + ) + p_glb.add_argument( + "--credential", + action="append", + metavar="KEY=VALUE", + help="Inline credential assignment (repeatable)", + ) + p_glb.add_argument( + "--credential-file", + dest="credential_file", + help="Credential file resolved by the shared helper", + ) + p_glb.add_argument( + "--auth", + help="Auth helper shorthands (e.g., bearer:token, basic:user:pass)", + ) + p_glb.add_argument( + "--verbose", 
action="store_true", help="Verbose logging for this command" + ) + p_glb.add_argument( + "--quiet", action="store_true", help="Quiet logging for this command" + ) + p_glb.add_argument( + "--trace", + action="store_true", + help="Shell-style trace of key steps and external commands", + ) + p_glb.set_defaults(func=handle_globe) + # interactive p_int = subparsers.add_parser( "interactive", help="Interactive visualizations (HTML)" diff --git a/src/zyra/visualization/renderers/__init__.py b/src/zyra/visualization/renderers/__init__.py new file mode 100644 index 00000000..59635f03 --- /dev/null +++ b/src/zyra/visualization/renderers/__init__.py @@ -0,0 +1,18 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Interactive renderer registry and stub implementations.""" + +from __future__ import annotations + +from . import cesium_globe as _cesium_globe # noqa: F401 +from . import webgl_sphere as _webgl_sphere # noqa: F401 +from .base import InteractiveBundle, InteractiveRenderer +from .registry import available, create, get, register + +__all__ = [ + "InteractiveBundle", + "InteractiveRenderer", + "available", + "create", + "get", + "register", +] diff --git a/src/zyra/visualization/renderers/base.py b/src/zyra/visualization/renderers/base.py new file mode 100644 index 00000000..99de01d0 --- /dev/null +++ b/src/zyra/visualization/renderers/base.py @@ -0,0 +1,42 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Base interfaces for interactive visualization renderers.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Sequence + + +@dataclass(slots=True) +class InteractiveBundle: + """Describes the output artifacts produced by an interactive renderer.""" + + output_dir: Path + index_html: Path + assets: Sequence[Path] = field(default_factory=tuple) + + +class InteractiveRenderer(ABC): + """Contract for interactive renderers that emit self-contained bundles.""" + + slug: str = "interactive" + description: str = "" + + def __init__(self, **options: Any) -> None: + self._options: dict[str, Any] = dict(options) + + def configure(self, **options: Any) -> None: + """Update renderer options prior to bundle generation.""" + + self._options.update(options) + + @abstractmethod + def build(self, *, output_dir: Path) -> InteractiveBundle: + """Generate the interactive bundle inside ``output_dir``.""" + + def describe(self) -> dict[str, Any]: + """Return metadata about the renderer for CLI help text.""" + + return {"slug": self.slug, "description": self.description} diff --git a/src/zyra/visualization/renderers/cesium_globe.py b/src/zyra/visualization/renderers/cesium_globe.py new file mode 100644 index 00000000..90df3462 --- /dev/null +++ b/src/zyra/visualization/renderers/cesium_globe.py @@ -0,0 +1,2066 @@ +# SPDX-License-Identifier: Apache-2.0 +"""CesiumJS-based interactive globe renderer. + +The generated bundle references Cesium assets via jsDelivr CDN. Future +iterations can add an option to vendor or pin a local copy if offline support +is required. 
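+
+A minimal usage sketch (illustrative; assumes the renderer is constructed with
+the same keyword options the ``visualize globe`` CLI collects)::
+
+    from pathlib import Path
+
+    renderer = CesiumGlobeRenderer(texture="earth.jpg", animate="none")
+    bundle = renderer.build(output_dir=Path("out/globe_cesium"))
+    print(bundle.index_html)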
+""" + +from __future__ import annotations + +import json +import logging +import os +from datetime import datetime, timedelta, timezone +from pathlib import Path +from textwrap import dedent + +from zyra.utils.date_manager import DateManager + +from .base import InteractiveBundle, InteractiveRenderer +from .frame_utils import finalize_frame_entries, load_manifest_entries +from .probe_utils import ProbeDatasetError, prepare_probe_dataset_file +from .registry import register +from .video_utils import ( + VideoExtractionError, + compute_end_time, + compute_frame_timestamps, + extract_frames, + format_datetime, + parse_datetime, + probe_video_metadata, + resolve_video_source, +) + +LOGGER = logging.getLogger(__name__) + + +def _format_display_timestamp(dt: datetime) -> str: + """Render a display-friendly UTC label for timeline timestamps.""" + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + else: + dt = dt.astimezone(timezone.utc) + return dt.strftime("%Y-%m-%d %H:%M:%S UTC") + + +def _coerce_utc(dt: datetime | None) -> datetime | None: + if dt is None: + return None + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt.astimezone(timezone.utc) + + +def _is_remote_ref(value: object) -> bool: + if not isinstance(value, str): + return False + lower = value.lower() + return lower.startswith(("http://", "https://", "http:/", "https:/")) + + +def _normalize_remote_ref(value: str) -> str: + lower = value.lower() + if lower.startswith(("http://", "https://")): + return value + if lower.startswith(("http:/", "https:/")): + scheme, rest = value.split(":/", 1) + return f"{scheme}://{rest.lstrip('/')}" + return value + + +def _video_elapsed_seconds(start, timestamp: str) -> float: + try: + end = parse_datetime(timestamp) + except Exception: + return 0.0 + delta = end - start + return max(delta.total_seconds(), 0.0) + + +@register +class CesiumGlobeRenderer(InteractiveRenderer): + slug = "cesium-globe" + description = "CesiumJS globe renderer that emits a standalone bundle." 
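+
+    # Instances are normally obtained through the renderer registry (via the
+    # @register decorator above); the keyword options mirror the CLI flags
+    # wired to handle_globe (texture, probe_data, tile_url, video_source, ...).
+    # Illustrative note only; see _stage_assets/_sanitized_config for the set
+    # of options actually consumed.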
+ + def __init__(self, **options: object) -> None: + super().__init__(**options) + self._video_entries: list[dict[str, object]] | None = None + + def build(self, *, output_dir: Path) -> InteractiveBundle: + output_dir = Path(output_dir) + output_dir.mkdir(parents=True, exist_ok=True) + + assets_dir = output_dir / "assets" + assets_dir.mkdir(parents=True, exist_ok=True) + + index_html = output_dir / "index.html" + script_path = assets_dir / "cesium.js" + config_path = assets_dir / "config.json" + + asset_overrides, asset_files = self._stage_assets(assets_dir) + + config = self._sanitized_config(overrides=asset_overrides) + config_json = json.dumps(config, indent=2) + config_path.write_text(config_json + "\n", encoding="utf-8") + + credentials = self._credential_payload() + + index_html.write_text( + self._render_index_html(config, credentials), encoding="utf-8" + ) + script_path.write_text(self._render_script(), encoding="utf-8") + + return InteractiveBundle( + output_dir=output_dir, + index_html=index_html, + assets=(script_path, config_path, *asset_files), + ) + + def _stage_assets( + self, assets_dir: Path + ) -> tuple[dict[str, object], tuple[Path, ...]]: + """Copy optional assets (gradients, LUTs, textures) into bundle.""" + + staged: list[Path] = [] + overrides: dict[str, object] = {} + + gradients_dir = assets_dir / "gradients" + textures_dir = assets_dir / "textures" + data_dir = assets_dir / "data" + legends_dir = assets_dir / "legends" + + video_meta = self._maybe_generate_video_frames(assets_dir) + + texture = self._options.get("texture") + if texture: + if _is_remote_ref(texture): + overrides["texture"] = _normalize_remote_ref(str(texture)) + else: + staged.append( + self._copy_asset(Path(texture), textures_dir, overrides, "texture") + ) + + probe_gradient = self._options.get("probe_gradient") + if probe_gradient: + if _is_remote_ref(probe_gradient): + overrides["probe_gradient"] = _normalize_remote_ref(str(probe_gradient)) + else: + staged.append( + self._copy_asset( + Path(probe_gradient), gradients_dir, overrides, "probe_gradient" + ) + ) + + probe_lut = self._options.get("probe_lut") + if probe_lut: + if _is_remote_ref(probe_lut): + overrides["probe_lut"] = _normalize_remote_ref(str(probe_lut)) + else: + staged.append( + self._copy_asset( + Path(probe_lut), gradients_dir, overrides, "probe_lut" + ) + ) + + probe_data = self._options.get("probe_data") + if probe_data: + if _is_remote_ref(probe_data): + overrides["probe_data"] = _normalize_remote_ref(str(probe_data)) + else: + converted = self._try_convert_probe_dataset(data_dir, Path(probe_data)) + if converted is not None: + dest_path, meta = converted + rel_path = Path("assets") / "data" / dest_path.name + overrides["probe_data"] = str(rel_path) + staged.append(dest_path) + if ( + meta.get("units") + and "probe_units" not in overrides + and "probe_units" not in self._options + ): + overrides["probe_units"] = meta["units"] + else: + staged.append( + self._copy_asset( + Path(probe_data), data_dir, overrides, "probe_data" + ) + ) + + legend_source = self._options.get("legend") + if legend_source is None: + legend_source = self._options.get("legend_texture") + if legend_source: + if _is_remote_ref(legend_source): + overrides["legend"] = _normalize_remote_ref(str(legend_source)) + else: + staged.append( + self._copy_asset( + Path(legend_source), legends_dir, overrides, "legend" + ) + ) + overrides.pop("legend_texture", None) + + frame_entries = self._collect_frames() + if frame_entries: + frames_dir = textures_dir + 
frames_dir.mkdir(parents=True, exist_ok=True) + staged_paths: list[Path] = [] + manifest: list[dict[str, object]] = [] + for entry in frame_entries: + raw_path = str(entry["path"]) + timestamp = entry.get("timestamp") + display_ts = entry.get("display_timestamp") + metadata = ( + entry.get("metadata") + if isinstance(entry.get("metadata"), dict) + else None + ) + label = entry.get("label") + if _is_remote_ref(raw_path): + manifest_entry: dict[str, object] = { + "path": _normalize_remote_ref(raw_path) + } + if timestamp: + manifest_entry["timestamp"] = timestamp + if display_ts: + manifest_entry["display_timestamp"] = display_ts + if label: + manifest_entry["label"] = label + if metadata: + manifest_entry["metadata"] = metadata + manifest.append(manifest_entry) + continue + src = Path(raw_path).expanduser() + if not src.is_file(): + msg = f"Frame file not found: {src}" + raise FileNotFoundError(msg) + dest = frames_dir / src.name + if src.resolve() != dest.resolve(): + dest.write_bytes(src.read_bytes()) + staged_paths.append(dest) + manifest_entry = {"path": f"assets/textures/{src.name}"} + if timestamp: + manifest_entry["timestamp"] = timestamp + if display_ts: + manifest_entry["display_timestamp"] = display_ts + if label: + manifest_entry["label"] = label + if metadata: + manifest_entry["metadata"] = metadata + manifest.append(manifest_entry) + overrides["frames"] = manifest + overrides.setdefault("texture", manifest[0]["path"]) + staged.extend(staged_paths) + + tile_url = self._options.get("tile_url") + if isinstance(tile_url, str) and _is_remote_ref(tile_url): + overrides["tile_url"] = _normalize_remote_ref(tile_url) + + shared_gradients = self._options.get("shared_gradients") + if isinstance(shared_gradients, dict) and shared_gradients: + shared_dir = gradients_dir / "shared" + shared_overrides: dict[str, str] = {} + for name, raw_value in shared_gradients.items(): + key = str(name).strip() + if not key: + continue + if _is_remote_ref(raw_value): + shared_overrides[key] = _normalize_remote_ref(str(raw_value)) + continue + src = Path(str(raw_value)).expanduser() + if not src.is_file(): + msg = f"Shared gradient '{key}' file not found: {src}" + raise FileNotFoundError(msg) + shared_dir.mkdir(parents=True, exist_ok=True) + dest = shared_dir / src.name + if src.resolve() != dest.resolve(): + dest.write_bytes(src.read_bytes()) + staged.append(dest) + shared_overrides[key] = f"assets/gradients/shared/{dest.name}" + if shared_overrides: + overrides["shared_gradients"] = shared_overrides + + if video_meta: + overrides.update(video_meta) + + return overrides, tuple(staged) + + def _maybe_generate_video_frames(self, assets_dir: Path) -> dict[str, object]: + video_source = self._options.get("video_source") + if not video_source: + self._video_entries = None + return {} + entries, meta = self._extract_video_frames(assets_dir) + self._video_entries = entries + for key, value in (meta or {}).items(): + if value is None: + continue + if key == "frame_duration" and self._options.get("frame_duration") not in ( + None, + 0, + "", + ): + continue + self._options[key] = value + return meta + + def _load_timeline_overrides( + self, + ) -> tuple[datetime | None, float | None, str | None]: + """Return optional (start, period, source) overrides for frame timelines.""" + + start_override: datetime | None = None + period_override: float | None = None + source: str | None = None + + frames_meta_path = self._options.get("frames_meta") + if frames_meta_path: + try: + meta_path = Path(frames_meta_path).expanduser() 
+ data = json.loads(meta_path.read_text(encoding="utf-8")) + except Exception as exc: # pragma: no cover - filesystem dependent + LOGGER.warning( + "Failed to load frames metadata '%s': %s", frames_meta_path, exc + ) + else: + raw_start = data.get("start_datetime") or data.get("start") + raw_period = ( + data.get("period_seconds") + or data.get("cadence_seconds") + or data.get("interval_seconds") + ) + if raw_start: + try: + start_override = _coerce_utc(parse_datetime(str(raw_start))) + except Exception as exc: # pragma: no cover + LOGGER.warning( + "Invalid start_datetime in frames metadata '%s': %s", + frames_meta_path, + exc, + ) + if raw_period not in (None, ""): + try: + period_override = float(raw_period) + except (TypeError, ValueError) as exc: # pragma: no cover + LOGGER.warning( + "Invalid period_seconds in frames metadata '%s': %s", + frames_meta_path, + exc, + ) + source = "frames-meta" + + period_option = self._options.get("period_seconds") + if period_option not in (None, ""): + try: + period_override = float(period_option) + source = "period-seconds" + except (TypeError, ValueError) as exc: # pragma: no cover + LOGGER.warning( + "Ignoring invalid period_seconds override '%s': %s", + period_option, + exc, + ) + + return start_override, period_override, source + + def _apply_timeline_overrides( + self, + entries: list[dict[str, object]], + *, + default_start: datetime, + overrides: tuple[datetime | None, float | None, str | None], + ) -> dict[str, object]: + """Adjust frame timestamps/metadata based on cadence overrides.""" + + start_override, period_override, source = overrides + start_override = _coerce_utc(start_override) + default_start = _coerce_utc(default_start) + updates: dict[str, object] = {} + + if start_override is None and period_override in (None, 0): + return updates + + base_start = start_override or default_start + base_start = _coerce_utc(base_start) + + if ( + period_override is not None + and period_override > 0 + and base_start is not None + and entries + ): + step = timedelta(seconds=float(period_override)) + for idx, entry in enumerate(entries): + ts = base_start + step * idx + entry["timestamp"] = format_datetime(ts) + entry["display_timestamp"] = _format_display_timestamp(ts) + meta = entry.setdefault("metadata", {}) + meta["elapsed_seconds"] = float((ts - base_start).total_seconds()) + updates["video_start"] = format_datetime(base_start) + updates["video_end"] = format_datetime( + base_start + step * max(len(entries) - 1, 0) + ) + updates["timeline_period_seconds"] = step.total_seconds() + if source: + updates["timeline_source"] = source + return updates + + if start_override is not None and entries: + first_raw = entries[0].get("timestamp") + last_raw = entries[-1].get("timestamp") + try: + first_dt = ( + _coerce_utc(parse_datetime(str(first_raw))) + if first_raw + else default_start + ) + except Exception: + first_dt = default_start + try: + last_dt = ( + _coerce_utc(parse_datetime(str(last_raw))) + if last_raw + else default_start + ) + except Exception: + last_dt = default_start + + if first_dt is None or last_dt is None: + return updates + + delta = start_override - first_dt + for entry in entries: + raw = entry.get("timestamp") + if not raw: + continue + try: + ts = _coerce_utc(parse_datetime(str(raw))) + delta + except Exception: + continue + entry["timestamp"] = format_datetime(ts) + entry["display_timestamp"] = _format_display_timestamp(ts) + meta = entry.setdefault("metadata", {}) + meta["elapsed_seconds"] = float((ts - 
start_override).total_seconds()) + updates["video_start"] = format_datetime(start_override) + updates["video_end"] = format_datetime(last_dt + delta) + if source and "timeline_source" not in updates: + updates["timeline_source"] = source + return updates + + def _extract_video_frames( + self, assets_dir: Path + ) -> tuple[list[dict[str, object]], dict[str, object]]: + video_source = str(self._options.get("video_source")) + credentials = self._options.get("credentials") or {} + frame_cache_option = self._options.get("frame_cache") + if frame_cache_option: + frame_cache = Path(frame_cache_option) + if not frame_cache.is_absolute(): + frame_cache = Path.cwd() / frame_cache + else: + frame_cache = assets_dir / "_video_cache" + frame_cache.mkdir(parents=True, exist_ok=True) + + try: + video_url = resolve_video_source(video_source, credentials) + except Exception as exc: # pragma: no cover - Vimeo/network dependent + raise VideoExtractionError(str(exc)) from exc + + fps = float(self._options.get("video_fps") or 1.0) + metadata = probe_video_metadata(video_url) + + overrides = self._load_timeline_overrides() + start_override, period_override, source = overrides + + start_value = self._options.get("video_start") + if start_value: + start_dt = parse_datetime(str(start_value)) + elif start_override: + start_dt = start_override + self._options["video_start"] = format_datetime(start_dt) + else: + raise VideoExtractionError( + "--video-start is required when extracting frames from a video source." + ) + + end_value = self._options.get("video_end") + frames = extract_frames(video_url, output_dir=frame_cache, fps=fps) + + if end_value: + end_dt = parse_datetime(str(end_value)) + else: + end_dt = compute_end_time(start_dt, len(frames), fps) + + entries = compute_frame_timestamps( + frames=frames, + start_time=start_dt, + fps=fps, + ) + + for entry in entries: + entry_metadata = entry.setdefault("metadata", {}) + entry_metadata["elapsed_seconds"] = _video_elapsed_seconds( + start_dt, entry["timestamp"] + ) + + frame_duration = 1.0 / fps if fps > 0 else None + meta_payload = { + "video_start": format_datetime(start_dt), + "video_end": format_datetime(end_dt), + "video_duration_seconds": metadata.duration_seconds, + "video_fps": fps, + "frame_duration": frame_duration, + } + timeline_updates = self._apply_timeline_overrides( + entries, default_start=start_dt, overrides=overrides + ) + if timeline_updates: + meta_payload.update(timeline_updates) + return entries, meta_payload + + def _collect_frames(self) -> list[dict[str, object]]: + pattern = self._options.get("texture_pattern") + frame_list = self._options.get("frame_list") + date_format = self._options.get("date_format") + time_key = self._options.get("time_key") + time_format = self._options.get("time_format") + + parse_formats: list[str] = [] + if date_format: + parse_formats.append(str(date_format)) + if time_format and time_format not in parse_formats: + parse_formats.append(str(time_format)) + + try: + filename_date_manager: DateManager | None = ( + DateManager([date_format]) if date_format else DateManager([]) + ) + except Exception: + filename_date_manager = None + + entries: list[dict[str, object]] = [] + if self._video_entries: + entries.extend(self._video_entries) + if pattern: + base = Path(pattern) + for path in sorted(base.parent.glob(base.name)): + payload: dict[str, object] = {"path": str(path)} + timestamp = self._infer_frame_timestamp( + path.name, filename_date_manager + ) + if timestamp: + payload["timestamp"] = timestamp + 
+                entries.append(payload)
+
+        if frame_list:
+            frame_file = Path(frame_list)
+            if not frame_file.is_file():
+                msg = f"Frame list file not found: {frame_file}"
+                raise FileNotFoundError(msg)
+
+            text = frame_file.read_text(encoding="utf-8")
+            manifest_entries = load_manifest_entries(text)
+            if manifest_entries is not None:
+                for item in manifest_entries:
+                    path_value = item.get("path")
+                    if isinstance(path_value, str) and not _is_remote_ref(path_value):
+                        path_obj = Path(path_value)
+                        if not path_obj.is_absolute():
+                            path_obj = (frame_file.parent / path_obj).resolve()
+                        item["path"] = str(path_obj)
+                entries.extend(manifest_entries)
+            else:
+                base_dir = frame_file.parent
+                for line in text.splitlines():
+                    line = line.strip()
+                    if not line or line.startswith("#"):
+                        continue
+                    parts = line.split()
+                    raw_path = parts[0]
+                    if not _is_remote_ref(raw_path):
+                        path_obj = Path(raw_path)
+                        if not path_obj.is_absolute():
+                            raw_path = str((base_dir / path_obj).resolve())
+                    payload: dict[str, object] = {"path": raw_path}
+                    if len(parts) > 1:
+                        payload["timestamp"] = " ".join(parts[1:])
+                    elif filename_date_manager:
+                        timestamp = self._infer_frame_timestamp(
+                            parts[0], filename_date_manager
+                        )
+                        if timestamp:
+                            payload["timestamp"] = timestamp
+                    entries.append(payload)
+
+        finalized = finalize_frame_entries(
+            entries,
+            time_key=time_key,
+            parse_formats=parse_formats,
+            display_format=time_format,
+        )
+
+        seen: set[tuple[str, str | None]] = set()
+        unique_entries: list[dict[str, object]] = []
+        for entry in finalized:
+            key = (entry["path"], entry.get("timestamp"))
+            if key in seen:
+                continue
+            seen.add(key)
+            unique_entries.append(entry)
+        return unique_entries
+
+    @staticmethod
+    def _infer_frame_timestamp(
+        filename: str, date_manager: DateManager | None
+    ) -> str | None:
+        if not date_manager:
+            return None
+        try:
+            extracted = date_manager.extract_date_time(filename)
+        except Exception:
+            return None
+        return extracted
+
+    def _try_convert_probe_dataset(
+        self, data_dir: Path, source: Path
+    ) -> tuple[Path, dict[str, object]] | None:
+        try:
+            dest, metadata = prepare_probe_dataset_file(
+                source,
+                data_dir,
+                variable=self._options.get("probe_var"),
+            )
+        except ProbeDatasetError:
+            return None
+        return dest, metadata
+
+    def _copy_asset(
+        self,
+        source: Path,
+        target_dir: Path,
+        overrides: dict[str, object],
+        key: str,
+    ) -> Path:
+        source = source.expanduser()
+        if not source.is_file():
+            msg = f"{key.replace('_', ' ').capitalize()} file not found: {source}"
+            raise FileNotFoundError(msg)
+        target_dir.mkdir(parents=True, exist_ok=True)
+        dest = target_dir / source.name
+        if source.resolve() != dest.resolve():
+            dest.write_bytes(source.read_bytes())
+        rel_dir_map = {
+            "probe_gradient": "gradients",
+            "probe_lut": "gradients",
+            "probe_data": "data",
+            "legend": "legends",
+        }
+        rel_dir = rel_dir_map.get(key, "textures")
+        overrides[key] = f"assets/{rel_dir}/{source.name}"
+        return dest
+
+    def _sanitized_config(
+        self, *, overrides: dict[str, object] | None = None
+    ) -> dict[str, object]:
+        """Return a Cesium config with sensitive keys removed."""
+
+        secrets = {
+            "credentials",
+            "auth",
+            "credential_file",
+            "cesium_ion_token",
+            "credentials_masked",
+            "texture",
+            "probe_gradient",
+            "probe_lut",
+            "probe_data",
+            "probe_var",
+            "tile_token",
+            "texture_pattern",
+            "frame_list",
+            "frame_cache",
+            "show_controls",
+            "shared_gradients",
+            "video_source",
+        }
+        filtered = {
+            key: value
+            for key, value in self._options.items()
+            if key not in secrets and value is not None
+        }
+        filtered.setdefault("width", None)
+        filtered.setdefault("height", None)
+        filtered.setdefault("animate", "none")
+        filtered.setdefault("probe_enabled", True)
+        filtered.setdefault("probe_height", False)
+        filtered.setdefault("auto_rotate", False)
+        filtered.setdefault("auto_rotate_speed", None)
+        filtered.setdefault("terrain", "ellipsoid")
+        filtered.setdefault("frame_duration", None)
+        if overrides:
+            filtered.update(overrides)
+        return filtered
+
+    def _credential_payload(self) -> dict[str, str]:
+        """Return credentials to expose to the front-end bundle."""
+
+        payload: dict[str, str] = {}
+        credentials = self._options.get("credentials")
+        if isinstance(credentials, dict):
+            token = credentials.get("cesium_ion_token") or credentials.get(
+                "cesium_ion_default_access_token"
+            )
+            tile_token = credentials.get("tile_token") or credentials.get("tileToken")
+        else:
+            token = None
+            tile_token = None
+
+        token = token or self._options.get("cesium_ion_token")
+        if not token:
+            token = os.environ.get("CESIUM_ION_TOKEN")
+        if token:
+            payload["cesiumIonDefaultAccessToken"] = token
+        tile_token = tile_token or self._options.get("tile_token")
+        if tile_token:
+            payload["tileToken"] = tile_token
+        return payload
+
+    def _render_index_html(
+        self, config: dict[str, object], credentials: dict[str, str]
+    ) -> str:
+        config_json = json.dumps(config, indent=2)
+        creds_json = json.dumps(credentials, indent=2)
+        html = f"""
+<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="utf-8" />
+  <meta name="viewport" content="width=device-width, initial-scale=1" />
+  <title>Zyra Cesium Globe</title>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/cesium/Build/Cesium/Widgets/widgets.css" />
+  <script src="https://cdn.jsdelivr.net/npm/cesium/Build/Cesium/Cesium.js"></script>
+</head>
+<body>
+  <div id="zyra-cesium"></div>
+  <div id="zyra-overlay">
+    <div data-overlay-title></div>
+    <div data-overlay-description></div>
+    <div data-overlay-time></div>
+    <div data-overlay-meta></div>
+    <div id="zyra-probe">
+      <div>Probe</div>
+      <div>Latitude <span data-probe-lat></span></div>
+      <div>Longitude <span data-probe-lon></span></div>
+      <div data-probe-height-row>Height <span data-probe-height></span></div>
+      <div>Value <span data-probe-value></span></div>
+      <div>Units <span data-probe-units></span></div>
+      <div>Gradient <span data-probe-gradient></span></div>
+      <div>LUT <span data-probe-lut></span></div>
+    </div>
+    <div data-legend></div>
+  </div>
+  <script>
+    window.ZYRA_GLOBE_CONFIG = {config_json};
+    window.ZYRA_GLOBE_CREDENTIALS = {creds_json};
+  </script>
+  <script src="assets/cesium.js"></script>
+</body>
+</html>
+ + + + + + """ + return dedent(html).strip() + "\n" + + def _render_script(self) -> str: + return ( + dedent( + """ +(async function () { + const config = window.ZYRA_GLOBE_CONFIG || {}; + const container = document.getElementById("zyra-cesium"); + const overlay = document.getElementById("zyra-overlay"); + + const formatDisplayTime = (value) => { + if (value == null) { + return ""; + } + const raw = String(value).trim(); + if (!raw) { + return ""; + } + let candidate = raw; + if (!candidate.includes("T")) { + candidate = `${candidate}T00:00:00`; + } + if (!/[zZ]$/.test(candidate) && !/[+-]\\d{2}:?\\d{2}$/.test(candidate)) { + candidate = `${candidate}Z`; + } + const date = new Date(candidate); + if (Number.isNaN(date.getTime())) { + return raw; + } + return date.toISOString().replace("T", " ").replace("Z", " UTC"); + }; + + const rawFrameList = Array.isArray(config.frames) ? config.frames : null; + const frameDurationSeconds = Math.max(Number(config.frame_duration) || 3600, 0.1); + const normalizedFrames = []; + if (rawFrameList && rawFrameList.length) { + let fallbackEpoch = Date.UTC(2000, 0, 1); + rawFrameList.forEach((entry) => { + if (!entry || typeof entry.path !== "string") { + return; + } + const path = entry.path; + const rawTimestamp = entry.timestamp; + const providedDisplay = + typeof entry.display_timestamp === "string" && entry.display_timestamp.trim() + ? entry.display_timestamp.trim() + : typeof entry.display === "string" && entry.display.trim() + ? entry.display.trim() + : null; + const label = + typeof entry.label === "string" && entry.label.trim() ? entry.label.trim() : null; + const metadata = + entry.metadata && typeof entry.metadata === "object" ? { ...entry.metadata } : null; + + let iso = null; + if (typeof rawTimestamp === "number" && Number.isFinite(rawTimestamp)) { + const seconds = rawTimestamp > 1e12 ? 
rawTimestamp / 1000 : rawTimestamp; + iso = new Date(seconds * 1000).toISOString(); + } else if (typeof rawTimestamp === "string" && rawTimestamp.trim()) { + let candidate = rawTimestamp.trim(); + if (!candidate.includes("T")) { + candidate = `${candidate}T00:00:00`; + } + if (!/[zZ]$/.test(candidate) && !/[+-]\\d{2}:?\\d{2}$/.test(candidate)) { + candidate = `${candidate}Z`; + } + const date = new Date(candidate); + if (!Number.isNaN(date.getTime())) { + iso = date.toISOString(); + } + } else if (rawTimestamp != null) { + const date = new Date(rawTimestamp); + if (!Number.isNaN(date.getTime())) { + iso = date.toISOString(); + } + } + + if (!iso) { + const date = new Date(fallbackEpoch); + fallbackEpoch += frameDurationSeconds * 1000; + iso = date.toISOString(); + } + + let display = providedDisplay || label || null; + if (!display) { + display = formatDisplayTime(iso); + } + + const frameRecord = { path, iso, display }; + if (metadata) { + frameRecord.metadata = metadata; + } + if (label) { + frameRecord.label = label; + } + normalizedFrames.push(frameRecord); + }); + } + const hasFrameStack = normalizedFrames.length > 0; + + if (!window.Cesium) { + overlay.innerHTML = "Cesium failed to load."; + return; + } + + const credentials = window.ZYRA_GLOBE_CREDENTIALS || {}; + if (credentials.cesiumIonDefaultAccessToken) { + Cesium.Ion.defaultAccessToken = credentials.cesiumIonDefaultAccessToken; + } + const tileToken = credentials.tileToken || null; + + let frameEntries = null; + if (hasFrameStack) { + frameEntries = normalizedFrames + .map((entry, index) => { + try { + return { + ...entry, + index, + julian: Cesium.JulianDate.fromIso8601(entry.iso), + stop: null, + }; + } catch (error) { + console.warn( + "Cesium globe: failed to parse frame timestamp", + entry.iso, + error, + ); + return null; + } + }) + .filter((entry) => entry && entry.julian); + frameEntries.sort((a, b) => Cesium.JulianDate.compare(a.julian, b.julian)); + if (!frameEntries.length) { + frameEntries = null; + } + } + const frameStackActive = Boolean(frameEntries && frameEntries.length); + let frameStepSeconds = frameDurationSeconds; + + if (frameEntries) { + if (frameEntries.length > 1) { + const deltaSeconds = Cesium.JulianDate.secondsDifference( + frameEntries[1].julian, + frameEntries[0].julian, + ); + if (Number.isFinite(deltaSeconds) && deltaSeconds > 0) { + frameStepSeconds = deltaSeconds; + } + } + for (let i = 0; i < frameEntries.length; i += 1) { + const entry = frameEntries[i]; + const next = frameEntries[i + 1]; + if ( + next && + Cesium.JulianDate.greaterThan(next.julian, entry.julian) + ) { + entry.stop = Cesium.JulianDate.clone(next.julian); + } else { + entry.stop = Cesium.JulianDate.addSeconds( + entry.julian, + frameStepSeconds, + new Cesium.JulianDate(), + ); + } + } + } + + const latEl = document.querySelector("[data-probe-lat]"); + const lonEl = document.querySelector("[data-probe-lon]"); + const heightEl = document.querySelector("[data-probe-height]"); + const heightRow = document.querySelector("[data-probe-height-row]"); + const gradientEl = document.querySelector("[data-probe-gradient]"); + const lutEl = document.querySelector("[data-probe-lut]"); + const valueEl = document.querySelector("[data-probe-value]"); + const unitsEl = document.querySelector("[data-probe-units]"); + const legendEl = document.querySelector("[data-legend]"); + const titleEl = document.querySelector("[data-overlay-title]"); + const descriptionEl = document.querySelector("[data-overlay-description]"); + const timeEl = 
document.querySelector("[data-overlay-time]"); + const metaEl = document.querySelector("[data-overlay-meta]"); + const probeContainer = document.getElementById("zyra-probe"); + + const toFiniteNumber = (value) => { + const num = Number(value); + return Number.isFinite(num) ? num : undefined; + }; + + const tileUrl = + frameStackActive + ? "" + : typeof config.tile_url === "string" + ? config.tile_url.trim() + : ""; + const tileTypeRaw = config.tile_type; + const tileType = + typeof tileTypeRaw === "string" && tileTypeRaw + ? tileTypeRaw.toLowerCase() + : "arcgis"; + const tileCredit = + typeof config.tile_credit === "string" ? config.tile_credit : undefined; + const creditObject = + tileCredit && Cesium.Credit ? new Cesium.Credit(tileCredit, true) : undefined; + let tileMinLevel = toFiniteNumber( + config.tile_min_level ?? config.tile_minimum_level, + ); + let tileMaxLevel = toFiniteNumber( + config.tile_max_level ?? config.tile_maximum_level, + ); + if ( + tileMinLevel != null && + tileMaxLevel != null && + Number.isFinite(tileMinLevel) && + Number.isFinite(tileMaxLevel) && + tileMaxLevel < tileMinLevel + ) { + console.warn( + `Cesium globe: tile_max_level (${tileMaxLevel}) is below tile_min_level (${tileMinLevel}); swapping values`, + ); + const tmp = tileMinLevel; + tileMinLevel = tileMaxLevel; + tileMaxLevel = tmp; + } + let tileParams = + config.tile_params && typeof config.tile_params === "object" + ? { ...config.tile_params } + : null; + const tileTimeKeyRaw = config.tile_time_key; + const tileTimeKey = + typeof tileTimeKeyRaw === "string" && tileTimeKeyRaw.trim().length + ? tileTimeKeyRaw.trim() + : null; + const tileTimeValues = Array.isArray(config.tile_time_values) + ? config.tile_time_values + .map((value) => + typeof value === "string" ? value.trim() : value, + ) + .filter((value) => typeof value === "string" && value.length > 0) + : null; + if (tileParams && tileTimeKey) { + delete tileParams[tileTimeKey]; + } + const tileTimeMultiplier = toFiniteNumber(config.tile_time_multiplier) || 3600; + const tileSchemeRaw = + config.tile_scheme ?? config.tile_tiling_scheme ?? null; + const tileSchemeMode = + typeof tileSchemeRaw === "string" ? 
tileSchemeRaw.toLowerCase() : null; + const hasDynamicTime = + tileType === "template" && + tileTimeKey && + tileTimeValues && + tileTimeValues.length > 0; + let dynamicLayerActive = false; + const probeHeightRaw = config.probe_height; + const probeHeightEnabled = !( + probeHeightRaw === false || + probeHeightRaw === "false" || + probeHeightRaw === "False" || + probeHeightRaw === 0 || + probeHeightRaw === "0" + ); + if (!probeHeightEnabled && heightRow) { + heightRow.style.display = "none"; + } + + const width = config.width || window.innerWidth; + const height = config.height || window.innerHeight; + container.style.width = `${width}px`; + container.style.height = `${height}px`; + + const animate = config.animate === "time" || frameStackActive; + + let terrainProvider = undefined; + const terrainMode = (config.terrain || "ellipsoid").toString().toLowerCase(); + const wantsTerrain = !["ellipsoid", "none", "false", "0"].includes(terrainMode); + if (wantsTerrain && Cesium.createWorldTerrainAsync) { + try { + terrainProvider = await Cesium.createWorldTerrainAsync(); + } catch (error) { + console.warn("Cesium globe: failed to load world terrain, using ellipsoid", error); + terrainProvider = undefined; + } + } + + const viewer = new Cesium.Viewer(container, { + animation: animate, + timeline: animate, + baseLayerPicker: false, + geocoder: false, + homeButton: false, + sceneModePicker: false, + navigationHelpButton: false, + infoBox: false, + terrainProvider, + }); + + if (terrainProvider && viewer.scene?.globe) { + viewer.scene.globe.depthTestAgainstTerrain = true; + } + + const enableLighting = config.lighting == null ? true : Boolean(config.lighting); + viewer.scene.globe.enableLighting = enableLighting; + viewer.scene.skyAtmosphere.show = enableLighting; + viewer.scene.skyBox = enableLighting ? 
new Cesium.SkyBox({ show: true }) : undefined; + + viewer.imageryLayers.removeAll(); + let baseProvider = null; + if (frameStackActive && frameEntries) { + dynamicLayerActive = true; + + const startTime = Cesium.JulianDate.clone(frameEntries[0].julian); + const lastEntry = frameEntries[frameEntries.length - 1]; + const stopTime = Cesium.JulianDate.clone(lastEntry.stop); + + viewer.clock.startTime = Cesium.JulianDate.clone(startTime); + viewer.clock.stopTime = Cesium.JulianDate.clone(stopTime); + viewer.clock.currentTime = Cesium.JulianDate.clone(startTime); + viewer.clock.clockRange = Cesium.ClockRange.CLAMPED; + viewer.clock.clockStep = Cesium.ClockStep.SYSTEM_CLOCK_MULTIPLIER; + viewer.clock.multiplier = frameStepSeconds; + viewer.clock.shouldAnimate = config.animate === "time"; + if (viewer.timeline) { + viewer.timeline.zoomTo(viewer.clock.startTime, viewer.clock.stopTime); + } + + let activeFrameLayer = null; + let pendingFrameLayer = null; + let pendingFrameIndex = null; + let lastFrameIndex = -1; + + const ensureLayerForFrame = (frameEntry, reason) => { + if (!frameEntry) { + return; + } + if (frameEntry.index === lastFrameIndex && activeFrameLayer) { + return; + } + if (pendingFrameLayer) { + if (pendingFrameIndex === frameEntry.index) { + return; + } + viewer.imageryLayers.remove(pendingFrameLayer, true); + pendingFrameLayer = null; + pendingFrameIndex = null; + } + const providerOptions = { + url: frameEntry.path, + rectangle: Cesium.Rectangle.MAX_VALUE, + }; + if (creditObject) { + providerOptions.credit = creditObject; + } + console.debug("Cesium globe: configuring frame imagery provider", { + url: providerOptions.url, + reason, + }); + const provider = new Cesium.SingleTileImageryProvider(providerOptions); + let insertIndex = undefined; + if (activeFrameLayer) { + const existingIndex = viewer.imageryLayers.indexOf(activeFrameLayer); + if (existingIndex >= 0) { + insertIndex = existingIndex; + } + } + const newLayer = viewer.imageryLayers.addImageryProvider(provider, insertIndex); + newLayer.alpha = 0; + newLayer.show = true; + pendingFrameLayer = newLayer; + pendingFrameIndex = frameEntry.index; + + const finalizeLayer = () => { + if (pendingFrameLayer !== newLayer) { + return; + } + pendingFrameLayer = null; + pendingFrameIndex = null; + const previousLayer = activeFrameLayer && activeFrameLayer !== newLayer ? activeFrameLayer : null; + const fadeDurationMs = 250; + const start = typeof performance !== "undefined" ? performance.now() : Date.now(); + + const stepFade = (timestamp) => { + const now = timestamp || (typeof performance !== "undefined" ? 
performance.now() : Date.now()); + const progress = Math.min((now - start) / fadeDurationMs, 1); + newLayer.alpha = progress; + if (previousLayer) { + previousLayer.alpha = 1 - progress; + previousLayer.show = true; + } + viewer.scene.requestRender(); + if (progress < 1) { + if (typeof requestAnimationFrame === "function") { + requestAnimationFrame(stepFade); + } else { + setTimeout(() => stepFade(), 16); + } + } else if (previousLayer) { + try { + if (!viewer.imageryLayers.contains || viewer.imageryLayers.contains(previousLayer)) { + viewer.imageryLayers.remove(previousLayer, true); + } + } catch (error) { + console.debug("Cesium globe: failed to remove previous frame layer", error); + } + } + }; + + if (typeof requestAnimationFrame === "function") { + requestAnimationFrame(stepFade); + } else { + setTimeout(() => stepFade(), 0); + } + + activeFrameLayer = newLayer; + lastFrameIndex = frameEntry.index; + if (timeEl) { + timeEl.style.display = "block"; + timeEl.textContent = frameEntry.display || formatDisplayTime(frameEntry.iso); + } + console.debug("Cesium globe: applied frame imagery layer", { + url: providerOptions.url, + reason, + }); + }; + + const readyPromise = provider.readyPromise; + if (readyPromise && typeof readyPromise.then === "function") { + readyPromise + .then(finalizeLayer) + .catch((error) => { + console.warn("Cesium globe: frame imagery failed to load", providerOptions.url, error); + if (pendingFrameLayer === newLayer) { + pendingFrameLayer = null; + } + viewer.imageryLayers.remove(newLayer, true); + }); + } else { + finalizeLayer(); + } + }; + + const pickFrameForJulian = (julian) => { + for (const entry of frameEntries) { + if ( + Cesium.JulianDate.lessThanOrEquals(entry.julian, julian) && + Cesium.JulianDate.lessThan(julian, entry.stop) + ) { + return entry; + } + } + return frameEntries[frameEntries.length - 1]; + }; + + ensureLayerForFrame(frameEntries[0], "initial"); + + viewer.clock.onTick.addEventListener((clock) => { + const frameEntry = pickFrameForJulian(clock.currentTime); + if (!frameEntry) { + return; + } + ensureLayerForFrame(frameEntry, "clock"); + }); + } else if (config.texture) { + baseProvider = new Cesium.SingleTileImageryProvider({ + url: config.texture, + rectangle: Cesium.Rectangle.MAX_VALUE, + }); + } else if (tileUrl) { + try { + const normalizedTileUrl = tileUrl.replace(/[/]+$/, ""); + const isImageServer = normalizedTileUrl.toLowerCase().includes("imageserver"); + + const prepareTemplateUrl = (timeValue) => { + let url = tileUrl; + if (tileTimeKey && typeof timeValue === "string" && timeValue.length) { + const pattern = new RegExp(`\\{${tileTimeKey}\\}`, "gi"); + url = url.replace(pattern, encodeURIComponent(timeValue)); + } + if (tileParams) { + for (const [rawKey, rawValue] of Object.entries(tileParams)) { + if (rawValue == null) { + continue; + } + const key = String(rawKey); + const encodedValue = + typeof rawValue === "string" || typeof rawValue === "number" + ? encodeURIComponent(String(rawValue)) + : null; + if (!encodedValue) { + continue; + } + const pattern = new RegExp(`\\{${key}\\}`, "gi"); + url = url.replace(pattern, encodedValue); + } + } + if (tileToken) { + if (url.includes("{token}")) { + url = url.replace(new RegExp("\\{token\\}", "gi"), encodeURIComponent(tileToken)); + } else { + url = `${url}${url.includes("?") ? 
"&" : "?"}token=${encodeURIComponent(tileToken)}`; + } + } + url = url + .replace(/\\{TileMatrix\\}/gi, "{z}") + .replace(/\\{TileRow\\}/gi, "{y}") + .replace(/\\{TileCol\\}/gi, "{x}") + .replace(/\\{level\\}/gi, "{z}") + .replace(/\\{row\\}/gi, "{y}") + .replace(/\\{col\\}/gi, "{x}"); + return url; + }; + + let tilingScheme = + tileSchemeMode === "geographic" && Cesium.GeographicTilingScheme + ? new Cesium.GeographicTilingScheme() + : tileSchemeMode === "webmercator" && Cesium.WebMercatorTilingScheme + ? new Cesium.WebMercatorTilingScheme() + : undefined; + if (!tilingScheme && Cesium.WebMercatorTilingScheme) { + tilingScheme = new Cesium.WebMercatorTilingScheme(); + } + + if (hasDynamicTime) { + dynamicLayerActive = true; + + const timeEntries = tileTimeValues.map((value) => { + const isoValue = + typeof value === "string" && value.includes("T") + ? value + : `${value}T00:00:00Z`; + const julian = Cesium.JulianDate.fromIso8601(isoValue); + return { + value: String(value), + iso: isoValue, + julian, + }; + }); + timeEntries.sort((a, b) => Cesium.JulianDate.compare(a.julian, b.julian)); + timeEntries.forEach((entry, index) => { + if (index < timeEntries.length - 1) { + entry.stop = Cesium.JulianDate.clone(timeEntries[index + 1].julian); + } else { + entry.stop = Cesium.JulianDate.addDays(entry.julian, 1, new Cesium.JulianDate()); + } + }); + + const startTime = Cesium.JulianDate.clone(timeEntries[0].julian); + const stopTime = Cesium.JulianDate.clone(timeEntries[timeEntries.length - 1].stop); + + viewer.clock.startTime = Cesium.JulianDate.clone(startTime); + viewer.clock.stopTime = Cesium.JulianDate.clone(stopTime); + viewer.clock.currentTime = Cesium.JulianDate.clone(startTime); + viewer.clock.clockRange = Cesium.ClockRange.CLAMPED; + viewer.clock.clockStep = Cesium.ClockStep.SYSTEM_CLOCK_MULTIPLIER; + viewer.clock.multiplier = tileTimeMultiplier; + viewer.clock.shouldAnimate = true; + if (viewer.timeline) { + viewer.timeline.zoomTo(viewer.clock.startTime, viewer.clock.stopTime); + } + + let activeLayer = null; + let lastTimeValue = null; + + + const ensureLayerForTime = (timeValue, reason) => { + if (!timeValue) { + return; + } + if (timeValue === lastTimeValue && activeLayer) { + return; + } + const preparedUrl = prepareTemplateUrl(timeValue); + const providerOptions = { + url: preparedUrl, + tilingScheme, + }; + if (creditObject) { + providerOptions.credit = creditObject; + } + if (tileMinLevel != null) { + providerOptions.minimumLevel = tileMinLevel; + } + if (tileMaxLevel != null) { + providerOptions.maximumLevel = tileMaxLevel; + } + console.debug("Cesium globe: configuring UrlTemplateImageryProvider", { + url: providerOptions.url, + minimumLevel: providerOptions.minimumLevel ?? null, + maximumLevel: providerOptions.maximumLevel ?? null, + tilingScheme: tilingScheme ? 
tilingScheme.constructor?.name : null, + reason, + }); + const provider = new Cesium.UrlTemplateImageryProvider(providerOptions); + let insertIndex = undefined; + if (activeLayer) { + const existingIndex = viewer.imageryLayers.indexOf(activeLayer); + if (existingIndex >= 0) { + insertIndex = existingIndex; + } + } + const newLayer = viewer.imageryLayers.addImageryProvider(provider, insertIndex); + if (activeLayer) { + viewer.imageryLayers.remove(activeLayer, true); + } + activeLayer = newLayer; + lastTimeValue = timeValue; + if (timeEl) { + timeEl.style.display = "block"; + timeEl.textContent = formatDisplayTime(timeValue); + } + console.debug("Cesium globe: created UrlTemplateImageryProvider", { + url: providerOptions.url, + minimumLevel: providerOptions.minimumLevel ?? null, + maximumLevel: providerOptions.maximumLevel ?? null, + tilingScheme: tilingScheme ? tilingScheme.constructor?.name : null, + reason, + }); + }; + + const pickTimeForJulian = (julian) => { + for (const entry of timeEntries) { + if ( + Cesium.JulianDate.lessThanOrEquals(entry.julian, julian) && + Cesium.JulianDate.lessThan(julian, entry.stop) + ) { + return entry.value; + } + } + let closest = timeEntries[0]; + let bestDiff = Math.abs( + Cesium.JulianDate.secondsDifference(julian, closest.julian), + ); + for (const entry of timeEntries.slice(1)) { + const diff = Math.abs( + Cesium.JulianDate.secondsDifference(julian, entry.julian), + ); + if (diff < bestDiff) { + bestDiff = diff; + closest = entry; + } + } + return closest.value; + }; + + ensureLayerForTime(timeEntries[0].value, "initial"); + viewer.clock.onTick.addEventListener((clock) => { + const desiredValue = pickTimeForJulian(clock.currentTime); + ensureLayerForTime(desiredValue, "clock"); + }); + } else { + if (timeEl) { + timeEl.style.display = "none"; + } + let providerLoaded = false; + const inferredMinLevel = + tileMinLevel != null + ? tileMinLevel + : isImageServer + ? 
2 + : undefined; + if ( + Cesium.ArcGisMapServerImageryProvider && + tileType !== "template" && + !isImageServer + ) { + try { + const arcgisOptions = { + enablePickFeatures: false, + }; + if (creditObject) { + arcgisOptions.credit = creditObject; + } + if (inferredMinLevel != null) { + arcgisOptions.minimumLevel = inferredMinLevel; + } + if (tileMaxLevel != null) { + arcgisOptions.maximumLevel = tileMaxLevel; + } + if (tileToken) { + arcgisOptions.token = tileToken; + } + if (typeof Cesium.ArcGisMapServerImageryProvider.fromUrl === "function") { + baseProvider = await Cesium.ArcGisMapServerImageryProvider.fromUrl( + tileUrl, + arcgisOptions, + ); + } else { + arcgisOptions.url = tileUrl; + baseProvider = new Cesium.ArcGisMapServerImageryProvider(arcgisOptions); + } + providerLoaded = true; + } catch (arcgisError) { + console.warn( + "Cesium globe: ArcGIS imagery provider failed, attempting template fallback", + arcgisError, + ); + baseProvider = null; + } + } + + if ( + !baseProvider && + Cesium.UrlTemplateImageryProvider && + (tileType === "template" || (tileType === "arcgis" && isImageServer)) + ) { + if (isImageServer) { + console.warn( + "Cesium globe: skipping URL template fallback for ImageServer endpoint (not tile cached)", + ); + } else { + const templateUrl = prepareTemplateUrl(null); + const templateOptions = { + url: templateUrl, + tilingScheme, + }; + if (creditObject) { + templateOptions.credit = creditObject; + } + if (inferredMinLevel != null) { + templateOptions.minimumLevel = inferredMinLevel; + } + if (tileMaxLevel != null) { + templateOptions.maximumLevel = tileMaxLevel; + } + console.debug("Cesium globe: configuring UrlTemplateImageryProvider", { + url: templateOptions.url, + minimumLevel: templateOptions.minimumLevel ?? null, + maximumLevel: templateOptions.maximumLevel ?? null, + tilingScheme: tilingScheme ? tilingScheme.constructor?.name : null, + }); + baseProvider = new Cesium.UrlTemplateImageryProvider(templateOptions); + console.debug("Cesium globe: created UrlTemplateImageryProvider", { + url: templateOptions.url, + minimumLevel: templateOptions.minimumLevel ?? null, + maximumLevel: templateOptions.maximumLevel ?? null, + tilingScheme: tilingScheme ? 
tilingScheme.constructor?.name : null, + }); + providerLoaded = true; + } + } + + if (!providerLoaded) { + console.warn( + "Cesium globe: ArcGIS tile support unavailable in this Cesium build", + ); + } + } + } catch (error) { + console.warn("Cesium globe: failed to initialize custom tile imagery", error); + baseProvider = null; + } + } else if (timeEl) { + timeEl.style.display = "none"; + } + if (!baseProvider && !dynamicLayerActive) { + if (Cesium.createWorldImageryAsync) { + try { + baseProvider = await Cesium.createWorldImageryAsync(); + } catch (error) { + console.warn("Cesium globe: failed to load world imagery", error); + } + } + if (!baseProvider && Cesium.createWorldImagery) { + try { + baseProvider = Cesium.createWorldImagery(); + } catch (error) { + console.warn("Cesium globe: createWorldImagery fallback failed", error); + } + } + if (!baseProvider && Cesium.IonImageryProvider) { + try { + baseProvider = new Cesium.IonImageryProvider({ assetId: 2 }); + } catch (error) { + console.warn("Cesium globe: Ion imagery provider failed", error); + } + } + if (!baseProvider && Cesium.TileCoordinatesImageryProvider) { + baseProvider = new Cesium.TileCoordinatesImageryProvider(); + } + } + if (baseProvider) { + viewer.imageryLayers.addImageryProvider(baseProvider); + } + const autoRotate = Boolean(config.auto_rotate); + const autoRotateSpeed = + Number.isFinite(Number(config.auto_rotate_speed)) && Number(config.auto_rotate_speed) !== 0 + ? Number(config.auto_rotate_speed) + : 0.5; // degrees per second + if (autoRotate) { + let spinEnabled = true; + const spinRate = Cesium.Math.toRadians(autoRotateSpeed); // degrees per second -> radians per second + let lastTimestamp = undefined; + const onPostRender = () => { + if (!spinEnabled) { + return; + } + const now = performance.now(); + if (lastTimestamp === undefined) { + lastTimestamp = now; + return; + } + const deltaSeconds = Math.max((now - lastTimestamp) / 1000, 0.016); + lastTimestamp = now; + viewer.scene.camera.rotate(Cesium.Cartesian3.UNIT_Z, spinRate * deltaSeconds); + }; + const stopSpin = () => { + spinEnabled = false; + viewer.scene.postRender.removeEventListener(onPostRender); + viewer.scene.canvas.removeEventListener("mousedown", stopSpin); + viewer.scene.canvas.removeEventListener("touchstart", stopSpin); + }; + viewer.scene.postRender.addEventListener(onPostRender); + viewer.scene.canvas.addEventListener("mousedown", stopSpin); + viewer.scene.canvas.addEventListener("touchstart", stopSpin); + } + + + const canvas = viewer.scene.canvas; + if (config.probe_enabled) { + canvas.style.cursor = "crosshair"; + } + + function sanitizeHeight(value) { + if (!Number.isFinite(value)) { + return null; + } + if (value < -11000 || value > 9000) { + return null; + } + return value; + } + + function labelFromPath(value) { + if (!value || typeof value !== "string") { + return "—"; + } + const parts = value.split("/"); + return parts[parts.length - 1] || value; + } + + if (gradientEl) gradientEl.textContent = labelFromPath(config.probe_gradient); + if (lutEl) lutEl.textContent = labelFromPath(config.probe_lut); + + const titleText = + typeof config.title === "string" ? config.title.trim() : ""; + if (titleEl) { + if (titleText) { + titleEl.textContent = titleText; + titleEl.style.display = "block"; + } else { + titleEl.style.display = "none"; + } + } + + const descriptionText = + typeof config.description === "string" ? 
config.description.trim() : ""; + if (descriptionEl) { + if (descriptionText) { + descriptionEl.textContent = descriptionText; + descriptionEl.style.display = "block"; + } else { + descriptionEl.style.display = "none"; + } + } + + const debugOverlayRaw = config.debug_overlay; + const debugOverlay = + debugOverlayRaw === true || + debugOverlayRaw === "true" || + debugOverlayRaw === "True" || + debugOverlayRaw === 1 || + debugOverlayRaw === "1"; + if (metaEl) { + metaEl.style.display = debugOverlay ? "block" : "none"; + } + + if (legendEl) { + const legendSrc = config.legend || null; + if (legendSrc) { + legendEl.innerHTML = ""; + const img = document.createElement("img"); + img.src = legendSrc; + img.loading = "lazy"; + img.alt = "Legend"; + legendEl.appendChild(img); + legendEl.style.display = "block"; + } else { + legendEl.style.display = "none"; + } + } + + if (!config.probe_enabled && overlay) { + overlay.style.opacity = "0.65"; + } + if (!config.probe_enabled && probeContainer) { + probeContainer.style.display = "none"; + } + + async function fetchText(url) { + const response = await fetch(url); + if (!response.ok) { + throw new Error(`Fetch failed: ${response.status}`); + } + return response.text(); + } + + async function loadJson(url) { + try { + const text = await fetchText(url); + return JSON.parse(text); + } catch (error) { + console.warn("Cesium probe: failed to load JSON", url, error); + return null; + } + } + + function parseCsvDataset(text) { + const lines = text + .split(/\\r?\\n/) + .map((line) => line.trim()) + .filter(Boolean); + if (!lines.length) { + return null; + } + const headers = lines[0] + .split(",") + .map((h) => h.trim().toLowerCase()); + const latIdx = headers.findIndex((h) => h === "lat" || h === "latitude"); + const lonIdx = headers.findIndex((h) => h === "lon" || h === "lng" || h === "long" || h === "longitude"); + const valueIdx = headers.findIndex((h) => h === "value" || h === "val" || h === "data"); + const unitsIdx = headers.findIndex((h) => h === "units" || h === "unit"); + if (latIdx === -1 || lonIdx === -1 || valueIdx === -1) { + return null; + } + const points = []; + for (let i = 1; i < lines.length; i += 1) { + const parts = lines[i].split(",").map((p) => p.trim()); + if (parts.length < headers.length) { + continue; + } + const lat = Number(parts[latIdx]); + const lon = Number(parts[lonIdx]); + const value = Number(parts[valueIdx]); + if (!Number.isFinite(lat) || !Number.isFinite(lon) || !Number.isFinite(value)) { + continue; + } + const entry = { lat, lon, value }; + if (unitsIdx !== -1 && parts[unitsIdx]) { + entry.units = parts[unitsIdx]; + } + points.push(entry); + } + return points.length ? { points } : null; + } + + function normalizeProbeArray(raw) { + if (!Array.isArray(raw)) { + return null; + } + const points = []; + for (const entry of raw) { + if (typeof entry !== "object" || entry == null) { + continue; + } + const lat = Number(entry.lat ?? entry.latitude); + const lon = Number(entry.lon ?? entry.lng ?? entry.long ?? entry.longitude); + const value = Number(entry.value ?? entry.val ?? entry.data); + if (!Number.isFinite(lat) || !Number.isFinite(lon) || !Number.isFinite(value)) { + continue; + } + points.push({ + lat, + lon, + value, + units: entry.units ?? entry.unit ?? null, + }); + } + return points.length ? 
{ points } : null; + } + + async function loadProbeDataset(url) { + try { + const text = await fetchText(url); + try { + const parsed = JSON.parse(text); + const normalized = normalizeProbeArray(parsed); + if (normalized) { + return normalized; + } + } catch (jsonError) { + // fall through to CSV parser + } + const csv = parseCsvDataset(text); + if (csv) { + return csv; + } + console.warn("Cesium probe: unsupported dataset format", url); + } catch (error) { + console.warn("Cesium probe: failed to load dataset", url, error); + } + return null; + } + + function nearestProbe(lat, lon, dataset) { + if (!dataset || !dataset.points || !dataset.points.length) { + return null; + } + const latRad = Cesium.Math.toRadians(lat); + const lonRad = Cesium.Math.toRadians(lon); + let best = null; + let bestScore = Infinity; + for (const point of dataset.points) { + const pLat = Cesium.Math.toRadians(point.lat); + const pLon = Cesium.Math.toRadians(point.lon); + const dLat = latRad - pLat; + const dLon = lonRad - pLon; + const sinLat = Math.sin(dLat / 2); + const sinLon = Math.sin(dLon / 2); + const a = + sinLat * sinLat + + Math.cos(latRad) * Math.cos(pLat) * sinLon * sinLon; + const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); + if (c < bestScore) { + bestScore = c; + best = point; + } + } + return best; + } + + function formatValue(value) { + if (!Number.isFinite(value)) { + return "—"; + } + if (Math.abs(value) >= 1000 || Math.abs(value) < 0.01) { + return value.toExponential(2); + } + return value.toFixed(2); + } + + const probeDataset = config.probe_data + ? await loadProbeDataset(config.probe_data) + : null; + + const handler = new Cesium.ScreenSpaceEventHandler(canvas); + const ellipsoid = viewer.scene.globe.ellipsoid; + + function updateProbeDisplay(payload) { + if (!latEl || !lonEl) { + return; + } + if (!payload) { + latEl.textContent = "—"; + lonEl.textContent = "—"; + if (heightEl) heightEl.textContent = "—"; + if (valueEl) valueEl.textContent = "—"; + if (unitsEl) unitsEl.textContent = "—"; + return; + } + latEl.textContent = `${payload.lat.toFixed(2)}°`; + lonEl.textContent = `${payload.lon.toFixed(2)}°`; + if (heightEl) { + const cleanHeight = sanitizeHeight(payload.height); + if (cleanHeight == null) { + heightEl.textContent = "—"; + } else { + heightEl.textContent = `${cleanHeight.toFixed(0)} m`; + } + } + if (valueEl) { + valueEl.textContent = + payload.dataValue != null ? formatValue(payload.dataValue) : "—"; + } + if (unitsEl) { + unitsEl.textContent = payload.dataUnits ?? "—"; + } + } + + function clearProbe() { + updateProbeDisplay(null); + } + + let probeHeightRequestId = 0; + + if (config.probe_enabled) { + handler.setInputAction((movement) => { + const pickRay = viewer.camera.getPickRay(movement.endPosition); + let cartesian = pickRay + ? 
viewer.scene.globe.pick(pickRay, viewer.scene) + : undefined; + if (!Cesium.defined(cartesian)) { + cartesian = viewer.camera.pickEllipsoid( + movement.endPosition, + ellipsoid, + ); + } + if (!Cesium.defined(cartesian)) { + clearProbe(); + return; + } + const cartographic = ellipsoid.cartesianToCartographic(cartesian); + const lat = Cesium.Math.toDegrees(cartographic.latitude); + let lon = Cesium.Math.toDegrees(cartographic.longitude); + if (lon > 180) lon -= 360; + if (lon < -180) lon += 360; + let heightMeters = null; + if (probeHeightEnabled) { + heightMeters = sanitizeHeight(cartographic.height); + if (heightMeters == null) { + heightMeters = sanitizeHeight(viewer.scene.globe.getHeight(cartographic)); + } + if (heightMeters == null) { + heightMeters = sanitizeHeight(cartographic.height); + } + } + + let dataValue = null; + let dataUnits = null; + if (probeDataset) { + const nearest = nearestProbe(lat, lon, probeDataset); + if (nearest) { + dataValue = nearest.value; + dataUnits = nearest.units ?? config.probe_units ?? null; + } + } + + const payload = { + lat, + lon, + height: heightMeters, + dataValue, + dataUnits, + }; + updateProbeDisplay(payload); + + if ( + probeHeightEnabled && + wantsTerrain && + terrainProvider && + Cesium.sampleTerrainMostDetailed && + Cesium.Cartographic + ) { + const requestId = ++probeHeightRequestId; + const target = Cesium.Cartographic.clone(cartographic); + Cesium.sampleTerrainMostDetailed(terrainProvider, [target]) + .then((result) => { + if (requestId !== probeHeightRequestId) { + return; + } + const refined = (result && result[0]) || target; + if (refined) { + const clean = sanitizeHeight(refined.height); + if (clean != null) { + payload.height = clean; + updateProbeDisplay(payload); + } + } + }) + .catch((error) => { + if (requestId === probeHeightRequestId) { + console.warn("Cesium probe: terrain refinement failed", error); + } + }); + } + }, Cesium.ScreenSpaceEventType.MOUSE_MOVE); + + canvas.addEventListener("mouseleave", clearProbe); + clearProbe(); + } + + window.addEventListener("resize", () => { + const w = config.width || window.innerWidth; + const h = config.height || window.innerHeight; + container.style.width = `${w}px`; + container.style.height = `${h}px`; + viewer.resize(); + }); +})(); + +""" + ).strip() + + "\n" + ) diff --git a/src/zyra/visualization/renderers/frame_utils.py b/src/zyra/visualization/renderers/frame_utils.py new file mode 100644 index 00000000..342cbff5 --- /dev/null +++ b/src/zyra/visualization/renderers/frame_utils.py @@ -0,0 +1,224 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Shared helpers for processing frame manifests and timestamps.""" + +from __future__ import annotations + +import json +from datetime import datetime, timezone +from typing import Any + +from dateutil import parser as date_parser + +from zyra.utils.date_manager import DateManager + +_SIMPLE_TYPES = (str, int, float, bool) + + +def load_manifest_entries(text: str) -> list[dict[str, Any]] | None: + """Parse a JSON manifest describing frame entries. + + Accepts JSON arrays or mapping objects with ``frames``/``items``/``entries`` keys. + Returns a list of dictionaries each containing at least ``path``. Additional scalar + metadata is preserved when possible. 
+ """ + + try: + data = json.loads(text) + except json.JSONDecodeError: + return None + + frames: list[dict[str, Any]] | None = None + if isinstance(data, list): + frames = [item for item in data if isinstance(item, dict)] + elif isinstance(data, dict): + for key in ("frames", "items", "entries"): + value = data.get(key) + if isinstance(value, list): + frames = [item for item in value if isinstance(item, dict)] + break + if frames is None and "path" in data: + frames = [data] + if not frames: + return [] + + results: list[dict[str, Any]] = [] + for item in frames: + path = None + for key in ("path", "texture", "uri", "url", "source"): + value = item.get(key) + if value: + path = str(value) + break + if not path: + continue + entry: dict[str, Any] = {"path": path} + if "timestamp" in item: + entry["timestamp"] = item["timestamp"] + elif "time" in item: + entry["timestamp"] = item["time"] + elif "datetime" in item: + entry["timestamp"] = item["datetime"] + + if "display" in item: + entry["display_timestamp"] = item["display"] + elif "display_timestamp" in item: + entry["display_timestamp"] = item["display_timestamp"] + + if isinstance(item.get("label"), str): + entry["label"] = item["label"] + + metadata: dict[str, Any] = {} + if isinstance(item.get("metadata"), dict): + for key, value in item["metadata"].items(): + if isinstance(value, _SIMPLE_TYPES) or value is None: + metadata[key] = value + for key, value in item.items(): + if key in { + "path", + "texture", + "uri", + "url", + "source", + "timestamp", + "time", + "datetime", + "display", + "display_timestamp", + "metadata", + "label", + }: + continue + if isinstance(value, _SIMPLE_TYPES) or value is None: + metadata.setdefault(key, value) + if metadata: + entry["metadata"] = metadata + results.append(entry) + return results + + +def finalize_frame_entries( + entries: list[dict[str, Any]], + *, + time_key: str | None = None, + parse_formats: list[str] | None = None, + display_format: str | None = None, +) -> list[dict[str, Any]]: + """Normalize frame entries by resolving timestamps and sanitizing metadata.""" + + date_manager = DateManager(parse_formats or []) + normalized: list[dict[str, Any]] = [] + for raw in entries: + path = raw.get("path") + if not path: + continue + metadata = raw.get("metadata") + if not isinstance(metadata, dict): + metadata = None + + timestamp_value = raw.get("timestamp") + if timestamp_value is None and time_key: + if time_key in raw: + timestamp_value = raw[time_key] + elif metadata: + timestamp_value = metadata.get(time_key) + + iso_value, display_value = _normalize_timestamp( + timestamp_value, date_manager, display_format + ) + if display_value is None and isinstance(raw.get("display_timestamp"), str): + candidate = raw["display_timestamp"].strip() + if candidate: + display_value = candidate + + entry: dict[str, Any] = {"path": str(path)} + if iso_value: + entry["timestamp"] = iso_value + if display_value: + entry["display_timestamp"] = display_value + + if metadata: + clean_meta = { + key: value + for key, value in metadata.items() + if isinstance(value, _SIMPLE_TYPES) or value is None + } + if clean_meta: + entry["metadata"] = clean_meta + + if isinstance(raw.get("label"), str): + label = raw["label"].strip() + if label: + entry["label"] = label + + normalized.append(entry) + return normalized + + +def _normalize_timestamp( + value: Any, + date_manager: DateManager, + display_format: str | None, +) -> tuple[str | None, str | None]: + """Convert a timestamp-like value to ISO-8601 and a display string.""" 
+ + if value is None: + return None, None + + dt: datetime | None = None + if isinstance(value, datetime): + dt = value + elif isinstance(value, (int, float)): + dt = datetime.fromtimestamp(_coerce_epoch(value), tz=timezone.utc) + else: + value_str = str(value).strip() + if not value_str: + return None, None + try: + dt = date_parser.isoparse(value_str) + except (ValueError, TypeError): + dt = None + if dt is None and value_str.isdigit(): + try: + dt = datetime.fromtimestamp( + _coerce_epoch(int(value_str)), tz=timezone.utc + ) + except (ValueError, OSError, OverflowError): + dt = None + if dt is None: + extracted = None + try: + extracted = date_manager.extract_date_time(value_str) + except Exception: + extracted = None + if extracted: + try: + dt = date_parser.isoparse(extracted) + except (ValueError, TypeError): + dt = None + if dt is None: + return None, value_str + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + else: + dt = dt.astimezone(timezone.utc) + + iso_value = dt.isoformat().replace("+00:00", "Z") + if display_format: + try: + display_value = dt.strftime(display_format) + except Exception: + display_value = iso_value.replace("T", " ").replace("Z", " UTC") + else: + display_value = iso_value.replace("T", " ").replace("Z", " UTC") + return iso_value, display_value + + +def _coerce_epoch(value: float) -> float: + """Interpret a numeric value as seconds (detecting millisecond precision).""" + + seconds = float(value) + if seconds > 1e12: + # Heuristic: treat as milliseconds. + seconds /= 1000.0 + return seconds diff --git a/src/zyra/visualization/renderers/probe_utils.py b/src/zyra/visualization/renderers/probe_utils.py new file mode 100644 index 00000000..db66d9e3 --- /dev/null +++ b/src/zyra/visualization/renderers/probe_utils.py @@ -0,0 +1,187 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Helpers for preparing structured probe datasets for interactive viewers.""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any + +import numpy as np + +try: # optional dependency + import xarray as xr +except ImportError: # pragma: no cover - optional dependency + xr = None # type: ignore + +LAT_NAMES = {"lat", "latitude", "y", "ylat"} +LON_NAMES = {"lon", "longitude", "x", "xlon"} +MAX_PROBE_POINTS = 10_000 + + +class ProbeDatasetError(Exception): + """Raised when probe dataset conversion fails.""" + + +def prepare_probe_dataset_file( + source: Path, + dest_dir: Path, + *, + variable: str | None = None, + max_points: int = MAX_PROBE_POINTS, +) -> tuple[Path, dict[str, Any]]: + """Convert structured datasets into JSON probe points written under ``dest_dir``. + + Parameters + ---------- + source: + Input file path (e.g., NetCDF). + dest_dir: + Target directory inside the bundle's assets. + variable: + Optional variable name to extract (required when dataset has multiple data vars). + max_points: + Maximum number of probe points to emit. + + Returns + ------- + Path, dict + Tuple of the generated JSON file path and metadata (e.g., units, variable name). + + Raises + ------ + ProbeDatasetError + When the dataset cannot be converted. + """ + + if xr is None: # pragma: no cover - requires optional dep + raise ProbeDatasetError( + "xarray is required for probe datasets. Install with `pip install xarray` or use the `visualization` extra." 
+ ) + + source = source.expanduser() + suffix = source.suffix.lower() + if suffix not in {".nc", ".nc4", ".cdf"}: + raise ProbeDatasetError(f"Unsupported probe dataset format: {source}") + + dest_dir.mkdir(parents=True, exist_ok=True) + dest_path = dest_dir / f"{source.stem}_probe_points.json" + + points, metadata = _extract_points_from_netcdf( + source, variable=variable, max_points=max_points + ) + + dest_path.write_text(json.dumps(points, indent=2), encoding="utf-8") + return dest_path, metadata + + +def _extract_points_from_netcdf( + source: Path, + *, + variable: str | None, + max_points: int = MAX_PROBE_POINTS, +) -> tuple[list[dict[str, Any]], dict[str, Any]]: + if xr is None: # pragma: no cover - requires optional dep + raise ProbeDatasetError( + "xarray is required for probe datasets. Install with `pip install xarray` or use the `visualization` extra." + ) + ds = xr.open_dataset(source) + try: + data_var = variable or _select_default_variable(ds) + arr = ds[data_var] + arr = _collapse_extra_dims(arr) + lat_dim, lon_dim = _detect_lat_lon_dims(arr) + + lat_vals = arr.coords[lat_dim].values + lon_vals = arr.coords[lon_dim].values + + lat_idx = _sample_indices(lat_vals.size, max_points) + lon_idx = _sample_indices(lon_vals.size, max_points) + + arr_sample = arr.isel({lat_dim: lat_idx, lon_dim: lon_idx}) + lat_sample = lat_vals[lat_idx] + lon_sample = lon_vals[lon_idx] + values = arr_sample.values + + units = arr.attrs.get("units") + + points: list[dict[str, Any]] = [] + for i, lat in enumerate(np.asarray(lat_sample).astype(float)): + for j, lon in enumerate(np.asarray(lon_sample).astype(float)): + value = values[i, j] + if not np.isfinite(value): + continue + point: dict[str, Any] = { + "lat": float(lat), + "lon": float(lon), + "value": float(value), + } + if units: + point["units"] = str(units) + points.append(point) + metadata = {"variable": data_var} + if units: + metadata["units"] = str(units) + finally: + ds.close() + if not points: + raise ProbeDatasetError("No finite probe points extracted from dataset.") + return points, metadata + + +def _select_default_variable(ds: xr.Dataset) -> str: + data_vars = list(ds.data_vars) + if not data_vars: + raise ProbeDatasetError("Dataset contains no data variables for probing.") + if len(data_vars) == 1: + return data_vars[0] + raise ProbeDatasetError( + "Dataset has multiple variables; specify --probe-var to select one." + ) + + +def _collapse_extra_dims(arr: xr.DataArray) -> xr.DataArray: + collapsible_dims = [dim for dim in arr.dims if dim not in arr.coords] + for dim in collapsible_dims: + arr = arr.isel({dim: 0}) + while arr.ndim > 2: + dim = arr.dims[0] + if dim in LAT_NAMES or dim in LON_NAMES: + break + arr = arr.isel({dim: 0}) + return arr + + +def _detect_lat_lon_dims(arr: xr.DataArray) -> tuple[str, str]: + lat_dim = None + lon_dim = None + for dim in arr.dims: + name = dim.lower() + if name in LAT_NAMES: + lat_dim = dim + elif name in LON_NAMES: + lon_dim = dim + if lat_dim and lon_dim: + return lat_dim, lon_dim + for coord_name in arr.coords: + name = coord_name.lower() + coord = arr.coords[coord_name] + if coord.ndim != 1: + continue + if not lat_dim and name in LAT_NAMES: + lat_dim = coord_name + elif not lon_dim and name in LON_NAMES: + lon_dim = coord_name + if not lat_dim or not lon_dim: + raise ProbeDatasetError( + "Unable to identify latitude/longitude coordinates for probe dataset." 
+ ) + return lat_dim, lon_dim + + +def _sample_indices(size: int, max_points: int) -> np.ndarray: + if size <= 0: + return np.array([0]) + target = max(1, int(np.sqrt(max_points))) + sample = min(size, target) + return np.unique(np.linspace(0, size - 1, sample, dtype=int)) diff --git a/src/zyra/visualization/renderers/registry.py b/src/zyra/visualization/renderers/registry.py new file mode 100644 index 00000000..b2636641 --- /dev/null +++ b/src/zyra/visualization/renderers/registry.py @@ -0,0 +1,48 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Registry for interactive renderers.""" + +from __future__ import annotations + +from typing import Iterable, TypeVar + +from .base import InteractiveRenderer + +_RendererT = TypeVar("_RendererT", bound=InteractiveRenderer) + +_REGISTRY: dict[str, type[InteractiveRenderer]] = {} + + +def register(renderer_cls: type[_RendererT]) -> type[_RendererT]: + """Register ``renderer_cls`` keyed by its ``slug`` attribute.""" + + if not issubclass(renderer_cls, InteractiveRenderer): + raise TypeError("renderer must inherit InteractiveRenderer") + slug = renderer_cls.slug + if not slug: + raise ValueError("renderer slug must be non-empty") + if slug in _REGISTRY: + raise ValueError(f"renderer slug already registered: {slug}") + _REGISTRY[slug] = renderer_cls + return renderer_cls + + +def get(slug: str) -> type[InteractiveRenderer]: + """Return the renderer class registered for ``slug``.""" + + try: + return _REGISTRY[slug] + except KeyError as exc: + raise KeyError(f"unknown renderer slug: {slug}") from exc + + +def create(slug: str, **options) -> InteractiveRenderer: + """Instantiate the renderer registered under ``slug``.""" + + cls = get(slug) + return cls(**options) + + +def available() -> Iterable[type[InteractiveRenderer]]: + """Yield all registered renderer classes.""" + + return _REGISTRY.values() diff --git a/src/zyra/visualization/renderers/video_utils.py b/src/zyra/visualization/renderers/video_utils.py new file mode 100644 index 00000000..14e5bdde --- /dev/null +++ b/src/zyra/visualization/renderers/video_utils.py @@ -0,0 +1,220 @@ +# SPDX-License-Identifier: Apache-2.0 +"""Utilities for sampling frames from video sources.""" + +from __future__ import annotations + +import json +import shutil +import subprocess +from dataclasses import dataclass +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import Any, Iterable +from urllib.parse import urlparse + +from zyra.connectors.backends import vimeo as vimeo_backend + + +class VideoExtractionError(RuntimeError): + """Raised when frame extraction fails.""" + + +@dataclass(frozen=True) +class VideoMetadata: + duration_seconds: float + width: int | None = None + height: int | None = None + codec: str | None = None + + +def ensure_ffmpeg() -> None: + """Ensure ``ffmpeg`` and ``ffprobe`` are available in ``PATH``.""" + + if shutil.which("ffmpeg") is None: + raise VideoExtractionError( + "ffmpeg is required to extract frames from video sources." + ) + if shutil.which("ffprobe") is None: + raise VideoExtractionError("ffprobe is required to probe video metadata.") + + +def resolve_video_source(source: str, credentials: dict[str, str] | None = None) -> str: + """Return a playable URL/path for ``source``. + + Supports Vimeo URIs (``vimeo:12345`` or ``https://vimeo.com/12345``) by returning a + progressive download link using PyVimeo. 
+ """ + + parsed = urlparse(source) + if source.startswith("vimeo:") or "vimeo.com" in parsed.netloc: + video_id = _extract_vimeo_id(source) + url = vimeo_backend.get_download_url( + video_id, + token=(credentials or {}).get("access_token"), + client_id=(credentials or {}).get("client_id"), + client_secret=(credentials or {}).get("client_secret"), + ) + if not url: + raise VideoExtractionError( + f"Unable to resolve Vimeo download URL for video '{video_id}'." + ) + return url + return source + + +def _extract_vimeo_id(value: str) -> str: + value = value.strip() + if value.startswith("vimeo:"): + return value.split(":", 1)[1].strip("/") + parsed = urlparse(value) + path = parsed.path.strip("/") + if path.startswith("video/"): + path = path.split("/", 1)[1] + if path.startswith("videos/"): + path = path.split("/", 1)[1] + return path + + +def probe_video_metadata(source: str) -> VideoMetadata: + """Return video metadata using ``ffprobe``.""" + + ensure_ffmpeg() + + cmd = [ + "ffprobe", + "-v", + "error", + "-print_format", + "json", + "-show_format", + "-show_streams", + source, + ] + try: + proc = subprocess.run(cmd, check=True, capture_output=True, text=True) + except ( + subprocess.CalledProcessError + ) as exc: # pragma: no cover - depends on ffprobe + raise VideoExtractionError( + f"ffprobe failed to inspect video: {exc.stderr or exc.stdout or exc}" + ) from exc + + payload = json.loads(proc.stdout or "{}") + fmt = payload.get("format", {}) + duration = fmt.get("duration") + if duration is None: + raise VideoExtractionError("ffprobe did not return a duration for the video.") + + try: + duration_seconds = float(duration) + except ValueError as exc: + raise VideoExtractionError( + f"Invalid duration reported by ffprobe: {duration}" + ) from exc + + width = height = codec = None + for stream in payload.get("streams", []): + if stream.get("codec_type") == "video": + width = stream.get("width") + height = stream.get("height") + codec = stream.get("codec_name") + break + + return VideoMetadata( + duration_seconds=duration_seconds, + width=width, + height=height, + codec=codec, + ) + + +def extract_frames( + source: str, + *, + output_dir: Path, + fps: float, + image_format: str = "png", +) -> list[Path]: + """Extract frames from ``source`` into ``output_dir`` using ``ffmpeg``.""" + + ensure_ffmpeg() + output_dir.mkdir(parents=True, exist_ok=True) + for file in output_dir.glob(f"frame_*.{image_format}"): + file.unlink() + + fps = max(fps, 0.1) + pattern = output_dir / f"frame_%05d.{image_format}" + + cmd = [ + "ffmpeg", + "-hide_banner", + "-loglevel", + "error", + "-i", + source, + "-vf", + f"fps={fps}", + "-y", + str(pattern), + ] + try: + subprocess.run(cmd, check=True, capture_output=True) + except subprocess.CalledProcessError as exc: # pragma: no cover - depends on ffmpeg + raise VideoExtractionError( + f"ffmpeg failed to extract frames: {exc.stderr.decode('utf-8', 'ignore')}" + ) from exc + + frames = sorted(output_dir.glob(f"frame_*.{image_format}")) + if not frames: + raise VideoExtractionError("No frames were extracted from the video.") + return frames + + +def compute_frame_timestamps( + *, + frames: Iterable[Path], + start_time: datetime, + fps: float, +) -> list[dict[str, Any]]: + """Return frame metadata with timestamps for each frame.""" + + start_time = _coerce_utc(start_time) + frame_interval = timedelta(seconds=1.0 / fps if fps > 0 else 1.0) + entries: list[dict[str, Any]] = [] + for index, frame in enumerate(sorted(frames)): + timestamp = start_time + frame_interval * index 
+ iso = timestamp.isoformat().replace("+00:00", "Z") + display = timestamp.strftime("%Y-%m-%d %H:%M:%S UTC") + entries.append( + { + "path": str(frame), + "timestamp": iso, + "display_timestamp": display, + } + ) + return entries + + +def _coerce_utc(dt: datetime) -> datetime: + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt.astimezone(timezone.utc) + + +def parse_datetime(value: str) -> datetime: + """Parse ISO-8601 datetime strings with optional ``Z`` suffix.""" + + if value.endswith("Z"): + value = value[:-1] + "+00:00" + return datetime.fromisoformat(value) + + +def format_datetime(dt: datetime) -> str: + return _coerce_utc(dt).isoformat().replace("+00:00", "Z") + + +def compute_end_time(start: datetime, frame_count: int, fps: float) -> datetime: + if frame_count <= 1 or fps <= 0: + return start + delta = (frame_count - 1) / fps + return _coerce_utc(start) + timedelta(seconds=delta) diff --git a/src/zyra/visualization/renderers/webgl_sphere.py b/src/zyra/visualization/renderers/webgl_sphere.py new file mode 100644 index 00000000..034bd581 --- /dev/null +++ b/src/zyra/visualization/renderers/webgl_sphere.py @@ -0,0 +1,1943 @@ +# SPDX-License-Identifier: Apache-2.0 +"""WebGL/Three.js based globe renderer.""" + +from __future__ import annotations + +import json +import logging +from datetime import datetime, timedelta, timezone +from pathlib import Path +from textwrap import dedent, indent + +from zyra.utils.date_manager import DateManager + +from .base import InteractiveBundle, InteractiveRenderer +from .frame_utils import finalize_frame_entries, load_manifest_entries +from .probe_utils import ProbeDatasetError, prepare_probe_dataset_file +from .registry import register +from .video_utils import ( + VideoExtractionError, + compute_end_time, + compute_frame_timestamps, + extract_frames, + format_datetime, + parse_datetime, + probe_video_metadata, + resolve_video_source, +) + +LOGGER = logging.getLogger(__name__) + + +def _format_display_timestamp(dt: datetime) -> str: + """Render a display-friendly UTC label for timeline timestamps.""" + + if dt.tzinfo is None: + dt = dt.replace(tzinfo=timezone.utc) + else: + dt = dt.astimezone(timezone.utc) + return dt.strftime("%Y-%m-%d %H:%M:%S UTC") + + +def _coerce_utc(dt: datetime | None) -> datetime | None: + if dt is None: + return None + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt.astimezone(timezone.utc) + + +def _is_remote_ref(value: object) -> bool: + if not isinstance(value, str): + return False + lower = value.lower() + return lower.startswith(("http://", "https://", "http:/", "https:/")) + + +def _normalize_remote_ref(value: str) -> str: + lower = value.lower() + if lower.startswith(("http://", "https://")): + return value + if lower.startswith(("http:/", "https:/")): + scheme, rest = value.split(":/", 1) + return f"{scheme}://{rest.lstrip('/')}" + return value + + +def _elapsed_seconds(start, timestamp: str) -> float: + try: + end = parse_datetime(timestamp) + except Exception: + return 0.0 + delta = end - start + return max(delta.total_seconds(), 0.0) + + +@register +class WebGLSphereRenderer(InteractiveRenderer): + slug = "webgl-sphere" + description = "Three.js based sphere renderer that emits a standalone bundle." 
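As an aside (not part of the patch), a hedged sketch of how the @register decorator and slug tie this class to the registry module above, assuming the InteractiveRenderer base simply stores the keyword options it receives; the texture path is a placeholder and must exist locally:

```python
# Illustrative sketch only, not part of the patch. Importing webgl_sphere runs
# the @register decorator so the "webgl-sphere" slug is resolvable.
from pathlib import Path

from zyra.visualization.renderers import registry
from zyra.visualization.renderers import webgl_sphere  # noqa: F401

renderer = registry.create(
    "webgl-sphere",
    texture="_work/earth_vegetation.jpg",  # placeholder path; must exist locally
    title="Example Globe",
    probe_enabled=True,
)
bundle = renderer.build(output_dir=Path("_work/globe_bundle"))
print(bundle.index_html)                        # <output>/index.html
print([asset.name for asset in bundle.assets])  # sphere.js, config.json, staged texture
```

Looking renderers up by slug keeps the CLI's --target strings (webgl-sphere, cesium-globe) decoupled from the concrete classes behind them.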
+ + def __init__(self, **options: object) -> None: + super().__init__(**options) + self._video_entries: list[dict[str, object]] | None = None + + def build(self, *, output_dir: Path) -> InteractiveBundle: + output_dir = Path(output_dir) + output_dir.mkdir(parents=True, exist_ok=True) + + assets_dir = output_dir / "assets" + assets_dir.mkdir(parents=True, exist_ok=True) + + index_html = output_dir / "index.html" + script_path = assets_dir / "sphere.js" + config_path = assets_dir / "config.json" + + asset_overrides, asset_files = self._stage_assets(assets_dir) + + config = self._sanitized_config(overrides=asset_overrides) + config_json = json.dumps(config, indent=2) + config_path.write_text(config_json + "\n", encoding="utf-8") + + index_html.write_text(self._render_index_html(config), encoding="utf-8") + script_path.write_text(self._render_script(), encoding="utf-8") + + return InteractiveBundle( + output_dir=output_dir, + index_html=index_html, + assets=(script_path, config_path, *asset_files), + ) + + def _stage_assets( + self, assets_dir: Path + ) -> tuple[dict[str, object], tuple[Path, ...]]: + """Copy optional assets (textures, legends) into bundle.""" + + staged: list[Path] = [] + overrides: dict[str, object] = {} + + textures_dir = assets_dir / "textures" + data_dir = assets_dir / "data" + legends_dir = assets_dir / "legends" + texture = self._options.get("texture") + if texture: + if _is_remote_ref(texture): + overrides["texture"] = _normalize_remote_ref(str(texture)) + else: + staged.append( + self._copy_asset(Path(texture), textures_dir, overrides, "texture") + ) + + video_meta = self._maybe_generate_video_frames(assets_dir) + + frame_entries = self._collect_frames() + if frame_entries: + frames_dir = textures_dir + frames_dir.mkdir(parents=True, exist_ok=True) + staged_paths = [] + manifest: list[dict[str, object]] = [] + for entry in frame_entries: + raw_path = str(entry["path"]) + timestamp = entry.get("timestamp") + display_ts = entry.get("display_timestamp") + metadata = ( + entry.get("metadata") + if isinstance(entry.get("metadata"), dict) + else None + ) + label = entry.get("label") + if _is_remote_ref(raw_path): + manifest_entry: dict[str, object] = { + "path": _normalize_remote_ref(raw_path) + } + if timestamp: + manifest_entry["timestamp"] = timestamp + if display_ts: + manifest_entry["display_timestamp"] = display_ts + if label: + manifest_entry["label"] = label + if metadata: + manifest_entry["metadata"] = metadata + manifest.append(manifest_entry) + continue + src = Path(raw_path).expanduser() + if not src.is_file(): + msg = f"Frame file not found: {src}" + raise FileNotFoundError(msg) + dest = frames_dir / src.name + if src.resolve() != dest.resolve(): + dest.write_bytes(src.read_bytes()) + staged_paths.append(dest) + manifest_entry = {"path": f"assets/textures/{src.name}"} + if timestamp: + manifest_entry["timestamp"] = timestamp + if display_ts: + manifest_entry["display_timestamp"] = display_ts + if label: + manifest_entry["label"] = label + if metadata: + manifest_entry["metadata"] = metadata + manifest.append(manifest_entry) + overrides["frames"] = manifest + overrides.setdefault("texture", manifest[0]["path"]) + staged.extend(staged_paths) + + if video_meta: + overrides.update(video_meta) + + gradients_dir = assets_dir / "gradients" + probe_gradient = self._options.get("probe_gradient") + if probe_gradient: + if _is_remote_ref(probe_gradient): + overrides["probe_gradient"] = _normalize_remote_ref(str(probe_gradient)) + else: + staged.append( + self._copy_asset( 
+ Path(probe_gradient), gradients_dir, overrides, "probe_gradient" + ) + ) + + probe_lut = self._options.get("probe_lut") + if probe_lut: + if _is_remote_ref(probe_lut): + overrides["probe_lut"] = _normalize_remote_ref(str(probe_lut)) + else: + staged.append( + self._copy_asset( + Path(probe_lut), gradients_dir, overrides, "probe_lut" + ) + ) + + probe_data = self._options.get("probe_data") + if probe_data: + if _is_remote_ref(probe_data): + overrides["probe_data"] = _normalize_remote_ref(str(probe_data)) + else: + converted = self._try_convert_probe_dataset(data_dir, Path(probe_data)) + if converted is not None: + dest_path, meta = converted + rel_path = Path("assets") / "data" / dest_path.name + overrides["probe_data"] = str(rel_path) + staged.append(dest_path) + if ( + meta.get("units") + and "probe_units" not in overrides + and "probe_units" not in self._options + ): + overrides["probe_units"] = meta["units"] + else: + staged.append( + self._copy_asset( + Path(probe_data), data_dir, overrides, "probe_data" + ) + ) + + legend = self._options.get("legend") + if legend: + if _is_remote_ref(legend): + overrides["legend"] = _normalize_remote_ref(str(legend)) + else: + staged.append( + self._copy_asset(Path(legend), legends_dir, overrides, "legend") + ) + + shared_gradients = self._options.get("shared_gradients") + if isinstance(shared_gradients, dict) and shared_gradients: + shared_dir = gradients_dir / "shared" + shared_overrides: dict[str, str] = {} + for name, raw_value in shared_gradients.items(): + key = str(name).strip() + if not key: + continue + if _is_remote_ref(raw_value): + shared_overrides[key] = _normalize_remote_ref(str(raw_value)) + continue + src = Path(str(raw_value)).expanduser() + if not src.is_file(): + msg = f"Shared gradient '{key}' file not found: {src}" + raise FileNotFoundError(msg) + shared_dir.mkdir(parents=True, exist_ok=True) + dest = shared_dir / src.name + if src.resolve() != dest.resolve(): + dest.write_bytes(src.read_bytes()) + staged.append(dest) + shared_overrides[key] = f"assets/gradients/shared/{dest.name}" + if shared_overrides: + overrides["shared_gradients"] = shared_overrides + + return overrides, tuple(staged) + + def _maybe_generate_video_frames(self, assets_dir: Path) -> dict[str, object]: + video_source = self._options.get("video_source") + if not video_source: + self._video_entries = None + return {} + entries, meta = self._extract_video_frames(assets_dir) + self._video_entries = entries + for key, value in (meta or {}).items(): + if value is None: + continue + if key == "frame_duration" and self._options.get("frame_duration") not in ( + None, + 0, + "", + ): + continue + self._options[key] = value + return meta + + def _load_timeline_overrides( + self, + ) -> tuple[datetime | None, float | None, str | None]: + """Return optional (start, period, source) overrides for frame timelines.""" + + start_override: datetime | None = None + period_override: float | None = None + source: str | None = None + + frames_meta_path = self._options.get("frames_meta") + if frames_meta_path: + try: + meta_path = Path(frames_meta_path).expanduser() + data = json.loads(meta_path.read_text(encoding="utf-8")) + except Exception as exc: # pragma: no cover - depends on filesystem + LOGGER.warning( + "Failed to load frames metadata '%s': %s", frames_meta_path, exc + ) + else: + raw_start = data.get("start_datetime") or data.get("start") + raw_period = ( + data.get("period_seconds") + or data.get("cadence_seconds") + or data.get("interval_seconds") + ) + if raw_start: + 
try: + start_override = _coerce_utc(parse_datetime(str(raw_start))) + except Exception as exc: # pragma: no cover - defensive + LOGGER.warning( + "Invalid start_datetime in frames metadata '%s': %s", + frames_meta_path, + exc, + ) + if raw_period not in (None, ""): + try: + period_override = float(raw_period) + except (TypeError, ValueError) as exc: # pragma: no cover + LOGGER.warning( + "Invalid period_seconds in frames metadata '%s': %s", + frames_meta_path, + exc, + ) + source = "frames-meta" + + period_option = self._options.get("period_seconds") + if period_option not in (None, ""): + try: + period_override = float(period_option) + source = "period-seconds" + except ( + TypeError, + ValueError, + ) as exc: # pragma: no cover - CLI already types + LOGGER.warning( + "Ignoring invalid period_seconds override '%s': %s", + period_option, + exc, + ) + return start_override, period_override, source + + def _apply_timeline_overrides( + self, + entries: list[dict[str, object]], + *, + default_start: datetime, + overrides: tuple[datetime | None, float | None, str | None], + ) -> dict[str, object]: + """Adjust frame timestamps/metadata based on cadence overrides.""" + + start_override, period_override, source = overrides + start_override = _coerce_utc(start_override) + default_start = _coerce_utc(default_start) + updates: dict[str, object] = {} + + if start_override is None and period_override in (None, 0): + return updates + + base_start = start_override or default_start + base_start = _coerce_utc(base_start) + + if ( + period_override is not None + and period_override > 0 + and base_start is not None + and entries + ): + step = timedelta(seconds=float(period_override)) + for idx, entry in enumerate(entries): + ts = base_start + step * idx + entry["timestamp"] = format_datetime(ts) + entry["display_timestamp"] = _format_display_timestamp(ts) + meta = entry.setdefault("metadata", {}) + meta["elapsed_seconds"] = float((ts - base_start).total_seconds()) + updates["video_start"] = format_datetime(base_start) + updates["video_end"] = format_datetime( + base_start + step * max(len(entries) - 1, 0) + ) + updates["timeline_period_seconds"] = step.total_seconds() + if source: + updates["timeline_source"] = source + return updates + + if start_override is not None and entries: + first_raw = entries[0].get("timestamp") + last_raw = entries[-1].get("timestamp") + try: + first_dt = ( + _coerce_utc(parse_datetime(str(first_raw))) + if first_raw + else default_start + ) + except Exception: + first_dt = default_start + try: + last_dt = ( + _coerce_utc(parse_datetime(str(last_raw))) + if last_raw + else default_start + ) + except Exception: + last_dt = default_start + + if first_dt is None or last_dt is None: + return updates + + delta = start_override - first_dt + for entry in entries: + raw = entry.get("timestamp") + if not raw: + continue + try: + ts = _coerce_utc(parse_datetime(str(raw))) + delta + except Exception: + continue + entry["timestamp"] = format_datetime(ts) + entry["display_timestamp"] = _format_display_timestamp(ts) + meta = entry.setdefault("metadata", {}) + meta["elapsed_seconds"] = float((ts - start_override).total_seconds()) + updates["video_start"] = format_datetime(start_override) + updates["video_end"] = format_datetime(last_dt + delta) + if source and "timeline_source" not in updates: + updates["timeline_source"] = source + return updates + + def _extract_video_frames( + self, assets_dir: Path + ) -> tuple[list[dict[str, object]], dict[str, object]]: + video_source = 
str(self._options.get("video_source")) + credentials = self._options.get("credentials") or {} + frame_cache_option = self._options.get("frame_cache") + if frame_cache_option: + frame_cache = Path(frame_cache_option) + if not frame_cache.is_absolute(): + frame_cache = Path.cwd() / frame_cache + else: + frame_cache = assets_dir / "_video_cache" + frame_cache.mkdir(parents=True, exist_ok=True) + + try: + video_url = resolve_video_source(video_source, credentials) + except Exception as exc: # pragma: no cover - Vimeo/network dependent + raise VideoExtractionError(str(exc)) from exc + + fps = float(self._options.get("video_fps") or 1.0) + metadata = probe_video_metadata(video_url) + + overrides = self._load_timeline_overrides() + start_override, period_override, source = overrides + + start_value = self._options.get("video_start") + if start_value: + start_dt = parse_datetime(str(start_value)) + elif start_override: + start_dt = start_override + self._options["video_start"] = format_datetime(start_dt) + else: + raise VideoExtractionError( + "--video-start is required when extracting frames from a video source." + ) + + end_value = self._options.get("video_end") + frames = extract_frames(video_url, output_dir=frame_cache, fps=fps) + + if end_value: + end_dt = parse_datetime(str(end_value)) + else: + end_dt = compute_end_time(start_dt, len(frames), fps) + + entries = compute_frame_timestamps( + frames=frames, + start_time=start_dt, + fps=fps, + ) + + for entry in entries: + entry_metadata = entry.setdefault("metadata", {}) + entry_metadata["elapsed_seconds"] = _elapsed_seconds( + start_dt, entry["timestamp"] + ) + + frame_duration = 1.0 / fps if fps > 0 else None + meta_payload = { + "video_start": format_datetime(start_dt), + "video_end": format_datetime(end_dt), + "video_duration_seconds": metadata.duration_seconds, + "video_fps": fps, + "frame_duration": frame_duration, + } + timeline_updates = self._apply_timeline_overrides( + entries, default_start=start_dt, overrides=overrides + ) + if timeline_updates: + meta_payload.update(timeline_updates) + return entries, meta_payload + + def _collect_frames(self) -> list[dict[str, object]]: + pattern = self._options.get("texture_pattern") + frame_list = self._options.get("frame_list") + frame_date_format = self._options.get("frame_date_format") or self._options.get( + "date_format" + ) + time_key = self._options.get("time_key") + time_format = self._options.get("time_format") + + parse_formats: list[str] = [] + if frame_date_format: + parse_formats.append(str(frame_date_format)) + if time_format and time_format not in parse_formats: + parse_formats.append(str(time_format)) + + try: + filename_date_manager: DateManager | None = ( + DateManager([frame_date_format]) + if frame_date_format + else DateManager([]) + ) + except Exception: + filename_date_manager = None + + entries: list[dict[str, object]] = [] + if self._video_entries: + entries.extend(self._video_entries) + if pattern: + base = Path(pattern) + for path in sorted(base.parent.glob(base.name)): + payload: dict[str, object] = {"path": str(path)} + timestamp = self._infer_frame_timestamp( + path.name, filename_date_manager + ) + if timestamp: + payload["timestamp"] = timestamp + entries.append(payload) + + if frame_list: + frame_file = Path(frame_list) + if not frame_file.is_file(): + msg = f"Frame list file not found: {frame_file}" + raise FileNotFoundError(msg) + + text = frame_file.read_text(encoding="utf-8") + manifest_entries = load_manifest_entries(text) + if manifest_entries is not None: 
+ for item in manifest_entries: + path_value = item.get("path") + if isinstance(path_value, str) and not _is_remote_ref(path_value): + path_obj = Path(path_value) + if not path_obj.is_absolute(): + path_obj = (frame_file.parent / path_obj).resolve() + item["path"] = str(path_obj) + entries.extend(manifest_entries) + else: + base_dir = frame_file.parent + for line in text.splitlines(): + line = line.strip() + if not line or line.startswith("#"): + continue + parts = line.split() + raw_path = parts[0] + if not _is_remote_ref(raw_path): + path_obj = Path(raw_path) + if not path_obj.is_absolute(): + raw_path = str((base_dir / path_obj).resolve()) + payload: dict[str, object] = {"path": raw_path} + if len(parts) > 1: + payload["timestamp"] = " ".join(parts[1:]) + elif filename_date_manager: + timestamp = self._infer_frame_timestamp( + parts[0], filename_date_manager + ) + if timestamp: + payload["timestamp"] = timestamp + entries.append(payload) + + finalized = finalize_frame_entries( + entries, + time_key=time_key, + parse_formats=parse_formats, + display_format=time_format, + ) + + seen: set[tuple[str, str | None]] = set() + unique_entries: list[dict[str, object]] = [] + for entry in finalized: + key = (entry["path"], entry.get("timestamp")) + if key in seen: + continue + seen.add(key) + unique_entries.append(entry) + return unique_entries + + @staticmethod + def _infer_frame_timestamp( + filename: str, date_manager: DateManager | None + ) -> str | None: + if not date_manager: + return None + try: + extracted = date_manager.extract_date_time(filename) + except Exception: + return None + return extracted + + def _try_convert_probe_dataset( + self, data_dir: Path, source: Path + ) -> tuple[Path, dict[str, object]] | None: + try: + dest, metadata = prepare_probe_dataset_file( + source, + data_dir, + variable=self._options.get("probe_var"), + ) + except ProbeDatasetError: + return None + return dest, metadata + + def _copy_asset( + self, + source: Path, + target_dir: Path, + overrides: dict[str, object], + key: str, + ) -> Path: + source = source.expanduser() + if not source.is_file(): + msg = f"{key.capitalize()} file not found: {source}" + raise FileNotFoundError(msg) + target_dir.mkdir(parents=True, exist_ok=True) + dest = target_dir / source.name + if source.resolve() != dest.resolve(): + dest.write_bytes(source.read_bytes()) + rel_dir_map = { + "probe_gradient": "gradients", + "probe_lut": "gradients", + "probe_data": "data", + "legend": "legends", + } + rel_dir = rel_dir_map.get(key, "textures") + overrides[key] = f"assets/{rel_dir}/{source.name}" + return dest + + def _sanitized_config( + self, *, overrides: dict[str, object] | None = None + ) -> dict[str, object]: + """Return config suitable for embedding (no sensitive values).""" + + secrets = { + "credentials", + "auth", + "credential_file", + "texture", + "texture_pattern", + "frame_list", + "frame_cache", + "probe_gradient", + "probe_lut", + "probe_data", + "probe_var", + "legend", + "shared_gradients", + "video_source", + } + filtered = { + key: value + for key, value in self._options.items() + if key not in secrets and value is not None + } + filtered.setdefault("width", None) + filtered.setdefault("height", None) + filtered.setdefault("animate", "none") + filtered.setdefault("probe_enabled", True) + filtered.setdefault("show_controls", True) + if overrides: + filtered.update(overrides) + return filtered + + def _render_index_html(self, config: dict[str, object]) -> str: + """Return the HTML entry point for the bundle.""" + + 
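Since the overlay markup below was stripped by the diff viewer, here is a hedged sketch (as a Python literal, values illustrative) of the assets/config.json payload that _sanitized_config() and _stage_assets() end up embedding for the bundled assets/sphere.js:

```python
# Illustrative sketch only, not part of the patch: approximate shape of the
# assets/config.json embedded as window.ZYRA_GLOBE_CONFIG for assets/sphere.js.
example_config = {
    "title": "Weekly Drought Risk",
    "animate": "time",           # "time" enables automatic frame playback
    "auto_rotate": True,
    "probe_enabled": True,
    "show_controls": True,
    "texture": "assets/textures/DroughtRisk_Weekly_20241205.png",
    "frames": [
        {
            "path": "assets/textures/DroughtRisk_Weekly_20241205.png",
            "timestamp": "2024-12-05T00:00:00Z",
            "display_timestamp": "2024-12-05 00:00:00 UTC",
        },
        # ...one entry per frame staged by _stage_assets()...
    ],
}
```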
probe_section = "" + if config.get("probe_enabled", True): + probe_section = indent( + dedent( + """ +
[markup stripped in this diff view: probe overlay panel ("zyra-probe") with rows labeled Probe, Latitude, Longitude, Frame, Color, Value, Units, Gradient, and LUT, exposing the data-probe-* hooks read by assets/sphere.js]
+ """ + ).strip(), + " ", + ) + + config_json = json.dumps(config, indent=2) + + meta_section = "" + if config.get("debug_overlay"): + meta_section = indent( + dedent( + f""" +
[markup stripped in this diff view: debug overlay badge showing "Zyra WebGL Sphere (beta)" and "Renderer target: {self.slug}"]
+ """ + ).strip(), + " ", + ) + + title_section = "" + if config.get("title"): + title_section = indent( + dedent( + f""" +
{config["title"]}
+ """ + ).strip(), + " ", + ) + + description_section = "" + if config.get("description"): + description_section = indent( + dedent( + f""" +
{config["description"]}
+ """ + ).strip(), + " ", + ) + + legend_section = "" + if config.get("legend"): + legend_section = indent( + dedent( + f""" +
[markup stripped in this diff view: legend image element labeled "Legend"]
+ """ + ).strip(), + " ", + ) + + frame_info_section = indent( + dedent( + """ +
[markup stripped in this diff view: frame info panel (data-frame-info) with a "Timestamp" label and a data-frame-timestamp readout]
+ """ + ).strip(), + " ", + ) + + controls_section = indent( + dedent( + """ +
[markup stripped in this diff view: playback controls container (data-controls) with buttons bound to data-controls-prev, data-controls-play, and data-controls-next]
+ """ + ).strip(), + " ", + ) + + overlay_sections = "".join( + section + for section in ( + title_section, + description_section, + legend_section, + frame_info_section, + controls_section, + meta_section, + probe_section, + ) + if section + ) + + if overlay_sections: + overlay_block = dedent( + f""" +
[markup stripped in this diff view: overlay wrapper element interpolating {overlay_sections}]
+ """ + ).strip("\n") + else: + overlay_block = "" + + return ( + dedent( + f""" + + + + + + Zyra WebGL Globe + + + + +{overlay_block} + + + + + """ + ).strip() + + "\n" + ) + + def _render_script(self) -> str: + """Return the JavaScript module that boots the globe.""" + + return ( + dedent( + """ +import * as THREE from "https://cdn.jsdelivr.net/npm/three@0.161.0/build/three.module.js"; + +(async function bootstrap() { + const config = window.ZYRA_GLOBE_CONFIG || {}; + const canvas = document.getElementById("zyra-globe"); + if (!canvas) { + console.warn("Zyra globe canvas element not found"); + return; + } + canvas.style.touchAction = "none"; + + if (config.probe_enabled) { + canvas.style.cursor = "crosshair"; + } else { + canvas.style.cursor = "grab"; + } + + const probeContainer = document.getElementById("zyra-probe"); + const probeLatEl = document.querySelector("[data-probe-lat]"); + const probeLonEl = document.querySelector("[data-probe-lon]"); + const probeFrameEl = document.querySelector("[data-probe-frame]"); + const probeHexEl = document.querySelector("[data-probe-hex]"); + const probeSwatchEl = document.querySelector("[data-probe-swatch]"); + const probeGradientEl = document.querySelector("[data-probe-gradient]"); + const probeLutEl = document.querySelector("[data-probe-lut]"); + const probeValueEl = document.querySelector("[data-probe-value]"); + const probeUnitsEl = document.querySelector("[data-probe-units]"); + const controlsContainer = document.querySelector("[data-controls]"); + const playButton = document.querySelector("[data-controls-play]"); + const prevButton = document.querySelector("[data-controls-prev]"); + const nextButton = document.querySelector("[data-controls-next]"); + const frameTimestampEl = document.querySelector("[data-frame-timestamp]"); + const frameInfoContainer = document.querySelector("[data-frame-info]"); + + if (!config.probe_enabled && probeContainer) { + probeContainer.style.display = "none"; + } + + const renderer = new THREE.WebGLRenderer({ canvas, antialias: true }); + renderer.setPixelRatio(window.devicePixelRatio || 1); + + const scene = new THREE.Scene(); + scene.background = new THREE.Color(0x050608); + + const camera = new THREE.PerspectiveCamera( + 45, + window.innerWidth / window.innerHeight, + 0.1, + 1000, + ); + const frames = Array.isArray(config.frames) ? config.frames : null; + const totalFrames = frames ? frames.length : 0; + const showControls = config.show_controls !== false; + const controlsEnabled = Boolean(showControls && totalFrames > 1); + const useLighting = Boolean(config.lighting); + const autoRotate = Boolean(config.auto_rotate); + const rotationSpeed = Number(config.rotation_speed || 0.005); + const zoomSpeed = Number(config.zoom_speed || 0.003); + let cameraDistance = Number(config.camera_distance) || 3; + const minCameraDistance = Number(config.min_distance || 1.5); + const maxCameraDistance = Number(config.max_distance || 12); + const rotationClamp = Math.PI / 2 - 0.01; + const pointerState = new Map(); + let lastPinchDistance = null; + let isPointerDragging = false; + let isPlaying = Boolean(config.animate === "time" && totalFrames > 1); + const lookTarget = new THREE.Vector3(0, 0, 0); + + if (controlsContainer) { + controlsContainer.style.display = controlsEnabled ? 
"flex" : "none"; + } + if (frameInfoContainer && !totalFrames) { + frameInfoContainer.style.display = "none"; + } + + function applyCameraDistance() { + cameraDistance = Math.min( + maxCameraDistance, + Math.max(minCameraDistance, cameraDistance), + ); + camera.position.set(0, 0, cameraDistance); + camera.lookAt(lookTarget); + } + + applyCameraDistance(); + + const samplerCache = new Map(); + const textureCache = new Map(); + + async function loadImageSampler(url) { + try { + const image = new Image(); + image.src = url; + if (!url.startsWith("data:")) { + image.crossOrigin = "anonymous"; + } + await image.decode(); + const canvasSampler = document.createElement("canvas"); + canvasSampler.width = image.width; + canvasSampler.height = image.height; + const ctx = canvasSampler.getContext("2d"); + ctx.drawImage(image, 0, 0); + const imageData = ctx.getImageData(0, 0, canvasSampler.width, canvasSampler.height); + return { + width: canvasSampler.width, + height: canvasSampler.height, + data: imageData.data, + }; + } catch (error) { + console.warn("Failed to load sampler image", url, error); + return null; + } + } + + async function fetchText(url) { + const response = await fetch(url); + if (!response.ok) { + throw new Error(`Fetch failed: ${response.status}`); + } + return response.text(); + } + + async function loadJson(url) { + try { + const text = await fetchText(url); + return JSON.parse(text); + } catch (error) { + console.warn("Failed to load JSON", url, error); + return null; + } + } + + function parseCsvDataset(text) { + const lines = text + .split(/\\r?\\n/) + .map((line) => line.trim()) + .filter(Boolean); + if (!lines.length) { + return null; + } + const headers = lines[0] + .split(",") + .map((h) => h.trim().toLowerCase()); + const latIdx = headers.findIndex((h) => h === "lat" || h === "latitude"); + const lonIdx = headers.findIndex((h) => h === "lon" || h === "lng" || h === "long" || h === "longitude"); + const valueIdx = headers.findIndex((h) => h === "value" || h === "val" || h === "data" || h === "measurement"); + const labelIdx = headers.findIndex((h) => + h === "label" || + h === "popup" || + h === "name" || + h === "title" || + h === "text" || + h === "description" + ); + const unitsIdx = headers.findIndex((h) => h === "units" || h === "unit"); + if (latIdx === -1 || lonIdx === -1 || (valueIdx === -1 && labelIdx === -1)) { + return null; + } + const points = []; + for (let i = 1; i < lines.length; i += 1) { + const parts = lines[i].split(",").map((p) => p.trim()); + if (latIdx >= parts.length || lonIdx >= parts.length) { + continue; + } + const lat = Number(parts[latIdx]); + const lon = Number(parts[lonIdx]); + if (!Number.isFinite(lat) || !Number.isFinite(lon)) { + continue; + } + const rawValue = + valueIdx !== -1 && valueIdx < parts.length ? parts[valueIdx] : null; + const rawLabel = + labelIdx !== -1 && labelIdx < parts.length ? parts[labelIdx] : null; + let value = null; + let hasValue = false; + if (rawValue != null && rawValue !== "") { + const num = Number(rawValue); + if (Number.isFinite(num)) { + value = num; + hasValue = true; + } else { + const str = String(rawValue).trim(); + if (str) { + value = str; + hasValue = true; + } + } + } + if (!hasValue && rawLabel) { + const labelStr = String(rawLabel).trim(); + if (labelStr) { + value = labelStr; + hasValue = true; + } + } + if (!hasValue) { + continue; + } + const entry = { lat, lon, value }; + const label = rawLabel && String(rawLabel).trim() ? 
String(rawLabel).trim() : null; + if (label && String(value) !== label) { + entry.label = label; + } + if (unitsIdx !== -1 && unitsIdx < parts.length) { + const units = String(parts[unitsIdx]).trim(); + if (units) { + entry.units = units; + } + } + points.push(entry); + } + return points.length ? { points } : null; + } + + function normalizeProbeArray(raw) { + if (!Array.isArray(raw)) { + return null; + } + const points = []; + for (const entry of raw) { + if (typeof entry !== "object" || entry == null) { + continue; + } + const lat = Number(entry.lat ?? entry.latitude); + const lon = Number(entry.lon ?? entry.lng ?? entry.long ?? entry.longitude); + if (!Number.isFinite(lat) || !Number.isFinite(lon)) { + continue; + } + const rawValue = + entry.value ?? + entry.val ?? + entry.data ?? + entry.measurement ?? + entry.popup ?? + entry.label ?? + entry.name ?? + entry.title ?? + entry.text ?? + null; + const rawLabel = + entry.label ?? entry.popup ?? entry.name ?? entry.title ?? entry.text ?? null; + + let value = null; + let hasValue = false; + if (rawValue !== undefined && rawValue !== null) { + if (typeof rawValue === "number") { + if (Number.isFinite(rawValue)) { + value = rawValue; + hasValue = true; + } + } else if (typeof rawValue === "boolean") { + value = rawValue ? 1 : 0; + hasValue = true; + } else { + const str = String(rawValue).trim(); + if (str) { + const num = Number(str); + if (Number.isFinite(num)) { + value = num; + } else { + value = str; + } + hasValue = true; + } + } + } + if (!hasValue && rawLabel != null) { + const labelStr = String(rawLabel).trim(); + if (labelStr) { + value = labelStr; + hasValue = true; + } + } + if (!hasValue) { + continue; + } + const point = { lat, lon, value }; + const label = rawLabel && String(rawLabel).trim() ? String(rawLabel).trim() : null; + if (label && String(value) !== label) { + point.label = label; + } + const units = entry.units ?? entry.unit ?? null; + if (units != null) { + const unitsStr = String(units).trim(); + if (unitsStr) { + point.units = unitsStr; + } + } + points.push(point); + } + return points.length ? { points } : null; + } + + async function loadProbeDataset(url) { + try { + const text = await fetchText(url); + try { + const parsed = JSON.parse(text); + const normalized = normalizeProbeArray(parsed); + if (normalized) { + return normalized; + } + } catch (jsonError) { + // fall through to CSV parser + } + const csv = parseCsvDataset(text); + if (csv) { + return csv; + } + console.warn("Unsupported probe dataset format", url); + } catch (error) { + console.warn("Failed to load probe dataset", url, error); + } + return null; + } + + function nearestProbe(lat, lon, dataset) { + if (!dataset || !dataset.points || !dataset.points.length) { + return null; + } + const latRad = THREE.MathUtils.degToRad(lat); + const lonRad = THREE.MathUtils.degToRad(lon); + let best = null; + let bestScore = Infinity; + for (const point of dataset.points) { + const pLat = THREE.MathUtils.degToRad(point.lat); + const pLon = THREE.MathUtils.degToRad(point.lon); + const dLat = latRad - pLat; + const dLon = lonRad - pLon; + const sinLat = Math.sin(dLat / 2); + const sinLon = Math.sin(dLon / 2); + const a = + sinLat * sinLat + + Math.cos(latRad) * Math.cos(pLat) * sinLon * sinLon; + const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); + if (c < bestScore) { + bestScore = c; + best = point; + } + } + return best; + } + + const gradientSampler = config.probe_gradient + ? 
await loadImageSampler(config.probe_gradient) + : null; + const lutTable = config.probe_lut ? await loadJson(config.probe_lut) : null; + const probeDataset = config.probe_data + ? await loadProbeDataset(config.probe_data) + : null; + + const loader = new THREE.TextureLoader(); + + const sphereGeometry = new THREE.SphereGeometry(1, 64, 64); + const baseMaterialProps = { + color: 0xffffff, + wireframe: !config.texture && !(frames && frames.length), + }; + const sphereMaterial = useLighting + ? new THREE.MeshStandardMaterial(baseMaterialProps) + : new THREE.MeshBasicMaterial(baseMaterialProps); + + let currentTextureUri = null; + let currentFrameMeta = totalFrames ? frames[0] : null; + let currentFrameIndex = 0; + + function prepareSampler(texture, uri) { + if (!texture || !texture.image) { + return; + } + const image = texture.image; + const width = image.width; + const height = image.height; + if (!width || !height) { + return; + } + const canvasSampler = document.createElement("canvas"); + canvasSampler.width = width; + canvasSampler.height = height; + const ctx = canvasSampler.getContext("2d"); + ctx.drawImage(image, 0, 0); + const imageData = ctx.getImageData(0, 0, width, height); + samplerCache.set(uri, { + width, + height, + data: imageData.data, + }); + } + + function ensureSampler(uri) { + if (!uri || samplerCache.has(uri)) { + return; + } + const cachedTexture = textureCache.get(uri); + if (cachedTexture) { + prepareSampler(cachedTexture, uri); + } + } + + function sampleTexture(uv) { + const uri = currentTextureUri; + if (!uri) { + return null; + } + ensureSampler(uri); + const sampler = samplerCache.get(uri); + if (!sampler) { + return null; + } + const { width, height, data } = sampler; + const x = Math.min(width - 1, Math.max(0, Math.round(uv.x * (width - 1)))); + const y = Math.min(height - 1, Math.max(0, Math.round((1 - uv.y) * (height - 1)))); + const idx = (y * width + x) * 4; + return { + r: data[idx], + g: data[idx + 1], + b: data[idx + 2], + a: data[idx + 3], + }; + } + + function formatHex(color) { + const toHex = (value) => value.toString(16).padStart(2, "0"); + return `#${toHex(color.r)}${toHex(color.g)}${toHex(color.b)}`.toUpperCase(); + } + + function formatValue(value) { + if (value == null) { + return "—"; + } + if (typeof value === "string") { + return value; + } + if (typeof value === "number") { + if (!Number.isFinite(value)) { + return "—"; + } + if (Math.abs(value) >= 1000 || Math.abs(value) < 0.01) { + return value.toExponential(2); + } + return value.toFixed(2); + } + if (typeof value === "boolean") { + return value ? "true" : "false"; + } + return String(value); + } + + function mapGradient(color) { + if (!gradientSampler) { + return null; + } + const { width, height, data } = gradientSampler; + if (!width || !height) { + return null; + } + const row = Math.floor(height / 2); + let bestIdx = 0; + let bestScore = Infinity; + for (let x = 0; x < width; x += 1) { + const idx = (row * width + x) * 4; + const dr = data[idx] - color.r; + const dg = data[idx + 1] - color.g; + const db = data[idx + 2] - color.b; + const score = dr * dr + dg * dg + db * db; + if (score < bestScore) { + bestScore = score; + bestIdx = x; + } + } + return width > 1 ? 
bestIdx / (width - 1) : 0; + } + + function lookupLut(hex) { + if (!lutTable || !hex) { + return null; + } + if (Array.isArray(lutTable)) { + const entry = lutTable.find((item) => { + if (typeof item !== "object" || !item) { + return false; + } + const colorKey = item.color || item.hex || item.colour; + return typeof colorKey === "string" && colorKey.toUpperCase() === hex; + }); + if (entry && Object.prototype.hasOwnProperty.call(entry, "value")) { + return entry.value; + } + if (entry && Object.prototype.hasOwnProperty.call(entry, "label")) { + return entry.label; + } + return entry ?? null; + } + if (typeof lutTable === "object") { + return ( + lutTable[hex] ?? + lutTable[hex.toLowerCase?.() || ""] ?? + null + ); + } + return null; + } + + function formatIsoTimestamp(value) { + if (!value) { + return "—"; + } + try { + const date = new Date(value); + if (!Number.isNaN(date.getTime())) { + const iso = date.toISOString(); + if ( + date.getUTCHours() === 0 && + date.getUTCMinutes() === 0 && + date.getUTCSeconds() === 0 + ) { + return iso.slice(0, 10); + } + return iso.replace("T", " ").replace("Z", " UTC"); + } + } catch (formatError) { + // ignore + } + return String(value); + } + + function frameTimestampLabel(meta) { + if (!meta) { + return "—"; + } + if ( + typeof meta.display_timestamp === "string" && + meta.display_timestamp.trim() + ) { + return meta.display_timestamp.trim(); + } + if (typeof meta.label === "string" && meta.label.trim()) { + return meta.label.trim(); + } + if (typeof meta.timestamp === "string" && meta.timestamp.trim()) { + return formatIsoTimestamp(meta.timestamp.trim()); + } + if (meta.timestamp != null) { + return formatIsoTimestamp(meta.timestamp); + } + return "—"; + } + + function updateControlsUI() { + if (!controlsContainer || !controlsEnabled) { + return; + } + if (playButton) { + playButton.textContent = isPlaying ? 
"Pause" : "Play"; + } + if (prevButton) { + prevButton.disabled = totalFrames <= 1; + } + if (nextButton) { + nextButton.disabled = totalFrames <= 1; + } + } + + function updateFrameHUD(meta) { + if (frameTimestampEl) { + frameTimestampEl.textContent = frameTimestampLabel(meta); + } + } + + function setPlaying(state) { + isPlaying = Boolean(state); + updateControlsUI(); + } + + function setFrame(index) { + if (!frames || !frames.length) { + return; + } + const total = frames.length; + const nextIndex = ((index % total) + total) % total; + currentFrameIndex = nextIndex; + frameTime = 0; + const meta = frames[currentFrameIndex]; + resolveTexture(meta.path, meta); + updateFrameHUD(meta); + updateControlsUI(); + } + + function stepFrame(delta) { + setFrame(currentFrameIndex + delta); + } + + if (controlsContainer && controlsEnabled) { + if (playButton) { + playButton.addEventListener("click", () => { + setPlaying(!isPlaying); + }); + } + if (nextButton) { + nextButton.addEventListener("click", () => { + setPlaying(false); + stepFrame(1); + }); + } + if (prevButton) { + prevButton.addEventListener("click", () => { + setPlaying(false); + stepFrame(-1); + }); + } + } + + if (controlsEnabled) { + document.addEventListener("keydown", (event) => { + if (event.defaultPrevented) { + return; + } + const tag = (event.target && event.target.tagName) || ""; + if (["INPUT", "TEXTAREA", "SELECT", "BUTTON"].includes(tag)) { + return; + } + if (event.code === "Space") { + event.preventDefault(); + setPlaying(!isPlaying); + } else if (event.code === "ArrowRight") { + event.preventDefault(); + setPlaying(false); + stepFrame(1); + } else if (event.code === "ArrowLeft") { + event.preventDefault(); + setPlaying(false); + stepFrame(-1); + } + }); + } + + function applyTexture(uri, texture, frameMeta) { + texture.colorSpace = THREE.SRGBColorSpace; + textureCache.set(uri, texture); + sphereMaterial.map = texture; + sphereMaterial.wireframe = false; + sphereMaterial.needsUpdate = true; + prepareSampler(texture, uri); + if (frameMeta) { + currentFrameMeta = frameMeta; + } + updateFrameHUD(frameMeta || currentFrameMeta); + updateControlsUI(); + } + + function resolveTexture(uri, frameMeta) { + if (!uri) { + return; + } + currentTextureUri = uri; + currentFrameMeta = frameMeta || currentFrameMeta; + if (textureCache.has(uri)) { + applyTexture(uri, textureCache.get(uri), frameMeta || currentFrameMeta); + return; + } + loader.load( + uri, + (texture) => applyTexture(uri, texture, frameMeta || currentFrameMeta), + undefined, + (error) => console.warn("Failed to load texture", uri, error), + ); + } + + const sphere = new THREE.Mesh(sphereGeometry, sphereMaterial); + const globeGroup = new THREE.Group(); + globeGroup.add(sphere); + scene.add(globeGroup); + + if (useLighting) { + const ambientLight = new THREE.AmbientLight(0xffffff, 0.7); + scene.add(ambientLight); + + const directionalLight = new THREE.DirectionalLight(0xffffff, 0.6); + directionalLight.position.set(5, 5, 5); + scene.add(directionalLight); + } + + function resizeRenderer() { + const widthValue = config.width || window.innerWidth; + const heightValue = config.height || window.innerHeight; + renderer.setSize(widthValue, heightValue, false); + camera.aspect = widthValue / heightValue; + camera.updateProjectionMatrix(); + applyCameraDistance(); + } + + window.addEventListener("resize", resizeRenderer); + resizeRenderer(); + + const initialFrame = totalFrames ? 
frames[0] : null; + if (initialFrame) { + resolveTexture(initialFrame.path, initialFrame); + } else if (config.texture) { + resolveTexture(config.texture, null); + } + updateFrameHUD(initialFrame || currentFrameMeta); + updateControlsUI(); + + const raycaster = new THREE.Raycaster(); + const pointer = new THREE.Vector2(); + function setCursorDragging(active) { + if (config.probe_enabled) { + canvas.style.cursor = active ? "grabbing" : "crosshair"; + } else { + canvas.style.cursor = active ? "grabbing" : "grab"; + } + } + + function onPointerDown(event) { + event.preventDefault(); + pointerState.set(event.pointerId, { + x: event.clientX, + y: event.clientY, + }); + if (pointerState.size === 1) { + isPointerDragging = true; + setCursorDragging(true); + if (canvas.setPointerCapture) { + try { + canvas.setPointerCapture(event.pointerId); + } catch (captureError) { + // ignore + } + } + } else if (pointerState.size === 2) { + const points = Array.from(pointerState.values()); + lastPinchDistance = Math.hypot( + points[0].x - points[1].x, + points[0].y - points[1].y, + ); + } + handlePointer(event); + } + + function onPointerMove(event) { + event.preventDefault(); + const previous = pointerState.get(event.pointerId); + pointerState.set(event.pointerId, { + x: event.clientX, + y: event.clientY, + }); + if (pointerState.size === 1 && previous && isPointerDragging) { + const dx = event.clientX - previous.x; + const dy = event.clientY - previous.y; + globeGroup.rotation.y += dx * rotationSpeed; + globeGroup.rotation.x += dy * rotationSpeed; + globeGroup.rotation.x = Math.min( + rotationClamp, + Math.max(-rotationClamp, globeGroup.rotation.x), + ); + } else if (pointerState.size === 2) { + const points = Array.from(pointerState.values()); + const distance = Math.hypot( + points[0].x - points[1].x, + points[0].y - points[1].y, + ); + if (lastPinchDistance != null) { + const delta = distance - lastPinchDistance; + cameraDistance -= delta * zoomSpeed; + applyCameraDistance(); + } + lastPinchDistance = distance; + } + handlePointer(event); + } + + function releasePointer(event) { + pointerState.delete(event.pointerId); + if (canvas.releasePointerCapture) { + try { + canvas.releasePointerCapture(event.pointerId); + } catch (releaseError) { + // ignore + } + } + if (pointerState.size < 2) { + lastPinchDistance = null; + } + if (!pointerState.size) { + isPointerDragging = false; + setCursorDragging(false); + } + } + + function onPointerUp(event) { + event.preventDefault(); + releasePointer(event); + handlePointer(event); + } + + function onPointerCancel(event) { + releasePointer(event); + clearProbe(); + } + + function onWheel(event) { + event.preventDefault(); + cameraDistance += event.deltaY * zoomSpeed; + applyCameraDistance(); + } + + canvas.addEventListener("pointerdown", onPointerDown, { passive: false }); + canvas.addEventListener("pointermove", onPointerMove, { passive: false }); + canvas.addEventListener("pointerup", onPointerUp, { passive: false }); + canvas.addEventListener("pointercancel", onPointerCancel, { passive: false }); + canvas.addEventListener("pointerleave", (event) => { + releasePointer(event); + clearProbe(); + }); + canvas.addEventListener("wheel", onWheel, { passive: false }); + + function updateProbeDisplay(payload) { + if (!probeLatEl || !probeLonEl) { + return; + } + if (!payload) { + probeLatEl.textContent = "—"; + probeLonEl.textContent = "—"; + if (probeFrameEl) probeFrameEl.textContent = "—"; + if (probeHexEl) probeHexEl.textContent = "—"; + if (probeGradientEl) 
probeGradientEl.textContent = "—"; + if (probeLutEl) probeLutEl.textContent = "—"; + if (probeValueEl) probeValueEl.textContent = "—"; + if (probeUnitsEl) probeUnitsEl.textContent = "—"; + if (probeSwatchEl) { + probeSwatchEl.style.background = "transparent"; + probeSwatchEl.style.borderColor = "rgba(245, 247, 250, 0.65)"; + } + return; + } + probeLatEl.textContent = `${payload.lat.toFixed(2)}°`; + probeLonEl.textContent = `${payload.lon.toFixed(2)}°`; + if (probeFrameEl) { + if (payload.frameLabel) { + probeFrameEl.textContent = payload.frameLabel; + } else if (payload.frameTimestamp) { + probeFrameEl.textContent = formatIsoTimestamp(payload.frameTimestamp); + } else if (payload.frameIndex != null) { + probeFrameEl.textContent = `#${payload.frameIndex + 1}`; + } else { + probeFrameEl.textContent = "—"; + } + } + if (probeHexEl) { + probeHexEl.textContent = payload.hex || "—"; + } + if (probeSwatchEl) { + if (payload.hex) { + probeSwatchEl.style.background = payload.hex; + probeSwatchEl.style.borderColor = payload.hex; + } else { + probeSwatchEl.style.background = "transparent"; + probeSwatchEl.style.borderColor = "rgba(245, 247, 250, 0.65)"; + } + } + if (probeGradientEl) { + probeGradientEl.textContent = + payload.gradient != null ? `${(payload.gradient * 100).toFixed(1)}%` : "—"; + } + if (probeLutEl) { + probeLutEl.textContent = payload.lutValue != null ? String(payload.lutValue) : "—"; + } + if (probeValueEl) { + probeValueEl.textContent = + payload.dataValue != null ? formatValue(payload.dataValue) : "—"; + } + if (probeUnitsEl) { + probeUnitsEl.textContent = payload.dataUnits ?? "—"; + } + } + + function clearProbe() { + updateProbeDisplay(null); + } + + function handlePointer(event) { + if (!config.probe_enabled) { + return; + } + const rect = canvas.getBoundingClientRect(); + pointer.x = ((event.clientX - rect.left) / rect.width) * 2 - 1; + pointer.y = -((event.clientY - rect.top) / rect.height) * 2 + 1; + raycaster.setFromCamera(pointer, camera); + const hits = raycaster.intersectObject(sphere); + if (!hits.length) { + clearProbe(); + return; + } + const hit = hits[0]; + let lat; + let lon; + if (hit.uv) { + const u = hit.uv.x % 1; + const v = hit.uv.y % 1; + lon = u * 360 - 180; + lat = v * 180 - 90; + } else { + globeGroup.updateMatrixWorld(true); + const localPoint = globeGroup.worldToLocal(hit.point.clone()).normalize(); + lat = -THREE.MathUtils.radToDeg(Math.asin(localPoint.y)); + lon = THREE.MathUtils.radToDeg(Math.atan2(localPoint.z, localPoint.x)); + } + if (lon > 180) { + lon -= 360; + } + if (lon < -180) { + lon += 360; + } + + let hex = null; + let gradientValue = null; + let lutValue = null; + let dataValue = null; + let dataUnits = null; + + if (hit.uv) { + const color = sampleTexture(hit.uv); + if (color) { + hex = formatHex(color); + const gradientRatio = mapGradient(color); + gradientValue = gradientRatio != null ? gradientRatio : null; + lutValue = lookupLut(hex); + } + } + + if (probeDataset) { + const nearest = nearestProbe(lat, lon, probeDataset); + if (nearest) { + dataValue = nearest.value; + dataUnits = nearest.units ?? config.probe_units ?? null; + } + } + + updateProbeDisplay({ + lat, + lon, + hex, + gradient: gradientValue, + lutValue, + dataValue, + dataUnits, + frameIndex: frames && frames.length ? currentFrameIndex : null, + frameTimestamp: + currentFrameMeta && currentFrameMeta.timestamp + ? 
currentFrameMeta.timestamp + : null, + frameLabel: frameTimestampLabel(currentFrameMeta), + }); + } + + if (config.probe_enabled) { + clearProbe(); + } + + let lastTime = 0; + let frameTime = 0; + const frameDuration = Number(config.frame_duration) || 0.25; + + function render(time) { + const delta = (time - lastTime) / 1000; + lastTime = time; + if (autoRotate && !isPointerDragging && pointerState.size === 0) { + globeGroup.rotation.y += delta * 0.25; + } + if ( + config.animate === "time" && + frames && + frames.length > 1 && + isPlaying + ) { + frameTime += delta; + if (frameTime >= frameDuration) { + frameTime = 0; + setFrame(currentFrameIndex + 1); + } + } + renderer.render(scene, camera); + requestAnimationFrame(render); + } + + requestAnimationFrame(render); +})().catch((error) => { + console.error("Zyra globe bootstrap failed", error); +}); + +""" + ).strip() + + "\n" + ) + + def _maybe_generate_video_frames(self, assets_dir: Path) -> dict[str, object]: + video_source = self._options.get("video_source") + if not video_source: + self._video_entries = None + return {} + entries, meta = self._extract_video_frames(assets_dir) + self._video_entries = entries + for key, value in (meta or {}).items(): + if value is None: + continue + if key == "frame_duration" and self._options.get("frame_duration") not in ( + None, + 0, + "", + ): + continue + self._options[key] = value + return meta + + def _extract_video_frames( + self, assets_dir: Path + ) -> tuple[list[dict[str, object]], dict[str, object]]: + video_source = str(self._options.get("video_source")) + credentials = self._options.get("credentials") or {} + frame_cache_option = self._options.get("frame_cache") + if frame_cache_option: + frame_cache = Path(frame_cache_option) + if not frame_cache.is_absolute(): + frame_cache = Path.cwd() / frame_cache + else: + frame_cache = assets_dir / "_video_cache" + frame_cache.mkdir(parents=True, exist_ok=True) + + try: + video_url = resolve_video_source(video_source, credentials) + except Exception as exc: # pragma: no cover - Vimeo/network dependent + raise VideoExtractionError(str(exc)) from exc + + fps = float(self._options.get("video_fps") or 1.0) + metadata = probe_video_metadata(video_url) + + overrides = self._load_timeline_overrides() + start_override, period_override, source = overrides + + start_value = self._options.get("video_start") + if start_value: + start_dt = parse_datetime(str(start_value)) + elif start_override: + start_dt = start_override + self._options["video_start"] = format_datetime(start_dt) + else: + raise VideoExtractionError( + "--video-start is required when extracting frames from a video source." 
+ ) + + end_value = self._options.get("video_end") + frames = extract_frames(video_url, output_dir=frame_cache, fps=fps) + + if end_value: + end_dt = parse_datetime(str(end_value)) + else: + end_dt = compute_end_time(start_dt, len(frames), fps) + + entries = compute_frame_timestamps( + frames=frames, + start_time=start_dt, + fps=fps, + ) + + frame_duration = 1.0 / fps if fps > 0 else None + meta_payload = { + "video_start": format_datetime(start_dt), + "video_end": format_datetime(end_dt), + "video_duration_seconds": metadata.duration_seconds, + "video_fps": fps, + "frame_duration": frame_duration, + } + timeline_updates = self._apply_timeline_overrides( + entries, default_start=start_dt, overrides=overrides + ) + if timeline_updates: + meta_payload.update(timeline_updates) + else: + for entry in entries: + entry["metadata"] = { + "elapsed_seconds": _elapsed_seconds(start_dt, entry["timestamp"]) + } + return entries, meta_payload diff --git a/src/zyra/wizard/zyra_capabilities.json b/src/zyra/wizard/zyra_capabilities.json index 8437f439..d00bec09 100644 --- a/src/zyra/wizard/zyra_capabilities.json +++ b/src/zyra/wizard/zyra_capabilities.json @@ -3498,6 +3498,223 @@ }, "example_args": null }, + "visualize globe": { + "description": "Generate interactive globe bundles using modular renderers such as webgl-sphere or cesium-globe.", + "doc": "Generate interactive globe bundles using modular renderers such as webgl-sphere or cesium-globe.", + "epilog": "", + "groups": [ + { + "title": "options", + "options": [ + "--help", + "--target", + "--output", + "--texture", + "--pattern", + "--texture-pattern", + "--frame-list", + "--frame-cache", + "--video-source", + "--start", + "--end", + "--fps", + "--period-seconds", + "--frames-meta", + "--date-format", + "--frame-duration", + "--show-controls", + "--title", + "--description", + "--width", + "--height", + "--animate", + "--auto-rotate", + "--auto-rotate-speed", + "--probe", + "--lighting", + "--probe-gradient", + "--probe-lut", + "--probe-data", + "--probe-var", + "--probe-units", + "--legend", + "--tile-url", + "--tile-type", + "--tile-scheme", + "--tile-min-level", + "--tile-max-level", + "--tile-credit", + "--tile-token", + "--tile-param", + "--tile-time-key", + "--tile-time-value", + "--tile-time-start", + "--tile-time-end", + "--tile-time-period", + "--shared-gradient", + "--time-key", + "--time-format", + "--credential", + "--credential-file", + "--auth", + "--verbose", + "--quiet", + "--trace" + ] + } + ], + "options": { + "--help": "show this help message and exit", + "--target": { + "help": "Renderer backend to use (e.g., webgl-sphere, cesium-globe)", + "choices": [ + "cesium-globe", + "webgl-sphere" + ], + "type": "str", + "required": true + }, + "--output": { + "help": "Directory for the generated bundle (index.html + assets)", + "path_arg": true, + "type": "path", + "required": true + }, + "--texture": "Primary texture image for the globe surface", + "--pattern": "Glob pattern for frame textures (animated sequences)", + "--texture-pattern": "Glob pattern for frame textures (animated sequences)", + "--frame-list": "Text file listing frame paths (optional timestamps)", + "--frame-cache": "Directory to stage frames extracted from remote sources", + "--video-source": "Video file or URI used to derive frame textures (supports Vimeo URIs)", + "--start": "ISO-8601 timestamp for the first frame of the video", + "--end": "ISO-8601 timestamp for the final frame (optional; defaults to start + duration)", + "--fps": { + "help": "Sampling rate when 
extracting frames from video sources (frames per second)", + "type": "float" + }, + "--period-seconds": { + "help": "Override cadence between frames (seconds) for timeline metadata", + "type": "float" + }, + "--frames-meta": { + "help": "Frames metadata JSON (from transform metadata/scan-frames)", + "path_arg": true, + "type": "path" + }, + "--date-format": "strftime-style format used to parse timestamps from frame filenames", + "--frame-duration": { + "help": "Seconds per frame when animating sequences (default 0.25)", + "type": "float" + }, + "--show-controls": { + "help": "Toggle playback controls overlay for animated sequences (default: %(default)s)", + "type": "str", + "default": true + }, + "--title": "Optional overlay title for the globe viewer", + "--description": "Optional descriptive text shown under the globe title", + "--width": { + "help": "Preferred viewport width in pixels", + "type": "int" + }, + "--height": { + "help": "Preferred viewport height in pixels", + "type": "int" + }, + "--animate": { + "help": "Animation mode for multi-frame inputs", + "choices": [ + "none", + "time" + ], + "type": "str", + "default": "none" + }, + "--auto-rotate": { + "help": "Enable continuous auto-rotation (default uses drag-to-rotate) (default: %(default)s)", + "type": "str", + "default": false + }, + "--auto-rotate-speed": { + "help": "Auto-rotation speed in degrees per second when enabled", + "type": "float" + }, + "--probe": { + "help": "Toggle probe UI in the generated viewer (default: %(default)s)", + "type": "str", + "default": true + }, + "--lighting": { + "help": "Enable lighting/shading for the globe (default off for unlit texture) (default: %(default)s)", + "type": "str", + "default": false + }, + "--probe-gradient": "Gradient asset for imagery-backed probe decoding", + "--probe-lut": "Lookup table for categorical probe decoding", + "--probe-data": "JSON/CSV probe dataset packaged with the bundle", + "--probe-var": "Variable name used when sampling structured probe datasets", + "--probe-units": "Units label rendered with probe values (e.g., Ma, °C)", + "--legend": "Legend image path (pkg:, local file, or http/https URL)", + "--tile-url": "ArcGIS MapServer/ImageServer endpoint or URL template providing tiled imagery (overrides --texture when supplied)", + "--tile-type": { + "help": "Provider type for --tile-url (default: arcgis)", + "choices": [ + "arcgis", + "template" + ], + "type": "str" + }, + "--tile-scheme": { + "help": "Tiling scheme for --tile-url when using template sources (default: webmercator)", + "choices": [ + "webmercator", + "geographic" + ], + "type": "str" + }, + "--tile-min-level": { + "help": "Minimum level for tiled imagery providers", + "type": "int" + }, + "--tile-max-level": { + "help": "Maximum level for tiled imagery providers", + "type": "int" + }, + "--tile-credit": "Attribution string shown for tiled imagery providers", + "--tile-token": "Access token passed through to the tiled imagery provider", + "--tile-param": "Placeholder substitutions for template tile URLs (repeatable)", + "--tile-time-key": "Placeholder key in the tile URL that should be driven by time (e.g., 'time')", + "--tile-time-value": "ISO-8601 timestamps used to populate the time placeholder (repeatable)", + "--tile-time-start": "Start of the time range (inclusive, ISO-8601)", + "--tile-time-end": "End of the time range (inclusive, ISO-8601)", + "--tile-time-period": "Step between time samples (e.g., '1d', '6h', '3600s'; default 1d)", + "--shared-gradient": "Name-to-path mapping for 
reusable gradients (repeatable; format NAME=PATH or NAME|PATH; supports local files, pkg: refs, or URLs)", + "--time-key": "Metadata key that maps inputs to timestamps", + "--time-format": "Format string for rendering timestamps", + "--credential": "Inline credential assignment (repeatable)", + "--credential-file": "Credential file resolved by the shared helper", + "--auth": "Auth helper shorthands (e.g., bearer:token, basic:user:pass)", + "--verbose": { + "help": "Verbose logging for this command", + "type": "bool", + "default": false + }, + "--quiet": { + "help": "Quiet logging for this command", + "type": "bool", + "default": false + }, + "--trace": { + "help": "Shell-style trace of key steps and external commands", + "type": "bool", + "default": false + } + }, + "positionals": [], + "domain": "visualize", + "args_schema": null, + "example_args": null + }, "visualize interactive": { "description": "zyra visualize interactive", "doc": "", @@ -4660,6 +4877,223 @@ "args_schema": null, "example_args": null }, + "render globe": { + "description": "Generate interactive globe bundles using modular renderers such as webgl-sphere or cesium-globe.", + "doc": "Generate interactive globe bundles using modular renderers such as webgl-sphere or cesium-globe.", + "epilog": "", + "groups": [ + { + "title": "options", + "options": [ + "--help", + "--target", + "--output", + "--texture", + "--pattern", + "--texture-pattern", + "--frame-list", + "--frame-cache", + "--video-source", + "--start", + "--end", + "--fps", + "--period-seconds", + "--frames-meta", + "--date-format", + "--frame-duration", + "--show-controls", + "--title", + "--description", + "--width", + "--height", + "--animate", + "--auto-rotate", + "--auto-rotate-speed", + "--probe", + "--lighting", + "--probe-gradient", + "--probe-lut", + "--probe-data", + "--probe-var", + "--probe-units", + "--legend", + "--tile-url", + "--tile-type", + "--tile-scheme", + "--tile-min-level", + "--tile-max-level", + "--tile-credit", + "--tile-token", + "--tile-param", + "--tile-time-key", + "--tile-time-value", + "--tile-time-start", + "--tile-time-end", + "--tile-time-period", + "--shared-gradient", + "--time-key", + "--time-format", + "--credential", + "--credential-file", + "--auth", + "--verbose", + "--quiet", + "--trace" + ] + } + ], + "options": { + "--help": "show this help message and exit", + "--target": { + "help": "Renderer backend to use (e.g., webgl-sphere, cesium-globe)", + "choices": [ + "cesium-globe", + "webgl-sphere" + ], + "type": "str", + "required": true + }, + "--output": { + "help": "Directory for the generated bundle (index.html + assets)", + "path_arg": true, + "type": "path", + "required": true + }, + "--texture": "Primary texture image for the globe surface", + "--pattern": "Glob pattern for frame textures (animated sequences)", + "--texture-pattern": "Glob pattern for frame textures (animated sequences)", + "--frame-list": "Text file listing frame paths (optional timestamps)", + "--frame-cache": "Directory to stage frames extracted from remote sources", + "--video-source": "Video file or URI used to derive frame textures (supports Vimeo URIs)", + "--start": "ISO-8601 timestamp for the first frame of the video", + "--end": "ISO-8601 timestamp for the final frame (optional; defaults to start + duration)", + "--fps": { + "help": "Sampling rate when extracting frames from video sources (frames per second)", + "type": "float" + }, + "--period-seconds": { + "help": "Override cadence between frames (seconds) for timeline metadata", + 
"type": "float" + }, + "--frames-meta": { + "help": "Frames metadata JSON (from transform metadata/scan-frames)", + "path_arg": true, + "type": "path" + }, + "--date-format": "strftime-style format used to parse timestamps from frame filenames", + "--frame-duration": { + "help": "Seconds per frame when animating sequences (default 0.25)", + "type": "float" + }, + "--show-controls": { + "help": "Toggle playback controls overlay for animated sequences (default: %(default)s)", + "type": "str", + "default": true + }, + "--title": "Optional overlay title for the globe viewer", + "--description": "Optional descriptive text shown under the globe title", + "--width": { + "help": "Preferred viewport width in pixels", + "type": "int" + }, + "--height": { + "help": "Preferred viewport height in pixels", + "type": "int" + }, + "--animate": { + "help": "Animation mode for multi-frame inputs", + "choices": [ + "none", + "time" + ], + "type": "str", + "default": "none" + }, + "--auto-rotate": { + "help": "Enable continuous auto-rotation (default uses drag-to-rotate) (default: %(default)s)", + "type": "str", + "default": false + }, + "--auto-rotate-speed": { + "help": "Auto-rotation speed in degrees per second when enabled", + "type": "float" + }, + "--probe": { + "help": "Toggle probe UI in the generated viewer (default: %(default)s)", + "type": "str", + "default": true + }, + "--lighting": { + "help": "Enable lighting/shading for the globe (default off for unlit texture) (default: %(default)s)", + "type": "str", + "default": false + }, + "--probe-gradient": "Gradient asset for imagery-backed probe decoding", + "--probe-lut": "Lookup table for categorical probe decoding", + "--probe-data": "JSON/CSV probe dataset packaged with the bundle", + "--probe-var": "Variable name used when sampling structured probe datasets", + "--probe-units": "Units label rendered with probe values (e.g., Ma, °C)", + "--legend": "Legend image path (pkg:, local file, or http/https URL)", + "--tile-url": "ArcGIS MapServer/ImageServer endpoint or URL template providing tiled imagery (overrides --texture when supplied)", + "--tile-type": { + "help": "Provider type for --tile-url (default: arcgis)", + "choices": [ + "arcgis", + "template" + ], + "type": "str" + }, + "--tile-scheme": { + "help": "Tiling scheme for --tile-url when using template sources (default: webmercator)", + "choices": [ + "webmercator", + "geographic" + ], + "type": "str" + }, + "--tile-min-level": { + "help": "Minimum level for tiled imagery providers", + "type": "int" + }, + "--tile-max-level": { + "help": "Maximum level for tiled imagery providers", + "type": "int" + }, + "--tile-credit": "Attribution string shown for tiled imagery providers", + "--tile-token": "Access token passed through to the tiled imagery provider", + "--tile-param": "Placeholder substitutions for template tile URLs (repeatable)", + "--tile-time-key": "Placeholder key in the tile URL that should be driven by time (e.g., 'time')", + "--tile-time-value": "ISO-8601 timestamps used to populate the time placeholder (repeatable)", + "--tile-time-start": "Start of the time range (inclusive, ISO-8601)", + "--tile-time-end": "End of the time range (inclusive, ISO-8601)", + "--tile-time-period": "Step between time samples (e.g., '1d', '6h', '3600s'; default 1d)", + "--shared-gradient": "Name-to-path mapping for reusable gradients (repeatable; format NAME=PATH or NAME|PATH; supports local files, pkg: refs, or URLs)", + "--time-key": "Metadata key that maps inputs to timestamps", + "--time-format": 
"Format string for rendering timestamps", + "--credential": "Inline credential assignment (repeatable)", + "--credential-file": "Credential file resolved by the shared helper", + "--auth": "Auth helper shorthands (e.g., bearer:token, basic:user:pass)", + "--verbose": { + "help": "Verbose logging for this command", + "type": "bool", + "default": false + }, + "--quiet": { + "help": "Quiet logging for this command", + "type": "bool", + "default": false + }, + "--trace": { + "help": "Shell-style trace of key steps and external commands", + "type": "bool", + "default": false + } + }, + "positionals": [], + "domain": "render", + "args_schema": null, + "example_args": null + }, "render interactive": { "description": "zyra render interactive", "doc": "", diff --git a/tests/visualization/test_cli_commands.py b/tests/visualization/test_cli_commands.py index b40e8e34..5f6b267a 100644 --- a/tests/visualization/test_cli_commands.py +++ b/tests/visualization/test_cli_commands.py @@ -1,8 +1,58 @@ # SPDX-License-Identifier: Apache-2.0 +import base64 +import functools +import json import subprocess import sys +from pathlib import Path +import numpy as np import pytest +from PIL import Image + +try: # pragma: no cover - optional dependency check + import xarray as xr +except ModuleNotFoundError: # pragma: no cover - optional dependency missing + pytest.skip("xarray is required for visualization tests", allow_module_level=True) + +from zyra.processing.video_processor import VideoProcessor + + +@functools.lru_cache +def _ffmpeg_available() -> bool: + try: + vp = VideoProcessor(input_directory=".", output_file="_/tmp/out.mp4") + return vp.check_ffmpeg_installed() + except Exception: + return False + + +def _write_color_png(path: Path, color: tuple[int, int, int]) -> None: + img = Image.new("RGB", (64, 32), color) + img.save(path, format="PNG") + + +def _assert_images_close(path_a: Path, path_b: Path, *, tolerance: int = 2) -> None: + with Image.open(path_a) as img_a, Image.open(path_b) as img_b: + a_rgb = img_a.convert("RGB") + b_rgb = img_b.convert("RGB") + assert a_rgb.size == b_rgb.size + arr_a = np.asarray(a_rgb, dtype=np.int16) + arr_b = np.asarray(b_rgb, dtype=np.int16) + diff = np.abs(arr_a - arr_b) + assert diff.max() <= tolerance + + +def _create_drought_frames(frames_dir: Path) -> list[Path]: + frames_dir.mkdir(parents=True, exist_ok=True) + colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255)] + dates = ["20240201", "20240202", "20240203"] + frames: list[Path] = [] + for color, date in zip(colors, dates, strict=True): + out = frames_dir / f"DroughtRisk_Weekly_{date}.png" + _write_color_png(out, color) + frames.append(out) + return frames @pytest.mark.cli @@ -16,8 +66,314 @@ ["visualize", "animate", "--help"], ["visualize", "compose-video", "--help"], ["visualize", "interactive", "--help"], + ["visualize", "globe", "--help"], ], ) def test_visualize_subcommand_help_exits_zero(cmd): proc = subprocess.run([sys.executable, "-m", "zyra.cli", *cmd], capture_output=True) assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + + +def test_visualize_globe_emits_bundle(tmp_path): + output_dir = tmp_path / "bundle" + texture_path = tmp_path / "dummy.png" + texture_path.write_bytes(_tiny_png()) + gradient_path = tmp_path / "gradient.png" + gradient_path.write_bytes(_tiny_png()) + legend_path = tmp_path / "legend.png" + legend_path.write_bytes(_tiny_png()) + shared_gradient_path = tmp_path / "shared_grad.png" + shared_gradient_path.write_bytes(_tiny_png()) + lut_path = tmp_path / "lut.json" + 
lut_path.write_text('{"a":1}', encoding="utf-8") + data_path = _write_probe_json(tmp_path) + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "webgl-sphere", + "--output", + str(output_dir), + "--texture", + str(texture_path), + "--probe-gradient", + str(gradient_path), + "--probe-lut", + str(lut_path), + "--probe-data", + str(data_path), + "--legend", + str(legend_path), + "--shared-gradient", + f"default={shared_gradient_path}", + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + assert (output_dir / "index.html").exists() + assert (output_dir / "assets" / "sphere.js").exists() + config = json.loads( + (output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + assert config.get("texture") == "assets/textures/dummy.png" + assert config.get("probe_gradient") == "assets/gradients/gradient.png" + assert config.get("probe_lut") == "assets/gradients/lut.json" + assert config.get("probe_data") == "assets/data/probe.json" + assert config.get("legend") == "assets/legends/legend.png" + shared_map = config.get("shared_gradients") + assert shared_map and shared_map.get("default") == ( + "assets/gradients/shared/shared_grad.png" + ) + shared_dest = output_dir / "assets" / "gradients" / "shared" / "shared_grad.png" + assert shared_dest.exists() + + +def test_visualize_globe_cesium(tmp_path): + output_dir = tmp_path / "cesium" + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "cesium-globe", + "--output", + str(output_dir), + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + assert (output_dir / "index.html").exists() + assert (output_dir / "assets" / "cesium.js").exists() + + +def test_visualize_globe_cesium_with_gradient(tmp_path): + output_dir = tmp_path / "cesium_grad" + gradient_path = tmp_path / "gradient.png" + lut_path = tmp_path / "lut.json" + gradient_path.write_bytes(_tiny_png()) + lut_path.write_text('{"a":1}', encoding="utf-8") + data_path = _write_probe_json(tmp_path) + + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "cesium-globe", + "--output", + str(output_dir), + "--probe-gradient", + str(gradient_path), + "--probe-lut", + str(lut_path), + "--probe-data", + str(data_path), + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + config = json.loads( + (output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + assert config.get("probe_gradient") == "assets/gradients/gradient.png" + assert config.get("probe_lut") == "assets/gradients/lut.json" + assert config.get("probe_data") == "assets/data/probe.json" + + +def test_visualize_globe_with_frame_pattern(tmp_path): + output_dir = tmp_path / "bundle_frames" + frames_dir = tmp_path / "frames" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / "frame_01.png").write_bytes(data) + (frames_dir / "frame_02.png").write_bytes(data) + + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "webgl-sphere", + "--output", + str(output_dir), + "--texture-pattern", + str(frames_dir / "*.png"), + "--animate", + "time", + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + config = json.loads( + (output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + frames = config.get("frames") + 
assert isinstance(frames, list) + assert len(frames) == 2 + + +def test_visualize_globe_with_json_manifest(tmp_path): + frames_dir = tmp_path / "frames_json" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / "frame_a.png").write_bytes(data) + (frames_dir / "frame_b.png").write_bytes(data) + + manifest_path = frames_dir / "manifest.json" + manifest = [ + {"path": "frame_a.png", "time": "2024-02-01 12:00"}, + {"path": "frame_b.png", "time": "2024-02-02 12:00"}, + ] + manifest_path.write_text(json.dumps(manifest), encoding="utf-8") + + output_dir = tmp_path / "bundle_manifest" + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "webgl-sphere", + "--output", + str(output_dir), + "--frame-list", + str(manifest_path), + "--time-key", + "time", + "--time-format", + "%Y-%m-%d %H:%M", + "--animate", + "time", + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + config = json.loads( + (output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + frames = config.get("frames") + assert isinstance(frames, list) + assert frames[0]["display_timestamp"] == "2024-02-01 12:00" + assert (output_dir / "assets" / "textures" / "frame_a.png").exists() + + +def test_visualize_globe_with_probe_dataset(tmp_path): + nc_path = tmp_path / "probe.nc" + _write_tiny_netcdf(nc_path) + + output_dir = tmp_path / "bundle_probe_dataset" + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "webgl-sphere", + "--output", + str(output_dir), + "--probe-data", + str(nc_path), + "--probe-var", + "temp", + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + config = json.loads( + (output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + assert config.get("probe_data") == "assets/data/probe_probe_points.json" + assert config.get("probe_units") == "K" + data_file = output_dir / "assets" / "data" / "probe_probe_points.json" + assert data_file.exists() + + +@pytest.mark.skipif(not _ffmpeg_available(), reason="requires ffmpeg") +def test_visualize_globe_with_video_source(tmp_path): + frames_dir = tmp_path / "video_frames" + frames = _create_drought_frames(frames_dir) + video_path = tmp_path / "drought_globe.mp4" + vp = VideoProcessor(input_directory=str(frames_dir), output_file=str(video_path)) + assert vp.process_video(fps=1, input_glob="*.png") + + output_dir = tmp_path / "bundle_video_cli" + cache_dir = tmp_path / "frame_cache" + proc = subprocess.run( + [ + sys.executable, + "-m", + "zyra.cli", + "visualize", + "globe", + "--target", + "webgl-sphere", + "--output", + str(output_dir), + "--video-source", + str(video_path), + "--video-start", + "2024-02-01T00:00:00Z", + "--video-fps", + "1", + "--frame-cache", + str(cache_dir), + ], + capture_output=True, + ) + assert proc.returncode == 0, proc.stderr.decode(errors="ignore") + + config = json.loads( + (output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + frames_meta = config.get("frames") + assert isinstance(frames_meta, list) + assert len(frames_meta) == len(frames) + textures_dir = output_dir / "assets" / "textures" + extracted = sorted(textures_dir.glob("frame_*.png")) + assert len(extracted) == len(frames) + for extracted_frame, original in zip(extracted, sorted(frames), strict=False): + _assert_images_close(extracted_frame, original) + + +def _tiny_png() -> bytes: + return base64.b64decode( + 
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII=" + ) + + +def _write_probe_json(tmp_path): + probe_path = tmp_path / "probe.json" + data = [{"lat": 0.0, "lon": 0.0, "value": 42.5, "units": "K"}] + probe_path.write_text(json.dumps(data), encoding="utf-8") + return probe_path + + +def _write_tiny_netcdf(path): + lat = np.array([-10.0, 0.0, 10.0]) + lon = np.array([0.0, 30.0, 60.0]) + data = np.arange(lat.size * lon.size, dtype=float).reshape(lat.size, lon.size) + da = xr.DataArray( + data, + coords={"lat": lat, "lon": lon}, + dims=("lat", "lon"), + name="temp", + attrs={"units": "K"}, + ) + da.to_dataset(name="temp").to_netcdf(path) diff --git a/tests/visualization/test_globe_renderers.py b/tests/visualization/test_globe_renderers.py new file mode 100644 index 00000000..8e35ed25 --- /dev/null +++ b/tests/visualization/test_globe_renderers.py @@ -0,0 +1,618 @@ +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import base64 +import functools +import json +from datetime import datetime, timedelta, timezone +from pathlib import Path + +import numpy as np +import pytest +from PIL import Image + +try: # pragma: no cover - optional dependency check + import xarray as xr +except ModuleNotFoundError: # pragma: no cover - optional dependency missing + pytest.skip("xarray is required for visualization tests", allow_module_level=True) + +from zyra.processing.video_processor import VideoProcessor +from zyra.visualization.renderers import available, create + + +@functools.lru_cache +def _ffmpeg_available() -> bool: + try: + vp = VideoProcessor(input_directory=".", output_file="_/tmp/out.mp4") + return vp.check_ffmpeg_installed() + except Exception: + return False + + +def _write_color_png(path: Path, color: tuple[int, int, int]) -> None: + img = Image.new("RGB", (64, 32), color) + img.save(path, format="PNG") + + +def _assert_images_close(path_a: Path, path_b: Path, *, tolerance: int = 2) -> None: + with Image.open(path_a) as img_a, Image.open(path_b) as img_b: + a_rgb = img_a.convert("RGB") + b_rgb = img_b.convert("RGB") + assert a_rgb.size == b_rgb.size + arr_a = np.asarray(a_rgb, dtype=np.int16) + arr_b = np.asarray(b_rgb, dtype=np.int16) + diff = np.abs(arr_a - arr_b) + assert diff.max() <= tolerance + + +def _create_drought_frames(frames_dir: Path) -> list[Path]: + frames_dir.mkdir(parents=True, exist_ok=True) + colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255)] + dates = ["20240201", "20240202", "20240203"] + frames: list[Path] = [] + for color, date in zip(colors, dates, strict=True): + out = frames_dir / f"DroughtRisk_Weekly_{date}.png" + _write_color_png(out, color) + frames.append(out) + return frames + + +def test_globe_renderers_registered() -> None: + slugs = {renderer.slug for renderer in available()} + assert "webgl-sphere" in slugs + assert "cesium-globe" in slugs + + +def test_webgl_renderer_builds_bundle(tmp_path) -> None: + renderer = create("webgl-sphere", width=640, height=360) + bundle = renderer.build(output_dir=tmp_path) + + assert bundle.index_html.exists() + html = bundle.index_html.read_text(encoding="utf-8") + assert '' in html + assert "data-probe-lat" in html + assert "window.ZYRA_GLOBE_CONFIG" in html + + asset_paths = {path.name for path in bundle.assets} + assert asset_paths == {"sphere.js", "config.json"} + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + assert config.get("width") == 640 + assert 
config.get("height") == 360 + assert config.get("show_controls") is True + + +def test_cesium_renderer_builds_bundle(tmp_path) -> None: + renderer = create("cesium-globe", width=800, height=600) + bundle = renderer.build(output_dir=tmp_path) + + assert bundle.index_html.exists() + html = bundle.index_html.read_text(encoding="utf-8") + assert "Zyra Cesium Globe" in html + assert "Cesium.js" in html + + asset_paths = {path.name for path in bundle.assets} + assert asset_paths == {"cesium.js", "config.json"} + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + assert config.get("width") == 800 + assert config.get("height") == 600 + + +def test_webgl_renderer_with_texture(tmp_path) -> None: + texture_path = tmp_path / "dummy.png" + texture_path.write_bytes(_tiny_png()) + + renderer = create("webgl-sphere", texture=str(texture_path)) + bundle = renderer.build(output_dir=tmp_path / "bundle") + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + assert config.get("texture") == "assets/textures/dummy.png" + + staged = tmp_path / "bundle" / "assets" / "textures" / "dummy.png" + assert staged.exists() + + +def test_webgl_renderer_with_legend(tmp_path) -> None: + legend_path = tmp_path / "legend.png" + legend_path.write_bytes(_tiny_png()) + + renderer = create("webgl-sphere", legend=str(legend_path)) + bundle = renderer.build(output_dir=tmp_path / "bundle_legend") + + config = json.loads( + (bundle.output_dir / "assets" / "config.json").read_text(encoding="utf-8") + ) + assert config.get("legend") == "assets/legends/legend.png" + + legends_dir = bundle.output_dir / "assets" / "legends" + assert (legends_dir / "legend.png").exists() + + +def test_webgl_renderer_with_remote_texture(tmp_path) -> None: + raw_url = "https:/example.com/remote_texture.jpg" + expected_url = "https://example.com/remote_texture.jpg" + + renderer = create("webgl-sphere", texture=raw_url) + bundle = renderer.build(output_dir=tmp_path / "bundle_remote") + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + assert config.get("texture") == expected_url + + textures_dir = bundle.output_dir / "assets" / "textures" + assert not textures_dir.exists() or not any(textures_dir.iterdir()) + + +def test_webgl_renderer_with_frame_pattern(tmp_path) -> None: + frames_dir = tmp_path / "frames" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / "frame_01.png").write_bytes(data) + (frames_dir / "frame_02.png").write_bytes(data) + + renderer = create( + "webgl-sphere", + texture_pattern=str(frames_dir / "*.png"), + animate="time", + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_frames") + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + frames = config.get("frames") + assert isinstance(frames, list) + assert len(frames) == 2 + assert frames[0]["path"].endswith("frame_01.png") + assert config.get("texture").endswith("frame_01.png") + + staged = bundle.output_dir / "assets" / "textures" + assert (staged / "frame_01.png").exists() + assert (staged / "frame_02.png").exists() + + +def test_webgl_renderer_with_frame_pattern_and_date_format(tmp_path) -> None: + frames_dir = tmp_path / "frames_date" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / 
"DroughtRisk_Weekly_20250101.png").write_bytes(data) + (frames_dir / "DroughtRisk_Weekly_20250108.png").write_bytes(data) + + renderer = create( + "webgl-sphere", + texture_pattern=str(frames_dir / "DroughtRisk_Weekly_*.png"), + frame_date_format="%Y%m%d", + animate="time", + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_frames_date") + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + frames = config.get("frames") + assert isinstance(frames, list) + assert frames[0]["timestamp"].startswith("2025-01-01") + + +def test_cesium_renderer_with_frame_pattern(tmp_path) -> None: + frames_dir = tmp_path / "frames_cesium" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / "DroughtRisk_Weekly_20240201.png").write_bytes(data) + (frames_dir / "DroughtRisk_Weekly_20240208.png").write_bytes(data) + + renderer = create( + "cesium-globe", + texture_pattern=str(frames_dir / "DroughtRisk_Weekly_*.png"), + date_format="%Y%m%d", + animate="time", + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_cesium_frames") + + config_path = next(path for path in bundle.assets if path.name == "config.json") + config = json.loads(config_path.read_text(encoding="utf-8")) + frames = config.get("frames") + assert isinstance(frames, list) + assert len(frames) == 2 + assert frames[0]["path"].endswith("DroughtRisk_Weekly_20240201.png") + assert frames[0]["timestamp"].startswith("2024-02-01") + + +def test_webgl_renderer_with_gradient_and_lut(tmp_path) -> None: + gradient_path = tmp_path / "gradient.png" + lut_path = tmp_path / "lut.json" + gradient_path.write_bytes(_tiny_png()) + lut_path.write_text('{"a":1}', encoding="utf-8") + probe_path = _write_probe_json(tmp_path) + + renderer = create( + "webgl-sphere", + probe_gradient=str(gradient_path), + probe_lut=str(lut_path), + probe_data=str(probe_path), + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_grad") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + assert config.get("probe_gradient") == "assets/gradients/gradient.png" + assert config.get("probe_lut") == "assets/gradients/lut.json" + assert config.get("probe_data") == "assets/data/probe.json" + + gradients_dir = bundle.output_dir / "assets" / "gradients" + assert (gradients_dir / "gradient.png").exists() + assert (gradients_dir / "lut.json").exists() + data_dir = bundle.output_dir / "assets" / "data" + assert (data_dir / "probe.json").exists() + + +def test_cesium_renderer_with_gradient_and_lut(tmp_path) -> None: + gradient_path = tmp_path / "gradient.png" + lut_path = tmp_path / "lut.json" + gradient_path.write_bytes(_tiny_png()) + lut_path.write_text('{"a":1}', encoding="utf-8") + probe_path = _write_probe_json(tmp_path) + + renderer = create( + "cesium-globe", + probe_gradient=str(gradient_path), + probe_lut=str(lut_path), + probe_data=str(probe_path), + ) + bundle = renderer.build(output_dir=tmp_path / "cesium_bundle") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + assert config.get("probe_gradient") == "assets/gradients/gradient.png" + assert config.get("probe_lut") == "assets/gradients/lut.json" + assert config.get("probe_data") == "assets/data/probe.json" + + gradients_dir = bundle.output_dir / "assets" / "gradients" + assert (gradients_dir / "gradient.png").exists() + assert (gradients_dir / "lut.json").exists() + data_dir = 
bundle.output_dir / "assets" / "data" + assert (data_dir / "probe.json").exists() + + +def test_cesium_renderer_with_legend(tmp_path) -> None: + legend_path = tmp_path / "legend.png" + legend_path.write_bytes(_tiny_png()) + + renderer = create("cesium-globe", legend=str(legend_path)) + bundle = renderer.build(output_dir=tmp_path / "cesium_legend") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + assert config.get("legend") == "assets/legends/legend.png" + + legends_dir = bundle.output_dir / "assets" / "legends" + assert (legends_dir / "legend.png").exists() + + +def test_webgl_renderer_with_shared_gradients(tmp_path) -> None: + gradient_path = tmp_path / "shared.png" + gradient_path.write_bytes(_tiny_png()) + + renderer = create( + "webgl-sphere", + shared_gradients={"default": str(gradient_path)}, + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_shared_grad") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + shared = config.get("shared_gradients") + assert isinstance(shared, dict) + assert shared["default"].startswith("assets/gradients/shared/") + + gradients_dir = bundle.output_dir / "assets" / "gradients" / "shared" + assert (gradients_dir / "shared.png").exists() + + +@pytest.mark.skipif(not _ffmpeg_available(), reason="requires ffmpeg") +def test_webgl_renderer_with_video_source(tmp_path) -> None: + frames_dir = tmp_path / "drought_frames" + frames = _create_drought_frames(frames_dir) + video_path = tmp_path / "drought.mp4" + vp = VideoProcessor(input_directory=str(frames_dir), output_file=str(video_path)) + assert vp.process_video(fps=1, input_glob="*.png") + + cache_dir = tmp_path / "video_cache" + renderer = create( + "webgl-sphere", + video_source=str(video_path), + video_start="2024-02-01T00:00:00Z", + video_fps=1.0, + frame_cache=str(cache_dir), + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_video") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + assert config.get("video_start") == "2024-02-01T00:00:00Z" + assert config.get("video_end") == "2024-02-01T00:00:02Z" + frames_meta = config.get("frames") + assert isinstance(frames_meta, list) + assert len(frames_meta) == len(frames) + expected_times = [ + (datetime(2024, 2, 1, tzinfo=timezone.utc) + timedelta(seconds=i)) + .isoformat() + .replace("+00:00", "Z") + for i in range(len(frames)) + ] + assert [frame["timestamp"] for frame in frames_meta] == expected_times + + textures_dir = bundle.output_dir / "assets" / "textures" + extracted = sorted(textures_dir.glob("frame_*.png")) + assert len(extracted) == len(frames) + for extracted_frame, original in zip(extracted, sorted(frames), strict=False): + _assert_images_close(extracted_frame, original) + + +@pytest.mark.skipif(not _ffmpeg_available(), reason="requires ffmpeg") +def test_webgl_renderer_video_with_period_override(tmp_path) -> None: + frames_dir = tmp_path / "drought_frames_period" + frames = _create_drought_frames(frames_dir) + video_path = tmp_path / "drought_period.mp4" + vp = VideoProcessor(input_directory=str(frames_dir), output_file=str(video_path)) + assert vp.process_video(fps=1, input_glob="*.png") + + meta_path = tmp_path / "frames_meta.json" + meta_path.write_text( + json.dumps( + { + "start_datetime": "2024-02-01T00:00:00Z", + "period_seconds": 604800, + "frame_count_actual": len(frames), + } + ), + encoding="utf-8", + ) + + 
renderer = create( + "webgl-sphere", + video_source=str(video_path), + video_fps=1.0, + frames_meta=str(meta_path), + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_video_period") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + frames_meta = config.get("frames") + assert isinstance(frames_meta, list) + assert len(frames_meta) == len(frames) + + expected_times = [ + (datetime(2024, 2, 1, tzinfo=timezone.utc) + timedelta(days=7 * i)) + .isoformat() + .replace("+00:00", "Z") + for i in range(len(frames)) + ] + assert [frame["timestamp"] for frame in frames_meta] == expected_times + assert config.get("timeline_period_seconds") == 604800 + assert config.get("timeline_source") == "frames-meta" + assert frames_meta[1]["metadata"]["elapsed_seconds"] == pytest.approx(604800.0) + + +@pytest.mark.skipif(not _ffmpeg_available(), reason="requires ffmpeg") +def test_cesium_renderer_with_video_source(tmp_path) -> None: + frames_dir = tmp_path / "drought_frames_cesium" + frames = _create_drought_frames(frames_dir) + video_path = tmp_path / "drought_cesium.mp4" + vp = VideoProcessor(input_directory=str(frames_dir), output_file=str(video_path)) + assert vp.process_video(fps=1, input_glob="*.png") + + cache_dir = tmp_path / "video_cache_cesium" + renderer = create( + "cesium-globe", + video_source=str(video_path), + video_start="2024-02-01T00:00:00Z", + video_fps=1.0, + frame_cache=str(cache_dir), + ) + bundle = renderer.build(output_dir=tmp_path / "cesium_video") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + frames_meta = config.get("frames") + assert isinstance(frames_meta, list) + assert len(frames_meta) == len(frames) + textures_dir = bundle.output_dir / "assets" / "textures" + extracted = sorted(textures_dir.glob("frame_*.png")) + assert len(extracted) == len(frames) + for extracted_frame, original in zip(extracted, sorted(frames), strict=False): + _assert_images_close(extracted_frame, original) + + +def test_webgl_renderer_with_netcdf_probe(tmp_path) -> None: + nc_path = tmp_path / "probe.nc" + _write_tiny_netcdf(nc_path) + + renderer = create( + "webgl-sphere", + probe_data=str(nc_path), + probe_var="temp", + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_probe_nc") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + assert config.get("probe_data") == "assets/data/probe_probe_points.json" + assert config.get("probe_units") == "K" + data_file = bundle.output_dir / "assets" / "data" / "probe_probe_points.json" + assert data_file.exists() + payload = json.loads(data_file.read_text(encoding="utf-8")) + assert payload and payload[0]["value"] == 0.0 + + +def test_cesium_renderer_with_shared_gradients(tmp_path) -> None: + gradient_path = tmp_path / "shared.png" + gradient_path.write_bytes(_tiny_png()) + + renderer = create( + "cesium-globe", + shared_gradients={"primary": str(gradient_path)}, + ) + bundle = renderer.build(output_dir=tmp_path / "cesium_shared_grad") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + shared = config.get("shared_gradients") + assert isinstance(shared, dict) + assert shared["primary"].startswith("assets/gradients/shared/") + + gradients_dir = bundle.output_dir / "assets" / "gradients" / "shared" + assert (gradients_dir / "shared.png").exists() + + +def 
test_cesium_renderer_with_netcdf_probe(tmp_path) -> None: + nc_path = tmp_path / "probe.nc" + _write_tiny_netcdf(nc_path) + + renderer = create( + "cesium-globe", + probe_data=str(nc_path), + probe_var="temp", + ) + bundle = renderer.build(output_dir=tmp_path / "cesium_probe_nc") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + assert config.get("probe_data") == "assets/data/probe_probe_points.json" + assert config.get("probe_units") == "K" + data_file = bundle.output_dir / "assets" / "data" / "probe_probe_points.json" + assert data_file.exists() + + +def test_webgl_renderer_with_time_key_and_format(tmp_path) -> None: + frames_dir = tmp_path / "frames_json" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / "frame_a.png").write_bytes(data) + (frames_dir / "frame_b.png").write_bytes(data) + + manifest_path = frames_dir / "frames_manifest.json" + manifest = [ + {"path": "frame_a.png", "time": "2024-02-01 06:00"}, + {"path": "frame_b.png", "time": "2024-02-02 06:00", "label": "Second"}, + ] + manifest_path.write_text(json.dumps(manifest), encoding="utf-8") + + renderer = create( + "webgl-sphere", + frame_list=str(manifest_path), + time_key="time", + time_format="%Y-%m-%d %H:%M", + animate="time", + ) + bundle = renderer.build(output_dir=tmp_path / "bundle_time_key") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + frames = config.get("frames") + assert isinstance(frames, list) + assert frames[0]["timestamp"].endswith("Z") + assert frames[0]["display_timestamp"] == "2024-02-01 06:00" + textures_dir = bundle.output_dir / "assets" / "textures" + assert (textures_dir / "frame_a.png").exists() + assert frames[1]["label"] == "Second" + + +def test_cesium_renderer_with_time_key_and_format(tmp_path) -> None: + frames_dir = tmp_path / "frames_json_cesium" + frames_dir.mkdir() + data = _tiny_png() + (frames_dir / "frame_a.png").write_bytes(data) + (frames_dir / "frame_b.png").write_bytes(data) + + manifest_path = frames_dir / "frames_manifest_cesium.json" + manifest = [ + {"path": "frame_a.png", "time": "2024-02-01 06:00"}, + {"path": "frame_b.png", "time": "2024-02-02 06:00"}, + ] + manifest_path.write_text(json.dumps(manifest), encoding="utf-8") + + renderer = create( + "cesium-globe", + frame_list=str(manifest_path), + time_key="time", + time_format="%Y-%m-%d %H:%M", + animate="time", + ) + bundle = renderer.build(output_dir=tmp_path / "cesium_time_key") + + config = json.loads( + (bundle.index_html.parent / "assets" / "config.json").read_text( + encoding="utf-8" + ) + ) + frames = config.get("frames") + assert isinstance(frames, list) + assert frames[0]["timestamp"].endswith("Z") + assert frames[0]["display_timestamp"] == "2024-02-01 06:00" + textures_dir = bundle.output_dir / "assets" / "textures" + assert (textures_dir / "frame_a.png").exists() + + +def _tiny_png() -> bytes: + return base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGNgYAAAAAMAASsJTYQAAAAASUVORK5CYII=" + ) + + +def _write_probe_json(tmp_path) -> Path: + probe_path = tmp_path / "probe.json" + data = [{"lat": 0.0, "lon": 0.0, "value": 42.5, "units": "K"}] + probe_path.write_text(json.dumps(data), encoding="utf-8") + return probe_path + + +def _write_tiny_netcdf(path: Path) -> None: + lat = np.array([-10.0, 0.0, 10.0]) + lon = np.array([0.0, 30.0, 60.0]) + data = np.arange(lat.size * lon.size, dtype=float).reshape(lat.size, lon.size) + da = xr.DataArray( + 
data, + coords={"lat": lat, "lon": lon}, + dims=("lat", "lon"), + name="temp", + attrs={"units": "K"}, + ) + ds = da.to_dataset(name="temp") + ds.to_netcdf(path)
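
Reviewer note: a minimal sketch of driving the renderer registry this patch adds, mirroring the calls exercised in tests/visualization/test_globe_renderers.py (create(), bundle.build(), and the emitted assets/config.json). The texture path is a placeholder assumption; any local equirectangular image should behave the same way.

# Sketch only: build a WebGL globe bundle through the new renderer registry.
from pathlib import Path

from zyra.visualization.renderers import available, create

# The registry should expose both backends added in this patch.
print(sorted(r.slug for r in available()))  # expect "cesium-globe", "webgl-sphere"

# "./earth.jpg" is a placeholder path, not an asset shipped by this patch.
renderer = create("webgl-sphere", texture="./earth.jpg", width=960, height=540)
bundle = renderer.build(output_dir=Path("./globe_bundle"))

# The bundle mirrors what the CLI writes: index.html plus assets/config.json,
# with the staged texture copied under assets/textures/.
print(bundle.index_html)
print(sorted(p.name for p in bundle.assets))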