豆豆友情提示:这是一个非官方 GitHub 代理镜像,主要用于网络测试或访问加速。请勿在此进行登录、注册或处理任何敏感信息。进行这些操作请务必访问官方网站 github.com。 Raw 内容也通过此代理提供。
Skip to content

Commit d2b2cbf

Browse files
committed
stream: extract viewer HTML to viewer.html, default bind to loopback
Strong concern #7 (PR #405): default HTTP bind leaked camera/CSI/vitals to the LAN. The `serve` fn now takes a single `bind` arg and prints a loud WARNING when bound outside loopback. Strong concern #10 (PR #405): embedded HTML+JS was ~220 LOC of the 418 LOC stream.rs. Moved the markup verbatim into viewer.html and inlined via `include_str!("viewer.html")`. Also: - Drop the #![allow(dead_code)] crate-level silencing (reviewer point #11). Remove the now-unused AppState.csi_pipeline field. - capture_camera_cloud_with_luminance returns the mean luminance of the captured frame; the background loop feeds that to CsiPipelineState::set_light_level so the night-mode flag actually toggles at runtime (previously it could only be set from tests). Net effect on file size: stream.rs 418 → 232 LOC. Co-Authored-By: claude-flow <ruv@ruv.net>
1 parent 770788f commit d2b2cbf

File tree

2 files changed

+289
-246
lines changed

2 files changed

+289
-246
lines changed
Lines changed: 60 additions & 246 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
//! HTTP server — live camera + ESP32 CSI + fusion → real-time point cloud.
2-
#![allow(dead_code)]
32
43
use crate::brain_bridge;
54
use crate::camera;
@@ -21,13 +20,19 @@ struct AppState {
2120
latest_pipeline: Mutex<Option<csi_pipeline::PipelineOutput>>,
2221
frame_count: Mutex<u64>,
2322
use_camera: bool,
24-
csi_pipeline: Option<Arc<Mutex<csi_pipeline::CsiPipelineState>>>,
2523
}
2624

27-
pub async fn serve(host: &str, port: u16, _wifi_source: Option<&str>) -> anyhow::Result<()> {
25+
/// Start the HTTP/viewer server bound to `bind` (e.g.
26+
/// `"127.0.0.1:9880"` — the safe default — or `"0.0.0.0:9880"` to expose
27+
/// the viewer to the LAN).
28+
///
29+
/// **Security**: the viewer streams live camera/CSI/vitals data. Bind to
30+
/// `127.0.0.1` unless you intentionally want remote viewers.
31+
pub async fn serve(bind: &str, _brain: Option<&str>) -> anyhow::Result<()> {
2832
let has_camera = camera::camera_available();
2933

30-
// Start CSI pipeline — listens for UDP CSI data from ESP32 nodes
34+
// Start CSI pipeline — listens for UDP CSI data from ESP32 nodes.
35+
// Kept on 0.0.0.0 because ESP32 nodes are remote devices on the LAN.
3136
let csi_pipeline_state = csi_pipeline::start_pipeline("0.0.0.0:3333");
3237
eprintln!(" CSI pipeline: UDP port 3333 (ADR-018 binary frames)");
3338

@@ -44,18 +49,17 @@ pub async fn serve(host: &str, port: u16, _wifi_source: Option<&str>) -> anyhow:
4449
latest_pipeline: Mutex::new(None),
4550
frame_count: Mutex::new(0),
4651
use_camera: has_camera,
47-
csi_pipeline: Some(csi_pipeline_state.clone()),
4852
});
4953

5054
// Background: capture + fuse every 500ms (motion-adaptive)
5155
let bg = state.clone();
52-
let bg_csi = Some(csi_pipeline_state.clone());
56+
let bg_csi = csi_pipeline_state.clone();
5357
let bg_cam = has_camera;
5458
tokio::spawn(async move {
5559
let mut skip_depth = false;
5660
loop {
5761
// Motion-adaptive: check CSI motion score
58-
let pipeline_out = bg_csi.as_ref().map(|c| csi_pipeline::get_pipeline_output(c));
62+
let pipeline_out = Some(csi_pipeline::get_pipeline_output(&bg_csi));
5963
if let Some(ref out) = pipeline_out {
6064
// Only run expensive depth when motion detected or every 5th frame
6165
let frame_num = *bg.frame_count.lock().unwrap();
@@ -68,13 +72,21 @@ pub async fn serve(host: &str, port: u16, _wifi_source: Option<&str>) -> anyhow:
6872
let interval = if skip_depth { 1000 } else { 500 }; // slower when no motion
6973
tokio::time::sleep(std::time::Duration::from_millis(interval)).await;
7074

71-
let cloud = if bg_cam && !skip_depth {
72-
tokio::task::spawn_blocking(capture_camera_cloud)
73-
.await.unwrap_or_else(|_| demo_cloud())
75+
let (cloud, luminance) = if bg_cam && !skip_depth {
76+
tokio::task::spawn_blocking(capture_camera_cloud_with_luminance)
77+
.await.unwrap_or_else(|_| (demo_cloud(), None))
7478
} else {
7579
// Reuse previous cloud when no motion
76-
bg.latest_cloud.lock().unwrap().clone()
80+
(bg.latest_cloud.lock().unwrap().clone(), None)
7781
};
82+
// Feed luminance into the CSI pipeline so is_dark toggles for the
83+
// viewer. The lock is held briefly here — the UDP thread never
84+
// touches it (messages go through the mpsc channel).
85+
if let Some(lum) = luminance {
86+
if let Ok(mut st) = bg_csi.lock() {
87+
st.set_light_level(lum);
88+
}
89+
}
7890
let splats = pointcloud::to_gaussian_splats(&cloud);
7991
*bg.latest_cloud.lock().unwrap() = cloud;
8092
*bg.latest_splats.lock().unwrap() = splats;
@@ -104,30 +116,54 @@ pub async fn serve(host: &str, port: u16, _wifi_source: Option<&str>) -> anyhow:
104116
.route("/health", get(api_health))
105117
.with_state(state);
106118

107-
let addr = format!("{host}:{port}");
108119
println!("╔══════════════════════════════════════════════╗");
109120
println!("║ RuView Dense Point Cloud — ALL SENSORS ║");
110121
println!("╚══════════════════════════════════════════════╝");
111-
println!(" Viewer: http://{addr}/");
122+
println!(" Viewer: http://{bind}/");
123+
if bind.starts_with("0.0.0.0") || bind.starts_with("::") {
124+
eprintln!(
125+
" WARNING: bound to {bind} — camera/CSI/vitals are exposed \
126+
to the network. Use --bind 127.0.0.1:9880 to restrict to loopback."
127+
);
128+
}
112129

113-
let listener = tokio::net::TcpListener::bind(&addr).await?;
130+
let listener = tokio::net::TcpListener::bind(bind).await?;
114131
axum::serve(listener, app).await?;
115132
Ok(())
116133
}
117134

118135
fn capture_camera_cloud() -> pointcloud::PointCloud {
136+
capture_camera_cloud_with_luminance().0
137+
}
138+
139+
/// Grab one camera frame, backproject it to a point cloud, and return the
140+
/// mean luminance alongside (used to drive `set_light_level` for night mode).
141+
fn capture_camera_cloud_with_luminance() -> (pointcloud::PointCloud, Option<f32>) {
119142
let config = camera::CameraConfig::default();
120143
match camera::capture_frame(&config) {
121144
Ok(frame) => {
122-
match depth::estimate_depth(&frame.rgb, frame.width, frame.height) {
145+
// Mean luminance across the RGB frame (BT.601 coefficients).
146+
let pixels = (frame.width as usize) * (frame.height as usize);
147+
let mut sum = 0.0f64;
148+
let mut n = 0usize;
149+
for chunk in frame.rgb.chunks_exact(3).take(pixels) {
150+
sum += 0.299 * chunk[0] as f64
151+
+ 0.587 * chunk[1] as f64
152+
+ 0.114 * chunk[2] as f64;
153+
n += 1;
154+
}
155+
let lum = if n > 0 { Some((sum / n as f64) as f32) } else { None };
156+
157+
let cloud = match depth::estimate_depth(&frame.rgb, frame.width, frame.height) {
123158
Ok(dm) => {
124159
let intr = depth::CameraIntrinsics::default();
125160
depth::backproject_depth(&dm, &intr, Some(&frame.rgb), 2)
126161
}
127162
Err(_) => depth::demo_depth_cloud(),
128-
}
163+
};
164+
(cloud, lum)
129165
}
130-
Err(_) => depth::demo_depth_cloud(),
166+
Err(_) => (depth::demo_depth_cloud(), None),
131167
}
132168
}
133169

@@ -185,234 +221,12 @@ async fn api_health() -> Json<serde_json::Value> {
185221
Json(serde_json::json!({"status": "ok"}))
186222
}
187223

188-
async fn index() -> Html<String> {
189-
Html(r#"<!DOCTYPE html>
190-
<html>
191-
<head>
192-
<title>RuView — Camera + WiFi CSI Point Cloud</title>
193-
<style>
194-
body { margin: 0; background: #0a0a0a; color: #e8a634; font-family: monospace; }
195-
canvas { display: block; }
196-
#info { position: absolute; top: 10px; left: 10px; padding: 12px; background: rgba(0,0,0,0.85); border: 1px solid #e8a634; border-radius: 6px; min-width: 240px; font-size: 13px; line-height: 1.5; }
197-
.live { color: #4f4; } .demo { color: #f44; }
198-
.section { margin-top: 6px; padding-top: 6px; border-top: 1px solid #333; }
199-
.label { color: #888; }
200-
</style>
201-
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
202-
<script src="https://cdn.jsdelivr.net/npm/three@0.128.0/examples/js/controls/OrbitControls.js"></script>
203-
</head>
204-
<body>
205-
<div id="info">
206-
<h3 style="margin:0 0 8px 0">RuView Point Cloud</h3>
207-
<div id="stats">Loading...</div>
208-
</div>
209-
<script>
210-
var scene = new THREE.Scene();
211-
scene.background = new THREE.Color(0x0a0a0a);
212-
var camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 100);
213-
camera.position.set(0, 2, -4);
214-
camera.lookAt(0, 0, 2);
215-
216-
var renderer = new THREE.WebGLRenderer({ antialias: true });
217-
renderer.setSize(window.innerWidth, window.innerHeight);
218-
document.body.appendChild(renderer.domElement);
224+
/// Viewer HTML/JS, compiled into the binary at build time. Keep the
225+
/// markup in `viewer.html` to keep this file under the 500-LOC limit and
226+
/// to make it trivially editable (no Rust rebuild when tweaking JS).
227+
static VIEWER_HTML: &str = include_str!("viewer.html");
219228

220-
var controls = new THREE.OrbitControls(camera, renderer.domElement);
221-
controls.enableDamping = true;
222-
controls.target.set(0, 0, 2);
223-
224-
var pointsMesh = null;
225-
var lastFrame = -1;
226-
var skeletonGroup = null;
227-
var prevTimestamp = 0;
228-
var frameRateVal = 0;
229-
230-
// COCO skeleton connections: pairs of keypoint indices
231-
// 0=nose 1=leftEye 2=rightEye 3=leftEar 4=rightEar
232-
// 5=leftShoulder 6=rightShoulder 7=leftElbow 8=rightElbow
233-
// 9=leftWrist 10=rightWrist 11=leftHip 12=rightHip
234-
// 13=leftKnee 14=rightKnee 15=leftAnkle 16=rightAnkle
235-
var COCO_BONES = [
236-
[0,1],[0,2],[1,3],[2,4],
237-
[5,6],[5,7],[7,9],[6,8],[8,10],
238-
[5,11],[6,12],[11,12],
239-
[11,13],[13,15],[12,14],[14,16]
240-
];
241-
242-
function clearSkeleton() {
243-
if (skeletonGroup) {
244-
scene.remove(skeletonGroup);
245-
skeletonGroup.traverse(function(obj) {
246-
if (obj.geometry) obj.geometry.dispose();
247-
if (obj.material) obj.material.dispose();
248-
});
249-
skeletonGroup = null;
250-
}
251-
}
252-
253-
function drawSkeleton(keypoints) {
254-
clearSkeleton();
255-
if (!keypoints || keypoints.length < 17) return;
256-
skeletonGroup = new THREE.Group();
257-
258-
// Map keypoints from [0,1] to scene coords
259-
// x: [-2, 2], y: [2, -2] (flip y), z: fixed at 2
260-
var sphereGeo = new THREE.SphereGeometry(0.04, 8, 8);
261-
var sphereMat = new THREE.MeshBasicMaterial({ color: 0xffff00 });
262-
var positions3D = [];
263-
var i, kp, sx, sy;
264-
for (i = 0; i < 17; i++) {
265-
kp = keypoints[i];
266-
if (!kp) { positions3D.push(null); continue; }
267-
sx = (kp[0] - 0.5) * 4;
268-
sy = (0.5 - kp[1]) * 4;
269-
positions3D.push([sx, sy, 2]);
270-
var sphere = new THREE.Mesh(sphereGeo, sphereMat);
271-
sphere.position.set(sx, sy, 2);
272-
skeletonGroup.add(sphere);
273-
}
274-
275-
// Draw bones as white lines
276-
var lineMat = new THREE.LineBasicMaterial({ color: 0xffffff, linewidth: 2 });
277-
var b, a, bIdx;
278-
for (b = 0; b < COCO_BONES.length; b++) {
279-
a = COCO_BONES[b][0];
280-
bIdx = COCO_BONES[b][1];
281-
if (!positions3D[a] || !positions3D[bIdx]) continue;
282-
var lineGeo = new THREE.BufferGeometry();
283-
var verts = new Float32Array([
284-
positions3D[a][0], positions3D[a][1], positions3D[a][2],
285-
positions3D[bIdx][0], positions3D[bIdx][1], positions3D[bIdx][2]
286-
]);
287-
lineGeo.setAttribute("position", new THREE.BufferAttribute(verts, 3));
288-
var line = new THREE.Line(lineGeo, lineMat);
289-
skeletonGroup.add(line);
290-
}
291-
292-
scene.add(skeletonGroup);
293-
}
294-
295-
async function fetchCloud() {
296-
try {
297-
var resp = await fetch("/api/splats");
298-
var data = await resp.json();
299-
if (data.splats && data.frame !== lastFrame) {
300-
// Compute CSI frame rate
301-
var now = Date.now();
302-
if (prevTimestamp > 0) {
303-
var dt = (now - prevTimestamp) / 1000.0;
304-
if (dt > 0) frameRateVal = (1.0 / dt).toFixed(1);
305-
}
306-
prevTimestamp = now;
307-
lastFrame = data.frame;
308-
updateSplats(data.splats);
309-
310-
// Draw skeleton if available
311-
var pipe = data.pipeline;
312-
if (pipe && pipe.skeleton && pipe.skeleton.keypoints) {
313-
drawSkeleton(pipe.skeleton.keypoints);
314-
} else {
315-
clearSkeleton();
316-
}
317-
318-
// Build info panel
319-
var mode = data.live
320-
? '<span class="live">&#9679; LIVE</span>'
321-
: '<span class="demo">&#9679; DEMO</span>';
322-
var html = mode + " Camera + CSI<br>"
323-
+ "Splats: " + data.count + "<br>"
324-
+ "Frame: " + data.frame;
325-
326-
// CSI frame rate
327-
html += '<div class="section">'
328-
+ '<span class="label">CSI Rate:</span> '
329-
+ frameRateVal + " fps</div>";
330-
331-
// Skeleton confidence
332-
if (pipe && pipe.skeleton && pipe.skeleton.confidence !== undefined) {
333-
var conf = (pipe.skeleton.confidence * 100).toFixed(0);
334-
html += '<div class="section">'
335-
+ '<span class="label">Skeleton:</span> '
336-
+ conf + "% confidence</div>";
337-
}
338-
339-
// Weather data
340-
if (pipe && pipe.weather) {
341-
var w = pipe.weather;
342-
html += '<div class="section">'
343-
+ '<span class="label">Weather:</span> ';
344-
if (w.temperature !== undefined) {
345-
html += w.temperature + "&deg;C";
346-
}
347-
if (w.conditions) {
348-
html += " " + w.conditions;
349-
}
350-
html += "</div>";
351-
}
352-
353-
// Building count from geo
354-
if (pipe && pipe.geo && pipe.geo.building_count !== undefined) {
355-
html += '<div class="section">'
356-
+ '<span class="label">Buildings:</span> '
357-
+ pipe.geo.building_count + "</div>";
358-
}
359-
360-
// Vitals
361-
if (pipe && pipe.vitals) {
362-
var v = pipe.vitals;
363-
html += '<div class="section">'
364-
+ '<span class="label">Vitals:</span> ';
365-
if (v.breathing_rate !== undefined) {
366-
html += "BR " + v.breathing_rate + "/min";
367-
}
368-
if (v.motion_score !== undefined) {
369-
html += " Motion " + (v.motion_score * 100).toFixed(0) + "%";
370-
}
371-
html += "</div>";
372-
}
373-
374-
document.getElementById("stats").innerHTML = html;
375-
}
376-
} catch(e) {}
377-
}
378-
fetchCloud();
379-
setInterval(fetchCloud, 500);
380-
381-
function updateSplats(splats) {
382-
if (pointsMesh) scene.remove(pointsMesh);
383-
var geometry = new THREE.BufferGeometry();
384-
var positions = new Float32Array(splats.length * 3);
385-
var colors = new Float32Array(splats.length * 3);
386-
var i, s;
387-
for (i = 0; i < splats.length; i++) {
388-
s = splats[i];
389-
positions[i*3] = s.center[0];
390-
positions[i*3+1] = -s.center[1];
391-
positions[i*3+2] = s.center[2];
392-
colors[i*3] = s.color[0];
393-
colors[i*3+1] = s.color[1];
394-
colors[i*3+2] = s.color[2];
395-
}
396-
geometry.setAttribute("position", new THREE.BufferAttribute(positions, 3));
397-
geometry.setAttribute("color", new THREE.BufferAttribute(colors, 3));
398-
pointsMesh = new THREE.Points(geometry, new THREE.PointsMaterial({
399-
size: 0.02, vertexColors: true, sizeAttenuation: true
400-
}));
401-
scene.add(pointsMesh);
402-
}
403-
404-
function animate() {
405-
requestAnimationFrame(animate);
406-
controls.update();
407-
renderer.render(scene, camera);
408-
}
409-
animate();
410-
window.addEventListener("resize", function() {
411-
camera.aspect = window.innerWidth / window.innerHeight;
412-
camera.updateProjectionMatrix();
413-
renderer.setSize(window.innerWidth, window.innerHeight);
414-
});
415-
</script>
416-
</body>
417-
</html>"#.to_string())
229+
async fn index() -> Html<&'static str> {
230+
Html(VIEWER_HTML)
418231
}
232+

0 commit comments

Comments (0)