#!/usr/bin/env python3
"""
Image analysis project - simulation mode
Video capture, object detection, and motion detection modeled on a Pi Camera.

Runs in simulation mode, so it can be executed without the real hardware.
"""
8
9import time
10import json
11import os
12from datetime import datetime
13from typing import List, Dict, Tuple, Optional
14from dataclasses import dataclass
15from enum import Enum
16import threading
17import io
18import random
19
20# numpy๋ ์๋ฎฌ๋ ์ด์
๋ชจ๋์์ ์ ํ์
HAS_NUMPY = True
try:
    import numpy as np
except ImportError:
    HAS_NUMPY = False
    # Warn once at import time; the module still runs in pure-Python simulation.
    print("[경고] numpy가 설치되지 않음. 시뮬레이션 모드로 실행합니다.")
    print("       실제 사용 시 'pip install numpy' 설치 필요\n")
28
29
30# ==============================================================================
31# ๋ฐ์ดํฐ ํด๋์ค
32# ==============================================================================
33
class DetectionClass(Enum):
    """Closed set of object classes the detector can report."""
    PERSON = "person"
    CAR = "car"
    DOG = "dog"
    CAT = "cat"
    BICYCLE = "bicycle"
    # Fallback label for anything outside the known classes.
    UNKNOWN = "unknown"
42
43
@dataclass
class BoundingBox:
    """Axis-aligned box: top-left corner (x1, y1), bottom-right corner (x2, y2)."""
    x1: int
    y1: int
    x2: int
    y2: int

    @property
    def area(self) -> int:
        """Box area in pixels."""
        return (self.x2 - self.x1) * (self.y2 - self.y1)

    @property
    def width(self) -> int:
        """Horizontal extent in pixels."""
        return self.x2 - self.x1

    @property
    def height(self) -> int:
        """Vertical extent in pixels."""
        return self.y2 - self.y1
64
@dataclass
class Detection:
    """Single object-detection result for one frame."""
    class_name: str      # label string, e.g. "person"
    confidence: float    # detector confidence score
    bbox: BoundingBox    # location of the object within the frame
    timestamp: datetime  # wall-clock time the detection was produced
73
@dataclass
class MotionEvent:
    """Motion-detection event: the changed regions found in one frame."""
    regions: List[BoundingBox]  # bounding boxes of the moving areas
    area: int                   # sum of the region areas, in pixels
    timestamp: datetime         # when the event was recorded
    frame_id: int               # index of the frame that triggered the event
82
83# ==============================================================================
84# ์๋ฎฌ๋ ์ด์
์นด๋ฉ๋ผ
85# ==============================================================================
86
class SimulatedCamera:
    """Simulated camera (stand-in for a real Pi Camera).

    Produces random-noise frames with a moving red square so the downstream
    detection/motion code has something to analyze.
    """

    def __init__(self, resolution: Tuple[int, int] = (640, 480)):
        """
        Args:
            resolution: (width, height) of the generated frames, in pixels.
        """
        self.resolution = resolution
        self.is_running = False
        self.frame_count = 0  # number of frames captured so far
        print(f"[시뮬레이션] 카메라 초기화: {resolution[0]}x{resolution[1]}")

    def start(self):
        """Start the camera."""
        self.is_running = True
        print("[시뮬레이션] 카메라 시작")

    def stop(self):
        """Stop the camera."""
        self.is_running = False
        print("[시뮬레이션] 카메라 중지")

    def capture_frame(self):
        """Capture one frame (simulated).

        Returns:
            An (height, width, 3) uint8 numpy array when numpy is available,
            otherwise a nested list of the same shape.

        Raises:
            RuntimeError: if the camera has not been started.
        """
        if not self.is_running:
            raise RuntimeError("카메라가 시작되지 않음")

        # Build a frame of random noise
        width, height = self.resolution

        if HAS_NUMPY:
            frame = np.random.randint(0, 256, (height, width, 3), dtype=np.uint8)

            # Add a simple pattern: a red square that moves 5 px per frame
            x_offset = (self.frame_count * 5) % width
            y_offset = 100
            frame[y_offset:y_offset + 50, x_offset:x_offset + 50] = [255, 0, 0]
        else:
            # Without numpy, simulate the frame as nested lists
            frame = [[[random.randint(0, 255) for _ in range(3)]
                      for _ in range(width)]
                     for _ in range(height)]

        self.frame_count += 1
        return frame

    def capture_image(self, filename: str):
        """Save an image to *filename* (simulated).

        Fix: report the actual target filename (the parameter was previously
        ignored), and compute the shape safely when numpy is unavailable —
        nested lists have no ``.shape`` attribute.
        """
        frame = self.capture_frame()
        if HAS_NUMPY:
            shape = frame.shape
        else:
            shape = (len(frame), len(frame[0]) if frame else 0, 3)
        print(f"[시뮬레이션] 이미지 저장: {filename} ({shape})")
        # A real implementation would do PIL.Image.fromarray(frame).save(filename)
136
137# ==============================================================================
138# TFLite ๊ฐ์ฒด ๊ฒ์ถ (์๋ฎฌ๋ ์ด์
)
139# ==============================================================================
140
class TFLiteObjectDetector:
    """Simulated TFLite object detector.

    Mimics the interface of a real TFLite SSD pipeline (preprocess ->
    detect -> draw) while producing random detections.
    """

    COCO_LABELS = [
        'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
        'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',
        'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep',
        'cow', 'elephant', 'bear', 'zebra', 'giraffe'
    ]

    def __init__(self, model_path: str = None, threshold: float = 0.5):
        """
        Args:
            model_path: path to the .tflite model; a placeholder name is
                used when omitted.
            threshold: minimum confidence for a detection to be reported.
        """
        self.model_path = model_path or "simulated_model.tflite"
        self.threshold = threshold
        self.input_size = (300, 300)
        print(f"[시뮬레이션] TFLite 모델 로드: {self.model_path}")
        print(f"  - 입력 크기: {self.input_size}")
        print(f"  - 임계값: {self.threshold}")

    def preprocess(self, frame):
        """Resize and normalize the frame for the (simulated) model input."""
        # Simulated resize; a real implementation would use cv2.resize.
        target_h, target_w = self.input_size

        if not HAS_NUMPY:
            # Without numpy the frame is passed through untouched.
            return frame
        resized = np.random.randint(0, 256, (target_h, target_w, 3), dtype=np.uint8)
        # Scale to [0, 1] floats.
        return resized.astype(np.float32) / 255.0

    def detect(self, frame) -> List[Detection]:
        """Run (simulated) inference and return the detections for *frame*."""
        self.preprocess(frame)  # result unused in simulation, kept for parity

        # Mostly empty frames, occasionally one or two objects.
        num_objects = random.choice([0, 0, 0, 1, 1, 2])

        if HAS_NUMPY:
            h, w = frame.shape[:2]
        else:
            h, w = len(frame), len(frame[0]) if frame else 0

        results: List[Detection] = []
        for _ in range(num_objects):
            # Pick a label from the first few COCO classes.
            label = self.COCO_LABELS[random.randint(0, min(len(self.COCO_LABELS), 10) - 1)]
            score = random.uniform(self.threshold, 1.0)

            # Random box, clamped into the frame.
            left = random.randint(0, w // 2)
            top = random.randint(0, h // 2)
            right = left + random.randint(50, w // 3)
            bottom = top + random.randint(50, h // 3)

            box = BoundingBox(
                x1=min(left, w - 1),
                y1=min(top, h - 1),
                x2=min(right, w - 1),
                y2=min(bottom, h - 1)
            )
            results.append(Detection(
                class_name=label,
                confidence=score,
                bbox=box,
                timestamp=datetime.now()
            ))

        return results

    def draw_detections(self, frame, detections: List[Detection]):
        """Report the detections; returns a copy of the frame under numpy,
        or the frame itself in pure-Python simulation."""
        result = frame.copy() if HAS_NUMPY else frame

        for hit in detections:
            # A real implementation would use cv2.rectangle / cv2.putText.
            label = f"{hit.class_name}: {hit.confidence:.2f}"
            print(f"  [검출] {label} at ({hit.bbox.x1}, {hit.bbox.y1})")

        return result
234
235
236# ==============================================================================
237# ๋ชจ์
๊ฐ์ง
238# ==============================================================================
239
class MotionDetector:
    """Frame-differencing motion detector (simulated)."""

    def __init__(self, threshold: int = 30, min_area: int = 500):
        """
        Args:
            threshold: per-pixel intensity delta that counts as change.
            min_area: smallest region area (pixels) worth reporting.
        """
        self.threshold = threshold
        self.min_area = min_area
        self.prev_frame = None  # grayscale of the previous frame (numpy path)
        self.motion_count = 0   # number of motion events seen so far
        print(f"[시뮬레이션] 모션 감지기 초기화")
        print(f"  - 임계값: {threshold}")
        print(f"  - 최소 영역: {min_area} pixels")

    def detect_motion(self, frame) -> Tuple[bool, List[BoundingBox]]:
        """Compare *frame* to the previous one and report changed regions.

        Returns:
            (motion_detected, regions); regions is empty on the first call
            and whenever no motion is found.
        """
        if HAS_NUMPY:
            # Simulated grayscale conversion.
            gray = np.mean(frame, axis=2).astype(np.uint8)

            # A real implementation would also apply cv2.GaussianBlur here.

            if self.prev_frame is None:
                # No baseline yet: remember this frame, report no motion.
                self.prev_frame = gray
                return False, []

            # Per-pixel absolute difference (int16 avoids uint8 wrap-around).
            frame_delta = np.abs(gray.astype(np.int16) - self.prev_frame.astype(np.int16))

            # Binarize against the configured threshold.
            thresh = (frame_delta > self.threshold).astype(np.uint8) * 255

            # Fraction of pixels that changed.
            motion_ratio = np.sum(thresh > 0) / thresh.size

            self.prev_frame = gray

            # Call it motion when more than 5% of the frame changed.
            motion_detected = motion_ratio > 0.05

            h, w = frame.shape[:2]
        else:
            # Pure-Python fallback: coin-flip simulation.
            if self.prev_frame is None:
                self.prev_frame = frame
                return False, []

            motion_detected = random.random() < 0.1  # 10% chance per frame
            h, w = len(frame), len(frame[0]) if frame else 0

        regions: List[BoundingBox] = []
        if motion_detected:
            # Simulation: fabricate 1-3 random motion regions.
            for _ in range(random.randint(1, 3)):
                left = random.randint(0, w // 2)
                top = random.randint(0, h // 2)
                right = left + random.randint(50, 150)
                bottom = top + random.randint(50, 150)

                box = BoundingBox(
                    x1=min(left, w - 1),
                    y1=min(top, h - 1),
                    x2=min(right, w - 1),
                    y2=min(bottom, h - 1)
                )

                # Drop regions below the configured minimum area.
                if box.area >= self.min_area:
                    regions.append(box)

            self.motion_count += 1

        return motion_detected, regions
316
317
318# ==============================================================================
319# ์์ ์คํธ๋ฆฌ๋ฐ (๊ฐ๋
)
320# ==============================================================================
321
class VideoStreamer:
    """Video streamer (MJPEG-over-HTTP concept, simulated).

    A daemon thread keeps capturing frames from the camera; the most recent
    frame is held under a lock for consumers.
    """

    def __init__(self, camera: SimulatedCamera, port: int = 8080):
        self.camera = camera
        self.port = port
        self.is_streaming = False
        self.frame_rate = 30  # target capture rate, frames/sec
        self.lock = threading.Lock()  # guards current_frame
        self.current_frame = None
        print(f"[시뮬레이션] 비디오 스트리머 초기화 (포트 {port})")

    def _capture_loop(self):
        """Background capture loop; runs until is_streaming is cleared."""
        frame_interval = 1.0 / self.frame_rate

        while self.is_streaming:
            tick = time.time()

            grabbed = self.camera.capture_frame()

            # JPEG encoding is simulated; a real implementation would do
            # PIL.Image.fromarray(frame).save(buffer, 'JPEG').

            with self.lock:
                self.current_frame = grabbed

            # Sleep off whatever is left of this frame's time slot.
            time.sleep(max(0, frame_interval - (time.time() - tick)))

    def start_streaming(self):
        """Start the capture thread (no-op if already streaming)."""
        if self.is_streaming:
            return

        self.is_streaming = True
        self.thread = threading.Thread(target=self._capture_loop, daemon=True)
        self.thread.start()
        print(f"[시뮬레이션] 스트리밍 시작: http://0.0.0.0:{self.port}/video_feed")

    def stop_streaming(self):
        """Stop the capture thread and wait briefly for it to exit."""
        self.is_streaming = False
        if hasattr(self, 'thread'):
            self.thread.join(timeout=2)
        print("[시뮬레이션] 스트리밍 중지")

    def get_frame(self):
        """Return the most recently captured frame (None before the first)."""
        with self.lock:
            return self.current_frame
375
376
377# ==============================================================================
378# ๊ฒฐ๊ณผ ๋ก๊น
379# ==============================================================================
380
class ResultLogger:
    """Appends detection and motion results to a JSON-lines log file."""

    def __init__(self, log_dir: str = "logs"):
        """Create *log_dir* if needed and pick a timestamped .jsonl file name."""
        self.log_dir = log_dir
        os.makedirs(log_dir, exist_ok=True)
        stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        self.log_file = os.path.join(log_dir, f"detection_log_{stamp}.jsonl")
        print(f"[시뮬레이션] 로거 초기화: {self.log_file}")

    def log_detection(self, detections: List[Detection], frame_id: int):
        """Append one detection event covering all objects found in a frame."""
        entry = {
            "type": "detection",
            "timestamp": datetime.now().isoformat(),
            "frame_id": frame_id,
            "count": len(detections),
            "objects": [
                {
                    "class": d.class_name,
                    "confidence": d.confidence,
                    "bbox": [d.bbox.x1, d.bbox.y1, d.bbox.x2, d.bbox.y2]
                }
                for d in detections
            ]
        }
        self._append(entry)

    def log_motion(self, event: MotionEvent):
        """Append one motion event."""
        entry = {
            "type": "motion",
            "timestamp": event.timestamp.isoformat(),
            "frame_id": event.frame_id,
            "region_count": len(event.regions),
            "total_area": event.area,
            "regions": [[r.x1, r.y1, r.x2, r.y2] for r in event.regions]
        }
        self._append(entry)

    def _append(self, entry: Dict):
        """Write one JSON object as a single line of the log file."""
        with open(self.log_file, 'a') as f:
            f.write(json.dumps(entry) + '\n')

    def get_statistics(self) -> Dict:
        """Scan the log file and summarize event counts per type and class."""
        if not os.path.exists(self.log_file):
            return {}

        detection_count = 0
        motion_count = 0
        object_classes: Dict[str, int] = {}

        with open(self.log_file, 'r') as f:
            for line in f:
                entry = json.loads(line)
                kind = entry['type']
                if kind == 'detection':
                    detection_count += 1
                    for obj in entry['objects']:
                        name = obj['class']
                        object_classes[name] = object_classes.get(name, 0) + 1
                elif kind == 'motion':
                    motion_count += 1

        return {
            "detection_events": detection_count,
            "motion_events": motion_count,
            "object_classes": object_classes
        }
451
452
453# ==============================================================================
454# ์ฑ๋ฅ ๋ชจ๋ํฐ๋ง
455# ==============================================================================
456
class PerformanceMonitor:
    """Collects per-frame timing metrics and summarizes them on demand."""

    def __init__(self):
        # fps / detection_time each accumulate one sample per processed frame.
        self.metrics = {
            "fps": [],
            "detection_time": [],
            "frame_count": 0,
            "start_time": None
        }

    def start(self):
        """Mark the start of the monitoring window."""
        self.metrics["start_time"] = time.time()

    def record_frame(self, processing_time: float):
        """Record one frame's processing time, in seconds."""
        self.metrics["frame_count"] += 1
        self.metrics["detection_time"].append(processing_time)

        # Instantaneous FPS is undefined for a zero-length frame; skip it.
        if processing_time > 0:
            self.metrics["fps"].append(1.0 / processing_time)

    def get_report(self) -> Dict:
        """Build the performance summary; {} when start() was never called."""
        if self.metrics["start_time"] is None:
            return {}

        elapsed = time.time() - self.metrics["start_time"]
        fps_samples = self.metrics["fps"]
        time_samples = self.metrics["detection_time"]

        if HAS_NUMPY:
            avg_fps = np.mean(fps_samples) if fps_samples else 0
            avg_detection_time = np.mean(time_samples) if time_samples else 0
        else:
            avg_fps = sum(fps_samples) / len(fps_samples) if fps_samples else 0
            avg_detection_time = sum(time_samples) / len(time_samples) if time_samples else 0

        return {
            "total_frames": self.metrics["frame_count"],
            "elapsed_time": elapsed,
            "average_fps": avg_fps,
            "average_detection_time_ms": avg_detection_time * 1000,
            "frames_per_second_actual": self.metrics["frame_count"] / elapsed if elapsed > 0 else 0
        }
502
503
504# ==============================================================================
505# ํตํฉ ์์ ๋ถ์ ์์คํ
506# ==============================================================================
507
class ImageAnalysisSystem:
    """Integrated image-analysis system.

    Wires together the simulated camera, object detector, motion detector,
    result logger, performance monitor, and (optionally) the video streamer,
    and drives them all from a single capture/analyze loop.
    """

    def __init__(self, config: Optional[Dict] = None):
        """
        Args:
            config: optional settings dict; recognized keys are 'resolution',
                'detection_threshold', 'motion_threshold', 'min_motion_area',
                'log_dir', 'enable_streaming', 'stream_port'. Missing keys
                fall back to the defaults used below.
        """
        config = config or {}

        # Camera
        resolution = config.get('resolution', (640, 480))
        self.camera = SimulatedCamera(resolution)

        # Object detection
        self.detector = TFLiteObjectDetector(
            threshold=config.get('detection_threshold', 0.5)
        )

        # Motion detection
        self.motion_detector = MotionDetector(
            threshold=config.get('motion_threshold', 30),
            min_area=config.get('min_motion_area', 500)
        )

        # Result logger
        self.logger = ResultLogger(log_dir=config.get('log_dir', 'logs'))

        # Performance monitor
        self.perf_monitor = PerformanceMonitor()

        # Streaming (optional; only constructed when enabled)
        self.enable_streaming = config.get('enable_streaming', False)
        if self.enable_streaming:
            self.streamer = VideoStreamer(self.camera, port=config.get('stream_port', 8080))

        self.is_running = False

        print("\n" + "="*60)
        print("영상 분석 시스템 초기화 완료")
        print("="*60)

    def run(self, duration: float = 60, detect_objects: bool = True, detect_motion: bool = True):
        """Run the capture/analyze loop for *duration* seconds.

        Args:
            duration: wall-clock run time, in seconds.
            detect_objects: run the object detector on every frame.
            detect_motion: run the motion detector on every frame.
        """
        print(f"\n시스템 시작 (실행 시간: {duration}초)")
        print(f"  - 객체 검출: {'ON' if detect_objects else 'OFF'}")
        print(f"  - 모션 감지: {'ON' if detect_motion else 'OFF'}")
        print(f"  - 스트리밍: {'ON' if self.enable_streaming else 'OFF'}")
        print()

        self.camera.start()
        self.perf_monitor.start()

        if self.enable_streaming:
            self.streamer.start_streaming()

        self.is_running = True
        start_time = time.time()
        frame_id = 0

        try:
            while time.time() - start_time < duration and self.is_running:
                frame_start = time.time()

                # Capture a frame
                frame = self.camera.capture_frame()
                frame_id += 1

                # Object detection
                if detect_objects:
                    detections = self.detector.detect(frame)
                    if detections:
                        print(f"[프레임 {frame_id}] 객체 검출: {len(detections)}개")
                        for det in detections:
                            print(f"    - {det.class_name} (신뢰도: {det.confidence:.2f})")
                        self.logger.log_detection(detections, frame_id)

                # Motion detection
                if detect_motion:
                    motion_detected, regions = self.motion_detector.detect_motion(frame)
                    if motion_detected:
                        total_area = sum(r.area for r in regions)
                        event = MotionEvent(
                            regions=regions,
                            area=total_area,
                            timestamp=datetime.now(),
                            frame_id=frame_id
                        )
                        print(f"[프레임 {frame_id}] 모션 감지: {len(regions)}개 영역 (면적: {total_area})")
                        self.logger.log_motion(event)

                # Record performance for this frame
                processing_time = time.time() - frame_start
                self.perf_monitor.record_frame(processing_time)

                # Pace the loop to roughly 10 FPS
                sleep_time = max(0, 0.1 - processing_time)
                time.sleep(sleep_time)

        except KeyboardInterrupt:
            print("\n\n사용자 중단")

        finally:
            self._cleanup()

    def _cleanup(self):
        """Stop all components and print the run statistics."""
        self.is_running = False
        self.camera.stop()

        if self.enable_streaming:
            self.streamer.stop_streaming()

        # Print statistics
        print("\n" + "="*60)
        print("실행 완료 - 통계")
        print("="*60)

        # Performance statistics
        perf_report = self.perf_monitor.get_report()
        print("\n[성능]")
        print(f"  총 프레임: {perf_report.get('total_frames', 0)}")
        print(f"  실행 시간: {perf_report.get('elapsed_time', 0):.1f}초")
        print(f"  평균 FPS: {perf_report.get('average_fps', 0):.1f}")
        print(f"  평균 검출 시간: {perf_report.get('average_detection_time_ms', 0):.1f}ms")

        # Log statistics
        log_stats = self.logger.get_statistics()
        print("\n[검출 통계]")
        print(f"  검출 이벤트: {log_stats.get('detection_events', 0)}회")
        print(f"  모션 이벤트: {log_stats.get('motion_events', 0)}회")

        object_classes = log_stats.get('object_classes', {})
        if object_classes:
            print("\n[검출된 객체]")
            # Most frequently detected classes first
            for cls, count in sorted(object_classes.items(), key=lambda x: x[1], reverse=True):
                print(f"  {cls}: {count}회")

        print("\n" + "="*60)
643
644
645# ==============================================================================
646# ๋ฉ์ธ ์คํ
647# ==============================================================================
648
def main():
    """Entry point: build the system with default settings and run it."""
    print("영상 분석 프로젝트 - 시뮬레이션 모드")
    print("="*60)
    print("이 프로그램은 실제 Pi Camera 없이 시뮬레이션으로 동작합니다.")
    print()

    # Settings
    config = {
        'resolution': (640, 480),
        'detection_threshold': 0.6,
        'motion_threshold': 30,
        'min_motion_area': 500,
        'log_dir': 'logs',
        'enable_streaming': False,  # the real Flask server is not started
        'stream_port': 8080
    }

    # Build the system
    system = ImageAnalysisSystem(config)

    # Short run when invoked with --test, longer otherwise.
    import sys
    duration = 5 if '--test' in sys.argv else 30

    system.run(
        duration=duration,  # test: 5 s, normal: 30 s
        detect_objects=True,
        detect_motion=True
    )


if __name__ == "__main__":
    main()