1"""
217. ๋น๋์ค ์ฒ๋ฆฌ
3- VideoCapture (์นด๋ฉ๋ผ, ํ์ผ)
4- VideoWriter (๋น๋์ค ์ ์ฅ)
5- ํ๋ ์ ์ฒ๋ฆฌ
6- ๋ฐฑ๊ทธ๋ผ์ด๋ ์ฐจ๊ฐ
7- ๊ดํ ํ๋ฆ (Optical Flow)
8"""
9
10import cv2
11import numpy as np
12
13
def video_capture_basics():
    """Demonstrate cv2.VideoCapture basics and save a sample frame.

    Prints the common ways to open a capture source and the key capture
    properties, then synthesizes 30 frames with a moving circle and saves
    the middle frame to 'video_frame_sample.jpg'.
    """
    print("=" * 50)
    print("VideoCapture basics")
    print("=" * 50)

    print("\nCamera capture:")
    print("  cap = cv2.VideoCapture(0)  # default camera")
    print("  cap = cv2.VideoCapture(1)  # second camera")

    print("\nFile capture:")
    print("  cap = cv2.VideoCapture('video.mp4')")
    print("  cap = cv2.VideoCapture('rtsp://...')  # streaming")

    print("\nMain properties:")
    properties = [
        ('CAP_PROP_FRAME_WIDTH', 'frame width'),
        ('CAP_PROP_FRAME_HEIGHT', 'frame height'),
        ('CAP_PROP_FPS', 'frames per second'),
        ('CAP_PROP_FRAME_COUNT', 'total frame count'),
        ('CAP_PROP_POS_FRAMES', 'current frame position'),
        ('CAP_PROP_POS_MSEC', 'current time (ms)'),
    ]

    for prop, desc in properties:
        print(f"  cv2.{prop}: {desc}")

    # Simulation: generate synthetic video frames
    print("\nGenerating simulated video...")
    frames = []
    for i in range(30):
        frame = np.zeros((480, 640, 3), dtype=np.uint8)
        frame[:] = [100, 100, 100]  # gray background
        # Circle moves 15 px to the right each frame
        cv2.circle(frame, (100 + i * 15, 240), 30, (0, 255, 0), -1)
        cv2.putText(frame, f'Frame {i}', (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
        frames.append(frame)

    cv2.imwrite('video_frame_sample.jpg', frames[15])
    print("Sample frame saved: video_frame_sample.jpg")
54
55
def video_writer_demo():
    """Demonstrate saving video with cv2.VideoWriter.

    Writes 90 frames (3 seconds at 30 fps) of an animated circle to
    'output_video.mp4', then lists commonly supported codecs.
    """
    print("\n" + "=" * 50)
    print("VideoWriter (saving video)")
    print("=" * 50)

    # Output settings
    width, height = 640, 480
    fps = 30.0

    # Codec selection
    # fourcc = cv2.VideoWriter_fourcc(*'XVID')  # AVI
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')  # MP4

    print(f"Video settings: {width}x{height}, {fps}fps")

    # Create the writer
    out = cv2.VideoWriter('output_video.mp4', fourcc, fps, (width, height))

    if not out.isOpened():
        print("Could not open VideoWriter.")
        return

    # Generate frames
    for i in range(90):  # 3 seconds
        frame = np.zeros((height, width, 3), dtype=np.uint8)
        frame[:] = [50, 50, 50]

        # Moving circle: constant horizontal speed, sinusoidal vertical motion
        x = int(100 + 5 * i)
        y = int(240 + 100 * np.sin(i * 0.1))
        cv2.circle(frame, (x, y), 40, (0, 200, 0), -1)

        # Frame-number overlay
        cv2.putText(frame, f'Frame: {i}', (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)

        out.write(frame)

    out.release()
    print("Video saved: output_video.mp4")

    print("\nSupported codecs:")
    codecs = [
        ("'XVID'", '.avi', 'MPEG-4'),
        ("'mp4v'", '.mp4', 'MPEG-4'),
        ("'avc1'", '.mp4', 'H.264 (macOS)'),
        ("'MJPG'", '.avi', 'Motion JPEG'),
    ]
    for code, ext, desc in codecs:
        print(f"  cv2.VideoWriter_fourcc(*{code}) -> {ext} ({desc})")
107
108
def frame_processing_demo():
    """Demonstrate typical per-frame processing operations.

    Builds a synthetic frame, applies grayscale conversion, Gaussian blur,
    Canny edge detection and an HSV saturation boost, and saves each result
    as a JPEG. Also prints a template for a real-time processing loop.
    """
    print("\n" + "=" * 50)
    print("Frame processing")
    print("=" * 50)

    # Simulated frame
    frame = np.zeros((480, 640, 3), dtype=np.uint8)
    frame[:] = [150, 150, 150]
    cv2.rectangle(frame, (200, 150), (440, 330), (0, 100, 200), -1)
    cv2.circle(frame, (320, 240), 50, (200, 100, 0), -1)

    # Various processing steps
    # 1. Grayscale
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # 2. Blur
    blurred = cv2.GaussianBlur(frame, (15, 15), 0)

    # 3. Edges
    edges = cv2.Canny(gray, 50, 150)

    # 4. Color adjustment
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    hsv[:, :, 1] = cv2.add(hsv[:, :, 1], 50)  # boost saturation (saturating add)
    enhanced = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)

    print("Frame processing examples:")
    print("  1. Grayscale conversion")
    print("  2. Gaussian blur")
    print("  3. Canny edge detection")
    print("  4. Color enhancement (HSV)")

    cv2.imwrite('frame_original.jpg', frame)
    cv2.imwrite('frame_gray.jpg', gray)
    cv2.imwrite('frame_blurred.jpg', blurred)
    cv2.imwrite('frame_edges.jpg', edges)
    cv2.imwrite('frame_enhanced.jpg', enhanced)

    print("\nReal-time processing template:")
    code = '''
while True:
    ret, frame = cap.read()
    if not ret:
        break

    # Process the frame
    processed = your_processing_function(frame)

    cv2.imshow('Video', processed)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
'''
    print(code)
163
164
def background_subtraction_demo():
    """Demonstrate MOG2 and KNN background subtraction.

    Feeds 30 synthetic frames (static background plus a moving circle)
    through both subtractors, saves the middle frame and its foreground
    masks, and prints the key parameters and common post-processing steps.
    """
    print("\n" + "=" * 50)
    print("Background subtraction")
    print("=" * 50)

    # Simulated frames (static background + moving object)
    background = np.zeros((480, 640, 3), dtype=np.uint8)
    background[:] = [120, 120, 120]
    cv2.rectangle(background, (100, 300), (250, 400), (80, 80, 80), -1)  # static object

    # Create background subtractors
    # MOG2: Mixture of Gaussians
    bg_subtractor_mog2 = cv2.createBackgroundSubtractorMOG2(
        history=500,
        varThreshold=16,
        detectShadows=True
    )

    # KNN
    bg_subtractor_knn = cv2.createBackgroundSubtractorKNN(
        history=500,
        dist2Threshold=400,
        detectShadows=True
    )

    print("Background subtractors:")
    print("  1. MOG2 (Mixture of Gaussians)")
    print("     - Effective for complex backgrounds")
    print("     - Can detect shadows")
    print("")
    print("  2. KNN (K-Nearest Neighbors)")
    print("     - Effective for non-parametric distributions")
    print("     - Robust to lighting changes")

    # Simulation (moving circle)
    for i in range(30):
        frame = background.copy()
        x = 100 + i * 15
        cv2.circle(frame, (x, 200), 40, (0, 200, 0), -1)

        # Apply background subtraction
        fg_mask_mog2 = bg_subtractor_mog2.apply(frame)
        fg_mask_knn = bg_subtractor_knn.apply(frame)

        if i == 15:  # save the middle frame
            cv2.imwrite('bg_frame.jpg', frame)
            cv2.imwrite('bg_mask_mog2.jpg', fg_mask_mog2)
            cv2.imwrite('bg_mask_knn.jpg', fg_mask_knn)

    print("\nParameters:")
    print("  history: number of past frames used for learning")
    print("  varThreshold (MOG2): pixel-to-model distance threshold")
    print("  dist2Threshold (KNN): distance threshold")
    print("  detectShadows: whether to detect shadows")

    print("\nPost-processing:")
    print("  - Morphological operations (noise removal)")
    print("  - Contour detection (object extraction)")
    print("  - Bounding-box drawing")
225
226
def optical_flow_demo():
    """Demonstrate sparse (Lucas-Kanade) and dense (Farneback) optical flow.

    Creates two grayscale frames with shifted shapes, tracks corner features
    between them with Lucas-Kanade, computes a dense Farneback flow field,
    and saves visualizations of both results.
    """
    print("\n" + "=" * 50)
    print("Optical Flow")
    print("=" * 50)

    # Two frames simulating motion
    frame1 = np.zeros((300, 400), dtype=np.uint8)
    frame1[:] = 100
    cv2.circle(frame1, (100, 150), 30, 200, -1)
    cv2.rectangle(frame1, (250, 100), (320, 200), 180, -1)

    frame2 = np.zeros((300, 400), dtype=np.uint8)
    frame2[:] = 100
    cv2.circle(frame2, (130, 150), 30, 200, -1)  # moved right
    cv2.rectangle(frame2, (250, 130), (320, 230), 180, -1)  # moved down

    # Lucas-Kanade (sparse)
    print("1. Lucas-Kanade (Sparse Optical Flow)")
    print("   - Tracks the motion of selected feature points")
    print("   - Fast, feature-point based")

    # Detect features to track
    p0 = cv2.goodFeaturesToTrack(frame1, maxCorners=100, qualityLevel=0.3,
                                 minDistance=7, blockSize=7)

    if p0 is not None:
        # Compute optical flow
        p1, status, err = cv2.calcOpticalFlowPyrLK(
            frame1, frame2, p0, None,
            winSize=(15, 15),
            maxLevel=2,
            criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03)
        )

        # Keep only successfully tracked points
        good_old = p0[status == 1]
        good_new = p1[status == 1]

        # Visualization: draw motion vectors on the second frame
        result_lk = cv2.cvtColor(frame2, cv2.COLOR_GRAY2BGR)
        for old, new in zip(good_old, good_new):
            a, b = new.ravel().astype(int)
            c, d = old.ravel().astype(int)
            cv2.line(result_lk, (a, b), (c, d), (0, 255, 0), 2)
            cv2.circle(result_lk, (a, b), 5, (0, 0, 255), -1)

        cv2.imwrite('optflow_lk.jpg', result_lk)
        print(f"   Tracked points: {len(good_new)}")

    # Farneback (dense)
    print("\n2. Farneback (Dense Optical Flow)")
    print("   - Computes motion for every pixel")
    print("   - Slower, captures global motion")

    flow = cv2.calcOpticalFlowFarneback(
        frame1, frame2, None,
        pyr_scale=0.5,
        levels=3,
        winsize=15,
        iterations=3,
        poly_n=5,
        poly_sigma=1.2,
        flags=0
    )

    # Visualize the flow field as color (hue = direction, value = magnitude)
    hsv = np.zeros((frame1.shape[0], frame1.shape[1], 3), dtype=np.uint8)
    hsv[..., 1] = 255

    mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
    hsv[..., 0] = ang * 180 / np.pi / 2  # radians -> OpenCV hue range [0, 180)
    hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)

    result_fb = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)

    cv2.imwrite('optflow_frame1.jpg', frame1)
    cv2.imwrite('optflow_frame2.jpg', frame2)
    cv2.imwrite('optflow_farneback.jpg', result_fb)

    print(f"   Flow field shape: {flow.shape}")

    print("\nOptical flow applications:")
    print("  - Motion detection")
    print("  - Object tracking")
    print("  - Video compression")
    print("  - Action recognition")
314
315
def video_tracking_demo():
    """List the OpenCV object trackers and print a usage template."""
    print("\n" + "=" * 50)
    print("Object tracking")
    print("=" * 50)

    print("OpenCV tracker types:")
    trackers = [
        ('BOOSTING', 'legacy method, slow'),
        ('MIL', 'Multiple Instance Learning'),
        ('KCF', 'Kernelized Correlation Filters, fast'),
        ('TLD', 'Tracking-Learning-Detection'),
        ('MEDIANFLOW', 'good for predictable motion'),
        ('GOTURN', 'deep-learning based'),
        ('MOSSE', 'very fast'),
        ('CSRT', 'accurate, somewhat slow'),
    ]

    for name, desc in trackers:
        print(f"  cv2.Tracker{name}_create(): {desc}")

    print("\nTracker usage template:")
    code = '''
# Create a tracker
tracker = cv2.TrackerKCF_create()
# tracker = cv2.TrackerCSRT_create()  # more accurate

# Set the initial bounding box
bbox = (x, y, w, h)  # or cv2.selectROI()
tracker.init(first_frame, bbox)

# Tracking loop
while True:
    ret, frame = cap.read()
    success, bbox = tracker.update(frame)

    if success:
        x, y, w, h = [int(v) for v in bbox]
        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
    else:
        cv2.putText(frame, "Tracking failure", (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
'''
    print(code)
360
361
def performance_tips():
    """Print practical performance-optimization tips for video processing."""
    print("\n" + "=" * 50)
    print("Video processing performance optimization")
    print("=" * 50)

    print("""
1. Reduce frame size
   - Before processing: frame = cv2.resize(frame, None, fx=0.5, fy=0.5)
   - Map detection results back to the original resolution

2. Skip frames
   - Processing every frame is often unnecessary
   - Process only when frame_count % skip_frames == 0

3. Use an ROI (Region of Interest)
   - Process only the area of interest instead of the whole frame
   - roi = frame[y:y+h, x:x+w]

4. Multithreading / multiprocessing
   - Separate capture from processing
   - Pass frames through a Queue

5. GPU acceleration (CUDA)
   - cv2.cuda.GpuMat()
   - Use functions from the cv2.cuda module

6. Capture buffer setting
   - cap.set(cv2.CAP_PROP_BUFFERSIZE, 1)
   - Minimizes latency
""")
393
394
def main():
    """Run all video-processing demos in order."""
    # VideoCapture basics
    video_capture_basics()

    # VideoWriter
    video_writer_demo()

    # Frame processing
    frame_processing_demo()

    # Background subtraction
    background_subtraction_demo()

    # Optical flow
    optical_flow_demo()

    # Object tracking
    video_tracking_demo()

    # Performance tips
    performance_tips()

    print("\nVideo processing demos complete!")
419
420
# Run all demos when executed as a script.
if __name__ == '__main__':
    main()