# 20_practical_project.py
"""
20. 실전 프로젝트
- 문서 스캐너
- 차량 번호판 인식
- 실시간 객체 추적
- 이미지 파노라마
"""
import cv2
import numpy as np

# ============================================================
# 프로젝트 1: 문서 스캐너
# ============================================================

 17def document_scanner():
 18    """๋ฌธ์„œ ์Šค์บ๋„ˆ ํ”„๋กœ์ ํŠธ"""
 19    print("=" * 60)
 20    print("ํ”„๋กœ์ ํŠธ 1: ๋ฌธ์„œ ์Šค์บ๋„ˆ")
 21    print("=" * 60)
 22
 23    # ์‹œ๋ฎฌ๋ ˆ์ด์…˜์šฉ ๋ฌธ์„œ ์ด๋ฏธ์ง€ ์ƒ์„ฑ
 24    # ์‹ค์ œ๋กœ๋Š” ์นด๋ฉ”๋ผ๋กœ ์ดฌ์˜ํ•œ ์ด๋ฏธ์ง€ ์‚ฌ์šฉ
 25    img = np.zeros((600, 800, 3), dtype=np.uint8)
 26    img[:] = [150, 150, 150]  # ํšŒ์ƒ‰ ๋ฐฐ๊ฒฝ
 27
 28    # ๊ธฐ์šธ์–ด์ง„ ๋ฌธ์„œ (์‚ฌ๋‹ค๋ฆฌ๊ผด)
 29    doc_pts = np.array([[150, 100], [650, 80], [700, 520], [100, 550]], np.int32)
 30    cv2.fillPoly(img, [doc_pts], (255, 255, 255))
 31
 32    # ๋ฌธ์„œ ๋‚ด์šฉ ์‹œ๋ฎฌ๋ ˆ์ด์…˜
 33    cv2.putText(img, 'DOCUMENT TITLE', (220, 200),
 34               cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 0), 2)
 35    cv2.line(img, (200, 250), (600, 240), (100, 100, 100), 2)
 36    cv2.line(img, (200, 300), (600, 290), (100, 100, 100), 2)
 37    cv2.line(img, (200, 350), (550, 340), (100, 100, 100), 2)
 38
 39    cv2.imwrite('scanner_input.jpg', img)
 40
 41    # 1. ๊ทธ๋ ˆ์ด์Šค์ผ€์ผ ๋ฐ ๋ธ”๋Ÿฌ
 42    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
 43    blurred = cv2.GaussianBlur(gray, (5, 5), 0)
 44
 45    # 2. ์—ฃ์ง€ ๊ฒ€์ถœ
 46    edges = cv2.Canny(blurred, 50, 150)
 47
 48    # 3. ์ปจํˆฌ์–ด ๊ฒ€์ถœ
 49    contours, _ = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
 50
 51    # 4. ๊ฐ€์žฅ ํฐ ์‚ฌ๊ฐํ˜• ์ปจํˆฌ์–ด ์ฐพ๊ธฐ
 52    doc_contour = None
 53    max_area = 0
 54
 55    for cnt in contours:
 56        area = cv2.contourArea(cnt)
 57        if area > 10000:  # ์ตœ์†Œ ํฌ๊ธฐ
 58            peri = cv2.arcLength(cnt, True)
 59            approx = cv2.approxPolyDP(cnt, 0.02 * peri, True)
 60
 61            if len(approx) == 4 and area > max_area:
 62                doc_contour = approx
 63                max_area = area
 64
 65    if doc_contour is not None:
 66        # ์ฝ”๋„ˆ ์ •๋ ฌ
 67        pts = doc_contour.reshape(4, 2)
 68        rect = order_points(pts)
 69
 70        # ๊ฒฐ๊ณผ ์ด๋ฏธ์ง€์— ํ‘œ์‹œ
 71        result_contour = img.copy()
 72        cv2.drawContours(result_contour, [doc_contour], -1, (0, 255, 0), 3)
 73        for pt in rect:
 74            cv2.circle(result_contour, tuple(pt.astype(int)), 10, (0, 0, 255), -1)
 75
 76        cv2.imwrite('scanner_contour.jpg', result_contour)
 77
 78        # 5. ์›๊ทผ ๋ณ€ํ™˜
 79        width, height = 500, 700  # ์ถœ๋ ฅ ํฌ๊ธฐ
 80        dst = np.array([
 81            [0, 0],
 82            [width - 1, 0],
 83            [width - 1, height - 1],
 84            [0, height - 1]
 85        ], dtype=np.float32)
 86
 87        M = cv2.getPerspectiveTransform(rect, dst)
 88        warped = cv2.warpPerspective(img, M, (width, height))
 89
 90        # 6. ์ด์ง„ํ™” (์Šค์บ” ํšจ๊ณผ)
 91        warped_gray = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY)
 92        scanned = cv2.adaptiveThreshold(
 93            warped_gray, 255,
 94            cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
 95            cv2.THRESH_BINARY, 11, 2
 96        )
 97
 98        cv2.imwrite('scanner_warped.jpg', warped)
 99        cv2.imwrite('scanner_result.jpg', scanned)
100
101        print("๋ฌธ์„œ ์Šค์บ๋„ˆ ์™„๋ฃŒ!")
102        print("  - scanner_input.jpg: ์›๋ณธ")
103        print("  - scanner_contour.jpg: ๋ฌธ์„œ ๊ฒ€์ถœ")
104        print("  - scanner_warped.jpg: ์›๊ทผ ๋ณด์ •")
105        print("  - scanner_result.jpg: ์ตœ์ข… ์Šค์บ”")
106    else:
107        print("๋ฌธ์„œ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.")
108
109    print("\n์ฒ˜๋ฆฌ ํŒŒ์ดํ”„๋ผ์ธ:")
110    print("  1. ๊ทธ๋ ˆ์ด์Šค์ผ€์ผ + ๋ธ”๋Ÿฌ")
111    print("  2. Canny ์—ฃ์ง€ ๊ฒ€์ถœ")
112    print("  3. ์ปจํˆฌ์–ด ๊ฒ€์ถœ ๋ฐ ๊ทผ์‚ฌํ™”")
113    print("  4. 4๊ฐํ˜• ๋ฌธ์„œ ์„ ํƒ")
114    print("  5. ์›๊ทผ ๋ณ€ํ™˜")
115    print("  6. ์ด์ง„ํ™” (์„ ํƒ)")
116
117
def order_points(pts):
    """Sort 4 corner points into [top-left, top-right, bottom-right, bottom-left].

    Uses the classic heuristic: the top-left corner has the smallest x+y sum
    and the bottom-right the largest; the top-right has the smallest y-x
    difference and the bottom-left the largest.
    """
    ordered = np.zeros((4, 2), dtype=np.float32)

    # Sum of coordinates: min -> top-left, max -> bottom-right.
    coord_sums = pts.sum(axis=1)
    ordered[0] = pts[np.argmin(coord_sums)]
    ordered[2] = pts[np.argmax(coord_sums)]

    # Difference (y - x): min -> top-right, max -> bottom-left.
    coord_diffs = np.diff(pts, axis=1)
    ordered[1] = pts[np.argmin(coord_diffs)]
    ordered[3] = pts[np.argmax(coord_diffs)]

    return ordered


# ============================================================
# 프로젝트 2: 차량 번호판 인식 (개념)
# ============================================================

def license_plate_recognition():
    """License-plate recognition demo (concept only; no OCR is executed).

    Draws a synthetic car photo with a dummy plate, saves it, then prints
    the recognition pipeline and a reference code sample for the reader.
    """
    print("\n" + "=" * 60)
    print("프로젝트 2: 차량 번호판 인식")
    print("=" * 60)

    # Synthetic input: grey background, dark car body, white plate.
    scene = np.zeros((400, 600, 3), dtype=np.uint8)
    scene[:] = [200, 200, 200]

    cv2.rectangle(scene, (100, 100), (500, 350), (80, 80, 80), -1)
    cv2.rectangle(scene, (120, 120), (480, 250), (60, 60, 60), -1)

    # Plate: white fill, black outline, dummy registration number.
    px, py = 200, 280
    pw, ph = 200, 50
    cv2.rectangle(scene, (px, py), (px + pw, py + ph), (255, 255, 255), -1)
    cv2.rectangle(scene, (px, py), (px + pw, py + ph), (0, 0, 0), 2)
    cv2.putText(scene, '12AB3456', (px + 20, py + 35),
                cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 0), 2)

    cv2.imwrite('lpr_input.jpg', scene)

    print("\n번호판 인식 파이프라인:")
    print("""
1. 번호판 검출 (Plate Detection)
   - Haar Cascade (학습된 분류기)
   - DNN (YOLO, SSD)
   - 엣지 기반 검출

2. 번호판 영역 추출
   - 컨투어 검출
   - 원근 보정

3. 문자 분할 (Character Segmentation)
   - 이진화
   - 컨투어로 각 문자 분리
   - 연결 요소 분석

4. 문자 인식 (OCR)
   - Tesseract OCR
   - DNN 기반 인식
   - 템플릿 매칭

5. 후처리
   - 형식 검증
   - 노이즈 제거
""")

    # Reference implementation shown to the reader (not executed here).
    sample = '''
# 번호판 인식 코드 예시
import cv2
import pytesseract

# 1. 번호판 검출
plate_cascade = cv2.CascadeClassifier('haarcascade_plate.xml')
plates = plate_cascade.detectMultiScale(gray, 1.1, 5)

for (x, y, w, h) in plates:
    # 2. 번호판 영역 추출
    plate_img = gray[y:y+h, x:x+w]

    # 3. 전처리
    plate_img = cv2.resize(plate_img, None, fx=2, fy=2)
    _, thresh = cv2.threshold(plate_img, 0, 255,
                              cv2.THRESH_BINARY + cv2.THRESH_OTSU)

    # 4. OCR
    text = pytesseract.image_to_string(thresh, config='--psm 7')
    print(f"번호판: {text.strip()}")
'''
    print(sample)

    print("\n필요 라이브러리:")
    print("  - pytesseract: pip install pytesseract")
    print("  - Tesseract-OCR: 시스템 설치 필요")


# ============================================================
# 프로젝트 3: 실시간 객체 추적
# ============================================================

def object_tracking_project():
    """Real-time object tracking demo (simulated frames, no live camera).

    Generates a short synthetic frame sequence with one moving and one
    static object, saves an "init" and a "result" frame, then prints a
    complete tracker code sample and a tracker comparison table.
    """
    print("\n" + "=" * 60)
    print("프로젝트 3: 실시간 객체 추적")
    print("=" * 60)

    # Build 30 synthetic frames: a green circle moving on a sine path
    # plus a fixed blue-ish rectangle.
    sequence = []
    for idx in range(30):
        canvas = np.zeros((480, 640, 3), dtype=np.uint8)
        canvas[:] = [50, 50, 50]

        cx = 100 + idx * 15
        cy = 240 + int(50 * np.sin(idx * 0.3))
        cv2.circle(canvas, (cx, cy), 40, (0, 200, 0), -1)

        cv2.rectangle(canvas, (400, 100), (500, 200), (200, 0, 0), -1)

        sequence.append(canvas)

    # Mark the initial bounding box on the first frame.
    init_frame = sequence[0].copy()
    init_box = (60, 200, 80, 80)  # x, y, w, h
    bx, by, bw, bh = init_box
    cv2.rectangle(init_frame, (bx, by), (bx + bw, by + bh), (0, 255, 0), 2)
    cv2.imwrite('tracking_init.jpg', init_frame)

    print("\n추적 시뮬레이션 (KCF Tracker 개념)")

    # Visualize a "tracked" box on frame 15 using the known motion model.
    snapshot = sequence[15].copy()
    tx = 100 + 15 * 15
    ty = 240 + int(50 * np.sin(15 * 0.3))
    cv2.rectangle(snapshot, (tx - 40, ty - 40), (tx + 40, ty + 40), (0, 255, 0), 2)
    cv2.putText(snapshot, 'Tracking', (tx - 30, ty - 50),
                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
    cv2.imwrite('tracking_result.jpg', snapshot)

    print("\n완성된 추적 코드:")
    sample = '''
import cv2

# 비디오 캡처
cap = cv2.VideoCapture(0)  # 또는 'video.mp4'

# 첫 프레임 읽기
ret, frame = cap.read()

# ROI 선택 (마우스로 드래그)
bbox = cv2.selectROI("Select Object", frame, fromCenter=False)
cv2.destroyAllWindows()

# 추적기 생성 (여러 옵션)
# tracker = cv2.TrackerBoosting_create()
# tracker = cv2.TrackerMIL_create()
tracker = cv2.TrackerKCF_create()
# tracker = cv2.TrackerCSRT_create()  # 더 정확

# 초기화
tracker.init(frame, bbox)

while True:
    ret, frame = cap.read()
    if not ret:
        break

    # 추적 업데이트
    success, bbox = tracker.update(frame)

    if success:
        x, y, w, h = [int(v) for v in bbox]
        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
        cv2.putText(frame, "Tracking", (x, y-10),
                   cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
    else:
        cv2.putText(frame, "Lost", (50, 50),
                   cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)

    cv2.imshow('Tracking', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
'''
    print(sample)

    print("\n추적기 비교:")
    comparison = (
        ('KCF', '빠름, 일반적 성능'),
        ('CSRT', '정확, 다소 느림'),
        ('MOSSE', '매우 빠름, 낮은 정확도'),
        ('MedianFlow', '예측 가능한 움직임'),
    )
    for name, desc in comparison:
        print(f"  {name}: {desc}")


# ============================================================
# 프로젝트 4: 이미지 파노라마
# ============================================================

def panorama_stitching():
    """Panorama stitching demo: ORB matching + homography + warp.

    Cuts two overlapping crops out of one synthetic scene, matches ORB
    features between them, estimates a homography mapping img2 into img1's
    frame, and composites both into a single panorama image on disk.
    """
    print("\n" + "=" * 60)
    print("프로젝트 4: 이미지 파노라마")
    print("=" * 60)

    # Build one synthetic scene, then take two overlapping crops of it.
    full_scene = np.zeros((300, 800, 3), dtype=np.uint8)
    full_scene[:] = [200, 200, 200]

    cv2.circle(full_scene, (100, 150), 50, (0, 0, 150), -1)
    cv2.rectangle(full_scene, (250, 100), (350, 200), (0, 150, 0), -1)
    cv2.circle(full_scene, (500, 150), 60, (150, 0, 0), -1)
    cv2.rectangle(full_scene, (650, 80), (750, 220), (150, 150, 0), -1)

    # Columns 300-450 appear in both images (the overlap region).
    img1 = full_scene[:, :450].copy()
    img2 = full_scene[:, 300:].copy()

    cv2.imwrite('panorama_img1.jpg', img1)
    cv2.imwrite('panorama_img2.jpg', img2)

    print("스티칭할 이미지 생성 완료")

    gray1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
    gray2 = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)

    # ORB keypoints + binary descriptors for both halves.
    orb = cv2.ORB_create(nfeatures=500)
    kp1, des1 = orb.detectAndCompute(gray1, None)
    kp2, des2 = orb.detectAndCompute(gray2, None)

    if des1 is not None and des2 is not None:
        # Hamming-norm brute-force matcher; crossCheck must be off for knnMatch.
        bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=False)
        matches = bf.knnMatch(des1, des2, k=2)

        # Lowe's ratio test.
        # BUG FIX: knnMatch can return fewer than k neighbours for a query
        # (few descriptors / filtered matches), so `for m, n in matches`
        # would raise ValueError. Guard the pair length before unpacking.
        good = []
        for pair in matches:
            if len(pair) == 2 and pair[0].distance < 0.75 * pair[1].distance:
                good.append(pair[0])

        print(f"좋은 매칭: {len(good)}")

        if len(good) >= 4:  # findHomography needs at least 4 correspondences
            src_pts = np.float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
            dst_pts = np.float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)

            # Homography mapping img2 coordinates into img1's frame.
            H, mask = cv2.findHomography(dst_pts, src_pts, cv2.RANSAC, 5.0)

            if H is not None:
                h1, w1 = img1.shape[:2]
                h2, w2 = img2.shape[:2]

                # Project img2's corners to find the panorama's bounding box.
                corners = np.float32([[0, 0], [w2, 0], [w2, h2], [0, h2]]).reshape(-1, 1, 2)
                transformed = cv2.perspectiveTransform(corners, H)

                all_corners = np.concatenate([
                    np.float32([[0, 0], [w1, 0], [w1, h1], [0, h1]]).reshape(-1, 1, 2),
                    transformed
                ])

                x_min, y_min = np.int32(all_corners.min(axis=0).ravel())
                x_max, y_max = np.int32(all_corners.max(axis=0).ravel())

                # Shift everything so no warped pixel lands at a negative
                # coordinate. Explicit float dtype so `translation @ H`
                # stays a float transform for warpPerspective.
                translation = np.array([
                    [1, 0, -x_min],
                    [0, 1, -y_min],
                    [0, 0, 1]
                ], dtype=np.float64)

                result_width = x_max - x_min
                result_height = y_max - y_min

                # Warp img2, then paste img1 (identity transform) on top.
                result = cv2.warpPerspective(img2, translation @ H,
                                             (result_width, result_height))
                result[-y_min:-y_min + h1, -x_min:-x_min + w1] = img1

                cv2.imwrite('panorama_result.jpg', result)
                print("파노라마 생성 완료: panorama_result.jpg")

    print("\n파노라마 생성 파이프라인:")
    print("  1. 특징점 검출 (SIFT/ORB)")
    print("  2. 특징점 매칭")
    print("  3. 호모그래피 계산")
    print("  4. 이미지 워핑")
    print("  5. 블렌딩 (경계 부드럽게)")

    # The high-level alternative built into OpenCV.
    print("\nOpenCV Stitcher 사용:")
    code = '''
# 간단한 방법: cv2.Stitcher
stitcher = cv2.Stitcher_create(cv2.Stitcher_PANORAMA)
status, panorama = stitcher.stitch([img1, img2, img3])

if status == cv2.Stitcher_OK:
    cv2.imwrite('panorama.jpg', panorama)
else:
    print(f"스티칭 실패: {status}")
'''
    print(code)


# ============================================================
# 프로젝트 5: 증강 현실 마커
# ============================================================

def ar_marker_project():
    """AR marker demo (concept): draw a mock fiducial and show ArUco usage.

    Writes a hand-drawn marker-like image to disk, then prints the ArUco
    detection/pose-estimation code sample for the reader.
    """
    print("\n" + "=" * 60)
    print("프로젝트 5: AR 마커 기반 증강 현실")
    print("=" * 60)

    # Mock marker on a white square (a real ArUco marker comes from a
    # predefined dictionary, not hand drawing).
    side = 200
    marker = np.zeros((side, side), dtype=np.uint8)
    marker[:] = 255

    # Thick border ring plus five filled black squares.
    cv2.rectangle(marker, (10, 10), (190, 190), 0, 10)
    for top_left, bottom_right in (((40, 40), (80, 80)),
                                   ((120, 40), (160, 80)),
                                   ((40, 120), (80, 160)),
                                   ((120, 120), (160, 160)),
                                   ((80, 80), (120, 120))):
        cv2.rectangle(marker, top_left, bottom_right, 0, -1)

    cv2.imwrite('ar_marker.jpg', marker)

    print("\nArUco 마커:")
    print("  - OpenCV에 내장된 마커 시스템")
    print("  - 자동 검출 및 ID 인식")
    print("  - 4개 코너로 포즈 추정")

    sample = '''
# ArUco 마커 생성
import cv2

# 딕셔너리 선택
aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_6X6_250)

# 마커 생성 (ID=42, 크기=200x200)
marker = cv2.aruco.generateImageMarker(aruco_dict, 42, 200)
cv2.imwrite('marker_42.png', marker)

# 마커 검출
detector = cv2.aruco.ArucoDetector(aruco_dict)
corners, ids, rejected = detector.detectMarkers(gray)

# 마커 그리기
cv2.aruco.drawDetectedMarkers(image, corners, ids)

# 포즈 추정 (카메라 캘리브레이션 필요)
rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(
    corners, marker_length, camera_matrix, dist_coeffs
)

# 좌표축 그리기
for rvec, tvec in zip(rvecs, tvecs):
    cv2.drawFrameAxes(image, camera_matrix, dist_coeffs, rvec, tvec, 0.1)
'''
    print(sample)

    print("\nAR 응용:")
    print("  - 3D 객체 오버레이")
    print("  - 가상 가구 배치")
    print("  - 게임/교육")


# ============================================================
# 프로젝트 구조 가이드
# ============================================================

def project_structure_guide():
    """Print the recommended computer-vision project layout and dev tips."""
    print("\n" + "=" * 60)
    print("컴퓨터 비전 프로젝트 구조 가이드")
    print("=" * 60)

    # Directory tree printed verbatim for the reader.
    print("""
권장 프로젝트 구조:

project/
├── main.py           # 메인 실행 파일
├── config.py         # 설정 (경로, 파라미터)
├── requirements.txt  # 의존성
├── README.md
│
├── src/
│   ├── __init__.py
│   ├── detection.py      # 객체 검출
│   ├── preprocessing.py  # 전처리
│   ├── tracking.py       # 추적
│   └── utils.py          # 유틸리티
│
├── models/           # 학습된 모델 파일
│   ├── yolov3.weights
│   ├── yolov3.cfg
│   └── ...
│
├── data/             # 입력 데이터
│   ├── images/
│   └── videos/
│
├── output/           # 결과 저장
│   ├── results/
│   └── logs/
│
└── tests/            # 테스트 코드
    └── test_detection.py
""")

    print("\n개발 팁:")
    tips = (
        "모듈화: 기능별로 분리",
        "설정 파일: 하드코딩 피하기",
        "로깅: 디버깅 용이",
        "테스트: 단위 테스트 작성",
        "문서화: 함수/클래스 docstring",
    )
    for number, tip in enumerate(tips, start=1):
        print(f"  {number}. {tip}")


def main():
    """Run every practical-project demo in sequence, then print a footer."""
    demos = (
        document_scanner,            # 프로젝트 1: 문서 스캐너
        license_plate_recognition,   # 프로젝트 2: 번호판 인식
        object_tracking_project,     # 프로젝트 3: 객체 추적
        panorama_stitching,          # 프로젝트 4: 파노라마
        ar_marker_project,           # 프로젝트 5: AR 마커
        project_structure_guide,     # 프로젝트 구조 가이드
    )
    for demo in demos:
        demo()

    print("\n" + "=" * 60)
    print("실전 프로젝트 데모 완료!")
    print("=" * 60)


if __name__ == "__main__":  # Run the demos only when executed as a script.
    main()