-rw-r--r-- | face-detection/03_face_recogition.py | 88
-rw-r--r-- | webui/main.py | 4
-rw-r--r-- | webui/website/__init__.py | 7
-rw-r--r-- | webui/website/auth.py | 15
-rw-r--r-- | webui/website/models.py | 0
-rw-r--r-- | webui/website/templates/html.html | 44
-rw-r--r-- | webui/website/templates/live.html | 5
-rw-r--r-- | webui/website/views.py | 47
8 files changed, 103 insertions, 107 deletions
diff --git a/face-detection/03_face_recogition.py b/face-detection/03_face_recogition.py
index 185d3c2..51741a5 100644
--- a/face-detection/03_face_recogition.py
+++ b/face-detection/03_face_recogition.py
@@ -2,6 +2,9 @@ import cv2
 import os
 import numpy as np
 from picamera2 import Picamera2
+from flask import Flask, render_template, Response
+
+app = Flask(__name__)
 
 #Parameters
 id = 0
@@ -28,53 +31,58 @@ cam.preview_configuration.align()
 cam.configure("preview")
 cam.start()
 
-while True:
-    # Capture a frame from the camera
-    frame=cam.capture_array()
+def generate_frames():
+    while True:
+        # Capture a frame from the camera
+        frame=cam.capture_array()
 
-    #Convert fram from BGR to grayscale
-    frameGray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
-    #Create a DS faces- array with 4 elements- x,y coordinates top-left corner), width and height
-    faces = face_detector.detectMultiScale(
-        frameGray, # The grayscale frame to detect
-        scaleFactor=1.1,# how much the image size is reduced at each image scale-10% reduction
-        minNeighbors=5, # how many neighbors each candidate rectangle should have to retain it
-        minSize=(150, 150)# Minimum possible object size. Objects smaller than this size are ignored.
-        )
-    for(x,y,w,h) in faces:
-        namepos=(x+5,y-5) #shift right and up/outside the bounding box from top
-        confpos=(x+5,y+h-5) #shift right and up/intside the bounding box from bottom
-        #create a bounding box across the detected face
-        cv2.rectangle(frame, (x,y), (x+w,y+h), boxColor, 3) #5 parameters - frame, topleftcoords,bottomrightcooords,boxcolor,thickness
+        #Convert fram from BGR to grayscale
+        frameGray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+        #Create a DS faces- array with 4 elements- x,y coordinates top-left corner), width and height
+        faces = face_detector.detectMultiScale(
+            frameGray, # The grayscale frame to detect
+            scaleFactor=1.1,# how much the image size is reduced at each image scale-10% reduction
+            minNeighbors=5, # how many neighbors each candidate rectangle should have to retain it
+            minSize=(150, 150)# Minimum possible object size. Objects smaller than this size are ignored.
+            )
+        for(x,y,w,h) in faces:
+            namepos=(x+5,y-5) #shift right and up/outside the bounding box from top
+            confpos=(x+5,y+h-5) #shift right and up/intside the bounding box from bottom
+            #create a bounding box across the detected face
+            cv2.rectangle(frame, (x,y), (x+w,y+h), boxColor, 3) #5 parameters - frame, topleftcoords,bottomrightcooords,boxcolor,thickness
 
-        #recognizer.predict() method takes the ROI as input and
-        #returns the predicted label (id) and confidence score for the given face region.
-        id, confidence = recognizer.predict(frameGray[y:y+h,x:x+w])
+            #recognizer.predict() method takes the ROI as input and
+            #returns the predicted label (id) and confidence score for the given face region.
+            id, confidence = recognizer.predict(frameGray[y:y+h,x:x+w])
 
-        # If confidence is less than 100, it is considered a perfect match
-        if confidence < 100:
-            id = names[id]
-            confidence = f"{100 - confidence:.0f}%"
-        else:
-            id = "unknown"
-            confidence = f"{100 - confidence:.0f}%"
+            # If confidence is less than 100, it is considered a perfect match
+            if confidence < 100:
+                id = names[id]
+                confidence = f"{100 - confidence:.0f}%"
+            else:
+                id = "unknown"
+                confidence = f"{100 - confidence:.0f}%"
 
         #Display name and confidence of person who's face is recognized
         cv2.putText(frame, str(id), namepos, font, height, nameColor, 2)
         cv2.putText(frame, str(confidence), confpos, font, height, confColor, 1)
 
-    # Display realtime capture output to the user
-    cv2.imshow('Raspi Face Recognizer',frame)
+        # Display output Flask web application:
+        # Convert the frame to JPEG format
+        ret, buffer = cv2.imencode('.jpg', frame)
+        frame = buffer.tobytes()
+
+        # Yield the frame in the HTTP response
+        yield (b'--frame\r\n'
+               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
+
+@app.route('/')
+def index():
+    return render_template('index.html')
 
-    # Wait for 30 milliseconds for a key event (extract sigfigs) and exit if 'ESC' or 'q' is pressed
-    key = cv2.waitKey(100) & 0xff
-    #Checking keycode
-    if key == 27: # ESCAPE key
-        break
-    elif key == 113: # q key
-        break
+@app.route('/video_feed')
+def video_feed():
+    return Response(generate_frames(), mimetype='multipart/x-mixed-replace; boundary=frame')
 
-# Release the camera and close all windows
-print("\n [INFO] Exiting Program and cleaning up stuff")
-cam.stop()
-cv2.destroyAllWindows()
+if __name__ == '__main__':
+    app.run(debug=True)
diff --git a/webui/main.py b/webui/main.py
index c281aac..a7bc2d6 100644
--- a/webui/main.py
+++ b/webui/main.py
@@ -4,9 +4,11 @@ from flask import render_template
 app = create_app()
 
 # Define the route for the custom 404 error
+
 @app.errorhandler(404)
 def page_not_found(e):
     return render_template('404.html'), 404
 
+
 if __name__ == '__main__':
-    app.run(debug=True)
+    app.run(host='0.0.0.0', port=80, debug=True)
diff --git a/webui/website/__init__.py b/webui/website/__init__.py
index 4ca4c4b..2af8e41 100644
--- a/webui/website/__init__.py
+++ b/webui/website/__init__.py
@@ -4,10 +4,9 @@ def create_app():
     app = Flask(__name__)
     app.config['SECRET_KEY'] = 'fdbcawejhfbwef'
 
-    from .views import views
-    from .auth import auth
+    from .views import views,streaming
 
     app.register_blueprint(views, url_prefix='/')
-    app.register_blueprint(auth, url_prefix='/')
+    app.register_blueprint(streaming, url_prefix='/')
 
-    return app
\ No newline at end of file
+    return app
diff --git a/webui/website/auth.py b/webui/website/auth.py
deleted file mode 100644
index c44b421..0000000
--- a/webui/website/auth.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from flask import Blueprint, render_template
-
-auth = Blueprint('auth', __name__)
-
-@auth.route('/login')
-def login():
-    return render_template("404.html")
-
-@auth.route('/logout')
-def logout():
-    return render_template("404.html")
-
-@auth.route('/signup')
-def signup():
-    return render_template("404.html")
\ No newline at end of file
diff --git a/webui/website/models.py b/webui/website/models.py
deleted file mode 100644
index e69de29..0000000
--- a/webui/website/models.py
+++ /dev/null
diff --git a/webui/website/templates/html.html b/webui/website/templates/html.html
deleted file mode 100644
index 8387ebb..0000000
--- a/webui/website/templates/html.html
+++ /dev/null
@@ -1,44 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
- <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
- <meta charset="UTF-8">
- <meta http-equiv="X-UA-Compatible" content="IE=edge">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <title>Document</title>
- <link rel="stylesheet" href="css.css">
-</head>
-<body>
- <div class="nav">
- <input type="checkbox" id="nav-check">
- <div class="nav-header">
- <div class="nav-title">
- MysteryCode
- </div>
- </div>
- <div class="nav-btn">
- <label for="nav-check">
- <span></span>
- <span></span>
- <span></span>
- </label>
- </div>
-
- <div class="nav-links">
- <ul>
- <li><a href="#" target="_blank">Home</a></li>
- <li><a href="#" target="_blank">About</a></li>
- <li><a href="#" target="_blank">Services</a></li>
- <li><a href="#" target="_blank">Portfolio</a></li>
- <li><a href="#" target="_blank">Contact</a></li>
- <a class="icon">
- <i class="fa fa-twitter"></i>
- <i class="fa fa-linkedin"></i>
- <i class="fa fa-youtube"></i>
-
- </a>
- </ul>
- </div>
- </div>
-</body>
-</html>
\ No newline at end of file
diff --git a/webui/website/templates/live.html b/webui/website/templates/live.html
index 5d7d62f..bd71851 100644
--- a/webui/website/templates/live.html
+++ b/webui/website/templates/live.html
@@ -3,7 +3,8 @@
 {% block content %}
 <body id="page">
 <div id="spotlight" class="animated fadeIn">
-    <h1>Hi<h1>
+    <img src="/stream" width="640" height="480" />
+    <h1>Live-Feed</h1>
 </div>
 
-{% endblock %}
\ No newline at end of file
+{% endblock %}
diff --git a/webui/website/views.py b/webui/website/views.py
index 47a02f5..3523adf 100644
--- a/webui/website/views.py
+++ b/webui/website/views.py
@@ -1,8 +1,52 @@
-from flask import Blueprint, render_template
+from flask import Blueprint, render_template, Response
+
+import io
+import logging
+import socketserver
+from http import server
+from threading import Condition
+
+from picamera2 import Picamera2
+from picamera2.encoders import JpegEncoder
+from picamera2.outputs import FileOutput
+
 
 views = Blueprint('views', __name__)
+streaming = Blueprint('streaming', __name__)
 
+@streaming.route('/stream')
+def stream():
+
+    class StreamingOutput(io.BufferedIOBase):
+        def __init__(self):
+            self.frame = None
+            self.condition = Condition()
+
+        def write(self, buf):
+            with self.condition:
+                self.frame = buf
+                self.condition.notify_all()
+
+    def generate():
+        try:
+            while True:
+                with output.condition:
+                    output.condition.wait()
+                    frame = output.frame
+                yield (b'--FRAME\r\n'
+                       b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
+        except Exception as e:
+            logging.warning('Removed streaming client: %s', str(e))
+
+    output = StreamingOutput()
+    picam2 = Picamera2()
+    picam2.configure(picam2.create_video_configuration(main={"size": (640, 480)}))
+    picam2.start_recording(JpegEncoder(), FileOutput(output))
+
+    return Response(generate(), mimetype='multipart/x-mixed-replace; boundary=FRAME')
+
 @views.route('/')
+@views.route('/index.html')
 def home():
     return render_template("home.html")
 
@@ -10,3 +54,4 @@ def home():
 def live():
     return render_template("live.html")
 
+views.register_blueprint(streaming, url_prefix='/stream')
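
For quick verification outside the browser, the sketch below (not part of this commit) reads a few JPEG frames from an MJPEG endpoint such as the /stream route added in webui/website/views.py. The host in STREAM_URL and the helper name check_stream are illustrative assumptions; only the multipart/x-mixed-replace behaviour shown in the diff is taken from the source.

# Minimal smoke test for an MJPEG (multipart/x-mixed-replace) endpoint.
# STREAM_URL and check_stream are hypothetical names; adjust the host to your setup.
import requests

STREAM_URL = "http://raspberrypi.local/stream"  # assumed host; port 80 as configured in webui/main.py

def check_stream(url=STREAM_URL, max_frames=3):
    """Pull a few JPEG frames out of the multipart stream and report their sizes."""
    buffer = b""
    frames = 0
    with requests.get(url, stream=True, timeout=10) as resp:
        resp.raise_for_status()
        for chunk in resp.iter_content(chunk_size=4096):
            buffer += chunk
            start = buffer.find(b"\xff\xd8")  # JPEG start-of-image marker
            end = buffer.find(b"\xff\xd9")    # JPEG end-of-image marker
            if start != -1 and end != -1 and end > start:
                print(f"frame {frames + 1}: {end + 2 - start} bytes")
                buffer = buffer[end + 2:]     # drop the consumed frame, keep the remainder
                frames += 1
                if frames >= max_frames:
                    return

if __name__ == "__main__":
    check_stream()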