EL GHAFRAOUI AYOUB committed on
Commit 8331ff3 · 1 Parent(s): 992c3eb
Files changed (3)
  1. app.py +111 -16
  2. result_frame.jpg +0 -0
  3. templates/index.html +62 -48
app.py CHANGED
@@ -3,6 +3,8 @@ import time
 import numpy as np
 import mediapipe as mp
 
+from flask import Flask, request, jsonify, send_file
+
 import uvicorn
 from socketio import ASGIApp
 
@@ -20,6 +22,34 @@ import yt_dlp as youtube_dl
 import uvicorn
 
 import base64
+import matplotlib.pyplot as plt
+import numpy as np
+import base64
+from io import BytesIO
+from PIL import Image
+
+def plot_base64_image(image_base64):
+    # Decode base64 string
+    image_data = base64.b64decode(image_base64)
+
+    # Convert bytes to PIL Image
+    image = Image.open(BytesIO(image_data))
+
+    # Convert PIL Image to numpy array
+    image_array = np.array(image)
+
+    # Plot image
+    plt.imshow(image_array)
+    plt.axis('off')
+    plt.show()
+
+# Example usage:
+# base64_image = "..."  # Your base64 encoded image string
+# plot_base64_image(base64_image)
+
+
+
+
 
 model_object_detection = YOLO("bisindov2.pt")
 
@@ -276,6 +306,39 @@ class VideoStreaming(object):
             yield (b'--frame\r\n'
                    b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
 
+    def show1(self, url):
+        print("url")
+        self._preview = False
+        self._flipH = False
+        self._detect = False
+        self._mediaPipe = False
+
+        self._confidence = 75.0
+        ydl_opts = {
+            "quiet": True,
+            "no_warnings": True,
+            "format": "best",
+            "forceurl": True,
+        }
+
+
+        while True:
+            # Decoding the Base64 string to get the frame data
+            frame_bytes = base64.b64decode(url)
+
+            # Converting the frame data to an OpenCV image
+            frame_np = np.frombuffer(frame_bytes, np.uint8)
+            frame = cv2.imdecode(frame_np, cv2.IMREAD_COLOR)
+
+            # Encode the frame data to bytes
+            _, frame_encoded = cv2.imencode(".jpg", frame)
+            frame_bytes = frame_encoded.tobytes()
+
+            yield (
+                b'--frame\r\n'
+                b'Content-Type: image/jpeg\r\n\r\n' + frame_bytes + b'\r\n'
+            )
+
 
 # check_settings()
 VIDEO = VideoStreaming()
@@ -369,29 +432,61 @@ def test_connect():
 ######################
 
 def preprocess_frame(frame_data):
-    # Convert base64 image string to numpy array
-    imgdata = base64.b64decode(frame_data)
-    nparr = np.frombuffer(imgdata, np.uint8)
-    frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
-    print("rani dezte hna koolchi mezian")
+    if frame_data is None:
+        return None  # Return None if frame_data is None
 
-    # Apply image processing here (example: grayscale conversion)
-    processed_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+    try:
+        # Convert base64 image string to numpy array
+        # Split frame_data to extract base64 part
+        base64_data = frame_data
+
+        # Convert base64 image string to numpy array
+        imgdata = base64.b64decode(base64_data)
+        imgarray = np.frombuffer(imgdata, np.uint8)
 
-    # Convert processed frame back to base64 image string
-    _, buffer = cv2.imencode('.jpg', processed_frame)
-    processed_frame_data = base64.b64encode(buffer).decode('utf-8')
+        # Decode the image using cv2.imdecode
+        frame = cv2.imdecode(imgarray, cv2.IMREAD_COLOR)
+
+        print("rani dezte hna koolchi mezian")
+
+        # Apply image processing here (example: grayscale conversion)
+        processed_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+
+        # Convert processed frame back to base64 image string
+        _, buffer = cv2.imencode('.jpg', processed_frame)
+        processed_frame_data = base64.b64encode(buffer).decode('utf-8')
+
+        #plot_base64_image(processed_frame_data)
+        return processed_frame_data
+    except Exception as e:
+        print("Error processing frame:", e)
+        return None
 
-    return processed_frame_data
 
 
+#@socketio.on('stream_frame')
+#def handle_stream_frame(frame_data):
+#    processed_frame_data = preprocess_frame(frame_data)
+#    #emit('receive_frame', processed_frame_data, broadcast=True)
+#    return Response(VIDEO.show1(frame_data), mimetype='multipart/x-mixed-replace; boundary=frame')
+# Route to receive a frame for processing
 
-@socketio.on('stream_frame')
-def handle_stream_frame(frame_data):
-    processed_frame_data = preprocess_frame(frame_data)
-    emit('receive_frame', processed_frame_data, broadcast=True)
-##################
 
+
+@app.route('/process_frame', methods=['POST'])
+def process_frame():
+    if 'frame' in request.files:
+        frame = request.files['frame']
+        # Process the frame here
+        # For example, you can save the frame to a file
+        frame_path = 'result_frame.jpg'
+        frame.save(frame_path)
+        # Return the processed frame
+        return send_file(frame_path, mimetype='image/jpeg')
+    else:
+        return 'No frame data received'
+
+
 if __name__ == '__main__':
     socketio.run(app, host="0.0.0.0", allow_unsafe_werkzeug=True,port=7860)
 
result_frame.jpg ADDED
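
Note: result_frame.jpg is the file the new /process_frame route writes before echoing it back. Below is a minimal client sketch for exercising that route, assuming the server is reachable at localhost on port 7860 (the port passed to socketio.run) and that frame.jpg is any local test JPEG; the multipart field name 'frame' matches request.files['frame'] in the route.

import requests

SERVER_URL = "http://localhost:7860/process_frame"  # assumed host; port taken from socketio.run

with open("frame.jpg", "rb") as f:  # any local JPEG to send (assumption)
    # The route reads the upload from request.files['frame'],
    # so the multipart field name must be 'frame'.
    files = {"frame": ("frame.jpg", f, "image/jpeg")}
    response = requests.post(SERVER_URL, files=files, timeout=10)

response.raise_for_status()

# The route currently saves the upload to result_frame.jpg and returns it
# unchanged as image/jpeg.
with open("returned_frame.jpg", "wb") as out:
    out.write(response.content)
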
templates/index.html CHANGED
@@ -131,6 +131,17 @@
     <div class = 'video'>
         <img id="image">
     </div>
+    <div class="col-span-8 mx-4 mt-3" style="background-color: lightgrey; border-radius: 10px;"> <!-- added a background color and rounded corners to the video div -->
+        <div id="container">
+            <img class="center" src="{{ url_for('process_frame') }}" id="videoElement12" alt="Video Feed processed">
+        </div>
+    </div>
+
+    <div class="col-span-8 mx-4 mt-3" style="background-color: lightgrey; border-radius: 10px;">
+        <div id="container">
+            <img class="center" id="processedFrameElement" alt="Processed Frame">
+        </div>
+    </div>
 
 
 </body>
@@ -142,58 +153,61 @@
 
 
 <script>
-    function capture(video, scaleFactor) {
-        if(scaleFactor == null){
-            scaleFactor = 1;
-        }
-        var w = video.videoWidth * scaleFactor;
-        var h = video.videoHeight * scaleFactor;
-        var canvas = document.createElement('canvas');
-        canvas.width = w;
-        canvas.height = h;
-        var ctx = canvas.getContext('2d');
-        ctx.drawImage(video, 0, 0, w, h);
-        return canvas;
-    }
-
-    var socket = io();
-    socket.on('connect', function(){
-        console.log("Connection has been succesfully established with socket.", socket.connected)
-    });
-    const video = document.querySelector("#videoElement1");
-    video.width = 500;
-    video.height = 375; ;
-    if (navigator.mediaDevices.getUserMedia) {
-        navigator.mediaDevices.getUserMedia({ video: true })
+
+    // Create an img element to display the processed frame
+    var imgElement = document.createElement('img');
+    document.body.appendChild(imgElement);
+
+    navigator.mediaDevices.getUserMedia({ video: true })
         .then(function (stream) {
-            video.srcObject = stream;
-            video.play();
+            const videoElement = document.createElement('video');
+            document.body.appendChild(videoElement);
+            videoElement.srcObject = stream;
+            videoElement.play();
+
+            setInterval(() => {
+                var type = 'image/jpeg';
+                var canvas = document.createElement('canvas');
+                var context = canvas.getContext('2d');
+                canvas.width = videoElement.videoWidth;
+                canvas.height = videoElement.videoHeight;
+                context.drawImage(videoElement, 0, 0, canvas.width, canvas.height);
+
+                // Convert canvas data to Blob
+                canvas.toBlob(function(blob) {
+                    // Create FormData object to send the blob to the server
+                    var formData = new FormData();
+                    formData.append('frame', blob, 'result_frame.jpg');
+
+                    // Make a POST request to the Flask server
+                    fetch('/process_frame', {
+                        method: 'POST',
+                        body: formData
+                    })
+                    .then(response => {
+                        if (!response.ok) {
+                            throw new Error('Failed to process frame on server');
+                        }
+                        return response.blob();
+                    })
+                    .then(blob => {
+                        // Display the processed frame on the webpage
+                        imgElement.src = URL.createObjectURL(blob);
+                    })
+                    .catch(error => {
+                        console.error('Error:', error);
+                    });
+                }, type);
+
+            }, 1000); // Capture and send a frame every second (1000 milliseconds)
+
        })
-        .catch(function (err0r) {
-            console.log(err0r)
-            console.log("Something went wrong!");
+        .catch(function (error) {
+            console.log(error);
+            console.log("An error occurred while accessing the camera stream.");
        });
-    }
-    const FPS = 22;
-
-    // Update this code snippet in your existing frontend script
-
-    setInterval(() => {
-        var type = "image/jpeg"; // Fix the typo in 'image/jpg'
-        var video_element = document.getElementById("videoElement1");
-        var frame = capture(video_element, 1);
-        var data = frame.toDataURL(type);
-
-        // Emit the image data to the backend using 'stream_frame' event
-        socket.emit('stream_frame', data);
-    }, 10000/FPS);
-
+
 
-    socket.on('response_back', function(image){
-        const image_id = document.getElementById('image');
-        console.log(image)
-        image_id.src = image;
-    });
 </script>
 
 </html>
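
The new front-end code captures a webcam frame to a canvas, posts it as multipart form data to /process_frame once per second, and displays the returned JPEG. A rough in-process sketch of that round trip using Flask's test client is shown below; it assumes the Flask instance defined in app.py is named app (as the @app.route decorator suggests) and that a sample JPEG exists at the given path. Keep in mind that importing app.py also loads the YOLO weights bisindov2.pt at module level.

import io

from app import app  # assumption: app.py exposes a Flask instance named `app`; importing it loads YOLO("bisindov2.pt")

def check_process_frame_roundtrip(sample_jpeg="result_frame.jpg"):
    client = app.test_client()

    with open(sample_jpeg, "rb") as f:
        frame_bytes = f.read()

    # Mirror the browser: send the frame as multipart form data under the 'frame' field.
    response = client.post(
        "/process_frame",
        data={"frame": (io.BytesIO(frame_bytes), "result_frame.jpg")},
        content_type="multipart/form-data",
    )

    assert response.status_code == 200
    assert response.mimetype == "image/jpeg"
    return response.data  # the echoed JPEG bytes
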