|
| 1 | +#MIT License |
| 2 | +#Copyright (c) 2023 Adrian Winter |
| 3 | + |
| 4 | +import cv2 |
| 5 | +import numpy as np |
| 6 | +import tkinter as tk |
| 7 | +from tkinter import ttk |
| 8 | +import time |
| 9 | + |
# Open the default camera (0) of your MacBook
#cap = cv2.VideoCapture(0)

# Use an external IP camera instead: an HTTP MJPEG stream on port 8080
# (e.g. the Android "IP Webcam" app).
yourIP = "192.168.178.53"

url = "http://"+yourIP+":8080/video"
cap = cv2.VideoCapture(url)
# Tunable parameters; all are re-assigned at runtime by the Tk sliders below.
ALPHA = 0.4                   # contrast gain applied to the difference image
GRIDSIZE = 1                  # difference image is split into GRIDSIZE x GRIDSIZE cells
THRESH = 22.0                 # binary threshold applied to the difference image
THRESHWINDOWFILTER = 0        # per-cell mean-intensity cutoff for the grid filter
AUTOREFERENCEINTERVALL = 0.1  # seconds between automatic reference captures
OVERLAY = 0.5                 # blend weight of the reference image in the overlay

# Check if camera opened successfully
if not cap.isOpened():
    print("Error opening video stream or file")

# Flag to indicate if a reference image has been taken (press 'c' in the main loop)
image_taken = False

# Timestamp of the last automatic reference capture (used by the main loop)
last_execution_time = 0
| 34 | +# Define a function to update the parameters |
def update_params(_value):
    """Re-read every slider and store its current value in the matching global.

    Registered as the ``command`` callback of all sliders. Tk passes the moved
    slider's new value positionally (``_value``); it is ignored because every
    parameter is re-read from its own widget instead.
    """
    global ALPHA, GRIDSIZE, THRESH, THRESHWINDOWFILTER, AUTOREFERENCEINTERVALL, OVERLAY
    ALPHA = float(scale_alpha.get())
    GRIDSIZE = int(scale_gridsize.get())
    THRESH = float(scale_thresh.get())
    THRESHWINDOWFILTER = int(scale_threshwindowfilter.get())
    OVERLAY = float(scale_overlay.get())
    AUTOREFERENCEINTERVALL = float(scale_autoreferenceintervall.get())
    # NOTE: the original message was missing the space before OVERLAY=.
    print(f"Updated parameters: ALPHA={ALPHA}, GRIDSIZE={GRIDSIZE}, THRESH={THRESH}, THRESHWINDOWFILTER={THRESHWINDOWFILTER}, AUTOREFERENCEINTERVALL={AUTOREFERENCEINTERVALL}, OVERLAY={OVERLAY}")
| 44 | + |
# Create the parameter window
window = tk.Tk()
window.title("Parameters")


def _add_slider(text, row, from_, to, initial):
    """Create one labelled parameter slider on *window* and return the Scale.

    The label goes in column 0 and the slider in column 1 of *row*; every
    slider reports value changes through update_params, exactly as the six
    hand-written copies did before this helper replaced them.
    """
    label = ttk.Label(window, text=text)
    label.grid(row=row, column=0, padx=5, pady=5, sticky=tk.W)
    scale = ttk.Scale(window, from_=from_, to=to, orient=tk.HORIZONTAL, command=update_params)
    scale.set(initial)
    scale.grid(row=row, column=1, padx=5, pady=5)
    return scale


# One slider per tunable parameter. The Scale objects must stay module-level
# names because update_params reads them back by name.
scale_alpha = _add_slider("ALPHA", 0, 0.0, 2.0, ALPHA)
scale_gridsize = _add_slider("GRIDSIZE", 1, 1, 80, GRIDSIZE)
scale_threshwindowfilter = _add_slider("THRESHWINDOWFILTER", 2, 0, 100, THRESHWINDOWFILTER)
scale_thresh = _add_slider("THRESH", 3, 0.0, 70.0, THRESH)
scale_overlay = _add_slider("OVERLAY", 4, 0.0, 1.0, OVERLAY)
scale_autoreferenceintervall = _add_slider("AUTOREFERENCEINTERVALL", 5, 0.1, 10, AUTOREFERENCEINTERVALL)
# Mirrors the AutoReferenceImage checkbutton; the main loop reads this flag.
autoReferenceImage = False


def toggle_switch():
    """Copy the AutoReferenceImage checkbutton state into the global flag."""
    global autoReferenceImage
    state = switch_value.get()
    autoReferenceImage = state
    print("Switch is ON" if state else "Switch is OFF")
| 99 | + |
# AutoReferenceImage toggle: when checked, the main loop periodically
# refreshes the reference image (see toggle_switch / autoReferenceImage).
switch_value = tk.BooleanVar(value=False)
switch = tk.Checkbutton(window, text="AutoReferenceImage", variable=switch_value, command=toggle_switch)
# Row 6: row 5, column 1 is already occupied by the AUTOREFERENCEINTERVALL
# slider, so the previous grid(row=5, column=1) stacked the two widgets in
# the same grid cell.
switch.grid(row=6, column=1, padx=5, pady=5)
| 104 | + |
| 105 | + |
# Main loop: show the live stream until a reference image is captured ('c'),
# then show a thresholded difference image blended over the reference.
while cap.isOpened():

    # Read the next frame; stop if the stream ended or the read failed
    # (otherwise frame is None and cvtColor below would raise).
    ret, frame = cap.read()
    if not ret:
        print("Error opening video stream or file")
        break

    # Convert the frame to grayscale
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    if image_taken:
        # Absolute difference between the live frame and the reference image
        diff = cv2.absdiff(gray, image)

        # Grid-filter parameters (live slider values)
        rows = GRIDSIZE
        cols = GRIDSIZE
        threshold = THRESHWINDOWFILTER

        # Contrast gain, read once per frame. NOTE: the original code used
        # `alpha` inside the grid loop below before it was ever assigned
        # (it was only set after the loop), which raised NameError on the
        # first processed frame whenever a cell fell below the threshold.
        alpha = ALPHA

        # Image dimensions and grid cell size
        height, width = diff.shape
        cell_width = width // cols
        cell_height = height // rows

        # Attenuate grid cells whose average change is below the threshold;
        # faint changes are damped (divided by alpha+0.1) rather than erased.
        for r in range(rows):
            for c in range(cols):
                x1 = c * cell_width
                y1 = r * cell_height
                x2 = (c + 1) * cell_width
                y2 = (r + 1) * cell_height

                cell = diff[y1:y2, x1:x2]
                avg_intensity = np.mean(cell)

                if avg_intensity < threshold:
                    diff[y1:y2, x1:x2] = cv2.multiply(diff[y1:y2, x1:x2], np.array([1/(alpha+0.1)]))

        # Increase contrast of the remaining differences
        diff = cv2.multiply(diff, np.array([alpha]))

        # Threshold to filter out small changes and amplify large ones
        _, diff = cv2.threshold(diff, THRESH, 255, cv2.THRESH_BINARY)

        # Render the binary difference into the red channel of a BGR image
        height, width = diff.shape
        img_red = np.zeros((height, width, 3), dtype=np.uint8)
        img_red[:, :, 2] = diff

        # Blend the red difference with the reference image and display it
        overlay = cv2.addWeighted(img_red, 1 - OVERLAY, cv2.cvtColor(image, cv2.COLOR_GRAY2BGR), OVERLAY, 0)
        cv2.imshow('Difference Image', overlay)

        # Single key read per frame (the original polled waitKey twice in the
        # no-reference path, so a 'q' press could be consumed by the first
        # call and missed by the second).
        key = cv2.waitKey(1) & 0xFF
    else:
        # No reference yet: show the live frame and wait for 'c'
        cv2.imshow('Current Frame', frame)

        key = cv2.waitKey(1) & 0xFF
        if key == ord('c'):
            # Freeze the current frame (grayscale) as the reference image
            image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            image_taken = True
            cv2.destroyWindow('Current Frame')
            print("Image taken")

    # Periodically refresh the reference image when auto mode is on.
    # NOTE(review): this updates `image` but not `image_taken`, so auto mode
    # alone never opens the difference view — preserved from the original.
    if autoReferenceImage:

        current_time = time.time()
        if current_time - last_execution_time >= AUTOREFERENCEINTERVALL:
            image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            print("Image taken")
            last_execution_time = current_time

    # Press 'q' to quit the video stream
    if key == ord('q'):
        break

    # Pump the Tk parameter window without blocking in mainloop()
    window.update_idletasks()
    window.update()

# Release the video stream and close all windows
cap.release()
cv2.destroyAllWindows()
0 commit comments