Browse Source

applied socket changes to ambulance detector

!!! Not tested — this will probably raise an error; refer to sender.py for the fix.
yigit
Yiğit Çolakoğlu 6 years ago
parent
commit
9d8b8e0b31
4 changed files with 209 additions and 85 deletions
  1. +85
    -37
      traffic_analyzer/ambulance_detect.py
  2. +40
    -5
      traffic_analyzer/master_app/src/main/java/me/yigitcolakoglu/master_app/cameraForm.form
  3. +73
    -31
      traffic_analyzer/master_app/src/main/java/me/yigitcolakoglu/master_app/cameraForm.java
  4. +11
    -12
      traffic_analyzer/sender.py

+ 85
- 37
traffic_analyzer/ambulance_detect.py View File

@ -2,20 +2,22 @@
import numpy as np
import os
import six.moves.urllib as urllib
import sys
import tarfile
import tensorflow as tf
import zipfile
import cv2
from distutils.version import StrictVersion
from collections import defaultdict
from io import StringIO
import socket
from utils import label_map_util
from utils import visualization_utils as vis_util
import psutil
import json
import base64
from PIL import Image
from io import BytesIO
import psutil
switch = 1
# This is needed since the notebook is stored in the object_detection folder.
sys.path.append("..")
@ -65,6 +67,16 @@ TEST_IMAGE_PATHS = [ os.path.join(PATH_TO_TEST_IMAGES_DIR, 'image{}.jpg'.format(
# Size, in inches, of the output images.
sess = 0
switch = 1
data = {"gpu_temp":"10C","gpu_load":"15%","cpu_temp":"47C","cpu_load":"15%","mem_temp":"NaN","mem_load":"17%","fan_speed":"10000RPM"}
def get_temps():
    """Refresh the global `data` dict with current CPU/RAM/fan readings.

    Mutates the module-level `data` dict in place; returns nothing.
    The "dell_smm" sensor name is hardware-specific (Dell SMM driver) --
    TODO confirm it exists on the deployment machine.  GPU fields in
    `data` are not touched here (no GPU sensor is read).
    """
    global data
    temps = psutil.sensors_temperatures()
    try:
        # Fix: original literal was mojibake ("\xc2\xb0C" decoded as Latin-1,
        # rendering as "°C"); the intended unit suffix is "°C".
        data["cpu_temp"] = str(int(temps["dell_smm"][0][1])) + "°C"
        data["fan_speed"] = str(psutil.sensors_fans()["dell_smm"][0][1]) + "RPM"
    except (KeyError, IndexError):
        # Sensor absent on this machine -- keep the previous (default) values
        # instead of crashing the detector loop.
        pass
    data["cpu_load"] = str(psutil.cpu_percent()) + "%"
    # .percent is the documented attribute; no need to round-trip through
    # _asdict() as the original did.
    data["mem_load"] = str(psutil.virtual_memory().percent) + "%"
def run_inference_for_single_image(image, graph):
global switch
global sess
@ -113,43 +125,79 @@ def run_inference_for_single_image(image, graph):
output_dict['detection_masks'] = output_dict['detection_masks'][0]
return output_dict
cut=[-175,-1,-175,-1]
cut_send = [0,0,0,0]
a = 1
img_counter = 0
socket_switch = True
cam = cv2.VideoCapture(0)
with detection_graph.as_default():
sess = tf.Session()
switch = 0
switch = 0
get_temps()
while 1:
if(True):
try:
ret,image = cam.read()
image_np = image[cut[0]:cut[1],cut[2]:cut[3]]
#image_np = image_np[int(r[1]):int(r[1]+r[3]),int(r[0]):int(r[0]+r[2])]
# the array based representation of the image will be used later in order to prepare the
# result image with boxes and labels on it.
# Expand dimensions since the model expects images to have shape: [1, None, None, 3]
image_np_expanded = np.expand_dims(image_np, axis=0)
t1 = time.time()
# Actual detection.
output_dict = run_inference_for_single_image(image_np_expanded, detection_graph)
# Visualization of the results of a detection.
vis_util.visualize_boxes_and_labels_on_image_array(
image_np,
output_dict['detection_boxes'],
output_dict['detection_classes'],
output_dict['detection_scores'],
category_index,
instance_masks=output_dict.get('detection_masks'),
use_normalized_coordinates=True,
line_thickness=8)
image[cut[0]:cut[1],cut[2]:cut[3]] = image_np
send_image = cv2.cvtColor(image,cv2.COLOR_BGR2RGB)
cv2.imshow("Cam",image)
cv2.imshow("Cut",image_np)
if socket_switch:
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')
socket_switch = False
except:
socket_switch=True
continue
try:
crop_img = send_image.copy(order='C')
crop_img = Image.fromarray(crop_img,"RGB")
buffered = BytesIO()
crop_img.save(buffered, format="JPEG")
img = base64.b64encode(buffered.getvalue()).decode("ascii")
client_socket.sendall(json.dumps({"image_full":img,"image_sizes":{"x":cut_send[2],"y":cut_send[0],"width":cut_send[3],"height":cut_send[1]},"load":data}).encode('gbk')+b"\n")
img_counter += 1
except:
socket_switch=True
if img_counter % 10 ==0:
get_temps()
t2 = time.time()
print("time taken for {}".format(t2-t1))
ex_c = [27, ord("q"), ord("Q")]
if cv2.waitKey(1) & 0xFF in ex_c:
break
except KeyboardInterrupt:
if not socket_switch:
client_socket.sendall(b"Bye\n")
cam.release()
exit(0)
ret,image = cam.read()
image_np = image[cut[0]:cut[1],cut[2]:cut[3]]
#image_np = image_np[int(r[1]):int(r[1]+r[3]),int(r[0]):int(r[0]+r[2])]
# the array based representation of the image will be used later in order to prepare the
# result image with boxes and labels on it.
# Expand dimensions since the model expects images to have shape: [1, None, None, 3]
image_np_expanded = np.expand_dims(image_np, axis=0)
t1 = time.time()
# Actual detection.
output_dict = run_inference_for_single_image(image_np_expanded, detection_graph)
# Visualization of the results of a detection.
vis_util.visualize_boxes_and_labels_on_image_array(
image_np,
output_dict['detection_boxes'],
output_dict['detection_classes'],
output_dict['detection_scores'],
category_index,
instance_masks=output_dict.get('detection_masks'),
use_normalized_coordinates=True,
line_thickness=8)
image[cut[0]:cut[1],cut[2]:cut[3]] = image_np
cv2.imshow("Cam",image)
cv2.imshow("Cut",image_np)
t2 = time.time()
print("time taken for {}".format(t2-t1))
ex_c = [27, ord("q"), ord("Q")]
if cv2.waitKey(1) & 0xFF in ex_c:
break
cv2.destroyAllWindows()
cam.release()

+ 40
- 5
traffic_analyzer/master_app/src/main/java/me/yigitcolakoglu/master_app/cameraForm.form View File

@ -40,9 +40,19 @@
<Layout>
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Component id="camera_full_label" min="-2" pref="900" max="-2" attributes="0"/>
<Group type="102" attributes="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Component id="camera_full_label" min="-2" pref="900" max="-2" attributes="0"/>
</Group>
<Group type="102" alignment="0" attributes="0">
<EmptySpace min="-2" pref="33" max="-2" attributes="0"/>
<Component id="jButton1" min="-2" pref="247" max="-2" attributes="0"/>
<EmptySpace min="-2" pref="217" max="-2" attributes="0"/>
<Component id="jButton2" min="-2" pref="248" max="-2" attributes="0"/>
</Group>
</Group>
<EmptySpace type="separate" max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="camera_cut_label" min="-2" pref="300" max="-2" attributes="0"/>
@ -75,6 +85,7 @@
</Group>
</Group>
</Group>
<Component id="jButton3" alignment="0" min="-2" pref="251" max="-2" attributes="0"/>
</Group>
<EmptySpace max="32767" attributes="0"/>
</Group>
@ -119,7 +130,13 @@
</Group>
<Component id="camera_full_label" min="-2" pref="720" max="-2" attributes="0"/>
</Group>
<EmptySpace pref="67" max="32767" attributes="0"/>
<EmptySpace type="separate" max="-2" attributes="0"/>
<Group type="103" groupAlignment="3" attributes="0">
<Component id="jButton1" alignment="3" min="-2" max="-2" attributes="0"/>
<Component id="jButton2" alignment="0" min="-2" pref="31" max="-2" attributes="0"/>
<Component id="jButton3" alignment="3" min="-2" max="-2" attributes="0"/>
</Group>
<EmptySpace pref="18" max="32767" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
@ -127,7 +144,7 @@
<SubComponents>
<Component class="javax.swing.JLabel" name="camera_full_label">
<Properties>
<Property name="text" type="java.lang.String" value="camera_full"/>
<Property name="text" type="java.lang.String" value=" "/>
</Properties>
</Component>
<Component class="javax.swing.JLabel" name="camera_cut_label">
@ -211,5 +228,23 @@
<Property name="text" type="java.lang.String" value="2500 RPM"/>
</Properties>
</Component>
<Component class="javax.swing.JButton" name="jButton1">
<Properties>
<Property name="text" type="java.lang.String" value="Ambulance"/>
</Properties>
</Component>
<Component class="javax.swing.JButton" name="jButton2">
<Properties>
<Property name="text" type="java.lang.String" value="Intersection"/>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="jButton2ActionPerformed"/>
</Events>
</Component>
<Component class="javax.swing.JButton" name="jButton3">
<Properties>
<Property name="text" type="java.lang.String" value="Bus"/>
</Properties>
</Component>
</SubComponents>
</Form>

+ 73
- 31
traffic_analyzer/master_app/src/main/java/me/yigitcolakoglu/master_app/cameraForm.java View File

@ -14,6 +14,7 @@ import java.awt.image.BufferedImage;
import java.util.Base64;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import org.json.JSONObject;
/**
@ -55,6 +56,9 @@ public class cameraForm extends javax.swing.JFrame {
ram_temp = new javax.swing.JLabel();
jLabel7 = new javax.swing.JLabel();
fan_rpm = new javax.swing.JLabel();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
@ -69,7 +73,7 @@ public class cameraForm extends javax.swing.JFrame {
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
camera_full_label.setText("camera_full");
camera_full_label.setText(" ");
camera_cut_label.setText("camera_cut");
@ -103,13 +107,31 @@ public class cameraForm extends javax.swing.JFrame {
fan_rpm.setFont(new java.awt.Font("Dialog", 1, 24)); // NOI18N
fan_rpm.setText("2500 RPM");
jButton1.setText("Ambulance");
jButton2.setText("Intersection");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jButton3.setText("Bus");
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(camera_full_label, javax.swing.GroupLayout.PREFERRED_SIZE, 900, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(camera_full_label, javax.swing.GroupLayout.PREFERRED_SIZE, 900, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addGap(33, 33, 33)
.addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 247, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(217, 217, 217)
.addComponent(jButton2, javax.swing.GroupLayout.PREFERRED_SIZE, 248, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(camera_cut_label, javax.swing.GroupLayout.PREFERRED_SIZE, 300, javax.swing.GroupLayout.PREFERRED_SIZE)
@ -135,7 +157,8 @@ public class cameraForm extends javax.swing.JFrame {
.addComponent(ram_temp)
.addComponent(cpu_temp)
.addComponent(gpu_temp)
.addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, 84, javax.swing.GroupLayout.PREFERRED_SIZE)))))
.addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, 84, javax.swing.GroupLayout.PREFERRED_SIZE))))
.addComponent(jButton3, javax.swing.GroupLayout.PREFERRED_SIZE, 251, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
layout.setVerticalGroup(
@ -170,12 +193,21 @@ public class cameraForm extends javax.swing.JFrame {
.addComponent(jLabel7)
.addComponent(fan_rpm, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addComponent(camera_full_label, javax.swing.GroupLayout.PREFERRED_SIZE, 720, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(67, Short.MAX_VALUE))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton1)
.addComponent(jButton2, javax.swing.GroupLayout.PREFERRED_SIZE, 31, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButton3))
.addContainerGap(18, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
// NetBeans GUI-builder stub for the "Intersection" button; intentionally empty until wired up.
// TODO add your handling code here:
}//GEN-LAST:event_jButton2ActionPerformed
/**
* @param args the command line arguments
*/
@ -213,7 +245,9 @@ public class cameraForm extends javax.swing.JFrame {
}
public void onCreate() throws Exception{
String fromClient;
this.camera_cut_label.setIcon(new ImageIcon());
this.camera_full_label.setIcon(new ImageIcon());
String fromClient = "";
String toClient;
ServerSocket server = new ServerSocket(8485);
@ -225,50 +259,55 @@ public class cameraForm extends javax.swing.JFrame {
BufferedImage image = null;
byte[] imageByte;
int null_reps = 0;
while(run) {
try{
BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()));
fromClient = in.readLine();
if(fromClient != null) {
System.out.println("received data in size: " + fromClient.length());
JSONObject json = new JSONObject(fromClient);
byte[] decodedBytes = Base64.getDecoder().decode(json.getString("image_full"));
ByteArrayInputStream bis = new ByteArrayInputStream(decodedBytes);
image = ImageIO.read(bis);
bis.close();
JSONObject dims = json.getJSONObject("image_sizes");
this.camera_cut_label.setIcon(new ImageIcon(resizeImage(image.getSubimage(dims.getInt("x"), dims.getInt("y"), dims.getInt("width"), dims.getInt("height")),300,300)));
this.camera_full_label.setIcon(new ImageIcon(resizeImage(image,900,720)));
JSONObject data = json.optJSONObject("load");
this.gpu_temp.setText(data.getString("gpu_temp"));
this.gpu_usage.setText(data.getString("gpu_load"));
this.cpu_temp.setText(data.getString("cpu_temp"));
this.cpu_usage.setText(data.getString("cpu_load"));
this.ram_temp.setText(data.getString("mem_temp"));
this.ram_usage.setText(data.getString("mem_load"));
this.fan_rpm.setText(data.getString("fan_speed"));
if(fromClient.equals("Bye")) {
client.close();
if(fromClient.trim().equals("Bye")) {
run = false;
System.out.println("socket closed");
System.exit(0);
}else{
System.out.println("received data in size: " + fromClient.length());
JSONObject json = new JSONObject(fromClient);
byte[] decodedBytes = Base64.getDecoder().decode(json.getString("image_full"));
ByteArrayInputStream bis = new ByteArrayInputStream(decodedBytes);
image = ImageIO.read(bis);
bis.close();
JSONObject dims = json.getJSONObject("image_sizes");
this.camera_cut_label.setIcon(new ImageIcon(resizeImage(image.getSubimage(dims.getInt("x"), dims.getInt("y"), dims.getInt("width"), dims.getInt("height")),300,300)));
this.camera_full_label.setIcon(new ImageIcon(resizeImage(image,900,720)));
JSONObject data = json.optJSONObject("load");
this.gpu_temp.setText(data.getString("gpu_temp"));
this.gpu_usage.setText(data.getString("gpu_load"));
this.cpu_temp.setText(data.getString("cpu_temp"));
this.cpu_usage.setText(data.getString("cpu_load"));
this.ram_temp.setText(data.getString("mem_temp"));
this.ram_usage.setText(data.getString("mem_load"));
this.fan_rpm.setText(data.getString("fan_speed"));
null_reps=0;
}
null_reps=0;
}else{
null_reps +=1;
}
}
catch(Exception e){
System.out.println(fromClient);
System.out.println(e.toString());
}
if (null_reps >= 1000){
break;
if (null_reps >= 100000){
run = false;
System.out.println("socket closed");
}
}
server.close();
client.close();
JOptionPane.showMessageDialog(this, "Ambulance socket server down!");
}
public static BufferedImage resizeImage(final Image image, int width, int height) {
@ -292,6 +331,9 @@ public class cameraForm extends javax.swing.JFrame {
private javax.swing.JLabel fan_rpm;
private javax.swing.JLabel gpu_temp;
private javax.swing.JLabel gpu_usage;
private javax.swing.JButton jButton1;
private javax.swing.JButton jButton2;
private javax.swing.JButton jButton3;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;


+ 11
- 12
traffic_analyzer/sender.py View File

@ -13,7 +13,7 @@ img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
switch = True
socket_switch = True
cut=[-175,-1,-175,-1]
cut_send = [0,0,0,0]
data = {"gpu_temp":"10C","gpu_load":"15%","cpu_temp":"47C","cpu_load":"15%","mem_temp":"NaN","mem_load":"17%","fan_speed":"10000RPM"}
@ -35,6 +35,7 @@ while True:
if cut[i]<0:
cut_send[i] = lens[i] + cut[i]
cut_send[i+1] = abs(cut[i])-abs(cut[i+1])
backup = frame
frame = cv2.cvtColor(frame,cv2.COLOR_BGR2RGB)
crop_img = frame.copy(order='C')
@ -42,35 +43,33 @@ while True:
buffered = BytesIO()
crop_img.save(buffered, format="JPEG")
img = base64.b64encode(buffered.getvalue()).decode("ascii")
frame_cut=frame[cut[0]:cut[1],cut[2]:cut[3]]
cv2.imshow("base",frame)
frame_cut=backup[cut[0]:cut[1],cut[2]:cut[3]]
cv2.imshow("base",backup)
cv2.imshow("cut",frame_cut)
cv2.imshow("test", frame[cut_send[0]:cut_send[0]+cut_send[1],cut_send[2]:cut_send[2]+cut_send[3]]
)
ex_c = [27, ord("q"), ord("Q")]
if cv2.waitKey(1) & 0xFF in ex_c:
break
if switch:
if socket_switch:
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')
switch = False
socket_switch = False
except:
switch=True
socket_switch=True
continue
try:
client_socket.sendall(json.dumps({"image_full":img,"image_sizes":{"x":cut_send[2],"y":cut_send[0],"width":cut_send[3],"height":cut_send[1]},"load":data}).encode('gbk')+b"\n")
print(img)
except:
switch=True
socket_switch=True
img_counter += 1
if img_counter % 30 ==0:
if img_counter % 10 ==0:
get_temps()
except KeyboardInterrupt:
if not switch:
client_socket.sendall(b"Bye")
if not socket_switch:
client_socket.sendall(b"Bye\n")
cam.release()
exit(0)


Loading…
Cancel
Save