
Added several features

- Added complete complaints functionality to the Android app
- Added a presentation mode that hides unnecessary items from the navigation drawer in the Android app
- Added a button to return to the home screen
- Set up the server side so that ML functions are only activated on Windows machines (a sketch of the idea follows the TODO list)

TODO:
- Add ambulance tracking to denunciation
- Add car crash reporting to the Android app
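
The Windows-only gate for the ML modules is not shown in this excerpt; the sketch below is only an illustration of the idea, assuming a standard platform.system() check on the Python API side. The module names mirror files under server_side/api/modules/, but which of them count as ML-backed is an assumption.

    # Hypothetical sketch of the Windows-only ML gate (not the actual app.py code).
    import platform

    ML_ENABLED = platform.system() == "Windows"  # True only on Windows hosts

    def active_modules():
        """Pick which API modules to wire up on this host (names are illustrative)."""
        modules = ["complaint", "announcements", "voting_system", "rating_system"]
        if ML_ENABLED:
            # Assumption: these are the modules that pull in the heavy ML dependencies.
            modules += ["smart_park", "denunciation"]
        return modules

    if __name__ == "__main__":
        print("Activating modules:", active_modules())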
Yiğit Çolakoğlu (yigit), 6 years ago
commit 55b96b14ac
85 changed files with 1995 additions and 618 deletions
1. MyCity/.gitignore (+11, -0)
2. MyCity/app/build.gradle (+4, -6)
3. MyCity/app/src/main/AndroidManifest.xml (+1, -0)
4. MyCity/app/src/main/java/gq/yigit/mycity/MainActivity.java (+98, -5)
5. MyCity/app/src/main/java/gq/yigit/mycity/MainFragment.java (+15, -3)
6. MyCity/app/src/main/java/gq/yigit/mycity/ParkFragment.java (+1, -1)
7. MyCity/app/src/main/java/gq/yigit/mycity/QRFragment.java (+1, -1)
8. MyCity/app/src/main/java/gq/yigit/mycity/RateFragment.java (+1, -1)
9. MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintFragment.java (+209, -0)
10. MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintViewFragment.java (+116, -0)
11. MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintsContent.java (+110, -0)
12. MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintsFragment.java (+134, -0)
13. MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/MyComplaintRecyclerViewAdapter.java (+108, -0)
14. MyCity/app/src/main/java/gq/yigit/mycity/navigation/TransitFragment.java (+1, -1)
15. MyCity/app/src/main/java/gq/yigit/mycity/tools/WebRequest.java (+0, -2)
16. MyCity/app/src/main/java/gq/yigit/mycity/utility/UtilityMain.java (+1, -1)
17. MyCity/app/src/main/java/gq/yigit/mycity/votesFragment/VoteFragment.java (+1, -1)
18. MyCity/app/src/main/res/drawable-v24/side_nav_bar.xml (+3, -3)
19. MyCity/app/src/main/res/drawable/camera.xml (+4, -0)
20. MyCity/app/src/main/res/drawable/ic_home.xml (+5, -0)
21. MyCity/app/src/main/res/drawable/road_cracked.xml (+4, -0)
22. MyCity/app/src/main/res/drawable/status_done.xml (+4, -0)
23. MyCity/app/src/main/res/drawable/status_pending.xml (+6, -0)
24. MyCity/app/src/main/res/layout/fragment_complaint.xml (+42, -0)
25. MyCity/app/src/main/res/layout/fragment_complaint_list.xml (+15, -0)
26. MyCity/app/src/main/res/layout/fragment_complaint_view.xml (+81, -0)
27. MyCity/app/src/main/res/layout/nav_header_main.xml (+1, -1)
28. MyCity/app/src/main/res/menu/activity_main_drawer.xml (+16, -4)
29. MyCity/app/src/main/res/menu/main.xml (+12, -0)
30. MyCity/app/src/main/res/values/colors.xml (+2, -2)
31. MyCity/app/src/main/res/values/strings.xml (+1, -1)
32. server_side/api/app.py (+5, -1)
33. server_side/api/encryption/mycity-decrypted.key (+0, -0)
34. server_side/api/encryption/mycity.crt (+0, -0)
35. server_side/api/encryption/mycity.csr (+0, -0)
36. server_side/api/encryption/mycity.key (+0, -0)
37. server_side/api/encryption/mycity.pem (+0, -0)
38. server_side/api/images/9vard12ty0ad2yvwp3q53rsf3h43r2vq.png (BIN)
39. server_side/api/images/muhtarlik.jpg (+0, -0)
40. server_side/api/images/park.jpg (+0, -0)
41. server_side/api/images/voting.jpg (+0, -0)
42. server_side/api/modules/SpotSelector.py (+0, -0)
43. server_side/api/modules/__init__.py (+0, -0)
44. server_side/api/modules/announcements.py (+0, -0)
45. server_side/api/modules/bus_stops.py (+0, -0)
46. server_side/api/modules/complaint.py (+143, -0)
47. server_side/api/modules/databases/announcements.json (+0, -0)
48. server_side/api/modules/databases/bus.json (+0, -0)
49. server_side/api/modules/databases/bus_locations.json (+0, -0)
50. server_side/api/modules/databases/complaints.json (+26, -0)
51. server_side/api/modules/databases/denunciations.json (+3, -0)
52. server_side/api/modules/databases/locations.json (+0, -0)
53. server_side/api/modules/databases/park_data.json (+0, -0)
54. server_side/api/modules/databases/ratings.json (+0, -0)
55. server_side/api/modules/databases/users.json (+0, -0)
56. server_side/api/modules/databases/votings.json (+0, -0)
57. server_side/api/modules/denunciation.py (+0, -0)
58. server_side/api/modules/image5.jpg (+0, -0)
59. server_side/api/modules/lot.jpg (+0, -0)
60. server_side/api/modules/navigation.py (+0, -0)
61. server_side/api/modules/rating_system.py (+0, -0)
62. server_side/api/modules/smart_park.py (+0, -1)
63. server_side/api/modules/user_info.py (+0, -0)
64. server_side/api/modules/utility.py (+0, -0)
65. server_side/api/modules/utils.py (+0, -0)
66. server_side/api/modules/voting_system.py (+0, -0)
67. server_side/api/requirements.txt (+0, -0)
68. server_side/apia/images/9vard12ty0ad2yvwp3q53rsf3h43r2vq.png (BIN)
69. server_side/apia/images/9vard12ty0ad2yvwp3q53rsf3h43r2vq_qr.png (BIN)
70. server_side/apia/modules/__init__.py (+0, -0)
71. server_side/apia/modules/databases/denunciations.json (+0, -206)
72. traffic_analyzer/ambulance_detect.py (+84, -60)
73. traffic_analyzer/debug_data/amb_1.mp4 (+0, -0)
74. traffic_analyzer/debug_data/amb_2.mp4 (+0, -0)
75. traffic_analyzer/debug_data/frame_data.pkl (+0, -0)
76. traffic_analyzer/images/1_coordinates.txt (+0, -0)
77. traffic_analyzer/images/coordinates.json (+626, -0)
78. traffic_analyzer/images/train_image_taker.py (+95, -0)
79. traffic_analyzer/object_detection/data/mscoco_label_map.pbtxt (+2, -2)
80. traffic_analyzer/receive.py (+0, -53)
81. traffic_analyzer/saver.py (+0, -11)
82. traffic_analyzer/sender.py (+0, -112)
83. traffic_analyzer/traffic_analyzer.iml (+3, -0)
84. traffic_analyzer/train_image_taker.py (+0, -66)
85. traffic_analyzer/windowsTemp.py (+0, -73)

+ 11
- 0
MyCity/.gitignore

@ -13,3 +13,14 @@ rfcn_resnet101_coco_11_06_2017\
ssd_inception_v2_coco_2017_11_17\ ssd_inception_v2_coco_2017_11_17\
ssd_mobilenet_v1_coco_2017_11_17\ ssd_mobilenet_v1_coco_2017_11_17\
faster_rcnn_resnet101_coco_11_06_2017\ faster_rcnn_resnet101_coco_11_06_2017\
ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20/
road.svg
ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20/
ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20/
frozen_inference_graph.pb
model.ckpt.data-00000-of-00001
model.ckpt.index
model.ckpt.meta
pipeline.config
saved_model.pb
variables/

+ 4
- 6
MyCity/app/build.gradle

@ -12,7 +12,7 @@ android {
android.defaultConfig.vectorDrawables.useSupportLibrary = true android.defaultConfig.vectorDrawables.useSupportLibrary = true
multiDexEnabled true multiDexEnabled true
} }
buildTypes { buildTypes {
release { release {
@ -23,15 +23,13 @@ android {
} }
} }
dependencies { dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs') implementation fileTree(include: ['*.jar'], dir: 'libs')
api 'cz.msebera.android:httpclient:4.4.1.2' api 'cz.msebera.android:httpclient:4.4.1.2'
implementation ('com.github.chathuralakmal:AndroidImagePopup:1.2.1',{
implementation('com.github.chathuralakmal:AndroidImagePopup:1.2.1', {
exclude module: "com.github.bumptech.glide" exclude module: "com.github.bumptech.glide"
}) })
implementation 'com.journeyapps:zxing-android-embedded:3.6.0' implementation 'com.journeyapps:zxing-android-embedded:3.6.0'
@ -45,9 +43,9 @@ dependencies {
testImplementation 'junit:junit:4.12' testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2' androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
implementation ('com.google.android.libraries.places:places:1.1.0',{
implementation('com.google.android.libraries.places:places:1.1.0', {
exclude group: "com.github.bumptech.glide" exclude group: "com.github.bumptech.glide"
}) })
implementation 'com.google.android.gms:play-services-maps:16.1.0' implementation 'com.google.android.gms:play-services-maps:16.1.0'
}
}

+ 1
- 0
MyCity/app/src/main/AndroidManifest.xml

@ -38,6 +38,7 @@
<category android:name="android.intent.category.LAUNCHER"/> <category android:name="android.intent.category.LAUNCHER"/>
</intent-filter> </intent-filter>
</activity> </activity>
</application> </application>
</manifest> </manifest>

+ 98
- 5
MyCity/app/src/main/java/gq/yigit/mycity/MainActivity.java

@ -2,20 +2,25 @@ package gq.yigit.mycity;
import android.Manifest; import android.Manifest;
import android.app.Activity; import android.app.Activity;
import android.app.Fragment;
import android.content.Context; import android.content.Context;
import android.content.DialogInterface; import android.content.DialogInterface;
import android.content.Intent; import android.content.Intent;
import android.content.pm.PackageManager; import android.content.pm.PackageManager;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.Uri; import android.net.Uri;
import android.os.Bundle; import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.NavigationView.OnNavigationItemSelectedListener; import android.support.design.widget.NavigationView.OnNavigationItemSelectedListener;
import android.support.v4.app.ActivityCompat; import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction; import android.support.v4.app.FragmentTransaction;
import android.support.v4.content.ContextCompat; import android.support.v4.content.ContextCompat;
import android.support.v7.app.AlertDialog; import android.support.v7.app.AlertDialog;
import android.util.DisplayMetrics;
import android.util.Log; import android.util.Log;
import android.view.*; import android.view.*;
import android.support.design.widget.NavigationView; import android.support.design.widget.NavigationView;
@ -28,7 +33,10 @@ import android.widget.EditText;
import android.widget.ImageView; import android.widget.ImageView;
import android.widget.TextView; import android.widget.TextView;
import android.widget.Toast; import android.widget.Toast;
import com.bumptech.glide.Glide;
import gq.yigit.mycity.complaintsFragment.ComplaintFragment;
import gq.yigit.mycity.complaintsFragment.ComplaintViewFragment;
import gq.yigit.mycity.complaintsFragment.ComplaintsContent;
import gq.yigit.mycity.complaintsFragment.ComplaintsFragment;
import gq.yigit.mycity.navigation.TransitFragment; import gq.yigit.mycity.navigation.TransitFragment;
import gq.yigit.mycity.tools.*; import gq.yigit.mycity.tools.*;
import gq.yigit.mycity.tools.WebRequest.responseListener; import gq.yigit.mycity.tools.WebRequest.responseListener;
@ -40,6 +48,7 @@ import gq.yigit.mycity.votesFragment.VotesFragment.OnListFragmentInteractionList
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import static gq.yigit.mycity.tools.ImageDownload.*; import static gq.yigit.mycity.tools.ImageDownload.*;
@ -56,6 +65,9 @@ public class MainActivity extends AppCompatActivity
QRFragment.OnFragmentInteractionListener, QRFragment.OnFragmentInteractionListener,
OnFragmentInteractionListener, OnFragmentInteractionListener,
ParkFragment.OnFragmentInteractionListener, ParkFragment.OnFragmentInteractionListener,
ComplaintFragment.OnComplaintsClicked,
ComplaintsFragment.OnListFragmentInteractionListener,
ComplaintViewFragment.OnFragmentInteractionListener,
responseListener, responseListener,
imageListener { imageListener {
@ -66,13 +78,54 @@ public class MainActivity extends AppCompatActivity
private ImageView avatarView; private ImageView avatarView;
private TextView userName; private TextView userName;
public static Activity mainActivity; public static Activity mainActivity;
public static DisplayMetrics pix_density;
public static String apikey = "AIzaSyBuOC03IHPA_6TPnfk18b0SAgD1uge4-dk"; public static String apikey = "AIzaSyBuOC03IHPA_6TPnfk18b0SAgD1uge4-dk";
public boolean present = true;
public MenuItem present_item;
public static LocationManager locationManager;
@Override @Override
protected void onCreate(Bundle savedInstanceState) { protected void onCreate(Bundle savedInstanceState) {
Log.d("[BOOKMARK]","Started creating activity"); Log.d("[BOOKMARK]","Started creating activity");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
pix_density = getApplicationContext().getResources().getDisplayMetrics();
locationManager = (LocationManager)getSystemService(Context.LOCATION_SERVICE);
if ( ContextCompat.checkSelfPermission( this, android.Manifest.permission.ACCESS_COARSE_LOCATION ) != PackageManager.PERMISSION_GRANTED ) {
ActivityCompat.requestPermissions( this, new String[] { android.Manifest.permission.ACCESS_COARSE_LOCATION },
1 );
}else if( ContextCompat.checkSelfPermission( this, Manifest.permission.ACCESS_FINE_LOCATION ) != PackageManager.PERMISSION_GRANTED ) {
ActivityCompat.requestPermissions( this, new String[] { Manifest.permission.ACCESS_FINE_LOCATION },
1 );
}
try {
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 10, new LocationListener() {
@Override
public void onLocationChanged(Location location) {
Log.i("[INFO]", "Location changed to lat:" + location.getLatitude() + " lng:" + location.getLongitude());
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
}
@Override
public void onProviderEnabled(String provider) {
Log.i("[INFO]", "Provider enabled: " + provider);
}
@Override
public void onProviderDisabled(String provider) {
Log.i("[INFO]", "Provider disabled: " + provider);
}
});
}catch (SecurityException e){
Log.e("[ERROR]", "An error occured with location permissions");
}
setContentView(R.layout.activity_main); setContentView(R.layout.activity_main);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar); setSupportActionBar(toolbar);
@ -83,6 +136,7 @@ public class MainActivity extends AppCompatActivity
drawer.addDrawerListener(toggle); drawer.addDrawerListener(toggle);
toggle.syncState(); toggle.syncState();
FileActions file_manager = new FileActions(); FileActions file_manager = new FileActions();
url = file_manager.readFromFile(cntxt,"server.config").trim(); url = file_manager.readFromFile(cntxt,"server.config").trim();
HashMap<String,String> request = new HashMap<>(); HashMap<String,String> request = new HashMap<>();
@ -95,6 +149,9 @@ public class MainActivity extends AppCompatActivity
NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
navigationView.setNavigationItemSelectedListener(this); navigationView.setNavigationItemSelectedListener(this);
Menu menu = navigationView.getMenu();
present_item = menu.findItem(R.id.present_items);
present_item.setVisible(!present);
MainFragment fragment = new MainFragment(); MainFragment fragment = new MainFragment();
View header = navigationView.getHeaderView(0); View header = navigationView.getHeaderView(0);
@ -154,6 +211,15 @@ public class MainActivity extends AppCompatActivity
alert.show(); alert.show();
return true; return true;
}else if(id == R.id.action_presentation){
present = !present;
present_item.setVisible(!present);
Toast.makeText(getApplicationContext(),"Toggled presentation mode!",Toast.LENGTH_LONG).show();
}else if(id == R.id.action_restart){
Intent intent = new Intent(this, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
finish();
} }
return super.onOptionsItemSelected(item); return super.onOptionsItemSelected(item);
@ -201,10 +267,13 @@ public class MainActivity extends AppCompatActivity
QRFragment fragment= new QRFragment(); QRFragment fragment= new QRFragment();
fragmentTransaction.replace(R.id.app_bar_main, fragment); fragmentTransaction.replace(R.id.app_bar_main, fragment);
fragmentTransaction.commit(); fragmentTransaction.commit();
}else if (id == R.id.complaint){
ComplaintFragment fragment= new ComplaintFragment();
fragmentTransaction.replace(R.id.app_bar_main, fragment);
fragmentTransaction.commit();
} }
fragmentTransaction.addToBackStack(null);
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
drawer.closeDrawer(GravityCompat.START); drawer.closeDrawer(GravityCompat.START);
return true; return true;
@ -219,6 +288,20 @@ public class MainActivity extends AppCompatActivity
} }
public void onFragmentInteraction(Uri uri){ public void onFragmentInteraction(Uri uri){
}
public void ComplaintsClicked(@Nullable ComplaintsContent.ComplaintItem item){
if(item == null) {
FragmentManager fragmentManager = getSupportFragmentManager();
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
ComplaintsFragment fragment = new ComplaintsFragment();
fragmentTransaction.replace(R.id.app_bar_main, fragment);
fragmentTransaction.commit();
return;
}
} }
@ -255,5 +338,15 @@ public class MainActivity extends AppCompatActivity
} }
} }
@Override
public void onListFragmentInteraction(ComplaintsContent.ComplaintItem item){
FragmentManager fragmentManager = getSupportFragmentManager();
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
ComplaintViewFragment fragment = ComplaintViewFragment.newInstance(item.toString());
fragmentTransaction.replace(R.id.app_bar_main, fragment);
fragmentTransaction.commit();
}
} }

+ 15
- 3
MyCity/app/src/main/java/gq/yigit/mycity/MainFragment.java

@ -2,6 +2,8 @@ package gq.yigit.mycity;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.location.Location;
import android.location.LocationManager;
import android.net.Uri; import android.net.Uri;
import android.os.Bundle; import android.os.Bundle;
import android.support.v4.app.Fragment; import android.support.v4.app.Fragment;
@ -36,6 +38,7 @@ public class MainFragment extends Fragment implements WebRequest.responseListene
private TextView temp_text; private TextView temp_text;
private TextView humi_text; private TextView humi_text;
private TextView pres_text; private TextView pres_text;
private TextView city_text;
private ImageView weather_img; private ImageView weather_img;
private RecyclerView recyclerView; private RecyclerView recyclerView;
private SwipeRefreshLayout swipeRefreshLayout; private SwipeRefreshLayout swipeRefreshLayout;
@ -64,13 +67,22 @@ public class MainFragment extends Fragment implements WebRequest.responseListene
View rootView = inflater.inflate(R.layout.fragment_main, container, false); View rootView = inflater.inflate(R.layout.fragment_main, container, false);
temp_text = rootView.findViewById(R.id.temp_text); temp_text = rootView.findViewById(R.id.temp_text);
humi_text = rootView.findViewById(R.id.humidity); humi_text = rootView.findViewById(R.id.humidity);
city_text = rootView.findViewById(R.id.city_name);
pres_text = rootView.findViewById(R.id.pressure); pres_text = rootView.findViewById(R.id.pressure);
weather_img = rootView.findViewById(R.id.forecast_img); weather_img = rootView.findViewById(R.id.forecast_img);
recyclerView = rootView.findViewById(R.id.anouncements); recyclerView = rootView.findViewById(R.id.anouncements);
swipeRefreshLayout = rootView.findViewById(R.id.simpleSwipeRefreshLayout); swipeRefreshLayout = rootView.findViewById(R.id.simpleSwipeRefreshLayout);
HashMap<String,String> params = new HashMap<>(); HashMap<String,String> params = new HashMap<>();
params.put("q","Ankara,tr");
try {
Location curloc = MainActivity.locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
params.put("lat",String.valueOf(curloc.getLatitude()));
params.put("lon",String.valueOf(curloc.getLongitude()));
}catch (SecurityException e){
Log.e("[ERROR]", "An error occured with location permissions");
}
params.put("appid",key); params.put("appid",key);
FileActions file_manager = new FileActions(); FileActions file_manager = new FileActions();
@ -108,7 +120,7 @@ public class MainFragment extends Fragment implements WebRequest.responseListene
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }
@ -132,7 +144,7 @@ public class MainFragment extends Fragment implements WebRequest.responseListene
temp_text.setText((int)(Float.parseFloat(temp.getString("temp")) - 272.15) + " °C"); temp_text.setText((int)(Float.parseFloat(temp.getString("temp")) - 272.15) + " °C");
humi_text.setText("Humidity: %" + temp.getString("humidity")); humi_text.setText("Humidity: %" + temp.getString("humidity"));
pres_text.setText("Pressure: " + temp.getString("pressure") + "hpa"); pres_text.setText("Pressure: " + temp.getString("pressure") + "hpa");
city_text.setText(new JSONObject(response).getString("name"));
ImageDownload imageDownload = new ImageDownload(); ImageDownload imageDownload = new ImageDownload();
imageDownload.addListener(this); imageDownload.addListener(this);
imageDownload.execute(String.format("http://openweathermap.org/img/w/%s.png", weatherdata.getString("icon"))); imageDownload.execute(String.format("http://openweathermap.org/img/w/%s.png", weatherdata.getString("icon")));


+ 1
- 1
MyCity/app/src/main/java/gq/yigit/mycity/ParkFragment.java

@ -132,7 +132,7 @@ public class ParkFragment extends Fragment implements WebRequest.responseListene
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }


+ 1
- 1
MyCity/app/src/main/java/gq/yigit/mycity/QRFragment.java

@ -71,7 +71,7 @@ public class QRFragment extends Fragment {
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }


+ 1
- 1
MyCity/app/src/main/java/gq/yigit/mycity/RateFragment.java

@ -152,7 +152,7 @@ public class RateFragment extends Fragment implements WebRequest.responseListene
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }


+ 209
- 0
MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintFragment.java

@ -0,0 +1,209 @@
package gq.yigit.mycity.complaintsFragment;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.location.Location;
import android.location.LocationManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.util.Base64;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import gq.yigit.mycity.MainActivity;
import gq.yigit.mycity.R;
import gq.yigit.mycity.tools.FileActions;
import gq.yigit.mycity.tools.WebRequest;
import org.json.JSONException;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.HashMap;
import static android.app.Activity.RESULT_OK;
public class ComplaintFragment extends Fragment implements WebRequest.responseListener {
private OnComplaintsClicked mListener;
private ImageView complaint_image;
private ComplaintFragment activity;
private Uri mImageUri;
private Bitmap img;
private String img_b64 = "";
private Button submit_button;
private EditText text_in;
private String url;
public ComplaintFragment() {
// Required empty public constructor
}
public static ComplaintFragment newInstance(String param1, String param2) {
ComplaintFragment fragment = new ComplaintFragment();
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
private File createTemporaryFile(String part, String ext) throws Exception
{
File tempDir= Environment.getExternalStorageDirectory();
tempDir=new File(tempDir.getAbsolutePath()+"/.temp/");
if(!tempDir.exists())
{
tempDir.mkdirs();
}
return File.createTempFile(part, ext, tempDir);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_complaint, container, false);
activity = this;
FileActions file_manager = new FileActions();
url = file_manager.readFromFile(getContext(),"server.config").trim();
complaint_image = rootView.findViewById(R.id.complaint_image);
submit_button = rootView.findViewById(R.id.compaint_submit);
text_in = rootView.findViewById(R.id.complaint_text);
rootView.findViewById(R.id.complaints_button).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mListener.ComplaintsClicked(null);
}
});
submit_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
HashMap<String,String> params = new HashMap<>();
try {
Location curloc = MainActivity.locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
params.put("lat",String.valueOf(curloc.getLatitude()));
params.put("lng",String.valueOf(curloc.getLongitude()));
}catch (SecurityException e){
Log.e("[ERROR]", "An error occured with location permissions");
}
if(img_b64.isEmpty()){
Toast.makeText(getContext(),"Please take a photo of the complaint!",Toast.LENGTH_LONG).show();
return;
}
if(text_in.getText().toString().length() < 10){
Toast.makeText(getContext(),"Complaint should be minimum 10 characters",Toast.LENGTH_LONG).show();
return;
}
params.put("img",img_b64);
params.put("content",text_in.getText().toString());
try {
params.put("id", MainActivity.userData.getString("id"));
}catch (JSONException e){
Log.e("[ERROR]","Cannot get id");
}
WebRequest request = new WebRequest(url+"/complaint",false, params,0);
request.addListener(activity);
request.execute();
}
});
complaint_image.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
File photo;
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
try
{
// place where to store camera taken picture
photo = createTemporaryFile("picture", ".jpg");
photo.delete();
Uri mImageUri = Uri.fromFile(photo);
}
catch(Exception e)
{
Log.v("[ERROR]", "Can't create file to take picture!");
Toast.makeText(getContext(), "Please check SD card! Image shot is impossible!", Toast.LENGTH_LONG);
}
intent.putExtra(MediaStore.EXTRA_OUTPUT, mImageUri);
//start camera intent
activity.startActivityForResult(intent, 100);
}
});
return rootView;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnComplaintsClicked) {
mListener = (OnComplaintsClicked) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnComplaintsClicked");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public interface OnComplaintsClicked {
void ComplaintsClicked(@Nullable ComplaintsContent.ComplaintItem item);
}
//called after camera intent finished
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data)
{
if(requestCode==100 && resultCode==RESULT_OK)
{
Bitmap photo = (Bitmap) data.getExtras().get("data");
img = photo;
complaint_image.setImageBitmap(img);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
img.compress(Bitmap.CompressFormat.PNG, 100, byteArrayOutputStream);
byte[] byteArray = byteArrayOutputStream.toByteArray();
img_b64= Base64.encodeToString(byteArray, Base64.DEFAULT);
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
public void receivedResponse(boolean success, String response,int id){
if(success){
Toast.makeText(getContext(),"Complaint send successfully!",Toast.LENGTH_SHORT).show();
}
}
}

+ 116
- 0
MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintViewFragment.java

@ -0,0 +1,116 @@
package gq.yigit.mycity.complaintsFragment;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Base64;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import gq.yigit.mycity.R;
import org.json.JSONException;
import org.json.JSONObject;
public class ComplaintViewFragment extends Fragment {
// TODO: Rename parameter arguments, choose names that match
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
private static final String ARG_PARAM1 = "param1";
// TODO: Rename and change types of parameters
private JSONObject mParam1;
private OnFragmentInteractionListener mListener;
public ComplaintViewFragment() {
// Required empty public constructor
}
public static ComplaintViewFragment newInstance(String param1) {
ComplaintViewFragment fragment = new ComplaintViewFragment();
Bundle args = new Bundle();
args.putString(ARG_PARAM1, param1);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
try {
mParam1 = new JSONObject(getArguments().getString(ARG_PARAM1));
}catch(JSONException e){
Log.e("[ERROR]", "JSON error occured while getting params in ComplaintViewFragment");
}
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_complaint_view, container, false);
TextView address = rootView.findViewById(R.id.cv_address);
TextView comment = rootView.findViewById(R.id.cv_message);
TextView priority = rootView.findViewById(R.id.cv_priority);
TextView comment_action = rootView.findViewById(R.id.cv_response_message);
TextView main_action = rootView.findViewById(R.id.cv_state_text);
ImageView image_main = rootView.findViewById(R.id.cv_image);
ImageView action_image = rootView.findViewById(R.id.cv_state_img);
try {
address.setText(mParam1.getString("address"));
comment.setText(mParam1.getString("content"));
priority.setText(mParam1.getJSONObject("status").getString("priority"));
main_action.setText("Your complaint is being processed");
comment_action.setText("Your complaint is being processed");
action_image.setImageResource(R.drawable.status_pending);
if(mParam1.getJSONObject("status").getBoolean("status")){
main_action.setText("Your complaint has been processed");
comment_action.setText(mParam1.getJSONObject("status").getString("comment"));
action_image.setImageResource(R.drawable.status_done);
}
byte[] decodedString = Base64.decode(mParam1.getString("img"), Base64.DEFAULT);
Bitmap decodedByte = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length);
image_main.setImageBitmap(decodedByte);
}catch (JSONException e){
Log.e("[ERROR]","JSONException occured while setting up ComplaintViewFragment");
}
return rootView;
}
public void onButtonPressed(Uri uri) {
if (mListener != null) {
mListener.onFragmentInteraction(uri);
}
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnFragmentInteractionListener) {
mListener = (OnFragmentInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public interface OnFragmentInteractionListener {
// TODO: Update argument type and name
void onFragmentInteraction(Uri uri);
}
}

+ 110
- 0
MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintsContent.java

@ -0,0 +1,110 @@
package gq.yigit.mycity.complaintsFragment;
import android.graphics.Bitmap;
import android.location.Address;
import android.location.Geocoder;
import android.util.Log;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ComplaintsContent {
public static final List<ComplaintItem> ITEMS = new ArrayList<ComplaintItem>();
public static final Map<String, ComplaintItem> ITEM_MAP = new HashMap<String, ComplaintItem>();
public static void addItem(ComplaintItem item) {
ITEMS.add(item);
ITEM_MAP.put(item.id, item);
}
public static ComplaintItem createComplaintItem(JSONObject obj,Geocoder geocoder, ArrayList<Bitmap> stat_imgs) throws JSONException{
List<Address> addresses = null;
try {
addresses = geocoder.getFromLocation(
Double.parseDouble(obj.getString("lat")),
Double.parseDouble(obj.getString("lng")),
1);
} catch (IOException ioException) {
// Catch network or other I/O problems.
Log.e("[ERROR]", "Service not available", ioException);
} catch (IllegalArgumentException illegalArgumentException) {
// Catch invalid latitude or longitude values.
Log.e("[ERROR]", "LatLng not appropriate. Lattitude:" +
obj.getString("lat") +
", Longitude = " +
obj.getString("lng"), illegalArgumentException);
}
Address address = null;
if (!(addresses == null || addresses.size() == 0)) {
address = addresses.get(0);
ArrayList<String> addressFragments = new ArrayList<String>();
for(int i = 0; i <= address.getMaxAddressLineIndex(); i++) {
addressFragments.add(address.getAddressLine(i));
}
Log.i("[INFO]", "Address found " + address.getAddressLine(0));
}
Bitmap stat_img = stat_imgs.get(1);
if(obj.getJSONObject("response").getBoolean("status")){
stat_img = stat_imgs.get(0);
}
return new ComplaintItem(address.getAddressLine(0),
obj.getString("datetime"),
obj.getString("img"),
obj.getJSONObject("response"),
obj.getString("content"),
String.valueOf(ITEMS.size()),
stat_img
);
}
public static class ComplaintItem{
public final String loc;
public final String datetime;
public final String image;
public final JSONObject status;
public final String content;
public final String id;
public final Bitmap status_img;
public ComplaintItem(String loc, String datetime, String image, JSONObject status, String content, String id, Bitmap status_img) {
this.loc = loc;
this.datetime = datetime;
this.image = image;
this.status = status;
this.content = content;
this.id = id;
this.status_img = status_img;
}
@Override
public String toString() {
String data = "";
try{
JSONObject obj = new JSONObject();
obj.put("address",loc);
obj.put("datetime",datetime);
obj.put("img",image);
obj.put("status",status);
obj.put("content",content);
data = obj.toString();
}catch (JSONException e){
Log.e("[ERROR]","JSONException occured in ComplaintItem.toString() method");
}
return data;
}
}
}

+ 134
- 0
MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/ComplaintsFragment.java

@ -0,0 +1,134 @@
package gq.yigit.mycity.complaintsFragment;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.location.Geocoder;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import gq.yigit.mycity.MainActivity;
import gq.yigit.mycity.R;
import gq.yigit.mycity.complaintsFragment.ComplaintsContent.ComplaintItem;
import gq.yigit.mycity.tools.FileActions;
import gq.yigit.mycity.tools.WebRequest;
import org.json.JSONArray;
import org.json.JSONException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
public class ComplaintsFragment extends Fragment implements WebRequest.responseListener {
private static final String ARG_COLUMN_COUNT = "column-count";
private int mColumnCount = 1;
private OnListFragmentInteractionListener mListener;
public static Geocoder geocoder;
public ArrayList<Bitmap> stat_imgs;
private RecyclerView recyclerView;
public ComplaintsFragment() {
}
@SuppressWarnings("unused")
public static ComplaintsFragment newInstance(int columnCount) {
ComplaintsFragment fragment = new ComplaintsFragment();
Bundle args = new Bundle();
args.putInt(ARG_COLUMN_COUNT, columnCount);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
mColumnCount = getArguments().getInt(ARG_COLUMN_COUNT);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_complaint_list, container, false);
geocoder = new Geocoder(getContext(), Locale.getDefault());
HashMap<String,String> params = new HashMap<>();
ComplaintsContent.ITEMS.clear();
ComplaintsContent.ITEM_MAP.clear();
try {
params.put("id", MainActivity.userData.getString("id"));
}catch (JSONException e){
Log.e("[ERROR]","Cannot get id");
}
stat_imgs = new ArrayList<>();
stat_imgs.add(BitmapFactory.decodeResource(getContext().getResources(), R.drawable.status_done));
stat_imgs.add(BitmapFactory.decodeResource(getContext().getResources(), R.drawable.status_pending));
FileActions file_manager = new FileActions();
String url = file_manager.readFromFile(getContext(),"server.config").trim();
WebRequest request = new WebRequest(url + "/complaints",false,params,0);
request.addListener(this);
request.execute();
if (view instanceof RecyclerView) {
Context context = view.getContext();
recyclerView = (RecyclerView) view;
if (mColumnCount <= 1) {
recyclerView.setLayoutManager(new LinearLayoutManager(context));
} else {
recyclerView.setLayoutManager(new GridLayoutManager(context, mColumnCount));
}
}
return view;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnListFragmentInteractionListener) {
mListener = (OnListFragmentInteractionListener) context;
} else {
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public interface OnListFragmentInteractionListener {
void onListFragmentInteraction(ComplaintItem item);
}
@Override
public void receivedResponse(boolean success,String response,int id){
if(success) {
try {
JSONArray data = new JSONArray(response);
for (int i = 0; i < data.length(); i++) {
ComplaintItem item =ComplaintsContent.createComplaintItem(data.getJSONObject(i), geocoder, stat_imgs);
ComplaintsContent.addItem(item);
}
} catch (JSONException e) {
Log.e("[ERROR]", "Error occured with complaints response!");
}
recyclerView.setAdapter(new MyComplaintRecyclerViewAdapter(ComplaintsContent.ITEMS, mListener));
}
}
}

+ 108
- 0
MyCity/app/src/main/java/gq/yigit/mycity/complaintsFragment/MyComplaintRecyclerViewAdapter.java

@ -0,0 +1,108 @@
package gq.yigit.mycity.complaintsFragment;
import android.graphics.BitmapFactory;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import gq.yigit.mycity.MainActivity;
import gq.yigit.mycity.R;
import org.json.JSONException;
import java.util.List;
public class MyComplaintRecyclerViewAdapter extends RecyclerView.Adapter<MyComplaintRecyclerViewAdapter.ViewHolder> {
private final List<ComplaintsContent.ComplaintItem> mValues;
private final ComplaintsFragment.OnListFragmentInteractionListener mListener;
public MyComplaintRecyclerViewAdapter(List<ComplaintsContent.ComplaintItem> items, ComplaintsFragment.OnListFragmentInteractionListener listener) {
mValues = items;
mListener = listener;
}
@Override
public MyComplaintRecyclerViewAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext())
.inflate(R.layout.votes_list_item, parent, false);
return new MyComplaintRecyclerViewAdapter.ViewHolder(view);
}
@Override
public void onBindViewHolder(final MyComplaintRecyclerViewAdapter.ViewHolder holder, int position) {
holder.mItem = mValues.get(position);
holder.mIdView.setText(mValues.get(position).loc);
holder.mContentView.setText(mValues.get(position).datetime);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT
);
params.height = (int) TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP,
50,
MainActivity.pix_density
);
params.width = (int) TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP,
70,
MainActivity.pix_density
);
params.topMargin = (int) TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP,
10,
MainActivity.pix_density
);
holder.mImageView.setLayoutParams(params);
try {
holder.mImageView.setImageResource(R.drawable.status_pending);
if (mValues.get(position).status.getBoolean("status")) {
holder.mImageView.setImageResource(R.drawable.status_done);
}
}catch (JSONException e){
Log.e("[ERROR]","An error occured with image");
}
holder.mView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (null != mListener) {
mListener.onListFragmentInteraction(holder.mItem);
}
}
});
}
@Override
public int getItemCount() {
return mValues.size();
}
public class ViewHolder extends RecyclerView.ViewHolder {
public final View mView;
public final TextView mIdView;
public final TextView mContentView;
public final ImageView mImageView;
public ComplaintsContent.ComplaintItem mItem;
public ViewHolder(View view) {
super(view);
mView = view;
mIdView = (TextView) view.findViewById(R.id.item_number);
mContentView = (TextView) view.findViewById(R.id.content);
mImageView = (ImageView) view.findViewById(R.id.vote_img);
}
@Override
public String toString() {
return super.toString() + " '" + mContentView.getText() + "'";
}
}
}

+ 1
- 1
MyCity/app/src/main/java/gq/yigit/mycity/navigation/TransitFragment.java

@ -123,7 +123,7 @@ public class TransitFragment extends Fragment implements WebRequest.responseList
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }


+ 0
- 2
MyCity/app/src/main/java/gq/yigit/mycity/tools/WebRequest.java

@ -80,9 +80,7 @@ public class WebRequest extends AsyncTask<Void,Void,String> {
iterator.remove(); iterator.remove();
} }
post_request.setEntity(new UrlEncodedFormEntity(pairs)); post_request.setEntity(new UrlEncodedFormEntity(pairs));
Log.d("[BOOKMARK]","Started execute");
response = client.execute(post_request); response = client.execute(post_request);
Log.d("[BOOKMARK]","Done execute");
}catch (Exception e){ }catch (Exception e){
Log.e("[ERROR](request:86): ", e.toString()); Log.e("[ERROR](request:86): ", e.toString());
} }


+ 1
- 1
MyCity/app/src/main/java/gq/yigit/mycity/utility/UtilityMain.java

@ -84,7 +84,7 @@ public class UtilityMain extends Fragment {
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }


+ 1
- 1
MyCity/app/src/main/java/gq/yigit/mycity/votesFragment/VoteFragment.java

@ -95,7 +95,7 @@ public class VoteFragment extends Fragment implements responseListener, imageLis
mListener = (OnFragmentInteractionListener) context; mListener = (OnFragmentInteractionListener) context;
} else { } else {
throw new RuntimeException(context.toString() throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
+ " must implement OnComplaintsClicked");
} }
} }


+ 3
- 3
MyCity/app/src/main/res/drawable-v24/side_nav_bar.xml

@ -2,8 +2,8 @@
android:shape="rectangle"> android:shape="rectangle">
<gradient <gradient
android:angle="135" android:angle="135"
android:centerColor="#009688"
android:endColor="#00695C"
android:startColor="#4DB6AC"
android:endColor="#FF6F00"
android:centerColor="#FF8F00"
android:startColor="#FFA000"
android:type="linear"/> android:type="linear"/>
</shape> </shape>

+ 4
- 0
MyCity/app/src/main/res/drawable/camera.xml

@ -0,0 +1,4 @@
<vector android:height="24dp" android:viewportHeight="100"
android:viewportWidth="100" android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#030104" android:pathData="M50,40c-8.285,0 -15,6.718 -15,15c0,8.285 6.715,15 15,15c8.283,0 15,-6.715 15,-15C65,46.718 58.283,40 50,40zM90,25H78c-1.65,0 -3.428,-1.28 -3.949,-2.846l-3.102,-9.309C70.426,11.28 68.65,10 67,10H33c-1.65,0 -3.428,1.28 -3.949,2.846l-3.102,9.309C25.426,23.72 23.65,25 22,25H10C4.5,25 0,29.5 0,35v45c0,5.5 4.5,10 10,10h80c5.5,0 10,-4.5 10,-10V35C100,29.5 95.5,25 90,25zM50,80c-13.807,0 -25,-11.193 -25,-25c0,-13.806 11.193,-25 25,-25c13.805,0 25,11.194 25,25C75,68.807 63.805,80 50,80zM86.5,41.993c-1.932,0 -3.5,-1.566 -3.5,-3.5c0,-1.932 1.568,-3.5 3.5,-3.5c1.934,0 3.5,1.568 3.5,3.5C90,40.427 88.433,41.993 86.5,41.993z"/>
</vector>

+ 5
- 0
MyCity/app/src/main/res/drawable/ic_home.xml

@ -0,0 +1,5 @@
<vector android:height="24dp" android:viewportHeight="512.001"
android:viewportWidth="512.001" android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#FFFFFFFF" android:pathData="M503.402,228.885L273.684,19.567c-10.083,-9.189 -25.288,-9.188 -35.367,-0.001L8.598,228.886c-8.077,7.36 -10.745,18.7 -6.799,28.889c3.947,10.189 13.557,16.772 24.484,16.772h36.69v209.721c0,8.315 6.742,15.057 15.057,15.057h125.914c8.315,0 15.057,-6.741 15.057,-15.057V356.932h74.002v127.337c0,8.315 6.742,15.057 15.057,15.057h125.908c8.315,0 15.057,-6.741 15.057,-15.057V274.547h36.697c10.926,0 20.537,-6.584 24.484,-16.772C514.147,247.585 511.479,236.246 503.402,228.885z"/>
<path android:fillColor="#FFFFFFFF" android:pathData="M445.092,42.73H343.973l116.176,105.636v-90.58C460.149,49.471 453.408,42.73 445.092,42.73z"/>
</vector>

+ 4
- 0
MyCity/app/src/main/res/drawable/road_cracked.xml

@ -0,0 +1,4 @@
<vector android:height="24dp" android:viewportHeight="512"
android:viewportWidth="511" android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#FF000000" android:pathData="m443.238,297.336 l-56.949,17.074 32.68,-52.84 -21.078,-19.594 31.477,-30.355 -31.707,-211.621h-283.609l-16.293,106.207 31.453,24.887 -36.859,10.355 -56.852,370.551h440.664zM270.824,447.34h-29.98v-49.973h29.98zM270.824,362.387h-29.98v-49.969h29.98zM270.824,277.438h-29.98v-49.969h29.98zM270.824,192.488h-29.98v-49.969h29.98zM270.824,107.539h-29.98v-49.973h29.98zM270.824,107.539"/>
</vector>

+ 4
- 0
MyCity/app/src/main/res/drawable/status_done.xml

@ -0,0 +1,4 @@
<vector android:height="24dp" android:viewportHeight="426.667"
android:viewportWidth="426.667" android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#6AC259" android:pathData="M213.333,0C95.518,0 0,95.514 0,213.333s95.518,213.333 213.333,213.333c117.828,0 213.333,-95.514 213.333,-213.333S331.157,0 213.333,0zM174.199,322.918l-93.935,-93.931l31.309,-31.309l62.626,62.622l140.894,-140.898l31.309,31.309L174.199,322.918z"/>
</vector>

+ 6
- 0
MyCity/app/src/main/res/drawable/status_pending.xml

@ -0,0 +1,6 @@
<vector android:height="24dp" android:viewportHeight="60"
android:viewportWidth="60" android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="#3083C9" android:pathData="M30,29m-29,0a29,29 0,1 1,58 0a29,29 0,1 1,-58 0"/>
<path android:fillColor="#A1C8EC" android:pathData="M30,60C13.458,60 0,46.542 0,30c0,-7.993 3.107,-15.514 8.749,-21.176c0.779,-0.785 2.047,-0.785 2.828,-0.006c0.783,0.78 0.785,2.046 0.006,2.828C6.693,16.555 4,23.072 4,30c0,14.337 11.663,26 26,26s26,-11.663 26,-26C56,16.337 45.405,5.101 32,4.076v10.757c0,1.104 -0.896,2 -2,2s-2,-0.896 -2,-2V2c0,-1.104 0.896,-2 2,-2c16.542,0 30,13.458 30,30S46.542,60 30,60z"/>
<path android:fillColor="#A1C8EC" android:pathData="M20,20.121L20,20.121l12.944,9.363c1.274,0.926 1.419,2.772 0.305,3.886l0,0c-1.114,1.114 -2.959,0.969 -3.886,-0.305L20,20.121z"/>
</vector>

+ 42
- 0
MyCity/app/src/main/res/layout/fragment_complaint.xml

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto" xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".complaintsFragment.ComplaintFragment">
<ImageView
android:src="@drawable/camera"
android:layout_width="415dp"
android:layout_height="242dp"
android:id="@+id/complaint_image" android:layout_marginTop="60dp"
app:layout_constraintTop_toTopOf="parent" app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent" android:layout_marginEnd="8dp"
app:layout_constraintHorizontal_bias="0.428"/>
<EditText
android:layout_width="0dp"
android:layout_height="168dp"
android:inputType="textMultiLine"
android:ems="10"
android:id="@+id/complaint_text" app:layout_constraintEnd_toEndOf="parent"
android:layout_marginEnd="8dp" app:layout_constraintStart_toStartOf="parent"
android:layout_marginStart="8dp"
app:layout_constraintHorizontal_bias="0.0"
android:layout_marginBottom="8dp" app:layout_constraintBottom_toTopOf="@+id/compaint_submit"
android:layout_marginTop="8dp" app:layout_constraintTop_toBottomOf="@+id/complaint_image"
app:layout_constraintVertical_bias="0.0"/>
<Button
android:text="Submit"
android:layout_width="114dp"
android:layout_height="wrap_content"
android:id="@+id/compaint_submit"
android:layout_marginBottom="28dp" app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintStart_toStartOf="parent" android:layout_marginStart="32dp"/>
<Button
android:text="Complaints"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/complaints_button"
app:layout_constraintTop_toTopOf="@+id/compaint_submit"
app:layout_constraintBottom_toBottomOf="@+id/compaint_submit" app:layout_constraintVertical_bias="0.0"
app:layout_constraintEnd_toEndOf="parent" android:layout_marginEnd="32dp"/>
</android.support.constraint.ConstraintLayout>

+ 15
- 0
MyCity/app/src/main/res/layout/fragment_complaint_list.xml

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.v7.widget.RecyclerView
android:layout_marginTop="@dimen/fragment_margin"
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/list"
android:name="gq.yigit.mycity.ComplaintsFragment"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginLeft="16dp"
android:layout_marginRight="16dp"
app:layoutManager="LinearLayoutManager"
tools:context=".complaintsFragment.ComplaintsFragment"
tools:listitem="@layout/votes_list_item"/>

+ 81
- 0
MyCity/app/src/main/res/layout/fragment_complaint_view.xml

@ -0,0 +1,81 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto" xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".complaintsFragment.ComplaintViewFragment">
<ImageView
android:src="@drawable/app_icon"
android:layout_width="358dp"
android:layout_height="236dp"
android:id="@+id/cv_image" app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent" android:layout_marginEnd="8dp" android:layout_marginTop="8dp"
app:layout_constraintTop_toTopOf="parent"/>
<TextView
android:text="TextView"
android:layout_width="252dp"
android:layout_height="41dp"
android:id="@+id/cv_address" app:layout_constraintEnd_toEndOf="@+id/cv_image"
android:layout_marginEnd="12dp" android:layout_marginTop="24dp"
app:layout_constraintTop_toBottomOf="@+id/cv_image"/>
<TextView
android:text="Address:"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cv_address_static" android:textColor="#000000"
android:textStyle="bold" android:layout_marginEnd="8dp"
app:layout_constraintEnd_toStartOf="@+id/cv_address" android:layout_marginStart="8dp"
app:layout_constraintStart_toStartOf="@+id/cv_image" app:layout_constraintHorizontal_bias="1.0"
app:layout_constraintTop_toTopOf="@+id/cv_address"/>
<TextView
android:text="Your message:"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cv_message_static" android:textColor="#000000"
android:textStyle="bold"
app:layout_constraintStart_toStartOf="@+id/cv_address_static" android:layout_marginEnd="8dp"
app:layout_constraintEnd_toStartOf="@+id/cv_message" app:layout_constraintHorizontal_bias="1.0"
android:layout_marginTop="24dp" app:layout_constraintTop_toBottomOf="@+id/cv_address_static"/>
<TextView
android:text="TextView"
android:layout_width="214dp"
android:layout_height="61dp"
android:id="@+id/cv_message"
app:layout_constraintEnd_toEndOf="@+id/cv_address"
app:layout_constraintTop_toTopOf="@+id/cv_message_static"/>
<ImageView
android:src="@drawable/status_pending"
android:layout_width="87dp"
android:layout_height="79dp"
android:id="@+id/cv_state_img" app:layout_constraintEnd_toStartOf="@+id/cv_response_message"
android:layout_marginEnd="20dp"
app:layout_constraintBottom_toBottomOf="@+id/cv_response_message"/>
<TextView
android:text="Your complaint is being processed:"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cv_state_text" android:textColor="#000000"
android:textStyle="bold" app:layout_constraintEnd_toEndOf="@+id/cv_message" android:layout_marginTop="76dp"
app:layout_constraintTop_toBottomOf="@+id/cv_message" android:layout_marginEnd="8dp"/>
<TextView
android:text="TextView"
android:layout_width="204dp"
android:layout_height="59dp"
android:id="@+id/cv_response_message" app:layout_constraintEnd_toEndOf="@+id/cv_state_text"
app:layout_constraintTop_toBottomOf="@+id/cv_state_text"/>
<TextView
android:text="Priority:"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cv_priority_static" android:textStyle="bold"
android:textColor="#000000" android:layout_marginTop="52dp"
app:layout_constraintTop_toBottomOf="@+id/cv_message_static"
app:layout_constraintStart_toStartOf="@+id/cv_message_static"/>
<TextView
android:text="TextView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cv_priority" app:layout_constraintStart_toEndOf="@+id/cv_priority_static"
android:layout_marginStart="16dp"
app:layout_constraintBottom_toBottomOf="@+id/cv_priority_static"/>
</android.support.constraint.ConstraintLayout>

+ 1
- 1
MyCity/app/src/main/res/layout/nav_header_main.xml View File

@ -27,6 +27,6 @@
android:paddingTop="@dimen/nav_header_vertical_spacing"
android:text="@string/nav_header_title"
android:textAppearance="@style/TextAppearance.AppCompat.Body1" android:id="@+id/uname"
android:layout_marginLeft="25dp"/>
android:layout_marginLeft="30dp"/>
</LinearLayout>

+ 16
- 4
MyCity/app/src/main/res/menu/activity_main_drawer.xml View File

@ -9,14 +9,20 @@
android:id="@+id/transit" android:id="@+id/transit"
android:icon="@drawable/subway" android:icon="@drawable/subway"
android:title="Public Transit"/> android:title="Public Transit"/>
<item
android:id="@+id/parking"
android:icon="@drawable/parking"
android:title="Smart Parking"/>
</menu> </menu>
</item> </item>
<item android:title="@string/foryou"> <item android:title="@string/foryou">
<menu>
<item
android:id="@+id/complaint"
android:icon="@drawable/road_cracked"
android:title="Complaint"/>
</menu>
</item>
<item android:title="Presentation" android:id="@+id/present_items">
<menu> <menu>
<item <item
android:id="@+id/rating" android:id="@+id/rating"
@ -40,6 +46,12 @@
android:id="@+id/qr_code" android:id="@+id/qr_code"
android:icon="@drawable/qr_code" android:icon="@drawable/qr_code"
android:title="QR Code"/> android:title="QR Code"/>
<item
android:id="@+id/parking"
android:icon="@drawable/parking"
android:title="Smart Parking"/>
</menu> </menu>
</item> </item>


+ 12
- 0
MyCity/app/src/main/res/menu/main.xml View File

@ -1,8 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto">
<item android:id="@+id/action_restart"
android:title="Restart"
android:orderInCategory="100"
app:showAsAction="never"/>
<item android:id="@+id/action_settings"
android:title="@string/action_settings"
android:orderInCategory="100"
app:showAsAction="never"/>
<item android:id="@+id/action_presentation"
android:title="Toggle presentation mode"
android:orderInCategory="100"
app:showAsAction="never"/>
</menu>

+ 2
- 2
MyCity/app/src/main/res/values/colors.xml View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#3F51B5</color>
<color name="colorPrimaryDark">#303F9F</color>
<color name="colorPrimary">#FFC107</color>
<color name="colorPrimaryDark">#FFB300</color>
<color name="colorAccent">#FF4081</color>
</resources>

+ 1
- 1
MyCity/app/src/main/res/values/strings.xml View File

@ -5,7 +5,7 @@
<string name="nav_header_title">Android Studio</string> <string name="nav_header_title">Android Studio</string>
<string name="nav_header_subtitle">android.studio@android.com</string> <string name="nav_header_subtitle">android.studio@android.com</string>
<string name="nav_header_desc">Navigation header</string> <string name="nav_header_desc">Navigation header</string>
<string name="action_settings">Settings</string>
<string name="action_settings">Set Server IP</string>
<string name="transport">Transport</string> <string name="transport">Transport</string>
<string name="foryou">For you</string> <string name="foryou">For you</string>
<string name="mystore_password">yigit007</string> <string name="mystore_password">yigit007</string>


server_side/apia/app.py → server_side/api/app.py View File


server_side/apia/encryption/mycity-decrypted.key → server_side/api/encryption/mycity-decrypted.key View File


server_side/apia/encryption/mycity.crt → server_side/api/encryption/mycity.crt View File


server_side/apia/encryption/mycity.csr → server_side/api/encryption/mycity.csr View File


server_side/apia/encryption/mycity.key → server_side/api/encryption/mycity.key View File


server_side/apia/encryption/mycity.pem → server_side/api/encryption/mycity.pem View File


BIN
server_side/api/images/9vard12ty0ad2yvwp3q53rsf3h43r2vq.png View File

Width: 512  |  Height: 512  |  Size: 16 KiB

server_side/apia/images/muhtarlik.jpg → server_side/api/images/muhtarlik.jpg View File


server_side/apia/images/park.jpg → server_side/api/images/park.jpg View File


server_side/apia/images/voting.jpg → server_side/api/images/voting.jpg View File


server_side/apia/modules/SpotSelector.py → server_side/api/modules/SpotSelector.py View File


server_side/apia/__init__.py → server_side/api/modules/__init__.py View File


server_side/apia/modules/announcements.py → server_side/api/modules/announcements.py View File


server_side/apia/modules/bus_stops.py → server_side/api/modules/bus_stops.py View File


+ 143
- 0
server_side/api/modules/complaint.py View File

@ -0,0 +1,143 @@
from flask import Flask, request
from flask_restful import Resource, Api, abort
import json
import io
import base64
from PIL import Image
import sys
import datetime

if sys.platform == "win32":
    import tensorflow as tf
    import numpy as np
    import pickle
    from utils import label_map_util
    from utils import visualization_utils as vis_util

app = Flask(__name__)
api = Api(app)

with open("modules/databases/complaints.json","r") as f:
    complaints = json.loads(f.read())

complaints_file = open("modules/databases/complaints.json","w")
complaints_file.write(json.dumps(complaints,indent=4))

if sys.platform == "win32":
    # Path to frozen detection graph. This is the actual model that is used for the object detection.
    PATH_TO_CKPT = 'trainedModels/ssd_mobilenet_RoadDamageDetector.pb'
    # List of the strings that is used to add the correct label for each box.
    PATH_TO_LABELS = 'trainedModels/crack_label_map.pbtxt'
    NUM_CLASSES = 8

    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')

    label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
    categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)

def load_image_into_numpy_array(image):
    (im_width, im_height) = image.size
    return np.array(image.getdata()).reshape(
        (im_height, im_width, 3)).astype(np.uint8)

def process_img(img_base64):
    if sys.platform == "win32":
        img = Image.open(io.BytesIO(base64.b64decode(img_base64)))
        with detection_graph.as_default():
            with tf.Session(graph=detection_graph) as sess:
                # Definite input and output Tensors for detection_graph
                image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
                # Each box represents a part of the image where a particular object was detected.
                detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
                # Each score represents the level of confidence for each of the objects.
                # The score is shown on the result image, together with the class label.
                detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
                detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
                num_detections = detection_graph.get_tensor_by_name('num_detections:0')
                # The array-based representation of the image will be used later in order to prepare the
                # result image with boxes and labels on it.
                image_np = load_image_into_numpy_array(img)
                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(image_np, axis=0)
                # Actual detection.
                (boxes, scores, classes, num) = sess.run(
                    [detection_boxes, detection_scores, detection_classes, num_detections],
                    feed_dict={image_tensor: image_np_expanded})
                # Visualization of the results of a detection.
                vis_util.visualize_boxes_and_labels_on_image_array(
                    image_np,
                    np.squeeze(boxes),
                    np.squeeze(classes).astype(np.int32),
                    np.squeeze(scores),
                    category_index,
                    min_score_thresh=0.3,
                    use_normalized_coordinates=True,
                    line_thickness=8)

                output_dict = {'detection_classes': classes, 'detection_scores': scores}
                defects = []
                defect_scores = {}
                for i in output_dict['detection_classes']:
                    cont = False
                    index = np.where(output_dict['detection_classes'] == i)[0][0]
                    score = output_dict['detection_scores'][index]
                    if score > 0.3:
                        defects.append(defect_scores[i])

                priority = sum(defects)//10
                if priority > 10:
                    priority = 10
                return base64.b64encode(pickle.dumps(image_np)).decode('ascii'), priority
    return img_base64, 7

class Complaint(Resource):
    def post(self):
        complaint = {}
        args = request.form.to_dict()
        complaint = args
        complaint["response"] = {"status": False}

        img_process, priority = process_img(complaint["img"])
        complaint["img"] = img_process
        complaint["response"]["priority"] = priority
        complaint["datetime"] = datetime.datetime.now().strftime('%b-%d-%I:%M %p-%G')
        try:
            complaints[complaint["id"]].append(complaint)
        except KeyError:
            complaints[complaint["id"]] = [complaint]
        del complaints[complaint["id"]][-1]["id"]

        complaints_file.seek(0)
        complaints_file.truncate()
        complaints_file.write(json.dumps(complaints, indent=4))

class Complaints(Resource):
    def post(self):
        id = request.form["id"]
        return complaints[id]

class ComplaintsAdmin(Resource):
    def get(self): return complaints
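
For reference, a complaint can be filed against this resource with a plain form-encoded POST. The sketch below is illustrative only: the host, port and /complaint path are assumptions (route registration lives in app.py, outside this hunk); the form keys "id" and "img" are the ones Complaint.post() actually reads, and any extra fields such as an address or message are stored as-is in complaints.json.

# Hypothetical client call for the Complaint resource above.
# Host, port and URL path are assumptions; only "id" and "img" are required by the handler.
import base64
import requests

with open("pothole.jpg", "rb") as f:
    img_b64 = base64.b64encode(f.read()).decode("ascii")

form = {
    "id": "user-42",                 # complaints are grouped per user id on the server
    "img": img_b64,                  # base64 image, analysed by process_img()
    "address": "Example Street 12",  # extra fields pass straight through to complaints.json
    "message": "Large crack in the road",
}
requests.post("http://<server-ip>:5000/complaint", data=form)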

server_side/apia/modules/databases/announcements.json → server_side/api/modules/databases/announcements.json View File


server_side/apia/modules/databases/bus.json → server_side/api/modules/databases/bus.json View File


server_side/apia/modules/databases/bus_locations.json → server_side/api/modules/databases/bus_locations.json View File


+ 26
- 0
server_side/api/modules/databases/complaints.json
File diff suppressed because it is too large
View File


+ 3
- 0
server_side/api/modules/databases/denunciations.json View File

@ -0,0 +1,3 @@
{
}

server_side/apia/modules/databases/locations.json → server_side/api/modules/databases/locations.json View File


server_side/apia/modules/databases/park_data.json → server_side/api/modules/databases/park_data.json View File


server_side/apia/modules/databases/ratings.json → server_side/api/modules/databases/ratings.json View File


server_side/apia/modules/databases/users.json → server_side/api/modules/databases/users.json View File


server_side/apia/modules/databases/votings.json → server_side/api/modules/databases/votings.json View File


server_side/apia/modules/denunciation.py → server_side/api/modules/denunciation.py View File


server_side/apia/modules/image5.jpg → server_side/api/modules/image5.jpg View File


server_side/apia/modules/lot.jpg → server_side/api/modules/lot.jpg View File


server_side/apia/modules/navigation.py → server_side/api/modules/navigation.py View File


server_side/apia/modules/rating_system.py → server_side/api/modules/rating_system.py View File


server_side/apia/modules/smart_park.py → server_side/api/modules/smart_park.py View File


server_side/apia/modules/user_info.py → server_side/api/modules/user_info.py View File


server_side/apia/modules/utility.py → server_side/api/modules/utility.py View File


server_side/apia/modules/utils.py → server_side/api/modules/utils.py View File


server_side/apia/modules/voting_system.py → server_side/api/modules/voting_system.py View File


server_side/apia/requirements.txt → server_side/api/requirements.txt View File


BIN
server_side/apia/images/9vard12ty0ad2yvwp3q53rsf3h43r2vq.png View File

Width: 256  |  Height: 256  |  Size: 7.5 KiB

BIN
server_side/apia/images/9vard12ty0ad2yvwp3q53rsf3h43r2vq_qr.png View File

Width: 410  |  Height: 410  |  Size: 802 B

+ 0
- 0
server_side/apia/modules/__init__.py View File


+ 0
- 206
server_side/apia/modules/databases/denunciations.json
File diff suppressed because it is too large
View File


+ 84
- 60
traffic_analyzer/ambulance_detect.py View File

@ -2,15 +2,24 @@
import pickle
import threading
import numpy as np
import os
import sys
import tensorflow as tf
import cv2
from distutils.version import StrictVersion
from utils import label_map_util
import os
import numpy as np
from utils import label_map_util
from utils import visualization_utils as vis_util
if sys.platform == "win32":
    import tensorflow as tf
    from distutils.version import StrictVersion
    if StrictVersion(tf.__version__) < StrictVersion('1.12.0'):
        raise ImportError('Please upgrade your TensorFlow installation to v1.12.*.')
else:
    import psutil
import json
import base64
@ -26,8 +35,7 @@ sys.path.append("..")
import time
from object_detection.utils import ops as utils_ops
if StrictVersion(tf.__version__) < StrictVersion('1.12.0'):
    raise ImportError('Please upgrade your TensorFlow installation to v1.12.*.')
# What model to download.
@ -38,6 +46,7 @@ encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
#MODEL_NAME = "ssd_inception_v2_coco_2017_11_17" # not bad and fast #MODEL_NAME = "ssd_inception_v2_coco_2017_11_17" # not bad and fast
MODEL_NAME = "rfcn_resnet101_coco_11_06_2017" # WORKS BEST BUT takes 4 times longer per image MODEL_NAME = "rfcn_resnet101_coco_11_06_2017" # WORKS BEST BUT takes 4 times longer per image
#MODEL_NAME = "faster_rcnn_resnet101_coco_11_06_2017" # too slow #MODEL_NAME = "faster_rcnn_resnet101_coco_11_06_2017" # too slow
#MODEL_NAME = "ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20"
MODEL_FILE = MODEL_NAME + '.tar.gz' MODEL_FILE = MODEL_NAME + '.tar.gz'
DOWNLOAD_BASE = 'http://download.tensorflow.org/models/object_detection/' DOWNLOAD_BASE = 'http://download.tensorflow.org/models/object_detection/'
@ -47,35 +56,44 @@ PATH_TO_FROZEN_GRAPH = MODEL_NAME + '/frozen_inference_graph.pb'
# List of the strings that is used to add correct label for each box.
PATH_TO_LABELS = os.path.join('object_detection/data', 'mscoco_label_map.pbtxt')
detection_graph = tf.Graph()
with detection_graph.as_default():
    od_graph_def = tf.GraphDef()
    with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid:
        serialized_graph = fid.read()
        od_graph_def.ParseFromString(serialized_graph)
        tf.import_graph_def(od_graph_def, name='')
category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS, use_display_name=True)
def load_image_into_numpy_array(image):
    (im_width, im_height) = image.size
    return np.array(image.getdata()).reshape(
        (im_height, im_width, 3)).astype(np.uint8)
# For the sake of simplicity we will use only 2 images:
# image1.jpg
# image2.jpg
# If you want to test the code with your images, just add path to the images to the TEST_IMAGE_PATHS.
PATH_TO_TEST_IMAGES_DIR = 'object_detection/test_images'
TEST_IMAGE_PATHS = [ os.path.join(PATH_TO_TEST_IMAGES_DIR, 'image{}.jpg'.format(i)) for i in range(3, 6) ]
# Size, in inches, of the output images.
if sys.platform == "win32":
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')
    def load_image_into_numpy_array(image):
        (im_width, im_height) = image.size
        return np.array(image.getdata()).reshape(
            (im_height, im_width, 3)).astype(np.uint8)
    # For the sake of simplicity we will use only 2 images:
    # image1.jpg
    # image2.jpg
    # If you want to test the code with your images, just add path to the images to the TEST_IMAGE_PATHS.
    PATH_TO_TEST_IMAGES_DIR = 'object_detection/test_images'
    TEST_IMAGE_PATHS = [ os.path.join(PATH_TO_TEST_IMAGES_DIR, 'image{}.jpg'.format(i)) for i in range(3, 6) ]
    # Size, in inches, of the output images.
sess = 0
switch = 1
data = {"gpu_temp":"10C","gpu_load":"15%","cpu_temp":"47C","cpu_load":"15%","mem_temp":"NaN","mem_load":"17%","fan_speed":"10000RPM"}
def get_temps():
    global data
    if not sys.platform == "win32":
        temps = psutil.sensors_temperatures()
        data["cpu_temp"] = str(int(temps["dell_smm"][0][1]))+"°C"
        data["cpu_load"] = str(psutil.cpu_percent())+"%"
        data["mem_load"] = str(dict(psutil.virtual_memory()._asdict())["percent"])+"%"
        data["fan_speed"] = str(psutil.sensors_fans()["dell_smm"][0][1])+"RPM"
def run_inference_for_single_image(image, graph):
@ -138,38 +156,46 @@ def listener(port=8385):
    print('Bye!')
cut = [115, 100, 400, 150]
cut_send = [0, 0, 0, 0]
img_counter = 0
socket_switch = True
dont_send = True
cam = cv2.VideoCapture(0)
#cam = cv2.VideoCapture('amb_1.mp4')
thread = threading.Thread(target=listener)
thread.start()
with detection_graph.as_default():
    sess = tf.Session()
if sys.platform == "win32":
    with detection_graph.as_default():
        sess = tf.Session()
    cam = cv2.VideoCapture(0)
else:
    cam = cv2.VideoCapture('debug_data/amb_1.mp4')
    with open("debug_data/frame_data.pkl","rb") as pkl_file:
        frame_data = pickle.load(pkl_file)
switch = 0
get_temps()
amb_center = {'x': (400 + 550)/2, 'y': (115+215)/2}
a = 0
# frame_data = []
#while 1:
# ret, image = cam.read()
for i in os.listdir('images/'):
if not i.endswith('.jpg'):
reps = -1
reps_vid = 0
while 1:
ret,image = cam.read()
reps_vid += 1
if not sys.platform == "win32" and not reps_vid % 2 == 0:
continue
image = cv2.imread('images/' + i)
a += 1
cv2.imwrite(f'images/{a}.png', image)
reps += 1
try: # junction (Kavşak)
t1 = time.time()
image_np = image
image_np_expanded = np.expand_dims(image_np, axis=0)
output_dict = run_inference_for_single_image(image_np_expanded, detection_graph)
if sys.platform == "win32":
    output_dict = run_inference_for_single_image(image_np_expanded, detection_graph)
else:
    output_dict = frame_data[reps]
height, width, channels = image_np.shape
# frame_data.append(output_dict)
out_dict = {'detection_boxes': [], 'detection_classes': [], 'detection_scores': []}
for i in output_dict['detection_classes']:
@ -188,8 +214,6 @@ for i in os.listdir('images/'):
cont = True
continue
if cont:
    continue
out_dict['detection_classes'].append(i)
out_dict['detection_boxes'].append(output_dict['detection_boxes'][index])
out_dict['detection_scores'].append(output_dict['detection_scores'][index])
@ -197,15 +221,9 @@ for i in os.listdir('images/'):
out_dict['detection_classes'] = np.array(out_dict['detection_classes'])
out_dict['detection_boxes'] = np.array(out_dict['detection_boxes'])
out_dict['detection_scores'] = np.array(out_dict['detection_scores'])
for i in out_dict['detection_boxes']:
    (left, right, top, bottom) = (i[1] * width, i[3] * width,
        i[0] * height, i[2] * height)
    with open(f'images/{a}_coordinates.txt', 'a') as f:
        f.write(','.join(map(int, [left, right, top, bottom])))
    if abs(((left + right)/2) - amb_center['x']) < 15 and abs(((top + bottom)/2) - amb_center['y']) < 15:
        print('Ambulance found!')
print(len(out_dict['detection_classes']), ' cars.')
vis_util.visualize_boxes_and_labels_on_image_array(
image_np,
out_dict['detection_boxes'],
@ -224,13 +242,14 @@ for i in os.listdir('images/'):
t2 = time.time()
print("time taken for {}".format(t2-t1))
if dont_send:
    continue
if not sys.platform == "win32":
    time.sleep(0.1-(t2-t1))
send_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
if socket_switch:
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('192.168.1.36', 8485))
client_socket.settimeout(0.1)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')
socket_switch = False
except:
@ -254,16 +273,21 @@ for i in os.listdir('images/'):
if img_counter % 10 == 0:
    get_temps()
except Exception as e:
print(e)
if hasattr(e, 'message'):
    print(e.message)
else:
    print(e)
break
if not socket_switch:
    client_socket.sendall(b"Bye\n")
cam.release()
# with open('frame_data.pkl', 'wb') as f:
#     pickle.dump(frame_data, f)
cv2.destroyAllWindows()
cam.release()
kill = False


traffic_analyzer/amb_1.mp4 → traffic_analyzer/debug_data/amb_1.mp4 View File


traffic_analyzer/amb_2.mp4 → traffic_analyzer/debug_data/amb_2.mp4 View File


traffic_analyzer/frame_data.pkl → traffic_analyzer/debug_data/frame_data.pkl View File


server_side/apia/__init__.py~f939ad8155badff39792aaabe628f74142004970 → traffic_analyzer/images/1_coordinates.txt View File


+ 626
- 0
traffic_analyzer/images/coordinates.json View File

@ -0,0 +1,626 @@
{
"ambulance-close_high-0": {
"x1": 272,
"y1": 187,
"x2": 383,
"y2": 421
},
"ambulance-close_high-30": {
"x1": 207,
"y1": 191,
"x2": 388,
"y2": 424
},
"ambulance-close_high-60": {
"x1": 204,
"y1": 194,
"x2": 423,
"y2": 412
},
"ambulance-close_high-90": {
"x1": 172,
"y1": 218,
"x2": 435,
"y2": 374
},
"ambulance-close_high-120": {
"x1": 178,
"y1": 219,
"x2": 398,
"y2": 403
},
"ambulance-close_high-150": {
"x1": 217,
"y1": 219,
"x2": 407,
"y2": 407
},
"ambulance-close_high-180": {
"x1": 278,
"y1": 223,
"x2": 381,
"y2": 429
},
"ambulance-close_high-210": {
"x1": 269,
"y1": 209,
"x2": 436,
"y2": 414
},
"ambulance-close_high-240": {
"x1": 242,
"y1": 236,
"x2": 457,
"y2": 434
},
"ambulance-close_high-270": {
"x1": 224,
"y1": 235,
"x2": 462,
"y2": 379
},
"ambulance-close_high-300": {
"x1": 243,
"y1": 211,
"x2": 476,
"y2": 423
},
"ambulance-close_high-330": {
"x1": 265,
"y1": 197,
"x2": 448,
"y2": 434
},
"ambulance-close_high-360": {
"x1": 278,
"y1": 192,
"x2": 403,
"y2": 427
},
"ambulance-far_high-0": {
"x1": 351,
"y1": 189,
"x2": 430,
"y2": 315
},
"ambulance-far_high-30": {
"x1": 321,
"y1": 185,
"x2": 420,
"y2": 318
},
"ambulance-far_high-60": {
"x1": 309,
"y1": 185,
"x2": 447,
"y2": 303
},
"ambulance-far_high-90": {
"x1": 306,
"y1": 198,
"x2": 461,
"y2": 302
},
"ambulance-far_high-120": {
"x1": 286,
"y1": 200,
"x2": 455,
"y2": 305
},
"ambulance-far_high-150": {
"x1": 303,
"y1": 195,
"x2": 455,
"y2": 308
},
"ambulance-far_high-180": {
"x1": 343,
"y1": 197,
"x2": 433,
"y2": 322
},
"ambulance-far_high-210": {
"x1": 333,
"y1": 201,
"x2": 454,
"y2": 316
},
"ambulance-far_high-240": {
"x1": 320,
"y1": 207,
"x2": 471,
"y2": 313
},
"ambulance-far_high-270": {
"x1": 327,
"y1": 203,
"x2": 485,
"y2": 302
},
"ambulance-far_high-300": {
"x1": 306,
"y1": 190,
"x2": 456,
"y2": 309
},
"ambulance-far_high-330": {
"x1": 308,
"y1": 190,
"x2": 453,
"y2": 317
},
"ambulance-far_high-360": {
"x1": 356,
"y1": 183,
"x2": 424,
"y2": 319
},
"ambulance-close_low-0": {
"x1": 217,
"y1": 132,
"x2": 341,
"y2": 394
},
"ambulance-close_low-30": {
"x1": 149,
"y1": 120,
"x2": 370,
"y2": 388
},
"ambulance-close_low-60": {
"x1": 146,
"y1": 125,
"x2": 419,
"y2": 376
},
"ambulance-close_low-90": {
"x1": 94,
"y1": 147,
"x2": 405,
"y2": 334
},
"ambulance-close_low-120": {
"x1": 127,
"y1": 155,
"x2": 410,
"y2": 346
},
"ambulance-close_low-150": {
"x1": 151,
"y1": 146,
"x2": 391,
"y2": 363
},
"ambulance-close_low-180": {
"x1": 207,
"y1": 150,
"x2": 345,
"y2": 385
},
"ambulance-close_low-210": {
"x1": 102,
"y1": 167,
"x2": 388,
"y2": 381
},
"ambulance-close_low-240": {
"x1": 111,
"y1": 153,
"x2": 412,
"y2": 348
},
"ambulance-close_low-270": {
"x1": 126,
"y1": 140,
"x2": 420,
"y2": 332
},
"ambulance-close_low-300": {
"x1": 146,
"y1": 141,
"x2": 428,
"y2": 379
},
"ambulance-close_low-330": {
"x1": 171,
"y1": 110,
"x2": 404,
"y2": 382
},
"ambulance-close_low-360": {
"x1": 228,
"y1": 112,
"x2": 362,
"y2": 386
},
"ambulance-far_low-0": {
"x1": 352,
"y1": 59,
"x2": 436,
"y2": 201
},
"ambulance-far_low-30": {
"x1": 359,
"y1": 60,
"x2": 437,
"y2": 194
},
"ambulance-far_low-60": {
"x1": 281,
"y1": 63,
"x2": 459,
"y2": 191
},
"ambulance-far_low-90": {
"x1": 295,
"y1": 64,
"x2": 472,
"y2": 174
},
"ambulance-far_low-120": {
"x1": 297,
"y1": 65,
"x2": 469,
"y2": 179
},
"ambulance-far_low-150": {
"x1": 297,
"y1": 61,
"x2": 463,
"y2": 183
},
"ambulance-far_low-180": {
"x1": 349,
"y1": 71,
"x2": 430,
"y2": 194
},
"ambulance-far_low-210": {
"x1": 315,
"y1": 66,
"x2": 468,
"y2": 188
},
"ambulance-far_low-240": {
"x1": 316,
"y1": 68,
"x2": 492,
"y2": 187
},
"ambulance-far_low-270": {
"x1": 308,
"y1": 60,
"x2": 475,
"y2": 183
},
"ambulance-far_low-300": {
"x1": 299,
"y1": 59,
"x2": 479,
"y2": 186
},
"ambulance-far_low-330": {
"x1": 309,
"y1": 65,
"x2": 459,
"y2": 196
},
"ambulance-far_low-360": {
"x1": 358,
"y1": 52,
"x2": 431,
"y2": 199
},
"car1-close_high-0": {
"x1": 289,
"y1": 87,
"x2": 358,
"y2": 225
},
"car1-close_high-30": {
"x1": 227,
"y1": 102,
"x2": 374,
"y2": 226
},
"car1-close_high-60": {
"x1": 207,
"y1": 103,
"x2": 354,
"y2": 214
},
"car1-close_high-90": {
"x1": 235,
"y1": 104,
"x2": 399,
"y2": 186
},
"car1-close_high-120": {
"x1": 225,
"y1": 96,
"x2": 371,
"y2": 203
},
"car1-close_high-150": {
"x1": 240,
"y1": 82,
"x2": 362,
"y2": 201
},
"car1-close_high-180": {
"x1": 283,
"y1": 72,
"x2": 357,
"y2": 203
},
"car1-close_high-210": {
"x1": 259,
"y1": 88,
"x2": 389,
"y2": 202
},
"car1-close_high-240": {
"x1": 278,
"y1": 92,
"x2": 419,
"y2": 193
},
"car1-close_high-270": {
"x1": 248,
"y1": 104,
"x2": 399,
"y2": 187
},
"car1-close_high-300": {
"x1": 233,
"y1": 98,
"x2": 389,
"y2": 198
},
"car1-close_high-330": {
"x1": 247,
"y1": 86,
"x2": 388,
"y2": 207
},
"car1-close_high-360": {
"x1": 306,
"y1": 79,
"x2": 371,
"y2": 213
},
"car1-far_high-0": {
"x1": 374,
"y1": 189,
"x2": 419,
"y2": 252
},
"car1-far_high-30": {
"x1": 350,
"y1": 181,
"x2": 424,
"y2": 246
},
"car1-far_high-60": {
"x1": 326,
"y1": 186,
"x2": 418,
"y2": 239
},
"car1-far_high-90": {
"x1": 332,
"y1": 183,
"x2": 437,
"y2": 237
},
"car1-far_high-120": {
"x1": 335,
"y1": 173,
"x2": 430,
"y2": 232
},
"car1-far_high-150": {
"x1": 345,
"y1": 182,
"x2": 433,
"y2": 230
},
"car1-far_high-180": {
"x1": 371,
"y1": 172,
"x2": 421,
"y2": 240
},
"car1-far_high-210": {
"x1": 359,
"y1": 174,
"x2": 435,
"y2": 240
},
"car1-far_high-240": {
"x1": 355,
"y1": 187,
"x2": 441,
"y2": 237
},
"car1-far_high-270": {
"x1": 342,
"y1": 184,
"x2": 447,
"y2": 229
},
"car1-far_high-300": {
"x1": 348,
"y1": 183,
"x2": 446,
"y2": 241
},
"car1-far_high-330": {
"x1": 342,
"y1": 177,
"x2": 441,
"y2": 245
},
"car1-far_high-360": {
"x1": 368,
"y1": 176,
"x2": 417,
"y2": 252
},
"car1-close_low-0": {
"x1": 294,
"y1": 168,
"x2": 369,
"y2": 330
},
"car1-close_low-30": {
"x1": 244,
"y1": 176,
"x2": 380,
"y2": 314
},
"car1-close_low-60": {
"x1": 210,
"y1": 178,
"x2": 378,
"y2": 315
},
"car1-close_low-90": {
"x1": 215,
"y1": 186,
"x2": 401,
"y2": 283
},
"car1-close_low-120": {
"x1": 216,
"y1": 171,
"x2": 395,
"y2": 291
},
"car1-close_low-150": {
"x1": 214,
"y1": 168,
"x2": 394,
"y2": 287
},
"car1-close_low-180": {
"x1": 307,
"y1": 172,
"x2": 387,
"y2": 307
},
"car1-close_low-210": {
"x1": 267,
"y1": 173,
"x2": 413,
"y2": 295
},
"car1-close_low-240": {
"x1": 269,
"y1": 183,
"x2": 439,
"y2": 290
},
"car1-close_low-270": {
"x1": 271,
"y1": 195,
"x2": 455,
"y2": 276
},
"car1-close_low-300": {
"x1": 244,
"y1": 166,
"x2": 424,
"y2": 295
},
"car1-close_low-330": {
"x1": 266,
"y1": 176,
"x2": 402,
"y2": 311
},
"car1-close_low-360": {
"x1": 314,
"y1": 168,
"x2": 387,
"y2": 311
},
"car1-far_low-0": {
"x1": 405,
"y1": 61,
"x2": 460,
"y2": 136
},
"car1-far_low-30": {
"x1": 381,
"y1": 56,
"x2": 460,
"y2": 129
},
"car1-far_low-60": {
"x1": 364,
"y1": 67,
"x2": 463,
"y2": 126
},
"car1-far_low-90": {
"x1": 368,
"y1": 70,
"x2": 477,
"y2": 118
},
"car1-far_low-120": {
"x1": 371,
"y1": 64,
"x2": 477,
"y2": 125
},
"car1-far_low-150": {
"x1": 383,
"y1": 68,
"x2": 466,
"y2": 125
},
"car1-far_low-180": {
"x1": 409,
"y1": 62,
"x2": 469,
"y2": 131
},
"car1-far_low-210": {
"x1": 390,
"y1": 64,
"x2": 479,
"y2": 130
},
"car1-far_low-240": {
"x1": 389,
"y1": 71,
"x2": 492,
"y2": 120
},
"car1-far_low-270": {
"x1": 389,
"y1": 66,
"x2": 501,
"y2": 118
},
"car1-far_low-300": {
"x1": 388,
"y1": 64,
"x2": 500,
"y2": 128
},
"car1-far_low-330": {
"x1": 380,
"y1": 61,
"x2": 483,
"y2": 130
},
"car1-far_low-360": {
"x1": 424,
"y1": 58,
"x2": 471,
"y2": 134
}
}

+ 95
- 0
traffic_analyzer/images/train_image_taker.py View File

@ -0,0 +1,95 @@
import cv2
import time
import os
import numpy as np
import json

vehicles = ["ambulance", "car1"]
sides = [str(x) for x in range(0,361,30)]
distances = ["close_high","far_high","close_low","far_low"]

def select_rect(im):
    rects = []
    scale_percent = 100
    width = int(im.shape[1] * scale_percent / 100)
    height = int(im.shape[0] * scale_percent / 100)

    # Select ROI
    fromCenter = False
    r = cv2.selectROI(cv2.resize(im,(width,height)))

    # Crop image
    if(r == (0,0,0,0)):
        cv2.destroyAllWindows()
    imCrop = im[int(r[1]*100/scale_percent):int(r[1]*100/scale_percent+r[3]*100/scale_percent), int(r[0]*100/scale_percent):int(r[0]*100/scale_percent+r[2]*100/scale_percent)]

    # Display cropped image
    cv2.imshow("Image", imCrop)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
    for i in r:
        rects.append(int(i)*int(100/scale_percent))
    print(rects)
    locs = {
        "x1": rects[0],
        "y1": rects[1],
        "x2": rects[0]+rects[2],
        "y2": rects[1]+rects[3],
    }
    return locs

def take_image():
    cam_no = 0
    while 1:
        cam = cv2.VideoCapture(cam_no)
        if not (cam is None or not cam.isOpened()):
            break
        cam_no += 1
    for vehicle in vehicles:
        if vehicle == "ambulance":
            continue
        if not os.path.exists(vehicle):
            os.makedirs(vehicle)
        for distance in distances:
            for side in sides:
                for i in range(1,3):
                    ret,img = cam.read()
                    cv2.imwrite("{}/{}-{}({}).jpg".format(vehicle,distance,side,i),img)
                    cv2.imshow("current",img)
                    ex_c = [27, ord("q"), ord("Q")]
                    if cv2.waitKey(1) & 0xFF in ex_c:
                        break
                print("Took side {}:distance:{}, waiting 7 seconds".format(side,distance))
                time.sleep(7)
            print("Finished distance:"+distance)
            while not cv2.waitKey(1) & 0xFF in ex_c:
                ret,img = cam.read()
                cv2.imshow("current",img)
        print("Finished vehicle:"+vehicle)
        while not cv2.waitKey(1) & 0xFF in ex_c:
            ret,img = cam.read()
            cv2.imshow("current",img)

def cut_image():
    cut_rects = {}
    for vehicle in vehicles:
        if not os.path.exists(vehicle):
            os.makedirs(vehicle)
        for distance in distances:
            for side in sides:
                img = cv2.imread("{}/{}-{}({}).jpg".format(vehicle,distance,side,1))
                cut_rects[vehicle + "-" + distance + "-" + side] = select_rect(img)
    cv2.destroyAllWindows()
    return cut_rects

coordinates = cut_image()
print(json.dumps(coordinates,indent=4))
with open("coordinates.json","w") as file:
    file.write(json.dumps(coordinates,indent=4))
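
As a quick sanity check of the annotations above, the coordinates.json entries written by cut_image() (keyed vehicle-distance-side) can be drawn back onto the captured frames. A minimal sketch, assuming the images sit in the per-vehicle folders created by take_image() next to coordinates.json:

# Sketch: overlay the saved ROIs on the first shot of each pose.
# Paths follow the "{vehicle}/{distance}-{side}(1).jpg" pattern used above; adjust if stored elsewhere.
import json
import cv2

with open("coordinates.json") as f:
    coords = json.load(f)

for key, box in coords.items():
    vehicle, distance, side = key.split("-")
    img = cv2.imread("{}/{}-{}(1).jpg".format(vehicle, distance, side))
    if img is None:
        continue  # image not found; skip silently
    cv2.rectangle(img, (box["x1"], box["y1"]), (box["x2"], box["y2"]), (0, 255, 0), 2)
    cv2.imshow(key, img)
    cv2.waitKey(0)
    cv2.destroyAllWindows()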

+ 2
- 2
traffic_analyzer/object_detection/data/mscoco_label_map.pbtxt View File

@ -26,7 +26,7 @@ item {
item {
name: "/m/01bjv"
id: 6
display_name: "bus"
display_name: "ambulance"
}
item {
name: "/m/07jdr"
@ -36,7 +36,7 @@ item {
item {
name: "/m/07r04"
id: 8
display_name: "truck"
display_name: "ambulance"
}
item {
name: "/m/019jd"


+ 0
- 53
traffic_analyzer/receive.py View File

@ -1,53 +0,0 @@
import socket
import cv2
import pickle
import struct ## new

HOST = ''
PORT = 8485

s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
print('Socket created')

s.bind((HOST,PORT))
print('Socket bind complete')
s.listen(10)
print('Socket now listening')

data = b""
payload_size = struct.calcsize(">L")
print("payload_size: {}".format(payload_size))
switch = True
while True:
    while switch:
        conn,addr = s.accept()
        switch = False
    try:
        while len(data) < payload_size:
            print("Recv: {}".format(len(data)))
            data += conn.recv(4096)

        print("Done Recv: {}".format(len(data)))
        packed_msg_size = data[:payload_size]
        data = data[payload_size:]
        msg_size = struct.unpack(">L", packed_msg_size)[0]
        print("msg_size: {}".format(msg_size))
        while len(data) < msg_size:
            try:
                data += conn.recv(4096)
            except:
                pass
        frame_data = data[:msg_size]
        data = data[msg_size:]

        frame = pickle.loads(frame_data, fix_imports=True, encoding="bytes")
        frame = cv2.imdecode(frame, cv2.IMREAD_COLOR)
        cv2.imshow('ImageWindow',frame)
        ex_c = [27, ord("q"), ord("Q")]
        if cv2.waitKey(1) & 0xFF in ex_c:
            break
    except:
        switch = True

+ 0
- 11
traffic_analyzer/saver.py View File

@ -1,11 +0,0 @@
import os
import cv2
cap = cv2.VideoCapture(0)
frame_width = int(cap.get(3))
frame_height = int(cap.get(4))
while True:
    ret, image = cap.read()

+ 0
- 112
traffic_analyzer/sender.py View File

@ -1,112 +0,0 @@
import cv2
import socket
import json
import base64
from PIL import Image
from io import BytesIO
import psutil
<<<<<<< Updated upstream
<<<<<<< Updated upstream
import multiprocessing
cam = cv2.VideoCapture(0)
=======
cam = cv2.VideoCapture(1)
>>>>>>> Stashed changes
def open_switch():
=======
cam = cv2.VideoCapture(1)
>>>>>>> Stashed changes
HOST = '127.0.0.1' # Standard loopback interface address (localhost)
PORT = 8385 # Port to listen on (non-privileged ports are > 1023)
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind((HOST, PORT))
s.listen()
while 1:
conn, addr = s.accept()
with conn:
while True:
data = conn.recv(1024)
if not data:
break
conn.sendall(data)
img_counter = 0
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
socket_switch = True
cut=[-175,-1,-175,-1]
cut_send = [0,0,0,0]
data = {"gpu_temp":"10C","gpu_load":"15%","cpu_temp":"47C","cpu_load":"15%","mem_temp":"NaN","mem_load":"17%","fan_speed":"10000RPM"}
def get_temps():
global data
temps = psutil.sensors_temperatures()
data["cpu_temp"] = str(int(temps["dell_smm"][0][1]))+"°C"
data["cpu_load"] = str(psutil.cpu_percent())+"%"
data["mem_load"] = str(dict(psutil.virtual_memory()._asdict())["percent"])+"%"
data["fan_speed"] = str(psutil.sensors_fans()["dell_smm"][0][1])+"RPM"
p1 = multiprocessing.Process(target=open_switch)
p1.start()
while True:
try:
ret, frame = cam.read()
lens = [len(frame),0,len(frame[0])]
for i in range(0,len(cut),2):
if cut[i]<0:
cut_send[i] = lens[i] + cut[i]
cut_send[i+1] = abs(cut[i])-abs(cut[i+1])
backup = frame
frame = cv2.cvtColor(frame,cv2.COLOR_BGR2RGB)
crop_img = frame.copy(order='C')
crop_img = Image.fromarray(crop_img,"RGB")
buffered = BytesIO()
crop_img.save(buffered, format="JPEG")
img = base64.b64encode(buffered.getvalue()).decode("ascii")
frame_cut=backup[cut[0]:cut[1],cut[2]:cut[3]]
cv2.imshow("base",backup)
cv2.imshow("cut",frame_cut)
ex_c = [27, ord("q"), ord("Q")]
if cv2.waitKey(1) & 0xFF in ex_c:
break
if socket_switch:
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('10.10.26.163', 8485))
connection = client_socket.makefile('wb')
socket_switch = False
except:
socket_switch=True
continue
try:
client_socket.sendall(json.dumps({"image_full":img,"image_sizes":{"x":cut_send[2],"y":cut_send[0],"width":cut_send[3],"height":cut_send[1]},"load":data}).encode('gbk')+b"\n")
print(img)
except:
socket_switch=True
img_counter += 1
if img_counter % 10 ==0:
get_temps()
except KeyboardInterrupt:
if not socket_switch:
client_socket.sendall(b"Bye\n")
cam.release()
p1.terminate()
break
cv2.destroyAllWindows()
p1.terminate()

+ 3
- 0
traffic_analyzer/traffic_analyzer.iml View File

@ -6,4 +6,7 @@
<orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" /> <orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
</component> </component>
<component name="PyDocumentationSettings">
<option name="renderExternalDocumentation" value="true" />
</component>
</module> </module>

+ 0
- 66
traffic_analyzer/train_image_taker.py View File

@ -1,66 +0,0 @@
import cv2
import time
import os
import numpy as np

vehicles = ["ambulance", "car1"]
sides = [str(x) for x in range(0,361,30)]
distances = ["close_high","far_high","close_low","far_low"]

cam_no = 0
while 1:
    cam = cv2.VideoCapture(cam_no)
    if not (cam is None or not cam.isOpened()):
        break
    cam_no += 1

def take_image():
    for vehicle in vehicles:
        if vehicle == "ambulance":
            continue
        if not os.path.exists("images\\"+vehicle):
            os.makedirs("images\\"+vehicle)
        for distance in distances:
            for side in sides:
                for i in range(1,3):
                    ret,img = cam.read()
                    cv2.imwrite("images\\{}\\{}-{}({}).jpg".format(vehicle,distance,side,i),img)
                    cv2.imshow("current",img)
                    ex_c = [27, ord("q"), ord("Q")]
                    if cv2.waitKey(1) & 0xFF in ex_c:
                        break
                print("Took side {}:distance:{}, waiting 7 seconds".format(side,distance))
                time.sleep(7)
            print("Finished distance:"+distance)
            while not cv2.waitKey(1) & 0xFF in ex_c:
                ret,img = cam.read()
                cv2.imshow("current",img)
        print("Finished vehicle:"+vehicle)
        while not cv2.waitKey(1) & 0xFF in ex_c:
            ret,img = cam.read()
            cv2.imshow("current",img)

def cut_image():
    for vehicle in vehicles:
        images = []
        image_main = None
        if not os.path.exists("images\\"+vehicle):
            os.makedirs("images\\"+vehicle)
        for distance in distances:
            for side in sides:
                for i in range(1,3):
                    img = cv2.imread("images\\{}\\{}-{}({}).jpg".format(vehicle,distance,side,i))
                    images.append(img)
        image_main = np.zeros_like(images[0])
        sums = np.array(image_main, dtype='int64')
        for i in range(len(images)):
            sums += np.array(images[i],dtype='int64')
        image_main = np.array(sums/(len(images)+1)).astype(uint8)
        cv2.imshow("a",image_main)
        cv2.waitKey(0)

cut_image()

+ 0
- 73
traffic_analyzer/windowsTemp.py View File

@ -1,73 +0,0 @@
import clr  # package pythonnet, not clr
import wmi
import time

openhardwaremonitor_hwtypes = ['Mainboard','SuperIO','CPU','RAM','GpuNvidia','GpuAti','TBalancer','Heatmaster','HDD']
cputhermometer_hwtypes = ['Mainboard','SuperIO','CPU','GpuNvidia','GpuAti','TBalancer','Heatmaster','HDD']
openhardwaremonitor_sensortypes = ['Voltage','Clock','Temperature','Load','Fan','Flow','Control','Level','Factor','Power','Data','SmallData']
cputhermometer_sensortypes = ['Voltage','Clock','Temperature','Load','Fan','Flow','Control','Level']

def initialize_openhardwaremonitor():
    file = 'OpenHardwareMonitorLib'
    clr.AddReference(file)
    from OpenHardwareMonitor import Hardware
    handle = Hardware.Computer()
    handle.MainboardEnabled = True
    handle.CPUEnabled = True
    handle.RAMEnabled = True
    handle.GPUEnabled = True
    handle.HDDEnabled = True
    handle.Open()
    return handle

def initialize_cputhermometer():
    file = r'C:\Users\Tednokent01\Downloads\MyCity\traffic_analyzer\CPUThermometerLib'
    clr.FindAssembly(file)
    clr.AddReference(file)
    from CPUThermometer import Hardware
    handle = Hardware.Computer()
    handle.CPUEnabled = True
    handle.Open()
    return handle

def fetch_stats(handle):
    for i in handle.Hardware:
        i.Update()
        for sensor in i.Sensors:
            parse_sensor(sensor)
        for j in i.SubHardware:
            j.Update()
            for subsensor in j.Sensors:
                parse_sensor(subsensor)

def parse_sensor(sensor):
    if sensor.Value is not None:
        if type(sensor).__module__ == 'CPUThermometer.Hardware':
            sensortypes = cputhermometer_sensortypes
            hardwaretypes = cputhermometer_hwtypes
        elif type(sensor).__module__ == 'OpenHardwareMonitor.Hardware':
            sensortypes = openhardwaremonitor_sensortypes
            hardwaretypes = openhardwaremonitor_hwtypes
        else:
            return
        if sensor.SensorType == sensortypes.index('Temperature'):
            print(u"%s %s Temperature Sensor #%i %s - %s\u00B0C" % (hardwaretypes[sensor.Hardware.HardwareType], sensor.Hardware.Name, sensor.Index, sensor.Name, sensor.Value))

if __name__ == "__main__":
    print("OpenHardwareMonitor:")
    start = time.time()
    #HardwareHandle = initialize_openhardwaremonitor()
    #print(time.time() - start)
    #start = time.time()
    #fetch_stats(HardwareHandle)
    print(time.time() - start)
    print("\nCPUMonitor:")
    CPUHandle = initialize_cputhermometer()
    fetch_stats(CPUHandle)
