diff --git a/analysis/calculations.py b/analysis/calculations.py
index 624d303..8baaa34 100644
--- a/analysis/calculations.py
+++ b/analysis/calculations.py
@@ -1,8 +1,11 @@
import logging
from shapely.geometry import Point, LineString
from shapely import wkb
+from shapely.geometry import shape
from db_connector import RemoteDB
import pandas as pd
+from pyproj import Proj, transform
+from geopy.distance import geodesic
speedLimits = ["T0", "T20", "T30", "T50","T60", "T80", "T100"]
@@ -36,11 +39,11 @@ def get_data(db):
def process_data(sig_speed_df, accident_df):
- result_df = pd.DataFrame(columns= ['TempoLim', 'Accidents_total'])
+ result_df = pd.DataFrame(columns= ['TempoLim', 'Accidents_total', ])
for speed in speedLimits:
print("Checking for zone: " + speed)
filtered_df = sig_speed_df[sig_speed_df["temporegime_technical"].str.contains(speed, case=False, na=False)]
- current_result = count_points_near_multilinestrings(accident_df, filtered_df, 0.000005)
+ current_result = count_points_near_multilinestrings(accident_df, filtered_df, 0.00001)
result_df.loc[len(result_df)] = {'TempoLim': speed, 'Accidents_total': current_result}
print("FINAL RESULT")
print(result_df)
@@ -57,11 +60,31 @@ def count_points_near_multilinestrings(points_df, multilinestrings_df, threshold
result_df = pd.DataFrame(result_counts)
return result_df['CountNear'].sum()
def calculate_sigspeed_length(db):
    """Print the accumulated road length per signaled speed zone.

    For every regime in ``speedLimits`` the matching rows are fetched from
    ``signaled_speeds``, their hex-encoded WKB geometries are decoded, and
    the planar lengths are summed and printed in kilometers.

    Args:
        db: Open database handle exposing ``execute_query(sql)``.
    """
    for speed in speedLimits:
        # `speed` comes from the fixed module-level speedLimits list, so the
        # f-string interpolation cannot inject arbitrary SQL here.
        get_data_sql = f"""
        SELECT wkb_geometry, temporegime_technical
        FROM signaled_speeds
        WHERE temporegime_technical = '{speed}';
        """

        result = db.execute_query(get_data_sql)
        result_df = pd.DataFrame(result)
        # Guard against regimes with no rows: an empty DataFrame has no
        # 'wkb_geometry' column and would raise a KeyError below.
        if result_df.empty:
            print("Length for " + speed + ": 0 km")
            continue
        result_df['wkb_geometry'] = result_df['wkb_geometry'].apply(lambda x: wkb.loads(x, hex=True))
        sigspeed_length = result_df['wkb_geometry'].apply(get_accumulated_distance).sum()
        # Geometry lengths are expressed in the units of the stored CRS —
        # assumed to be meters (LV95 / EPSG:2056) — TODO confirm.
        # Meters -> kilometers is a division by 1000; the previous
        # multiplication by 1000 mislabeled the result as "km".
        sigspeed_length = str(round(sigspeed_length / 1000, 2)) + " km"
        print("Length for " + speed + ": " + sigspeed_length)
+
def get_accumulated_distance(coords_str):
    """Return the planar length of a (multi)line geometry.

    ``coords_str`` may be anything :func:`shapely.geometry.shape` accepts —
    a geo-interface mapping or an already-constructed shapely geometry.
    The returned length is in the units of the geometry's CRS.
    """
    geometry = shape(coords_str)
    return geometry.length
if __name__ == "__main__":
remote_db = RemoteDB()
+
try:
- get_data(remote_db)
+ #get_data(remote_db)
+ calculate_sigspeed_length(remote_db)
except Exception as e:
print(f"Exception {e} in calculations.py")
finally:
diff --git a/analysis/database/accident_copy.sql b/analysis/database/accident_copy.sql
index 2e23c7b..559746c 100644
--- a/analysis/database/accident_copy.sql
+++ b/analysis/database/accident_copy.sql
@@ -3,6 +3,7 @@ drop table if exists accident_copy;
create table accident_copy as
select * from accidents;
alter table accident_copy add severity varchar;
+alter table accident_copy add foreign key (accidentuid) references accidents;
update accident_copy set severity = 'Accident with property damage'
where accidentseveritycategory='as4';
diff --git a/analysis/database/mivcount_copy b/analysis/database/assertions.sql
similarity index 100%
rename from analysis/database/mivcount_copy
rename to analysis/database/assertions.sql
diff --git a/analysis/database/fbcount_copy.sql b/analysis/database/fbcount_copy.sql
index 11065e7..069c346 100644
--- a/analysis/database/fbcount_copy.sql
+++ b/analysis/database/fbcount_copy.sql
@@ -6,10 +6,12 @@ CREATE TABLE fbcount_copy AS
ALTER TABLE fbcount_copy ADD fuss_total INTEGER;
UPDATE fbcount_copy SET fuss_total = fuss_in + fuss_out;
+
+
ALTER TABLE fbcount_copy
DROP COLUMN IF EXISTS fuss_in,
DROP COLUMN IF EXISTS fuss_out,
- ADD PRIMARY KEY (id);
+ ADD FOREIGN KEY (id) REFERENCES footbikecount;
ALTER TABLE fbcount_copy ADD velo_total INTEGER;
UPDATE fbcount_copy SET velo_total = velo_in + velo_out;
diff --git a/analysis/html/index.html b/analysis/html/index.html
new file mode 100644
index 0000000..38a5ea2
--- /dev/null
+++ b/analysis/html/index.html
@@ -0,0 +1,44 @@
+
+
+
+
+
+ DB_23
+
+
+
+
+
+ 0
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/analysis/map.py b/analysis/map.py
index 4edccc1..75581d7 100644
--- a/analysis/map.py
+++ b/analysis/map.py
@@ -3,6 +3,7 @@ import geopandas as gpd
import colorsys
import folium
from folium import plugins
+from pyproj import CRS, Transformer
import logging
from folium.plugins import HeatMap
@@ -46,7 +47,7 @@ gradient = {
0.9: 'red'
}
-speedLimits = ["T0","T20","T30","T50","T60","T80","T100"]
+speedLimits = ["T0", "T20", "T30", "T50", "T60", "T80", "T100"]
color_dict = {
"T0": "red",
"T20": "orange",
@@ -68,7 +69,7 @@ def create_heat_map_with_time(folium_map):
assert not heat_df.empty, f" Heat Dataframe is empty: {heat_df.head(5)}"
add_heat_map_time(heat_df, folium_map)
logger.info(f"Heat map time added to time map.")
- #interactive_map.save("test.html")
+ # interactive_map.save("test.html")
add_signaled_speeds(folium_map)
@@ -76,7 +77,7 @@ def create_heat_map_with_time(folium_map):
add_bike_heat_map_time(folium_map)
- #Pedestrian Part
+ # Pedestrian Part
add_pedestrian_heat_map_time(folium_map)
@@ -84,7 +85,6 @@ def create_heat_map_with_time(folium_map):
def create_heat_map_toggle(folium_map):
-
heat_view_data = get_view("heat")
heat_gdf = gpd.GeoDataFrame(heat_view_data, columns=['latitude', 'longitude', 'year'])
@@ -102,14 +102,15 @@ def create_heat_map_toggle(folium_map):
# Layer Adding Methods ================================================================================================
def add_bike_heat_map_time(folium_map):
-
# Process heat map data
bike_heat_view_data = get_view('bikeheat', 'latitude, longitude, year')
bike_heat_df = gpd.GeoDataFrame(bike_heat_view_data, columns=['latitude', 'longitude', 'year'])
assert not bike_heat_df.empty, f" Heat Dataframe is empty: {bike_heat_df.head(5)}"
- heat_data = [[[row['latitude'], row['longitude'], 0.1] for index, row in bike_heat_df[bike_heat_df['year'] == i].iterrows()] for
- i in range(2011, 2023)]
+ heat_data = [
+ [[row['latitude'], row['longitude'], 0.1] for index, row in bike_heat_df[bike_heat_df['year'] == i].iterrows()]
+ for
+ i in range(2011, 2023)]
logger.debug(f"First element of heat data: {heat_data[0]}")
index = [2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022]
AccidentType = "Bicycles: "
@@ -131,20 +132,20 @@ def add_bike_heat_map_time(folium_map):
def add_pedestrian_heat_map_time(folium_map):
-
# Process heat map data
pedestrian_heat_view_data = get_view("pedestrianheat")
heat_df = gpd.GeoDataFrame(pedestrian_heat_view_data, columns=['latitude', 'longitude', 'year'])
assert not heat_df.empty, f" Heat Dataframe is empty: {heat_df.head(5)}"
- heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()] for
+ heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()]
+ for
i in range(2011, 2023)]
logger.debug(f"First element of PED heat data: {heat_data[0]}")
index = [2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022]
AccidentType = "Pedestrians: "
index = [str(element) for element in index]
index = [AccidentType + element for element in index]
- #gradient =
+ # gradient =
# plot heat map
gradient = generate_hue_gradient(0.2, 5)
hm = plugins.HeatMapWithTime(heat_data,
@@ -161,7 +162,8 @@ def add_pedestrian_heat_map_time(folium_map):
def add_heat_map_time(heat_df, folium_map):
- heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()] for
+ heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()]
+ for
i in range(2011, 2023)]
index = [2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022]
# create heat map
@@ -259,18 +261,73 @@ def generate_hue_gradient(hue, num_colors):
lightness = 0.1 + 0.8 * (i / (num_colors - 1))
saturation = 0.1 + 0.8 * (i / (num_colors - 1))
rgb = colorsys.hls_to_rgb(hue, lightness, saturation)
- gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255))
+ gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgb[0] * 255), int(rgb[1] * 255),
+ int(rgb[2] * 255))
return gradient
+
def generate_contrasting_gradient(num_colors):
- cmap = plt.get_cmap('viridis') # viridis is a map with contrasting colors
+ cmap = plt.get_cmap('viridis') # viridis is a map with contrasting colors
gradient = {}
for i in range(num_colors):
rgba = cmap(i / (num_colors - 1))
- gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgba[0]*255), int(rgba[1]*255), int(rgba[2]*255))
+ gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgba[0] * 255), int(rgba[1] * 255),
+ int(rgba[2] * 255))
return gradient
def add_miv_count_station_locations():
    """Add a toggleable layer of car count stations to ``toggle_map``.

    Reads the average vehicle count per station from ``mivcount``, converts
    the Swiss LV95 station coordinates to WGS84, and places one marker per
    station in a hidden-by-default ``FeatureGroup``.
    """
    get_data_mic_sql = """
    SELECT
        zsid, ekoord, nkoord,
        AVG(anzfahrzeuge) AS average_count
    FROM
        mivcount
    GROUP BY
        zsid, ekoord, nkoord
    """
    remote_db = RemoteDB()
    try:
        miv_result = remote_db.execute_query(get_data_mic_sql)
    finally:
        # Close the connection even if the query raises; previously the
        # handle leaked on any error before the trailing close() call.
        remote_db.close()
    miv_df = pd.DataFrame(miv_result)
    miv_df[['lon', 'lat']] = miv_df.apply(lambda row: convert_to_wgs84(row['ekoord'], row['nkoord']), axis=1)
    miv_df['average_count'] = miv_df['average_count'].apply(lambda x: round(float(x)))
    count_stations_layer = folium.FeatureGroup(name='Count-stations cars', show=False)
    for index, row in miv_df.iterrows():
        # 'show' is a FeatureGroup option, not a Marker option, so it is
        # set on the layer above instead of on each marker.
        folium.Marker(location=[row['lat'], row['lon']],
                      popup="avg. " + str(row['average_count'])).add_to(count_stations_layer)
    count_stations_layer.add_to(toggle_map)
def add_fb_count_station_locations():
    """Add a toggleable layer of pedestrian/bicycle count stations to ``toggle_map``.

    Reads the average pedestrian and bicycle counts per station from
    ``fbcount_copy``, converts the LV95 station coordinates to WGS84, and
    places one marker per station in a hidden-by-default ``FeatureGroup``.
    """
    get_data_mic_sql = """
    SELECT DISTINCT
    ost,
    nord,
    AVG(velo_total) as average_velo_count,
    AVG(fuss_total) as average_fuss_count
    FROM fbcount_copy
    GROUP BY ost,nord;
    """
    remote_db = RemoteDB()
    try:
        FB_result = remote_db.execute_query(get_data_mic_sql)
    finally:
        # Close the connection even if the query raises; previously the
        # handle leaked on any error before the trailing close() call.
        remote_db.close()
    FB_df = pd.DataFrame(FB_result)
    FB_df[['ost', 'nord']] = FB_df.apply(lambda row: convert_to_wgs84(row['ost'], row['nord']), axis=1)
    # NOTE(review): the averages below are formatted but never displayed —
    # the popup is a fixed string. Presumably they were meant to appear in
    # the popup like the car-station layer does; confirm intent.
    FB_df['average_velo_count'] = FB_df['average_velo_count'].apply(lambda x: round(float(x)))
    FB_df['average_velo_count'] = FB_df['average_velo_count'].astype(str)
    FB_df['average_fuss_count'] = FB_df['average_fuss_count'].apply(lambda x: round(float(x)))
    FB_df['average_fuss_count'] = FB_df['average_fuss_count'].astype(str)
    count_stations_layer = folium.FeatureGroup(name='Count-stations pedestrians and bicycles', show=False)
    for index, row in FB_df.iterrows():
        # 'show' is a FeatureGroup option, not a Marker option, so it is
        # set on the layer above instead of on each marker.
        folium.Marker(location=[row['nord'], row['ost']],
                      popup="Bicycle and pedestrian count station").add_to(count_stations_layer)
    count_stations_layer.add_to(toggle_map)
# Reprojection LV95 (EPSG:2056) -> WGS84 (EPSG:4326). Built once at import
# time because convert_to_wgs84 is invoked per dataframe row via .apply();
# constructing CRS/Transformer objects on every call is needlessly slow.
_LV95_TO_WGS84 = Transformer.from_crs(CRS.from_epsg(2056), CRS.from_epsg(4326), always_xy=True)


def convert_to_wgs84(lon, lat):
    """Convert Swiss LV95 coordinates to WGS84 degrees.

    Args:
        lon: LV95 easting (meters).
        lat: LV95 northing (meters).

    Returns:
        pd.Series with keys 'lon' and 'lat' holding WGS84 longitude/latitude.
    """
    lon, lat = _LV95_TO_WGS84.transform(lon, lat)

    return pd.Series({'lon': lon, 'lat': lat})
+
if __name__ == "__main__":
time_map = folium.Map(
@@ -293,11 +350,43 @@ if __name__ == "__main__":
tiles="cartodb positron"
)
+ add_miv_count_station_locations()
+ add_fb_count_station_locations()
#setup_views()
create_heat_map_with_time(time_map)
create_heat_map_toggle(toggle_map)
## Save Maps ============================================================================================
- save_map_as_html(toggle_map, "heat_map_toggle")
+ save_map_as_html(toggle_map, "html/heat_map_toggle")
save_map_as_html(time_map, "html/heat_map_time")
+
+ ## Create Maps with fixed center=============================================================================
+ time_map_fix = folium.Map(
+ location=zurich_coordinates,
+ zoom_start=13,
+ zoom_control=True,
+ dragging=False,
+ scrollWheelZoom=True,
+ doubleClickZoom=False,
+ tiles="cartodb positron"
+ )
+
+ toggle_map_fix = folium.Map(
+ location=zurich_coordinates,
+ zoom_start=13,
+ zoom_control=True,
+ dragging=False,
+ scrollWheelZoom=True,
+ doubleClickZoom=False,
+ tiles="cartodb positron"
+ )
+
+ #setup_views()
+
+ create_heat_map_with_time(time_map_fix)
+ create_heat_map_toggle(toggle_map_fix)
+
+ ## Save Maps ============================================================================================
+ save_map_as_html(toggle_map_fix, "html/heat_map_toggle_fix")
+ save_map_as_html(time_map_fix, "html/heat_map_time_fix")
diff --git a/analysis/plots.py b/analysis/plots.py
index 9980e5e..bf783d9 100644
--- a/analysis/plots.py
+++ b/analysis/plots.py
@@ -20,7 +20,7 @@ def plt_acc_by_year(db):
result = db.execute_query(acc_year_sql)
result_df = pd.DataFrame(result)
- fig = px.bar(result_df, y='year', x='count', orientation='h', title='No. of Accidents per Year')
+ fig = px.bar(result_df, y='year', x='count', orientation='h', title='Total Accidents per Year')
fig.write_image("fig/acc_by_year.png")
fig.write_html("html/acc_by_year.html")
@@ -67,11 +67,11 @@ def plt_acc_by_day_year(db):
df,
x='weekday',
y='count',
- title='Accidents by Weekday',
+ title='Accidents by Weekday over the Years',
animation_frame='year',
labels={'weekday': 'Weekday', 'count': 'Number of Accidents'},
category_orders={'weekday': ['Saturday', 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday']},
- orientation='h'
+ orientation='v'
)
fig.update_yaxes(range=[0, 1000])
# Customize the layout to include a slider
@@ -136,7 +136,7 @@ def plt_acc_by_daytime(db):
result = db.execute_query(acc_weekday_sql)
result_df = pd.DataFrame(result)
- fig = px.bar(result_df, y='hour', x='count', orientation='h', title='Accidents by day')
+ fig = px.bar(result_df, y='hour', x='count', orientation='h', title='Accidents by hour')
fig.write_image("fig/acc_by_daytime.png")
fig.write_html("html/acc_by_daytime.html")