added some plots and calculations

SoulKindred 2024-01-08 15:49:50 +01:00
parent beef5ad2c7
commit 4a39a7c265
3 changed files with 106 additions and 21 deletions

View File

@@ -1,8 +1,11 @@
import logging
from shapely.geometry import Point, LineString
from shapely import wkb
from shapely.geometry import shape
from db_connector import RemoteDB
import pandas as pd
from pyproj import Proj, transform
from geopy.distance import geodesic
speedLimits = ["T0", "T20", "T30", "T50", "T60", "T80", "T100"]
@@ -36,11 +39,11 @@ def get_data(db):
def process_data(sig_speed_df, accident_df):
result_df = pd.DataFrame(columns= ['TempoLim', 'Accidents_total'])
result_df = pd.DataFrame(columns= ['TempoLim', 'Accidents_total', ])
for speed in speedLimits:
print("Checking for zone: " + speed)
filtered_df = sig_speed_df[sig_speed_df["temporegime_technical"].str.contains(speed, case=False, na=False)]
current_result = count_points_near_multilinestrings(accident_df, filtered_df, 0.000005)
current_result = count_points_near_multilinestrings(accident_df, filtered_df, 0.00001)
result_df.loc[len(result_df)] = {'TempoLim': speed, 'Accidents_total': current_result}
print("FINAL RESULT")
print(result_df)
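For a sense of scale on the raised distance threshold (0.000005 to 0.00001): assuming the accident points and speed-regime lines are compared in lon/lat degrees, as the magnitude of the value suggests, 0.00001 degrees is on the order of a metre on the ground near Zurich. A quick check with geopy (coordinates are illustrative, results approximate):

from geopy.distance import geodesic

zurich = (47.37, 8.54)  # illustrative lat/lon near Zurich
print(geodesic(zurich, (47.37 + 0.00001, 8.54)).meters)  # roughly 1.1 m per 1e-5 deg of latitude
print(geodesic(zurich, (47.37, 8.54 + 0.00001)).meters)  # roughly 0.75 m per 1e-5 deg of longitude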
@@ -57,11 +60,31 @@ def count_points_near_multilinestrings(points_df, multilinestrings_df, threshold
result_df = pd.DataFrame(result_counts)
return result_df['CountNear'].sum()
def calculate_sigspeed_length(db):
for speed in speedLimits:
get_data_sql = f"""
SELECT wkb_geometry, temporegime_technical
FROM signaled_speeds
WHERE temporegime_technical = '{speed}';
"""
result = db.execute_query(get_data_sql)
result_df = pd.DataFrame(result)
result_df['wkb_geometry'] = result_df['wkb_geometry'].apply(lambda x: wkb.loads(x, hex=True))
sigspeed_length = result_df['wkb_geometry'].apply(lambda x:get_accumulated_distance(x)).sum()
sigspeed_length = str(round(sigspeed_length * 1000, 2)) + " km"
print("Length for " + speed + ": " + sigspeed_length)
def get_accumulated_distance(coords_str):
polyline_geometry = shape(coords_str)
return polyline_geometry.length
if __name__ == "__main__":
remote_db = RemoteDB()
try:
get_data(remote_db)
#get_data(remote_db)
calculate_sigspeed_length(remote_db)
except Exception as e:
print(f"Exception {e} in calculations.py")
finally:
remote_db.close()
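One caveat on the new length calculation: shapely's .length is reported in the geometry's own coordinate units, so for lon/lat geometries it yields degrees, and multiplying by 1000 does not give kilometres (for LV95 metre coordinates the factor would be 1/1000 instead). A minimal geodesic alternative, assuming the WKB geometries are (Multi)LineStrings with (lon, lat) vertices, which the existing geopy import suggests was the intent:

from geopy.distance import geodesic

def geodesic_length_km(geom):
    # Handles LineString or MultiLineString: sum geodesic segment lengths in km.
    lines = geom.geoms if geom.geom_type == 'MultiLineString' else [geom]
    total = 0.0
    for line in lines:
        coords = list(line.coords)  # shapely stores vertices as (lon, lat)
        total += sum(
            geodesic((lat1, lon1), (lat2, lon2)).kilometers
            for (lon1, lat1), (lon2, lat2) in zip(coords, coords[1:])
        )
    return total

# e.g. result_df['wkb_geometry'].apply(geodesic_length_km).sum()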

View File

@@ -19,12 +19,17 @@
iframe[src="heat_map_time.html"] {
width: 100%;
height: 900px;
}
iframe[src="heat_map_toggle.html"] {
width: 100%;
height: 900px;
}
</style>
</head>
<body>
<iframe src="heat_map_time.html"></iframe>
<iframe src="heat_map_toggle.html"></iframe>0
<iframe src="acc_by_year.html"></iframe>
<iframe src="acc_by_weekday.html"></iframe>
<iframe src="acc_by_daytime.html"></iframe>

View File

@@ -3,6 +3,7 @@ import geopandas as gpd
import colorsys
import folium
from folium import plugins
from pyproj import CRS, Transformer
import logging
from folium.plugins import HeatMap
@@ -23,7 +24,6 @@ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(messag
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
accidents_filepath = "../src/datasets/integrated/Accidents.geojson"
signaled_speeds_filepath = "../src/datasets/integrated/signaled_speeds.geojson.geojson"
@@ -60,7 +60,6 @@ color_dict = {
# Create Maps =========================================================================================================
def create_heat_map_with_time(folium_map):
# Process heat map data
heat_view_data = get_view("heat")
heat_df = gpd.GeoDataFrame(heat_view_data, columns=['latitude', 'longitude', 'year'])
@@ -84,7 +83,6 @@ def create_heat_map_with_time(folium_map):
def create_heat_map_toggle(folium_map):
heat_view_data = get_view("heat")
heat_gdf = gpd.GeoDataFrame(heat_view_data, columns=['latitude', 'longitude', 'year'])
@@ -102,13 +100,14 @@ def create_heat_map_toggle(folium_map):
# Layer Adding Methods ================================================================================================
def add_bike_heat_map_time(folium_map):
# Process heat map data
bike_heat_view_data = get_view('bikeheat', 'latitude, longitude, year')
bike_heat_df = gpd.GeoDataFrame(bike_heat_view_data, columns=['latitude', 'longitude', 'year'])
assert not bike_heat_df.empty, f" Heat Dataframe is empty: {bike_heat_df.head(5)}"
heat_data = [[[row['latitude'], row['longitude'], 0.1] for index, row in bike_heat_df[bike_heat_df['year'] == i].iterrows()] for
heat_data = [
[[row['latitude'], row['longitude'], 0.1] for index, row in bike_heat_df[bike_heat_df['year'] == i].iterrows()]
for
i in range(2011, 2023)]
logger.debug(f"First element of heat data: {heat_data[0]}")
index = [2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022]
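The nested comprehension yields one list of [lat, lon, weight] triples per year from 2011 to 2022, which is the shape folium's HeatMapWithTime expects alongside a parallel index of frame labels. A minimal sketch of how the two are typically combined (layer name and parameters here are illustrative):

from folium import plugins

plugins.HeatMapWithTime(
    heat_data,        # one [[lat, lon, weight], ...] list per time step
    index=index,      # slider labels, here the years 2011-2022
    name='Bike accidents over time',  # illustrative layer name
    radius=15,
    auto_play=False,
).add_to(folium_map)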
@@ -131,13 +130,13 @@ def add_bike_heat_map_time(folium_map):
def add_pedestrian_heat_map_time(folium_map):
# Process heat map data
pedestrian_heat_view_data = get_view("pedestrianheat")
heat_df = gpd.GeoDataFrame(pedestrian_heat_view_data, columns=['latitude', 'longitude', 'year'])
assert not heat_df.empty, f" Heat Dataframe is empty: {heat_df.head(5)}"
heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()] for
heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()]
for
i in range(2011, 2023)]
logger.debug(f"First element of PED heat data: {heat_data[0]}")
index = [2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022]
@@ -161,7 +160,8 @@ def add_pedestrian_heat_map_time(folium_map):
def add_heat_map_time(heat_df, folium_map):
heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()] for
heat_data = [[[row['latitude'], row['longitude'], 0.5] for index, row in heat_df[heat_df['year'] == i].iterrows()]
for
i in range(2011, 2023)]
index = [2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022]
# create heat map
@@ -259,18 +259,73 @@ def generate_hue_gradient(hue, num_colors):
lightness = 0.1 + 0.8 * (i / (num_colors - 1))
saturation = 0.1 + 0.8 * (i / (num_colors - 1))
rgb = colorsys.hls_to_rgb(hue, lightness, saturation)
gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255))
gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgb[0] * 255), int(rgb[1] * 255),
int(rgb[2] * 255))
return gradient
def generate_contrasting_gradient(num_colors):
cmap = plt.get_cmap('viridis') # viridis is a map with contrasting colors
gradient = {}
for i in range(num_colors):
rgba = cmap(i / (num_colors - 1))
gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgba[0]*255), int(rgba[1]*255), int(rgba[2]*255))
gradient[i / (num_colors - 1)] = '#{:02x}{:02x}{:02x}'.format(int(rgba[0] * 255), int(rgba[1] * 255),
int(rgba[2] * 255))
return gradient
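# Both gradient helpers return a dict mapping a position in [0, 1] to a hex colour,
# which is the format folium's heat-map layers accept for their gradient argument.
# Hypothetical usage sketch (function, sample points and layer name are assumptions):
def add_example_gradient_layer(folium_map):
    HeatMap(
        [[47.37, 8.54, 0.5], [47.38, 8.55, 0.8]],  # [lat, lon, weight] sample points
        gradient=generate_contrasting_gradient(6),
        name='Example gradient layer',
    ).add_to(folium_map)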
def add_miv_count_station_locations():
get_data_mic_sql = """
SELECT
zsid, ekoord, nkoord,
AVG(anzfahrzeuge) AS average_count
FROM
mivcount
GROUP BY
zsid, ekoord, nkoord
"""
remote_db = RemoteDB()
miv_result = remote_db.execute_query(get_data_mic_sql)
miv_df = pd.DataFrame(miv_result)
miv_df[['lon', 'lat']] = miv_df.apply(lambda row: convert_to_wgs84(row['ekoord'], row['nkoord']), axis=1)
miv_df['average_count'] = miv_df['average_count'].apply(lambda x: round(float(x)))
count_stations_layer = folium.FeatureGroup(name='Count-stations cars', show=False)
for index, row in miv_df.iterrows():
folium.Marker(location=[row['lat'], row['lon']], popup="avg. " + str(row['average_count']), show=False).add_to(count_stations_layer)
count_stations_layer.add_to(toggle_map)
remote_db.close()
def add_fb_count_station_locations():
get_data_mic_sql = """
SELECT DISTINCT
ost,
nord,
AVG(velo_total) as average_velo_count,
AVG(fuss_total) as average_fuss_count
FROM fbcount_copy
GROUP BY ost,nord;
"""
remote_db = RemoteDB()
FB_result = remote_db.execute_query(get_data_mic_sql)
FB_df = pd.DataFrame(FB_result)
FB_df[['ost', 'nord']] = FB_df.apply(lambda row: convert_to_wgs84(row['ost'], row['nord']), axis=1)
FB_df['average_velo_count'] = FB_df['average_velo_count'].apply(lambda x: round(float(x)))
FB_df['average_velo_count'] = FB_df['average_velo_count'].astype(str)
FB_df['average_fuss_count'] = FB_df['average_fuss_count'].apply(lambda x: round(float(x)))
FB_df['average_fuss_count'] = FB_df['average_fuss_count'].astype(str)
count_stations_layer = folium.FeatureGroup(name='Count-stations pedestrians and bicycles', show=False)
for index, row in FB_df.iterrows():
folium.Marker(location=[row['nord'], row['ost']], popup="Bicycle and pedestrian count station", show=False).add_to(count_stations_layer)
count_stations_layer.add_to(toggle_map)
remote_db.close()
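# Note: the averaged velo/fuss counts computed in add_fb_count_station_locations are not
# surfaced in its fixed popup text; an illustrative alternative (wording is an assumption):
# popup=f"avg. bikes {row['average_velo_count']}, avg. pedestrians {row['average_fuss_count']}"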
def convert_to_wgs84(lon, lat):
swiss_crs = CRS.from_epsg(2056)
wgs84_crs = CRS.from_epsg(4326)
transformer = Transformer.from_crs(swiss_crs, wgs84_crs, always_xy=True)
lon, lat = transformer.transform(lon, lat)
return pd.Series({'lon': lon, 'lat': lat})
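# convert_to_wgs84 maps Swiss LV95 (EPSG:2056) easting/northing to WGS84; with
# always_xy=True the transformer takes and returns (x, y), i.e. (east, north) in and
# (lon, lat) out. Hypothetical sanity check (helper name and tolerance are assumptions):
def _check_wgs84_conversion():
    # The LV95 false origin at Bern (E 2600000, N 1200000) should map to roughly
    # lon 7.44, lat 46.95.
    origin = convert_to_wgs84(2600000, 1200000)
    assert abs(origin['lon'] - 7.44) < 0.01 and abs(origin['lat'] - 46.95) < 0.01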
if __name__ == "__main__":
time_map = folium.Map(
@@ -293,11 +348,13 @@ if __name__ == "__main__":
tiles="cartodb positron"
)
add_miv_count_station_locations()
add_fb_count_station_locations()
#setup_views()
create_heat_map_with_time(time_map)
create_heat_map_toggle(toggle_map)
## Save Maps ============================================================================================
save_map_as_html(toggle_map, "heat_map_toggle")
save_map_as_html(toggle_map, "html/heat_map_toggle")
save_map_as_html(time_map, "html/heat_map_time")
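save_map_as_html itself is not part of this diff; given the new "html/" prefix in both calls, it presumably just derives a file path from the given name and delegates to folium's Map.save. A minimal sketch of what such a helper could look like, purely as an assumption:

def save_map_as_html(folium_map, name):
    # Assumed behaviour: "html/heat_map_toggle" -> "html/heat_map_toggle.html"
    folium_map.save(f"{name}.html")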