Merge branch 'sebl' into 'main'
Calculations.py goes through all speed-limit zones and calculates how many...

See merge request dbis/lecture-groups/database-systems/2023hs/group-1!6
This commit is contained in:
commit eda751f4e1

analysis/calculations.py (normal file, 68 lines added)

@@ -0,0 +1,68 @@
import logging
from shapely.geometry import Point, LineString
from shapely import wkb
from db_connector import RemoteDB
import pandas as pd

# Speed-limit zone codes as stored in the temporegime_technical column
speedLimits = ["T0", "T20", "T30", "T50", "T60", "T80", "T100"]


def is_point_near_multilinestring(point, multilinestring, threshold_distance):
    # True if the point lies within threshold_distance of the (multi)linestring,
    # measured in the coordinate units of the geometries
    point_geometry = Point(point)
    return point_geometry.distance(multilinestring) < threshold_distance


def get_data(db):
    # Load the signaled speed zones with their geometries
    get_speeds_sql = """
        SELECT wkb_geometry,
               temporegime_technical
        FROM signaled_speeds;
    """
    result = db.execute_query(get_speeds_sql)
    sig_speed_df = pd.DataFrame(result)
    sig_speed_df.rename(columns={'wkb_geometry': 'geometry'}, inplace=True)
    sig_speed_df['geometry'] = sig_speed_df['geometry'].apply(lambda x: wkb.loads(x, hex=True))

    # Load the accident locations
    get_accidents = """
        SELECT geometry
        FROM accidents;
    """
    result = db.execute_query(get_accidents)
    accident_df = pd.DataFrame(result)
    accident_df['geometry'] = accident_df['geometry'].apply(lambda x: wkb.loads(x, hex=True))

    process_data(sig_speed_df, accident_df)


def process_data(sig_speed_df, accident_df):
    # Count accidents near the roads of each speed-limit zone
    result_df = pd.DataFrame(columns=['TempoLim', 'Accidents_total'])
    for speed in speedLimits:
        print("Checking for zone: " + speed)
        filtered_df = sig_speed_df[sig_speed_df["temporegime_technical"].str.contains(speed, case=False, na=False)]
        current_result = count_points_near_multilinestrings(accident_df, filtered_df, 0.000005)
        result_df.loc[len(result_df)] = {'TempoLim': speed, 'Accidents_total': current_result}
    print("FINAL RESULT")
    print(result_df)


def count_points_near_multilinestrings(points_df, multilinestrings_df, threshold_distance):
    result_counts = []

    for idx, multilinestring_row in multilinestrings_df.iterrows():
        multilinestring = multilinestring_row['geometry']
        count_near = sum(points_df['geometry'].apply(
            lambda point: is_point_near_multilinestring(point, multilinestring, threshold_distance)))
        result_counts.append({'temporegime_technical': multilinestring_row['temporegime_technical'], 'CountNear': count_near})
    result_df = pd.DataFrame(result_counts)
    return result_df['CountNear'].sum()


if __name__ == "__main__":
    remote_db = RemoteDB()
    try:
        get_data(remote_db)
    except Exception as e:
        print(f"Exception {e} in calculations.py")
    finally:
        remote_db.close()
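The counting above tests every accident point against every zone geometry via pandas .apply(), which grows quadratically. A spatial index could cut that down. Below is a minimal sketch, assuming shapely >= 2.0 (whose STRtree.query accepts a "dwithin" predicate); the helper name count_points_near_lines_indexed and its arguments are illustrative and not part of this merge request.

from shapely.strtree import STRtree


def count_points_near_lines_indexed(points, lines, threshold_distance):
    # Index the accident points once, then query the tree per speed-limit line.
    # As in the pandas version, a point near several lines is counted once per line.
    tree = STRtree(list(points))
    total = 0
    for line in lines:
        # "dwithin" keeps only candidates whose true distance is within threshold_distance
        total += len(tree.query(line, predicate="dwithin", distance=threshold_distance))
    return total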
@@ -1,5 +1,5 @@
 # config.py, adjust as needed
-# TODO RENAME THIS FILE TO "config.py"
+# TODO COPY and then RENAME TO "config.py"
 SSH_HOST = 'slenzlinger.dev'
 SSH_USERNAME = 'sebl' #TODO: Enter own username
 SSH_PASSWORD = 'your_ssh_password' # TODO: to not push to git
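One possible way to act on the "to not push to git" TODO above (an illustration, not something this merge request does) is to read the password from an environment variable instead of hard-coding it in config.py:

import os

# Set SSH_PASSWORD in the shell, e.g. export SSH_PASSWORD=..., so the secret is never committed
SSH_PASSWORD = os.environ.get('SSH_PASSWORD', '')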
@@ -300,4 +300,4 @@ if __name__ == "__main__":
 
 ## Save Maps ============================================================================================
 save_map_as_html(toggle_map, "heat_map_toggle")
-save_map_as_html(time_map, "heat_map_time")
+save_map_as_html(time_map, "html/heat_map_time")
@@ -10,8 +10,6 @@ import numpy as np
 logging.getLogger("matplotlib").setLevel(logging.WARNING)
 
 
-
-
 # Summary charts ======================================================================================================
 def plt_acc_by_year(db):
     acc_year_sql = """
@@ -73,6 +71,7 @@ def plt_acc_by_day_year(db):
         animation_frame='year',
         labels={'weekday': 'Weekday', 'count': 'Number of Accidents'},
         category_orders={'weekday': ['Saturday', 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday']},
+        orientation='h'
     )
     fig.update_yaxes(range=[0, 1000])
     # Customize the layout to include a slider
@@ -137,9 +136,9 @@ def plt_acc_by_daytime(db):
     result = db.execute_query(acc_weekday_sql)
     result_df = pd.DataFrame(result)
 
-    fig = px.bar(result_df, y='hour', x='count', orientation='h')
-    fig.write_image("fig/acc_by_day.png")
-    fig.write_html("html/acc_by_day.html")
+    fig = px.bar(result_df, y='hour', x='count', orientation='h', title='Accidents by day')
+    fig.write_image("fig/acc_by_daytime.png")
+    fig.write_html("html/acc_by_daytime.html")
 
 # Time Series charts ==================================================================================================
 def acc_by_type(db):
@@ -260,8 +259,6 @@ def severity_by_month(db):
     #fig.show()
 
 
-
-
 # Utilities ===========================================================================================================
 def save_as_barplot(df, xname, yname, orientation, file_name):
     pass