@@ -0,0 +1,256 @@
+import pandas as pd
+import numpy as np
+import datetime
+import json
+import os
+import plotly
+import plotly.graph_objs as go
+import plotly.express as px
+
+def round_float(s):
+    '''Round s to 0 decimal places if it looks like a float, otherwise return s unchanged.'''
+    import re
+    m = re.match(r"(\d+\.\d+)", str(s))
+    if m is None:
+        return s
+    try:
+        return round(float(m.group(1)), 0)
+    except (TypeError, ValueError):
+        return s
+
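+# Illustrative behaviour of round_float() (examples, not tests):
+#   round_float(3.7)      -> 4.0
+#   round_float('12.34')  -> 12.0
+#   round_float('n/a')    -> 'n/a'  (no float found, returned unchanged)
+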
+# https://stackoverflow.com/questions/775049/how-do-i-convert-seconds-to-hours-minutes-and-seconds
+def sec2time(sec, n_msec=3):
+    ''' Convert seconds to 'D days, HH:MM:SS.FFF' '''
+    if hasattr(sec, '__len__'):
+        return [sec2time(s) for s in sec]
+    m, s = divmod(sec, 60)
+    h, m = divmod(m, 60)
+    d, h = divmod(h, 24)
+    if n_msec > 0:
+        pattern = '%%02d:%%02d:%%0%d.%df' % (n_msec + 3, n_msec)
+    else:
+        pattern = '%02d:%02d:%02d'
+    if d == 0:
+        return pattern % (h, m, s)
+    return ('%d Tage, ' + pattern) % (d, h, m, s)
+
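+# Rough examples of sec2time() output (assuming the 24-hour day handling above):
+#   sec2time(3661)      -> '01:01:01.000'
+#   sec2time(90061, 0)  -> '1 Tage, 01:01:01'
+#   sec2time([60, 90])  -> ['00:01:00.000', '00:01:30.000']
+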
+def read_data(csv_file):
+    df = pd.read_csv(csv_file, index_col='startTimeLocal', parse_dates=['startTimeLocal'])
+    df.drop(columns=['activityId'], inplace=True)
+    df.drop(columns=['Unnamed: 0'], inplace=True)
+    df['distance'] = round(df['distance'] / 1000, 3)          # m -> km
+    df['maxSpeed'] = round(df['maxSpeed'], 2)
+    df['averageSpeed'] = round(df['averageSpeed'] * 3.6, 2)   # m/s -> km/h
+    df['elevationGain'] = df['elevationGain'].astype(int)
+    df['maxElevation'] = round(df['maxElevation'], 0)
+    df['intensityMinutes'] = df['moderateIntensityMinutes'] + df['vigorousIntensityMinutes'] * 2
+
+    df['startzeit'] = df.index
+    return df
+
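+# Note: read_data() assumes a Garmin Connect activity export with at least the
+# columns used above: startTimeLocal, activityId, distance, maxSpeed, averageSpeed,
+# elevationGain, maxElevation, moderateIntensityMinutes, vigorousIntensityMinutes,
+# plus movingDuration, averageHR and maxHR used further below. The column names are
+# taken from this file, not from any official export specification.
+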
+def render_weekly_sum_barchart(csv_file, datafield, factor):
+    df2 = read_data(csv_file)
+    df2['startzeit'] = df2.index
+
+    # keep only the last three months of activities
+    df = df2.sort_values(by="startzeit", ascending=True).set_index("startzeit").last("3M")
+    df['startTimeLocal'] = df.index
+
+    chart_data = pd.concat([
+        pd.Series(df.set_index('startTimeLocal').index.to_period('W').to_timestamp(how='start').values, index=df.index, name='startTimeLocal|W'),
+        df[f'{datafield}'] / factor,
+    ], axis=1)
+    chart_data = chart_data.sort_values(['startTimeLocal|W'])
+    chart_data = chart_data.rename(columns={'startTimeLocal|W': 'x'})
+    chart_data_sum = chart_data.groupby(['x'], dropna=True)[[f'{datafield}']].sum()
+    chart_data_sum.columns = [f'{datafield}|sum']
+    chart_data = chart_data_sum.reset_index()
+    chart_data = chart_data.dropna()
+    charts = []
+    charts.append(go.Bar(
+        x=chart_data['x'],
+        y=chart_data[f'{datafield}|sum']
+    ))
+    fig = go.Figure(data=charts, layout=go.Layout({
+        'barmode': 'stack',
+        'legend': {'orientation': 'h', 'y': -0.3},
+        'title': {'text': f'Sum of {datafield} by startTimeLocal (Weekly)'},
+        'xaxis': {'title': {'text': 'startzeit (Weekly)'}},
+        'yaxis': {'title': {'text': f'Sum of {datafield}'}, 'type': 'linear'}
+    }))
+    return fig
+
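+# Example call (sketch; 'activities.csv' is a placeholder path, not part of this module):
+#   fig = render_weekly_sum_barchart('activities.csv', 'distance', 1)
+#   fig.show()
+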
+def monthly_sum_barchart(csv_file, datafield, factor, heading, y_axis, x_axis):
+    df = read_data(csv_file)
+    df = df.reset_index().drop('index', axis=1, errors='ignore')
+
+    chart_data = pd.concat([
+        pd.Series(df.set_index('startTimeLocal').index.to_period('M').to_timestamp(how='start').values, index=df.index, name='startTimeLocal|M'),
+        df[f'{datafield}'],
+    ], axis=1)
+    chart_data = chart_data.sort_values(['startTimeLocal|M'])
+    chart_data = chart_data.rename(columns={'startTimeLocal|M': 'x'})
+    chart_data_sum = chart_data.groupby(['x'], dropna=True)[[f'{datafield}']].sum()
+    chart_data_sum.columns = [f'{datafield}|sum']
+    # print(chart_data_sum.info())
+    chart_data = chart_data_sum.reset_index()
+    chart_data = chart_data.dropna()
+    charts = []
+    charts.append(go.Bar(
+        x=chart_data['x'],
+        y=chart_data[f'{datafield}|sum']
+    ))
+    fig = go.Figure(data=charts, layout=go.Layout({
+        'barmode': 'stack',
+        'legend': {'orientation': 'h', 'y': -0.3},
+        'title': {'text': heading},
+        'xaxis': {'title': {'text': x_axis}},
+        'yaxis': {'title': {'text': y_axis}}
+    }))
+    return fig
+
+
+def monthly_sum_barchart_concurrent2(csv_file, datafield, factor, heading, y_axis, x_axis, grouptitle):
+    df = read_data(csv_file)
+    df = df.reset_index().drop('index', axis=1, errors='ignore')
+    df['month'] = df['startTimeLocal'].dt.month
+    df['year'] = df['startTimeLocal'].dt.year
+    df['movingDuration'] = df['movingDuration'].map(lambda x: round(x / 3600, 2))  # seconds -> hours
+    # print(df.head())
+    df = df.reset_index().drop('index', axis=1, errors='ignore')
+
+    grouped_df = df.groupby(by=["month", "year"], as_index=False).agg({f"{datafield}": "sum"})
+    grouped_df.year = grouped_df['year'].apply(str)
+    grouped_df.sort_values(by=['year'], inplace=True)
+
+    fig = px.bar(
+        data_frame=grouped_df,
+        x="month",
+        y=f"{datafield}",
+        color="year",
+        barmode="group",
+        labels={"year": x_axis,
+                f"{datafield}": y_axis},
+        title=heading,
+    )
+    fig.update_layout(
+        font_family="Courier New",
+        font_color="black",
+        xaxis_title="Monat",
+    )
+    fig.update_xaxes(labelalias={1: 'Jan', 2: 'Feb', 3: 'Mar', 4: 'Apr', 5: 'Mai', 6: 'Jun',
+                                 7: 'Jul', 8: 'Aug', 9: 'Sep', 10: 'Okt', 11: 'Nov', 12: 'Dez'})
+    return fig
+
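+# Note: labelalias on update_xaxes requires a reasonably recent plotly release
+# (roughly 5.14 or newer); with older versions the month numbers are shown unaliased.
+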
+def monthly_sum_barchart_concurrent(csv_file, datafield, factor, heading, y_axis, x_axis):
+    df = read_data(csv_file)
+    df = df.reset_index().drop('index', axis=1, errors='ignore')
+    df['month'] = df['startTimeLocal'].dt.month
+    df['year'] = df['startTimeLocal'].dt.year
+
+    df = df.reset_index().drop('index', axis=1, errors='ignore')
+
+    grouped_df = df.groupby(by=["month", "year"], as_index=False).agg({f"{datafield}": "sum"})
+    grouped_df.year = grouped_df['year'].apply(str)
+    grouped_df.sort_values(by=['year'], inplace=True)
+
+    fig = px.bar(
+        data_frame=grouped_df,
+        x="month",
+        y=f"{datafield}",
+        color="year",
+        barmode="group",
+    )
+    return fig
+
+def homepage_data(csv_file):
+    df2 = read_data(csv_file)
+    df2['startzeit'] = df2.index
+    df = df2.sort_values(by="startzeit", ascending=True).set_index("startzeit")
+    df['startzeit'] = df.index
+    df['startTimeLocal'] = df.index
+    # weekly, monthly and yearly aggregates
+    wf = df.groupby([pd.Grouper(key='startTimeLocal', freq='W')]).aggregate({
+        'elevationGain': 'sum',
+        'distance': 'sum',
+        'averageHR': 'mean',
+        'maxHR': 'max',
+        'movingDuration': 'sum',
+        'intensityMinutes': 'sum'
+    })
+    mf = df.groupby([pd.Grouper(key='startzeit', freq='MS')]).aggregate({
+        'elevationGain': 'sum',
+        'distance': 'sum',
+        'averageHR': 'mean',
+        'maxHR': 'max',
+        'movingDuration': 'sum',
+        'intensityMinutes': 'sum'
+    })
+    yf = df.groupby([pd.Grouper(key='startzeit', freq='YS')]).aggregate({
+        'elevationGain': 'sum',
+        'distance': 'sum',
+        'averageHR': 'mean',
+        'maxHR': 'max',
+        'movingDuration': 'sum',
+        'intensityMinutes': 'sum'
+    })
+    d = {
+        'elevationtotal': df['elevationGain'].sum(),
+        'elevation0y': yf['elevationGain'].iloc[-1],
+        'elevation1y': yf['elevationGain'].iloc[-2],
+        'elevation1m': mf['elevationGain'].iloc[-2],
+        'elevation0m': mf['elevationGain'].iloc[-1],
+        'elevation1w': wf['elevationGain'].iloc[-2],
+        'elevation0w': wf['elevationGain'].iloc[-1],
+        'distancetotal': round(df['distance'].sum(), 1),
+        'distance0y': round(yf['distance'].iloc[-1], 1),
+        'distance1y': round(yf['distance'].iloc[-2], 1),
+        'distance1m': round(mf['distance'].iloc[-2], 1),
+        'distance0m': round(mf['distance'].iloc[-1], 1),
+        'distance1w': round(wf['distance'].iloc[-2], 1),
+        'distance0w': round(wf['distance'].iloc[-1], 1),
+        'durationtotal': sec2time(df['movingDuration'].sum(), 0),
+        'duration0y': sec2time(yf['movingDuration'].iloc[-1], 0),
+        'duration1y': sec2time(yf['movingDuration'].iloc[-2], 0),
+        'duration1m': sec2time(mf['movingDuration'].iloc[-2], 0),
+        'duration0m': sec2time(mf['movingDuration'].iloc[-1], 0),
+        'duration1w': sec2time(wf['movingDuration'].iloc[-2], 0),
+        'duration0w': sec2time(wf['movingDuration'].iloc[-1], 0),
+        'averageHRtotal': round(df['averageHR'].mean()),
+        'averageHR0y': round(yf['averageHR'].iloc[-1]),
+        'averageHR1y': round(yf['averageHR'].iloc[-2]),
+        'averageHR1m': round(mf['averageHR'].iloc[-2]),
+        'averageHR0m': round(mf['averageHR'].iloc[-1]),
+        'averageHR1w': round_float(wf['averageHR'].iloc[-2]),
+        'averageHR0w': round_float(wf['averageHR'].iloc[-1]),
+        'maxHRtotal': round(df['maxHR'].max()),
+        'maxHR0y': round(yf['maxHR'].iloc[-1]),
+        'maxHR1y': round(yf['maxHR'].iloc[-2]),
+        'maxHR1m': round(mf['maxHR'].iloc[-2]),
+        'maxHR0m': round(mf['maxHR'].iloc[-1]),
+        'maxHR1w': round_float(wf['maxHR'].iloc[-2]),
+        'maxHR0w': round_float(wf['maxHR'].iloc[-1]),
+        'intensityMinutestotal': round(df['intensityMinutes'].sum()),
+        'intensityMinutes0y': round(yf['intensityMinutes'].iloc[-1]),
+        'intensityMinutes1y': round(yf['intensityMinutes'].iloc[-2]),
+        'intensityMinutes1m': round(mf['intensityMinutes'].iloc[-2]),
+        'intensityMinutes0m': round(mf['intensityMinutes'].iloc[-1]),
+        'intensityMinutes1w': round(wf['intensityMinutes'].iloc[-2]),
+        'intensityMinutes0w': round(wf['intensityMinutes'].iloc[-1]),
+    }
+    return d
+
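+# Key naming used above (as far as the code shows): the '0' suffix is the most
+# recent week/month/year bucket, '1' the one before it; 'total' covers all
+# activities in the CSV.
+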
+if __name__ == "__main__":
+    # 'activities.csv' is a placeholder for the exported activity CSV; adjust as needed.
+    # print(homepage_data('activities.csv'))
+    print(monthly_sum_barchart_concurrent2('activities.csv', 'elevationGain', 1,
+                                           'Höhenmeter pro Monat', '(m)', 'Monat', 'grouptitle'))