From 6c369597b17a8772dd850af78c73376e7bbea7a2 Mon Sep 17 00:00:00 2001
From: merlin
Date: Sun, 19 Jan 2025 11:32:09 -0500
Subject: [PATCH 1/5] C3 riders vs low temp

---
 freezing/web/templates/base.html              |   3 +
 .../explore/distance_by_lowtemp.html          | 129 +++++++++---------
 .../templates/explore/riders_by_lowtemp.html  | 125 ++++++++++-------
 .../templates/leaderboard/team_various.html   |   2 +-
 freezing/web/views/general.py                 |   7 +
 5 files changed, 155 insertions(+), 111 deletions(-)

diff --git a/freezing/web/templates/base.html b/freezing/web/templates/base.html
index ded9ed8c..6beea40c 100644
--- a/freezing/web/templates/base.html
+++ b/freezing/web/templates/base.html
@@ -172,6 +172,9 @@
  • Distance vs. Avg Low Temp
  • +
  • + Riders vs. Avg Low Temp +
  • Individual Dist/Elev/Speed
  • diff --git a/freezing/web/templates/explore/distance_by_lowtemp.html b/freezing/web/templates/explore/distance_by_lowtemp.html index e08580e7..143d247d 100644 --- a/freezing/web/templates/explore/distance_by_lowtemp.html +++ b/freezing/web/templates/explore/distance_by_lowtemp.html @@ -11,84 +11,85 @@ integrity="sha384-xOzJ6ocROuPNeY/PUgXyXgiPyvvePCiWVkKXro4Lb1gWoGJOtrJw+zrpf3NBx1B5" crossorigin="anonymous"> {% endblock %} {% block content %} -
    Miles Logged vs. Avg. Low Temp
    Relationship between total number of miles logged by riders and the average low daily temperatures.
    - {# djlint:off H021 #} +
    - {# djlint:on #} + + + +  Loading chart data...
    {% endblock %} diff --git a/freezing/web/templates/explore/riders_by_lowtemp.html b/freezing/web/templates/explore/riders_by_lowtemp.html index e132c5cd..204dc3f7 100644 --- a/freezing/web/templates/explore/riders_by_lowtemp.html +++ b/freezing/web/templates/explore/riders_by_lowtemp.html @@ -1,55 +1,88 @@ {% extends "base.html" %} -{% block head %} - + + {% endblock %} {% block content %} -

    - Riders by Low Temp -

    -

    +

    + Riders vs. Avg. Low Temp +
    +
    Relationship between number of participating riders and the average low daily temperatures. -

    +
    -
    +
    -
    +
    {% endblock %} diff --git a/freezing/web/views/general.py b/freezing/web/views/general.py index 8907a2c0..520c7d22 100644 --- a/freezing/web/views/general.py +++ b/freezing/web/views/general.py @@ -404,6 +404,13 @@ def distance_by_lowtemp(): ) +@blueprint.route("/explore/riders_by_lowtemp") +def riders_by_lowtemp(): + return render_template( + "explore/riders_by_lowtemp.html", + ) + + @blueprint.route("/explore/team_cumul") def team_cumul_trend(): return render_template( From fd38d9a11ca7f76c39c881e50dfd59ac9ed387ff Mon Sep 17 00:00:00 2001 From: merlin Date: Sun, 19 Jan 2025 12:01:37 -0500 Subject: [PATCH 2/5] C3 team various --- freezing/web/templates/leaderboard/team.html | 1 - .../templates/leaderboard/team_various.html | 110 ++++++---- freezing/web/views/chartdata.py | 190 +++++++++--------- 3 files changed, 166 insertions(+), 135 deletions(-) diff --git a/freezing/web/templates/leaderboard/team.html b/freezing/web/templates/leaderboard/team.html index 2986d9c7..a2f9db5e 100644 --- a/freezing/web/templates/leaderboard/team.html +++ b/freezing/web/templates/leaderboard/team.html @@ -19,7 +19,6 @@ $('#chart_team_leaderboard').height(jsonData.labels.length * 23 + 35) var chart = c3.generate({ bindto: '#chart_team_leaderboard', - height: 400, bar: { width: 21, space: 2 diff --git a/freezing/web/templates/leaderboard/team_various.html b/freezing/web/templates/leaderboard/team_various.html index 93f21d3b..ed21d08a 100644 --- a/freezing/web/templates/leaderboard/team_various.html +++ b/freezing/web/templates/leaderboard/team_various.html @@ -1,61 +1,88 @@ {% extends "base.html" %} -{% block head %} +{% block foot %} + + + -{% endblock %} -{% block foot %} - {% endblock %} {% block content %} -

    +

    Team Leaderboards

    -
    - @@ -72,10 +99,13 @@

    Hains Point Laps - +
    + +
    diff --git a/freezing/web/views/chartdata.py b/freezing/web/views/chartdata.py index 34be56e3..9b8d0c08 100644 --- a/freezing/web/views/chartdata.py +++ b/freezing/web/views/chartdata.py @@ -99,25 +99,25 @@ def team_elev_gain(): team_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Score", "type": "number"}, - # {"id":"","label":"","pattern":"","type":"number","p":{"role":"interval"}}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(team_q): - place = i + 1 - cells = [ - { - "v": res["team_name"], - "f": "{0} [{1}]".format(short(res["team_name"]), place), - }, - {"v": res["cumul_elev_gain"], "f": str(int(res["cumul_elev_gain"]))}, - ] - rows.append({"c": cells}) - - return gviz_api_jsonify({"cols": cols, "rows": rows}) + ranks.append(i + 1) + labels.append(res["team_name"]) + values.append(res["cumul_elev_gain"]) + + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "elevation gain", + "suffix": " ft", + "unit": "feet", + } + ) @blueprint.route("/indiv_elev_gain") @@ -208,30 +208,29 @@ def team_moving_time(): """ ) - indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - - cols = [ - {"id": "name", "label": "Team", "type": "string"}, - {"id": "score", "label": "Moving Time", "type": "number"}, - ] - - rows = [] - for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["team_name"], - "f": "{0} [{1}]".format(short(res["team_name"]), place), - }, - { - "v": res["total_moving_time"], - "f": str(timedelta(seconds=int(res["total_moving_time"]))), - }, - ] - rows.append({"c": cells}) + team_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - return gviz_api_jsonify({"cols": cols, "rows": rows}) + labels = [] + ranks = [] + values = [] + tooltips = [] + for i, res in enumerate(team_q): + ranks.append(i + 1) + labels.append(res["team_name"]) + values.append(int(res["total_moving_time"] / 60)) + tooltips.append(str(timedelta(seconds=int(res["total_moving_time"])))) + + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "tooltips": tooltips, + "key": "total moving time", + "unit": "minutes", + } + ) @blueprint.route("/indiv_number_sleaze_days") def indiv_number_sleaze_days(): @@ -263,26 +262,27 @@ def indiv_number_sleaze_days(): def team_number_sleaze_days(): q = team_sleaze_query() - indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - - cols = [ - {"id": "name", "label": "Team", "type": "string"}, - {"id": "score", "label": "Sleaze Days", "type": "number"}, - ] + team_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - rows = [] - for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["team_name"], - "f": "{0} [{1}]".format(short(res["team_name"]), place), - }, - {"v": res["num_sleaze_days"], "f": str(int(res["num_sleaze_days"]))}, - ] - rows.append({"c": cells}) + labels = [] + ranks = [] + values = [] - return gviz_api_jsonify({"cols": cols, "rows": rows}) + for i, res in enumerate(team_q): + ranks.append(i + 1) + labels.append(res["team_name"]) + values.append(res["num_sleaze_days"]) + + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "sleaze days", + "suffix": "", + "unit": "days", + } + ) @blueprint.route("/indiv_kidical") @@ -387,29 +387,29 @@ def team_segment(segment_id): q = team_segment_query() - indiv_q = 
meta.engine.execute( + team_q = meta.engine.execute( q, segment_id=segment_id ).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Team", "type": "string"}, - {"id": "score", "label": "Times Ridden", "type": "number"}, - ] - - rows = [] - for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["team_name"], - "f": "{0} [{1}]".format(short(res["team_name"]), place), - }, - {"v": res["segment_rides"], "f": str(int(res["segment_rides"]))}, - ] - rows.append({"c": cells}) - - return gviz_api_jsonify({"cols": cols, "rows": rows}) + labels = [] + ranks = [] + values = [] + for i, res in enumerate(team_q): + ranks.append(i + 1) + labels.append(res["team_name"]) + values.append(res["segment_rides"]) + + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "rides", + "suffix": "", + "unit": "", + } + ) @blueprint.route("/indiv_avg_speed") def indiv_avg_speed(): @@ -462,26 +462,28 @@ def team_avg_speed(): """ ) - indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - - cols = [ - {"id": "name", "label": "Team", "type": "string"}, - {"id": "score", "label": "Average Speed", "type": "number"}, - ] + team_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - rows = [] - for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["team_name"], - "f": "{0} [{1}]".format(short(res["team_name"]), place), - }, - {"v": res["avg_speed"], "f": "{0:.2f}".format(res["avg_speed"])}, - ] - rows.append({"c": cells}) + labels = [] + ranks = [] + values = [] - return gviz_api_jsonify({"cols": cols, "rows": rows}) + for i, res in enumerate(team_q): + ranks.append(i + 1) + labels.append(res["team_name"]) + values.append(res["avg_speed"]) + + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "average speed", + "suffix": " mph", + "unit": "mph", + "precision": 1, + } + ) @blueprint.route("/indiv_freezing") From 2fab40b052f61288c3ee53d5e9ee8c8257a71344 Mon Sep 17 00:00:00 2001 From: merlin Date: Sun, 19 Jan 2025 12:38:17 -0500 Subject: [PATCH 3/5] indiv various --- .../templates/explore/riders_by_lowtemp.html | 22 +- freezing/web/templates/leaderboard/indiv.html | 1 - .../templates/leaderboard/indiv_various.html | 112 ++++-- .../templates/leaderboard/team_various.html | 2 +- freezing/web/views/chartdata.py | 371 +++++++++--------- 5 files changed, 278 insertions(+), 230 deletions(-) diff --git a/freezing/web/templates/explore/riders_by_lowtemp.html b/freezing/web/templates/explore/riders_by_lowtemp.html index 204dc3f7..956a5ce6 100644 --- a/freezing/web/templates/explore/riders_by_lowtemp.html +++ b/freezing/web/templates/explore/riders_by_lowtemp.html @@ -1,16 +1,16 @@ {% extends "base.html" %} {% block foot %} - - - - + + + -{% endblock %} -{% block foot %} - {% endblock %} {% block content %} -

    +

    Individual Leaderboards

    -
    - @@ -96,11 +125,14 @@

    Freeze Points - +
    + +
    -
    +
    {% endblock %} diff --git a/freezing/web/templates/leaderboard/team_various.html b/freezing/web/templates/leaderboard/team_various.html index ed21d08a..b6b2cafb 100644 --- a/freezing/web/templates/leaderboard/team_various.html +++ b/freezing/web/templates/leaderboard/team_various.html @@ -99,7 +99,7 @@

    Hains Point Laps - diff --git a/freezing/web/views/chartdata.py b/freezing/web/views/chartdata.py index 9b8d0c08..a3dbde37 100644 --- a/freezing/web/views/chartdata.py +++ b/freezing/web/views/chartdata.py @@ -135,25 +135,25 @@ def indiv_elev_gain(): indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Elevation", "type": "number"}, - # {"id":"","label":"","pattern":"","type":"number","p":{"role":"interval"}}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["cumul_elev_gain"], "f": str(int(res["cumul_elev_gain"]))}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["cumul_elev_gain"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "elevation gain", + "suffix": " ft", + "unit": "feet", + } + ) @blueprint.route("/indiv_moving_time") @@ -171,27 +171,27 @@ def indiv_moving_time(): indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Moving Time", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] + tooltips = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - { - "v": res["total_moving_time"], - "f": str(timedelta(seconds=int(res["total_moving_time"]))), - }, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["total_moving_time"] / 60) + tooltips.append(str(timedelta(seconds=int(res["total_moving_time"])))) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "tooltips": tooltips, + "key": "total moving time", + "unit": "minutes", + } + ) @blueprint.route("/team_moving_time") @@ -218,7 +218,7 @@ def team_moving_time(): for i, res in enumerate(team_q): ranks.append(i + 1) labels.append(res["team_name"]) - values.append(int(res["total_moving_time"] / 60)) + values.append(res["total_moving_time"] / 60) tooltips.append(str(timedelta(seconds=int(res["total_moving_time"])))) return jsonify( @@ -232,30 +232,32 @@ def team_moving_time(): } ) + @blueprint.route("/indiv_number_sleaze_days") def indiv_number_sleaze_days(): q = indiv_sleaze_query() indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Sleaze Days", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["num_sleaze_days"], "f": str(int(res["num_sleaze_days"]))}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["num_sleaze_days"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "sleaze days", + "suffix": "", + "unit": "days", + } + ) 
@blueprint.route("/team_number_sleaze_days") @@ -303,24 +305,25 @@ def indiv_kidical(): indiv_q = meta.engine.execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Kidical Rides", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["kidical_rides"], "f": str(int(res["kidical_rides"]))}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["kidical_rides"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "kidical rides", + "suffix": "", + "unit": "rides", + } + ) @blueprint.route("/indiv_freeze_points") @@ -328,27 +331,25 @@ def indiv_freeze_points(): q = indiv_freeze_query() indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Freeze Points", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - { - "v": res["freeze_points_total"], - "f": "{0:.2f}".format(res["freeze_points_total"]), - }, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["freeze_points_total"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "points", + "suffix": "", + "unit": "Freeze Points", + } + ) @blueprint.route("/indiv_segment/") @@ -361,24 +362,25 @@ def indiv_segment(segment_id): q, segment_id=segment_id ).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Times Ridden", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["segment_rides"], "f": str(int(res["segment_rides"]))}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["segment_rides"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "rides", + "suffix": "", + "unit": "", + } + ) @blueprint.route("/team_segment/") @@ -411,6 +413,7 @@ def team_segment(segment_id): } ) + @blueprint.route("/indiv_avg_speed") def indiv_avg_speed(): q = text( @@ -427,24 +430,26 @@ def indiv_avg_speed(): indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Average Speed", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["avg_speed"], "f": "{0:.2f}".format(res["avg_speed"])}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + 
values.append(res["avg_speed"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "average speed", + "suffix": " mph", + "unit": "mph", + "precision": 1, + } + ) @blueprint.route("/team_avg_speed") @@ -503,24 +508,26 @@ def indiv_freezing(): indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Miles Below Freezing", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["distance"], "f": "{0:.2f}".format(res["distance"])}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["distance"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": "distance", + "suffix": " mi", + "unit": "miles", + "precision": 1, + } + ) @blueprint.route("/indiv_before_sunrise") @@ -540,24 +547,27 @@ def indiv_before_sunrise(): indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "Before Sunrise", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] + tooltips = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["dark"], "f": str(timedelta(seconds=int(res["dark"])))}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["dark"]) + tooltips.append(str(timedelta(seconds=int(res["dark"])))) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "tooltips": tooltips, + "key": "time", + "unit": "minutes", + } + ) @blueprint.route("/indiv_after_sunset") @@ -577,24 +587,27 @@ def indiv_after_sunset(): indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": "After Sunset", "type": "number"}, - ] + labels = [] + ranks = [] + values = [] + tooltips = [] - rows = [] for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res["dark"], "f": str(timedelta(seconds=int(res["dark"])))}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res["dark"]) + tooltips.append(str(timedelta(seconds=int(res["dark"])))) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "tooltips": tooltips, + "key": "time", + "unit": "minutes", + } + ) def competition_start(): @@ -976,27 +989,31 @@ def exec_and_jsonify_query( q, display_label, query_label, + suffix, hover_lambda=lambda res, query_label: str(int(round(res[query_label]))), ): - cols = [ - {"id": "name", "label": "Athlete", "type": "string"}, - {"id": "score", "label": display_label, "type": "number"}, - ] - indiv_q = meta.scoped_session().execute(q).fetchall() - rows = [] + + labels = [] 
+ ranks = [] + values = [] + for i, res in enumerate(indiv_q): - place = i + 1 - cells = [ - { - "v": res["athlete_name"], - "f": "{0} [{1}]".format(short(res["athlete_name"]), place), - }, - {"v": res[query_label], "f": hover_lambda(res, query_label)}, - ] - rows.append({"c": cells}) + ranks.append(i + 1) + labels.append(res["athlete_name"]) + values.append(res[query_label]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "values": values, + "ranks": ranks, + "key": query_label, + "suffix": suffix, + "unit": display_label, + "precision": 1, + } + ) def fmt_date(dt): @@ -1057,7 +1074,7 @@ def hl(res, ql): res["loc"], ) - return exec_and_jsonify_query(q, "Temperature", "temp_start", hover_lambda=hl) + return exec_and_jsonify_query(q, "", "temp_start", "º F", hover_lambda=hl) @blueprint.route("/indiv_snowiest") @@ -1080,7 +1097,7 @@ def hl(res, ql): res["loc"], ) - return exec_and_jsonify_query(q, "Snowfall", "snow", hover_lambda=hl) + return exec_and_jsonify_query(q, "Snowfall", "snow", '"', hover_lambda=hl) @blueprint.route("/indiv_rainiest") @@ -1103,4 +1120,4 @@ def hl(res, ql): res["loc"], ) - return exec_and_jsonify_query(q, "Rainfall", "rain", hover_lambda=hl) + return exec_and_jsonify_query(q, "Rainfall", "rain", '"', hover_lambda=hl) From 312fdc172ecf2b07d9d6e03835498b15530e3487 Mon Sep 17 00:00:00 2001 From: merlin Date: Sun, 19 Jan 2025 14:06:38 -0500 Subject: [PATCH 4/5] the rest --- freezing/web/static/css/styles.css | 2 +- freezing/web/templates/base.html | 1 - .../templates/explore/indiv_elev_dist.html | 112 +- .../web/templates/explore/team_cumul.html | 14 +- .../templates/explore/team_weekly_points.html | 7 +- freezing/web/templates/leaderboard/indiv.html | 10 +- .../templates/leaderboard/indiv_various.html | 9 +- freezing/web/templates/leaderboard/team.html | 12 +- .../templates/leaderboard/team_various.html | 7 +- freezing/web/templates/user/rides.html | 14 +- freezing/web/utils/gviz_api.py | 1185 ----------------- freezing/web/views/chartdata.py | 67 +- 12 files changed, 146 insertions(+), 1294 deletions(-) delete mode 100755 freezing/web/utils/gviz_api.py diff --git a/freezing/web/static/css/styles.css b/freezing/web/static/css/styles.css index 9126cbcd..e8cfc704 100644 --- a/freezing/web/static/css/styles.css +++ b/freezing/web/static/css/styles.css @@ -11,7 +11,7 @@ div.row > div > div.row, .big-bottom { margin-bottom: 15px; } -iframe, .g-chart svg { +iframe { border-radius: var(--bs-border-radius); } diff --git a/freezing/web/templates/base.html b/freezing/web/templates/base.html index 6beea40c..aa80b2a1 100644 --- a/freezing/web/templates/base.html +++ b/freezing/web/templates/base.html @@ -64,7 +64,6 @@ - {% block head %} {% endblock %} diff --git a/freezing/web/templates/explore/indiv_elev_dist.html b/freezing/web/templates/explore/indiv_elev_dist.html index e56530c6..407c6839 100644 --- a/freezing/web/templates/explore/indiv_elev_dist.html +++ b/freezing/web/templates/explore/indiv_elev_dist.html @@ -1,51 +1,81 @@ {% extends "base.html" %} -{% block head %} - +{% block foot %} + + + + {% endblock %} {% block content %}

    Individual Distance, Elevation, Speed

    +
    + Correlation between distance, elevation and average speed +
    -
    +
    -  Loading correlation chart... +  Loading chart data...
    {% endblock %} diff --git a/freezing/web/templates/explore/team_cumul.html b/freezing/web/templates/explore/team_cumul.html index bbf21c51..856ae8fd 100644 --- a/freezing/web/templates/explore/team_cumul.html +++ b/freezing/web/templates/explore/team_cumul.html @@ -30,12 +30,15 @@ type: 'timeseries', }, y: { - label: 'points' + label: 'points', + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { format: { - value: (value) => Math.round(value) + " pts", + value: (value) => Math.round(value).toLocaleString() + " pts", } } }); @@ -59,12 +62,15 @@ type: 'timeseries', }, y: { - label: 'miles' + label: 'miles', + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { format: { - value: (value) => Math.round(value) + " mi", + value: (value) => Math.round(value).toLocaleString() + " mi", } } }); diff --git a/freezing/web/templates/explore/team_weekly_points.html b/freezing/web/templates/explore/team_weekly_points.html index e7a6c18e..98c76e5b 100644 --- a/freezing/web/templates/explore/team_weekly_points.html +++ b/freezing/web/templates/explore/team_weekly_points.html @@ -33,6 +33,11 @@ tick: { format: d => "Week " + d, } + }, + y: { + tick: { + format: d => d.toLocaleString() + } } }, bar: { @@ -42,7 +47,7 @@ }, tooltip: { format: { - value: (value) => Math.round(value), + value: (value) => Math.round(value).toLocaleString(), } }, legend: { diff --git a/freezing/web/templates/leaderboard/indiv.html b/freezing/web/templates/leaderboard/indiv.html index 3f90302d..0cc98c41 100644 --- a/freezing/web/templates/leaderboard/indiv.html +++ b/freezing/web/templates/leaderboard/indiv.html @@ -36,7 +36,7 @@ ], type: 'bar', labels: { - format: (value) => Math.round(value) + format: (value) => Math.round(value).toLocaleString() }, hide: ['rank'] }, @@ -49,13 +49,19 @@ multiline: true, multilineMax: 1, } + }, + y: { + label: 'Points', + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { grouped: true, format: { title: (i) => '#' + jsonData.ranks[i] + ': ' + jsonData.labels[i], - value: (value) => Math.round(value), + value: (value) => Math.round(value).toLocaleString(), } }, legend: { diff --git a/freezing/web/templates/leaderboard/indiv_various.html b/freezing/web/templates/leaderboard/indiv_various.html index 1b074b40..e3194434 100644 --- a/freezing/web/templates/leaderboard/indiv_various.html +++ b/freezing/web/templates/leaderboard/indiv_various.html @@ -38,7 +38,7 @@ ], type: 'bar', labels: { - format: (value) => +value.toFixed(jsonData.precision ?? 0) + format: (value) => (+value.toFixed(jsonData.precision ?? 0)).toLocaleString() }, }, axis: { @@ -52,7 +52,10 @@ } }, y: { - label: jsonData.unit + label: jsonData.unit, + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { @@ -60,7 +63,7 @@ format: { title: (i) => '#' + jsonData.ranks[i] + ': ' + jsonData.labels[i], value: (value, ratio, id, index) => - jsonData.tooltips ? jsonData.tooltips[index] : +value.toFixed(jsonData.precision ?? 0) + jsonData.suffix + jsonData.tooltips ? jsonData.tooltips[index] : (+value.toFixed(jsonData.precision ?? 
0)).toLocaleString() + jsonData.suffix }, }, legend: { diff --git a/freezing/web/templates/leaderboard/team.html b/freezing/web/templates/leaderboard/team.html index a2f9db5e..366da967 100644 --- a/freezing/web/templates/leaderboard/team.html +++ b/freezing/web/templates/leaderboard/team.html @@ -17,7 +17,7 @@ dataType:"json", }).done(function(jsonData) { $('#chart_team_leaderboard').height(jsonData.labels.length * 23 + 35) - var chart = c3.generate({ + c3.generate({ bindto: '#chart_team_leaderboard', bar: { width: 21, @@ -36,7 +36,7 @@ ], type: 'bar', labels: { - format: (value) => Math.round(value) + format: (value) => Math.round(value).toLocaleString() }, hide: ['rank'] }, @@ -49,13 +49,19 @@ multiline: true, multilineMax: 1, } + }, + y: { + label: 'Points', + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { grouped: true, format: { title: (i) => '#' + jsonData.ranks[i] + ': ' + jsonData.labels[i], - value: (value) => Math.round(value) + value: (value) => Math.round(value).toLocaleString() }, }, legend: { diff --git a/freezing/web/templates/leaderboard/team_various.html b/freezing/web/templates/leaderboard/team_various.html index b6b2cafb..b83336e6 100644 --- a/freezing/web/templates/leaderboard/team_various.html +++ b/freezing/web/templates/leaderboard/team_various.html @@ -38,7 +38,7 @@ ], type: 'bar', labels: { - format: (value) => +value.toFixed(jsonData.precision ?? 0) + format: (value) => (+value.toFixed(jsonData.precision ?? 0)).toLocaleString() }, }, axis: { @@ -52,7 +52,10 @@ } }, y: { - label: jsonData.unit + label: jsonData.unit, + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { diff --git a/freezing/web/templates/user/rides.html b/freezing/web/templates/user/rides.html index 551e46c7..4caa4e40 100644 --- a/freezing/web/templates/user/rides.html +++ b/freezing/web/templates/user/rides.html @@ -51,11 +51,16 @@ tick: { format: "%m/%d", } + }, + y: { + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { format: { - value: (value) => Math.round(value), + value: (value) => Math.round(value).toLocaleString(), } }, legend: { @@ -88,11 +93,16 @@ tick: { format: d => "Week " + d, } + }, + y: { + tick: { + format: d => d.toLocaleString() + } } }, tooltip: { format: { - value: (value) => Math.round(value), + value: (value) => Math.round(value).toLocaleString(), } }, legend: { diff --git a/freezing/web/utils/gviz_api.py b/freezing/web/utils/gviz_api.py deleted file mode 100755 index 712dbc1e..00000000 --- a/freezing/web/utils/gviz_api.py +++ /dev/null @@ -1,1185 +0,0 @@ -#!/usr/bin/python -# -# Copyright (C) 2009 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Converts Python data into data for Google Visualization API clients. - -This library can be used to create a google.visualization.DataTable usable by -visualizations built on the Google Visualization API. Output formats are raw -JSON, JSON response, JavaScript, CSV, and HTML table. - -See http://code.google.com/apis/visualization/ for documentation on the -Google Visualization API. 
-""" - -__author__ = "Amit Weinstein, Misha Seltzer, Jacob Baskin" - -import cgi -import csv -import datetime -from io import StringIO - -try: - import json -except ImportError: - import simplejson as json - -import types -from decimal import Decimal - - -class DataTableException(Exception): - """The general exception object thrown by DataTable.""" - - pass - - -class DataTableJSONEncoder(json.JSONEncoder): - """JSON encoder that handles date/time/datetime objects correctly.""" - - # def __init__(self, *args, **kwargs): - # json.JSONEncoder.__init__(self, - # separators=(",", ":"), - # ensure_ascii=False) - - def default(self, o): - if isinstance(o, datetime.datetime): - if o.microsecond == 0: - # If the time doesn't have ms-resolution, leave it out to keep - # things smaller. - return "Date(%d,%d,%d,%d,%d,%d)" % ( - o.year, - o.month - 1, - o.day, - o.hour, - o.minute, - o.second, - ) - else: - return "Date(%d,%d,%d,%d,%d,%d,%d)" % ( - o.year, - o.month - 1, - o.day, - o.hour, - o.minute, - o.second, - o.microsecond / 1000, - ) - elif isinstance(o, datetime.date): - return "Date(%d,%d,%d)" % (o.year, o.month - 1, o.day) - elif isinstance(o, datetime.time): - return [o.hour, o.minute, o.second] - elif isinstance(o, Decimal): - return float(o) - else: - return super(DataTableJSONEncoder, self).default(o) - - -class DataTable(object): - """Wraps the data to convert to a Google Visualization API DataTable. - - Create this object, populate it with data, then call one of the ToJS... - methods to return a string representation of the data in the format described. - - You can clear all data from the object to reuse it, but you cannot clear - individual cells, rows, or columns. You also cannot modify the table schema - specified in the class constructor. - - You can add new data one or more rows at a time. All data added to an - instantiated DataTable must conform to the schema passed in to __init__(). - - You can reorder the columns in the output table, and also specify row sorting - order by column. The default column order is according to the original - table_description parameter. Default row sort order is ascending, by column - 1 values. For a dictionary, we sort the keys for order. - - The data and the table_description are closely tied, as described here: - - The table schema is defined in the class constructor's table_description - parameter. The user defines each column using a tuple of - (id[, type[, label[, custom_properties]]]). The default value for type is - string, label is the same as ID if not specified, and custom properties is - an empty dictionary if not specified. - - table_description is a dictionary or list, containing one or more column - descriptor tuples, nested dictionaries, and lists. Each dictionary key, list - element, or dictionary element must eventually be defined as - a column description tuple. Here's an example of a dictionary where the key - is a tuple, and the value is a list of two tuples: - {('a', 'number'): [('b', 'number'), ('c', 'string')]} - - This flexibility in data entry enables you to build and manipulate your data - in a Python structure that makes sense for your program. - - Add data to the table using the same nested design as the table's - table_description, replacing column descriptor tuples with cell data, and - each row is an element in the top level collection. 
This will be a bit - clearer after you look at the following examples showing the - table_description, matching data, and the resulting table: - - Columns as list of tuples [col1, col2, col3] - table_description: [('a', 'number'), ('b', 'string')] - AppendData( [[1, 'z'], [2, 'w'], [4, 'o'], [5, 'k']] ) - Table: - a b <--- these are column ids/labels - 1 z - 2 w - 4 o - 5 k - - Dictionary of columns, where key is a column, and value is a list of - columns {col1: [col2, col3]} - table_description: {('a', 'number'): [('b', 'number'), ('c', 'string')]} - AppendData( data: {1: [2, 'z'], 3: [4, 'w']} - Table: - a b c - 1 2 z - 3 4 w - - Dictionary where key is a column, and the value is itself a dictionary of - columns {col1: {col2, col3}} - table_description: {('a', 'number'): {'b': 'number', 'c': 'string'}} - AppendData( data: {1: {'b': 2, 'c': 'z'}, 3: {'b': 4, 'c': 'w'}} - Table: - a b c - 1 2 z - 3 4 w - """ - - def __init__(self, table_description, data=None, custom_properties=None): - """Initialize the data table from a table schema and (optionally) data. - - See the class documentation for more information on table schema and data - values. - - Args: - table_description: A table schema, following one of the formats described - in TableDescriptionParser(). Schemas describe the - column names, data types, and labels. See - TableDescriptionParser() for acceptable formats. - data: Optional. If given, fills the table with the given data. The data - structure must be consistent with schema in table_description. See - the class documentation for more information on acceptable data. You - can add data later by calling AppendData(). - custom_properties: Optional. A dictionary from string to string that - goes into the table's custom properties. This can be - later changed by changing self.custom_properties. - - Raises: - DataTableException: Raised if the data and the description did not match, - or did not use the supported formats. - """ - self.__columns = self.TableDescriptionParser(table_description) - self.__data = [] - self.custom_properties = {} - if custom_properties is not None: - self.custom_properties = custom_properties - if data: - self.LoadData(data) - - @staticmethod - def CoerceValue(value, value_type): - """Coerces a single value into the type expected for its column. - - Internal helper method. - - Args: - value: The value which should be converted - value_type: One of "string", "number", "boolean", "date", "datetime" or - "timeofday". - - Returns: - An item of the Python type appropriate to the given value_type. Strings - are also converted to Unicode using UTF-8 encoding if necessary. - If a tuple is given, it should be in one of the following forms: - - (value, formatted value) - - (value, formatted value, custom properties) - where the formatted value is a string, and custom properties is a - dictionary of the custom properties for this cell. - To specify custom properties without specifying formatted value, one can - pass None as the formatted value. - One can also have a null-valued cell with formatted value and/or custom - properties by specifying None for the value. - This method ignores the custom properties except for checking that it is a - dictionary. The custom properties are handled in the ToJSon and ToJSCode - methods. - The real type of the given value is not strictly checked. For example, - any type can be used for string - as we simply take its str( ) and for - boolean value we just check "if value". 
- Examples: - CoerceValue(None, "string") returns None - CoerceValue((5, "5$"), "number") returns (5, "5$") - CoerceValue(100, "string") returns "100" - CoerceValue(0, "boolean") returns False - - Raises: - DataTableException: The value and type did not match in a not-recoverable - way, for example given value 'abc' for type 'number'. - """ - if isinstance(value, tuple): - # In case of a tuple, we run the same function on the value itself and - # add the formatted value. - if len(value) not in [2, 3] or ( - len(value) == 3 and not isinstance(value[2], dict) - ): - raise DataTableException( - "Wrong format for value and formatting - %s." % str(value) - ) - if not isinstance(value[1], types.StringTypes + (types.NoneType,)): - raise DataTableException( - "Formatted value is not string, given %s." % type(value[1]) - ) - js_value = DataTable.CoerceValue(value[0], value_type) - return (js_value,) + value[1:] - - t_value = type(value) - if value is None: - return value - if value_type == "boolean": - return bool(value) - - elif value_type == "number": - if isinstance(value, (int, float)): - return value - raise DataTableException("Wrong type %s when expected number" % t_value) - - elif value_type == "string": - if isinstance(value, str): - return value - else: - return bytes(value).decode("utf-8") - - elif value_type == "date": - if isinstance(value, datetime.datetime): - return datetime.date(value.year, value.month, value.day) - elif isinstance(value, datetime.date): - return value - else: - raise DataTableException("Wrong type %s when expected date" % t_value) - - elif value_type == "timeofday": - if isinstance(value, datetime.datetime): - return datetime.time(value.hour, value.minute, value.second) - elif isinstance(value, datetime.time): - return value - else: - raise DataTableException("Wrong type %s when expected time" % t_value) - - elif value_type == "datetime": - if isinstance(value, datetime.datetime): - return value - else: - raise DataTableException( - "Wrong type %s when expected datetime" % t_value - ) - # If we got here, it means the given value_type was not one of the - # supported types. - raise DataTableException("Unsupported type %s" % value_type) - - @staticmethod - def EscapeForJSCode(encoder, value): - if value is None: - return "null" - elif isinstance(value, datetime.datetime): - if value.microsecond == 0: - # If it's not ms-resolution, leave that out to save space. - return "new Date(%d,%d,%d,%d,%d,%d)" % ( - value.year, - value.month - 1, # To match JS - value.day, - value.hour, - value.minute, - value.second, - ) - else: - return "new Date(%d,%d,%d,%d,%d,%d,%d)" % ( - value.year, - value.month - 1, # match JS - value.day, - value.hour, - value.minute, - value.second, - value.microsecond / 1000, - ) - elif isinstance(value, datetime.date): - return "new Date(%d,%d,%d)" % (value.year, value.month - 1, value.day) - else: - return encoder.encode(value) - - @staticmethod - def ToString(value): - if value is None: - return "(empty)" - elif isinstance(value, (datetime.datetime, datetime.date, datetime.time)): - return str(value) - elif isinstance(value, str): - return value - elif isinstance(value, bool): - return str(value).lower() - else: - return bytes(value).decode("utf-8") - - @staticmethod - def ColumnTypeParser(description): - """Parses a single column description. Internal helper method. 
- - Args: - description: a column description in the possible formats: - 'id' - ('id',) - ('id', 'type') - ('id', 'type', 'label') - ('id', 'type', 'label', {'custom_prop1': 'custom_val1'}) - Returns: - Dictionary with the following keys: id, label, type, and - custom_properties where: - - If label not given, it equals the id. - - If type not given, string is used by default. - - If custom properties are not given, an empty dictionary is used by - default. - - Raises: - DataTableException: The column description did not match the RE, or - unsupported type was passed. - """ - if not description: - raise DataTableException("Description error: empty description given") - - if not isinstance(description, (types.StringTypes, tuple)): - raise DataTableException( - "Description error: expected either string or " - "tuple, got %s." % type(description) - ) - - if isinstance(description, types.StringTypes): - description = (description,) - - # According to the tuple's length, we fill the keys - # We verify everything is of type string - for elem in description[:3]: - if not isinstance(elem, types.StringTypes): - raise DataTableException( - "Description error: expected tuple of " - "strings, current element of type %s." % type(elem) - ) - desc_dict = { - "id": description[0], - "label": description[0], - "type": "string", - "custom_properties": {}, - } - if len(description) > 1: - desc_dict["type"] = description[1].lower() - if len(description) > 2: - desc_dict["label"] = description[2] - if len(description) > 3: - if not isinstance(description[3], dict): - raise DataTableException( - "Description error: expected custom " - "properties of type dict, current element " - "of type %s." % type(description[3]) - ) - desc_dict["custom_properties"] = description[3] - if len(description) > 4: - raise DataTableException( - "Description error: tuple of length > 4" - ) - if desc_dict["type"] not in [ - "string", - "number", - "boolean", - "date", - "datetime", - "timeofday", - ]: - raise DataTableException( - "Description error: unsupported type '%s'" % desc_dict["type"] - ) - return desc_dict - - @staticmethod - def TableDescriptionParser(table_description, depth=0): - """Parses the table_description object for internal use. - - Parses the user-submitted table description into an internal format used - by the Python DataTable class. Returns the flat list of parsed columns. - - Args: - table_description: A description of the table which should comply - with one of the formats described below. - depth: Optional. The depth of the first level in the current description. - Used by recursive calls to this function. - - Returns: - List of columns, where each column represented by a dictionary with the - keys: id, label, type, depth, container which means the following: - - id: the id of the column - - name: The name of the column - - type: The datatype of the elements in this column. Allowed types are - described in ColumnTypeParser(). - - depth: The depth of this column in the table description - - container: 'dict', 'iter' or 'scalar' for parsing the format easily. - - custom_properties: The custom properties for this column. - The returned description is flattened regardless of how it was given. - - Raises: - DataTableException: Error in a column description or in the description - structure. 
- - Examples: - A column description can be of the following forms: - 'id' - ('id',) - ('id', 'type') - ('id', 'type', 'label') - ('id', 'type', 'label', {'custom_prop1': 'custom_val1'}) - or as a dictionary: - 'id': 'type' - 'id': ('type',) - 'id': ('type', 'label') - 'id': ('type', 'label', {'custom_prop1': 'custom_val1'}) - If the type is not specified, we treat it as string. - If no specific label is given, the label is simply the id. - If no custom properties are given, we use an empty dictionary. - - input: [('a', 'date'), ('b', 'timeofday', 'b', {'foo': 'bar'})] - output: [{'id': 'a', 'label': 'a', 'type': 'date', - 'depth': 0, 'container': 'iter', 'custom_properties': {}}, - {'id': 'b', 'label': 'b', 'type': 'timeofday', - 'depth': 0, 'container': 'iter', - 'custom_properties': {'foo': 'bar'}}] - - input: {'a': [('b', 'number'), ('c', 'string', 'column c')]} - output: [{'id': 'a', 'label': 'a', 'type': 'string', - 'depth': 0, 'container': 'dict', 'custom_properties': {}}, - {'id': 'b', 'label': 'b', 'type': 'number', - 'depth': 1, 'container': 'iter', 'custom_properties': {}}, - {'id': 'c', 'label': 'column c', 'type': 'string', - 'depth': 1, 'container': 'iter', 'custom_properties': {}}] - - input: {('a', 'number', 'column a'): { 'b': 'number', 'c': 'string'}} - output: [{'id': 'a', 'label': 'column a', 'type': 'number', - 'depth': 0, 'container': 'dict', 'custom_properties': {}}, - {'id': 'b', 'label': 'b', 'type': 'number', - 'depth': 1, 'container': 'dict', 'custom_properties': {}}, - {'id': 'c', 'label': 'c', 'type': 'string', - 'depth': 1, 'container': 'dict', 'custom_properties': {}}] - - input: { ('w', 'string', 'word'): ('c', 'number', 'count') } - output: [{'id': 'w', 'label': 'word', 'type': 'string', - 'depth': 0, 'container': 'dict', 'custom_properties': {}}, - {'id': 'c', 'label': 'count', 'type': 'number', - 'depth': 1, 'container': 'scalar', 'custom_properties': {}}] - - input: {'a': ('number', 'column a'), 'b': ('string', 'column b')} - output: [{'id': 'a', 'label': 'column a', 'type': 'number', 'depth': 0, - 'container': 'dict', 'custom_properties': {}}, - {'id': 'b', 'label': 'column b', 'type': 'string', 'depth': 0, - 'container': 'dict', 'custom_properties': {}} - - NOTE: there might be ambiguity in the case of a dictionary representation - of a single column. For example, the following description can be parsed - in 2 different ways: {'a': ('b', 'c')} can be thought of a single column - with the id 'a', of type 'b' and the label 'c', or as 2 columns: one named - 'a', and the other named 'b' of type 'c'. We choose the first option by - default, and in case the second option is the right one, it is possible to - make the key into a tuple (i.e. {('a',): ('b', 'c')}) or add more info - into the tuple, thus making it look like this: {'a': ('b', 'c', 'b', {})} - -- second 'b' is the label, and {} is the custom properties field. - """ - # For the recursion step, we check for a scalar object (string or tuple) - if isinstance(table_description, (types.StringTypes, tuple)): - parsed_col = DataTable.ColumnTypeParser(table_description) - parsed_col["depth"] = depth - parsed_col["container"] = "scalar" - return [parsed_col] - - # Since it is not scalar, table_description must be iterable. - if not hasattr(table_description, "__iter__"): - raise DataTableException( - "Expected an iterable object, got %s" % type(table_description) - ) - if not isinstance(table_description, dict): - # We expects a non-dictionary iterable item. 
- columns = [] - for desc in table_description: - parsed_col = DataTable.ColumnTypeParser(desc) - parsed_col["depth"] = depth - parsed_col["container"] = "iter" - columns.append(parsed_col) - if not columns: - raise DataTableException( - "Description iterable objects should not" " be empty." - ) - return columns - # The other case is a dictionary - if not table_description: - raise DataTableException( - "Empty dictionaries are not allowed inside" " description" - ) - - # To differentiate between the two cases of more levels below or this is - # the most inner dictionary, we consider the number of keys (more then one - # key is indication for most inner dictionary) and the type of the key and - # value in case of only 1 key (if the type of key is string and the type of - # the value is a tuple of 0-3 items, we assume this is the most inner - # dictionary). - # NOTE: this way of differentiating might create ambiguity. See docs. - if len(table_description) != 1 or ( - isinstance(table_description.keys()[0], types.StringTypes) - and isinstance(table_description.values()[0], tuple) - and len(table_description.values()[0]) < 4 - ): - # This is the most inner dictionary. Parsing types. - columns = [] - # We sort the items, equivalent to sort the keys since they are unique - for key, value in sorted(table_description.items()): - # We parse the column type as (key, type) or (key, type, label) using - # ColumnTypeParser. - if isinstance(value, tuple): - parsed_col = DataTable.ColumnTypeParser((key,) + value) - else: - parsed_col = DataTable.ColumnTypeParser((key, value)) - parsed_col["depth"] = depth - parsed_col["container"] = "dict" - columns.append(parsed_col) - return columns - # This is an outer dictionary, must have at most one key. - parsed_col = DataTable.ColumnTypeParser(table_description.keys()[0]) - parsed_col["depth"] = depth - parsed_col["container"] = "dict" - return [parsed_col] + DataTable.TableDescriptionParser( - table_description.values()[0], depth=depth + 1 - ) - - @property - def columns(self): - """Returns the parsed table description.""" - return self.__columns - - def NumberOfRows(self): - """Returns the number of rows in the current data stored in the table.""" - return len(self.__data) - - def SetRowsCustomProperties(self, rows, custom_properties): - """Sets the custom properties for given row(s). - - Can accept a single row or an iterable of rows. - Sets the given custom properties for all specified rows. - - Args: - rows: The row, or rows, to set the custom properties for. - custom_properties: A string to string dictionary of custom properties to - set for all rows. - """ - if not hasattr(rows, "__iter__"): - rows = [rows] - for row in rows: - self.__data[row] = (self.__data[row][0], custom_properties) - - def LoadData(self, data, custom_properties=None): - """Loads new rows to the data table, clearing existing rows. - - May also set the custom_properties for the added rows. The given custom - properties dictionary specifies the dictionary that will be used for *all* - given rows. - - Args: - data: The rows that the table will contain. - custom_properties: A dictionary of string to string to set as the custom - properties for all rows. - """ - self.__data = [] - self.AppendData(data, custom_properties) - - def AppendData(self, data, custom_properties=None): - """Appends new data to the table. - - Data is appended in rows. Data must comply with - the table schema passed in to __init__(). See CoerceValue() for a list - of acceptable data types. 
See the class documentation for more information - and examples of schema and data values. - - Args: - data: The row to add to the table. The data must conform to the table - description format. - custom_properties: A dictionary of string to string, representing the - custom properties to add to all the rows. - - Raises: - DataTableException: The data structure does not match the description. - """ - # If the maximal depth is 0, we simply iterate over the data table - # lines and insert them using _InnerAppendData. Otherwise, we simply - # let the _InnerAppendData handle all the levels. - if not self.__columns[-1]["depth"]: - for row in data: - self._InnerAppendData(({}, custom_properties), row, 0) - else: - self._InnerAppendData(({}, custom_properties), data, 0) - - def _InnerAppendData(self, prev_col_values, data, col_index): - """Inner function to assist LoadData.""" - # We first check that col_index has not exceeded the columns size - if col_index >= len(self.__columns): - raise DataTableException("The data does not match description, too deep") - - # Dealing with the scalar case, the data is the last value. - if self.__columns[col_index]["container"] == "scalar": - prev_col_values[0][self.__columns[col_index]["id"]] = data - self.__data.append(prev_col_values) - return - - if self.__columns[col_index]["container"] == "iter": - if not hasattr(data, "__iter__") or isinstance(data, dict): - raise DataTableException( - "Expected iterable object, got %s" % type(data) - ) - # We only need to insert the rest of the columns - # If there are less items than expected, we only add what there is. - for value in data: - if col_index >= len(self.__columns): - raise DataTableException("Too many elements given in data") - prev_col_values[0][self.__columns[col_index]["id"]] = value - col_index += 1 - self.__data.append(prev_col_values) - return - - # We know the current level is a dictionary, we verify the type. - if not isinstance(data, dict): - raise DataTableException( - "Expected dictionary at current level, got %s" % type(data) - ) - # We check if this is the last level - if self.__columns[col_index]["depth"] == self.__columns[-1]["depth"]: - # We need to add the keys in the dictionary as they are - for col in self.__columns[col_index:]: - if col["id"] in data: - prev_col_values[0][col["id"]] = data[col["id"]] - self.__data.append(prev_col_values) - return - - # We have a dictionary in an inner depth level. - if not data.keys(): - # In case this is an empty dictionary, we add a record with the columns - # filled only until this point. - self.__data.append(prev_col_values) - else: - for key in sorted(data): - col_values = dict(prev_col_values[0]) - col_values[self.__columns[col_index]["id"]] = key - self._InnerAppendData( - (col_values, prev_col_values[1]), data[key], col_index + 1 - ) - - def _PreparedData(self, order_by=()): - """Prepares the data for enumeration - sorting it by order_by. - - Args: - order_by: Optional. Specifies the name of the column(s) to sort by, and - (optionally) which direction to sort in. Default sort direction - is asc. Following formats are accepted: - "string_col_name" -- For a single key in default (asc) order. - ("string_col_name", "asc|desc") -- For a single key. - [("col_1","asc|desc"), ("col_2","asc|desc")] -- For more than - one column, an array of tuples of (col_name, "asc|desc"). - - Returns: - The data sorted by the keys given. 
- - Raises: - DataTableException: Sort direction not in 'asc' or 'desc' - """ - if not order_by: - return self.__data - - proper_sort_keys = [] - if isinstance(order_by, types.StringTypes) or ( - isinstance(order_by, tuple) - and len(order_by) == 2 - and order_by[1].lower() in ["asc", "desc"] - ): - order_by = (order_by,) - for key in order_by: - if isinstance(key, types.StringTypes): - proper_sort_keys.append((key, 1)) - elif ( - isinstance(key, (list, tuple)) - and len(key) == 2 - and key[1].lower() in ("asc", "desc") - ): - proper_sort_keys.append((key[0], key[1].lower() == "asc" and 1 or -1)) - else: - raise DataTableException( - "Expected tuple with second value: " "'asc' or 'desc'" - ) - - # Thanks https://stackoverflow.com/a/22490617/424301 - def cmp(a, b): - return (a > b) - (a < b) - - def SortCmpFunc(row1, row2): - """cmp function for sorted. Compares by keys and 'asc'/'desc' keywords.""" - for key, asc_mult in proper_sort_keys: - cmp_result = asc_mult * cmp(row1[0].get(key), row2[0].get(key)) - if cmp_result: - return cmp_result - return 0 - - return sorted(self.__data, cmp=SortCmpFunc) - - def ToJSCode(self, name, columns_order=None, order_by=()): - """Writes the data table as a JS code string. - - This method writes a string of JS code that can be run to - generate a DataTable with the specified data. Typically used for debugging - only. - - Args: - name: The name of the table. The name would be used as the DataTable's - variable name in the created JS code. - columns_order: Optional. Specifies the order of columns in the - output table. Specify a list of all column IDs in the order - in which you want the table created. - Note that you must list all column IDs in this parameter, - if you use it. - order_by: Optional. Specifies the name of the column(s) to sort by. - Passed as is to _PreparedData. - - Returns: - A string of JS code that, when run, generates a DataTable with the given - name and the data stored in the DataTable object. - Example result: - "var tab1 = new google.visualization.DataTable(); - tab1.addColumn("string", "a", "a"); - tab1.addColumn("number", "b", "b"); - tab1.addColumn("boolean", "c", "c"); - tab1.addRows(10); - tab1.setCell(0, 0, "a"); - tab1.setCell(0, 1, 1, null, {"foo": "bar"}); - tab1.setCell(0, 2, true); - ... - tab1.setCell(9, 0, "c"); - tab1.setCell(9, 1, 3, "3$"); - tab1.setCell(9, 2, false);" - - Raises: - DataTableException: The data does not match the type. 
- """ - - encoder = DataTableJSONEncoder() - - if columns_order is None: - columns_order = [col["id"] for col in self.__columns] - col_dict = dict([(col["id"], col) for col in self.__columns]) - - # We first create the table with the given name - jscode = "var %s = new google.visualization.DataTable();\n" % name - if self.custom_properties: - jscode += "%s.setTableProperties(%s);\n" % ( - name, - encoder.encode(self.custom_properties), - ) - - # We add the columns to the table - for i, col in enumerate(columns_order): - jscode += "%s.addColumn(%s, %s, %s);\n" % ( - name, - encoder.encode(col_dict[col]["type"]), - encoder.encode(col_dict[col]["label"]), - encoder.encode(col_dict[col]["id"]), - ) - if col_dict[col]["custom_properties"]: - jscode += "%s.setColumnProperties(%d, %s);\n" % ( - name, - i, - encoder.encode(col_dict[col]["custom_properties"]), - ) - jscode += "%s.addRows(%d);\n" % (name, len(self.__data)) - - # We now go over the data and add each row - for i, (row, cp) in enumerate(self._PreparedData(order_by)): - # We add all the elements of this row by their order - for j, col in enumerate(columns_order): - if col not in row or row[col] is None: - continue - value = self.CoerceValue(row[col], col_dict[col]["type"]) - if isinstance(value, tuple): - cell_cp = "" - if len(value) == 3: - cell_cp = ", %s" % encoder.encode(row[col][2]) - # We have a formatted value or custom property as well - jscode += "%s.setCell(%d, %d, %s, %s%s);\n" % ( - name, - i, - j, - self.EscapeForJSCode(encoder, value[0]), - self.EscapeForJSCode(encoder, value[1]), - cell_cp, - ) - else: - jscode += "%s.setCell(%d, %d, %s);\n" % ( - name, - i, - j, - self.EscapeForJSCode(encoder, value), - ) - if cp: - jscode += "%s.setRowProperties(%d, %s);\n" % ( - name, - i, - encoder.encode(cp), - ) - return jscode - - def ToHtml(self, columns_order=None, order_by=()): - """Writes the data table as an HTML table code string. - - Args: - columns_order: Optional. Specifies the order of columns in the - output table. Specify a list of all column IDs in the order - in which you want the table created. - Note that you must list all column IDs in this parameter, - if you use it. - order_by: Optional. Specifies the name of the column(s) to sort by. - Passed as is to _PreparedData. - - Returns: - An HTML table code string. - Example result (the result is without the newlines): - - - - - - -
-          <html><body><table border="1">
-           <thead><tr><th>a</th><th>b</th><th>c</th></tr></thead>
-           <tbody>
-            <tr><td>1</td><td>"z"</td><td>2</td></tr>
-            <tr><td>"3$"</td><td>"w"</td><td></td></tr>
-           </tbody>
-          </table></body></html>
-
-        Raises:
-          DataTableException: The data does not match the type.
-        """
-        table_template = '<html><body><table border="1">%s</table></body></html>'
-        columns_template = "<thead><tr>%s</tr></thead>"
-        rows_template = "<tbody>%s</tbody>"
-        row_template = "<tr>%s</tr>"
-        header_cell_template = "<th>%s</th>"
-        cell_template = "<td>%s</td>"
-
-        if columns_order is None:
-            columns_order = [col["id"] for col in self.__columns]
-        col_dict = dict([(col["id"], col) for col in self.__columns])
-
-        columns_list = []
-        for col in columns_order:
-            columns_list.append(
-                header_cell_template % cgi.escape(col_dict[col]["label"])
-            )
-        columns_html = columns_template % "".join(columns_list)
-
-        rows_list = []
-        # We now go over the data and add each row
-        for row, unused_cp in self._PreparedData(order_by):
-            cells_list = []
-            # We add all the elements of this row by their order
-            for col in columns_order:
-                # For empty string we want empty quotes ("").
-                value = ""
-                if col in row and row[col] is not None:
-                    value = self.CoerceValue(row[col], col_dict[col]["type"])
-                if isinstance(value, tuple):
-                    # We have a formatted value and we're going to use it
-                    cells_list.append(
-                        cell_template % cgi.escape(self.ToString(value[1]))
-                    )
-                else:
-                    cells_list.append(cell_template % cgi.escape(self.ToString(value)))
-            rows_list.append(row_template % "".join(cells_list))
-        rows_html = rows_template % "".join(rows_list)
-
-        return table_template % (columns_html + rows_html)
-
-    def ToCsv(self, columns_order=None, order_by=(), separator=","):
-        """Writes the data table as a CSV string.
-
-        Output is encoded in UTF-8 because the Python "csv" module can't handle
-        Unicode properly according to its documentation.
-
-        Args:
-          columns_order: Optional. Specifies the order of columns in the
-                         output table. Specify a list of all column IDs in the order
-                         in which you want the table created.
-                         Note that you must list all column IDs in this parameter,
-                         if you use it.
-          order_by: Optional. Specifies the name of the column(s) to sort by.
-                    Passed as is to _PreparedData.
-          separator: Optional. The separator to use between the values.
-
-        Returns:
-          A CSV string representing the table.
-          Example result:
-          'a','b','c'
-          1,'z',2
-          3,'w',''
-
-        Raises:
-          DataTableException: The data does not match the type.
-        """
-
-        csv_buffer = StringIO.StringIO()
-        writer = csv.writer(csv_buffer, delimiter=separator)
-
-        if columns_order is None:
-            columns_order = [col["id"] for col in self.__columns]
-        col_dict = dict([(col["id"], col) for col in self.__columns])
-
-        writer.writerow(
-            [col_dict[col]["label"].encode("utf-8") for col in columns_order]
-        )
-
-        # We now go over the data and add each row
-        for row, unused_cp in self._PreparedData(order_by):
-            cells_list = []
-            # We add all the elements of this row by their order
-            for col in columns_order:
-                value = ""
-                if col in row and row[col] is not None:
-                    value = self.CoerceValue(row[col], col_dict[col]["type"])
-                if isinstance(value, tuple):
-                    # We have a formatted value. Using it only for date/time types.
-                    if col_dict[col]["type"] in ["date", "datetime", "timeofday"]:
-                        cells_list.append(self.ToString(value[1]).encode("utf-8"))
-                    else:
-                        cells_list.append(self.ToString(value[0]).encode("utf-8"))
-                else:
-                    cells_list.append(self.ToString(value).encode("utf-8"))
-            writer.writerow(cells_list)
-        return csv_buffer.getvalue()
-
-    def ToTsvExcel(self, columns_order=None, order_by=()):
-        """Returns a file in tab-separated-format readable by MS Excel.
-
-        Returns a file in UTF-16 little endian encoding, with tabs separating the
-        values.
-
-        Args:
-          columns_order: Delegated to ToCsv.
-          order_by: Delegated to ToCsv.
-
-        Returns:
-          A tab-separated little endian UTF16 file representing the table.
- """ - return ( - self.ToCsv(columns_order, order_by, separator="\t") - .decode("utf-8") - .encode("UTF-16LE") - ) - - def _ToJSonObj(self, columns_order=None, order_by=()): - """Returns an object suitable to be converted to JSON. - - Args: - columns_order: Optional. A list of all column IDs in the order in which - you want them created in the output table. If specified, - all column IDs must be present. - order_by: Optional. Specifies the name of the column(s) to sort by. - Passed as is to _PreparedData(). - - Returns: - A dictionary object for use by ToJSon or ToJSonResponse. - """ - if columns_order is None: - columns_order = [col["id"] for col in self.__columns] - col_dict = dict([(col["id"], col) for col in self.__columns]) - - # Creating the column JSON objects - col_objs = [] - for col_id in columns_order: - col_obj = { - "id": col_dict[col_id]["id"], - "label": col_dict[col_id]["label"], - "type": col_dict[col_id]["type"], - } - if col_dict[col_id]["custom_properties"]: - col_obj["p"] = col_dict[col_id]["custom_properties"] - col_objs.append(col_obj) - - # Creating the rows jsons - row_objs = [] - for row, cp in self._PreparedData(order_by): - cell_objs = [] - for col in columns_order: - value = self.CoerceValue(row.get(col, None), col_dict[col]["type"]) - if value is None: - cell_obj = None - elif isinstance(value, tuple): - cell_obj = {"v": value[0]} - if len(value) > 1 and value[1] is not None: - cell_obj["f"] = value[1] - if len(value) == 3: - cell_obj["p"] = value[2] - else: - cell_obj = {"v": value} - cell_objs.append(cell_obj) - row_obj = {"c": cell_objs} - if cp: - row_obj["p"] = cp - row_objs.append(row_obj) - - json_obj = {"cols": col_objs, "rows": row_objs} - if self.custom_properties: - json_obj["p"] = self.custom_properties - - return json_obj - - def ToJSon(self, columns_order=None, order_by=()): - """Returns a string that can be used in a JS DataTable constructor. - - This method writes a JSON string that can be passed directly into a Google - Visualization API DataTable constructor. Use this output if you are - hosting the visualization HTML on your site, and want to code the data - table in Python. Pass this string into the - google.visualization.DataTable constructor, e.g,: - ... on my page that hosts my visualization ... - google.setOnLoadCallback(drawTable); - function drawTable() { - var data = new google.visualization.DataTable(_my_JSon_string, 0.6); - myTable.draw(data); - } - - Args: - columns_order: Optional. Specifies the order of columns in the - output table. Specify a list of all column IDs in the order - in which you want the table created. - Note that you must list all column IDs in this parameter, - if you use it. - order_by: Optional. Specifies the name of the column(s) to sort by. - Passed as is to _PreparedData(). - - Returns: - A JSon constructor string to generate a JS DataTable with the data - stored in the DataTable object. - Example result (the result is without the newlines): - {cols: [{id:"a",label:"a",type:"number"}, - {id:"b",label:"b",type:"string"}, - {id:"c",label:"c",type:"number"}], - rows: [{c:[{v:1},{v:"z"},{v:2}]}, c:{[{v:3,f:"3$"},{v:"w"},{v:null}]}], - p: {'foo': 'bar'}} - - Raises: - DataTableException: The data does not match the type. 
- """ - - encoder = DataTableJSONEncoder() - return encoder.encode(self._ToJSonObj(columns_order, order_by)).encode("utf-8") - - def ToJSonResponse( - self, - columns_order=None, - order_by=(), - req_id=0, - response_handler="google.visualization.Query.setResponse", - ): - """Writes a table as a JSON response that can be returned as-is to a client. - - This method writes a JSON response to return to a client in response to a - Google Visualization API query. This string can be processed by the calling - page, and is used to deliver a data table to a visualization hosted on - a different page. - - Args: - columns_order: Optional. Passed straight to self.ToJSon(). - order_by: Optional. Passed straight to self.ToJSon(). - req_id: Optional. The response id, as retrieved by the request. - response_handler: Optional. The response handler, as retrieved by the - request. - - Returns: - A JSON response string to be received by JS the visualization Query - object. This response would be translated into a DataTable on the - client side. - Example result (newlines added for readability): - google.visualization.Query.setResponse({ - 'version':'0.6', 'reqId':'0', 'status':'OK', - 'table': {cols: [...], rows: [...]}}); - - Note: The URL returning this string can be used as a data source by Google - Visualization Gadgets or from JS code. - """ - - response_obj = { - "version": "0.6", - "reqId": str(req_id), - "table": self._ToJSonObj(columns_order, order_by), - "status": "ok", - } - encoder = DataTableJSONEncoder() - return "%s(%s);" % ( - response_handler, - encoder.encode(response_obj).encode("utf-8"), - ) - - def ToResponse(self, columns_order=None, order_by=(), tqx=""): - """Writes the right response according to the request string passed in tqx. - - This method parses the tqx request string (format of which is defined in - the documentation for implementing a data source of Google Visualization), - and returns the right response according to the request. - It parses out the "out" parameter of tqx, calls the relevant response - (ToJSonResponse() for "json", ToCsv() for "csv", ToHtml() for "html", - ToTsvExcel() for "tsv-excel") and passes the response function the rest of - the relevant request keys. - - Args: - columns_order: Optional. Passed as is to the relevant response function. - order_by: Optional. Passed as is to the relevant response function. - tqx: Optional. The request string as received by HTTP GET. Should be in - the format "key1:value1;key2:value2...". All keys have a default - value, so an empty string will just do the default (which is calling - ToJSonResponse() with no extra parameters). - - Returns: - A response string, as returned by the relevant response function. - - Raises: - DataTableException: One of the parameters passed in tqx is not supported. - """ - tqx_dict = {} - if tqx: - tqx_dict = dict(opt.split(":") for opt in tqx.split(";")) - if tqx_dict.get("version", "0.6") != "0.6": - raise DataTableException( - "Version (%s) passed by request is not supported." 
% tqx_dict["version"] - ) - - if tqx_dict.get("out", "json") == "json": - response_handler = tqx_dict.get( - "responseHandler", "google.visualization.Query.setResponse" - ) - return self.ToJSonResponse( - columns_order, - order_by, - req_id=tqx_dict.get("reqId", 0), - response_handler=response_handler, - ) - elif tqx_dict["out"] == "html": - return self.ToHtml(columns_order, order_by) - elif tqx_dict["out"] == "csv": - return self.ToCsv(columns_order, order_by) - elif tqx_dict["out"] == "tsv-excel": - return self.ToTsvExcel(columns_order, order_by) - else: - raise DataTableException( - "'out' parameter: '%s' is not supported" % tqx_dict["out"] - ) diff --git a/freezing/web/views/chartdata.py b/freezing/web/views/chartdata.py index a3dbde37..bc984c18 100644 --- a/freezing/web/views/chartdata.py +++ b/freezing/web/views/chartdata.py @@ -5,19 +5,17 @@ """ import copy -import json from collections import defaultdict from datetime import datetime, timedelta from dateutil import rrule -from flask import Blueprint, current_app, jsonify +from flask import Blueprint, jsonify from freezing.model import meta from freezing.model.orm import Team from pytz import utc from sqlalchemy import text from freezing.web import config -from freezing.web.utils import gviz_api from freezing.web.views.shared_sql import ( indiv_freeze_query, indiv_segment_query, @@ -858,46 +856,31 @@ def indiv_elev_dist(): left join teams T on T.id = A.team_id where not R.manual group by R.athlete_id, athlete_name, team_name + order by SUM(R.distance) ; """ ) indiv_q = meta.scoped_session().execute(q).fetchall() # @UndefinedVariable - cols = [ - {"id": "ID", "label": "ID", "type": "string"}, - {"id": "score", "label": "Distance", "type": "number"}, - {"id": "score", "label": "Elevation", "type": "number"}, - {"id": "ID", "label": "Team", "type": "string"}, - {"id": "score", "label": "Average Speed", "type": "number"}, - ] - - rows = [] + labels = [] + elevations = [] + distances = [] + speeds = [] for i, res in enumerate(indiv_q): - name_parts = res["athlete_name"].split(" ") - if len(name_parts) > 1: - short_name = " ".join([name_parts[0], name_parts[-1]]) - else: - short_name = res["athlete_name"] - - if res["team_name"] is None: - team_name = "(No team)" - else: - team_name = res["team_name"] - - cells = [ - {"v": res["athlete_name"], "f": short(short_name)}, - {"v": res["total_distance"], "f": "{0:.2f}".format(res["total_distance"])}, - { - "v": res["total_elevation_gain"], - "f": "{0:.2f}".format(res["total_elevation_gain"]), - }, - {"v": team_name, "f": team_name}, - {"v": res["avg_speed"], "f": "{0:.2f}".format(res["avg_speed"])}, - ] - rows.append({"c": cells}) + labels.append(res["athlete_name"]) + elevations.append(int(res["total_elevation_gain"])) + distances.append(res["total_distance"]) + speeds.append(res["avg_speed"]) - return gviz_api_jsonify({"cols": cols, "rows": rows}) + return jsonify( + { + "labels": labels, + "elevations": elevations, + "distances": distances, + "speeds": speeds, + } + ) @blueprint.route("/riders_by_lowtemp") @@ -964,20 +947,6 @@ def distance_by_lowtemp(): return jsonify({"data": rows}) -def gviz_api_jsonify(*args, **kwargs): - """ - Override default Flask jsonify to handle JSON for Google Chart API. 
- """ - return current_app.response_class( - json.dumps( - dict(*args, **kwargs), - indent=None, - cls=gviz_api.DataTableJSONEncoder, - ), - mimetype="application/json", - ) - - def short(name, max_len=17): if len(name) < max_len: return name From a37a325e2639ffdb7bedf76057c30dea16da2c0c Mon Sep 17 00:00:00 2001 From: merlin Date: Sun, 19 Jan 2025 14:14:53 -0500 Subject: [PATCH 5/5] simplejson no more --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index da575392..b1f284e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,6 @@ dependencies = [ "marshmallow==3.23.2", "python-instagram==1.3.2", "pytz==2024.2", - "simplejson==3.19.3", "stravalib==1.2.0", ] classifiers = [
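
The chartdata.py hunk above rewrites indiv_elev_dist() to return plain parallel arrays (labels, elevations, distances, speeds) through flask.jsonify instead of the gviz cols/rows payload. A minimal sketch of that reshaping, with invented rows standing in for the SQL result (the dict keys mirror the columns the query selects; the names and numbers are made up for illustration):

    # Illustrative sketch only: fake rows standing in for the indiv_elev_dist() SQL result.
    sample_rows = [
        {"athlete_name": "Mary R.", "total_distance": 312.4, "total_elevation_gain": 9871.0, "avg_speed": 11.2},
        {"athlete_name": "Joe F.", "total_distance": 201.7, "total_elevation_gain": 5403.5, "avg_speed": 9.8},
    ]

    labels, elevations, distances, speeds = [], [], [], []
    for res in sample_rows:
        labels.append(res["athlete_name"])
        elevations.append(int(res["total_elevation_gain"]))  # truncated to int, as in the patch
        distances.append(res["total_distance"])
        speeds.append(res["avg_speed"])

    # This dict is what the rewritten route hands to flask.jsonify(), replacing the
    # old google.visualization {"cols": ..., "rows": ...} structure from gviz_api.
    payload = {
        "labels": labels,
        "elevations": elevations,
        "distances": distances,
        "speeds": speeds,
    }
    print(payload)

The arrays are index-aligned, so a client can pair labels[i] with distances[i], elevations[i], and speeds[i] without any further reshaping.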