import hashlib
import os
import tempfile
import zipfile
from datetime import datetime, timedelta

import gpxpy.gpx
import pytz
from fittrackee_api import appLog, db
from flask import current_app
from sqlalchemy import exc
from staticmap import Line, StaticMap
from werkzeug.utils import secure_filename

from ..users.models import User
from .models import Activity, ActivitySegment, Sport
from .utils_files import get_absolute_file_path
from .utils_weather import get_weather


class ActivityException(Exception):
    def __init__(self, status, message, e):
        self.status = status
        self.message = message
        self.e = e


def get_datetime_with_tz(timezone, activity_date, gpx_data=None):
    activity_date_tz = None
    if timezone:
        user_tz = pytz.timezone(timezone)
        utc_tz = pytz.utc
        if gpx_data:
            # activity date in gpx is in UTC, but in naive datetime
            fmt = '%Y-%m-%d %H:%M:%S'
            activity_date_string = activity_date.strftime(fmt)
            activity_date_tmp = utc_tz.localize(
                datetime.strptime(activity_date_string, fmt))
            activity_date_tz = activity_date_tmp.astimezone(user_tz)
        else:
            activity_date_tz = user_tz.localize(activity_date)
            activity_date = activity_date_tz.astimezone(utc_tz)
            # make datetime 'naive' like in gpx file
            activity_date = activity_date.replace(tzinfo=None)

    return activity_date_tz, activity_date
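

# Illustrative usage (a sketch; 'Europe/Paris' is an assumed user timezone,
# not a value from this module):
#     >>> naive_utc = datetime(2018, 6, 11, 14, 0, 0)
#     >>> tz_date, utc_date = get_datetime_with_tz(
#     ...     'Europe/Paris', naive_utc, gpx_data={'start': naive_utc})
#     tz_date  -> 2018-06-11 16:00:00+02:00 (user local time)
#     utc_date -> 2018-06-11 14:00:00 (unchanged: naive UTC, as in a gpx file)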


def update_activity_data(activity, gpx_data):
    """activity could be a complete activity or an activity segment"""
    activity.pauses = gpx_data['stop_time']
    activity.moving = gpx_data['moving_time']
    activity.min_alt = gpx_data['elevation_min']
    activity.max_alt = gpx_data['elevation_max']
    activity.descent = gpx_data['downhill']
    activity.ascent = gpx_data['uphill']
    activity.max_speed = gpx_data['max_speed']
    activity.ave_speed = gpx_data['average_speed']
    return activity


def create_activity(
        user, activity_data, gpx_data=None
):
    activity_date = gpx_data['start'] if gpx_data else datetime.strptime(
        activity_data.get('activity_date'), '%Y-%m-%d %H:%M')
    activity_date_tz, activity_date = get_datetime_with_tz(
        user.timezone, activity_date, gpx_data)

    duration = gpx_data['duration'] if gpx_data \
        else timedelta(seconds=activity_data.get('duration'))
    distance = gpx_data['distance'] if gpx_data \
        else activity_data.get('distance')
    title = gpx_data['name'] if gpx_data \
        else activity_data.get('title')

    new_activity = Activity(
        user_id=user.id,
        sport_id=activity_data.get('sport_id'),
        activity_date=activity_date,
        distance=distance,
        duration=duration
    )
    new_activity.notes = activity_data.get('notes')

    if title is not None and title != '':
        new_activity.title = title
    else:
        sport = Sport.query.filter_by(id=new_activity.sport_id).first()
        fmt = "%Y-%m-%d %H:%M:%S"
        activity_datetime = (
            activity_date_tz.strftime(fmt)
            if activity_date_tz
            else new_activity.activity_date.strftime(fmt))
        new_activity.title = f'{sport.label} - {activity_datetime}'

    if gpx_data:
        new_activity.gpx = gpx_data['filename']
        new_activity.bounds = gpx_data['bounds']
        update_activity_data(new_activity, gpx_data)
    else:
        new_activity.moving = duration
        # speed in km/h: distance is stored in km, duration in hours
        new_activity.ave_speed = (None
                                  if duration.seconds == 0
                                  else float(new_activity.distance) /
                                  (duration.seconds / 3600))
        new_activity.max_speed = new_activity.ave_speed
    return new_activity
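

# Illustrative payload (a sketch; values are assumptions, not fixtures):
# for an activity without a gpx file, activity_data may look like
#     {'sport_id': 1, 'activity_date': '2018-05-15 14:05',
#      'duration': 3600, 'distance': 10, 'title': 'Lunch Run'}
# which yields a one-hour, 10 km activity with ave_speed == max_speed
# == 10 km/h.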


def create_segment(activity_id, segment_data):
    new_segment = ActivitySegment(
        activity_id=activity_id,
        segment_id=segment_data['idx']
    )
    new_segment.duration = segment_data['duration']
    new_segment.distance = segment_data['distance']
    update_activity_data(new_segment, segment_data)
    return new_segment


def edit_activity(activity, activity_data, auth_user_id):
    user = User.query.filter_by(id=auth_user_id).first()
    if activity_data.get('sport_id'):
        activity.sport_id = activity_data.get('sport_id')
    if activity_data.get('title'):
        activity.title = activity_data.get('title')
    if activity_data.get('notes'):
        activity.notes = activity_data.get('notes')
    if not activity.gpx:
        if activity_data.get('activity_date'):
            activity_date = datetime.strptime(
                activity_data.get('activity_date'), '%Y-%m-%d %H:%M')
            _, activity.activity_date = get_datetime_with_tz(
                user.timezone, activity_date)

        if activity_data.get('duration'):
            activity.duration = timedelta(
                seconds=activity_data.get('duration'))
            activity.moving = activity.duration

        if activity_data.get('distance'):
            activity.distance = activity_data.get('distance')

        activity.ave_speed = (None if activity.duration.seconds == 0
                              else float(activity.distance) /
                              (activity.duration.seconds / 3600))
        activity.max_speed = activity.ave_speed
    return activity


def get_gpx_data(parsed_gpx, max_speed, start, stopped_time_btwn_seg):
    # speeds from gpxpy are in m/s; (x / 1000) * 3600 converts to km/h
    gpx_data = {'max_speed': (max_speed / 1000) * 3600, 'start': start}

    duration = parsed_gpx.get_duration()
    gpx_data['duration'] = timedelta(seconds=duration) + stopped_time_btwn_seg

    ele = parsed_gpx.get_elevation_extremes()
    gpx_data['elevation_max'] = ele.maximum
    gpx_data['elevation_min'] = ele.minimum

    hill = parsed_gpx.get_uphill_downhill()
    gpx_data['uphill'] = hill.uphill
    gpx_data['downhill'] = hill.downhill

    mv = parsed_gpx.get_moving_data()
    gpx_data['moving_time'] = timedelta(seconds=mv.moving_time)
    gpx_data['stop_time'] = (timedelta(seconds=mv.stopped_time)
                             + stopped_time_btwn_seg)
    distance = mv.moving_distance + mv.stopped_distance
    gpx_data['distance'] = distance / 1000  # meters -> kilometers

    average_speed = distance / mv.moving_time if mv.moving_time > 0 else 0
    gpx_data['average_speed'] = (average_speed / 1000) * 3600

    return gpx_data
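

# Worked conversion example (a sketch, not computed from real data):
# a max_speed of 5.0 m/s gives (5.0 / 1000) * 3600 = 18.0 km/h,
# i.e. the familiar m/s -> km/h factor of 3.6.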


def open_gpx_file(gpx_file):
    # use a context manager so the file handle is always closed
    with open(gpx_file, 'r') as f:
        gpx = gpxpy.parse(f)
    if len(gpx.tracks) == 0:
        return None
    return gpx


def get_gpx_info(gpx_file):
    gpx = open_gpx_file(gpx_file)
    if gpx is None:
        return None

    gpx_data = {
        'name': gpx.tracks[0].name,
        'segments': []
    }
    max_speed = 0
    start = 0
    map_data = []
    weather_data = []
    segments_nb = len(gpx.tracks[0].segments)
    prev_seg_last_point = None
    no_stopped_time = timedelta(seconds=0)
    stopped_time_btwn_seg = no_stopped_time

    for segment_idx, segment in enumerate(gpx.tracks[0].segments):
        segment_start = 0
        segment_points_nb = len(segment.points)
        for point_idx, point in enumerate(segment.points):
            if point_idx == 0:
                # first gpx point => get weather
                if start == 0:
                    start = point.time
                    weather_data.append(get_weather(point))

                # if a previous segment exists, calculate stopped time between
                # the two segments
                if prev_seg_last_point:
                    stopped_time_btwn_seg = point.time - prev_seg_last_point

            # last segment point
            if point_idx == (segment_points_nb - 1):
                prev_seg_last_point = point.time

                # last gpx point => get weather
                if segment_idx == (segments_nb - 1):
                    weather_data.append(get_weather(point))
            map_data.append([
                point.longitude, point.latitude
            ])
        segment_max_speed = (segment.get_moving_data().max_speed
                             if segment.get_moving_data().max_speed
                             else 0)

        if segment_max_speed > max_speed:
            max_speed = segment_max_speed

        segment_data = get_gpx_data(
            segment, segment_max_speed, segment_start, no_stopped_time
        )
        segment_data['idx'] = segment_idx
        gpx_data['segments'].append(segment_data)

    full_gpx_data = get_gpx_data(gpx, max_speed, start, stopped_time_btwn_seg)
    gpx_data = {**gpx_data, **full_gpx_data}
    bounds = gpx.get_bounds()
    gpx_data['bounds'] = [
        bounds.min_latitude,
        bounds.min_longitude,
        bounds.max_latitude,
        bounds.max_longitude
    ]

    return gpx_data, map_data, weather_data
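

# The returned tuple is consumed by process_one_gpx_file below:
# gpx_data holds the totals from get_gpx_data plus 'name', 'segments' and
# 'bounds'; map_data is a list of [longitude, latitude] pairs for the static
# map; weather_data holds the weather at the first and last points.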


def get_chart_data(gpx_file):
    gpx = open_gpx_file(gpx_file)
    if gpx is None:
        return None

    chart_data = []
    first_point = None
    previous_point = None
    previous_distance = 0

    for segment_idx, segment in enumerate(gpx.tracks[0].segments):
        for point_idx, point in enumerate(segment.points):
            if segment_idx == 0 and point_idx == 0:
                first_point = point
            # use 3D distance when elevation data is available on both points
            distance = (point.distance_3d(previous_point)
                        if (point.elevation
                            and previous_point
                            and previous_point.elevation)
                        else point.distance_2d(previous_point)
                        )
            distance = 0 if distance is None else distance
            distance += previous_distance
            speed = (round((segment.get_speed(point_idx) / 1000) * 3600, 2)
                     if segment.get_speed(point_idx) is not None
                     else 0)
            chart_data.append({
                'distance': (round(distance / 1000, 2)
                             if distance is not None else 0),
                'duration': point.time_difference(first_point),
                'elevation': (round(point.elevation, 1)
                              if point.elevation is not None else 0),
                'speed': speed,
                'time': point.time,
            })
            previous_point = point
            previous_distance = distance

    return chart_data


def get_file_path(dir_path, filename):
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    file_path = os.path.join(dir_path, filename)
    return file_path


def get_new_file_path(
        auth_user_id, activity_date, sport, old_filename=None, extension=None
):
    if not extension:
        extension = f".{old_filename.rsplit('.', 1)[1].lower()}"
    fd, new_filename = tempfile.mkstemp(
        prefix=f'{activity_date}_{sport}_',
        suffix=extension
    )
    # mkstemp returns an open file descriptor; close it, since only the
    # unique name is needed here
    os.close(fd)
    dir_path = os.path.join('activities', str(auth_user_id))
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    file_path = os.path.join(dir_path,
                             new_filename.split('/')[-1])
    return file_path
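

# Resulting path sketch (the random part comes from mkstemp; the date, sport
# and user id are illustrative assumptions):
#     'activities/1/2018-06-11 16:00:00_Cycling_ab12cd34.gpx'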
def generate_map(map_filepath, map_data):
    # 400x225 px static map, with 10 px padding around the track line
    m = StaticMap(400, 225, 10)
    line = Line(map_data, '#3388FF', 4)
    m.add_line(line)
    image = m.render()
    image.save(map_filepath)


def get_map_hash(map_filepath):
    """
    An md5 hash is used as the map id, instead of the activity id, to
    retrieve the map image (maps are sensitive data)
    """
    md5 = hashlib.md5()
    absolute_map_filepath = get_absolute_file_path(map_filepath)
    with open(absolute_map_filepath, 'rb') as f:
        # hash the file in chunks to avoid loading it fully into memory
        for chunk in iter(lambda: f.read(128 * md5.block_size), b''):
            md5.update(chunk)
    return md5.hexdigest()


def process_one_gpx_file(params, filename):
    try:
        gpx_data, map_data, weather_data = get_gpx_info(params['file_path'])
        auth_user_id = params['user'].id
        new_filepath = get_new_file_path(
            auth_user_id=auth_user_id,
            activity_date=gpx_data['start'],
            old_filename=filename,
            sport=params['sport_label']
        )
        absolute_gpx_filepath = get_absolute_file_path(new_filepath)
        os.rename(params['file_path'], absolute_gpx_filepath)
        gpx_data['filename'] = new_filepath

        map_filepath = get_new_file_path(
            auth_user_id=auth_user_id,
            activity_date=gpx_data['start'],
            extension='.png',
            sport=params['sport_label']
        )
        absolute_map_filepath = get_absolute_file_path(map_filepath)
        generate_map(absolute_map_filepath, map_data)
    except (gpxpy.gpx.GPXXMLSyntaxException, TypeError) as e:
        raise ActivityException('error', 'Error during gpx file parsing.', e)
    except Exception as e:
        raise ActivityException('error', 'Error during gpx processing.', e)

    try:
        new_activity = create_activity(
            params['user'], params['activity_data'], gpx_data)
        new_activity.map = map_filepath
        new_activity.map_id = get_map_hash(map_filepath)
        new_activity.weather_start = weather_data[0]
        new_activity.weather_end = weather_data[1]
        db.session.add(new_activity)
        db.session.flush()

        for segment_data in gpx_data['segments']:
            new_segment = create_segment(new_activity.id, segment_data)
            db.session.add(new_segment)
        db.session.commit()
        return new_activity
    except (exc.IntegrityError, ValueError) as e:
        raise ActivityException('fail', 'Error during activity save.', e)


def process_zip_archive(common_params, extract_dir):
    with zipfile.ZipFile(common_params['file_path'], "r") as zip_ref:
        zip_ref.extractall(extract_dir)

    new_activities = []
    gpx_files_limit = os.getenv('REACT_APP_GPX_LIMIT_IMPORT')
    if gpx_files_limit and gpx_files_limit.isdigit():
        gpx_files_limit = int(gpx_files_limit)
    else:
        gpx_files_limit = 10
        appLog.error('GPX limit not configured, set to 10.')
    gpx_files_ok = 0

    for gpx_file in os.listdir(extract_dir):
        if ('.' in gpx_file and gpx_file.rsplit('.', 1)[1].lower()
                in current_app.config.get('ACTIVITY_ALLOWED_EXTENSIONS')):
            gpx_files_ok += 1
            if gpx_files_ok > gpx_files_limit:
                break
            file_path = os.path.join(extract_dir, gpx_file)
            # 'params' aliases common_params: only 'file_path' changes
            # between files
            params = common_params
            params['file_path'] = file_path
            new_activity = process_one_gpx_file(params, gpx_file)
            new_activities.append(new_activity)

    return new_activities
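

# The number of gpx files imported from one archive is capped by the
# REACT_APP_GPX_LIMIT_IMPORT environment variable; when it is unset or not
# numeric, the limit falls back to 10, e.g. (assumed deployment value):
#     REACT_APP_GPX_LIMIT_IMPORT=10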


def process_files(auth_user_id, activity_data, activity_file, folders):
    filename = secure_filename(activity_file.filename)
    extension = f".{filename.rsplit('.', 1)[1].lower()}"
    file_path = get_file_path(folders['tmp_dir'], filename)
    sport = Sport.query.filter_by(id=activity_data.get('sport_id')).first()
    if not sport:
        raise ActivityException(
            'error',
            f"Sport id: {activity_data.get('sport_id')} does not exist",
            None
        )
    user = User.query.filter_by(id=auth_user_id).first()

    common_params = {
        'user': user,
        'activity_data': activity_data,
        'file_path': file_path,
        'sport_label': sport.label,
    }

    try:
        activity_file.save(file_path)
    except Exception as e:
        raise ActivityException('error', 'Error during activity file save.', e)

    if extension == ".gpx":
        return [process_one_gpx_file(common_params, filename)]
    else:
        return process_zip_archive(common_params, folders['extract_dir'])
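

# Illustrative call (a sketch, assuming a Flask request context where
# 'activity_file' is the uploaded werkzeug FileStorage and the folders were
# created beforehand; ids and paths are assumptions):
#     new_activities = process_files(
#         auth_user_id=1,
#         activity_data={'sport_id': 1},
#         activity_file=request.files['file'],
#         folders={'tmp_dir': '/tmp/fittrackee',
#                  'extract_dir': '/tmp/fittrackee/extract'},
#     )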