Merge branch 'master' into v0.2

Sam 2019-01-06 16:03:52 +01:00
commit 33dfe99942
8 changed files with 390 additions and 58 deletions

View File

@ -50,6 +50,9 @@ lint-react:
migrate-db:
$(FLASK) db migrate --directory $(MIGRATIONS)
recalculate:
$(FLASK) recalculate
run:
$(MAKE) P="run-server run-client" make-p

View File

@ -14,6 +14,7 @@ staticmap = "*"
pytz = "*"
python-forecastio = "*"
gunicorn = "*"
tqdm = "*"
[dev-packages]
pytest = "*"

View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "14427cb3567526a238489ee59e347d0e0ebb51ec01a8aa289c8230464c8e6a94"
"sha256": "cff03ac2e04f1129adfe93104ed81927a99c33dccd3009ff412ebf6694931438"
},
"pipfile-spec": 6,
"requires": {
@ -211,38 +211,38 @@
},
"pillow": {
"hashes": [
"sha256:00203f406818c3f45d47bb8fe7e67d3feddb8dcbbd45a289a1de7dd789226360",
"sha256:0616f800f348664e694dddb0b0c88d26761dd5e9f34e1ed7b7a7d2da14b40cb7",
"sha256:1f7908aab90c92ad85af9d2fec5fc79456a89b3adcc26314d2cde0e238bd789e",
"sha256:2ea3517cd5779843de8a759c2349a3cd8d3893e03ab47053b66d5ec6f8bc4f93",
"sha256:48a9f0538c91fc136b3a576bee0e7cd174773dc9920b310c21dcb5519722e82c",
"sha256:5280ebc42641a1283b7b1f2c20e5b936692198b9dd9995527c18b794850be1a8",
"sha256:5e34e4b5764af65551647f5cc67cf5198c1d05621781d5173b342e5e55bf023b",
"sha256:63b120421ab85cad909792583f83b6ca3584610c2fe70751e23f606a3c2e87f0",
"sha256:696b5e0109fe368d0057f484e2e91717b49a03f1e310f857f133a4acec9f91dd",
"sha256:870ed021a42b1b02b5fe4a739ea735f671a84128c0a666c705db2cb9abd528eb",
"sha256:916da1c19e4012d06a372127d7140dae894806fad67ef44330e5600d77833581",
"sha256:9303a289fa0811e1c6abd9ddebfc770556d7c3311cb2b32eff72164ddc49bc64",
"sha256:9577888ecc0ad7d06c3746afaba339c94d62b59da16f7a5d1cff9e491f23dace",
"sha256:987e1c94a33c93d9b209315bfda9faa54b8edfce6438a1e93ae866ba20de5956",
"sha256:99a3bbdbb844f4fb5d6dd59fac836a40749781c1fa63c563bc216c27aef63f60",
"sha256:99db8dc3097ceafbcff9cb2bff384b974795edeb11d167d391a02c7bfeeb6e16",
"sha256:a5a96cf49eb580756a44ecf12949e52f211e20bffbf5a95760ac14b1e499cd37",
"sha256:aa6ca3eb56704cdc0d876fc6047ffd5ee960caad52452fbee0f99908a141a0ae",
"sha256:aade5e66795c94e4a2b2624affeea8979648d1b0ae3fcee17e74e2c647fc4a8a",
"sha256:b78905860336c1d292409e3df6ad39cc1f1c7f0964e66844bbc2ebfca434d073",
"sha256:b92f521cdc4e4a3041cc343625b699f20b0b5f976793fb45681aac1efda565f8",
"sha256:bfde84bbd6ae5f782206d454b67b7ee8f7f818c29b99fd02bf022fd33bab14cb",
"sha256:c2b62d3df80e694c0e4a0ed47754c9480521e25642251b3ab1dff050a4e60409",
"sha256:c5e2be6c263b64f6f7656e23e18a4a9980cffc671442795682e8c4e4f815dd9f",
"sha256:c99aa3c63104e0818ec566f8ff3942fb7c7a8f35f9912cb63fd8e12318b214b2",
"sha256:dae06620d3978da346375ebf88b9e2dd7d151335ba668c995aea9ed07af7add4",
"sha256:db5499d0710823fa4fb88206050d46544e8f0e0136a9a5f5570b026584c8fd74",
"sha256:f36baafd82119c4a114b9518202f2a983819101dcc14b26e43fc12cbefdce00e",
"sha256:f52b79c8796d81391ab295b04e520bda6feed54d54931708872e8f9ae9db0ea1",
"sha256:ff8cff01582fa1a7e533cb97f628531c4014af4b5f38e33cdcfe5eec29b6d888"
"sha256:0cd42fe2d99ec6ce23aaf00947a7b7956ad2ed4b1695fd37545c3b8eae06d95a",
"sha256:137bed8972089d65da63fb79b4949b0f2b99e9a58f1b494e82be43ba8b0f4226",
"sha256:14eb2b2e4f2a14f5c89fd0edf55c5af0bf1a40fdf3838d81867f26f131cd557d",
"sha256:1fc43ce8c4fa3754222cd6831d599ad17ca2fc9868d2fb52f4e5362dfbfaf379",
"sha256:26dfeee23a86dad6277a63d18f61f53b957cb2cd3506dbbd74b88ba2fa65b3b1",
"sha256:2e0e582942e025cc58f669499a8e0bffde5bcc8d42b65729f294c1dac54e4672",
"sha256:3bb8dd3ce101dd8b0b37eaae924a5bb93abb6ffdd034bf68a066a808e11768ab",
"sha256:3f07da3874f0b085421f1d4f979785131aa9d497501d8610d82f7378b33858f8",
"sha256:429b2b5ae5f57f8fd9ec2e012c1e7b342ff10f1a8977dc291976b9a3b4c096e1",
"sha256:4a000fdd89d77b6b675de27e1ab91c6fba517c08f19ee83e6716b78930634e04",
"sha256:4ccbe7cce6156391a3ecf447c79a7d4a1a0ecd3de79bdec9ca5e4f7242a306d1",
"sha256:4d08034196db41acb7392e4fccfc0448e7a87192c41d3011ad4093eac2c31ffd",
"sha256:6b202b1cb524bc76ed52a7eb0314f4b0a0497c7cceb9a93539b5a25800e1f2b6",
"sha256:8563b56fa7c34f1606848c2143ea67d27cf225b9726a1b041c3d27cf85e46edc",
"sha256:86d7421e8803d7bae2e594765c378a867b629d46b32fbfe5ed9fd95b30989feb",
"sha256:8d4bddedcb4ab99131d9705a75720efc48b3d006122dae1a4cc329496ac47c9a",
"sha256:a4929c6de9590635c34533609402c9da12b22bfc2feb8c0c4f38c39bab48a9ad",
"sha256:b0736e21798448cee3e663c0df7a6dfa83d805b3f3a45e67f7457a2f019e5fca",
"sha256:b669acba91d47395de84c9ca52a7ad393b487e5ae2e20b9b2790b22a57d479fa",
"sha256:bba993443921f2d077195b425a3283357f52b07807d53704610c1249d20b183a",
"sha256:bdf706a93d00547c9443b2654ae424fd54d5dece4bc4333e7035740aeb7a7cea",
"sha256:c5aa93e55175b9cde95279ccd03c93d218976b376480222d37be41d2c9c54510",
"sha256:cc11fd997d8ad71bb0412e983b711e49639c2ddba9b9dce04d4bdab575fe5f84",
"sha256:d584f1c33995c3dc16a35e30ef43e0881fa0d085f0fef29cebf154ffb5643363",
"sha256:d88f54bdefb7ddccb68efdd710d689aa6a09b875cc3e44b7e81ef54e0751e3a7",
"sha256:de0d323072be72fa4d74f4e013cd594e3f8ee03b2e0eac5876a3249fa076ef7b",
"sha256:f139c963c6679d236b2c45369524338eabd36a853fe23abd39ba246ab0a75aec",
"sha256:f41c0bf667c4c1c30b873eaa8d6bb894f6d721b3e38e9c993bddd1263c02fb1f",
"sha256:fbd0ea468b4ec04270533bf5206f1cd57746fcf226520bb133318fa276de2644",
"sha256:fe2d2850521c467c915ff0a6e27dc64c3c04c2f66612e0072672bd1bd4854b61"
],
"version": "==5.3.0"
"version": "==5.4.0"
},
"psycopg2-binary": {
"hashes": [
@ -356,6 +356,14 @@
"index": "pypi",
"version": "==0.5.4"
},
"tqdm": {
"hashes": [
"sha256:3c4d4a5a41ef162dd61f1edb86b0e1c7859054ab656b2e7c7b77e7fbf6d9f392",
"sha256:5b4d5549984503050883bc126280b386f5f4ca87e6c023c5d015655ad75bdebb"
],
"index": "pypi",
"version": "==4.28.1"
},
"urllib3": {
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",

View File

@ -119,8 +119,33 @@ def create_segment(activity_id, segment_data):
return new_segment
def update_activity(activity):
"""
Note: only gpx_data is updated for now (the gpx file is NOT modified)
In a future version, map_data and weather_data will be updated
(case of a modified gpx file, see issue #7)
"""
gpx_data, _, _ = get_gpx_info(get_absolute_file_path(activity.gpx))
updated_activity = update_activity_data(activity, gpx_data)
updated_activity.duration = gpx_data['duration']
updated_activity.distance = gpx_data['distance']
db.session.flush()
for segment_idx, segment in enumerate(updated_activity.segments):
segment_data = gpx_data['segments'][segment_idx]
updated_segment = update_activity_data(segment, segment_data)
updated_segment.duration = segment_data['duration']
updated_segment.distance = segment_data['distance']
db.session.flush()
return updated_activity
def edit_activity(activity, activity_data, auth_user_id):
user = User.query.filter_by(id=auth_user_id).first()
if activity_data.get('refresh'):
activity = update_activity(activity)
if activity_data.get('sport_id'):
activity.sport_id = activity_data.get('sport_id')
if activity_data.get('title'):
@ -149,11 +174,11 @@ def edit_activity(activity, activity_data, auth_user_id):
return activity
def get_gpx_data(parsed_gpx, max_speed, start):
def get_gpx_data(parsed_gpx, max_speed, start, stopped_time_btwn_seg):
gpx_data = {'max_speed': (max_speed / 1000) * 3600, 'start': start}
duration = parsed_gpx.get_duration()
gpx_data['duration'] = timedelta(seconds=duration)
gpx_data['duration'] = timedelta(seconds=duration) + stopped_time_btwn_seg
ele = parsed_gpx.get_elevation_extremes()
gpx_data['elevation_max'] = ele.maximum
@ -165,7 +190,8 @@ def get_gpx_data(parsed_gpx, max_speed, start):
mv = parsed_gpx.get_moving_data()
gpx_data['moving_time'] = timedelta(seconds=mv.moving_time)
gpx_data['stop_time'] = timedelta(seconds=mv.stopped_time)
gpx_data['stop_time'] = (timedelta(seconds=mv.stopped_time)
+ stopped_time_btwn_seg)
distance = mv.moving_distance + mv.stopped_distance
gpx_data['distance'] = distance / 1000
@ -183,7 +209,7 @@ def open_gpx_file(gpx_file):
return gpx
def get_gpx_info(gpx_file):
def get_gpx_info(gpx_file, update_map_data=True, update_weather_data=True):
gpx = open_gpx_file(gpx_file)
if gpx is None:
return None
@ -196,16 +222,36 @@ def get_gpx_info(gpx_file):
start = 0
map_data = []
weather_data = []
segments_nb = len(gpx.tracks[0].segments)
prev_seg_last_point = None
no_stopped_time = timedelta(seconds=0)
stopped_time_btwn_seg = no_stopped_time
for segment_idx, segment in enumerate(gpx.tracks[0].segments):
segment_start = 0
segment_points_nb = len(segment.points)
for point_idx, point in enumerate(segment.points):
if point_idx == 0 and start == 0:
if point_idx == 0:
# first gpx point => get weather
if start == 0:
start = point.time
if update_weather_data:
weather_data.append(get_weather(point))
if (point_idx == (len(segment.points) - 1) and
segment_idx == (len(gpx.tracks[0].segments) - 1)):
# if a previous segment exists, calculate stopped time between
# the two segments
if prev_seg_last_point:
stopped_time_btwn_seg = point.time - prev_seg_last_point
# last segment point
if point_idx == (segment_points_nb - 1):
prev_seg_last_point = point.time
# last gpx point => get weather
if segment_idx == (segments_nb - 1) and update_weather_data:
weather_data.append(get_weather(point))
if update_map_data:
map_data.append([
point.longitude, point.latitude
])
@ -217,13 +263,15 @@ def get_gpx_info(gpx_file):
max_speed = segment_max_speed
segment_data = get_gpx_data(
segment, segment_max_speed, segment_start
segment, segment_max_speed, segment_start, no_stopped_time
)
segment_data['idx'] = segment_idx
gpx_data['segments'].append(segment_data)
full_gpx_data = get_gpx_data(gpx, max_speed, start)
full_gpx_data = get_gpx_data(gpx, max_speed, start, stopped_time_btwn_seg)
gpx_data = {**gpx_data, **full_gpx_data}
if update_map_data:
bounds = gpx.get_bounds()
gpx_data['bounds'] = [
bounds.min_latitude,
@ -345,7 +393,7 @@ def process_one_gpx_file(params, filename):
except (gpxpy.gpx.GPXXMLSyntaxException, TypeError) as e:
raise ActivityException('error', 'Error during gpx file parsing.', e)
except Exception as e:
raise ActivityException('error', 'Error during activity file save.', e)
raise ActivityException('error', 'Error during gpx processing.', e)
try:
new_activity = create_activity(
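For illustration (not part of this diff): with the two-segment fixture added below, the last point of the first segment is recorded at 12:46:15 and the first point of the second segment at 12:46:30, so get_gpx_info measures a 15-second stop between the segments and get_gpx_data adds it to the total duration. A minimal sketch of that computation, using timestamps copied from the fixture:

from datetime import datetime, timedelta

# timestamps taken from the gpx_file_with_segments fixture further down
prev_seg_last_point = datetime(2018, 3, 13, 12, 46, 15)   # last point of segment 1
next_seg_first_point = datetime(2018, 3, 13, 12, 46, 30)  # first point of segment 2

stopped_time_btwn_seg = next_seg_first_point - prev_seg_last_point
assert stopped_time_btwn_seg == timedelta(seconds=15)

# the tests below assert this as pauses == '0:00:15', with
# moving == '0:03:55' and duration == '0:04:10' (moving time + pause)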

View File

@ -468,3 +468,120 @@ def gpx_file_invalid_xml():
'<gpx xmlns:gpxdata="http://www.cluetrust.com/XML/GPXDATA/1/0" xmlns:gpxtpx="http://www.garmin.com/xmlschemas/TrackPointExtension/v1" xmlns:gpxext="http://www.garmin.com/xmlschemas/GpxExtensions/v3" xmlns="http://www.topografix.com/GPX/1/1">' # noqa
' <metadata/>'
)
@pytest.fixture()
def gpx_file_with_segments():
return (
'<?xml version=\'1.0\' encoding=\'UTF-8\'?>'
'<gpx xmlns:gpxdata="http://www.cluetrust.com/XML/GPXDATA/1/0" xmlns:gpxtpx="http://www.garmin.com/xmlschemas/TrackPointExtension/v1" xmlns:gpxext="http://www.garmin.com/xmlschemas/GpxExtensions/v3" xmlns="http://www.topografix.com/GPX/1/1">' # noqa
' <metadata/>'
' <trk>'
' <name>just an activity</name>'
' <trkseg>'
' <trkpt lat="44.68095" lon="6.07367">'
' <ele>998</ele>'
' <time>2018-03-13T12:44:45Z</time>'
' </trkpt>'
' <trkpt lat="44.68091" lon="6.07367">'
' <ele>998</ele>'
' <time>2018-03-13T12:44:50Z</time>'
' </trkpt>'
' <trkpt lat="44.6808" lon="6.07364">'
' <ele>994</ele>'
' <time>2018-03-13T12:45:00Z</time>'
' </trkpt>'
' <trkpt lat="44.68075" lon="6.07364">'
' <ele>994</ele>'
' <time>2018-03-13T12:45:05Z</time>'
' </trkpt>'
' <trkpt lat="44.68071" lon="6.07364">'
' <ele>994</ele>'
' <time>2018-03-13T12:45:10Z</time>'
' </trkpt>'
' <trkpt lat="44.68049" lon="6.07361">'
' <ele>993</ele>'
' <time>2018-03-13T12:45:30Z</time>'
' </trkpt>'
' <trkpt lat="44.68019" lon="6.07356">'
' <ele>992</ele>'
' <time>2018-03-13T12:45:55Z</time>'
' </trkpt>'
' <trkpt lat="44.68014" lon="6.07355">'
' <ele>992</ele>'
' <time>2018-03-13T12:46:00Z</time>'
' </trkpt>'
' <trkpt lat="44.67995" lon="6.07358">'
' <ele>987</ele>'
' <time>2018-03-13T12:46:15Z</time>'
' </trkpt>'
' </trkseg>'
' <trkseg>'
' <trkpt lat="44.67977" lon="6.07364">'
' <ele>987</ele>'
' <time>2018-03-13T12:46:30Z</time>'
' </trkpt>'
' <trkpt lat="44.67972" lon="6.07367">'
' <ele>987</ele>'
' <time>2018-03-13T12:46:35Z</time>'
' </trkpt>'
' <trkpt lat="44.67966" lon="6.07368">'
' <ele>987</ele>'
' <time>2018-03-13T12:46:40Z</time>'
' </trkpt>'
' <trkpt lat="44.67961" lon="6.0737">'
' <ele>986</ele>'
' <time>2018-03-13T12:46:45Z</time>'
' </trkpt>'
' <trkpt lat="44.67938" lon="6.07377">'
' <ele>986</ele>'
' <time>2018-03-13T12:47:05Z</time>'
' </trkpt>'
' <trkpt lat="44.67933" lon="6.07381">'
' <ele>986</ele>'
' <time>2018-03-13T12:47:10Z</time>'
' </trkpt>'
' <trkpt lat="44.67922" lon="6.07385">'
' <ele>985</ele>'
' <time>2018-03-13T12:47:20Z</time>'
' </trkpt>'
' <trkpt lat="44.67911" lon="6.0739">'
' <ele>980</ele>'
' <time>2018-03-13T12:47:30Z</time>'
' </trkpt>'
' <trkpt lat="44.679" lon="6.07399">'
' <ele>980</ele>'
' <time>2018-03-13T12:47:40Z</time>'
' </trkpt>'
' <trkpt lat="44.67896" lon="6.07402">'
' <ele>980</ele>'
' <time>2018-03-13T12:47:45Z</time>'
' </trkpt>'
' <trkpt lat="44.67884" lon="6.07408">'
' <ele>979</ele>'
' <time>2018-03-13T12:47:55Z</time>'
' </trkpt>'
' <trkpt lat="44.67863" lon="6.07423">'
' <ele>981</ele>'
' <time>2018-03-13T12:48:15Z</time>'
' </trkpt>'
' <trkpt lat="44.67858" lon="6.07425">'
' <ele>980</ele>'
' <time>2018-03-13T12:48:20Z</time>'
' </trkpt>'
' <trkpt lat="44.67842" lon="6.07434">'
' <ele>979</ele>'
' <time>2018-03-13T12:48:35Z</time>'
' </trkpt>'
' <trkpt lat="44.67837" lon="6.07435">'
' <ele>979</ele>'
' <time>2018-03-13T12:48:40Z</time>'
' </trkpt>'
' <trkpt lat="44.67822" lon="6.07442">'
' <ele>975</ele>'
' <time>2018-03-13T12:48:55Z</time>'
' </trkpt>'
' </trkseg>'
' </trk>'
'</gpx>'
)

View File

@ -66,6 +66,75 @@ def assert_activity_data_with_gpx(data):
assert records[3]['value'] == 4.61
def assert_activity_data_with_gpx_segments(data):
assert 'creation_date' in data['data']['activities'][0]
assert 'Tue, 13 Mar 2018 12:44:45 GMT' == data['data']['activities'][0]['activity_date'] # noqa
assert 1 == data['data']['activities'][0]['user_id']
assert 1 == data['data']['activities'][0]['sport_id']
assert '0:04:10' == data['data']['activities'][0]['duration']
assert data['data']['activities'][0]['ascent'] == 0.4
assert data['data']['activities'][0]['ave_speed'] == 4.59
assert data['data']['activities'][0]['descent'] == 23.4
assert data['data']['activities'][0]['distance'] == 0.3
assert data['data']['activities'][0]['max_alt'] == 998.0
assert data['data']['activities'][0]['max_speed'] is None # not enough points # noqa
assert data['data']['activities'][0]['min_alt'] == 975.0
assert data['data']['activities'][0]['moving'] == '0:03:55'
assert data['data']['activities'][0]['pauses'] == '0:00:15'
assert data['data']['activities'][0]['with_gpx'] is True
assert data['data']['activities'][0]['map'] is not None
assert data['data']['activities'][0]['weather_start'] is None
assert data['data']['activities'][0]['weather_end'] is None
assert data['data']['activities'][0]['notes'] is None
assert len(data['data']['activities'][0]['segments']) == 2
segment = data['data']['activities'][0]['segments'][0]
assert segment['activity_id'] == 1
assert segment['segment_id'] == 0
assert segment['duration'] == '0:01:30'
assert segment['ascent'] is None
assert segment['ave_speed'] == 4.53
assert segment['descent'] == 11.0
assert segment['distance'] == 0.113
assert segment['max_alt'] == 998.0
assert segment['max_speed'] is None
assert segment['min_alt'] == 987.0
assert segment['moving'] == '0:01:30'
assert segment['pauses'] is None
segment = data['data']['activities'][0]['segments'][1]
assert segment['activity_id'] == 1
assert segment['segment_id'] == 1
assert segment['duration'] == '0:02:25'
assert segment['ascent'] == 0.4
assert segment['ave_speed'] == 4.62
assert segment['descent'] == 12.4
assert segment['distance'] == 0.186
assert segment['max_alt'] == 987.0
assert segment['max_speed'] is None
assert segment['min_alt'] == 975.0
assert segment['moving'] == '0:02:25'
assert segment['pauses'] is None
records = data['data']['activities'][0]['records']
assert len(records) == 3
assert records[0]['sport_id'] == 1
assert records[0]['activity_id'] == 1
assert records[0]['record_type'] == 'LD'
assert records[0]['activity_date'] == 'Tue, 13 Mar 2018 12:44:45 GMT'
assert records[0]['value'] == '0:03:55'
assert records[1]['sport_id'] == 1
assert records[1]['activity_id'] == 1
assert records[1]['record_type'] == 'FD'
assert records[1]['activity_date'] == 'Tue, 13 Mar 2018 12:44:45 GMT'
assert records[1]['value'] == 0.3
assert records[2]['sport_id'] == 1
assert records[2]['activity_id'] == 1
assert records[2]['record_type'] == 'AS'
assert records[2]['activity_date'] == 'Tue, 13 Mar 2018 12:44:45 GMT'
assert records[2]['value'] == 4.59
def assert_activity_data_wo_gpx(data):
assert 'creation_date' in data['data']['activities'][0]
assert data['data']['activities'][0]['activity_date'] == 'Tue, 15 May 2018 14:05:00 GMT' # noqa
@ -146,7 +215,7 @@ def test_add_an_activity_gpx(app, user_1, sport_1_cycling, gpx_file):
assert_activity_data_with_gpx(data)
def test_get_an_activity_with_gpx(app, user_1, sport_1_cycling, gpx_file):
def activity_assertion(app, user_1, sport_1_cycling, gpx_file, with_segments):
client = app.test_client()
resp_login = client.post(
'/api/auth/login',
@ -183,6 +252,9 @@ def test_get_an_activity_with_gpx(app, user_1, sport_1_cycling, gpx_file):
assert 'success' in data['status']
assert len(data['data']['activities']) == 1
assert 'just an activity' == data['data']['activities'][0]['title']
if with_segments:
assert_activity_data_with_gpx_segments(data)
else:
assert_activity_data_with_gpx(data)
map_id = data['data']['activities'][0]['map']
@ -236,6 +308,16 @@ def test_get_an_activity_with_gpx(app, user_1, sport_1_cycling, gpx_file):
assert data['message'] == 'internal error.'
def test_get_an_activity_with_gpx(app, user_1, sport_1_cycling, gpx_file):
return activity_assertion(app, user_1, sport_1_cycling, gpx_file, False)
def test_get_an_activity_with_gpx_segments(
app, user_1, sport_1_cycling, gpx_file_with_segments):
return activity_assertion(
app, user_1, sport_1_cycling, gpx_file_with_segments, True)
def test_get_an_activity_with_gpx_different_user(
app, user_1, user_2, sport_1_cycling, gpx_file):
client = app.test_client()

View File

@ -1,6 +1,8 @@
import json
from io import BytesIO
from fittrackee_api.activities.models import Activity
def assert_activity_data_with_gpx(data):
assert 'creation_date' in data['data']['activities'][0]
@ -933,3 +935,56 @@ def test_edit_an_activity_no_activity(
assert response.status_code == 404
assert 'not found' in data['status']
assert len(data['data']['activities']) == 0
def test_refresh_an_activity_with_gpx(
app, user_1, sport_1_cycling, sport_2_running, gpx_file
):
client = app.test_client()
resp_login = client.post(
'/api/auth/login',
data=json.dumps(dict(
email='test@test.com',
password='12345678'
)),
content_type='application/json'
)
client.post(
'/api/activities',
data=dict(
file=(BytesIO(str.encode(gpx_file)), 'example.gpx'),
data='{"sport_id": 1}'
),
headers=dict(
content_type='multipart/form-data',
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
# Edit some activity data
activity = Activity.query.filter_by(id=1).first()
activity.ascent = 1000
activity.min_alt = -100
response = client.patch(
'/api/activities/1',
content_type='application/json',
data=json.dumps(dict(
refresh=True,
)),
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
assert response.status_code == 200
assert 'success' in data['status']
assert len(data['data']['activities']) == 1
assert 1 == data['data']['activities'][0]['sport_id']
assert 0.4 == data['data']['activities'][0]['ascent']
assert 975.0 == data['data']['activities'][0]['min_alt']
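As a client-side illustration (not part of this diff), the refresh exercised above could hypothetically be triggered over HTTP as well, assuming a locally running instance and a valid auth token:

import requests  # used here only for the example

token = '<auth token returned by /api/auth/login>'
response = requests.patch(
    'http://localhost:5000/api/activities/1',   # hypothetical local instance
    json={'refresh': True},
    headers={'Authorization': f'Bearer {token}'},
)
activity = response.json()['data']['activities'][0]
print(activity['ascent'], activity['min_alt'])  # values recomputed from the gpx file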

View File

@ -1,8 +1,10 @@
import shutil
from fittrackee_api import create_app, db
from fittrackee_api.activities.models import Sport
from fittrackee_api.activities.models import Activity, Sport
from fittrackee_api.activities.utils import update_activity
from fittrackee_api.users.models import User
from tqdm import tqdm
app = create_app()
@ -56,5 +58,21 @@ def initdata():
print('Initial data stored in database.')
@app.cli.command()
def recalculate():
print("Starting activities data refresh")
activities = Activity.query.filter(Activity.gpx != None).order_by( # noqa
Activity.activity_date.asc()
).all()
if len(activities) == 0:
print('➡️ no activities to upgrade.')
return None
pbar = tqdm(activities)
for activity in pbar:
update_activity(activity)
pbar.set_postfix(activity_id=activity.id)
db.session.commit()
if __name__ == '__main__':
app.run()
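The recalculate command is registered with @app.cli.command(), so it runs through the Flask CLI (the new Makefile recalculate target wraps $(FLASK) recalculate). A hypothetical sketch of invoking it via Flask's CLI test runner, assuming this module is importable as server (the file name is not shown in this diff):

from server import app  # hypothetical module name

runner = app.test_cli_runner()               # Flask's built-in Click test runner
result = runner.invoke(args=['recalculate'])
print(result.output)                         # e.g. "Starting activities data refresh"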