API: generate a static map for activities w/ gpx
parent b07b20c5e8
commit 2c5c7f609a

mpwo_api/migrations/versions/5a42db64e872_.py (new file, 30 lines added)
@@ -0,0 +1,30 @@
+"""empty message
+
+Revision ID: 5a42db64e872
+Revises: 92adde6ac0d0
+Create Date: 2018-05-30 10:52:33.433687
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '5a42db64e872'
+down_revision = '92adde6ac0d0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('activities', sa.Column('map', sa.String(length=255), nullable=True))
+    op.create_unique_constraint(None, 'sports', ['img'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(None, 'sports', type_='unique')
+    op.drop_column('activities', 'map')
+    # ### end Alembic commands ###
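The migration adds a nullable activities.map column that will hold the path of the generated PNG. As a hedged sketch (not part of the commit), the effect can be checked after upgrading with SQLAlchemy's inspector; the database URL below is a placeholder:

from sqlalchemy import create_engine, inspect

# Hedged sketch: after the migration runs, the activities table should
# expose the new nullable 'map' column. Connection URL is illustrative.
engine = create_engine('postgresql://user:password@localhost/mpwo')
columns = {c['name'] for c in inspect(engine).get_columns('activities')}
assert 'map' in columns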
@@ -280,13 +280,8 @@ def delete_activity(auth_user_id, activity_id):
     try:
         activity = Activity.query.filter_by(id=activity_id).first()
         if activity:
-            gpx_filepath = activity.gpx
             db.session.delete(activity)
             db.session.commit()
-
-            if gpx_filepath:
-                os.remove(gpx_filepath)
-
             response_object = {
                 'status': 'no content'
            }
@@ -1,4 +1,5 @@
 import datetime
+import os
 
 from mpwo_api import db
 from sqlalchemy.dialects import postgresql
@@ -133,6 +134,7 @@ class Activity(db.Model):
     max_speed = db.Column(db.Numeric(6, 2), nullable=True)  # km/h
     ave_speed = db.Column(db.Numeric(6, 2), nullable=True)  # km/h
     bounds = db.Column(postgresql.ARRAY(db.Float), nullable=True)
+    map = db.Column(db.String(255), nullable=True)
     segments = db.relationship('ActivitySegment',
                                lazy=True,
                                cascade='all, delete',
@@ -196,7 +198,8 @@ class Activity(db.Model):
             "previous_activity": previous_activity.id if previous_activity else None,  # noqa
             "next_activity": next_activity.id if next_activity else None,
             "segments": [segment.serialize() for segment in self.segments],
-            "records": [record.serialize() for record in self.records]
+            "records": [record.serialize() for record in self.records],
+            "with_map": self.map is not None
         }
 
     @classmethod
@@ -248,6 +251,17 @@ def on_activity_update(mapper, connection, activity):
         update_records(activity.user_id, sport_id, connection, session)
 
 
+@listens_for(Activity, 'after_delete')
+def on_activity_delete(mapper, connection, old_record):
+
+    @listens_for(db.Session, 'after_flush', once=True)
+    def receive_after_flush(session, context):
+        if old_record.map:
+            os.remove(old_record.map)
+        if old_record.gpx:
+            os.remove(old_record.gpx)
+
+
 class ActivitySegment(db.Model):
     __tablename__ = "activity_segments"
     activity_id = db.Column(
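File cleanup now happens through SQLAlchemy events rather than in the route: after_delete fires while the activity row is being flushed, and the nested after_flush listener (registered with once=True) defers the os.remove calls until that flush has completed. A self-contained sketch of the same pattern, not project code, using a throwaway SQLite database and an illustrative Record model; it assumes a recent SQLAlchemy (the project pins 1.2.7 and listens on db.Session instead):

import os
import tempfile

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.event import listens_for
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Record(Base):  # illustrative stand-in for Activity
    __tablename__ = 'records'
    id = Column(Integer, primary_key=True)
    map = Column(String(255), nullable=True)


@listens_for(Record, 'after_delete')
def on_record_delete(mapper, connection, old_record):
    # The row is being deleted inside a flush; register a one-shot
    # listener so the file is removed only after that flush finishes.
    @listens_for(Session, 'after_flush', once=True)
    def receive_after_flush(session, context):
        if old_record.map:
            os.remove(old_record.map)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
_, map_file = tempfile.mkstemp(suffix='.png')

with Session(engine) as session:
    record = Record(map=map_file)
    session.add(record)
    session.commit()
    session.delete(record)
    session.commit()

assert not os.path.exists(map_file)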
@@ -7,6 +7,7 @@ import gpxpy.gpx
 from flask import current_app
 from mpwo_api import db
 from sqlalchemy import exc
+from staticmap import Line, StaticMap
 from werkzeug.utils import secure_filename
 
 from .models import Activity, ActivitySegment, Sport
@@ -150,12 +151,16 @@ def get_gpx_info(gpx_file):
     }
     max_speed = 0
     start = 0
+    map_data = []
 
     for segment_idx, segment in enumerate(gpx.tracks[0].segments):
         segment_start = 0
         for point_idx, point in enumerate(segment.points):
             if point_idx == 0 and start == 0:
                 start = point.time
+            map_data.append([
+                point.longitude, point.latitude
+            ])
             segment_max_speed = (segment.get_moving_data().max_speed
                                  if segment.get_moving_data().max_speed
                                  else 0)
@@ -179,7 +184,7 @@ def get_gpx_info(gpx_file):
         bounds.max_longitude
     ]
 
-    return gpx_data
+    return gpx_data, map_data
 
 
 def get_chart_data(gpx_file):
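get_gpx_info now returns the list of [longitude, latitude] pairs alongside the parsed activity data. A minimal standalone sketch of that extraction with gpxpy (the file path and output are illustrative, not project code):

import gpxpy

# Hedged sketch: collect [longitude, latitude] pairs from the first track
# of a GPX file, in the order staticmap's Line expects.
with open('example.gpx', 'r') as gpx_file:  # placeholder path
    gpx = gpxpy.parse(gpx_file)

map_data = []
for segment in gpx.tracks[0].segments:
    for point in segment.points:
        map_data.append([point.longitude, point.latitude])

print(f'{len(map_data)} points collected')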
@@ -229,8 +234,11 @@ def get_file_path(auth_user_id, dir_path, filename):
     return file_path
 
 
-def get_new_file_path(auth_user_id, activity_date, old_filename, sport):
-    extension = f".{old_filename.rsplit('.', 1)[1].lower()}"
+def get_new_file_path(
+    auth_user_id, activity_date, sport, old_filename=None, extension=None
+):
+    if not extension:
+        extension = f".{old_filename.rsplit('.', 1)[1].lower()}"
     _, new_filename = tempfile.mkstemp(
         prefix=f'{activity_date}_{sport}_',
         suffix=extension
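The helper now accepts either an uploaded file name or an explicit extension, so the same function can name both the renamed .gpx file and the generated .png map. A hedged, self-contained sketch of just that extension-selection logic (the function name and sample values are illustrative, not project code):

# Sketch of the extension fallback introduced above: use the explicit
# extension when given (e.g. '.png' for the static map), otherwise derive
# it from the original upload's file name.
def pick_extension(old_filename=None, extension=None):
    if not extension:
        extension = f".{old_filename.rsplit('.', 1)[1].lower()}"
    return extension


assert pick_extension(old_filename='ride.GPX') == '.gpx'
assert pick_extension(extension='.png') == '.png'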
@@ -244,9 +252,17 @@ def get_new_file_path(auth_user_id, activity_date, old_filename, sport):
     return file_path
 
 
+def generate_map(map_filepath, map_data):
+    m = StaticMap(400, 225, 10)
+    line = Line(map_data, '#3388FF', 4)
+    m.add_line(line)
+    image = m.render()
+    image.save(map_filepath)
+
+
 def process_one_gpx_file(auth_user_id, activity_data, file_path, filename):
     try:
-        gpx_data = get_gpx_info(file_path)
+        gpx_data, map_data = get_gpx_info(file_path)
 
         sport = Sport.query.filter_by(id=activity_data.get('sport_id')).first()
         new_filepath = get_new_file_path(
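generate_map is a thin wrapper around the staticmap package added to the requirements below. A minimal standalone sketch of the same rendering call, with a few hard-coded [longitude, latitude] points standing in for real GPX data; note that rendering downloads map tiles, so it needs network access:

from staticmap import Line, StaticMap

# Hedged sketch: draw a polyline over a 400x225 tile background and save it
# as a PNG, mirroring generate_map() above. Coordinates and the output path
# are placeholders.
map_data = [
    [6.07367, 44.68095],
    [6.07367, 44.68091],
    [6.07364, 44.68084],
]
m = StaticMap(400, 225, 10)               # width, height, padding
m.add_line(Line(map_data, '#3388FF', 4))  # points as [lon, lat], color, width
image = m.render()                        # zoom/center derived from the line extent
image.save('activity_map.png')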
@@ -257,6 +273,14 @@ def process_one_gpx_file(auth_user_id, activity_data, file_path, filename):
         )
         os.rename(file_path, new_filepath)
         gpx_data['filename'] = new_filepath
+
+        map_filepath = get_new_file_path(
+            auth_user_id=auth_user_id,
+            activity_date=gpx_data['start'],
+            extension='.png',
+            sport=sport.label
+        )
+        generate_map(map_filepath, map_data)
     except (gpxpy.gpx.GPXXMLSyntaxException, TypeError) as e:
         raise ActivityException('error', 'Error during gpx file parsing.', e)
     except Exception as e:
@@ -265,6 +289,7 @@ def process_one_gpx_file(auth_user_id, activity_data, file_path, filename):
     try:
         new_activity = create_activity(
            auth_user_id, activity_data, gpx_data)
+        new_activity.map = map_filepath
         db.session.add(new_activity)
         db.session.flush()
 
@@ -19,6 +19,7 @@ def assert_activity_data_with_gpx(data):
     assert data['data']['activities'][0]['moving'] == '0:04:10'
     assert data['data']['activities'][0]['pauses'] is None
     assert data['data']['activities'][0]['with_gpx'] is True
+    assert data['data']['activities'][0]['with_map'] is True
     assert len(data['data']['activities'][0]['segments']) == 1
 
     segment = data['data']['activities'][0]['segments'][0]
@@ -76,6 +77,7 @@ def assert_activity_data_wo_gpx(data):
     assert data['data']['activities'][0]['moving'] == '1:00:00'
     assert data['data']['activities'][0]['pauses'] is None
     assert data['data']['activities'][0]['with_gpx'] is False
+    assert data['data']['activities'][0]['with_map'] is False
 
     assert len(data['data']['activities'][0]['segments']) == 0
 
@@ -762,8 +762,6 @@ def test_get_records_after_sport_change(
     )
     data = json.loads(response.data.decode())
 
-    print(data['data']['records'])
-
     assert response.status_code == 200
     assert 'success' in data['status']
     assert len(data['data']['records']) == 8
@@ -27,6 +27,7 @@ MarkupSafe==1.0
 mccabe==0.6.1
 more-itertools==4.2.0
 mysqlclient==1.3.12
+Pillow==5.1.0
 pluggy==0.6.0
 psycopg2==2.7.3.2
 py==1.5.3
@@ -45,6 +46,7 @@ python-editor==1.0.3
 requests==2.18.4
 six==1.11.0
 SQLAlchemy==1.2.7
+staticmap==0.5.3
 testfixtures==5.3.1
 urllib3==1.22
 Werkzeug==0.14.1