API - add commands to export and delete archives

Sam 2023-03-04 15:57:38 +01:00
parent 576b3fd66c
commit fe00b868ee
3 changed files with 257 additions and 1 deletion

View File

@@ -1,11 +1,18 @@
import os
import secrets
from datetime import datetime, timedelta
from typing import Optional, Tuple
from unittest.mock import Mock, call, patch
from flask import Flask
from fittrackee import db
from fittrackee.users.export_data import UserDataExporter, export_user_data
from fittrackee.users.export_data import (
UserDataExporter,
clean_user_data_export,
export_user_data,
generate_user_data_archives,
)
from fittrackee.users.models import User, UserDataExport
from fittrackee.workouts.models import Sport, Workout
@@ -465,3 +472,165 @@ class TestExportUserData:
'fittrackee_url': 'http://0.0.0.0:5000',
},
)


class UserDataExportTestCase:
    @staticmethod
    def create_user_request(
        user: User, days: int = 0, completed: bool = True
    ) -> UserDataExport:
        user_data_export = UserDataExport(
            user_id=user.id,
            created_at=datetime.now() - timedelta(days=days),
        )
        db.session.add(user_data_export)
        user_data_export.completed = completed
        db.session.commit()
        return user_data_export

    def generate_archive(
        self, user: User
    ) -> Tuple[UserDataExport, Optional[str]]:
        user_data_export = self.create_user_request(user, days=7)
        exporter = UserDataExporter(user)
        archive_path, archive_file_name = exporter.generate_archive()
        user_data_export.file_name = archive_file_name
        user_data_export.file_size = random_int()
        db.session.commit()
        return user_data_export, archive_path


class TestCleanUserDataExport(UserDataExportTestCase):
    def test_it_returns_0_when_no_export_requests(self, app: Flask) -> None:
        counts = clean_user_data_export(days=7)
        assert counts["deleted_requests"] == 0

    def test_it_returns_0_when_export_request_is_not_completed(
        self, app: Flask, user_1: User
    ) -> None:
        self.create_user_request(user_1, days=7, completed=False)
        counts = clean_user_data_export(days=7)
        assert counts["deleted_requests"] == 0

    def test_it_returns_0_when_export_request_created_less_than_given_days(
        self, app: Flask, user_1: User
    ) -> None:
        self.create_user_request(user_1, days=1)
        counts = clean_user_data_export(days=7)
        assert counts["deleted_requests"] == 0

    def test_it_returns_export_requests_created_more_than_given_days_count(
        self, app: Flask, user_1: User, user_2: User
    ) -> None:
        self.create_user_request(user_1, days=7)
        self.create_user_request(user_2, days=7)
        counts = clean_user_data_export(days=7)
        assert counts["deleted_requests"] == 2

    def test_it_returns_counts(
        self, app: Flask, user_1: User, user_2: User, user_3: User
    ) -> None:
        user_1_data_export, archive_path = self.generate_archive(user_1)
        user_2_data_export, archive_path = self.generate_archive(user_2)
        self.create_user_request(user_3, days=7)
        counts = clean_user_data_export(days=7)
        assert counts["deleted_requests"] == 3
        assert counts["deleted_archives"] == 2
        assert counts["freed_space"] == (
            user_1_data_export.file_size + user_2_data_export.file_size
        )

    def test_it_deletes_archive(
        self, app: Flask, user_1: User, user_2: User
    ) -> None:
        _, archive_path = self.generate_archive(user_1)
        clean_user_data_export(days=7)
        assert os.path.exists(archive_path) is False  # type: ignore

    def test_it_deletes_requests(
        self, app: Flask, user_1: User, user_2: User
    ) -> None:
        self.generate_archive(user_1)
        clean_user_data_export(days=7)
        assert (
            UserDataExport.query.filter_by(user_id=user_1.id).first() is None
        )


class TestGenerateUsersArchives(UserDataExportTestCase):
    def test_it_returns_0_when_no_request(self, app: Flask) -> None:
        count = generate_user_data_archives(max_count=1)
        assert count == 0

    def test_it_returns_0_when_export_request_already_completed(
        self, app: Flask, user_1: User
    ) -> None:
        self.create_user_request(user_1, completed=True)
        count = generate_user_data_archives(max_count=1)
        assert count == 0

    def test_it_returns_count_when_archive_is_generated(
        self, app: Flask, user_1: User
    ) -> None:
        self.create_user_request(user_1, completed=False)
        count = generate_user_data_archives(max_count=1)
        assert count == 1

    @patch.object(secrets, 'token_urlsafe')
    def test_it_generates_user_archive(
        self, secrets_mock: Mock, app: Flask, user_1: User
    ) -> None:
        token_urlsafe = random_string()
        secrets_mock.return_value = token_urlsafe
        archive_path = os.path.join(
            app.config['UPLOAD_FOLDER'],
            'exports',
            str(user_1.id),
            f"archive_{token_urlsafe}.zip",
        )
        self.create_user_request(user_1, completed=False)
        generate_user_data_archives(max_count=1)
        assert os.path.exists(archive_path) is True  # type: ignore

    def test_it_generates_max_count_of_archives(
        self, app: Flask, user_1: User, user_2: User, user_3: User
    ) -> None:
        self.create_user_request(user_3, completed=False)
        self.create_user_request(user_1, completed=False)
        self.create_user_request(user_2, completed=False)
        count = generate_user_data_archives(max_count=2)
        assert count == 2
        assert (
            UserDataExport.query.filter_by(user_id=user_1.id).first().completed
            is True
        )
        assert (
            UserDataExport.query.filter_by(user_id=user_2.id).first().completed
            is False
        )
        assert (
            UserDataExport.query.filter_by(user_id=user_3.id).first().completed
            is True
        )

View File

@@ -2,9 +2,14 @@ import logging
from typing import Optional
import click
from humanize import naturalsize
from fittrackee.cli.app import app
from fittrackee.users.exceptions import UserNotFoundException
from fittrackee.users.export_data import (
clean_user_data_export,
generate_user_data_archives,
)
from fittrackee.users.utils.admin import UserManagerService
from fittrackee.users.utils.token import clean_blacklisted_tokens
@@ -80,3 +85,39 @@ def clean(
with app.app_context():
deleted_rows = clean_blacklisted_tokens(days)
logger.info(f'Blacklisted tokens deleted: {deleted_rows}.')


@users_cli.command('clean_archives')
@click.option('--days', type=int, required=True, help='Number of days.')
def clean_export_archives(
    days: int,
) -> None:
    """
    Clean user export archives created more than the provided number of
    days ago.
    """
    with app.app_context():
        counts = clean_user_data_export(days)
        logger.info(
            f'Deleted data export requests: {counts["deleted_requests"]}.'
        )
        logger.info(f'Deleted archives: {counts["deleted_archives"]}.')
        logger.info(f'Freed space: {naturalsize(counts["freed_space"])}.')


@users_cli.command('export_archives')
@click.option(
    '--max',
    type=int,
    required=True,
    help='Maximum number of archives to generate.',
)
def export_archives(
    max: int,
) -> None:
    """
    Export user data as zip archives when incomplete export requests exist.
    To use when Redis is not set.
    """
    with app.app_context():
        count = generate_user_data_archives(max)
        logger.info(f'Generated archives: {count}.')
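
For a quick local check, both commands can be invoked through click's test runner (they need a configured fittrackee database to do anything useful). A minimal sketch, assuming the `users_cli` group defined above is importable from `fittrackee.users.commands` (the module path is an assumption):

from click.testing import CliRunner

# assumed import path for the `users_cli` group defined above
from fittrackee.users.commands import users_cli

runner = CliRunner()

# delete completed export requests (and their archives) older than 30 days
result = runner.invoke(users_cli, ['clean_archives', '--days', '30'])
assert result.exit_code == 0

# generate archives for at most 5 pending requests, without a Redis worker
result = runner.invoke(users_cli, ['export_archives', '--max', '5'])
assert result.exit_code == 0

In a deployment, the same commands would rather be run on a schedule through fittrackee's CLI entry point.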

View File

@@ -1,6 +1,7 @@
import json
import os
import secrets
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple, Union
from zipfile import ZipFile
@@ -135,3 +136,48 @@ def export_user_data(export_request_id: int) -> None:
db.session.commit()
except Exception as e:
appLog.error(f'Error when exporting user data: {str(e)}')


def clean_user_data_export(days: int) -> Dict:
    counts = {"deleted_requests": 0, "deleted_archives": 0, "freed_space": 0}
    limit = datetime.now() - timedelta(days=days)
    export_requests = UserDataExport.query.filter(
        UserDataExport.created_at < limit,
        UserDataExport.completed == True,  # noqa
    ).all()
    if not export_requests:
        return counts
    archive_directory = get_absolute_file_path("exports")
    for request in export_requests:
        if request.file_name:
            archive_path = os.path.join(
                archive_directory, f"{request.user_id}", request.file_name
            )
            if os.path.exists(archive_path):
                counts["deleted_archives"] += 1
                counts["freed_space"] += request.file_size
        # Archive is deleted when row is deleted
        db.session.delete(request)
        counts["deleted_requests"] += 1
    db.session.commit()
    return counts
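

# The "Archive is deleted when row is deleted" comment above assumes a
# SQLAlchemy delete listener attached to UserDataExport elsewhere in the
# models. The hook below is only a sketch of what such a listener might look
# like; its name and placement are assumptions, not the actual fittrackee
# implementation.
from sqlalchemy.event import listens_for  # import needed for this sketch


@listens_for(UserDataExport, 'after_delete')
def remove_archive_on_export_request_delete(
    mapper, connection, old_record
) -> None:
    # remove the zip archive from disk once its request row is deleted
    if old_record.file_name:
        archive_path = os.path.join(
            get_absolute_file_path('exports'),
            str(old_record.user_id),
            old_record.file_name,
        )
        if os.path.exists(archive_path):
            os.remove(archive_path)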


def generate_user_data_archives(max_count: int) -> int:
    count = 0
    export_requests = (
        db.session.query(UserDataExport)
        .filter(UserDataExport.completed == False)  # noqa
        .order_by(UserDataExport.created_at)
        .limit(max_count)
        .all()
    )
    for export_request in export_requests:
        export_user_data(export_request.id)
        count += 1
    return count
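
The two helpers above can also be driven directly from a scheduled job when neither the CLI nor a worker is used. A minimal sketch (the wrapper function and the thresholds are illustrative; the imported names come from the files in this commit, and the schedule itself, e.g. cron, is left to the deployment):

from fittrackee.cli.app import app
from fittrackee.users.export_data import (
    clean_user_data_export,
    generate_user_data_archives,
)


def run_export_maintenance() -> None:
    with app.app_context():
        # generate archives for pending (incomplete) export requests first
        generated = generate_user_data_archives(max_count=10)
        # then delete completed requests (and archives) older than 30 days
        counts = clean_user_data_export(days=30)
        print(
            f"generated: {generated}, "
            f"deleted requests: {counts['deleted_requests']}, "
            f"deleted archives: {counts['deleted_archives']}"
        )


if __name__ == '__main__':
    run_export_maintenance()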