API - add commands to export and delete archives
This commit is contained in:
@ -2,9 +2,14 @@ import logging
|
||||
from typing import Optional
|
||||
|
||||
import click
|
||||
from humanize import naturalsize
|
||||
|
||||
from fittrackee.cli.app import app
|
||||
from fittrackee.users.exceptions import UserNotFoundException
|
||||
from fittrackee.users.export_data import (
|
||||
clean_user_data_export,
|
||||
generate_user_data_archives,
|
||||
)
|
||||
from fittrackee.users.utils.admin import UserManagerService
|
||||
from fittrackee.users.utils.token import clean_blacklisted_tokens
|
||||
|
||||
@ -80,3 +85,39 @@ def clean(
|
||||
with app.app_context():
|
||||
deleted_rows = clean_blacklisted_tokens(days)
|
||||
logger.info(f'Blacklisted tokens deleted: {deleted_rows}.')
|
||||
|
||||
|
||||
@users_cli.command('clean_archives')
@click.option('--days', type=int, required=True, help='Number of days.')
def clean_export_archives(
    days: int,
) -> None:
    """
    Clean user export archives created for more than provided number of days.
    """
    # run inside the Flask application context so the DB session is available
    with app.app_context():
        deletion_counts = clean_user_data_export(days)
        # report what was removed; freed space is humanized for readability
        logger.info(
            f'Deleted data export requests: '
            f'{deletion_counts["deleted_requests"]}.'
        )
        logger.info(
            f'Deleted archives: {deletion_counts["deleted_archives"]}.'
        )
        logger.info(
            f'Freed space: {naturalsize(deletion_counts["freed_space"])}.'
        )
|
||||
|
||||
|
||||
@users_cli.command('export_archives')
@click.option(
    '--max',
    'max_archives',  # explicit parameter name: avoid shadowing builtin 'max'
    type=int,
    required=True,
    help='Maximum number of archives to generate.',
)
def export_archives(
    max_archives: int,
) -> None:
    """
    Export user data in zip archive if incomplete requests exist.

    To use in case redis is not set.
    """
    # run inside the Flask application context so the DB session is available
    with app.app_context():
        count = generate_user_data_archives(max_archives)
        logger.info(f'Generated archives: {count}.')
|
||||
|
@ -1,6 +1,7 @@
|
||||
import json
|
||||
import os
|
||||
import secrets
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Tuple, Union
|
||||
from zipfile import ZipFile
|
||||
|
||||
@ -135,3 +136,48 @@ def export_user_data(export_request_id: int) -> None:
|
||||
db.session.commit()
|
||||
except Exception as e:
|
||||
appLog.error(f'Error when exporting user data: {str(e)}')
|
||||
|
||||
|
||||
def clean_user_data_export(days: int) -> Dict:
    """
    Delete completed export requests older than the given number of days.

    Returns a dict with the number of deleted requests, the number of
    archive files that existed on disk, and the total size in bytes of
    those archives ('freed_space').
    """
    counts = {"deleted_requests": 0, "deleted_archives": 0, "freed_space": 0}
    cutoff = datetime.now() - timedelta(days=days)
    completed_requests = UserDataExport.query.filter(
        UserDataExport.created_at < cutoff,
        UserDataExport.completed == True,  # noqa
    ).all()

    # nothing to clean up
    if not completed_requests:
        return counts

    exports_dir = get_absolute_file_path("exports")
    for export_request in completed_requests:
        if export_request.file_name:
            archive_file = os.path.join(
                exports_dir,
                f"{export_request.user_id}",
                export_request.file_name,
            )
            # only count archives actually present on disk
            if os.path.exists(archive_file):
                counts["deleted_archives"] += 1
                counts["freed_space"] += export_request.file_size
        # the archive file itself is removed when the row is deleted
        db.session.delete(export_request)
        counts["deleted_requests"] += 1

    db.session.commit()
    return counts
|
||||
|
||||
|
||||
def generate_user_data_archives(max_count: int) -> int:
    """
    Generate export archives for at most ``max_count`` incomplete export
    requests, oldest first, and return the number of processed requests.
    """
    generated = 0
    # oldest pending requests first, capped at max_count
    pending_requests = (
        db.session.query(UserDataExport)
        .filter(UserDataExport.completed == False)  # noqa
        .order_by(UserDataExport.created_at)
        .limit(max_count)
        .all()
    )

    for pending_request in pending_requests:
        export_user_data(pending_request.id)
        generated += 1

    return generated
|
||||
|
Reference in New Issue
Block a user