/files/: make deletes cascade properly
commit 47d4e36930
parent c584950f11
@@ -6,6 +6,9 @@ from flask_marshmallow import Marshmallow
 from flask_bcrypt import Bcrypt
 from flask_migrate import Migrate
 from .config import DevelopmentConfig, ProductionConfig, TestingConfig, overlay_config
+from sqlalchemy import event
+from sqlalchemy.engine import Engine
+from sqlite3 import Connection as SQLite3Connection

 app = Flask(__name__)
 CORS(app)
@@ -33,6 +36,15 @@ storage = None
 from sachet.storage import FileSystem


+# https://stackoverflow.com/questions/57726047/
+@event.listens_for(Engine, "connect")
+def _set_sqlite_pragma(dbapi_connection, connection_record):
+    if isinstance(dbapi_connection, SQLite3Connection):
+        cursor = dbapi_connection.cursor()
+        cursor.execute("PRAGMA foreign_keys=ON;")
+        cursor.close()
+
+
 with app.app_context():
     db.create_all()
     if _storage_method == "filesystem":
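A standalone aside (plain sqlite3, a throwaway schema assumed purely for illustration, not project code): SQLite ships with foreign key enforcement switched off per connection, so ON DELETE CASCADE is silently ignored unless the pragma set by the listener above is enabled. A minimal sketch of the difference:

import sqlite3

ddl = """
CREATE TABLE uploads (upload_id TEXT PRIMARY KEY);
CREATE TABLE chunks (
    chunk_id INTEGER PRIMARY KEY,
    upload_id TEXT REFERENCES uploads (upload_id) ON DELETE CASCADE
);
INSERT INTO uploads VALUES ('u1');
INSERT INTO chunks (upload_id) VALUES ('u1');
"""

for pragma_on in (False, True):
    conn = sqlite3.connect(":memory:")
    if pragma_on:
        conn.execute("PRAGMA foreign_keys=ON;")
    conn.executescript(ddl)
    conn.execute("DELETE FROM uploads WHERE upload_id = 'u1'")
    # leftover chunk rows: 1 with the pragma off, 0 with it on
    print(pragma_on, conn.execute("SELECT COUNT(*) FROM chunks").fetchone()[0])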
@@ -1,6 +1,6 @@
 import click
 from sachet.server import app, db
-from sachet.server.models import User, Share, Permissions
+from sachet.server.models import User, Share, Permissions, Upload
 from sachet.server.users import manage
 from flask.cli import AppGroup
 from bitmask import Bitmask
@@ -51,15 +51,21 @@ app.cli.add_command(user_cli)
 def cleanup():
     """Clean up stale database entries.

-    Shares that are not initialized are deleted if they are older than 25 minutes.
+    Uninitialized shares and unfinished uploads older than a day are deleted.
     """
     res = Share.query.filter(
-        Share.create_date < (datetime.datetime.now() - datetime.timedelta(minutes=25)),
+        Share.create_date < (datetime.datetime.now() - datetime.timedelta(hours=24)),
         # do not use `Share.initialized` or `is False` here
         # sqlalchemy doesn't like it
+        # noqa: E712
         Share.initialized == False,
     )
     res.delete()

+    res = Upload.query.filter(
+        Upload.create_date < (datetime.datetime.now() - datetime.timedelta(hours=24))
+    )
+    res.delete()
+
     db.session.commit()

@@ -111,7 +111,9 @@ class FileContentAPI(MethodView):

         if upload.completed:
             share.initialized = True
-            db.session.delete(upload)
+            # really convoluted
+            # but otherwise it doesn't cascade deletes?
+            Upload.query.filter(Upload.upload_id == upload.upload_id).delete()
             db.session.commit()
             return jsonify(dict(status="success", message="Upload completed.")), 201
         else:
@@ -334,9 +334,9 @@ class Upload(db.Model):

     chunks = db.relationship(
         "Chunk",
-        backref=db.backref("upload"),
+        backref="upload",
+        passive_deletes=True,
         order_by="Chunk.chunk_id",
-        cascade="all, delete",
     )

     def __init__(self, upload_id, total_chunks, share_id):
@@ -401,7 +401,9 @@ class Chunk(db.Model):
     chunk_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
     create_date = db.Column(db.DateTime, nullable=False)
     index = db.Column(db.Integer, nullable=False)
-    upload_id = db.Column(db.String, db.ForeignKey("uploads.upload_id"))
+    upload_id = db.Column(
+        db.String, db.ForeignKey("uploads.upload_id", ondelete="CASCADE")
+    )
     filename = db.Column(db.String, nullable=False)

     def __init__(self, index, upload_id, total_chunks, share, data):
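A self-contained sketch (simplified stand-in models, not the project's actual classes) of the pattern the two model changes above combine: a database-level ON DELETE CASCADE foreign key plus passive_deletes=True, so the ORM does not load child rows just to delete or null them and instead defers their removal to the database. The bulk query delete at the end mirrors how the view deletes uploads:

import sqlite3
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Upload(Base):
    __tablename__ = "uploads"
    upload_id = Column(String, primary_key=True)
    chunks = relationship("Chunk", backref="upload", passive_deletes=True)


class Chunk(Base):
    __tablename__ = "chunks"
    chunk_id = Column(Integer, primary_key=True)
    upload_id = Column(String, ForeignKey("uploads.upload_id", ondelete="CASCADE"))


engine = create_engine("sqlite://")


@event.listens_for(engine, "connect")
def _fk_on(dbapi_connection, connection_record):
    # SQLite needs this per connection for the cascade to be enforced
    dbapi_connection.execute("PRAGMA foreign_keys=ON;")


Base.metadata.create_all(engine)
with Session(engine) as session:
    up = Upload(upload_id="u1")
    up.chunks.append(Chunk())
    session.add(up)
    session.commit()

    # a bulk query delete hits the uploads table directly; with enforcement on,
    # the database's ON DELETE CASCADE removes the dependent chunk row
    session.query(Upload).filter(Upload.upload_id == "u1").delete()
    session.commit()
    print(session.query(Chunk).count())  # 0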
@@ -421,9 +423,11 @@ class Chunk(db.Model):
         with file.open(mode="wb") as f:
             f.write(data)

-    @classmethod
-    def __declare_last__(cls):
-        @event.listens_for(cls, "before_delete")
-        def chunk_before_delete(mapper, connection, chunk):
+
+@event.listens_for(db.session, "persistent_to_deleted")
+def chunk_delete_listener(session, instance):
+    # kinda hacky but i have no idea how to trigger event listener on cascaded delete
+    if isinstance(instance, Upload):
+        for chunk in instance.chunks:
             file = storage.get_file(chunk.filename)
             file.delete()
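One more standalone sketch (generic names, not the project's code): "persistent_to_deleted" is a SQLAlchemy session event that fires during a flush when a persistent object's row is deleted, which is what makes it usable as a hook for cleaning up files that backed the deleted rows:

from sqlalchemy import Column, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Upload(Base):
    __tablename__ = "uploads"
    upload_id = Column(String, primary_key=True)


@event.listens_for(Session, "persistent_to_deleted")
def _on_deleted(session, instance):
    # runs once per deleted persistent object; a real handler would
    # remove the external files belonging to the row here
    print("deleted:", instance)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Upload(upload_id="u1"))
    session.commit()
    session.delete(session.get(Upload, "u1"))
    session.commit()  # the listener fires during this flush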
@@ -3,7 +3,7 @@ from sachet.server.commands import create_user, delete_user, cleanup
 from sqlalchemy import inspect
 from sachet.server import db
 import datetime
-from sachet.server.models import User, Share
+from sachet.server.models import User, Share, Chunk, Upload


 def test_user(client, cli):
@@ -28,12 +28,12 @@ def test_user(client, cli):


 def test_cleanup(client, cli):
-    """Test the CLI's ability to destroy uninitialized shares past expiry."""
+    """Test the CLI's ability to destroy stale entries."""
     # create shares
     # this one will be destroyed
     share = Share()
     db.session.add(share)
-    share.create_date = datetime.datetime.now() - datetime.timedelta(minutes=30)
+    share.create_date = datetime.datetime.now() - datetime.timedelta(hours=30)
     destroyed = share.share_id
     # this one won't
     share = Share()
@@ -42,7 +42,7 @@ def test_cleanup(client, cli):
     # this one neither
     share = Share()
     share.initialized = True
-    share.create_date = datetime.datetime.now() - datetime.timedelta(minutes=30)
+    share.create_date = datetime.datetime.now() - datetime.timedelta(hours=30)
     db.session.add(share)
     safe2 = share.share_id

@@ -53,3 +53,29 @@ def test_cleanup(client, cli):
     assert Share.query.filter_by(share_id=destroyed).first() is None
     assert Share.query.filter_by(share_id=safe).first() is not None
     assert Share.query.filter_by(share_id=safe2).first() is not None
+
+    # test stale uploads and chunks
+
+    test_share = Share()
+    db.session.add(test_share)
+
+    chk = Chunk(0, "upload1", 1, test_share, b"test_data")
+    chk.upload.create_date = datetime.datetime.now() - datetime.timedelta(hours=30)
+    db.session.add(chk)
+    chk_upload_id = chk.upload.upload_id
+
+    chk_safe = Chunk(0, "upload2", 1, test_share, b"test_data")
+    db.session.add(chk_safe)
+    chk_safe_upload_id = chk_safe.upload.upload_id
+
+    db.session.commit()
+
+    chk_id = chk.chunk_id
+    chk_safe_id = chk_safe.chunk_id
+
+    result = cli.invoke(cleanup)
+    assert result.exit_code == 0
+    assert Chunk.query.filter_by(chunk_id=chk_id).first() is None
+    assert Chunk.query.filter_by(chunk_id=chk_safe_id).first() is not None
+    assert Upload.query.filter_by(upload_id=chk_upload_id).first() is None
+    assert Upload.query.filter_by(upload_id=chk_safe_upload_id).first() is not None
@@ -2,6 +2,7 @@ import pytest
 from os.path import basename
 from io import BytesIO
 from werkzeug.datastructures import FileStorage
+from sachet.server.models import Upload, Chunk
 from sachet.server import storage
 import uuid
