Fix storage page by storing file sizes in the db
@@ -29,7 +29,7 @@ from app.tasks.forumtasks import import_topic_list, check_all_forum_accounts
 from app.tasks.importtasks import import_repo_screenshot, check_zip_release, check_for_updates, update_all_game_support, \
     import_languages, check_all_zip_files
 from app.tasks.usertasks import import_github_user_ids
-from app.tasks.pkgtasks import notify_about_git_forum_links, clear_removed_packages, check_package_for_broken_links
+from app.tasks.pkgtasks import notify_about_git_forum_links, clear_removed_packages, check_package_for_broken_links, update_file_size_bytes
 from app.utils import add_notification, get_system_user

 actions = {}
@@ -322,6 +322,13 @@ def do_check_all_zip_files():
     return redirect(url_for("tasks.check", id=task_id, r=url_for("admin.admin_page")))


+@action("Update file_size_bytes")
+def do_update_file_size_bytes():
+    task_id = uuid()
+    update_file_size_bytes.apply_async((), task_id=task_id)
+    return redirect(url_for("tasks.check", id=task_id, r=url_for("admin.admin_page")))
+
+
 @action("DANGER: Delete less popular removed packages")
 def del_less_popular_removed_packages():
     task_id = uuid()
@@ -1103,6 +1103,7 @@ class PackageRelease(db.Model):
     commit_hash = db.Column(db.String(41), nullable=True, default=None)
     downloads = db.Column(db.Integer, nullable=False, default=0)
     release_notes = db.Column(db.UnicodeText, nullable=True, default=None)
+    file_size_bytes = db.Column(db.Integer, nullable=False, default=0)

     @property
     def summary(self) -> str:
@@ -1126,14 +1127,14 @@ class PackageRelease(db.Model):
     def file_path(self):
         return self.url.replace("/uploads/", app.config["UPLOAD_DIR"])

-    @property
-    def file_size_bytes(self):
+    def calculate_file_size_bytes(self):
         path = self.file_path
         if not os.path.isfile(path):
-            return 0
+            self.file_size_bytes = 0
+            return

         file_stats = os.stat(path)
-        return file_stats.st_size
+        self.file_size_bytes = file_stats.st_size

     @property
     def file_size(self):
@@ -1263,6 +1264,8 @@ class PackageScreenshot(db.Model):
     width = db.Column(db.Integer, nullable=False)
     height = db.Column(db.Integer, nullable=False)

+    file_size_bytes = db.Column(db.Integer, nullable=False, default=0)
+
     def is_very_small(self):
         return self.width < 720 or self.height < 405

@@ -1276,14 +1279,14 @@
     def file_path(self):
         return self.url.replace("/uploads/", app.config["UPLOAD_DIR"])

-    @property
-    def file_size_bytes(self):
+    def calculate_file_size_bytes(self):
         path = self.file_path
         if not os.path.isfile(path):
-            return 0
+            self.file_size_bytes = 0
+            return

         file_stats = os.stat(path)
-        return file_stats.st_size
+        self.file_size_bytes = file_stats.st_size

     @property
     def file_size(self):
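The file_size property kept as context in both hunks above is not part of this change; presumably it turns the stored file_size_bytes value into a human-readable string for the storage page. A minimal standalone sketch of that kind of formatter, hypothetical and not taken from this codebase:

    # Hypothetical helper, for illustration only -- not this project's implementation.
    # Converts a byte count (such as the new file_size_bytes column) into a short
    # human-readable string for display.
    def format_file_size(size_bytes: int) -> str:
        size = float(size_bytes)
        for unit in ("B", "KB", "MB"):
            if size < 1024:
                return f"{size:.1f} {unit}"
            size /= 1024
        return f"{size:.1f} GB"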
@@ -84,7 +84,7 @@ window.addEventListener("load", () => {
             bar.setAttribute("aria-valuenow", current);
             bar.setAttribute("aria-valuemax", total);

-            const packages = running.map(x => `${x.author}/${x.name}`).join(", ");
+            const packages = (running ?? []).map(x => `${x.author}/${x.name}`).join(", ");
             document.getElementById("status").textContent = `Status: in progress (${current} / ${total})\n\n${packages}`;
         } else {
             progress.classList.add("d-none");
@@ -98,6 +98,9 @@ window.addEventListener("load", () => {

         pollTask(`/tasks/${taskId}/`, true, onProgress)
             .then(function() { location.reload() })
-            .catch(function() { location.reload() })
+            .catch(function(e) {
+                console.error(e);
+                location.reload();
+            });
     }
 });
@@ -337,6 +337,7 @@ def check_zip_release(self, id, path):
     post_release_check_update(self, release, temp)

     release.task_id = None
+    release.calculate_file_size_bytes()
     release.approve(release.package.author)
     db.session.commit()

@@ -416,6 +417,7 @@ def make_vcs_release(self, id, branch):

         release.url = "/uploads/" + filename
         release.task_id = None
+        release.calculate_file_size_bytes()
         release.approve(release.package.author)
         db.session.commit()

@@ -28,7 +28,7 @@ from app import app
 from sqlalchemy import or_, and_

 from app.markdown import get_links, render_markdown
-from app.models import Package, db, PackageState, AuditLogEntry, AuditSeverity
+from app.models import db, Package, PackageState, PackageRelease, PackageScreenshot, AuditLogEntry, AuditSeverity
 from app.tasks import celery, TaskError
 from app.utils import post_bot_message, post_to_approval_thread, get_system_user, add_audit_log

@@ -209,3 +209,34 @@ def check_package_for_broken_links(package_id: int):
     if msg:
         post_bot_message(package, "Broken links", msg)
         db.session.commit()
+
+
+@celery.task(bind=True)
+def update_file_size_bytes(self):
+    releases = PackageRelease.query.filter_by(file_size_bytes=0).all()
+    screenshots = PackageScreenshot.query.filter_by(file_size_bytes=0).all()
+    total = len(releases) + len(screenshots)
+    self.update_state(state="PROGRESS", meta={
+        "current": 0,
+        "total": total,
+    })
+
+    for i, release in enumerate(releases):
+        release.calculate_file_size_bytes()
+
+        if i % 100 == 0:
+            self.update_state(state="PROGRESS", meta={
+                "current": i + 1,
+                "total": total,
+            })
+
+    for i, ss in enumerate(screenshots):
+        ss.calculate_file_size_bytes()
+
+        if i % 100 == 0:
+            self.update_state(state="PROGRESS", meta={
+                "current": i + len(releases) + 1,
+                "total": total,
+            })
+
+    db.session.commit()
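As a usage note, the backfill task above is what the new "Update file_size_bytes" admin action earlier in this diff queues via apply_async. It could equally be queued from a Celery-enabled shell; a minimal sketch, assuming the project's Celery app is configured and a worker is running:

    # Minimal sketch, assuming a configured broker and a running Celery worker.
    from app.tasks.pkgtasks import update_file_size_bytes

    result = update_file_size_bytes.delay()  # queue the backfill asynchronously
    print(result.id)  # task id; progress can be watched at /tasks/<id>/ via polltask.js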
@@ -18,7 +18,7 @@
     {{ form_scripts() }}
     {{ easymde_scripts() }}
     {% if enable_wizard %}
-        <script src="/static/js/polltask.js?v=3"></script>
+        <script src="/static/js/polltask.js?v=4"></script>
         <script src="/static/js/package_create.js"></script>
     {% endif %}
     <script src="/static/js/package_edit.js?v=4"></script>
@@ -20,7 +20,7 @@
 {% if "error" in info or info.status == "FAILURE" or info.status == "REVOKED" %}
     <pre style="white-space: pre-wrap; word-wrap: break-word;">{{ info.error }}</pre>
 {% else %}
-    <script src="/static/js/polltask.js?v=3"></script>
+    <script src="/static/js/polltask.js?v=4"></script>
     <noscript>
         {{ _("Reload the page to check for updates.") }}
     </noscript>
@@ -1,4 +1,3 @@
-version: '3'
 services:
   db:
     image: "postgres:14"
migrations/versions/c181c6c88bae_.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+"""empty message
+
+Revision ID: c181c6c88bae
+Revises: daa040b727b2
+Create Date: 2025-07-02 17:21:33.554960
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'c181c6c88bae'
+down_revision = 'daa040b727b2'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.add_column('package_release',
+        sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default="0"))
+    op.add_column('package_screenshot',
+        sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default="0"))
+
+
+def downgrade():
+    op.drop_column('package_release', 'file_size_bytes')
+    op.drop_column('package_screenshot', 'file_size_bytes')
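Assuming the standard Flask-Migrate setup implied by the migrations/versions/ directory, this revision is applied with `flask db upgrade` and reverted with `flask db downgrade`; the server_default of "0" keeps the new NOT NULL columns valid for existing rows until the backfill task fills in real sizes.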