Fix bugs in solo_file_fetcher

This commit is contained in:
p23 2024-12-09 06:10:58 +00:00
parent b4fb889c5e
commit 05b1b18af8
3 changed files with 13 additions and 10 deletions

View File

@ -26,6 +26,7 @@ def listlog():
return jsonify(res), 200
# 指定顯示特定一條log
@log.route("/<int:id>", methods = ["GET"])
def getlog(id:int):

View File

@ -184,8 +184,9 @@ def solo_article_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, gen
"content": res[1],
"files_hash": res[2],
"igid": res[3],
"comments_hash": [ c.sha1 for c in res[4] ]
}
if res[4]:
one["comments_hash"] = [ c.sha1 for c in res[4] ]
if role == "admin":
one["ip"] = res[6]
@ -194,6 +195,7 @@ def solo_article_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, gen
return one, 200
# role (general) (owner) (admin)
# 獲取單一留言
def solo_comment_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
@ -362,14 +364,14 @@ def solo_comment_remover(role:str, hash:str=None, sha1:str=None) -> Tuple[Dict,
# 獲取檔案
def solo_file_fetcher(role:str, fnhash:str) -> Tuple[Response, int]: # general, admin
article = pgclass.SQLarticle
article_mark = pgclass.SQLmark
with db.getsession() as session:
arta = session.query(article).join(article_mark, article_mark.hash == article.hash).filter(article.file_list == func.any(fnhash))
arta="SELECT posts.id, posts.hash, mark.mark, f FROM posts " \
+"JOIN unnest(file_list) AS f ON 1=1 " \
+"JOIN mark ON posts.hash = mark.hash " \
+"WHERE f = :fnhash "
if role == "general":
arta = arta.filter(article_mark == 'visible')
aeta = arta.first()
arta += "AND mark.mark = 'visible'"
arta = session.execute(text(arta), {'fnhash':fnhash}).first()
if arta is None: # 檢查文章本體是否存在/可以閱覽
return error("File not found"), 404

View File

@ -7,6 +7,7 @@ import io
import sys
import minio
from minio.deleteobjects import DeleteObject
S3_BUCKET = os.getenv("S3_BUCKET")
@ -17,7 +18,7 @@ s3 = minio.Minio(endpoint=os.getenv("S3_ENDPOINT"),
# check exist
if not s3.bucket_exists(S3_BUCKET):
print("[!] Where is S3 bucket \"%s\"?"%S3_BUCKET)
print("Where is S3 bucket \"%s\"?"%S3_BUCKET)
sys.exit(0)
# methods
@ -60,8 +61,7 @@ def solo_file_fetcher(fnhash:str) -> Tuple[dict | None, int]:
def multi_file_remover(file_list) -> int:
try:
for f in file_list:
s3.remove_object(S3_BUCKET, f)
s3.remove_objects(S3_BUCKET, [ DeleteObject(f) for f in file_list ])
return 0
except:
return 1