Compare commits
No commits in common. "b4fb889c5ebbc22dc814edd4caecb131cb1a7987" and "34fe546cd6cd750e3315536f82abd52fb2da7857" have entirely different histories.
b4fb889c5e ... 34fe546cd6

app.py (20 changes)
@@ -4,12 +4,11 @@ from flask import Flask
from bcrypt import checkpw, gensalt, hashpw
from sqlalchemy import create_engine

from utils import setting_loader, logger, dbhelper
from utils.pgclass import Base, SQLuser
from utils import pgclass, setting_loader, logger, dbhelper
from utils.platform_consts import PLIST_ROOT
from blueprints.article import article
from blueprints.log import log
# from blueprints.admin import admin
from blueprints.admin import admin

# env
PG_HOST = os.getenv("PG_HOST", None)
@@ -31,8 +30,9 @@ if len(errmsg):
    exit(0)

# Postgresql
dbhelper.db = dbhelper.DB(create_engine('postgresql+psycopg2://%s:%s@%s:%s/%s'%(PG_USER, PG_PASS, PG_HOST, PG_PORT, PG_NAME)))
Base.metadata.create_all(dbhelper.db._engine)
engine = create_engine('postgresql+psycopg2://%s:%s@%s:%s/%s'%(PG_USER, PG_PASS, PG_HOST, PG_PORT, PG_NAME))
pgclass.Base.metadata.create_all(engine)
dbhelper.db(engine)

# settings checker
settings = setting_loader.loadset()
@@ -44,12 +44,12 @@ for s in settings:
# root checker
pwhash = hashpw(PLATFORM_ROOT_PASSWORD.encode("utf-8"), gensalt()).decode("utf-8") # if needed, new password
with dbhelper.db.getsession() as session:
    root = session.query(SQLuser).filter(SQLuser.user=="root").first()
    root = session.query(pgclass.SQLuser).filter(pgclass.SQLuser.user=="root").first()
    if root is None: # no root user yet
        session.add(SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
        session.add(pgclass.SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
    elif (not checkpw(PLATFORM_ROOT_PASSWORD.encode("utf-8"), root.password.encode("utf-8"))) or root.permission != PLIST_ROOT:
        session.delete(root)
        session.add(SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
        session.add(pgclass.SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
    session.commit()

# flask app
@@ -59,7 +59,7 @@ app.config["SECRET_KEY"] = os.urandom(64)
# register blueprints
app.register_blueprint(article, url_prefix = "/article")
app.register_blueprint(log , url_prefix = "/log")
# app.register_blueprint(admin , url_prefix = "/admin")
app.register_blueprint(admin , url_prefix = "/admin")

# logger
logger.logger("server.start", "Server is running")
@@ -72,3 +72,5 @@ def index():
# app run
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000, debug=False)

# check for mixed use of ctx and content (English wording)
@@ -1,7 +1,3 @@
"""
not done
"""

import os
import time
import math

@@ -1,16 +1,19 @@
import time
import hashlib
import secrets

import magic
from flask import Blueprint, request, abort
from google.protobuf.message import DecodeError

from utils import pgclass, setting_loader, dbhelper
from utils.misc import internal_json2protobuf, error_proto
from utils import logger, pgclass, setting_loader
from utils.dbhelper import db, solo_article_fetcher, multi_article_fetcher, solo_file_fetcher, solo_article_remover
from utils.misc import error, internal_json2protobuf
from protobuf_files import niming_pb2

"""
TODO:
- test the features finished in the rebuild
- IG post (create post, delete post, post only the main article)
- add a low-resolution (thumbnail) version for file transfer

- redesign the logging format later > normalize it
- IP Record (together with the rev proxy, before deployment)
@@ -23,75 +26,50 @@ article = Blueprint('article', __name__)
# anonymous post list
@article.route('/list', methods = ["GET"])
def listing():
    res, code = dbhelper.multi_article_fetcher("general", request.args.get("page"), 30)
    res, code = multi_article_fetcher("general", request.args.get("page"), 30)
    return res, code


# fetch an anonymous post attachment
@article.route("/file/<fnhash>", methods=["GET"])
def getfile(fnhash:str):
    resp, code = dbhelper.solo_file_fetcher("general", fnhash)
@article.route("/file/<int:id>", methods=["GET"])
def getfile(id:int):
    resp, code = solo_file_fetcher("general", id)
    return resp, code


# fetch a specific article, visible only to its author
# delete an article, allowed only for its author
@article.route("/own/<type>/<key>", methods = ["GET", "DELETE"])
def owner_getarticle(type:str, key:str):
@article.route("/own/<int:id>", methods = ["GET", "DELETE"])
def owner_getarticle(id:int):
    # arguments
    sha256 = request.args.get("hash", None)
    if not sha256:
        return abort(400)
        return error("Arguments error"), 400
    sha256 = str(sha256)

    if type == 'a':
        if not (len(key) > 0 and key.isdigit()):
            return abort(400)
        key = int(key) # id
    elif type == 'c':
        if not (len(key) > 0):
            return abort(400)
        key = str(key) # sha1
    else:
        return abort(400)

    # fetch the specified article / comment
    # fetch the specified article
    if request.method == "GET":
        if type == 'a': # article
            resfn, code = dbhelper.solo_article_fetcher("owner", key=(sha256, key))
        elif type == 'c': # comment
            resfn, code = dbhelper.solo_comment_fetcher("owner", key=(sha256, key))
        resfn, code = solo_article_fetcher("owner", key=(sha256, id))
        if code == 200:
            return internal_json2protobuf(resfn), code
        return resfn, code
    # delete the specified article together with its comments and files
    elif request.method == "DELETE":
        if type == 'a':
            result, code = dbhelper.solo_article_remover("owner", hash=sha256, id=key)
        elif type == 'c':
            result, code = dbhelper.solo_comment_remover("owner", hash=sha256, sha1=key)
        result, code = solo_article_remover("owner", hash=sha256, id=id)
        if not code == 200:
            return result, code

        one = niming_pb2.FetchResponse.Message()
        if "id" in result: one.id = result["id"]

        return niming_pb2.FetchResponse(posts=[one]).SerializeToString(), 200
        logger.logger("delpost", "Delete post (id=%d): last_status=%s"
                      %(result["id"], str(result["mark"])))
        return niming_pb2.FetchResponse(
            posts = [ niming_pb2.FetchResponse.Message(id = result["id"], mark = result["mark"]) ]
        ).SerializeToString(), 200


# fetch the specified article
@article.route("/a/<int:id>", methods = ["GET"])
@article.route("/<int:id>", methods = ["GET"])
def getarticle(id:int):
    resfn, code = dbhelper.solo_article_fetcher("general", key=id)
    if code == 200:
        return internal_json2protobuf(resfn), code
    return resfn, code


# fetch the comments of the specified article
@article.route("/c/<sha1>", methods = ["GET"])
def getcomment(sha1:str):
    resfn, code = dbhelper.solo_comment_fetcher("general", key=sha1)
    resfn, code = solo_article_fetcher("general", key=id)
    if code == 200:
        return internal_json2protobuf(resfn), code
    return resfn, code
@@ -100,73 +78,101 @@ def getcomment(sha1:str):
# upload an article / comment
@article.route("/", methods = ["POST"])
def posting():
    """
    Work Flow:
    ctx -> reference -> file -> post( hash -> IP -> IG -> mark ) | -> log
    """
    # flow:
    # ctx -> hash -> reference -> file -> IP -> IG -> mark -> post | -> log
    # loadset
    opt = setting_loader.loadset()
    chk_before_post = opt["Check_Before_Post"]
    maxword = opt["Niming_Max_Word"]
    # protobuf parse
    recv = niming_pb2.Post()
    try: recv.ParseFromString(request.data)
    except DecodeError:
        return error_proto("Failed to parse data."), 400
    except DecodeError: return abort(400)

    # content and check
    content = str(recv.content)
    if len(content) == 0 or len(content) > maxword: # length check
        return error_proto("No content or too many words."), 400
    ctx = str(recv.content)
    if len(ctx) == 0 or len(ctx) > maxword: # length check
        return abort(400)

    # hash
    seed = ctx + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
    hash = hashlib.sha256(seed.encode()).hexdigest()

    # reference and check
    ref = int(recv.ref)
    if ref != 0:
        # check that the referenced article exists and is accessible
        with dbhelper.db.getsession() as session:
            article = pgclass.SQLarticle
            article_mark = pgclass.SQLmark
            tpid = session.query(article.id).join(article_mark, article.hash==article_mark.hash) \
                .filter(article.id==ref, article_mark.mark=="visible").first()
            if not tpid:
                return error_proto("Invalid Reference."), 400
        # check whether the referenced article is itself a comment
        reftg, code = solo_article_fetcher(role="general", key=ref)
        if code != 200 or reftg["reference"]:
            return abort(400)
    else:
        ref = None

    result_id, sha1, hash = 0, "", ""
    if ref is None: # only article (comment dont have files)
        # file processing & check
        files = recv.files
        # check - size
        atts = opt["Attachment_Count"]
        sizelimit = opt["Attachment_Size"]
        if len(files) > atts: return error_proto("Too many files"), 400
        for f in files:
            if len(f) <= 0 or len(f) > sizelimit: return error_proto("Empty file or file too big."), 400
        # check - mimetype
        allowed_mime = opt["Allowed_MIME"]
        fmimes = []
        for f in files:
            mime = magic.Magic(mime=True)
            type = mime.from_buffer(f)
            if not(type in allowed_mime): return error_proto("File type not allowed."), 400
            fmimes.append(type)
    # file processing and check
    files = recv.files
    # check - size
    atts = opt["Attachment_Count"]
    sizelimit = opt["Attachment_Size"]
    if len(files) > atts: return abort(400)
    for f in files:
        if len(f) <= 0 or len(f) > sizelimit: return abort(400)
    # check - mimetype
    allowed_mime = opt["Allowed_MIME"]
    fmimes = []
    for f in files:
        mime = magic.Magic(mime=True)
        type = mime.from_buffer(f)
        if not(type in allowed_mime): return abort(400)
        fmimes.append(type)

        # posting
        result_id, hash = dbhelper.solo_article_uploader(content=content,
                                                         file_list=files,
                                                         fmimes=fmimes)
        if not result_id:
            return error_proto("Failed to Post"), 400
    else: # comments
        sha1, hash = dbhelper.solo_comment_uploader(content=content,
                                                    ref=ref)
        if not sha1:
            return error_proto("Failed to Post"), 400
    # IP
    ip = request.remote_addr

    # ig posting
    if chk_before_post:
        igid = None
    # Go posting
    igid = None
    # Coming Soon...

    # mark
    if chk_before_post: mark = "pending"
    else: mark = "visible"

    # posting
    table = pgclass.SQLarticle
    ftab = pgclass.SQLfile
    try:
        with db.getsession() as session:
            # post
            data = table(hash = hash, content = ctx, igid = igid, mark = mark, reference = ref, ip = ip)
            session.add(data)
            # file processor
            fmidx = 0
            fidarr = []
            for f in files:
                fsql = ftab(reference = hash, binary = f, type = fmimes[fmidx])
                fidarr.append(fsql)
                session.add(fsql)
                fmidx += 1
            # first commit
            session.commit()
            # set file list
            data.file_list = [ fid.id for fid in fidarr ]
            session.commit() # second commit
            result_id = data.id
    except:
        return abort(400)

    # logger
    logger.logger("newpost", "New post (id=%d point to %s): %s"%(result_id, ref, mark))

    # to protobuf & return
    proto_stres = niming_pb2.PostResponse(
        status = niming_pb2.Status.Success,
        hash = hash,
        id = int(result_id)
    ).SerializeToString()
    return proto_stres, 200

# switch the whole interface to protobuf transport
# add a low-resolution (thumbnail) version for file transfer

@@ -1,7 +1,8 @@
from flask import Blueprint, request, jsonify
from sqlalchemy import desc

from utils import pgclass, dbhelper
from utils import pgclass
from utils.dbhelper import db
from utils.misc import error

log = Blueprint('log', __name__)
@@ -17,7 +18,7 @@ def listlog():
    count = int(request.args.get("count"))

    # getctx
    with dbhelper.db.getsession() as session:
    with db.getsession() as session:
        table = pgclass.SQLlog
        res = session.query(table).order_by(desc(table.id)).offset(rst).limit(count).all()

@@ -30,7 +31,7 @@ def listlog():
@log.route("/<int:id>", methods = ["GET"])
def getlog(id:int):
    # db
    with dbhelper.db.getsession() as session:
    with db.getsession() as session:
        table = pgclass.SQLlog
        res = session.query(table).filter(table.id == id).first()
        if res is None:

@@ -26,12 +26,15 @@ message FetchResponse {
        uint64 id = 1;
        string content = 2;
        // reply to a post, like a mail chat.
        // optional uint64 ref = 3;
        optional uint64 ref = 3;
        // request files through /article/file/<id> with MIME type.
        // See it as a BLOB url;
        repeated string files_hash = 3;
        optional string igid = 4;
        repeated string comments_hash = 5;
        repeated uint64 files_id = 4;
        optional string hash = 5;
        string igid = 6;
        string mark = 7;
        optional string ip = 8;
        repeated uint64 comments_id = 9;
    }
    // Several post info
    repeated Message posts = 1;

@@ -13,21 +13,21 @@ _sym_db = _symbol_database.Default()



DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cniming.proto\"@\n\x04Post\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x10\n\x03ref\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\r\n\x05\x66iles\x18\x03 \x03(\x0c\x42\x06\n\x04_ref\"q\n\x0cPostResponse\x12\x17\n\x06status\x18\x01 \x01(\x0e\x32\x07.Status\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x04\x12\x1b\n\x0e\x66\x61iled_message\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x11\n\x0f_failed_message\"\xa5\x01\n\rFetchResponse\x12%\n\x05posts\x18\x01 \x03(\x0b\x32\x16.FetchResponse.Message\x1am\n\x07Message\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x12\n\nfiles_hash\x18\x03 \x03(\t\x12\x11\n\x04igid\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\rcomments_hash\x18\x05 \x03(\tB\x07\n\x05_igid*!\n\x06Status\x12\n\n\x06\x46\x61iled\x10\x00\x12\x0b\n\x07Success\x10\x01\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cniming.proto\"@\n\x04Post\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x10\n\x03ref\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\r\n\x05\x66iles\x18\x03 \x03(\x0c\x42\x06\n\x04_ref\"q\n\x0cPostResponse\x12\x17\n\x06status\x18\x01 \x01(\x0e\x32\x07.Status\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x04\x12\x1b\n\x0e\x66\x61iled_message\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x11\n\x0f_failed_message\"\xf0\x01\n\rFetchResponse\x12%\n\x05posts\x18\x01 \x03(\x0b\x32\x16.FetchResponse.Message\x1a\xb7\x01\n\x07Message\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x10\n\x03ref\x18\x03 \x01(\x04H\x00\x88\x01\x01\x12\x10\n\x08\x66iles_id\x18\x04 \x03(\x04\x12\x11\n\x04hash\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x0c\n\x04igid\x18\x06 \x01(\t\x12\x0c\n\x04mark\x18\x07 \x01(\t\x12\x0f\n\x02ip\x18\x08 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x0b\x63omments_id\x18\t \x03(\x04\x42\x06\n\x04_refB\x07\n\x05_hashB\x05\n\x03_ip*!\n\x06Status\x12\n\n\x06\x46\x61iled\x10\x00\x12\x0b\n\x07Success\x10\x01\x62\x06proto3')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'niming_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _STATUS._serialized_start=365
  _STATUS._serialized_end=398
  _STATUS._serialized_start=440
  _STATUS._serialized_end=473
  _POST._serialized_start=16
  _POST._serialized_end=80
  _POSTRESPONSE._serialized_start=82
  _POSTRESPONSE._serialized_end=195
  _FETCHRESPONSE._serialized_start=198
  _FETCHRESPONSE._serialized_end=363
  _FETCHRESPONSE_MESSAGE._serialized_start=254
  _FETCHRESPONSE_MESSAGE._serialized_end=363
  _FETCHRESPONSE._serialized_end=438
  _FETCHRESPONSE_MESSAGE._serialized_start=255
  _FETCHRESPONSE_MESSAGE._serialized_end=438
# @@protoc_insertion_point(module_scope)

@@ -1,10 +1,7 @@
psycopg2
sqlalchemy
flask
pyjwt
psycopg2
protobuf==5.28.3
python-magic
bcrypt
pytz
sqlalchemy-utils
minio

@@ -1,233 +1,73 @@
from typing import Tuple, Dict, List
from datetime import datetime
import time
import secrets
import hashlib
import os

from flask import make_response, Response, abort, request
from sqlalchemy.orm import sessionmaker
from sqlalchemy import desc, func, update, Engine, text, delete
import pytz
from flask import make_response, Response, abort
from sqlalchemy.orm import sessionmaker, aliased
from sqlalchemy import desc, func, literal, and_

from utils import pgclass, setting_loader, s3helper, logger
from utils import pgclass
from utils.misc import error
from protobuf_files import niming_pb2

class DB:
class db:
    _engine = None

    @classmethod
    def __init__(cls, engine):
        cls._engine:Engine = engine
        cls._engine = engine

    @classmethod
    def getsession(cls):
        Session = sessionmaker(bind=cls._engine)
        return Session()

db:DB = None


# upload a single article
def solo_article_uploader(content:str, file_list, fmimes:List[str]) -> Tuple[int, str]:
    # loadset
    opt = setting_loader.loadset()
    chk_before_post = opt["Check_Before_Post"]

    # hash
    seed = content + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
    hash = hashlib.sha256(seed.encode()).hexdigest()

    # IP
    ip = request.remote_addr

    # ig posting (only article)
    if chk_before_post:
        igid = None
    # Go posting
    igid = None
    # Coming Soon...

    # mark
    if chk_before_post: mark = "pending"
    else: mark = "visible"

    # posting
    article = pgclass.SQLarticle
    article_mark = pgclass.SQLmark
    article_metadata = pgclass.SQLmeta
    result_id = 0
    try:
        with db.getsession() as session:
            # file processor
            fnlist, err = s3helper.multi_file_uploader(file_list, fmimes)
            if err:
                return 0, ""

            # meta processor
            metaa = article_metadata(ip=ip,
                                     igid=igid,
                                     hash=hash)
            session.add(metaa)

            # article processor
            posta = article(content=content,
                            hash=hash,
                            file_list=fnlist)
            session.add(posta)

            # mark processor
            marka = article_mark(hash=hash,
                                 mark=mark)
            session.add(marka)

            # commit
            session.commit()
            result_id = int(posta.id)

            # logger
            logger.logger("newpost", "New post (id=%d): %s"%(result_id, mark))

        return result_id, hash
    except Exception as e:
        print(e)
        return 0, ""


# upload a single comment
def solo_comment_uploader(content:str, ref:int) -> Tuple[int | str, str]:
    # loadset
    opt = setting_loader.loadset()
    chk_before_post = opt["Check_Before_Post"]

    # hash
    seed = content + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
    hash = hashlib.sha256(seed.encode()).hexdigest()
    sha1 = hashlib.sha1(seed.encode()).hexdigest()

    # IP
    ip = request.remote_addr

    # mark
    if chk_before_post: mark = "pending"
    else: mark = "visible"

    # posting
    article = pgclass.SQLarticle
    article_mark = pgclass.SQLmark
    try:
        with db.getsession() as session:
            # article processor
            cda = {
                "content":content,
                "ip":ip,
                "hash":hash,
                "created_at":datetime.now(pytz.timezone(os.getenv("TIMEZONE"))),
                "sha1":sha1
            }

            session.execute(
                update(article)
                .where(article.id == ref)
                .values(comment_list=article.comment_list + [cda])
            )

            # mark processor
            marka = article_mark(hash=hash,
                                 mark=mark)
            session.add(marka)

            # commit
            session.commit()

            # logger
            logger.logger("newcomment", "New comment %s points to %d: %s"%(sha1, ref, mark))

        return sha1, hash
    except Exception as e:
        return 0, ""


# role (general) (owner) (admin)
# fetch a single article
def solo_article_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
    table = pgclass.SQLarticle # main
    table2 = aliased(table) # comment
    with db.getsession() as session:
        # query
        stmt = "SELECT posts.id AS posts_id, \
                posts.content AS posts_content, \
                posts.file_list AS posts_file_list, \
                article_meta.igid AS article_meta_igid, \
                posts.comment_list AS posts_comment_list, \
                posts.hash AS posts_hash, \
                article_meta.ip AS article_meta_ip \
                FROM posts \
                JOIN mark ON mark.hash = posts.hash \
                JOIN article_meta ON article_meta.hash = posts.hash "
        res = session.query(table.id,
                            table.content,
                            table.reference,
                            table.file_list,
                            table.hash,
                            table.igid,
                            table.mark,
                            table.ip,
                            func.coalesce(func.array_agg(table2.id), literal([])).label("comments"))

        if role == "owner":
            stmt += "WHERE posts.id = :id AND posts.hash = :hash"
            result = session.execute(text(stmt), {"id":key[1], "hash":key[0]})
            res = res.join(table2, table2.reference == table.id, isouter=True) \
                .filter(table.hash == key[0], table.id == key[1])
        elif role == "admin":
            stmt += "WHERE posts.id = :id"
            result = session.execute(text(stmt), {"id":key})
            res = res.join(table2, table2.reference == table.id, isouter=True) \
                .filter(table.id == key)
        elif role == "general":
            stmt += "WHERE posts.id = :id AND mark.mark = 'visible'"
            result = session.execute(text(stmt), {"id":key})
        res = result.first()
            res = res.join(table2, and_(table2.reference == table.id, table2.mark == "visible"), isouter=True) \
                .filter(table.id == key, table.mark == "visible")

        res = res.group_by(table.id, table.content, table.reference, table.file_list,
                           table.hash, table.igid, table.mark, table.ip).first()
        if res is None:
            return abort(404)

        # mapping
        one = {
            "id": res[0],
            "content": res[1],
            "files_hash": res[2],
            "igid": res[3],
            "comments_hash": [ c.sha1 for c in res[4] ]
            "content":res[1],
            "igid":res[5],
            "mark":res[6],
            "reference":res[2],
            "files_id":res[3],
            "comments":res[8]
        }

        if role == "admin":
            one["ip"] = res[6]
            one["ip"] = res[7]
        if role == "owner" or role == "admin":
            one["hash"] = res[5]

        return one, 200

# role (general) (owner) (admin)
# fetch a single comment
def solo_comment_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
    with db.getsession() as session:
        # query
        stmt = "SELECT posts.id AS parent, c.* \
                FROM posts \
                JOIN mark ON mark.hash = posts.hash \
                JOIN unnest(posts.comment_list) AS c ON 1=1 "
        if role == "general":
            # for general users: look up by sha1 and require the parent post to be visible
            stmt += " WHERE c.sha1 = :key AND mark.mark = 'visible'"
            arta = session.execute(text(stmt), {'key':key}).first()
        elif role == "owner":
            # for the author: look up by sha256
            stmt += " WHERE c.hash = :key AND c.sha1 = :sha1"
            arta = session.execute(text(stmt), {'key':key[0], 'sha1':key[1]}).first()
        elif role == "admin":
            # for admins: look up by sha1
            stmt += " WHERE c.sha1 = :key"
            arta = session.execute(text(stmt), {'key':key}).first()
        if arta is None:
            return abort(404)

        # mapping
        one = {
            "content": arta[1],
            "sha1": arta[5]
        }

        if role == "admin":
            one["ip"] = arta[2]
        if role == "owner" or role == "admin":
            one["hash"] = arta[3]
            one["hash"] = res[4]

        return one, 200

@@ -239,145 +79,76 @@ def multi_article_fetcher(role:str, page:str, count:int) -> Tuple[bytes, int]: #
        return abort(400)
    page = int(page)*count

    article = pgclass.SQLarticle
    article_meta = pgclass.SQLmeta
    article_mark = pgclass.SQLmark
    table = pgclass.SQLarticle
    resfn = niming_pb2.FetchResponse()

    with db.getsession() as session:
        # query
        res = session.query(article.id, article.content, article.file_list, article_meta.igid, article.hash, article_meta.ip)
        res = res.join(article_meta, article_meta.hash==article.hash)
        res = res.join(article_mark, article_mark.hash==article.hash)
        res = session.query(table)
        if role == "general":
            res = res.filter(article_mark.mark == "visible")
        res = res.order_by(desc(article.id)).offset(page).limit(count).all()
            res = res.filter(table.mark == "visible", table.reference == None)
        elif role == "admin":
            res = res.filter(table.reference == None)
        res = res.order_by(desc(table.id)).offset(page).limit(count).all()

        # mapping
        for r in res:
            one = niming_pb2.FetchResponse.Message(
                id = r[0],
                content = r[1],
                files_hash = r[2],
                igid = r[3],
                id = r.id,
                content = r.content,
                files_id = r.file_list,
                igid = r.igid,
                mark = r.mark,
                ref = r.reference
            )
            if role == "admin": # if admin, also return ip and hash # not supported on the proto side
                one.hash = r[4]
                one.ip = r[5]
            if role == "admin": # if admin, also return ip and hash
                one.hash = r.hash
                one.ip = r.ip
            resfn.posts.append(one)

    return resfn.SerializeToString(), 200


# delete a single article
# delete an article
def solo_article_remover(role:str, hash:str=None, id:int=None) -> Tuple[Dict, int]: # admin, owner
    key = None
    if role == "admin": key = id
    elif role == "owner": key = (hash, id)

    article = pgclass.SQLarticle
    article_mark = pgclass.SQLmark
    table = pgclass.SQLarticle
    with db.getsession() as session:
        # fetch the post itself
        pres = session.query(article.id, article.hash, article_mark.mark, article.file_list).join(article_mark, article_mark.hash==article.hash)
        if role == "admin":
            pres = pres.filter(article.id == key).first()
            res = session.query(table).filter(table.id == key).first()
        elif role == "owner":
            pres = pres.filter(article.id == key[1], article.hash == key[0]).first()
        if pres is None: # if the post does not exist
            res = session.query(table).filter(table.hash == key[0], table.id == key[1]).first()
        if res is None: # check that the post exists
            return abort(404)

        # fetch the hashes of the post's comments
        stmt="SELECT c.hash as chash " \
            + "FROM posts, unnest(posts.comment_list) AS c " \
            + "WHERE posts.id = :id"
        cres = session.execute(text(stmt), {'id':pres[0]}).all()

        # delete the post itself
        stmt = delete(article).where(article.hash == pres[1])
        session.execute(stmt)

        # delete marks (post & comments)
        stmt = delete(article_mark).where(article_mark.hash == pres[1])
        session.execute(stmt)
        for c in cres:
            stmt = delete(article_mark).where(article_mark.hash == c[0])
            session.execute(stmt)

        # delete files
        err = s3helper.multi_file_remover(pres[3])
        if err:
            return abort(500)

        # delete the post
        session.delete(res)
        session.commit()

        # logger
        logger.logger("delpost", "Delete post (id=%d): last_status=%s"
                      %(int(pres[0]), str(pres[2])))

        return {"id":pres[0], "mark":pres[2]}, 200


# delete a single comment
def solo_comment_remover(role:str, hash:str=None, sha1:str=None) -> Tuple[Dict, int]:
    key = None
    if role == "admin": key = sha1
    elif role == "owner": key = (hash, sha1)

    article_mark = pgclass.SQLmark
    with db.getsession() as session:
        # fetch the comment itself
        stmt="SELECT posts.id AS parent, c.sha1, c.hash " \
            + "FROM posts, unnest(posts.comment_list) AS c "
        if role == "admin":
            stmt += "WHERE c.sha1 = :sha1"
            cres = session.execute(text(stmt), {'sha1':key}).first()
        elif role == 'owner':
            stmt += "WHERE c.sha1 = :sha1 AND c.hash = :hash"
            cres = session.execute(text(stmt), {'sha1':key[1], 'hash':key[0]}).first()
        if cres is None: # if it does not exist
            return abort(404)

        # delete the comment itself
        stmt="UPDATE posts " \
            +"SET comment_list = ARRAY(" \
            +"SELECT c " \
            +"FROM unnest(comment_list) AS c " \
            +"WHERE (c.sha1, c.hash) != (:sha1, :hash)" \
            +")"
        session.execute(text(stmt), {'sha1':cres[1], 'hash':cres[2]})

        # delete marks (post & comments)
        mark = session.query(article_mark.mark).filter(article_mark.hash == cres[2])
        stmt = delete(article_mark).where(article_mark.hash == cres[2])
        session.execute(stmt)

        session.commit()

        logger.logger("delcomment", "Delete comment (sha1=%s): last_status=%s"
                      %(cres[1], str(mark)))

        return {"sha1":cres[1], "mark":mark}, 200
        return {"id":res.id, "mark":res.mark}, 200


# fetch a file
def solo_file_fetcher(role:str, fnhash:str) -> Tuple[Response, int]: # general, admin
    article = pgclass.SQLarticle
    article_mark = pgclass.SQLmark
def solo_file_fetcher(role:str, id:int) -> Tuple[Response, int]: # general, admin
    table = pgclass.SQLarticle
    ftab = pgclass.SQLfile

    with db.getsession() as session:
        arta = session.query(article).join(article_mark, article_mark.hash == article.hash).filter(article.file_list == func.any(fnhash))
        if role == "general":
            arta = arta.filter(article_mark == 'visible')
        aeta = arta.first()
        if arta is None: # check that the parent article exists / is viewable
        fres = session.query(ftab).filter(ftab.id == id).first()
        if fres is None: # check that the file exists
            return error("File not found"), 404

        # fetch file
        f, err = s3helper.solo_file_fetcher(fnhash)
        if err:
            return error("File not found"), 404
        resp = make_response(f["binary"])
        resp.headers.set("Content-Type", f["mime"])
        resp.headers.set("Content-Disposition", f"attachment; filename=file_{fnhash}")
        if role == "general":
            article = session.query(table).filter(table.hash == fres.reference, table.mark == 'visible').first()
        elif role == "admin":
            article = session.query(table).filter(table.hash == fres.reference).first()
        if article is None: # check that the parent article exists / is viewable
            return error("File not found"), 404

        resp = make_response(fres.binary)
        resp.headers.set("Content-Type", fres.type)
        resp.headers.set("Content-Disposition", f"attachment; filename=file{fres.id}")
        return resp, 200

@@ -1,4 +1,5 @@
from utils import pgclass, dbhelper
from utils import pgclass
from utils.dbhelper import db
from utils.platform_consts import EVENT_TYPE

def logger(type, message):
@@ -13,7 +14,7 @@ def logger(type, message):

    # session.add
    if flag:
        with dbhelper.db.getsession() as session:
        with db.getsession() as session:
            session.add(log)
            session.commit()

@@ -6,32 +6,26 @@ def error(message:str) -> Response:
    return jsonify({"error":message})


def error_proto(message:str) -> bytes:
    return niming_pb2.PostResponse(
        status = niming_pb2.Status.Failed,
        failed_message = message
    ).SerializeToString()


def internal_json2protobuf(original:list|dict) -> bytes:
    if isinstance(original, dict):
        original = [original]

    res = niming_pb2.FetchResponse()
    res = niming_pb2.FetchResponse(status = niming_pb2.Status.Success)
    for o in original:
        # drop null object
        newo = {}
        for oc in o:
            if o[oc] is not None:
                newo[oc] = o[oc]
        o = newo

        ob = niming_pb2.FetchResponse.Message()

        if "id" in o: ob.id = o["id"]
        if "content" in o: ob.content = o["content"]
        if "igid" in o: ob.igid = o["igid"]
        if "files_hash" in o: ob.files_hash.extend(o["files_hash"])
        if "comments_hash" in o: ob.comments_hash.extend(o["comments_hash"])
        ob = niming_pb2.FetchResponse.Message(
            id = o["id"],
            content = o["content"],
            igid = o["igid"],
            mark = o["mark"],
            files_id = o["files_id"]
        )
        if None not in o["comments"]:
            ob.comments_id.extend(o["comments"])
        if o["reference"]:
            ob.ref = o["reference"]
        if "ip" in o:
            ob.ip = o["ip"]
        if "hash" in o:
            ob.hash = o["hash"]
        res.posts.append(ob)
    return res.SerializeToString()

@@ -1,54 +1,24 @@
from sqlalchemy import Column, String, TIMESTAMP, func, BIGINT, LargeBinary, ARRAY
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy_utils.types.pg_composite import CompositeType
from sqlalchemy.ext.mutable import MutableList

Base = declarative_base()

CompositeType.cache_ok = False

comment_type = CompositeType(
    'comment',
    [
        Column('content', String),
        Column('ip', String),
        Column('hash', String),
        Column('created_at', TIMESTAMP),
        Column("sha1", String)
    ]
)


# posts
class SQLarticle(Base):
    __tablename__ = 'posts'

    id = Column(BIGINT, primary_key=True)
    content = Column(String)
    file_list = Column(ARRAY(String))
    hash = Column(String)
    comment_list = Column(MutableList.as_mutable(ARRAY(comment_type)))


# post metadata
class SQLmeta(Base):
    __tablename__ = 'article_meta'

    hash = Column(String, primary_key=True)
    created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
    hash = Column(String)
    content = Column(String)
    igid = Column(String)
    ip = Column(String)


# post mark
class SQLmark(Base):
    __tablename__ = 'mark'

    hash = Column(String, primary_key=True)
    mark = Column(String)
    ip = Column(String)
    reference = Column(BIGINT)
    file_list = Column(ARRAY(BIGINT))

    def __repr__(self):
        return f"<article(id={self.id}, hash={self.hash}, content={self.content}, igid={self.igid}, mark={self.mark}, created_at={self.created_at}, ip={self.ip}, reference={self.reference} file_list={self.file_list})>"

# logs
class SQLlog(Base):
    __tablename__ = 'logs'

@@ -60,16 +30,17 @@ class SQLlog(Base):
    def __repr__(self):
        return f"<log(id={self.id}, created_at={self.created_at}, message={self.message}, source={self.source})>"


# deprecated
class SQLfile(Base):
    __tablename__ = 'files'

    id = Column(BIGINT, primary_key=True)
    created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
    type = Column(String)
    reference = Column(String)
    binary = Column(LargeBinary)

    def __repr__(self):
        return f"<file(id={self.id}, created_at={self.created_at}, type={self.type}, binary={self.binary}, reference={self.reference})>"

class SQLuser(Base):
    __tablename__ = 'users'

@@ -4,7 +4,7 @@ PLIST_ROOT = PLIST + ["usermgr"]

# event type
EVENT_TYPE = {
    "general": ["newpost", "delpost", "newcomment", "delcomment"],
    "general": ["newpost", "delpost"],
    "admin": ["login", "user.create", "user.delete", "article.delete", "article.pend", "setting.modify"],
    "server": ["server.start"]
}

@@ -1,67 +0,0 @@
from typing import Tuple, List
import os
import hashlib
import secrets
import time
import io
import sys

import minio

S3_BUCKET = os.getenv("S3_BUCKET")

s3 = minio.Minio(endpoint=os.getenv("S3_ENDPOINT"),
                 access_key=os.getenv("S3_ACCESS_KEY"),
                 secret_key=os.getenv("S3_SECRET_KEY"),
                 secure=False)

# check exist
if not s3.bucket_exists(S3_BUCKET):
    print("[!] Where is S3 bucket \"%s\"?"%S3_BUCKET)
    sys.exit(0)

# methods
def multi_file_uploader(file_list, file_mines:List[str]) -> Tuple[List[str], int]:
    midx = 0
    fidlist = []
    try:
        for f in file_list:
            seed = f + (str(time.time())+str(secrets.token_urlsafe(nbytes=16))).encode()
            fnhash = hashlib.sha256(seed).hexdigest()
            s3.put_object(bucket_name=S3_BUCKET,
                          object_name=fnhash,
                          data=io.BytesIO(f),
                          length=len(f),
                          content_type=file_mines[midx])
            fidlist.append(fnhash)
            midx += 1
        return fidlist, 0
    except Exception as e:
        return [], 1


def solo_file_fetcher(fnhash:str) -> Tuple[dict | None, int]:
    fnd = None
    err = 1
    try:
        res = s3.get_object(S3_BUCKET, fnhash)
        mime = res.getheader("Content-Type")
        fnd = res.data

        err = 0
        fnd = {"binary":fnd, "mime":mime}
    except:
        fnd, err = None, 1

    res.close()
    res.release_conn()
    return fnd, err


def multi_file_remover(file_list) -> int:
    try:
        for f in file_list:
            s3.remove_object(S3_BUCKET, f)
        return 0
    except:
        return 1