rebuild with changed DB and new S3
parent 34fe546cd6
commit 5e076713b1

20  app.py
@@ -4,11 +4,12 @@ from flask import Flask
 from bcrypt import checkpw, gensalt, hashpw
 from sqlalchemy import create_engine
 
-from utils import pgclass, setting_loader, logger, dbhelper
+from utils import setting_loader, logger, dbhelper
+from utils.pgclass import Base, SQLuser
 from utils.platform_consts import PLIST_ROOT
 from blueprints.article import article
 from blueprints.log import log
-from blueprints.admin import admin
+# from blueprints.admin import admin
 
 # env
 PG_HOST = os.getenv("PG_HOST", None)
@@ -30,9 +31,8 @@ if len(errmsg):
     exit(0)
 
 # Postgresql
-engine = create_engine('postgresql+psycopg2://%s:%s@%s:%s/%s'%(PG_USER, PG_PASS, PG_HOST, PG_PORT, PG_NAME))
-pgclass.Base.metadata.create_all(engine)
-dbhelper.db(engine)
+dbhelper.db = dbhelper.DB(create_engine('postgresql+psycopg2://%s:%s@%s:%s/%s'%(PG_USER, PG_PASS, PG_HOST, PG_PORT, PG_NAME)))
+Base.metadata.create_all(dbhelper.db._engine)
 
 # settings checker
 settings = setting_loader.loadset()
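For orientation, the wiring pattern this hunk switches to, as a minimal sketch (module names as in this commit; the connection string below is a placeholder, the real one is built from the PG_* variables above): app.py constructs the engine once, wraps it in dbhelper.DB, and publishes it as the shared dbhelper.db handle that the blueprints and helpers use.

from sqlalchemy import create_engine
from utils import dbhelper
from utils.pgclass import Base

dbhelper.db = dbhelper.DB(create_engine("postgresql+psycopg2://user:pass@localhost:5432/niming"))  # placeholder DSN
Base.metadata.create_all(dbhelper.db._engine)  # create the tables on the shared engine

with dbhelper.db.getsession() as session:      # how every other module opens a session
    pass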
@@ -44,12 +44,12 @@ for s in settings:
 # root checker
 pwhash = hashpw(PLATFORM_ROOT_PASSWORD.encode("utf-8"), gensalt()).decode("utf-8") # if needed, new password
 with dbhelper.db.getsession() as session:
-    root = session.query(pgclass.SQLuser).filter(pgclass.SQLuser.user=="root").first()
+    root = session.query(SQLuser).filter(SQLuser.user=="root").first()
     if root is None: # no root user yet
-        session.add(pgclass.SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
+        session.add(SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
     elif (not checkpw(PLATFORM_ROOT_PASSWORD.encode("utf-8"), root.password.encode("utf-8"))) or root.permission != PLIST_ROOT:
         session.delete(root)
-        session.add(pgclass.SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
+        session.add(SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
     session.commit()
 
 # flask app
@@ -59,7 +59,7 @@ app.config["SECRET_KEY"] = os.urandom(64)
 # register blueprints
 app.register_blueprint(article, url_prefix = "/article")
 app.register_blueprint(log , url_prefix = "/log")
-app.register_blueprint(admin , url_prefix = "/admin")
+# app.register_blueprint(admin , url_prefix = "/admin")
 
 # logger
 logger.logger("server.start", "Server is running")
@@ -72,5 +72,3 @@ def index():
 # app run
 if __name__ == "__main__":
     app.run(host="0.0.0.0", port=5000, debug=False)
-
-# check the mixed use of ctx and content (English wording)
@@ -1,3 +1,7 @@
+"""
+not done
+"""
+
 import os
 import time
 import math
@@ -1,19 +1,16 @@
-import time
-import hashlib
-import secrets
-
 import magic
 from flask import Blueprint, request, abort
 from google.protobuf.message import DecodeError
 
-from utils import logger, pgclass, setting_loader
-from utils.dbhelper import db, solo_article_fetcher, multi_article_fetcher, solo_file_fetcher, solo_article_remover
-from utils.misc import error, internal_json2protobuf
+from utils import pgclass, setting_loader, dbhelper
+from utils.misc import internal_json2protobuf, error_proto
 from protobuf_files import niming_pb2
 
 """
 TODO:
+- test the rebuilt functionality
 - IG post (create a post, delete a post, only post the main article)
+- add a low-resolution (thumbnail) version for file transfer
 
 - the logging format needs a redesign later > normalization
 - IP record (pair with the rev proxy before deploy)
@@ -26,50 +23,75 @@ article = Blueprint('article', __name__)
 # list of anonymous posts
 @article.route('/list', methods = ["GET"])
 def listing():
-    res, code = multi_article_fetcher("general", request.args.get("page"), 30)
+    res, code = dbhelper.multi_article_fetcher("general", request.args.get("page"), 30)
     return res, code
 
 
 # fetch an anonymous post's attachment
-@article.route("/file/<int:id>", methods=["GET"])
-def getfile(id:int):
-    resp, code = solo_file_fetcher("general", id)
+@article.route("/file/<fnhash>", methods=["GET"])
+def getfile(fnhash:str):
+    resp, code = dbhelper.solo_file_fetcher("general", fnhash)
     return resp, code
 
 
 # fetch a specific post, visible only to its author
 # delete a post, allowed only for its author
-@article.route("/own/<int:id>", methods = ["GET", "DELETE"])
-def owner_getarticle(id:int):
+@article.route("/own/<type>/<key>", methods = ["GET", "DELETE"])
+def owner_getarticle(type:str, key:str):
     # arguments
     sha256 = request.args.get("hash", None)
     if not sha256:
-        return error("Arguments error"), 400
+        return abort(400)
     sha256 = str(sha256)
 
-    # fetch the specified post
+    if type == 'a':
+        if not (len(key) > 0 and key.isdigit()):
+            return abort(400)
+        key = int(key) # id
+    elif type == 'c':
+        if not (len(key) > 0):
+            return abort(400)
+        key = str(key) # sha1
+    else:
+        return abort(400)
+
+    # fetch the specified post / comment
     if request.method == "GET":
-        resfn, code = solo_article_fetcher("owner", key=(sha256, id))
+        if type == 'a': # post
+            resfn, code = dbhelper.solo_article_fetcher("owner", key=(sha256, key))
+        elif type == 'c': # comment
+            resfn, code = dbhelper.solo_comment_fetcher("owner", key=(sha256, key))
         if code == 200:
             return internal_json2protobuf(resfn), code
         return resfn, code
     # delete the specified post along with its comments and files
    elif request.method == "DELETE":
-        result, code = solo_article_remover("owner", hash=sha256, id=id)
+        if type == 'a':
+            result, code = dbhelper.solo_article_remover("owner", hash=sha256, id=key)
+        elif type == 'c':
+            result, code = dbhelper.solo_comment_remover("owner", hash=sha256, sha1=key)
        if not code == 200:
            return result, code
 
-        logger.logger("delpost", "Delete post (id=%d): last_status=%s"
-                      %(result["id"], str(result["mark"])))
-        return niming_pb2.FetchResponse(
-            posts = [ niming_pb2.FetchResponse.Message(id = result["id"], mark = result["mark"]) ]
-        ).SerializeToString(), 200
+        one = niming_pb2.FetchResponse.Message()
+        if "id" in result: one.id = result["id"]
+        return niming_pb2.FetchResponse(posts=[one]).SerializeToString(), 200
 
 
 # fetch the specified post
-@article.route("/<int:id>", methods = ["GET"])
+@article.route("/a/<int:id>", methods = ["GET"])
 def getarticle(id:int):
-    resfn, code = solo_article_fetcher("general", key=id)
+    resfn, code = dbhelper.solo_article_fetcher("general", key=id)
+    if code == 200:
+        return internal_json2protobuf(resfn), code
+    return resfn, code
+
+
+# fetch the specified post's comment
+@article.route("/c/<sha1>", methods = ["GET"])
+def getcomment(sha1:str):
+    resfn, code = dbhelper.solo_comment_fetcher("general", key=sha1)
     if code == 200:
         return internal_json2protobuf(resfn), code
     return resfn, code
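The route changes above split the owner view into posts ('a' plus a numeric id) and comments ('c' plus a sha1), both authenticated with the sha256 hash returned at posting time. A hypothetical client session against these routes, with host, ids and hashes as placeholders:

import requests

BASE = "http://localhost:5000/article"

requests.get(f"{BASE}/a/42")                                        # public: fetch post 42
requests.get(f"{BASE}/c/<comment-sha1>")                            # public: fetch one comment
requests.get(f"{BASE}/file/<file-hash>")                            # attachment by content hash
requests.get(f"{BASE}/own/a/42", params={"hash": "<post-sha256>"})  # owner view of a post
requests.delete(f"{BASE}/own/c/<comment-sha1>", params={"hash": "<comment-sha256>"})  # owner deletes a comment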
@@ -78,101 +100,73 @@ def getarticle(id:int):
 # upload a post / comment
 @article.route("/", methods = ["POST"])
 def posting():
-    # flow:
-    # ctx -> hash -> reference -> file -> IP -> IG -> mark -> post | -> log
+    """
+    Work Flow:
+    ctx -> reference -> file -> post( hash -> IP -> IG -> mark ) | -> log
+    """
     # loadset
     opt = setting_loader.loadset()
-    chk_before_post = opt["Check_Before_Post"]
     maxword = opt["Niming_Max_Word"]
     # protobuf parse
     recv = niming_pb2.Post()
     try: recv.ParseFromString(request.data)
-    except DecodeError: return abort(400)
+    except DecodeError:
+        return error_proto("Failed to parse data."), 400
 
     # content and check
-    ctx = str(recv.content)
-    if len(ctx) == 0 or len(ctx) > maxword: # length check
-        return abort(400)
+    content = str(recv.content)
+    if len(content) == 0 or len(content) > maxword: # length check
+        return error_proto("No content or too many words."), 400
 
-    # hash
-    seed = ctx + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
-    hash = hashlib.sha256(seed.encode()).hexdigest()
-
     # reference and check
     ref = int(recv.ref)
     if ref != 0:
-        # check whether the referenced post is itself a comment
-        reftg, code = solo_article_fetcher(role="general", key=ref)
-        if code != 200 or reftg["reference"]:
-            return abort(400)
+        # check that the referenced post exists and is accessible
+        with dbhelper.db.getsession() as session:
+            article = pgclass.SQLarticle
+            article_mark = pgclass.SQLmark
+            tpid = session.query(article.id).join(article_mark, article.hash==article_mark.hash) \
+                          .filter(article.id==ref, article_mark.mark=="visible").first()
+            if not tpid:
+                return error_proto("Invalid Reference."), 400
     else:
         ref = None
 
-    # file processing and check
-    files = recv.files
-    # check - size
-    atts = opt["Attachment_Count"]
-    sizelimit = opt["Attachment_Size"]
-    if len(files) > atts: return abort(400)
-    for f in files:
-        if len(f) <= 0 or len(f) > sizelimit: return abort(400)
-    # check - mimetype
-    allowed_mime = opt["Allowed_MIME"]
-    fmimes = []
-    for f in files:
-        mime = magic.Magic(mime=True)
-        type = mime.from_buffer(f)
-        if not(type in allowed_mime): return abort(400)
-        fmimes.append(type)
+    result_id, sha1, hash = 0, "", ""
+    if ref is None: # only article (comment dont have files)
+        # file processing & check
+        files = recv.files
+        # check - size
+        atts = opt["Attachment_Count"]
+        sizelimit = opt["Attachment_Size"]
+        if len(files) > atts: return error_proto("Too many files"), 400
+        for f in files:
+            if len(f) <= 0 or len(f) > sizelimit: return error_proto("Empty file or file too big."), 400
+        # check - mimetype
+        allowed_mime = opt["Allowed_MIME"]
+        fmimes = []
+        for f in files:
+            mime = magic.Magic(mime=True)
+            type = mime.from_buffer(f)
+            if not(type in allowed_mime): return error_proto("File type not allowed."), 400
+            fmimes.append(type)
 
-    # IP
-    ip = request.remote_addr
-
-    # ig posting
-    if chk_before_post:
-        igid = None
-        # Go posting
-        igid = None
-        # Coming Soon...
-
-    # mark
-    if chk_before_post: mark = "pending"
-    else: mark = "visible"
-
-    # posting
-    table = pgclass.SQLarticle
-    ftab = pgclass.SQLfile
-    try:
-        with db.getsession() as session:
-            # post
-            data = table(hash = hash, content = ctx, igid = igid, mark = mark, reference = ref, ip = ip)
-            session.add(data)
-            # file processor
-            fmidx = 0
-            fidarr = []
-            for f in files:
-                fsql = ftab(reference = hash, binary = f, type = fmimes[fmidx])
-                fidarr.append(fsql)
-                session.add(fsql)
-                fmidx += 1
-            # first commit
-            session.commit()
-            # set file list
-            data.file_list = [ fid.id for fid in fidarr ]
-            session.commit() # second commit
-            result_id = data.id
-    except:
-        return abort(400)
-
-    # logger
-    logger.logger("newpost", "New post (id=%d point to %s): %s"%(result_id, ref, mark))
+        # posting
+        result_id, hash = dbhelper.solo_article_uploader(content=content,
+                                                         file_list=files,
+                                                         fmimes=fmimes)
+        if not result_id:
+            return error_proto("Failed to Post"), 400
+    else: # comments
+        sha1, hash = dbhelper.solo_comment_uploader(content=content,
+                                                    ref=ref)
+        if not sha1:
+            return error_proto("Failed to Post"), 400
 
     # to protobuf & return
     proto_stres = niming_pb2.PostResponse(
+        status = niming_pb2.Status.Success,
         hash = hash,
         id = int(result_id)
     ).SerializeToString()
     return proto_stres, 200
-
-# switch every interface to protobuf transfer
-# add a low-resolution (thumbnail) version for file transfer
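posting() now answers every rejection with error_proto and expects the request body to be a serialized niming_pb2.Post. A sketch of the client side under that assumption, with URL and file name as placeholders:

from protobuf_files import niming_pb2
import requests

post = niming_pb2.Post(content="hello", files=[open("pic.png", "rb").read()])
# a comment would instead set ref=<parent post id> and carry no files

r = requests.post("http://localhost:5000/article/", data=post.SerializeToString())
resp = niming_pb2.PostResponse()
resp.ParseFromString(r.content)
if resp.status == niming_pb2.Status.Success:
    print(resp.id, resp.hash)   # keep hash: it is the owner credential for /article/own/...
else:
    print(resp.failed_message)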
@@ -1,8 +1,7 @@
 from flask import Blueprint, request, jsonify
 from sqlalchemy import desc
 
-from utils import pgclass
-from utils.dbhelper import db
+from utils import pgclass, dbhelper
 from utils.misc import error
 
 log = Blueprint('log', __name__)
@@ -18,7 +17,7 @@ def listlog():
     count = int(request.args.get("count"))
 
     # getctx
-    with db.getsession() as session:
+    with dbhelper.db.getsession() as session:
         table = pgclass.SQLlog
         res = session.query(table).order_by(desc(table.id)).offset(rst).limit(count).all()
 
@@ -31,7 +30,7 @@ def listlog():
 @log.route("/<int:id>", methods = ["GET"])
 def getlog(id:int):
     # db
-    with db.getsession() as session:
+    with dbhelper.db.getsession() as session:
         table = pgclass.SQLlog
         res = session.query(table).filter(table.id == id).first()
         if res is None:
@@ -26,15 +26,12 @@ message FetchResponse {
         uint64 id = 1;
         string content = 2;
         // reply to a post, like a mail chat.
-        optional uint64 ref = 3;
+        // optional uint64 ref = 3;
         // request files through /article/file/<id> with MIME type.
         // See it as a BLOB url;
-        repeated uint64 files_id = 4;
-        optional string hash = 5;
-        string igid = 6;
-        string mark = 7;
-        optional string ip = 8;
-        repeated uint64 comments_id = 9;
+        repeated string files_hash = 3;
+        optional string igid = 4;
+        repeated string comments_hash = 5;
     }
     // Several post info
     repeated Message posts = 1;
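With the trimmed Message, a reader of a serialized FetchResponse only sees the public fields. A minimal sketch, where body stands for the bytes returned by /article/list or /article/a/<id>:

from protobuf_files import niming_pb2

resp = niming_pb2.FetchResponse()
resp.ParseFromString(body)   # body: placeholder for the response payload
for m in resp.posts:
    print(m.id, m.content, list(m.files_hash), list(m.comments_hash), m.igid)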
@@ -13,21 +13,21 @@ _sym_db = _symbol_database.Default()
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cniming.proto\"@\n\x04Post\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x10\n\x03ref\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\r\n\x05\x66iles\x18\x03 \x03(\x0c\x42\x06\n\x04_ref\"q\n\x0cPostResponse\x12\x17\n\x06status\x18\x01 \x01(\x0e\x32\x07.Status\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x04\x12\x1b\n\x0e\x66\x61iled_message\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x11\n\x0f_failed_message\"\xf0\x01\n\rFetchResponse\x12%\n\x05posts\x18\x01 \x03(\x0b\x32\x16.FetchResponse.Message\x1a\xb7\x01\n\x07Message\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x10\n\x03ref\x18\x03 \x01(\x04H\x00\x88\x01\x01\x12\x10\n\x08\x66iles_id\x18\x04 \x03(\x04\x12\x11\n\x04hash\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x0c\n\x04igid\x18\x06 \x01(\t\x12\x0c\n\x04mark\x18\x07 \x01(\t\x12\x0f\n\x02ip\x18\x08 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x0b\x63omments_id\x18\t \x03(\x04\x42\x06\n\x04_refB\x07\n\x05_hashB\x05\n\x03_ip*!\n\x06Status\x12\n\n\x06\x46\x61iled\x10\x00\x12\x0b\n\x07Success\x10\x01\x62\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cniming.proto\"@\n\x04Post\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x10\n\x03ref\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\r\n\x05\x66iles\x18\x03 \x03(\x0c\x42\x06\n\x04_ref\"q\n\x0cPostResponse\x12\x17\n\x06status\x18\x01 \x01(\x0e\x32\x07.Status\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x04\x12\x1b\n\x0e\x66\x61iled_message\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x11\n\x0f_failed_message\"\xa5\x01\n\rFetchResponse\x12%\n\x05posts\x18\x01 \x03(\x0b\x32\x16.FetchResponse.Message\x1am\n\x07Message\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x12\n\nfiles_hash\x18\x03 \x03(\t\x12\x11\n\x04igid\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\rcomments_hash\x18\x05 \x03(\tB\x07\n\x05_igid*!\n\x06Status\x12\n\n\x06\x46\x61iled\x10\x00\x12\x0b\n\x07Success\x10\x01\x62\x06proto3')
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'niming_pb2', globals())
 if _descriptor._USE_C_DESCRIPTORS == False:
 
   DESCRIPTOR._options = None
-  _STATUS._serialized_start=440
-  _STATUS._serialized_end=473
+  _STATUS._serialized_start=365
+  _STATUS._serialized_end=398
   _POST._serialized_start=16
   _POST._serialized_end=80
   _POSTRESPONSE._serialized_start=82
   _POSTRESPONSE._serialized_end=195
   _FETCHRESPONSE._serialized_start=198
-  _FETCHRESPONSE._serialized_end=438
-  _FETCHRESPONSE_MESSAGE._serialized_start=255
-  _FETCHRESPONSE_MESSAGE._serialized_end=438
+  _FETCHRESPONSE._serialized_end=363
+  _FETCHRESPONSE_MESSAGE._serialized_start=254
+  _FETCHRESPONSE_MESSAGE._serialized_end=363
 # @@protoc_insertion_point(module_scope)
@@ -1,7 +1,10 @@
+psycopg2
 sqlalchemy
 flask
 pyjwt
-psycopg2
 protobuf==5.28.3
 python-magic
 bcrypt
+pytz
+sqlalchemy-utils
+minio
@@ -1,55 +1,179 @@
 from typing import Tuple, Dict, List
+from datetime import datetime
+import time
+import secrets
+import hashlib
+import os
 
-from flask import make_response, Response, abort
-from sqlalchemy.orm import sessionmaker, aliased
-from sqlalchemy import desc, func, literal, and_
+from flask import make_response, Response, abort, request
+from sqlalchemy import desc, func, update, Engine, text, delete
+import pytz
 
-from utils import pgclass
+from utils import pgclass, setting_loader, s3helper, logger
 from utils.misc import error
 from protobuf_files import niming_pb2
 
-class db:
+class DB:
     _engine = None
 
     @classmethod
     def __init__(cls, engine):
-        cls._engine = engine
+        cls._engine:Engine = engine
 
     @classmethod
     def getsession(cls):
         Session = sessionmaker(bind=cls._engine)
         return Session()
 
+db:DB = None
+
+
+# upload a single post
+def solo_article_uploader(content:str, file_list, fmimes:List[str]) -> Tuple[int, str]:
+    # loadset
+    opt = setting_loader.loadset()
+    chk_before_post = opt["Check_Before_Post"]
+
+    # hash
+    seed = content + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
+    hash = hashlib.sha256(seed.encode()).hexdigest()
+
+    # IP
+    ip = request.remote_addr
+
+    # ig posting (only article)
+    if chk_before_post:
+        igid = None
+        # Go posting
+        igid = None
+        # Coming Soon...
+
+    # mark
+    if chk_before_post: mark = "pending"
+    else: mark = "visible"
+
+    # posting
+    article = pgclass.SQLarticle
+    article_mark = pgclass.SQLmark
+    article_metadata = pgclass.SQLmeta
+    result_id = 0
+    try:
+        with db.getsession() as session:
+            # file processor
+            fnlist, err = s3helper.multi_file_uploader(file_list, fmimes)
+            if err:
+                return 0, ""
+
+            # meta processor
+            metaa = article_metadata(ip=ip,
+                                     igid=igid,
+                                     hash=hash)
+            session.add(metaa)
+
+            # article processor
+            posta = article(content=content,
+                            hash=hash,
+                            file_list=fnlist)
+            session.add(posta)
+
+            # mark processor
+            marka = article_mark(hash=hash,
+                                 mark=mark)
+            session.add(marka)
+
+            # commit
+            session.commit()
+            result_id = int(posta.id)
+
+        # logger
+        logger.logger("newpost", "New post (id=%d): %s"%(result_id, mark))
+
+        return result_id, hash
+    except Exception as e:
+        print(e)
+        return 0, ""
+
+
+# upload a single comment
+def solo_comment_uploader(content:str, ref:int) -> Tuple[int | str, str]:
+    # loadset
+    opt = setting_loader.loadset()
+    chk_before_post = opt["Check_Before_Post"]
+
+    # hash
+    seed = content + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
+    hash = hashlib.sha256(seed.encode()).hexdigest()
+    sha1 = hashlib.sha1(seed.encode()).hexdigest()
+
+    # IP
+    ip = request.remote_addr
+
+    # mark
+    if chk_before_post: mark = "pending"
+    else: mark = "visible"
+
+    # posting
+    article = pgclass.SQLarticle
+    article_mark = pgclass.SQLmark
+    try:
+        with db.getsession() as session:
+            # article processor
+            cda = {
+                "content":content,
+                "ip":ip,
+                "hash":hash,
+                "created_at":datetime.now(pytz.timezone(os.getenv("TIMEZONE"))),
+                "sha1":sha1
+            }
+
+            session.execute(
+                update(article)
+                .where(article.id == ref)
+                .values(comment_list=article.comment_list + [cda])
+            )
+
+            # mark processor
+            marka = article_mark(hash=hash,
+                                 mark=mark)
+            session.add(marka)
+
+            # commit
+            session.commit()
+
+        # logger
+        logger.logger("newcomment", "New comment %s points to %d: %s"%(sha1, ref, mark))
+
+        return sha1, hash
+    except Exception as e:
+        return 0, ""
+
+
 # role (general) (owner) (admin)
 # fetch a single post
 def solo_article_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
-    table = pgclass.SQLarticle # main
-    table2 = aliased(table) # comment
     with db.getsession() as session:
         # query
-        res = session.query(table.id,
-                            table.content,
-                            table.reference,
-                            table.file_list,
-                            table.hash,
-                            table.igid,
-                            table.mark,
-                            table.ip,
-                            func.coalesce(func.array_agg(table2.id), literal([])).label("comments"))
+        stmt = "SELECT posts.id AS posts_id, \
+                posts.content AS posts_content, \
+                posts.file_list AS posts_file_list, \
+                article_meta.igid AS article_meta_igid, \
+                posts.comment_list AS posts_comment_list, \
+                posts.hash AS posts_hash, \
+                article_meta.ip AS article_meta_ip \
+                FROM posts \
+                JOIN mark ON mark.hash = posts.hash \
+                JOIN article_meta ON article_meta.hash = posts.hash "
 
         if role == "owner":
-            res = res.join(table2, table2.reference == table.id, isouter=True) \
-                     .filter(table.hash == key[0], table.id == key[1])
+            stmt += "WHERE posts.id = :id AND posts.hash = :hash"
+            result = session.execute(text(stmt), {"id":key[1], "hash":key[0]})
         elif role == "admin":
-            res = res.join(table2, table2.reference == table.id, isouter=True) \
-                     .filter(table.id == key)
+            stmt += "WHERE posts.id = :id"
+            result = session.execute(text(stmt), {"id":key})
         elif role == "general":
-            res = res.join(table2, and_(table2.reference == table.id, table2.mark == "visible"), isouter=True) \
-                     .filter(table.id == key, table.mark == "visible")
+            stmt += "WHERE posts.id = :id AND mark.mark = 'visible'"
+            result = session.execute(text(stmt), {"id":key})
+        res = result.first()
 
-        res = res.group_by(table.id, table.content, table.reference, table.file_list,
-                           table.hash, table.igid, table.mark, table.ip).first()
         if res is None:
             return abort(404)
 
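How the blueprint layer is expected to call the two new uploaders (mirroring blueprints/article.py earlier in this diff); a zero or empty first return value signals failure. content, files, fmimes and parent_post_id below stand for the values the blueprint already prepared:

result_id, hash = dbhelper.solo_article_uploader(content=content, file_list=files, fmimes=fmimes)
if not result_id:
    pass  # rejected, respond with error_proto

sha1, hash = dbhelper.solo_comment_uploader(content=content, ref=parent_post_id)
if not sha1:
    pass  # rejected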
@@ -57,17 +181,52 @@ def solo_article_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
         one = {
             "id": res[0],
             "content": res[1],
-            "igid":res[5],
-            "mark":res[6],
-            "reference":res[2],
-            "files_id":res[3],
-            "comments":res[8]
+            "files_hash": res[2],
+            "igid": res[3],
+            "comments_hash": [ c.sha1 for c in res[4] ]
         }
 
         if role == "admin":
-            one["ip"] = res[7]
+            one["ip"] = res[6]
         if role == "owner" or role == "admin":
-            one["hash"] = res[4]
+            one["hash"] = res[5]
+
+        return one, 200
+
+
+# role (general) (owner) (admin)
+# fetch a single comment
+def solo_comment_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
+    with db.getsession() as session:
+        # query
+        stmt = "SELECT posts.id AS parent, c.* \
+                FROM posts \
+                JOIN mark ON mark.hash = posts.hash \
+                JOIN unnest(posts.comment_list) AS c ON 1=1 "
+        if role == "general":
+            # for general users: look up by sha1 and require the parent post to be visible
+            stmt += " WHERE c.sha1 = :key AND mark.mark = 'visible'"
+            arta = session.execute(text(stmt), {'key':key}).first()
+        elif role == "owner":
+            # for the author: look up by sha256
+            stmt += " WHERE c.hash = :key AND c.sha1 = :sha1"
+            arta = session.execute(text(stmt), {'key':key[0], 'sha1':key[1]}).first()
+        elif role == "admin":
+            # for admins: look up by sha1
+            stmt += " WHERE c.sha1 = :key"
+            arta = session.execute(text(stmt), {'key':key}).first()
+        if arta is None:
+            return abort(404)
+
+        # mapping
+        one = {
+            "content": arta[1],
+            "sha1": arta[5]
+        }
+
+        if role == "admin":
+            one["ip"] = arta[2]
+        if role == "owner" or role == "admin":
+            one["hash"] = arta[3]
 
         return one, 200
 
@@ -79,76 +238,145 @@ def multi_article_fetcher(role:str, page:str, count:int) -> Tuple[bytes, int]: #
         return abort(400)
     page = int(page)*count
 
-    table = pgclass.SQLarticle
+    article = pgclass.SQLarticle
+    article_meta = pgclass.SQLmeta
+    article_mark = pgclass.SQLmark
     resfn = niming_pb2.FetchResponse()
 
     with db.getsession() as session:
         # query
-        res = session.query(table)
+        res = session.query(article.id, article.content, article.file_list, article_meta.igid, article.hash, article_meta.ip)
+        res = res.join(article_meta, article_meta.hash==article.hash)
+        res = res.join(article_mark, article_mark.hash==article.hash)
         if role == "general":
-            res = res.filter(table.mark == "visible", table.reference == None)
-        elif role == "admin":
-            res = res.filter(table.reference == None)
-        res = res.order_by(desc(table.id)).offset(page).limit(count).all()
+            res = res.filter(article_mark.mark == "visible")
+        res = res.order_by(desc(article.id)).offset(page).limit(count).all()
 
         # mapping
         for r in res:
             one = niming_pb2.FetchResponse.Message(
-                id = r.id,
-                content = r.content,
-                files_id = r.file_list,
-                igid = r.igid,
-                mark = r.mark,
-                ref = r.reference
+                id = r[0],
+                content = r[1],
+                files_hash = r[2],
+                igid = r[3],
             )
-            if role == "admin": # if admin, also hand over ip and hash
-                one.hash = r.hash
-                one.ip = r.ip
+            if role == "admin": # if admin, also hand over ip and hash # not supported on the proto side
+                one.hash = r[4]
+                one.ip = r[5]
             resfn.posts.append(one)
 
     return resfn.SerializeToString(), 200
 
 
-# delete a post
+# delete a single post
 def solo_article_remover(role:str, hash:str=None, id:int=None) -> Tuple[Dict, int]: # admin, owner
     key = None
     if role == "admin": key = id
     elif role == "owner": key = (hash, id)
 
-    table = pgclass.SQLarticle
+    article = pgclass.SQLarticle
+    article_mark = pgclass.SQLmark
     with db.getsession() as session:
         # fetch the post itself
+        pres = session.query(article.id, article.hash, article_mark.mark, article.file_list).join(article_mark, article_mark.hash==article.hash)
         if role == "admin":
-            res = session.query(table).filter(table.id == key).first()
+            pres = pres.filter(article.id == key).first()
         elif role == "owner":
-            res = session.query(table).filter(table.hash == key[0], table.id == key[1]).first()
-        if res is None: # check that the post exists
+            pres = pres.filter(article.id == key[1], article.hash == key[0]).first()
+        if pres is None: # the post does not exist
             return abort(404)
-        # delete the post itself
-        session.delete(res)
+
+        # collect the post's comments (hash)
+        stmt="SELECT c.hash as chash " \
+            + "FROM posts, unnest(posts.comment_list) AS c " \
+            + "WHERE posts.id = :id"
+        cres = session.execute(text(stmt), {'id':pres[0]}).all()
+
+        # delete the post itself
+        stmt = delete(article).where(article.hash == pres[1])
+        session.execute(stmt)
+
+        # delete marks (post & comments)
+        stmt = delete(article_mark).where(article_mark.hash == pres[1])
+        session.execute(stmt)
+        for c in cres:
+            stmt = delete(article_mark).where(article_mark.hash == c[0])
+            session.execute(stmt)
+
+        # delete files
+        err = s3helper.multi_file_remover(pres[3])
+        if err:
+            return abort(500)
+
         session.commit()
 
-    return {"id":res.id, "mark":res.mark}, 200
+    # logger
+    logger.logger("delpost", "Delete post (id=%d): last_status=%s"
+                  %(int(pres[0]), str(pres[2])))
+
+    return {"id":pres[0], "mark":pres[2]}, 200
+
+
+# delete a single comment
+def solo_comment_remover(role:str, hash:str=None, sha1:str=None) -> Tuple[Dict, int]:
+    key = None
+    if role == "admin": key = sha1
+    elif role == "owner": key = (hash, sha1)
+
+    article_mark = pgclass.SQLmark
+    with db.getsession() as session:
+        # fetch the comment itself
+        stmt="SELECT posts.id AS parent, c.sha1, c.hash " \
+            + "FROM posts, unnest(posts.comment_list) AS c "
+        if role == "admin":
+            stmt += "WHERE c.sha1 = :sha1"
+            cres = session.execute(text(stmt), {'sha1':key}).first()
+        elif role == 'owner':
+            stmt += "WHERE c.sha1 = :sha1 AND c.hash = :hash"
+            cres = session.execute(text(stmt), {'sha1':key[1], 'hash':key[0]}).first()
+        if cres is None: # it does not exist
+            return abort(404)
+
+        # delete the comment itself
+        stmt="UPDATE posts " \
+            +"SET comment_list = ARRAY(" \
+            +"SELECT c " \
+            +"FROM unnest(comment_list) AS c " \
+            +"WHERE (c.sha1, c.hash) != (:sha1, :hash)" \
+            +")"
+        session.execute(text(stmt), {'sha1':cres[1], 'hash':cres[2]})
+
+        # delete marks (post & comments)
+        mark = session.query(article_mark.mark).filter(article_mark.hash == cres[2])
+        stmt = delete(article_mark).where(article_mark.hash == cres[2])
+        session.execute(stmt)
+
+        session.commit()
+
+    logger.logger("delcomment", "Delete comment (sha1=%s): last_status=%s"
+                  %(cres[1], str(mark)))
+
+    return {"sha1":cres[1], "mark":mark}, 200
 
 
 # fetch a file
-def solo_file_fetcher(role:str, id:int) -> Tuple[Response, int]: # general, admin
-    table = pgclass.SQLarticle
-    ftab = pgclass.SQLfile
+def solo_file_fetcher(role:str, fnhash:str) -> Tuple[Response, int]: # general, admin
+    article = pgclass.SQLarticle
+    article_mark = pgclass.SQLmark
 
     with db.getsession() as session:
-        fres = session.query(ftab).filter(ftab.id == id).first()
-        if fres is None: # check that the file exists
-            return error("File not found"), 404
-
+        arta = session.query(article).join(article_mark, article_mark.hash == article.hash).filter(article.file_list == func.any(fnhash))
         if role == "general":
-            article = session.query(table).filter(table.hash == fres.reference, table.mark == 'visible').first()
-        elif role == "admin":
-            article = session.query(table).filter(table.hash == fres.reference).first()
-        if article is None: # check that the parent post exists / is viewable
+            arta = arta.filter(article_mark == 'visible')
+        aeta = arta.first()
+        if arta is None: # check that the parent post exists / is viewable
             return error("File not found"), 404
 
-        resp = make_response(fres.binary)
-        resp.headers.set("Content-Type", fres.type)
-        resp.headers.set("Content-Disposition", f"attachment; filename=file{fres.id}")
+        # fetch file
+        f, err = s3helper.solo_file_fetcher(fnhash)
+        if err:
+            return error("File not found"), 404
+        resp = make_response(f["binary"])
+        resp.headers.set("Content-Type", f["mime"])
+        resp.headers.set("Content-Disposition", f"attachment; filename=file_{fnhash}")
     return resp, 200
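A quick reference for the key shapes these helpers expect, matching the calls in blueprints/article.py in this commit; the hash values are placeholders:

from utils import dbhelper

dbhelper.solo_article_fetcher("general", key=42)                   # post by integer id
dbhelper.solo_article_fetcher("owner", key=("<sha256>", 42))       # owner proves the post's sha256
dbhelper.solo_article_remover("owner", hash="<sha256>", id=42)

dbhelper.solo_comment_fetcher("general", key="<sha1>")             # comment addressed by sha1
dbhelper.solo_comment_fetcher("owner", key=("<sha256>", "<sha1>"))
dbhelper.solo_comment_remover("owner", hash="<sha256>", sha1="<sha1>")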
@@ -1,5 +1,4 @@
-from utils import pgclass
-from utils.dbhelper import db
+from utils import pgclass, dbhelper
 from utils.platform_consts import EVENT_TYPE
 
 def logger(type, message):
@@ -14,7 +13,7 @@ def logger(type, message):
 
     # session.add
     if flag:
-        with db.getsession() as session:
+        with dbhelper.db.getsession() as session:
             session.add(log)
             session.commit()
 
@@ -6,26 +6,32 @@ def error(message:str) -> Response:
     return jsonify({"error":message})
 
 
+def error_proto(message:str) -> bytes:
+    return niming_pb2.PostResponse(
+        status = niming_pb2.Status.Failed,
+        failed_message = message
+    ).SerializeToString()
+
+
 def internal_json2protobuf(original:list|dict) -> bytes:
     if isinstance(original, dict):
         original = [original]
 
-    res = niming_pb2.FetchResponse(status = niming_pb2.Status.Success)
+    res = niming_pb2.FetchResponse()
     for o in original:
-        ob = niming_pb2.FetchResponse.Message(
-            id = o["id"],
-            content = o["content"],
-            igid = o["igid"],
-            mark = o["mark"],
-            files_id = o["files_id"]
-        )
-        if None not in o["comments"]:
-            ob.comments_id.extend(o["comments"])
-        if o["reference"]:
-            ob.ref = o["reference"]
-        if "ip" in o:
-            ob.ip = o["ip"]
-        if "hash" in o:
-            ob.hash = o["hash"]
+        # drop null object
+        newo = {}
+        for oc in o:
+            if o[oc] is not None:
+                newo[oc] = o[oc]
+        o = newo
+
+        ob = niming_pb2.FetchResponse.Message()
+        if "id" in o: ob.id = o["id"]
+        if "content" in o: ob.content = o["content"]
+        if "igid" in o: ob.igid = o["igid"]
+        if "files_hash" in o: ob.files_hash.extend(o["files_hash"])
+        if "comments_hash" in o: ob.comments_hash.extend(o["comments_hash"])
         res.posts.append(ob)
     return res.SerializeToString()
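Example input the reworked converter accepts now that None values are dropped instead of being assigned into the protobuf message; the values are illustrative:

from utils.misc import internal_json2protobuf

payload = {"id": 42, "content": "hello", "igid": None,
           "files_hash": ["ab12..."], "comments_hash": []}
data = internal_json2protobuf(payload)   # bytes of a FetchResponse holding one Message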
@@ -1,24 +1,54 @@
 from sqlalchemy import Column, String, TIMESTAMP, func, BIGINT, LargeBinary, ARRAY
 from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy_utils.types.pg_composite import CompositeType
+from sqlalchemy.ext.mutable import MutableList
 
 Base = declarative_base()
 
+CompositeType.cache_ok = False
+
+comment_type = CompositeType(
+    'comment',
+    [
+        Column('content', String),
+        Column('ip', String),
+        Column('hash', String),
+        Column('created_at', TIMESTAMP),
+        Column("sha1", String)
+    ]
+)
+
+
+# posts
 class SQLarticle(Base):
     __tablename__ = 'posts'
 
     id = Column(BIGINT, primary_key=True)
-    created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
-    hash = Column(String)
     content = Column(String)
+    file_list = Column(ARRAY(String))
+    hash = Column(String)
+    comment_list = Column(MutableList.as_mutable(ARRAY(comment_type)))
+
+
+# post metadata
+class SQLmeta(Base):
+    __tablename__ = 'article_meta'
+
+    hash = Column(String, primary_key=True)
+    created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
     igid = Column(String)
-    mark = Column(String)
     ip = Column(String)
-    reference = Column(BIGINT)
-    file_list = Column(ARRAY(BIGINT))
 
-    def __repr__(self):
-        return f"<article(id={self.id}, hash={self.hash}, content={self.content}, igid={self.igid}, mark={self.mark}, created_at={self.created_at}, ip={self.ip}, reference={self.reference} file_list={self.file_list})>"
+
+# post mark
+class SQLmark(Base):
+    __tablename__ = 'mark'
+
+    hash = Column(String, primary_key=True)
+    mark = Column(String)
+
+
+# logs
 class SQLlog(Base):
     __tablename__ = 'logs'
 
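Under the split schema one logical post becomes three rows sharing the same hash, plus comments embedded as composite values. A write-side sketch mirroring dbhelper.solo_article_uploader, where h and fnhash are placeholder hashes:

from utils import pgclass, dbhelper

with dbhelper.db.getsession() as session:
    posta = pgclass.SQLarticle(content="hello", hash=h, file_list=[fnhash])  # body + S3 object names
    metaa = pgclass.SQLmeta(ip="203.0.113.1", igid=None, hash=h)             # private metadata
    marka = pgclass.SQLmark(hash=h, mark="visible")                          # moderation state
    session.add_all([posta, metaa, marka])
    session.commit()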
@@ -30,17 +60,16 @@ class SQLlog(Base):
     def __repr__(self):
         return f"<log(id={self.id}, created_at={self.created_at}, message={self.message}, source={self.source})>"
 
 
+# deprecated
 class SQLfile(Base):
     __tablename__ = 'files'
 
     id = Column(BIGINT, primary_key=True)
     created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
     type = Column(String)
-    reference = Column(String)
     binary = Column(LargeBinary)
 
-    def __repr__(self):
-        return f"<file(id={self.id}, created_at={self.created_at}, type={self.type}, binary={self.binary}, reference={self.reference})>"
 
 class SQLuser(Base):
     __tablename__ = 'users'
@@ -4,7 +4,7 @@ PLIST_ROOT = PLIST + ["usermgr"]
 
 # event type
 EVENT_TYPE = {
-    "general": ["newpost", "delpost"],
+    "general": ["newpost", "delpost", "newcomment", "delcomment"],
     "admin": ["login", "user.create", "user.delete", "article.delete", "article.pend", "setting.modify"],
     "server": ["server.start"]
 }
67  utils/s3helper.py (new file)
@@ -0,0 +1,67 @@
+from typing import Tuple, List
+import os
+import hashlib
+import secrets
+import time
+import io
+import sys
+
+import minio
+
+S3_BUCKET = os.getenv("S3_BUCKET")
+
+s3 = minio.Minio(endpoint=os.getenv("S3_ENDPOINT"),
+                 access_key=os.getenv("S3_ACCESS_KEY"),
+                 secret_key=os.getenv("S3_SECRET_KEY"),
+                 secure=False)
+
+# check exist
+if not s3.bucket_exists(S3_BUCKET):
+    print("[!] Where is S3 bucket \"%s\"?"%S3_BUCKET)
+    sys.exit(0)
+
+# methods
+def multi_file_uploader(file_list, file_mines:List[str]) -> Tuple[List[str], int]:
+    midx = 0
+    fidlist = []
+    try:
+        for f in file_list:
+            seed = f + (str(time.time())+str(secrets.token_urlsafe(nbytes=16))).encode()
+            fnhash = hashlib.sha256(seed).hexdigest()
+            s3.put_object(bucket_name=S3_BUCKET,
+                          object_name=fnhash,
+                          data=io.BytesIO(f),
+                          length=len(f),
+                          content_type=file_mines[midx])
+            fidlist.append(fnhash)
+            midx += 1
+        return fidlist, 0
+    except Exception as e:
+        return [], 1
+
+
+def solo_file_fetcher(fnhash:str) -> Tuple[dict | None, int]:
+    fnd = None
+    err = 1
+    try:
+        res = s3.get_object(S3_BUCKET, fnhash)
+        mime = res.getheader("Content-Type")
+        fnd = res.data
+
+        err = 0
+        fnd = {"binary":fnd, "mime":mime}
+    except:
+        fnd, err = None, 1
+
+    res.close()
+    res.release_conn()
+    return fnd, err
+
+
+def multi_file_remover(file_list) -> int:
+    try:
+        for f in file_list:
+            s3.remove_object(S3_BUCKET, f)
+        return 0
+    except:
+        return 1
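The helper's round trip, as consumed by dbhelper in this commit; bucket and credentials come from the S3_* environment variables above, and the file bytes below are placeholders:

from utils import s3helper

fnhashes, err = s3helper.multi_file_uploader([b"<file bytes>"], ["image/png"])
if not err:
    f, err = s3helper.solo_file_fetcher(fnhashes[0])   # {"binary": ..., "mime": "image/png"} on success
    s3helper.multi_file_remover(fnhashes)              # 0 on success, 1 on failure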