Compare commits


2 Commits

Author  SHA1        Message                             Date
p23     b4fb889c5e  fix some bugs                       2024-12-08 19:25:03 +00:00
p23     5e076713b1  rebuild with changed DB and new S3  2024-12-08 19:24:22 +00:00
13 changed files with 560 additions and 235 deletions

app.py

@@ -4,11 +4,12 @@ from flask import Flask
from bcrypt import checkpw, gensalt, hashpw
from sqlalchemy import create_engine
from utils import pgclass, setting_loader, logger, dbhelper
from utils import setting_loader, logger, dbhelper
from utils.pgclass import Base, SQLuser
from utils.platform_consts import PLIST_ROOT
from blueprints.article import article
from blueprints.log import log
from blueprints.admin import admin
# from blueprints.admin import admin
# env
PG_HOST = os.getenv("PG_HOST", None)
@@ -30,9 +31,8 @@ if len(errmsg):
exit(0)
# Postgresql
engine = create_engine('postgresql+psycopg2://%s:%s@%s:%s/%s'%(PG_USER, PG_PASS, PG_HOST, PG_PORT, PG_NAME))
pgclass.Base.metadata.create_all(engine)
dbhelper.db(engine)
dbhelper.db = dbhelper.DB(create_engine('postgresql+psycopg2://%s:%s@%s:%s/%s'%(PG_USER, PG_PASS, PG_HOST, PG_PORT, PG_NAME)))
Base.metadata.create_all(dbhelper.db._engine)
# settings checker
settings = setting_loader.loadset()
@@ -44,12 +44,12 @@ for s in settings:
# root checker
pwhash = hashpw(PLATFORM_ROOT_PASSWORD.encode("utf-8"), gensalt()).decode("utf-8") # if needed, new password
with dbhelper.db.getsession() as session:
root = session.query(pgclass.SQLuser).filter(pgclass.SQLuser.user=="root").first()
root = session.query(SQLuser).filter(SQLuser.user=="root").first()
if root is None: # no root account yet
session.add(pgclass.SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
session.add(SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
elif (not checkpw(PLATFORM_ROOT_PASSWORD.encode("utf-8"), root.password.encode("utf-8"))) or root.permission != PLIST_ROOT:
session.delete(root)
session.add(pgclass.SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
session.add(SQLuser(user="root",password=pwhash, permission=PLIST_ROOT))
session.commit()
# flask app
@@ -59,7 +59,7 @@ app.config["SECRET_KEY"] = os.urandom(64)
# register blueprints
app.register_blueprint(article, url_prefix = "/article")
app.register_blueprint(log , url_prefix = "/log")
app.register_blueprint(admin , url_prefix = "/admin")
# app.register_blueprint(admin , url_prefix = "/admin")
# logger
logger.logger("server.start", "Server is running")
@@ -72,5 +72,3 @@ def index():
# app run
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=False)
# check for mixed use of ctx and content (the English words)
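
The net effect of the app.py change is a single shared database handle: app.py builds one dbhelper.DB around the engine, assigns it to the module-level dbhelper.db, and every blueprint and helper opens sessions through dbhelper.db.getsession(). A minimal sketch of that pattern, simplified (instance attributes instead of the classmethod-style __init__) and using an in-memory SQLite engine purely for illustration:

    # sketch only -- not the project's actual code
    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import sessionmaker

    class DB:
        def __init__(self, engine):
            self._engine = engine

        def getsession(self):
            return sessionmaker(bind=self._engine)()

    # app.py assigns the shared handle once at startup;
    # an in-memory SQLite engine stands in here for the postgresql+psycopg2 URL
    db = DB(create_engine("sqlite://"))

    # blueprints and helpers then open short-lived sessions from it
    with db.getsession() as session:
        print(session.execute(text("SELECT 1")).scalar())  # -> 1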


@@ -1,3 +1,7 @@
"""
not done
"""
import os
import time
import math

blueprints/article.py

@@ -1,19 +1,16 @@
import time
import hashlib
import secrets
import magic
from flask import Blueprint, request, abort
from google.protobuf.message import DecodeError
from utils import logger, pgclass, setting_loader
from utils.dbhelper import db, solo_article_fetcher, multi_article_fetcher, solo_file_fetcher, solo_article_remover
from utils.misc import error, internal_json2protobuf
from utils import pgclass, setting_loader, dbhelper
from utils.misc import internal_json2protobuf, error_proto
from protobuf_files import niming_pb2
"""
TODO:
- test the functionality once the rebuild is done
- IG post (post/delete only the main article itself on IG)
- add a low-resolution (thumbnail) version for file transfers
- redesign the logging approach later > normalize it
- IP record (pair with the reverse proxy before deployment)
@@ -26,50 +23,75 @@ article = Blueprint('article', __name__)
# list of anonymous posts
@article.route('/list', methods = ["GET"])
def listing():
res, code = multi_article_fetcher("general", request.args.get("page"), 30)
res, code = dbhelper.multi_article_fetcher("general", request.args.get("page"), 30)
return res, code
# fetch an anonymous post's attachment
@article.route("/file/<int:id>", methods=["GET"])
def getfile(id:int):
resp, code = solo_file_fetcher("general", id)
@article.route("/file/<fnhash>", methods=["GET"])
def getfile(fnhash:str):
resp, code = dbhelper.solo_file_fetcher("general", fnhash)
return resp, code
# fetch a specific post (only visible to its author)
# delete a post (only its author can do this)
@article.route("/own/<int:id>", methods = ["GET", "DELETE"])
def owner_getarticle(id:int):
@article.route("/own/<type>/<key>", methods = ["GET", "DELETE"])
def owner_getarticle(type:str, key:str):
# arguments
sha256 = request.args.get("hash", None)
if not sha256:
return error("Arguments error"), 400
return abort(400)
sha256 = str(sha256)
# fetch the specified post
if type == 'a':
if not (len(key) > 0 and key.isdigit()):
return abort(400)
key = int(key) # id
elif type == 'c':
if not (len(key) > 0):
return abort(400)
key = str(key) # sha1
else:
return abort(400)
# fetch the specified post / comment
if request.method == "GET":
resfn, code = solo_article_fetcher("owner", key=(sha256, id))
if type == 'a': # post
resfn, code = dbhelper.solo_article_fetcher("owner", key=(sha256, key))
elif type == 'c': # comment
resfn, code = dbhelper.solo_comment_fetcher("owner", key=(sha256, key))
if code == 200:
return internal_json2protobuf(resfn), code
return resfn, code
# delete the specified post together with its comments and files
elif request.method == "DELETE":
result, code = solo_article_remover("owner", hash=sha256, id=id)
if type == 'a':
result, code = dbhelper.solo_article_remover("owner", hash=sha256, id=key)
elif type == 'c':
result, code = dbhelper.solo_comment_remover("owner", hash=sha256, sha1=key)
if not code == 200:
return result, code
logger.logger("delpost", "Delete post (id=%d): last_status=%s"
%(result["id"], str(result["mark"])))
return niming_pb2.FetchResponse(
posts = [ niming_pb2.FetchResponse.Message(id = result["id"], mark = result["mark"]) ]
).SerializeToString(), 200
one = niming_pb2.FetchResponse.Message()
if "id" in result: one.id = result["id"]
return niming_pb2.FetchResponse(posts=[one]).SerializeToString(), 200
# fetch a specific post
@article.route("/<int:id>", methods = ["GET"])
@article.route("/a/<int:id>", methods = ["GET"])
def getarticle(id:int):
resfn, code = solo_article_fetcher("general", key=id)
resfn, code = dbhelper.solo_article_fetcher("general", key=id)
if code == 200:
return internal_json2protobuf(resfn), code
return resfn, code
# fetch a specific comment of a post
@article.route("/c/<sha1>", methods = ["GET"])
def getcomment(sha1:str):
resfn, code = dbhelper.solo_comment_fetcher("general", key=sha1)
if code == 200:
return internal_json2protobuf(resfn), code
return resfn, code
@@ -78,101 +100,73 @@ def getarticle(id:int):
# upload a post / comment
@article.route("/", methods = ["POST"])
def posting():
# flow:
# ctx -> hash -> reference -> file -> IP -> IG -> mark -> post | -> log
"""
Work Flow:
ctx -> reference -> file -> post( hash -> IP -> IG -> mark ) | -> log
"""
# loadset
opt = setting_loader.loadset()
chk_before_post = opt["Check_Before_Post"]
maxword = opt["Niming_Max_Word"]
# protobuf parse
recv = niming_pb2.Post()
try: recv.ParseFromString(request.data)
except DecodeError: return abort(400)
except DecodeError:
return error_proto("Failed to parse data."), 400
# content and check
ctx = str(recv.content)
if len(ctx) == 0 or len(ctx) > maxword: # length check
return abort(400)
# hash
seed = ctx + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
hash = hashlib.sha256(seed.encode()).hexdigest()
content = str(recv.content)
if len(content) == 0 or len(content) > maxword: # length check
return error_proto("No content or too many words."), 400
# reference and check
ref = int(recv.ref)
if ref != 0:
# check whether the referenced post is itself a comment
reftg, code = solo_article_fetcher(role="general", key=ref)
if code != 200 or reftg["reference"]:
return abort(400)
# check that the referenced post exists and is accessible
with dbhelper.db.getsession() as session:
article = pgclass.SQLarticle
article_mark = pgclass.SQLmark
tpid = session.query(article.id).join(article_mark, article.hash==article_mark.hash) \
.filter(article.id==ref, article_mark.mark=="visible").first()
if not tpid:
return error_proto("Invalid Reference."), 400
else:
ref = None
# file processing and check
files = recv.files
# check - size
atts = opt["Attachment_Count"]
sizelimit = opt["Attachment_Size"]
if len(files) > atts: return abort(400)
for f in files:
if len(f) <= 0 or len(f) > sizelimit: return abort(400)
# check - mimetype
allowed_mime = opt["Allowed_MIME"]
fmimes = []
for f in files:
mime = magic.Magic(mime=True)
type = mime.from_buffer(f)
if not(type in allowed_mime): return abort(400)
fmimes.append(type)
result_id, sha1, hash = 0, "", ""
if ref is None: # only article (comment dont have files)
# file processing & check
files = recv.files
# check - size
atts = opt["Attachment_Count"]
sizelimit = opt["Attachment_Size"]
if len(files) > atts: return error_proto("Too many files"), 400
for f in files:
if len(f) <= 0 or len(f) > sizelimit: return error_proto("Empty file or file too big."), 400
# check - mimetype
allowed_mime = opt["Allowed_MIME"]
fmimes = []
for f in files:
mime = magic.Magic(mime=True)
type = mime.from_buffer(f)
if not(type in allowed_mime): return error_proto("File type not allowed."), 400
fmimes.append(type)
# IP
ip = request.remote_addr
# ig posting
if chk_before_post:
igid = None
# Go posting
igid = None
# Coming Soon...
# mark
if chk_before_post: mark = "pending"
else: mark = "visible"
# posting
table = pgclass.SQLarticle
ftab = pgclass.SQLfile
try:
with db.getsession() as session:
# post
data = table(hash = hash, content = ctx, igid = igid, mark = mark, reference = ref, ip = ip)
session.add(data)
# file processor
fmidx = 0
fidarr = []
for f in files:
fsql = ftab(reference = hash, binary = f, type = fmimes[fmidx])
fidarr.append(fsql)
session.add(fsql)
fmidx += 1
# first commit
session.commit()
# set file list
data.file_list = [ fid.id for fid in fidarr ]
session.commit() # second commit
result_id = data.id
except:
return abort(400)
# logger
logger.logger("newpost", "New post (id=%d point to %s): %s"%(result_id, ref, mark))
# posting
result_id, hash = dbhelper.solo_article_uploader(content=content,
file_list=files,
fmimes=fmimes)
if not result_id:
return error_proto("Failed to Post"), 400
else: # comments
sha1, hash = dbhelper.solo_comment_uploader(content=content,
ref=ref)
if not sha1:
return error_proto("Failed to Post"), 400
# to protobuf & return
proto_stres = niming_pb2.PostResponse(
status = niming_pb2.Status.Success,
hash = hash,
id = int(result_id)
).SerializeToString()
return proto_stres, 200
# switch all interfaces to protobuf transport
# add a low-resolution (thumbnail) version for file transfers
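
Taken together, the reworked routes above expose posts at /article/a/<id>, comments at /article/c/<sha1>, attachments at /article/file/<fnhash>, and owner-only access at /article/own/<a|c>/<key>?hash=<sha256>. A client-side sketch of the round trip, with a hypothetical base URL and assuming the requests package, the regenerated niming_pb2 module, and Check_Before_Post turned off so the new post is immediately visible:

    import requests
    from protobuf_files import niming_pb2

    BASE = "http://localhost:5000"   # hypothetical deployment address

    # create a post: the request body is a serialized niming_pb2.Post
    created = niming_pb2.PostResponse()
    created.ParseFromString(
        requests.post(BASE + "/article/",
                      data=niming_pb2.Post(content="hello niming").SerializeToString()).content)

    if created.status == niming_pb2.Status.Success:
        # read the post back, then pull each attachment by its content hash
        fetched = niming_pb2.FetchResponse()
        fetched.ParseFromString(requests.get(BASE + "/article/a/%d" % created.id).content)
        for fnhash in fetched.posts[0].files_hash:
            blob = requests.get(BASE + "/article/file/" + fnhash).content

        # owner-only view/delete: type 'a' keys by post id, 'c' by comment sha1,
        # proven with the sha256 returned at creation time
        requests.delete(BASE + "/article/own/a/%d" % created.id,
                        params={"hash": created.hash})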

blueprints/log.py

@@ -1,8 +1,7 @@
from flask import Blueprint, request, jsonify
from sqlalchemy import desc
from utils import pgclass
from utils.dbhelper import db
from utils import pgclass, dbhelper
from utils.misc import error
log = Blueprint('log', __name__)
@@ -18,7 +17,7 @@ def listlog():
count = int(request.args.get("count"))
# getctx
with db.getsession() as session:
with dbhelper.db.getsession() as session:
table = pgclass.SQLlog
res = session.query(table).order_by(desc(table.id)).offset(rst).limit(count).all()
@@ -31,7 +30,7 @@ def listlog():
@log.route("/<int:id>", methods = ["GET"])
def getlog(id:int):
# db
with db.getsession() as session:
with dbhelper.db.getsession() as session:
table = pgclass.SQLlog
res = session.query(table).filter(table.id == id).first()
if res is None:

niming.proto

@@ -26,15 +26,12 @@ message FetchResponse {
uint64 id = 1;
string content = 2;
// reply to a post, like a mail chat.
optional uint64 ref = 3;
// optional uint64 ref = 3;
// request files through /article/file/<id> with MIME type.
// See it as a BLOB url;
repeated uint64 files_id = 4;
optional string hash = 5;
string igid = 6;
string mark = 7;
optional string ip = 8;
repeated uint64 comments_id = 9;
repeated string files_hash = 3;
optional string igid = 4;
repeated string comments_hash = 5;
}
// Several post info
repeated Message posts = 1;
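
Compared with the old schema, Message now carries content-addressed references (files_hash, comments_hash) instead of numeric ids, and the moderation fields (ref, mark, ip) are gone from the wire format. A short sketch of building and parsing the new shape with the generated module; the values are made up:

    from protobuf_files import niming_pb2

    resp = niming_pb2.FetchResponse()
    msg = resp.posts.add()
    msg.id = 42
    msg.content = "example post"
    msg.files_hash.extend(["a1b2c3"])      # later fetched via /article/file/<hash>
    msg.comments_hash.append("d4e5f6")     # later fetched via /article/c/<sha1>

    decoded = niming_pb2.FetchResponse()
    decoded.ParseFromString(resp.SerializeToString())
    assert list(decoded.posts[0].files_hash) == ["a1b2c3"]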

protobuf_files/niming_pb2.py

@@ -13,21 +13,21 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cniming.proto\"@\n\x04Post\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x10\n\x03ref\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\r\n\x05\x66iles\x18\x03 \x03(\x0c\x42\x06\n\x04_ref\"q\n\x0cPostResponse\x12\x17\n\x06status\x18\x01 \x01(\x0e\x32\x07.Status\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x04\x12\x1b\n\x0e\x66\x61iled_message\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x11\n\x0f_failed_message\"\xf0\x01\n\rFetchResponse\x12%\n\x05posts\x18\x01 \x03(\x0b\x32\x16.FetchResponse.Message\x1a\xb7\x01\n\x07Message\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x10\n\x03ref\x18\x03 \x01(\x04H\x00\x88\x01\x01\x12\x10\n\x08\x66iles_id\x18\x04 \x03(\x04\x12\x11\n\x04hash\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x0c\n\x04igid\x18\x06 \x01(\t\x12\x0c\n\x04mark\x18\x07 \x01(\t\x12\x0f\n\x02ip\x18\x08 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x0b\x63omments_id\x18\t \x03(\x04\x42\x06\n\x04_refB\x07\n\x05_hashB\x05\n\x03_ip*!\n\x06Status\x12\n\n\x06\x46\x61iled\x10\x00\x12\x0b\n\x07Success\x10\x01\x62\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cniming.proto\"@\n\x04Post\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x10\n\x03ref\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\r\n\x05\x66iles\x18\x03 \x03(\x0c\x42\x06\n\x04_ref\"q\n\x0cPostResponse\x12\x17\n\x06status\x18\x01 \x01(\x0e\x32\x07.Status\x12\x0c\n\x04hash\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x04\x12\x1b\n\x0e\x66\x61iled_message\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x11\n\x0f_failed_message\"\xa5\x01\n\rFetchResponse\x12%\n\x05posts\x18\x01 \x03(\x0b\x32\x16.FetchResponse.Message\x1am\n\x07Message\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12\x12\n\nfiles_hash\x18\x03 \x03(\t\x12\x11\n\x04igid\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\rcomments_hash\x18\x05 \x03(\tB\x07\n\x05_igid*!\n\x06Status\x12\n\n\x06\x46\x61iled\x10\x00\x12\x0b\n\x07Success\x10\x01\x62\x06proto3')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'niming_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_STATUS._serialized_start=440
_STATUS._serialized_end=473
_STATUS._serialized_start=365
_STATUS._serialized_end=398
_POST._serialized_start=16
_POST._serialized_end=80
_POSTRESPONSE._serialized_start=82
_POSTRESPONSE._serialized_end=195
_FETCHRESPONSE._serialized_start=198
_FETCHRESPONSE._serialized_end=438
_FETCHRESPONSE_MESSAGE._serialized_start=255
_FETCHRESPONSE_MESSAGE._serialized_end=438
_FETCHRESPONSE._serialized_end=363
_FETCHRESPONSE_MESSAGE._serialized_start=254
_FETCHRESPONSE_MESSAGE._serialized_end=363
# @@protoc_insertion_point(module_scope)

requirements.txt

@@ -1,7 +1,10 @@
psycopg2
sqlalchemy
flask
pyjwt
psycopg2
protobuf==5.28.3
python-magic
bcrypt
pytz
sqlalchemy-utils
minio

utils/dbhelper.py

@@ -1,73 +1,233 @@
from typing import Tuple, Dict, List
from datetime import datetime
import time
import secrets
import hashlib
import os
from flask import make_response, Response, abort
from sqlalchemy.orm import sessionmaker, aliased
from sqlalchemy import desc, func, literal, and_
from flask import make_response, Response, abort, request
from sqlalchemy.orm import sessionmaker
from sqlalchemy import desc, func, update, Engine, text, delete
import pytz
from utils import pgclass
from utils import pgclass, setting_loader, s3helper, logger
from utils.misc import error
from protobuf_files import niming_pb2
class db:
class DB:
_engine = None
@classmethod
def __init__(cls, engine):
cls._engine = engine
cls._engine:Engine = engine
@classmethod
def getsession(cls):
Session = sessionmaker(bind=cls._engine)
return Session()
db:DB = None
# upload a single post
def solo_article_uploader(content:str, file_list, fmimes:List[str]) -> Tuple[int, str]:
# loadset
opt = setting_loader.loadset()
chk_before_post = opt["Check_Before_Post"]
# hash
seed = content + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
hash = hashlib.sha256(seed.encode()).hexdigest()
# IP
ip = request.remote_addr
# ig posting (only article)
if chk_before_post:
igid = None
# Go posting
igid = None
# Coming Soon...
# mark
if chk_before_post: mark = "pending"
else: mark = "visible"
# posting
article = pgclass.SQLarticle
article_mark = pgclass.SQLmark
article_metadata = pgclass.SQLmeta
result_id = 0
try:
with db.getsession() as session:
# file processor
fnlist, err = s3helper.multi_file_uploader(file_list, fmimes)
if err:
return 0, ""
# meta processor
metaa = article_metadata(ip=ip,
igid=igid,
hash=hash)
session.add(metaa)
# article processor
posta = article(content=content,
hash=hash,
file_list=fnlist)
session.add(posta)
# mark processor
marka = article_mark(hash=hash,
mark=mark)
session.add(marka)
# commit
session.commit()
result_id = int(posta.id)
# logger
logger.logger("newpost", "New post (id=%d): %s"%(result_id, mark))
return result_id, hash
except Exception as e:
print(e)
return 0, ""
# upload a single comment
def solo_comment_uploader(content:str, ref:int) -> Tuple[int | str, str]:
# loadset
opt = setting_loader.loadset()
chk_before_post = opt["Check_Before_Post"]
# hash
seed = content + str(time.time()) + str(secrets.token_urlsafe(nbytes=16))
hash = hashlib.sha256(seed.encode()).hexdigest()
sha1 = hashlib.sha1(seed.encode()).hexdigest()
# IP
ip = request.remote_addr
# mark
if chk_before_post: mark = "pending"
else: mark = "visible"
# posting
article = pgclass.SQLarticle
article_mark = pgclass.SQLmark
try:
with db.getsession() as session:
# article processor
cda = {
"content":content,
"ip":ip,
"hash":hash,
"created_at":datetime.now(pytz.timezone(os.getenv("TIMEZONE"))),
"sha1":sha1
}
session.execute(
update(article)
.where(article.id == ref)
.values(comment_list=article.comment_list + [cda])
)
# mark processor
marka = article_mark(hash=hash,
mark=mark)
session.add(marka)
# commit
session.commit()
# logger
logger.logger("newcomment", "New comment %s points to %d: %s"%(sha1, ref, mark))
return sha1, hash
except Exception as e:
return 0, ""
# role (general) (owner) (admin)
# fetch a single post
def solo_article_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
table = pgclass.SQLarticle # main
table2 = aliased(table) # comment
with db.getsession() as session:
# query
res = session.query(table.id,
table.content,
table.reference,
table.file_list,
table.hash,
table.igid,
table.mark,
table.ip,
func.coalesce(func.array_agg(table2.id), literal([])).label("comments"))
stmt = "SELECT posts.id AS posts_id, \
posts.content AS posts_content, \
posts.file_list AS posts_file_list, \
article_meta.igid AS article_meta_igid, \
posts.comment_list AS posts_comment_list, \
posts.hash AS posts_hash, \
article_meta.ip AS article_meta_ip \
FROM posts \
JOIN mark ON mark.hash = posts.hash \
JOIN article_meta ON article_meta.hash = posts.hash "
if role == "owner":
res = res.join(table2, table2.reference == table.id, isouter=True) \
.filter(table.hash == key[0], table.id == key[1])
stmt += "WHERE posts.id = :id AND posts.hash = :hash"
result = session.execute(text(stmt), {"id":key[1], "hash":key[0]})
elif role == "admin":
res = res.join(table2, table2.reference == table.id, isouter=True) \
.filter(table.id == key)
stmt += "WHERE posts.id = :id"
result = session.execute(text(stmt), {"id":key})
elif role == "general":
res = res.join(table2, and_(table2.reference == table.id, table2.mark == "visible"), isouter=True) \
.filter(table.id == key, table.mark == "visible")
res = res.group_by(table.id, table.content, table.reference, table.file_list,
table.hash, table.igid, table.mark, table.ip).first()
stmt += "WHERE posts.id = :id AND mark.mark = 'visible'"
result = session.execute(text(stmt), {"id":key})
res = result.first()
if res is None:
return abort(404)
# mapping
one = {
"id": res[0],
"content":res[1],
"igid":res[5],
"mark":res[6],
"reference":res[2],
"files_id":res[3],
"comments":res[8]
"content": res[1],
"files_hash": res[2],
"igid": res[3],
"comments_hash": [ c.sha1 for c in res[4] ]
}
if role == "admin":
one["ip"] = res[7]
one["ip"] = res[6]
if role == "owner" or role == "admin":
one["hash"] = res[4]
one["hash"] = res[5]
return one, 200
# role (general) (owner) (admin)
# fetch a single comment
def solo_comment_fetcher(role:str, key) -> Tuple[Dict, int]: # admin, owner, general
with db.getsession() as session:
# query
stmt = "SELECT posts.id AS parent, c.* \
FROM posts \
JOIN mark ON mark.hash = posts.hash \
JOIN unnest(posts.comment_list) AS c ON 1=1 "
if role == "general":
# general users: look up by sha1 and require the parent post to be visible
stmt += " WHERE c.sha1 = :key AND mark.mark = 'visible'"
arta = session.execute(text(stmt), {'key':key}).first()
elif role == "owner":
# post author: look up by sha256
stmt += " WHERE c.hash = :key AND c.sha1 = :sha1"
arta = session.execute(text(stmt), {'key':key[0], 'sha1':key[1]}).first()
elif role == "admin":
# admins: look up by sha1
stmt += " WHERE c.sha1 = :key"
arta = session.execute(text(stmt), {'key':key}).first()
if arta is None:
return abort(404)
# mapping
one = {
"content": arta[1],
"sha1": arta[5]
}
if role == "admin":
one["ip"] = arta[2]
if role == "owner" or role == "admin":
one["hash"] = arta[3]
return one, 200
@@ -79,76 +239,145 @@ def multi_article_fetcher(role:str, page:str, count:int) -> Tuple[bytes, int]: #
return abort(400)
page = int(page)*count
table = pgclass.SQLarticle
article = pgclass.SQLarticle
article_meta = pgclass.SQLmeta
article_mark = pgclass.SQLmark
resfn = niming_pb2.FetchResponse()
with db.getsession() as session:
# query
res = session.query(table)
res = session.query(article.id, article.content, article.file_list, article_meta.igid, article.hash, article_meta.ip)
res = res.join(article_meta, article_meta.hash==article.hash)
res = res.join(article_mark, article_mark.hash==article.hash)
if role == "general":
res = res.filter(table.mark == "visible", table.reference == None)
elif role == "admin":
res = res.filter(table.reference == None)
res = res.order_by(desc(table.id)).offset(page).limit(count).all()
res = res.filter(article_mark.mark == "visible")
res = res.order_by(desc(article.id)).offset(page).limit(count).all()
# mapping
for r in res:
one = niming_pb2.FetchResponse.Message(
id = r.id,
content = r.content,
files_id = r.file_list,
igid = r.igid,
mark = r.mark,
ref = r.reference
id = r[0],
content = r[1],
files_hash = r[2],
igid = r[3],
)
if role == "admin": # 如果是管理員 多給ip 跟 hash
one.hash = r.hash
one.ip = r.ip
if role == "admin": # 如果是管理員 多給ip 跟 hash # proto那邊沒支援
one.hash = r[4]
one.ip = r[5]
resfn.posts.append(one)
return resfn.SerializeToString(), 200
# delete a post
# delete a single post
def solo_article_remover(role:str, hash:str=None, id:int=None) -> Tuple[Dict, int]: # admin, owner
key = None
if role == "admin": key = id
elif role == "owner": key = (hash, id)
table = pgclass.SQLarticle
article = pgclass.SQLarticle
article_mark = pgclass.SQLmark
with db.getsession() as session:
# fetch the post itself
pres = session.query(article.id, article.hash, article_mark.mark, article.file_list).join(article_mark, article_mark.hash==article.hash)
if role == "admin":
res = session.query(table).filter(table.id == key).first()
pres = pres.filter(article.id == key).first()
elif role == "owner":
res = session.query(table).filter(table.hash == key[0], table.id == key[1]).first()
if res is None: # check that the post exists
pres = pres.filter(article.id == key[1], article.hash == key[0]).first()
if pres is None: # if the post does not exist
return abort(404)
# delete the post itself
session.delete(res)
# fetch the post's comments (their hashes)
stmt="SELECT c.hash as chash " \
+ "FROM posts, unnest(posts.comment_list) AS c " \
+ "WHERE posts.id = :id"
cres = session.execute(text(stmt), {'id':pres[0]}).all()
# delete the post itself
stmt = delete(article).where(article.hash == pres[1])
session.execute(stmt)
# delete the marks (post & comments)
stmt = delete(article_mark).where(article_mark.hash == pres[1])
session.execute(stmt)
for c in cres:
stmt = delete(article_mark).where(article_mark.hash == c[0])
session.execute(stmt)
# delete the attached files
err = s3helper.multi_file_remover(pres[3])
if err:
return abort(500)
session.commit()
return {"id":res.id, "mark":res.mark}, 200
# logger
logger.logger("delpost", "Delete post (id=%d): last_status=%s"
%(int(pres[0]), str(pres[2])))
return {"id":pres[0], "mark":pres[2]}, 200
# delete a single comment
def solo_comment_remover(role:str, hash:str=None, sha1:str=None) -> Tuple[Dict, int]:
key = None
if role == "admin": key = sha1
elif role == "owner": key = (hash, sha1)
article_mark = pgclass.SQLmark
with db.getsession() as session:
# fetch the comment itself
stmt="SELECT posts.id AS parent, c.sha1, c.hash " \
+ "FROM posts, unnest(posts.comment_list) AS c "
if role == "admin":
stmt += "WHERE c.sha1 = :sha1"
cres = session.execute(text(stmt), {'sha1':key}).first()
elif role == 'owner':
stmt += "WHERE c.sha1 = :sha1 AND c.hash = :hash"
cres = session.execute(text(stmt), {'sha1':key[1], 'hash':key[0]}).first()
if cres is None: # if it does not exist
return abort(404)
# delete the comment itself
stmt="UPDATE posts " \
+"SET comment_list = ARRAY(" \
+"SELECT c " \
+"FROM unnest(comment_list) AS c " \
+"WHERE (c.sha1, c.hash) != (:sha1, :hash)" \
+")"
session.execute(text(stmt), {'sha1':cres[1], 'hash':cres[2]})
# delete the mark (post & comment)
mark = session.query(article_mark.mark).filter(article_mark.hash == cres[2]).scalar()
stmt = delete(article_mark).where(article_mark.hash == cres[2])
session.execute(stmt)
session.commit()
logger.logger("delcomment", "Delete comment (sha1=%s): last_status=%s"
%(cres[1], str(mark)))
return {"sha1":cres[1], "mark":mark}, 200
# fetch a file
def solo_file_fetcher(role:str, id:int) -> Tuple[Response, int]: # general, admin
table = pgclass.SQLarticle
ftab = pgclass.SQLfile
def solo_file_fetcher(role:str, fnhash:str) -> Tuple[Response, int]: # general, admin
article = pgclass.SQLarticle
article_mark = pgclass.SQLmark
with db.getsession() as session:
fres = session.query(ftab).filter(ftab.id == id).first()
if fres is None: # check that the file exists
return error("File not found"), 404
arta = session.query(article).join(article_mark, article_mark.hash == article.hash).filter(article.file_list.any(fnhash)) # fnhash = ANY(posts.file_list)
if role == "general":
article = session.query(table).filter(table.hash == fres.reference, table.mark == 'visible').first()
elif role == "admin":
article = session.query(table).filter(table.hash == fres.reference).first()
if article is None: # check that the parent post exists / is viewable
arta = arta.filter(article_mark.mark == 'visible')
arta = arta.first()
if arta is None: # check that the parent post exists / is viewable
return error("File not found"), 404
resp = make_response(fres.binary)
resp.headers.set("Content-Type", fres.type)
resp.headers.set("Content-Disposition", f"attachment; filename=file{fres.id}")
# fetch file
f, err = s3helper.solo_file_fetcher(fnhash)
if err:
return error("File not found"), 404
resp = make_response(f["binary"])
resp.headers.set("Content-Type", f["mime"])
resp.headers.set("Content-Disposition", f"attachment; filename=file_{fnhash}")
return resp, 200

utils/logger.py

@@ -1,5 +1,4 @@
from utils import pgclass
from utils.dbhelper import db
from utils import pgclass, dbhelper
from utils.platform_consts import EVENT_TYPE
def logger(type, message):
@@ -14,7 +13,7 @@ def logger(type, message):
# session.add
if flag:
with db.getsession() as session:
with dbhelper.db.getsession() as session:
session.add(log)
session.commit()

utils/misc.py

@@ -6,26 +6,32 @@ def error(message:str) -> Response:
return jsonify({"error":message})
def error_proto(message:str) -> bytes:
return niming_pb2.PostResponse(
status = niming_pb2.Status.Failed,
failed_message = message
).SerializeToString()
def internal_json2protobuf(original:list|dict) -> bytes:
if isinstance(original, dict):
original = [original]
res = niming_pb2.FetchResponse(status = niming_pb2.Status.Success)
res = niming_pb2.FetchResponse()
for o in original:
ob = niming_pb2.FetchResponse.Message(
id = o["id"],
content = o["content"],
igid = o["igid"],
mark = o["mark"],
files_id = o["files_id"]
)
if None not in o["comments"]:
ob.comments_id.extend(o["comments"])
if o["reference"]:
ob.ref = o["reference"]
if "ip" in o:
ob.ip = o["ip"]
if "hash" in o:
ob.hash = o["hash"]
# drop null object
newo = {}
for oc in o:
if o[oc] is not None:
newo[oc] = o[oc]
o = newo
ob = niming_pb2.FetchResponse.Message()
if "id" in o: ob.id = o["id"]
if "content" in o: ob.content = o["content"]
if "igid" in o: ob.igid = o["igid"]
if "files_hash" in o: ob.files_hash.extend(o["files_hash"])
if "comments_hash" in o: ob.comments_hash.extend(o["comments_hash"])
res.posts.append(ob)
return res.SerializeToString()
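
The rewritten internal_json2protobuf above simply drops None-valued keys and copies only the fields the slimmed-down Message still defines. A quick sketch of the expected behaviour, with made-up values:

    from utils.misc import internal_json2protobuf
    from protobuf_files import niming_pb2

    one = {"id": 7, "content": "hi", "igid": None,
           "files_hash": ["a1b2c3"], "comments_hash": []}

    decoded = niming_pb2.FetchResponse()
    decoded.ParseFromString(internal_json2protobuf(one))
    assert decoded.posts[0].content == "hi"
    assert decoded.posts[0].igid == ""   # None was dropped, so the proto default remains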

utils/pgclass.py

@@ -1,24 +1,54 @@
from sqlalchemy import Column, String, TIMESTAMP, func, BIGINT, LargeBinary, ARRAY
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy_utils.types.pg_composite import CompositeType
from sqlalchemy.ext.mutable import MutableList
Base = declarative_base()
CompositeType.cache_ok = False
comment_type = CompositeType(
'comment',
[
Column('content', String),
Column('ip', String),
Column('hash', String),
Column('created_at', TIMESTAMP),
Column("sha1", String)
]
)
# posts
class SQLarticle(Base):
__tablename__ = 'posts'
id = Column(BIGINT, primary_key=True)
created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
hash = Column(String)
content = Column(String)
file_list = Column(ARRAY(String))
hash = Column(String)
comment_list = Column(MutableList.as_mutable(ARRAY(comment_type)))
# post metadata
class SQLmeta(Base):
__tablename__ = 'article_meta'
hash = Column(String, primary_key=True)
created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
igid = Column(String)
mark = Column(String)
ip = Column(String)
reference = Column(BIGINT)
file_list = Column(ARRAY(BIGINT))
def __repr__(self):
return f"<article(id={self.id}, hash={self.hash}, content={self.content}, igid={self.igid}, mark={self.mark}, created_at={self.created_at}, ip={self.ip}, reference={self.reference} file_list={self.file_list})>"
# post mark
class SQLmark(Base):
__tablename__ = 'mark'
hash = Column(String, primary_key=True)
mark = Column(String)
# logs
class SQLlog(Base):
__tablename__ = 'logs'
@@ -30,17 +60,16 @@ class SQLlog(Base):
def __repr__(self):
return f"<log(id={self.id}, created_at={self.created_at}, message={self.message}, source={self.source})>"
# deprecated
class SQLfile(Base):
__tablename__ = 'files'
id = Column(BIGINT, primary_key=True)
created_at = Column(TIMESTAMP(timezone=True), server_default=func.now())
type = Column(String)
reference = Column(String)
binary = Column(LargeBinary)
def __repr__(self):
return f"<file(id={self.id}, created_at={self.created_at}, type={self.type}, binary={self.binary}, reference={self.reference})>"
class SQLuser(Base):
__tablename__ = 'users'

utils/platform_consts.py

@@ -4,7 +4,7 @@ PLIST_ROOT = PLIST + ["usermgr"]
# event type
EVENT_TYPE = {
"general": ["newpost", "delpost"],
"general": ["newpost", "delpost", "newcomment", "delcomment"],
"admin": ["login", "user.create", "user.delete", "article.delete", "article.pend", "setting.modify"],
"server": ["server.start"]
}

utils/s3helper.py (new file)

@@ -0,0 +1,67 @@
from typing import Tuple, List
import os
import hashlib
import secrets
import time
import io
import sys
import minio
S3_BUCKET = os.getenv("S3_BUCKET")
s3 = minio.Minio(endpoint=os.getenv("S3_ENDPOINT"),
access_key=os.getenv("S3_ACCESS_KEY"),
secret_key=os.getenv("S3_SECRET_KEY"),
secure=False)
# check exist
if not s3.bucket_exists(S3_BUCKET):
print("[!] Where is S3 bucket \"%s\"?"%S3_BUCKET)
sys.exit(0)
# methods
def multi_file_uploader(file_list, file_mines:List[str]) -> Tuple[List[str], int]:
midx = 0
fidlist = []
try:
for f in file_list:
seed = f + (str(time.time())+str(secrets.token_urlsafe(nbytes=16))).encode()
fnhash = hashlib.sha256(seed).hexdigest()
s3.put_object(bucket_name=S3_BUCKET,
object_name=fnhash,
data=io.BytesIO(f),
length=len(f),
content_type=file_mines[midx])
fidlist.append(fnhash)
midx += 1
return fidlist, 0
except Exception as e:
return [], 1
def solo_file_fetcher(fnhash:str) -> Tuple[dict | None, int]:
fnd = None
err = 1
res = None
try:
res = s3.get_object(S3_BUCKET, fnhash)
mime = res.getheader("Content-Type")
fnd = {"binary":res.data, "mime":mime}
err = 0
except:
fnd, err = None, 1
finally:
# release the connection only if get_object actually returned a response
if res is not None:
res.close()
res.release_conn()
return fnd, err
def multi_file_remover(file_list) -> int:
try:
for f in file_list:
s3.remove_object(S3_BUCKET, f)
return 0
except:
return 1
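
A usage sketch of the new helper, assuming the S3_* environment variables point at a reachable MinIO instance and the bucket already exists (the module refuses to start otherwise):

    from utils import s3helper

    files = [b"first attachment", b"second attachment"]
    mimes = ["text/plain", "text/plain"]

    hashes, err = s3helper.multi_file_uploader(files, mimes)
    assert err == 0 and len(hashes) == 2     # hashes are the object names in the bucket

    fetched, err = s3helper.solo_file_fetcher(hashes[0])
    if err == 0:
        print(fetched["mime"], len(fetched["binary"]))   # text/plain 16

    s3helper.multi_file_remover(hashes)      # returns 0 on success, 1 on any failure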