first commit
This commit is contained in:
commit
d03e0c8e82
28 changed files with 1492 additions and 0 deletions
9
.gitignore
vendored
Normal file
9
.gitignore
vendored
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
__pycache__
|
||||||
|
tmp
|
||||||
|
config/session.json
|
||||||
|
config/traceback.json
|
||||||
|
config/config.py
|
||||||
|
backend/db/id2igid.db
|
||||||
|
|
||||||
|
test
|
||||||
|
testfiles
|
16
Dockerfile
Normal file
16
Dockerfile
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
FROM python:3.12.10

WORKDIR /app

# apt install
# fix: DEBAIN_FRONTEND -> DEBIAN_FRONTEND (the misspelled variable meant the
# noninteractive frontend was never actually applied)
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -qy ffmpeg libpq-dev libmagic1 libmagic-dev && \
    rm -rf /var/lib/apt/lists/*

# pip3 install
COPY ./requirements.txt /app/requirements.txt
COPY ./ffmpeg_python-0.2.0-py3-none-any.whl /app/ffmpeg_python-0.2.0-py3-none-any.whl

RUN pip3 install ffmpeg_python-0.2.0-py3-none-any.whl
RUN pip3 install -r /app/requirements.txt

EXPOSE 50051
|
45
PictureMaker/testing.py
Normal file
45
PictureMaker/testing.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
import time
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
|
||||||
|
from PIL import Image, ImageDraw, ImageFont
|
||||||
|
|
||||||
|
from config.config import TMP
|
||||||
|
|
||||||
|
# variables
|
||||||
|
PROMA_WIDTH = 600
|
||||||
|
PROMA_HEIGHT = 600
|
||||||
|
PROMA_FONTSIZE = 40
|
||||||
|
PROMA_FONT = "./resource/OpenSans-Regular.ttf"
|
||||||
|
|
||||||
|
# generate
|
||||||
|
# return value : filename
|
||||||
|
# output to file
|
||||||
|
def gen(context: dict) -> str | None:
    """Render the post text onto a blank white canvas and save it as a JPEG.

    Returns the absolute path of the generated temp file.
    """
    # data preparation
    text = context["content"]["text"]

    # generate image: white background, black text at the top-left corner
    canvas = Image.new(mode="RGB",
                       size=(PROMA_WIDTH, PROMA_HEIGHT),
                       color=(255, 255, 255))
    typeface = ImageFont.truetype(PROMA_FONT, PROMA_FONTSIZE, encoding='utf-8')
    painter: ImageDraw.ImageDraw = ImageDraw.Draw(canvas)
    painter.text(xy=(0, 0), text=text, font=typeface, fill=(0, 0, 0))

    # save under TMP with a time-derived (presumably unique) name
    out_path = TMP + hashlib.sha512(str(time.time()).encode()).hexdigest() + ".jpg"
    canvas.save(out_path)
    return os.path.abspath(out_path)
|
||||||
|
|
||||||
|
|
||||||
|
# caption text generation
def gentext(context: dict) -> str:
    """Return the raw post text used as the IG caption."""
    return context["content"]["text"]
|
33
README.md
Normal file
33
README.md
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
# Preparing
|
||||||
|
- Rename ``backend/db/id2igid.db.example`` to ``backend/db/id2igid.db``
|
||||||
|
|
||||||
|
- Rename ``config/config.py.example`` to ``config/config.py``
|
||||||
|
|
||||||
|
- Edit ``config/config.py``
|
||||||
|
```
|
||||||
|
ACCOUNT_USERNAME = "<Your_Instagram_username>"
|
||||||
|
ACCOUNT_PASSWORD = "<Your_Instagram_password>"
|
||||||
|
```
|
||||||
|
|
||||||
|
# Deploy
|
||||||
|
## Docker
|
||||||
|
```
|
||||||
|
docker compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
## Manual
|
||||||
|
```
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y ffmpeg libpq-dev libmagic1 libmagic-dev
|
||||||
|
pip3 install ffmpeg_python-0.2.0-py3-none-any.whl
pip3 install -r requirements.txt
|
||||||
|
|
||||||
|
python3 ./app.py
|
||||||
|
```
|
||||||
|
|
||||||
|
# Modules
|
||||||
|
frontend - frontend server
|
||||||
|
|
||||||
|
interface - interface between IGAPI and main service(niming)
|
||||||
|
|
||||||
|
PictureMaker - IG post template
|
4
TODO
Normal file
4
TODO
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
[ ] 改善Traceback的收集
|
||||||
|
[V] 本地儲存ID對IGID表
|
||||||
|
|
||||||
|
[ ] 測試
|
80
app.py
Normal file
80
app.py
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
import importlib.util
|
||||||
|
import logging
|
||||||
|
import threading
|
||||||
|
import os
|
||||||
|
|
||||||
|
from backend import backend
|
||||||
|
from backend.ig import IG
|
||||||
|
from backend.db import dbhelper
|
||||||
|
from backend.utils import ld_interface
|
||||||
|
from backend.utils import ld_picturemaker
|
||||||
|
from config.config import DEBUG, TMP, FRONTEND
|
||||||
|
#if DEBUG:
|
||||||
|
# from dotenv import load_dotenv
|
||||||
|
# load_dotenv()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Bootstrap the service: logging, temp dir, dynamic modules, DB and IG
    backends, then start the frontend and backend worker threads."""
    # logging init
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S"
    )

    # loader-specific logger
    loaderlog = logging.getLogger("loader")
    loaderlog.setLevel(level=logging.INFO)

    # debug
    if DEBUG:
        loaderlog.info("DEBUG MODE ENABLED")

    ##################

    # tmp dir: created on first run, used for media temp files
    if not os.path.exists(TMP):
        loaderlog.info("Temporary directory not found, creating...")
        os.mkdir(TMP)

    ##################

    # load interface module (bridge between IGAPI and the main service)
    ld_interface.init()

    # load picture_maker module (post-text image renderer)
    ld_picturemaker.init()

    ##################

    # init backend modules
    ## id2igid.db: local article-id -> IG media-id mapping
    loaderlog.info("Connecting to id2igid.db...")
    dbhelper.init()

    ## instagram: IG.init raises on login failure (unless DEBUG)
    loaderlog.info("Initializing IG...")
    IG.init()

    ##################

    # load frontend: FRONTEND is a file path; the module is loaded
    # dynamically and its main() is run in its own thread
    loaderlog.info("Loading frontend")
    spec = importlib.util.spec_from_file_location("frontend", FRONTEND)
    femod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(femod)
    fe = threading.Thread(target=femod.main)
    fe.start()

    # load backend worker loop in its own thread
    loaderlog.info("Loading backend")
    be = threading.Thread(target=backend.main)
    be.start()

    ##################

    # end of bootstrap; both threads keep the process alive
    loaderlog.info("Loaded")
|
||||||
|
|
||||||
|
# script entry point
if __name__ == "__main__":
    main()
|
100
backend/api.py
Normal file
100
backend/api.py
Normal file
|
@ -0,0 +1,100 @@
|
||||||
|
"""
|
||||||
|
Backend API for frontend to call
|
||||||
|
"""
|
||||||
|
import logging
|
||||||
|
from typing import Tuple
|
||||||
|
from cachetools import TTLCache, cached
|
||||||
|
|
||||||
|
from config.config import ACCINFO_CACHE, RELOGIN_LIMIT
|
||||||
|
from backend import backend
|
||||||
|
from backend.utils import ld_interface
|
||||||
|
from backend.db import dbhelper
|
||||||
|
from backend.ig import IG
|
||||||
|
|
||||||
|
# logger
|
||||||
|
bkapilog = logging.getLogger("backend.api")
|
||||||
|
bkapilog.setLevel(level=logging.INFO)
|
||||||
|
|
||||||
|
# account info: cached so repeated frontend calls don't hammer the IG API
cache_accinfo = TTLCache(maxsize=1, ttl=ACCINFO_CACHE)
@cached(cache_accinfo)
def IG_account_info() -> dict | None:
    """Return the IG account-info dict, or None on failure (TTL-cached)."""
    return IG.account_info()
|
||||||
|
|
||||||
|
|
||||||
|
# login
cache_login = TTLCache(maxsize=1, ttl=RELOGIN_LIMIT)
@cached(cache_login)
def _IG_login() -> int:
    """Perform the actual IG login; the result is cached for RELOGIN_LIMIT
    seconds, which doubles as the re-login rate limit."""
    result = IG.login()
    return result

# fix: annotated -> str but the success path returns None; callers treat
# None as "no error", so the honest annotation is str | None
def IG_login() -> str | None:
    """Trigger an IG login unless one happened within RELOGIN_LIMIT.

    Returns None on success, otherwise a short error string
    ("Cooldown" / "Login Failed").
    """
    if len(cache_login):  # a recent attempt is still cached -> cooldown
        bkapilog.info("IG_login: cooldown")
        return "Cooldown"

    # login (IG.login returns 1 on success)
    lgres = _IG_login()
    if lgres == 1:
        bkapilog.info("IG_login: login succeeded")  # fixed typo "successed"
        return None
    else:
        bkapilog.error("IG_login: login failed")
        return "Login Failed"
|
||||||
|
|
||||||
|
|
||||||
|
# get queue content
def BACKEND_queue() -> dict:
    """Snapshot the task queue as {task_key: str(article_id)}."""
    reply = {}
    for key, payload in backend.queue.items():
        reply[key] = str(payload["aid"])
    return reply
|
||||||
|
|
||||||
|
|
||||||
|
# task: upload
def upload(aid:int) -> Tuple[str, int]:
    """Queue an upload task for article *aid*.

    Returns (message, err): err is 0 when accepted (or when it cancelled a
    pending delete), 1 when rejected.
    """
    upload_key = "upload-" + str(aid)
    delete_key = "delete-" + str(aid)

    # check - the article must be visible through the interface
    if ld_interface.inf.get(index=aid, media=False) is None:
        return "Article not found", 1

    # check - an identical request is already queued
    if backend.queue[upload_key]:
        return "Request is already in queue", 1

    # check - a pending delete for the same article cancels out with this
    if backend.queue[delete_key]:
        backend.queue.pop(delete_key)
        return "Canceled delete article request", 0

    # check - the article was already posted
    if dbhelper.solo_article_fetcher(aid=aid):
        return "Already posted", 1

    # enqueue
    backend.queue[upload_key] = {"aid": aid}
    return "Put into queue", 0
|
||||||
|
|
||||||
|
|
||||||
|
# task: delete
def delete(aid:int) -> Tuple[str, int]:
    """Queue a delete task for article *aid*.

    Returns (message, err): err is 0 when accepted (or when it cancelled a
    pending upload), 1 when rejected.
    """
    delete_key = "delete-" + str(aid)
    upload_key = "upload-" + str(aid)

    # check - an identical request is already queued
    if backend.queue[delete_key]:
        return "Request is already in queue", 1

    # check - a pending upload for the same article cancels out with this
    if backend.queue[upload_key]:
        backend.queue.pop(upload_key)
        return "Canceled upload post request", 0

    # check - it must have been posted before it can be deleted
    if not dbhelper.solo_article_fetcher(aid=aid):
        return "Has not been posted yet", 1

    # enqueue
    backend.queue[delete_key] = {"aid": aid}
    return "Put into queue", 0
|
64
backend/backend.py
Normal file
64
backend/backend.py
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
import logging
|
||||||
|
import random
|
||||||
|
import time
|
||||||
|
|
||||||
|
from backend import processor
|
||||||
|
from config.config import WORK_INTERVAL_MIN, WORK_INTERVAL_MAX
|
||||||
|
from backend.utils.ThreadSafeOrderedDict import ThreadSafeOrderedDict
|
||||||
|
from backend.db import dbhelper
|
||||||
|
from utils.err import easyExceptionHandler
|
||||||
|
|
||||||
|
# logging
|
||||||
|
belog = logging.getLogger("backend.worker")
|
||||||
|
belog.setLevel(level=logging.INFO)
|
||||||
|
|
||||||
|
# queue
|
||||||
|
queue = ThreadSafeOrderedDict()
|
||||||
|
|
||||||
|
|
||||||
|
def task_processor():
    """Pop the oldest task off the queue and execute it.

    Task keys look like "<kind>-<aid>" with kind in {"upload", "delete"}.
    On success the local aid<->igid mapping table is updated to match.
    """
    t = queue.popitem(last=False)  # FIFO: oldest task first
    if not t:  # no any task in queue
        belog.info("No task in queue")
        return

    aid = t[1]["aid"]
    # fix: local was named `type`, shadowing the builtin
    task_kind = t[0].split("-")[0]
    belog.info("Task %s(target_aid=%d)"%(task_kind, aid))

    if task_kind == "upload":  # upload
        msg, err = processor.upload(aid)
    elif task_kind == "delete":
        msg, err = processor.remove(aid)
    else:
        msg, err = "Invalid task type %s"%task_kind, 1

    if err:
        belog.error("Task failed: %s"%msg)
    elif task_kind == "upload":
        # on success `msg` carries the new IG media code -> record it
        dberr = dbhelper.solo_article_inserter(aid=aid, igid=msg)
        if dberr:
            belog.error("Task %s(target_aid=%d): Set igid failed"%(task_kind, aid))
    elif task_kind == "delete":
        # drop the mapping record for the removed post
        dberr = dbhelper.solo_article_remover(aid=aid)
        if dberr:
            belog.error("Task %s(target_aid=%d): remove igid record failed"%(task_kind, aid))

    belog.info("Task Done")
    return
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Worker loop: run one task, then sleep a random jittered interval."""
    belog.info("Backend is starting...")
    while True:
        try:
            task_processor()
        except Exception as e:
            easyExceptionHandler(e)

        # jittered pacing between tasks (avoids a fixed posting rhythm)
        pause = random.randint(WORK_INTERVAL_MIN, WORK_INTERVAL_MAX)
        belog.info("Next round after %ds" % pause)
        time.sleep(pause)
|
74
backend/db/dbhelper.py
Normal file
74
backend/db/dbhelper.py
Normal file
|
@ -0,0 +1,74 @@
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
from typing import Tuple, Dict
|
||||||
|
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlalchemy import Engine, create_engine
|
||||||
|
|
||||||
|
from backend.db.pgclass import Base, articles
|
||||||
|
|
||||||
|
dblogger = logging.getLogger("backend.db")
|
||||||
|
dblogger.setLevel(level=logging.DEBUG)
|
||||||
|
|
||||||
|
db:Engine = None
|
||||||
|
|
||||||
|
def init():
    """Create the SQLite engine for id2igid.db and ensure tables exist.

    Raises if the database cannot be opened or the schema cannot be created.
    """
    global db
    try:
        dbpath = os.path.abspath("./backend/db/id2igid.db")
        db = create_engine(f"sqlite:///{dbpath}")
        Base.metadata.create_all(db)
    except Exception as e:  # fix: was a bare except; keep the cause chained
        dblogger.critical("Cannot connect to database")
        raise Exception("Cannot connect to database id2igid.db") from e
|
||||||
|
|
||||||
|
|
||||||
|
def get_session():
    """Open a fresh SQLAlchemy session bound to the module-level engine."""
    factory = sessionmaker(bind=db)
    return factory()
|
||||||
|
|
||||||
|
|
||||||
|
def solo_article_fetcher(aid:int=None, igid:str=None) -> Dict | None:
    """Look up one article mapping by aid (preferred) or igid.

    Returns {"id": ..., "igid": ...} or None when no key is given or no
    row matches.
    """
    with get_session() as session:
        base_query = session.query(articles)
        if aid is not None:  # aid takes precedence
            row = base_query.filter(articles.id == aid).first()
        elif igid is not None:
            row = base_query.filter(articles.igid == igid).first()
        else:  # neither key supplied
            return None

        if row is None:
            return None
        return {"id": row.id, "igid": row.igid}
|
||||||
|
|
||||||
|
|
||||||
|
def solo_article_inserter(aid:int, igid:str) -> int: # TODO
    """Insert an aid -> igid mapping.

    Returns 1 if a row for *aid* already exists, 0 after a successful insert.
    """
    with get_session() as session:
        existing = session.query(articles).filter(articles.id == aid).first()
        if existing is not None:
            return 1

        session.add(articles(id=aid, igid=igid))
        session.commit()
        return 0
|
||||||
|
|
||||||
|
|
||||||
|
def solo_article_remover(aid:int) -> int:
    """Delete the mapping row for *aid*.

    Returns 1 if no such row exists, 0 after a successful delete.
    """
    with get_session() as session:
        row = session.query(articles).filter(articles.id == aid).first()
        if row is None:
            return 1

        session.delete(row)
        session.commit()
        return 0
|
BIN
backend/db/id2igid.db.example
Normal file
BIN
backend/db/id2igid.db.example
Normal file
Binary file not shown.
11
backend/db/pgclass.py
Normal file
11
backend/db/pgclass.py
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
from sqlalchemy import Column, String, BIGINT
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
|
||||||
|
Base = declarative_base()
|
||||||
|
|
||||||
|
# post: one row per article posted to IG, mapping the local article id to
# the Instagram media identifier recorded after a successful upload
class articles(Base):
    __tablename__ = 'articles'

    # local (main-service) article id
    id = Column(BIGINT, nullable=False, primary_key=True, unique=True)
    # Instagram media code/id the article was posted as
    igid = Column(String, nullable=False, unique=True)
|
115
backend/ig/IG.py
Normal file
115
backend/ig/IG.py
Normal file
|
@ -0,0 +1,115 @@
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from instagrapi import Client
|
||||||
|
|
||||||
|
from backend.utils import ld_picturemaker
|
||||||
|
from config.config import DEBUG, ACCOUNT_USERNAME, ACCOUNT_PASSWORD
|
||||||
|
from utils.err import easyExceptionHandler
|
||||||
|
#from utils.const import DEVICE
|
||||||
|
|
||||||
|
# logging
|
||||||
|
iglog = logging.getLogger("backend.ig")
|
||||||
|
iglog.setLevel(level=logging.DEBUG)
|
||||||
|
|
||||||
|
cl:Client = Client()
|
||||||
|
|
||||||
|
|
||||||
|
def login() -> int:
    """Log in to Instagram, preferring a saved session file.

    Falls back to username/password (reusing the previous device uuids) and
    persists the session on success. Returns 1 on success, 0 on failure.
    """
    # session
    session_file = "./config/session.json"

    session = None
    if os.path.exists(session_file):
        session = cl.load_settings(session_file)

    cl.delay_range = [2, 5]
    #cl.set_device(DEVICE)
    sessionSuccess = True
    # login with sessionid
    if session:
        iglog.info("Trying logging in with session...")
        try:
            cl.set_settings(session)
            cl.login(ACCOUNT_USERNAME, ACCOUNT_PASSWORD)
            cl.get_timeline_feed()  # probe call: raises if the session is stale
        except Exception as e:  # fix: was a bare except swallowing silently
            easyExceptionHandler(e)
            sessionSuccess = False
    else:
        sessionSuccess = False

    # login with username and password
    if not sessionSuccess:
        iglog.info("Trying logging in with username and password")
        try:
            old_session = cl.get_settings()
            cl.set_settings({})
            # keep the old device uuids so IG doesn't see a brand-new device
            cl.set_uuids(old_session["uuids"])
            cl.login(ACCOUNT_USERNAME, ACCOUNT_PASSWORD)
            cl.get_timeline_feed()
        except Exception as e:  # fix: was a bare except
            easyExceptionHandler(e)
            iglog.error("Cannot log in")
            return 0

    # save session for the next start
    cl.dump_settings(session_file)

    # return
    username = cl.account_info().dict()["username"]
    iglog.info("Logged as %s"%username)
    return 1
|
||||||
|
|
||||||
|
|
||||||
|
def account_info() -> dict | None:
    """Fetch the logged-in account's info dict; None on any error."""
    iglog.info("Fetching account info")
    try:
        return cl.account_info().dict()
    except Exception as e:
        easyExceptionHandler(e)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def media_info(code:str) -> dict | None:
    """Resolve an IG media short-code to its full info dict; None on error."""
    try:
        media_pk = cl.media_pk_from_code(code)
        return cl.media_info(media_pk).dict()
    except Exception as e:
        easyExceptionHandler(e)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def upload_media(context:str, paths:List[str]) -> dict | None:
    """Upload one photo or a multi-photo album with a generated caption.

    Returns instagrapi's media dict, or None on error / empty *paths*.
    """
    try:
        if not paths:
            return None

        caption = ld_picturemaker.picture_maker.gentext(context)
        if len(paths) == 1:
            media = cl.photo_upload(path=paths[0], caption=caption).dict()
        else:
            media = cl.album_upload(paths=paths, caption=caption).dict()
        return media
    except Exception as e:
        easyExceptionHandler(e)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def delete_media(code:str) -> int:
    """Delete an IG media by its short-code; returns 0 on success, 1 on error."""
    try:
        pk = str(cl.media_pk_from_code(code))
        cl.media_delete(cl.media_id(pk))
        return 0
    except Exception as e:
        easyExceptionHandler(e)
        return 1
|
||||||
|
|
||||||
|
|
||||||
|
def init():
    """Log in at startup (skipped entirely in DEBUG); raise if it fails."""
    if DEBUG:
        return
    if not login():
        iglog.critical("login failed")
        raise Exception("Failed to login to Instagram")
|
93
backend/processor.py
Normal file
93
backend/processor.py
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
from typing import Tuple
|
||||||
|
import os
|
||||||
|
import io
|
||||||
|
|
||||||
|
import magic
|
||||||
|
|
||||||
|
from config.config import DEBUG
|
||||||
|
from backend.ig import IG
|
||||||
|
from backend.db import dbhelper
|
||||||
|
from backend.utils import ld_interface
|
||||||
|
from backend.utils import ld_picturemaker
|
||||||
|
from backend.utils import fileProcessor
|
||||||
|
|
||||||
|
def clean(file_list):
    """Best-effort removal of temp files; missing/locked files are ignored."""
    for path in file_list:
        try:
            os.remove(path)
        except OSError:  # fix: narrowed from a bare except; only swallow fs errors
            pass
|
||||||
|
|
||||||
|
# return (errmsg | code, errcode)
def upload(aid:int) -> Tuple[str, int]:
    """Fetch article *aid*, convert its media files, render the caption image
    and post everything to IG.

    Returns (ig_media_code, 0) on success or (error_message, 1) on failure.
    """
    # get article
    article = ld_interface.inf.get(index=aid, media=True)
    if article is None:
        return "Post not found", 1

    # multimedia -> tmp file
    tmp_path = []
    mime = magic.Magic(mime=True)  # hoisted: one detector for all files
    for m in article["content"]["media"]:
        # fix: the old code called m.read() twice -- on a file-like stream the
        # second call returns empty bytes, so file_saver got no data
        data = m.read()
        # check mime type
        tp = mime.from_buffer(data)
        # save file
        filename, err = fileProcessor.file_saver(tp, data)
        if err:
            clean(tmp_path)
            return "Error while saving file", 1
        tmp_path.append(filename)
    article["content"]["media"] = []

    # compose the text image and put it first in the album
    proma_file = ld_picturemaker.picture_maker.gen(article)
    tmp_path = [proma_file] + tmp_path

    # hand off to IG for upload (faked in DEBUG)
    if not DEBUG:
        media = IG.upload_media(article, tmp_path)
        if media is None:
            clean(tmp_path)  # fix: temp files used to leak on upload failure
            return "Upload failed", 1
    else:
        media = {"code":"fake_data"}

    # remove temp files
    clean(tmp_path)

    return media["code"], 0
|
||||||
|
|
||||||
|
|
||||||
|
# return (errmsg, code)
def remove(aid:int) -> Tuple[str, int]:
    """Delete the IG post for article *aid*.

    The source article may already be gone from the main service by the time
    a delete is requested, so the igid is resolved from the local mapping
    table instead. Returns ("OK", 0) on success, (error_message, 1) on failure.
    """
    record = dbhelper.solo_article_fetcher(aid=aid)
    if record is None:
        return "Post not found", 1

    if IG.delete_media(record["igid"]):
        return "Remove failed", 1

    return "OK", 0
|
47
backend/utils/ThreadSafeOrderedDict.py
Normal file
47
backend/utils/ThreadSafeOrderedDict.py
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
from collections import OrderedDict
|
||||||
|
from threading import RLock
|
||||||
|
|
||||||
|
class ThreadSafeOrderedDict:
    """An OrderedDict guarded by a re-entrant lock.

    Missing keys return None instead of raising, which callers rely on for
    cheap "is this task queued?" checks.
    """

    def __init__(self):
        self.lock = RLock()
        self.data = OrderedDict()

    def __setitem__(self, key, value):
        with self.lock:
            self.data[key] = value

    def __getitem__(self, key):
        # returns None (not KeyError) for missing keys
        with self.lock:
            if key in self.data:
                return self.data[key]
            return None

    def remove(self, key):
        """Delete *key* if present; silently does nothing otherwise."""
        with self.lock:
            if key in self.data:
                del self.data[key]

    def move_to_end(self, key, last=True):
        with self.lock:
            if key in self.data:
                self.data.move_to_end(key, last=last)

    def pop(self, key):
        """Remove and return the value for *key*, or None if absent."""
        with self.lock:
            if key in self.data:
                return self.data.pop(key)
            return None

    def popitem(self, last:bool=True):
        """Remove and return a (key, value) pair, or None when empty."""
        with self.lock:
            if len(self.data):
                return self.data.popitem(last)
            return None

    def items(self):
        with self.lock:
            # fix: return a snapshot list, not the live view -- the view
            # escaped the lock and could be mutated while being iterated
            return list(self.data.items())

    def __repr__(self):
        with self.lock:
            return repr(self.data)
|
107
backend/utils/fileProcessor.py
Normal file
107
backend/utils/fileProcessor.py
Normal file
|
@ -0,0 +1,107 @@
|
||||||
|
import time
|
||||||
|
import os
|
||||||
|
import io
|
||||||
|
from typing import Tuple
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from hashlib import sha512
|
||||||
|
from PIL import Image
|
||||||
|
from pillow_heif import register_heif_opener
|
||||||
|
import ffmpeg
|
||||||
|
|
||||||
|
from config.config import FILE_MINE_TYPE, TMP
|
||||||
|
from utils.err import easyExceptionHandler
|
||||||
|
|
||||||
|
register_heif_opener()
|
||||||
|
|
||||||
|
def image_conventer(filename:str, binary: bytes) -> int:
    """Decode *binary* as an image (HEIF included via the registered opener)
    and write it to *filename* as an RGB JPEG.

    Returns 0 on success, 1 on any decode/encode failure.
    """
    try:
        source = io.BytesIO(binary)
        picture: Image.Image = Image.open(source)
        picture.convert("RGB").save(filename, "JPEG", quality=95)
        return 0
    except Exception as e:
        easyExceptionHandler(e)
        return 1
|
||||||
|
|
||||||
|
|
||||||
|
def read_output(pipe, q):
    """Drain *pipe* into queue *q* in 4 KiB chunks (non-blocking reader for
    ffmpeg's stdout); a trailing None marks end-of-stream."""
    while chunk := pipe.read(4096):
        q.put(chunk)
    q.put(None)  # end-of-output marker
|
||||||
|
|
||||||
|
|
||||||
|
def video_conventor(filename:str, oriFormat:str, binary:bytes) -> int:
    """Transcode *binary* (declared container format *oriFormat*) to an mp4
    written at *filename*.

    Returns 0 on success, 1 on failure.
    """
    try:
        tmpfile = filename+"_tmp"
        # spill the input bytes to a temp file next to the target
        with open(tmpfile, "wb") as f:
            f.write(binary)

        # ffmpeg process
        process:subprocess.Popen = (
            ffmpeg
            .input(tmpfile, format=oriFormat)
            .output(filename, format='mp4')
            .run_async(pipe_stdin=True, pipe_stdout=True, pipe_stderr=True)
        )

        # NOTE(review): stdout/stderr are piped but never drained before
        # wait(); a chatty ffmpeg run could fill the pipe buffer and
        # deadlock -- confirm, or drain via read_output() threads
        process.wait()

        # remove tempfile
        os.remove(tmpfile)

        # NOTE(review): ffmpeg's exit code is not checked, so a failed
        # transcode that doesn't raise in Python still reports success here
        return 0
    except Exception as e:
        easyExceptionHandler(e)
        return 1
|
||||||
|
|
||||||
|
|
||||||
|
def file_writer(filename:str, binary:bytes):
    """Write *binary* to *filename*, replacing any existing content."""
    with open(filename, "wb") as out:
        out.write(binary)
|
||||||
|
|
||||||
|
|
||||||
|
def file_saver(ftype:str, binary:bytes) -> Tuple[str, int]:
    """Persist an uploaded media blob into TMP, converting when IG does not
    accept the format natively.

    ftype  -> mime type
    binary -> file binary

    Returns (absolute_path, 0) on success or (error_message, 1) on failure.
    """
    # resolve the extension for this mime type (was a manual loop over keys)
    ext = FILE_MINE_TYPE.get(ftype)
    if ext is None:
        return "Invalid file type", 1

    # time-derived temp file name
    filename = sha512( str(time.time()).encode() ).hexdigest()

    # fix: the old native-format check compared against "image/jpg", which is
    # not a registered mime type and never appears in FILE_MINE_TYPE (it maps
    # "image/jpeg"), so every JPEG was pointlessly re-encoded
    if ftype in ("image/jpeg", "image/webp", "video/mp4"):
        # IG-native format -> write straight to disk
        opt = os.path.abspath(os.path.join(TMP, filename+"."+ext))
        file_writer(opt, binary)
        return opt, 0

    # anything else gets converted first
    if ftype.startswith("image"):
        opt = os.path.abspath(os.path.join(TMP, filename+".jpg"))
        if image_conventer(opt, binary):
            return "File convert error", 1
        return opt, 0
    elif ftype.startswith("video"):
        opt = os.path.abspath(os.path.join(TMP, filename+".mp4"))
        if video_conventor(opt, ext, binary):
            return "File convert error", 1
        return opt, 0

    # unreachable for current FILE_MINE_TYPE entries; kept as a guard
    return "Invalid file type", 1
|
14
backend/utils/ld_interface.py
Normal file
14
backend/utils/ld_interface.py
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
|
from config.config import INTERFACE
|
||||||
|
|
||||||
|
inf = None
|
||||||
|
|
||||||
|
def init():
    """Dynamically load the interface module from the configured INTERFACE path
    into the module-level `inf` handle."""
    global inf
    try:
        spec = importlib.util.spec_from_file_location("interface", INTERFACE)
        inf = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(inf)
    except Exception as e:  # fix: was a bare except; keep the original cause
        raise ImportError(f"Cannot load interface module: {INTERFACE}") from e
|
14
backend/utils/ld_picturemaker.py
Normal file
14
backend/utils/ld_picturemaker.py
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
|
from config.config import PICTURE_MAKER
|
||||||
|
|
||||||
|
picture_maker = None
|
||||||
|
|
||||||
|
def init():
    """Dynamically load the PictureMaker module from the configured
    PICTURE_MAKER path into the module-level `picture_maker` handle."""
    global picture_maker
    try:
        spec = importlib.util.spec_from_file_location("picture_maker", PICTURE_MAKER)
        picture_maker = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(picture_maker)
    except Exception as e:  # fix: was a bare except; keep the original cause
        raise ImportError(f"Cannot load PictureMaker module: {PICTURE_MAKER}") from e
|
54
config/config.py.example
Normal file
54
config/config.py.example
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
####################
|
||||||
|
# General config #
|
||||||
|
####################
|
||||||
|
TMP = "./tmp/"
|
||||||
|
|
||||||
|
####################
|
||||||
|
# Frontend config #
|
||||||
|
####################
|
||||||
|
FRONTEND = "frontend/grpc/server.py"
|
||||||
|
|
||||||
|
####################
|
||||||
|
# Backend config #
|
||||||
|
####################
|
||||||
|
# debug mode
|
||||||
|
DEBUG = False
|
||||||
|
|
||||||
|
# worker
|
||||||
|
## work interval
|
||||||
|
#WORK_INTERVAL_MIN = 30
|
||||||
|
#WORK_INTERVAL_MAX = 60
|
||||||
|
WORK_INTERVAL_MIN = 2*60 # 2 mins
|
||||||
|
WORK_INTERVAL_MAX = 5*60 # 5 mins
|
||||||
|
|
||||||
|
# api
|
||||||
|
## cache
|
||||||
|
ACCINFO_CACHE = 5*60 # 5 mins - fetch IG account info
|
||||||
|
RELOGIN_LIMIT = 10*60 # 10 mins - re-login limit
|
||||||
|
|
||||||
|
# IG
|
||||||
|
ACCOUNT_USERNAME = ""
|
||||||
|
ACCOUNT_PASSWORD = ""
|
||||||
|
|
||||||
|
# accepted file types, mapping {mime type: file extension}
|
||||||
|
FILE_MINE_TYPE = {
|
||||||
|
"image/jpeg": "jpg",
|
||||||
|
"image/pjpeg": "jfif",
|
||||||
|
"image/png": "png",
|
||||||
|
"image/heic": "heic",
|
||||||
|
"image/heif": "heif",
|
||||||
|
"image/webp": "webp",
|
||||||
|
"video/mp4": "mp4",
|
||||||
|
"video/quicktime": "mov",
|
||||||
|
"video/hevc": "hevc",
|
||||||
|
}
|
||||||
|
|
||||||
|
####################
|
||||||
|
# Interface config #
|
||||||
|
####################
|
||||||
|
INTERFACE = "interface/tcivs.py"
|
||||||
|
|
||||||
|
####################
|
||||||
|
# PictureMaker #
|
||||||
|
####################
|
||||||
|
PICTURE_MAKER = "PictureMaker/testing.py"
|
15
docker-compose.yml
Normal file
15
docker-compose.yml
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
version: '3'
|
||||||
|
|
||||||
|
# template: docker-compose.yml
|
||||||
|
|
||||||
|
services:
|
||||||
|
niming-igapi:
|
||||||
|
build: .
|
||||||
|
container_name: niming-igapi
|
||||||
|
volumes:
|
||||||
|
- ".:/app"
|
||||||
|
ports:
|
||||||
|
- "50051:50051"
|
||||||
|
restart: unless-stopped
|
||||||
|
working_dir: /app
|
||||||
|
command: python3 ./app.py
|
BIN
ffmpeg_python-0.2.0-py3-none-any.whl
Normal file
BIN
ffmpeg_python-0.2.0-py3-none-any.whl
Normal file
Binary file not shown.
25
frontend/grpc/protobuf/igapi.proto
Normal file
25
frontend/grpc/protobuf/igapi.proto
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
syntax = "proto3";

// RPC surface of the IGAPI frontend; every method shares the same generic
// Request/Reply envelope.
service IGAPI {
    rpc login (Request) returns (Reply) {}

    rpc account_info (Request) returns (Reply) {}

    rpc upload (Request) returns (Reply) {}

    rpc delete (Request) returns (Reply) {}

    rpc setting (Request) returns (Reply) {}

    rpc queue (Request) returns (Reply) {}
}

// Generic request: an operation-specific numeric code plus string arguments.
message Request {
    int64 code = 1;
    repeated string args = 2;
}

// Generic reply: non-zero err signals failure; result carries string pairs.
message Reply {
    int64 err = 1;
    map<string, string> result = 2;
}
|
44
frontend/grpc/protobuf/igapi_pb2.py
Normal file
44
frontend/grpc/protobuf/igapi_pb2.py
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: igapi.proto
# Protobuf Python Version: 5.28.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Abort at import time if the installed protobuf runtime is older than the
# gencode version this module was produced with.
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    5,
    28,
    1,
    '',
    'igapi.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# Serialized FileDescriptorProto for igapi.proto: messages Request/Reply
# (with Reply.result map entry) and the six-RPC IGAPI service.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0bigapi.proto\"%\n\x07Request\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x03\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\"g\n\x05Reply\x12\x0b\n\x03\x65rr\x18\x01 \x01(\x03\x12\"\n\x06result\x18\x02 \x03(\x0b\x32\x12.Reply.ResultEntry\x1a-\n\x0bResultEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x32\xc0\x01\n\x05IGAPI\x12\x1b\n\x05login\x12\x08.Request\x1a\x06.Reply\"\x00\x12\"\n\x0c\x61\x63\x63ount_info\x12\x08.Request\x1a\x06.Reply\"\x00\x12\x1c\n\x06upload\x12\x08.Request\x1a\x06.Reply\"\x00\x12\x1c\n\x06\x64\x65lete\x12\x08.Request\x1a\x06.Reply\"\x00\x12\x1d\n\x07setting\x12\x08.Request\x1a\x06.Reply\"\x00\x12\x1b\n\x05queue\x12\x08.Request\x1a\x06.Reply\"\x00\x62\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'igapi_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  DESCRIPTOR._loaded_options = None
  _globals['_REPLY_RESULTENTRY']._loaded_options = None
  _globals['_REPLY_RESULTENTRY']._serialized_options = b'8\001'
  _globals['_REQUEST']._serialized_start=15
  _globals['_REQUEST']._serialized_end=52
  _globals['_REPLY']._serialized_start=54
  _globals['_REPLY']._serialized_end=157
  _globals['_REPLY_RESULTENTRY']._serialized_start=112
  _globals['_REPLY_RESULTENTRY']._serialized_end=157
  _globals['_IGAPI']._serialized_start=160
  _globals['_IGAPI']._serialized_end=352
# @@protoc_insertion_point(module_scope)
|
312
frontend/grpc/protobuf/igapi_pb2_grpc.py
Normal file
312
frontend/grpc/protobuf/igapi_pb2_grpc.py
Normal file
|
@ -0,0 +1,312 @@
|
||||||
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from frontend.grpc.protobuf import igapi_pb2 as igapi__pb2

GRPC_GENERATED_VERSION = '1.68.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False

# Guard: the installed grpcio must be at least the version this stub was
# generated with, otherwise importing this module raises immediately.
try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    raise RuntimeError(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in igapi_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
    )
|
||||||
|
|
||||||
|
|
||||||
|
class IGAPIStub(object):
    """Missing associated documentation comment in .proto file."""
    # Client-side stub: one callable attribute per IGAPI RPC, each serializing
    # a Request and deserializing a Reply over the given channel.

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.login = channel.unary_unary(
                '/IGAPI/login',
                request_serializer=igapi__pb2.Request.SerializeToString,
                response_deserializer=igapi__pb2.Reply.FromString,
                _registered_method=True)
        self.account_info = channel.unary_unary(
                '/IGAPI/account_info',
                request_serializer=igapi__pb2.Request.SerializeToString,
                response_deserializer=igapi__pb2.Reply.FromString,
                _registered_method=True)
        self.upload = channel.unary_unary(
                '/IGAPI/upload',
                request_serializer=igapi__pb2.Request.SerializeToString,
                response_deserializer=igapi__pb2.Reply.FromString,
                _registered_method=True)
        self.delete = channel.unary_unary(
                '/IGAPI/delete',
                request_serializer=igapi__pb2.Request.SerializeToString,
                response_deserializer=igapi__pb2.Reply.FromString,
                _registered_method=True)
        self.setting = channel.unary_unary(
                '/IGAPI/setting',
                request_serializer=igapi__pb2.Request.SerializeToString,
                response_deserializer=igapi__pb2.Reply.FromString,
                _registered_method=True)
        self.queue = channel.unary_unary(
                '/IGAPI/queue',
                request_serializer=igapi__pb2.Request.SerializeToString,
                response_deserializer=igapi__pb2.Reply.FromString,
                _registered_method=True)
|
||||||
|
|
||||||
|
|
||||||
|
class IGAPIServicer(object):
    """Missing associated documentation comment in .proto file."""
    # Server-side base class: every handler reports UNIMPLEMENTED until a
    # subclass (e.g. IGAPI_Server in frontend/grpc/server.py) overrides it.

    def login(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def account_info(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def upload(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def delete(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def setting(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def queue(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
|
||||||
|
|
||||||
|
|
||||||
|
def add_IGAPIServicer_to_server(servicer, server):
    # Register every IGAPI RPC on `server`, wiring each method name to the
    # servicer's handler with the Request/Reply (de)serializers.
    rpc_method_handlers = {
            'login': grpc.unary_unary_rpc_method_handler(
                    servicer.login,
                    request_deserializer=igapi__pb2.Request.FromString,
                    response_serializer=igapi__pb2.Reply.SerializeToString,
            ),
            'account_info': grpc.unary_unary_rpc_method_handler(
                    servicer.account_info,
                    request_deserializer=igapi__pb2.Request.FromString,
                    response_serializer=igapi__pb2.Reply.SerializeToString,
            ),
            'upload': grpc.unary_unary_rpc_method_handler(
                    servicer.upload,
                    request_deserializer=igapi__pb2.Request.FromString,
                    response_serializer=igapi__pb2.Reply.SerializeToString,
            ),
            'delete': grpc.unary_unary_rpc_method_handler(
                    servicer.delete,
                    request_deserializer=igapi__pb2.Request.FromString,
                    response_serializer=igapi__pb2.Reply.SerializeToString,
            ),
            'setting': grpc.unary_unary_rpc_method_handler(
                    servicer.setting,
                    request_deserializer=igapi__pb2.Request.FromString,
                    response_serializer=igapi__pb2.Reply.SerializeToString,
            ),
            'queue': grpc.unary_unary_rpc_method_handler(
                    servicer.queue,
                    request_deserializer=igapi__pb2.Request.FromString,
                    response_serializer=igapi__pb2.Reply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'IGAPI', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
    server.add_registered_method_handlers('IGAPI', rpc_method_handlers)
|
||||||
|
|
||||||
|
|
||||||
|
# This class is part of an EXPERIMENTAL API.
class IGAPI(object):
    """Missing associated documentation comment in .proto file."""
    # Connectionless convenience client: each staticmethod performs a single
    # unary-unary call to `target` without constructing a stub.

    @staticmethod
    def login(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/IGAPI/login',
            igapi__pb2.Request.SerializeToString,
            igapi__pb2.Reply.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def account_info(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/IGAPI/account_info',
            igapi__pb2.Request.SerializeToString,
            igapi__pb2.Reply.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def upload(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/IGAPI/upload',
            igapi__pb2.Request.SerializeToString,
            igapi__pb2.Reply.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/IGAPI/delete',
            igapi__pb2.Request.SerializeToString,
            igapi__pb2.Reply.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def setting(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/IGAPI/setting',
            igapi__pb2.Request.SerializeToString,
            igapi__pb2.Reply.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def queue(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/IGAPI/queue',
            igapi__pb2.Request.SerializeToString,
            igapi__pb2.Reply.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
|
90
frontend/grpc/server.py
Normal file
90
frontend/grpc/server.py
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import grpc
|
||||||
|
|
||||||
|
from backend import api
|
||||||
|
from frontend.grpc.protobuf import igapi_pb2_grpc, igapi_pb2
|
||||||
|
from frontend.grpc.protobuf.igapi_pb2 import Request, Reply
|
||||||
|
|
||||||
|
# logging
# Module-level logger for the gRPC frontend; INFO so per-request lines
# (grpclog.info in the handlers below) are actually emitted.
grpclog = logging.getLogger("frontend.grpc")
grpclog.setLevel(level=logging.INFO)
|
||||||
|
|
||||||
|
# object
# NOTE(review): translated from Chinese — "Consider that if many requests are
# issued at the same time, asyncio (concurrency) might get the (IG) account banned."
class IGAPI_Server(igapi_pb2_grpc.IGAPIServicer):
    """Async gRPC servicer: each RPC logs the request and delegates to the
    backend ``api`` module, mapping its result into a Reply envelope
    (err=0 on success, err=1 with result["error"] on failure)."""

    async def account_info(self, request: Request, context) -> Reply:
        # Reply with the logged-in account's basic profile fields.
        grpclog.info("Request: account_info")
        account = api.IG_account_info()
        if account:
            result = {
                "username":account["username"],
                "full_name":account["full_name"],
                "email":account["email"]
            }
            return Reply(err=0, result=result)
        else:
            return Reply(err=1, result={"error":"api.IG_account_info returned None"})


    async def login(self, request: Request, context) -> Reply:
        # api.IG_login() returns a truthy value on failure — presumably an
        # error string, since it is placed under result["error"]; TODO confirm.
        grpclog.info("Request: login")
        err = api.IG_login()
        if err:
            return Reply(err=1, result={"error":err})

        return Reply(err=0, result={"result":"Login Successed"})


    async def upload(self, request: Request, context) -> Reply:
        # request.code carries the article id to upload.
        grpclog.info("Request: upload")
        aid = request.code
        res, err = api.upload(aid)
        if err:
            # NOTE(review): the error reply carries `res`, not `err` — confirm intended.
            return Reply(err=1, result={"error":res})

        return Reply(err=0, result={"result":res})


    async def delete(self, request: Request, context) -> Reply:
        # request.code carries the article id to delete.
        grpclog.info("Request: delete")
        aid = request.code
        res, err = api.delete(aid)
        if err:
            # NOTE(review): same pattern as upload() — error reply carries `res`.
            return Reply(err=1, result={"error":res})

        return Reply(err=0, result={"result":res})


    async def queue(self, request:Request, context) -> Reply:
        # Expose the backend queue state; api.BACKEND_queue() is assumed to
        # return a str->str mapping compatible with Reply.result — TODO confirm.
        grpclog.info("Request: queue")
        reply = api.BACKEND_queue()
        return Reply(err=0, result=reply)


    async def setting(self, request:Request, context) -> Reply:
        # not done
        grpclog.info("Request: setting")
        return Reply(err=1, result={"error":"Not Done"})
|
||||||
|
|
||||||
|
# get igid with article id
|
||||||
|
|
||||||
|
|
||||||
|
# start server
async def serve() -> None:
    """Bring up the async gRPC server on port 50051 and block until it terminates."""
    grpc_server = grpc.aio.server()
    igapi_pb2_grpc.add_IGAPIServicer_to_server(IGAPI_Server(), grpc_server)
    grpc_server.add_insecure_port("[::]:50051")
    await grpc_server.start()
    grpclog.info("gRPC Server listening on 0.0.0.0:50051")
    await grpc_server.wait_for_termination()
|
||||||
|
|
||||||
|
|
||||||
|
# entry point
def main():
    """Run the gRPC server until termination.

    Uses asyncio.run(), which creates the event loop, runs serve() to
    completion, and closes the loop afterwards — replacing the previous
    manual new_event_loop / set_event_loop / run_until_complete sequence,
    which never closed the loop it created.
    """
    asyncio.run(serve())
|
62
interface/tcivs.py
Normal file
62
interface/tcivs.py
Normal file
|
@ -0,0 +1,62 @@
|
||||||
|
from hashlib import sha512
|
||||||
|
import secrets
|
||||||
|
import time
|
||||||
|
import requests
|
||||||
|
import io
|
||||||
|
|
||||||
|
from config.config import TMP
|
||||||
|
|
||||||
|
# define
# Schema-by-example for the dict produced by get() below: the values are the
# *types* expected at each key, kept as executable documentation.
an_example_of_context = {
    "id": int,                    # article id
    "metadata": {
        "create_time": int,
        "author": str,
        "tags": list[str],
        "category": str
    },
    "content": {
        "text": str,
        "media": [
            io.BytesIO            # one in-memory buffer per downloaded attachment
        ]
    }
}
|
||||||
|
|
||||||
|
def get(index: int, media: bool = True) -> dict | None:
    """Fetch article *index* from the local article service and normalize it.

    Parameters:
        index: article id passed to the HTTP backend.
        media: when True, download every attachment listed in the article's
            "media" field into an in-memory io.BytesIO buffer.

    Returns:
        A dict shaped like ``an_example_of_context``, or None when the
        article request does not return HTTP 200.
    """
    res = requests.get("http://localhost:5000/article/%d?media_count=1" % index)
    if res.status_code != 200:
        return None

    rj = res.json()

    # Bug fix: this list used to be named `media`, shadowing the boolean
    # parameter above — `if media:` then always saw an empty list, so no
    # attachment was ever downloaded. Renamed so the flag works as intended.
    media_items = []
    if media:
        for url in rj["media"]:
            attachment = requests.get(url)
            if attachment.status_code == 200:
                # save in memory (file-based saving under TMP was considered
                # but dropped; reinstate here if memory pressure becomes an issue)
                media_items.append(io.BytesIO(attachment.content))

    result = {
        "id": rj["id"],
        "metadata": {
            "create_time": rj["create_time"],
            "author": "",
            "tags": [],
            "category": ""
        },
        "content": {
            "text": rj["content"],
            "media": media_items
        }
    }

    return result
|
11
requirements.txt
Normal file
11
requirements.txt
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
moviepy==1.0.3
|
||||||
|
instagrapi
|
||||||
|
sqlalchemy
|
||||||
|
sqlalchemy_utils
|
||||||
|
protobuf==5.28.3
|
||||||
|
Pillow
|
||||||
|
pillow-heif
|
||||||
|
asyncio
|
||||||
|
grpcio
|
||||||
|
cachetools
|
||||||
|
python-magic
|
BIN
resource/OpenSans-Regular.ttf
Normal file
BIN
resource/OpenSans-Regular.ttf
Normal file
Binary file not shown.
53
utils/err.py
Normal file
53
utils/err.py
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
import json
import traceback
import os
import logging

# JSON file that accumulates saved exception records, shaped as
# {"err": {"<id>": {...debug info...}}, "id": <next free id>}.
FILENAME = "./config/traceback.json"
|
||||||
|
|
||||||
|
def prechecker():
    """Create the traceback store file with an empty structure if it is missing."""
    if os.path.exists(FILENAME):
        return
    with open(FILENAME, "w", encoding = "utf-8") as store:
        json.dump({"err":{}, "id":0}, store, ensure_ascii=False)
|
||||||
|
|
||||||
|
def load():
    """Return the parsed traceback store, creating the file first if needed."""
    prechecker()
    with open(FILENAME, "r", encoding = "utf-8") as store:
        return json.load(store)
|
||||||
|
|
||||||
|
def debug_info_from_exception(exc) -> dict:
    """Build a JSON-serializable summary of *exc*: type name, message, and
    the formatted traceback (also echoed to the error log, line by line)."""
    frames = traceback.format_exception(type(exc), exc, exc.__traceback__)

    debug_info = {
        "Exception_type": str(type(exc).__name__),
        "Exception_message": str(str(exc)),
        # NOTE: "Trackback" spelling kept — it is the key persisted in
        # config/traceback.json and possibly read elsewhere.
        "Trackback": str(frames)
    }

    # debug
    for frame_line in frames:
        logging.error(frame_line)  # must display

    return debug_info
|
||||||
|
|
||||||
|
def write(e:Exception):
    """Append *e*'s debug info to the persistent store and return its id."""
    store:dict = load()

    eid = store["id"]
    store["err"][str(eid)] = debug_info_from_exception(e)
    store["id"] = eid + 1

    with open(FILENAME, "w", encoding = "utf-8") as f:
        json.dump(store, f, ensure_ascii=False)

    return eid
|
||||||
|
|
||||||
|
def easyExceptionHandler(e:Exception):
    """Persist *e* via write() and log a one-line summary with its stored id."""
    saved_id = write(e)
    logging.error(f"Exception id {saved_id} : {type(e).__name__} : {str(e)}")
|
Loading…
Add table
Reference in a new issue