Rework the backend api
parent fea3a714f0
commit 0e71eab1ee
@@ -2,10 +2,12 @@ import json
from pathlib import Path

import falcon
import mongoengine

from .tools import JsonRequest, JsonResponse, error_handler
from .controllers import *


class AboutResource():

    def on_get(self, req, resp):
@@ -35,15 +37,19 @@ class AboutResource():

        resp.body = json.dumps(r, indent=4)

mongoengine.connect('KaruMedia', connect=False)

app = application = falcon.API(request_type=JsonRequest, response_type=JsonResponse)
app.add_error_handler(ValueError, error_handler)
app.add_error_handler(mongoengine.DoesNotExist, error_handler)
app.add_route("/", AboutResource())

path = Path("/home/arti/Videod")
path = Path("/srv/media/")

app.add_route("/movies", MoviesCollection(path))
app.add_route("/movies/{movie}", MoviesResource(path))
app.add_route("/movies/{movie}/stream", MovieStreamUrlResource(path))

app.add_route("/magnet", MagnetResource())

app.add_route("/tasks", TaskCollection())
app.add_route("/tasks/{task_id}", TaskResource())
@@ -1,8 +1,14 @@
import os
import re
import json
from . import tasks
from falcon import HTTPInternalServerError, HTTP_201
from bson.objectid import ObjectId
from .tools import TODOException
from .models import *
from pprint import pprint
from urllib.parse import quote


movie_name_and_year = re.compile(r"(.*)\((.*)\)")

@@ -10,55 +16,52 @@ class BaseMovieResource():
    def __init__(self, path):
        self.path = path

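# Builds a movie dict for the JSON API from a Media document; the field names
# (title_long, imdb_code, yt_trailer_code, *_cover_image) appear to mirror a
# YTS-style movie listing so existing clients can consume it. Streams are
# collected from MediaFile documents, and unfinished Tasks for the same
# imdb_id mark the movie as "tasks_running".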
def build_movie_object(media):
    mobj = {
        "title":media.title,
        "title_english":media.title,
        "title_long": f"{media.title} ({media.year})",
        "year":media.year,
        "imdb_code": media.imdb_id,
        "rating": media.rating,
        "summary": media.plot_outline,
        "synopsis": media.plot_outline,
        "mpa_rating": media.mpa_rating,
        "genres": media.genres,
        "yt_trailer_code": media.yt_trailer_code,
        "state": "ok",
        "id": media.id
    }
    try:
        mobj["runtime"] = media.runtime // 100
    except:
        pass
    if media.plots:
        mobj["description_full"] = media.plots[0]

    relevant_tasks = list(Task.objects(imdb_id=media.imdb_id, state__ne="done"))
    if relevant_tasks:
        pprint(relevant_tasks)
        mobj["state"] = "tasks_running"
        mobj["relevant_tasks"] = [task.id for task in relevant_tasks]

    mobj["streams"] = {}
    stream_urls = list(MediaFile.objects(media=media))
    if stream_urls:
        mobj["streams"] = {media_file.resolution: media_file.url for media_file in stream_urls if media_file.mimetype.startswith("video")}

    mobj["small_cover_image"] = f"https://media.arti.ee/Filmid/{quote(mobj['title_long'])}/cover.jpg"
    mobj["medium_cover_image"] = mobj["small_cover_image"]
    mobj["large_cover_image"] = mobj["medium_cover_image"]

    return mobj

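# GET /movies: merges two sources into one listing -- directories under
# <path>/Filmid on disk (using metadata.json when present, otherwise parsing
# "Title (Year)" from the directory name) and Media documents from MongoDB.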
class MoviesCollection(BaseMovieResource):

    def on_get(self, req, resp):
        movie_paths = [p for p in (self.path / 'Filmid').iterdir() if p.is_dir()]
        movies = []
        for movie_path in movie_paths:
            if not (movie_path / "metadata.json").exists():
                match = movie_name_and_year.match(movie_path.name)
                if not match:
                    mobj = {
                        "title":movie_path.name,
                        "title_english":movie_path.name,
                        "title_long":movie_path.name,
                        "state":"ok"
                    }
                else:
                    movie_name, movie_year = match.groups()
                    mobj = {
                        "title":movie_name,
                        "title_english":movie_name,
                        "title_long":movie_path.name,
                        "year":movie_year,
                        "state": "ok"
                    }
                movies.append(mobj)
                continue
            with (movie_path / "metadata.json").open() as f:
                metadata = json.loads(f.read())
            mobj = {
                "title":metadata["title"],
                "title_english":metadata["title"],
                "title_long":movie_path.name,
                "year":metadata["year"],
                "imdb_code": metadata["imdb_id"],
                "rating": metadata["rating"],
                "summary": metadata["plot_outline"],
                "synopsis": metadata["plot_outline"],
                "mpa_rating": metadata["certification"],
                "genres": metadata["genres"],
                "yt_trailer_code": metadata["yt_trailer_code"],
                "state": "ok"
            }
            try:
                mobj["runtime"] = int(metadata["runtime"]) // 100
            except ValueError:
                pass
            if metadata["plots"]:
                mobj["description_full"] = metadata["plots"][0]
            movies.append(mobj)
        for media in Media.objects:
            movies.append(build_movie_object(media))
        jobj = {"data":{
            "limit":len(movies),
            "count":len(movies),
@@ -73,23 +76,37 @@ class MoviesCollection(BaseMovieResource):
class MoviesResource(BaseMovieResource):

    def on_get(self, req, resp, movie):
        resp.json = [{"path": self.path, "movie":movie}]

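# GET /movies/{movie}/stream: the directory-listing code below picks the
# largest file in the movie's folder as the default stream URL; the lookup at
# the end resolves {movie} as either a Media ObjectId or an IMDb id and
# returns the full movie object instead.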
class MovieStreamUrlResource(BaseMovieResource):

    def on_get(self, req, resp, movie):
        movie_name = movie
        path = self.path / "Filmid"
        movie_files = os.listdir(str(path / movie_name))
        sizes = []
        for movie_file in movie_files:
            sizes.append((os.path.getsize(str(path / movie_name / movie_file)), movie_file))
        file_name = sorted(sizes)[-1][1]
        resp.json = {
            "default": 'https://media.arti.ee/Filmid/{}/{}'.format(movie_name.replace(" ", "%20"), file_name.replace(" ", "%20"))
        }
        try:
            media = Media.objects.get(id=movie)
        except ValidationError:
            media = Media.objects.get(imdb_id=movie)
        resp.json = build_movie_object(media)

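# POST /magnet: accepts a JSON body with an "info_hash" (and optionally an
# "imdb_code"), records a TaskMetainfoDl document and kicks off the
# metainfo_dl task. A request body might look like (illustrative values only):
#   {"info_hash": "<40-char hex info hash>", "imdb_code": "tt0000000"}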
class MagnetResource():

    def on_post(self, req, resp):
        resp.json = [req.json]
        json = req.json
        if 'info_hash' not in json:
            raise ValueError("info_hash missing from request")
        task = TaskMetainfoDl(info_hash=json['info_hash'])
        task.imdb_id = json.get('imdb_code')
        task.save()
        tasks.metainfo_dl(str(task.id))
        resp.json = {"task_id":task.id}


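# GET /tasks and GET/DELETE /tasks/{task_id}: raw Task documents are returned
# via to_mongo(), so clients see progress/state exactly as stored in MongoDB.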
class TaskCollection():

    def on_get(self, req, resp):
        tasks = list()
        for task in Task.objects:
            tasks.append(task.to_mongo())
        resp.json = tasks

class TaskResource():
    def on_get(self, req, resp, task_id):
        task = Task.objects.get(id=task_id)
        resp.json = task.to_mongo()

    def on_delete(self, req, resp, task_id):
        Task.objects(id=task_id).delete()
api/karumedia/models.py (new file, 54 lines)
@@ -0,0 +1,54 @@
import datetime
from mongoengine import *

class Media(Document):
    title = StringField()
    slug = StringField(unique=True)
    imdb_id = StringField(unique=True)
    runtime = IntField()
    genres = ListField(StringField())
    mpa_rating = StringField()
    rating = FloatField()
    yt_trailer_code = StringField()
    date_added = DateTimeField(default=datetime.datetime.now)
    parrent = ReferenceField('self')
    season = IntField()
    episode = IntField()
    release_date = StringField()
    type = StringField()
    plot_outline = StringField()
    plots = ListField(StringField())
    year = IntField()

class MediaFile(Document):
    url = URLField()
    path = StringField()
    media = ReferenceField(Media)
    mimetype = StringField()
    resolution = StringField(choices=('480p', '720p', '1080p', 'native'), default="native")

class MediaPoster(Document):
    poster = FileField()
    media = ReferenceField(Media)

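# Task and its subclasses below use mongoengine single-collection inheritance
# ({'allow_inheritance': True}), so TaskMetainfoDl, TaskImdbDl and
# TaskTorrentDl are stored in the same collection and Task.objects in the API
# returns all of them.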
class Task(Document):
    progress = FloatField(default=0)
    state = StringField(choices=('starting', 'running', 'done'), default='starting')
    creation_time = DateTimeField(default=datetime.datetime.now)
    sub_tasks = ListField(ReferenceField('self'), default=list)

    meta = {'allow_inheritance': True}

class TaskMetainfoDl(Task):
    info_hash = StringField(required=True)
    imdb_id = StringField()

class TaskImdbDl(Task):
    imdb_id = StringField(required=True)
    media = ReferenceField(Media)

class TaskTorrentDl(Task):
    info_hash = StringField(required=True)
    imdb_id = StringField()
    media = ReferenceField(Media)
api/karumedia/tasks.py (new file, 281 lines)
@@ -0,0 +1,281 @@
from uwsgi_tasks import *
from .models import *
from time import sleep
from requests import get
import transmissionrpc
from pprint import pprint
from base64 import b64decode
from urllib.parse import urlencode, quote
import PTN
from imdbpie import Imdb
import os
import subprocess
import requests
import magic
import json

YTKEY = os.environ.get("YTKEY")
if not YTKEY:
    print("YTKEY not set")
    exit(1)


default_trackers = [
    'udp://glotorrents.pw:6969/announce',
    'udp://tracker.openbittorrent.com:80',
    'udp://tracker.coppersurfer.tk:6969',
    'udp://tracker.leechers-paradise.org:6969',
    'udp://p4p.arenabg.ch:1337',
    'udp://tracker.internetwarriors.net:1337',
    'udp://tracker.opentrackr.org:1337/announce'
]

r = get("https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt")

best_trackers = r.text

for tracker in best_trackers.split("\n"):
    tracker = tracker.strip()
    if tracker:
        default_trackers.append(tracker)

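# Wraps an info hash into a magnet URI, attaching the default/public tracker
# list as "tr" parameters. For example (illustrative hash), the result looks
# like:
#   magnet:?xt=urn:btih:<info hash>&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337%2Fannounce&...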
def hash_to_magnet(infoHash, name=None, trackers=None):
    try:
        b64decode(infoHash)
    except:
        raise Exception("Invalid infoHash")
    magnet = {
        "dn": name,
        "tr": list(default_trackers)
    }
    if not name:
        del magnet["dn"]
    if trackers:
        magnet["tr"].extend(trackers)
    return "magnet:?xt=urn:btih:{}&".format(infoHash) + urlencode(magnet, doseq=True)


tc = transmissionrpc.Client("172.20.1.2", user="admin", password="minemetsa")
imdb = Imdb()

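# Best-effort IMDb lookup for a raw release name: PTN extracts a clean title
# (and year, when present) from something like "Movie.Name.2016.1080p.x264",
# then imdbpie's title search is used to pick a matching imdb_id.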
def guess_imdb_code(title):
    info = PTN.parse(title)
    if 'year' not in info:
        print("No year found in title")
    results = imdb.search_for_title(info["title"])
    if not results:
        return None
    if 'year' in info:
        match = [movie for movie in results if movie["year"] == str(info["year"])]
        if not match:
            pprint(results)
            return None
        else:
            match = match[0]["imdb_id"]
    else:
        match = results[0]["imdb_id"]
    return match


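# Task pipeline for a submitted magnet/info hash: metainfo_dl adds the magnet
# to Transmission and waits for the torrent metadata, then it records a Media
# document and chains two sub-tasks -- imdb_dl (fetch IMDb details and a
# trailer id) and torrent_dl (download the data and register MediaFiles).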
@task
def metainfo_dl(task_id):
    task = TaskMetainfoDl.objects.get(id=task_id)

    magnet = hash_to_magnet(task.info_hash)
    t = tc.add_torrent(magnet)
    print(task.info_hash.lower())
    print(t.hashString)

    task.state = "running"
    task.save()

    while True:
        t = tc.get_torrent(t.hashString)
        print(t.name, t.status, t.metadataPercentComplete*100, t.percentDone*100)
        if t.metadataPercentComplete == 1:
            break
        task.progress = t.metadataPercentComplete*100
        task.save()
        sleep(1)

    pprint(t.files())
    t.stop()

    if not task.imdb_id:
        imdb_id = guess_imdb_code(t.name)
    else:
        imdb_id = task.imdb_id
    print("imdb_id:", imdb_id)

    try:
        media = Media.objects.get(imdb_id=imdb_id)
        print("Found existing media object:", media)
    except DoesNotExist:
        media = Media()
        media.imdb_id = imdb_id
        media.save()
        print("Created a new media object:", media, media.imdb_id)

    imdb_dl_task = TaskImdbDl(imdb_id=imdb_id)
    imdb_dl_task.media = media
    imdb_dl_task.save()
    imdb_dl(str(imdb_dl_task.id))

    torrent_dl_task = TaskTorrentDl(info_hash=t.hashString, imdb_id=imdb_id)
    torrent_dl_task.media = media
    torrent_dl_task.save()
    torrent_dl(str(torrent_dl_task.id))

    task.state = "done"
    task.sub_tasks.append(imdb_dl_task)
    task.sub_tasks.append(torrent_dl_task)
    task.media = media
    task.save()

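# imdb_dl fills the Media document from imdbpie (title, year, runtime, plot,
# rating, ...) and then queries the YouTube Data API v3 search endpoint for
# "<title> (<year>) trailer" to store a yt_trailer_code.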
@task
def imdb_dl(task_id):
    task = TaskImdbDl.objects.get(id=task_id)
    task.state = "running"
    task.save()
    print("imdb_id:", task.imdb_id)
    try:
        media = Media.objects.get(imdb_id=task.imdb_id)
        print("Using existing media object:", media)
    except DoesNotExist:
        media = Media()
        print("Creating a new media object")
        media.imdb_id = task.imdb_id

    title = imdb.get_title_by_id(task.imdb_id)
    media.title = title.title
    media.year = title.year
    media.runtime = int(title.runtime)
    media.genres = title.genres
    media.mpa_rating = title.certification
    media.release_date = title.release_date
    media.type = title.type
    media.plot_outline = title.plot_outline
    media.plots = title.plots
    media.rating = title.rating
    media.save()

    task.media = media
    task.save()


    params = {"key": YTKEY,
              "part":"id", "maxResults":1,
              "q":"{} ({}) trailer".format(media.title, media.year)}
    r = requests.get("https://www.googleapis.com/youtube/v3/search", params=params)
    media.yt_trailer_code = r.json()["items"][0]["id"]["videoId"]
    media.save()

    task.state = "done"
    task.progress = 100
    task.save()


def probe(vid_file_path):
    ''' Return the JSON output of an ffprobe run on the given file.

    @vid_file_path : The absolute (full) path of the video file, string.
    '''
    if type(vid_file_path) != str:
        raise Exception('Give ffprobe a full file path of the video')

    command = ["ffprobe",
               "-loglevel", "quiet",
               "-print_format", "json",
               "-show_format",
               "-show_streams",
               vid_file_path
               ]

    pipe = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, err = pipe.communicate()
    return json.loads(out)


def duration(vid_file_path):
    ''' Video's duration in seconds, returned as a float.
    '''
    _json = probe(vid_file_path)
    pprint(_json)

    if 'format' in _json:
        if 'duration' in _json['format']:
            return float(_json['format']['duration'])

    if 'streams' in _json:
        # commonly stream 0 is the video
        for s in _json['streams']:
            if 'duration' in s:
                return float(s['duration'])

    # nothing above returned, so no duration could be found
    raise Exception('I found no duration')
    #return None

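# torrent_dl resumes the torrent in Transmission, polls until the download
# completes, renames/moves the data into /srv/media/Filmid, then uses libmagic
# to classify each downloaded file and registers video files as MediaFile
# documents (plus a cover.jpg fetched from IMDb via wget).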
@task
def torrent_dl(task_id):
    task = TaskTorrentDl.objects.get(id=task_id)
    task.state = "running"
    task.save()

    t = tc.get_torrent(task.info_hash)
    t.start()

    while True:
        t = tc.get_torrent(task.info_hash)
        print(t.name, t.status, t.metadataPercentComplete*100, t.percentDone*100)
        if t.percentDone == 1:
            break
        task.progress = t.percentDone*100
        task.save()
        sleep(1)

    if "imdb_id" in task:
        try:
            media = Media.objects.get(imdb_id=task.imdb_id)
        except DoesNotExist:
            pass
        else:
            tc.rename_torrent_path(task.info_hash, t.name, f"{media.title} ({media.year})")

    tc.move_torrent_data(task.info_hash, "/srv/media/Filmid")

    with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as m:
        for file_id, file_data in t.files().items():
            file_path = "/srv/media/Filmid/"+file_data["name"]
            file_type = m.id_filename(file_path)
            video_duration = None
            if file_type.startswith("video"):
                video_duration = duration(file_path)

            print(file_path, file_type, video_duration)
            if not MediaFile.objects(path = file_path):
                media_file = MediaFile()
                media_file.media = Media.objects.get(imdb_id=task.imdb_id)
                media_file.path = file_path
                media_file.url = f"http://media.arti.ee/Filmid/{quote(file_data['name'])}"
                media_file.mimetype = file_type
                media_file.resolution = "native"
                media_file.save()

    media = task.media
    cover_path = f"/srv/media/Filmid/{media.title} ({media.year})/cover.jpg"
    if not MediaFile.objects(media=media, path=cover_path):
        subprocess.call(['wget', imdb.get_title_by_id(media.imdb_id).cover_url,
                         "-O", cover_path])
        media_poster = MediaFile()
        media_poster.path = cover_path
        movie_name_and_year = quote(f"{media.title} ({media.year})")
        media_poster.url = f"https://media.arti.ee/Filmid/{movie_name_and_year}/cover.jpg"
        media_poster.save()

    task.state = "done"
    task.progress = 100
    task.save()
@@ -1,10 +1,24 @@

import json
import datetime

from falcon import Request as FalconRequest
from falcon import Response as FalconResponse
from falcon.errors import HTTPBadRequest, HTTPMissingParam, HTTPError
from falcon.errors import HTTPBadRequest, HTTPMissingParam, HTTPError, HTTPNotFound
import falcon.status_codes as status
from bson.objectid import ObjectId
from mongoengine import DoesNotExist

class BSONDumps(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, ObjectId):
            return str(obj)
        elif isinstance(obj, datetime.datetime):
            return obj.timestamp()
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

dumps = BSONDumps(indent=4).encode


class JsonRequest(FalconRequest):
@@ -37,10 +51,13 @@ class JsonResponse(FalconResponse):
    @json.setter
    def json(self, value):
        self._json = value
        self.body = json.dumps(value, indent=4)
        self.body = dumps(value)

def error_handler(ex, req, resp, params):
    raise HTTPBadRequest(type(ex).__name__, str(ex))
    if type(ex).__name__ == DoesNotExist.__name__:
        raise HTTPNotFound(title=type(ex).__name__, description=str(ex))
    else:
        raise HTTPBadRequest(type(ex).__name__, str(ex))

class TODOException(HTTPError):

@@ -1 +1,13 @@
falcon
CacheControl==0.11.7
falcon==1.1.0
filemagic==1.6
imdbpie==4.2.0
lockfile==0.12.2
mongoengine==0.11.0
parse-torrent-name==1.1.1
pymongo==3.4.0
python-mimeparse==1.6.0
requests==2.12.5
six==1.10.0
transmissionrpc==0.11
uwsgi-tasks==0.6.4
@@ -14,6 +14,9 @@ http=:8080
#static-index=index.html
mount=/api/v1=wsgi.py
manage-script-name = true
py-autoreload = 1
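# the spooler settings below are presumably what lets the uwsgi-tasks @task
# functions in karumedia/tasks.py run asynchronously in uwsgi spooler workers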
spooler = taskspool
spooler-processes = 3


[prod]