Compare commits

Comparing 11c94c757e...master (9 commits)

| SHA1 |
|---|
| 56af1f0735 |
| 72266aad0f |
| 900cc92229 |
| 2687cc2bdc |
| 4a9d59c9b4 |
| 4af581ab7c |
| d1f381220d |
| eebf434f3e |
| c979c97077 |
@@ -7,13 +7,16 @@ debug = false # Whether to print verbose, d
 cache = true # Whether to cache requests for 3 hours (temporary solution to long load times).
 
 [api]
 api_key = "" # Leave empty API key for public access to non-sensitive backend
 api_key_admin = "CHANGEME" # Empty *admin* API key will autogenerate a random one every launch.
+enable_debugger_halt = false # Whether to allow to trigger pdb using admin's API key.
 
 [extractor]
 user-agent = "" # Leave empty for default (Firefox ESR).
 cookies_path = "" # Leave empty for none.
+age_restricted_cookies_path = "" # Cookies to use when bypassing age-gated videos only. Leave empty to disable.
+deno_path = "" # Required when using cookies.
 preferred_extractor = "" # Leave empty for default (android_vr).
 
 [proxy]
 user-agent = "" # Leave empty for default (Firefox ESR).
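The three new keys are consumed at runtime through ythdd_globals.config. As a minimal standalone sketch (standard library only; the file name and prints are illustrative, not part of the project), this is how such a TOML file maps onto the lookups used later in this diff:

    import tomllib  # Python 3.11+

    with open("config.toml", "rb") as f:  # path is an assumption
        config = tomllib.load(f)

    # Keys introduced by this change:
    print(config["api"]["enable_debugger_halt"])                # False keeps the pdb endpoint disabled
    print(config["extractor"]["age_restricted_cookies_path"])   # "" disables the age-gate bypass
    print(config["extractor"]["deno_path"])                     # required whenever cookies are used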
@@ -8,19 +8,20 @@ import ythdd_globals, ythdd_extractor
 #from flask_sqlalchemy import SQLAlchemy
 #import ythdd_api_v1_stats, ythdd_api_v1_user, ythdd_api_v1_info, ythdd_api_v1_query, ythdd_api_v1_meta, ythdd_api_v1_admin
 
-def requireAuthentication(func):
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        token = kwargs["r"].args.get('token')
-        if token == lewy_globals.config['api']['api_key']:
-            try:
-                status, received, data = func(*args, **kwargs)
-                return status, received, data
-            except:
-                raise AssertionError(f"Function \"{func.__name__}\" does not return status, code, and data as it should!")
-        else:
-            return 401, "error", {'error_msg': "Unauthorized"}
-    return wrapper
+def requireAuthentication(admin: bool = True):
+    def functionWrapper(func):
+        def wrapper(*args, **kwargs):
+            token = kwargs["r"].args.get('token')
+            if token == ythdd_globals.config['api']['api_key' + admin * '_admin']:
+                try:
+                    status, received, data = func(*args, **kwargs)
+                    return status, received, data
+                except:
+                    raise AssertionError(f"Function \"{func.__name__}\" does not return status, code, and data as it should!")
+            else:
+                return 401, "error", {'error_msg': "Unauthorized"}
+        return wrapper
+    return functionWrapper
 
 def incrementBadRequests():
     ythdd_globals.apiFailedRequests += 1
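A short sketch of how the reworked decorator factory is meant to be applied; the handlers below are illustrative (only debugger_halt, added in the next hunk, is a real call site). The key selection relies on bool-int multiplication: 'api_key' + admin * '_admin' evaluates to 'api_key_admin' when admin is True and to 'api_key' when admin is False.

    # Hypothetical handlers, shown only to demonstrate the decorator factory.
    @requireAuthentication(admin=False)   # token checked against config['api']['api_key']
    def stats(r):
        return 200, "ok", {"failed_requests": ythdd_globals.apiFailedRequests}

    @requireAuthentication(admin=True)    # token checked against config['api']['api_key_admin']
    def dangerous_action(r):
        return 200, "ok", {}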
@@ -143,6 +144,13 @@ def hot(data):
     incrementBadRequests()
     return notImplemented([data[1]]) # workaround before notImplemented is reworked
 
+@requireAuthentication(admin=True)
+def debugger_halt(r):
+    if not ythdd_globals.config["api"]["enable_debugger_halt"]:
+        return 403, "Administrator has disabled access for this endpoint.", []
+    breakpoint()
+    return 200, "Pdb triggered and ended successfully.", []
+
 def lookup(data, request):
     match data[0]:
         case 'stats':
@@ -163,6 +171,8 @@ def lookup(data, request):
         case 'admin':
             # REQUIRE CREDENTIALS!
             return stub_hello()
+        case 'halt':
+            return debugger_halt(r=request)
         case _:
             incrementBadRequests()
             return notImplemented(data)
@@ -1,5 +1,6 @@
 #!/usr/bin/python3
 import brotli, yt_dlp, requests, json, time
+from http.cookiejar import MozillaCookieJar
 from ythdd_globals import safeTraverse
 import ythdd_proto
 import ythdd_globals
@@ -19,7 +20,11 @@ ytdl_opts = {
             # "formats": ["dashy"]
         }
     },
-    "simulate": True
+    "simulate": True,
+    "js_runtimes": {
+        "deno": {}
+    },
+    'remote_components': ['ejs:github']
 }
 
 stage1_headers = {
@@ -129,7 +134,7 @@ web_context_dict = {
     }
 }
 
-def extract(url: str, getcomments=False, maxcomments="", manifest_fix=False):
+def extract(url: str, getcomments=False, maxcomments="", manifest_fix=False, use_cookies=None):
     # TODO: check user-agent and cookiefile
 
     ytdl_context = ytdl_opts.copy()
@@ -137,9 +142,6 @@ def extract(url: str, getcomments=False, maxcomments="", manifest_fix=False):
     if ythdd_globals.config['extractor']['user-agent']:
         yt_dlp.utils.std_headers['User-Agent'] = ythdd_globals.config['extractor']['user-agent']
 
-    if ythdd_globals.config['extractor']['cookies_path']:
-        ytdl_context['cookiefile'] = ythdd_globals.config['extractor']['cookies_path']
-
     if len(url) == 11:
         url = "https://www.youtube.com/watch?v=" + url
     if getcomments:
@@ -153,7 +155,27 @@ def extract(url: str, getcomments=False, maxcomments="", manifest_fix=False):
         ytdl_context['extractor_args']['youtube']['player_client'] = [ythdd_globals.config['extractor']['preferred_extractor']]
     else:
         ytdl_context['extractor_args']['youtube']['player_client'] = ['android_vr']
-    with yt_dlp.YoutubeDL(ytdl_opts) as ytdl:
+
+    if use_cookies is not None:
+        # can be either "global", "agegated" or None
+        deno_path = ythdd_globals.config['extractor']['deno_path']
+        match use_cookies:
+            case "global":
+                ytdl_context['cookiefile'] = ythdd_globals.config['extractor']['cookies_path']
+                ytdl_context['extractor_args']['youtube']['player_client'] = ['tv']
+                if not deno_path:
+                    print("FATAL ERROR: deno path is required for playback using cookies!")
+                ytdl_context['js_runtimes']['deno']['path'] = deno_path if deno_path else ""
+            case "agegated":
+                ytdl_context['cookiefile'] = ythdd_globals.config['extractor']['age_restricted_cookies_path']
+                ytdl_context['extractor_args']['youtube']['player_client'] = ['tv']
+                if not deno_path:
+                    print("FATAL ERROR: deno path is required for playback of age-restricted content!")
+                ytdl_context['js_runtimes']['deno']['path'] = deno_path if deno_path else ""
+            case None | _:
+                pass
+
+    with yt_dlp.YoutubeDL(ytdl_context) as ytdl:
         result = ytdl.sanitize_info(ytdl.extract_info(url, download=False))
         return result
 
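A rough usage sketch of the extended extract() signature (the video id is a placeholder; cookies_path, age_restricted_cookies_path and deno_path must be configured for the cookie branches to do anything useful):

    import ythdd_extractor

    # Default path: no cookie jar, android_vr player client.
    info = ythdd_extractor.extract("dQw4w9WgXcQ")

    # Cookie paths: 'tv' player client plus the matching cookie file;
    # a fatal error is printed if deno_path is missing from the config.
    info_global   = ythdd_extractor.extract("dQw4w9WgXcQ", use_cookies="global")
    info_agegated = ythdd_extractor.extract("dQw4w9WgXcQ", use_cookies="agegated")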
@@ -177,7 +199,7 @@ def WEBrelated(url: str):
 
     return extracted_json["contents"]['twoColumnWatchNextResults']["secondaryResults"]
 
-def WEBextractSinglePage(uri: str):
+def WEBextractSinglePage(uri: str, use_cookies=None):
     # WARNING! HIGHLY EXPERIMENTAL, DUE TO BREAK ANYTIME
 
     start_time = time.time()
@@ -185,11 +207,25 @@ def WEBextractSinglePage(uri: str):
     if len(uri) != 11:
         raise ValueError("WEBextractSinglePage expects a single, 11-character long argument")
 
-    response = requests.get("https://www.youtube.com/watch?v=" + uri, headers=ythdd_globals.getHeaders(caller='extractor'))
+    cookies = None
+    if use_cookies is not None:
+        match use_cookies:
+            case "global":
+                ythdd_globals.print_debug("wdata: using global cookies")
+                cookies = MozillaCookieJar(ythdd_globals.config["extractor"]["cookies_path"])
+                cookies.load()
+            case "agegated":
+                ythdd_globals.print_debug("wdata: using agegated cookies")
+                cookies = MozillaCookieJar(ythdd_globals.config["extractor"]["age_restricted_cookies_path"])
+                cookies.load()
+            case None | _:
+                pass
+
+    response = requests.get("https://www.youtube.com/watch?v=" + uri, headers=ythdd_globals.getHeaders(caller='extractor'), cookies=cookies)
     extracted_string = str(response.content.decode('utf8', 'unicode_escape'))
-    start = extracted_string.find('{"responseContext":{"serviceTrackingParams":')
+    start = extracted_string.find('{"responseContext":')
     end = extracted_string.find(';var ', start)
-    start2 = extracted_string.find('{"responseContext":{"serviceTrackingParams":', start + 1)
+    start2 = extracted_string.find('{"responseContext":', start + 1)
     end2 = extracted_string.find(';</script>', start2)
     extracted_json1 = json.loads(extracted_string[start:end])
     extracted_json2 = json.loads(extracted_string[start2:end2])
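For reference, the cookie handling above boils down to a standard-library pattern: MozillaCookieJar loads a Netscape-format cookies.txt export and requests accepts the jar directly through its cookies= parameter. A minimal sketch (the file name is an assumption):

    from http.cookiejar import MozillaCookieJar
    import requests

    jar = MozillaCookieJar("cookies.txt")  # Netscape/Mozilla cookies.txt format
    jar.load()                             # raises if the file is missing or malformed
    response = requests.get("https://www.youtube.com/", cookies=jar)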
@@ -32,7 +32,7 @@ def getConfig(configfile):
     global randomly_generated_passcode
 
     if not os.path.exists(configfile):
-        dummy_config = {'general': {'db_file_path': 'ythdd_db.sqlite', 'video_storage_directory_path': 'videos/', 'is_proxied': False, 'public_facing_url': 'http://127.0.0.1:5000/', 'debug': False, 'cache': True}, 'api': {'api_key': 'CHANGEME'}, 'proxy': {'user-agent': '', 'allow_proxying_videos': True, 'match_initcwndbps': True}, 'extractor': {'user-agent': '', 'cookies_path': ''}, 'admin': {'admins': ['admin']}, 'yt_dlp': {}, 'postprocessing': {'presets': [{'name': 'recommended: [N][<=720p] best V+A', 'format': 'bv[height<=720]+ba', 'reencode': ''}, {'name': '[N][1080p] best V+A', 'format': 'bv[height=1080]+ba', 'reencode': ''}, {'name': '[R][1080p] webm', 'format': 'bv[height=1080]+ba', 'reencode': 'webm'}, {'name': '[N][720p] best V+A', 'format': 'bv[height=720]+ba', 'reencode': ''}, {'name': '[R][720p] webm', 'format': 'bv[height=720]+ba', 'reencode': 'webm'}, {'name': '[N][480p] best V+A', 'format': 'bv[height=480]+ba', 'reencode': ''}, {'name': '[480p] VP9 webm/reencode', 'format': 'bv*[height=480][ext=webm]+ba/bv[height=480]+ba', 'reencode': 'webm'}, {'name': '[N][1080p] best video only', 'format': 'bv[height=1080]', 'reencode': ''}, {'name': '[N][opus] best audio only', 'format': 'ba', 'reencode': 'opus'}]}}
+        dummy_config = {'general': {'db_file_path': 'ythdd_db.sqlite', 'video_storage_directory_path': 'videos/', 'is_proxied': False, 'public_facing_url': 'http://127.0.0.1:5000/', 'debug': False, 'cache': True}, 'api': {'api_key': 'CHANGEME', 'enable_debugger_halt': False}, 'proxy': {'user-agent': '', 'allow_proxying_videos': True, 'match_initcwndbps': True}, 'extractor': {'user-agent': '', 'cookies_path': ''}, 'admin': {'admins': ['admin']}, 'yt_dlp': {}, 'postprocessing': {'presets': [{'name': 'recommended: [N][<=720p] best V+A', 'format': 'bv[height<=720]+ba', 'reencode': ''}, {'name': '[N][1080p] best V+A', 'format': 'bv[height=1080]+ba', 'reencode': ''}, {'name': '[R][1080p] webm', 'format': 'bv[height=1080]+ba', 'reencode': 'webm'}, {'name': '[N][720p] best V+A', 'format': 'bv[height=720]+ba', 'reencode': ''}, {'name': '[R][720p] webm', 'format': 'bv[height=720]+ba', 'reencode': 'webm'}, {'name': '[N][480p] best V+A', 'format': 'bv[height=480]+ba', 'reencode': ''}, {'name': '[480p] VP9 webm/reencode', 'format': 'bv*[height=480][ext=webm]+ba/bv[height=480]+ba', 'reencode': 'webm'}, {'name': '[N][1080p] best video only', 'format': 'bv[height=1080]', 'reencode': ''}, {'name': '[N][opus] best audio only', 'format': 'ba', 'reencode': 'opus'}]}}
         # if a passcode has not been provided by the user (config file doesn't exist, and user didn't specify it using an argument)
         print(f"{colors.WARNING}WARNING{colors.ENDC}: Using default, baked in config data. {colors.ENDL}"
               f" Consider copying and editing the provided example file ({colors.OKCYAN}config.default.toml{colors.ENDC}).")
@@ -163,11 +163,24 @@ def videos(data):
 
     wdata = ythdd_extractor.WEBextractSinglePage(data[3])
 
+    age_restricted = False
     error = getError(wdata)
     if error is not None:
-        return send(500, {"status": "error", "error": error})
-
-    ydata = ythdd_extractor.extract(data[3])
+        if error.startswith("(LOGIN_REQUIRED)") and "inappropriate for some users" in error:
+            # check if user provided age-gated cookies
+            if ythdd_globals.config["extractor"]["age_restricted_cookies_path"]:
+                ythdd_globals.print_debug(f"videos({data[3]}): using agegated cookies to bypass restriction")
+                ydata = ythdd_extractor.extract(data[3], use_cookies="agegated")
+                wdata = ythdd_extractor.WEBextractSinglePage(data[3], use_cookies="agegated")
+                age_restricted = True
+            else:
+                # return error if no age-gated cookies are provided
+                return send(500, {"status": "error", "error": error})
+        else:
+            # return error if it doesn't mention age restriction
+            return send(500, {"status": "error", "error": error})
+    else:
+        ydata = ythdd_extractor.extract(data[3])
 
     #return send(200, {'ydata': ydata, 'wdata': wdata})
     #return send(200, {'idata': idata, 'wdata': wdata})
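The bypass only fires for a specific error shape. A hedged illustration of the string test (the sample messages approximate what getError() might return and are not verbatim project output):

    error = "(LOGIN_REQUIRED) This video may be inappropriate for some users."
    error.startswith("(LOGIN_REQUIRED)") and "inappropriate for some users" in error  # True: retry with age-gated cookies

    error = "(LOGIN_REQUIRED) Please sign in to view this video."
    error.startswith("(LOGIN_REQUIRED)") and "inappropriate for some users" in error  # False: plain 500 response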
@@ -277,16 +290,21 @@ def videos(data):
             continue
         if video_stream["format_id"] == "18": # todo: do this dynamically
             initial_fstreams_y[int(video_stream["format_id"])] = video_stream
-        else:
+        elif video_stream["format_id"].isdigit():
+            # filter out DRC audio
             initial_astreams_y[int(video_stream["format_id"])] = video_stream
+        else:
+            continue
 
     # format streams
     for video_stream in wdata_streams["formats"]:
         initial_fstreams_w[video_stream["itag"]] = video_stream
 
     # adaptive streams
-    for video_stream in wdata_streams["adaptiveFormats"]:
-        initial_astreams_w[video_stream["itag"]] = video_stream
+    for audiovideo_stream in wdata_streams["adaptiveFormats"]:
+        if not "isVb" in audiovideo_stream and not "isDrc" in audiovideo_stream:
+            # skip DRC and VB formats
+            initial_astreams_w[audiovideo_stream["itag"]] = audiovideo_stream
 
     for itag in initial_astreams_y:
         if itag in initial_astreams_w:
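A hedged illustration of what the new itag filtering accepts; the sample format_id values follow common yt-dlp naming conventions and are assumptions, not values taken from this diff:

    for format_id in ["18", "251", "251-drc", "sb0"]:
        if format_id == "18":
            kind = "format stream"    # classic progressive itag, kept separately
        elif format_id.isdigit():
            kind = "adaptive stream"  # plain numeric itags pass through
        else:
            kind = "skipped"          # e.g. DRC audio ("251-drc") or storyboards ("sb0")
        print(format_id, "->", kind)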
@@ -301,6 +319,10 @@ def videos(data):
         adaptive_formats, format_streams = [{"url": f"http://a/?expire={int(time_start + 5.9 * 60 * 60)}", "itag": "18", "type": "", "clen": "0", "lmt": "", "projectionType": "RECTANGULAR"}], [] # freetube/clipious shenanigans, see: https://github.com/FreeTubeApp/FreeTube/pull/5997 and https://github.com/lamarios/clipious/blob/b9e7885/lib/videos/models/adaptive_format.g.dart
         hls_url = safeTraverse(ydata, ["url"], default="ythdd: unable to retrieve stream url")
 
+    if age_restricted:
+        if not adaptive_formats:
+            adaptive_formats = [{"url": f"http://a/?expire={int(time_start + 5.9 * 60 * 60)}", "itag": "18", "type": "", "clen": "0", "lmt": "", "projectionType": "RECTANGULAR"}] # same as above
+
     if live_now:
         video_type = "livestream"
         premiere_timestamp = published # ??? that works i guess
@@ -519,6 +519,32 @@ def extractTextFromSimpleOrRuns(obj: dict, default: str = "") -> str:
     return text
 
 
+def findNearestResolution(width: int, height: int) -> int:
+    # Finds the nearest standard resolution (one of 144p, 240p, ...)
+    # So far only used for Yattee, as it has trouble playing anything
+    # without one of the standard resolutions. Playback on other
+    # clients is unaffected.
+
+    # failsafe behaviour
+    try:
+        width = int(width)
+        height = int(height)
+        res = min(width, height)
+    except:
+        return 360
+
+    standard_resolutions = [144, 240, 360, 720, 1080, 2160, 4320]
+    if res in standard_resolutions:
+        return res
+
+    # calculate relative distance to one of the standard resolutions
+    res_normalized = [abs(1 - (x / res)) for x in standard_resolutions]
+    # pick the one where the distance is the smallest
+    target_index = res_normalized.index(min(res_normalized))
+    target_res = standard_resolutions[target_index]
+
+    return target_res
+
 def parseFormatStreams(wdata_fstream: dict, ydata_stream: dict) -> dict:
 
     try:
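A few worked examples for the findNearestResolution helper added above, computed by hand from the code as written (480 and 1440 are not in standard_resolutions, so those inputs snap to the relatively nearest listed value):

    findNearestResolution(1920, 1080)   # -> 1080 (exact match on min(width, height))
    findNearestResolution(854, 480)     # -> 360  (480 is not listed; 360 minimises |1 - x/480|)
    findNearestResolution(2560, 1440)   # -> 1080 (1440 is not listed either)
    findNearestResolution("bad", 720)   # -> 360  (failsafe path: int() raises, caught by the bare except)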
@@ -536,7 +562,7 @@ def parseFormatStreams(wdata_fstream: dict, ydata_stream: dict) -> dict:
         "bitrate": str(wdata_fstream["bitrate"]),
         "fps": wdata_fstream["fps"],
         "size": f"{wdata_fstream['width']}x{wdata_fstream['height']}",
-        "resolution": f"{wdata_fstream['height'] if wdata_fstream['height'] in (144, 240, 360, 720, 1080, 2160) else 360}p",
+        "resolution": f"{findNearestResolution(wdata_fstream['width'], wdata_fstream['height'])}p", # possibly not really needed here
         "qualityLabel": wdata_fstream["qualityLabel"],
         "container": safeTraverse(FORMATS.get(wdata_fstream["itag"]), [ "ext"], default="mp4"), # invidious_formats
         "encoding": safeTraverse(FORMATS.get(wdata_fstream["itag"]), ["vcodec"], default="mp4") # invidious_formats
@@ -564,8 +590,8 @@ def parseAdaptiveStreams(wdata_astream: dict, ydata_stream: dict) -> dict:
         "clen": wdata_astream["contentLength"],
         "lmt": wdata_astream["lastModified"],
         "projectionType": wdata_astream["projectionType"],
-        "container": safeTraverse(FORMATS.get(wdata_astream["itag"]), [ "ext"], default="mp4"), # invidious_formats,
-        "encoding": safeTraverse(FORMATS.get(wdata_astream["itag"]), ["vcodec"], default="mp4") # invidious_formats,
+        "container": safeTraverse(FORMATS.get(wdata_astream["itag"]), [ "ext"], default="mp4"), # invidious_formats
+        "encoding": safeTraverse(FORMATS.get(wdata_astream["itag"]), ["vcodec"], default="mp4") # invidious_formats
     }
 
     isVideo = True
@@ -577,16 +603,16 @@ def parseAdaptiveStreams(wdata_astream: dict, ydata_stream: dict) -> dict:
         # video-specific metadata
         astream["fps"] = wdata_astream["fps"]
         astream["size"] = f"{wdata_astream['width']}x{wdata_astream['height']}"
-        astream["resolution"] = f"{wdata_astream['height'] if wdata_astream['height'] in (144, 240, 360, 720, 1080, 2160) else 360}p"
+        astream["resolution"] = f"{findNearestResolution(wdata_astream['width'], wdata_astream['height'])}p"
         astream["qualityLabel"] = wdata_astream["qualityLabel"]
         astream["colorInfo"] = safeTraverse(wdata_astream, ["colorInfo"])
     else:
         astream = astream_common
         # audio-specific metadata
         astream["encoding"] = safeTraverse(FORMATS.get(wdata_astream["itag"]), ["acodec"], default="mp4")
         astream["audioQuality"] = wdata_astream["audioQuality"],
         astream["audioSampleRate"] = int(wdata_astream["audioSampleRate"]),
         astream["audioChannels"] = wdata_astream["audioChannels"]
 
     # breakpoint()
     return astream