Changed to 100 char line limit, changed to ruff
parent 5679dfab67
commit 17cf052c19
13 changed files with 52 additions and 141 deletions
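The changes below rename the pylint CI job to ruff and replace the black 79-character limit with a ruff 100-character limit; the remaining hunks are the re-wraps that the wider limit allows. As a rough local equivalent of the new CI checks (a sketch that assumes only what the CI script below shows; where mypy comes from in CI is not visible here):

    pip install . ruff mypy --quiet   # install the package plus the linters (installing mypy here is an assumption)
    mypy syng --strict                # type-checking job, unchanged
    ruff syng                         # lint job as written in the CI script; current ruff releases spell this "ruff check syng"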
@@ -7,12 +7,12 @@ mypy:
     - pip install . --quiet
     - mypy syng --strict
 
-pylint:
+ruff:
   stage: test
   script:
-    - pip install pylint --quiet
+    - pip install ruff --quiet
     - pip install . --quiet
-    - pylint syng
+    - ruff syng
 
 test:
   stage: test
@@ -38,7 +38,7 @@ module = [
     "pytube",
     "minio",
     "aiocmd",
-    "pyqrcode",
+    "pyqrcodeng",
    "socketio",
    "pillow",
    "PIL",
@@ -46,5 +46,5 @@ module = [
 ]
 ignore_missing_imports = true
 
-[tool.black]
-line-length = 79
+[tool.ruff]
+line-length = 100
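For quick reference, the resulting tool configuration from the hunk above, consolidated (the rest of pyproject.toml is unchanged and omitted; whether formatting is also handed to ruff is not shown in this commit):

    [tool.ruff]
    line-length = 100   # was [tool.black] with line-length = 79

The wider limit is what lets the call sites in the hunks below collapse back onto single lines.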
@@ -1,5 +1,4 @@
 from typing import Any
-from typing import Awaitable
 from typing import Callable
 from typing import Optional
 from typing import TypeVar
@@ -223,9 +223,7 @@ async def handle_get_meta_info(data: dict[str, Any]) -> None:
     :rtype: None
     """
     source: Source = sources[data["source"]]
-    meta_info: dict[str, Any] = await source.get_missing_metadata(
-        Entry(**data)
-    )
+    meta_info: dict[str, Any] = await source.get_missing_metadata(Entry(**data))
     await sio.emit("meta-info", {"uuid": data["uuid"], "meta": meta_info})
 
 
@@ -328,16 +326,10 @@ async def handle_client_registered(data: dict[str, Any]) -> None:
     if data["success"]:
         logging.info("Registered")
         print(f"Join here: {state.server}/{data['room']}")
-        print(
-            pyqrcode.create(f"{state.server}/{data['room']}").terminal(
-                quiet_zone=1
-            )
-        )
+        print(pyqrcode.create(f"{state.server}/{data['room']}").terminal(quiet_zone=1))
         state.room = data["room"]
         await sio.emit("sources", {"sources": list(sources.keys())})
-        if (
-            state.current_source is None
-        ):  # A possible race condition can occur here
+        if state.current_source is None:  # A possible race condition can occur here
             await sio.emit("get-first")
     else:
         logging.warning("Registration failed")
@@ -378,9 +370,7 @@ async def handle_request_config(data: dict[str, Any]) -> None:
             },
         )
     else:
-        await sio.emit(
-            "config", {"source": data["source"], "config": config}
-        )
+        await sio.emit("config", {"source": data["source"], "config": config})
 
 
 async def aiomain() -> None:
@@ -426,8 +416,7 @@ async def aiomain() -> None:
         state.secret = args.secret
     else:
         state.secret = "".join(
-            secrets.choice(string.ascii_letters + string.digits)
-            for _ in range(8)
+            secrets.choice(string.ascii_letters + string.digits) for _ in range(8)
         )
         print(f"Generated secret: {state.secret}")
 
@@ -83,9 +83,7 @@ class Entry:
                 re.sub(
                     r"\s",
                     " ",
-                    performers.lower()
-                    .replace(".", " ")
-                    .replace(",", " "),
+                    performers.lower().replace(".", " ").replace(",", " "),
                 ),
             ).split(" "),
         )
@@ -89,9 +89,7 @@ class Queue:
         """
         return list(self._queue)  # [item for item in self._queue]
 
-    def update(
-        self, uuid: UUID | str, updater: Callable[[Entry], None]
-    ) -> None:
+    def update(self, uuid: UUID | str, updater: Callable[[Entry], None]) -> None:
         """
         Update entries in the queue, identified by their uuid.
 
@@ -64,9 +64,7 @@ async def root_handler(request: Any) -> Any:
     :rtype web.FileResponse:
     """
     if request.path.endswith("/favicon.ico"):
-        return web.FileResponse(
-            os.path.join(app["root_folder"], "favicon.ico")
-        )
+        return web.FileResponse(os.path.join(app["root_folder"], "favicon.ico"))
     return web.FileResponse(os.path.join(app["root_folder"], "index.html"))
 
 
@@ -220,10 +218,7 @@ async def handle_waiting_room_append(sid: str, data: dict[str, Any]) -> None:
             data["uid"] is not None
             and len(list(state.queue.find_by_uid(data["uid"]))) == 0
         )
-        or (
-            data["uid"] is None
-            and state.queue.find_by_name(data["performer"]) is None
-        )
+        or (data["uid"] is None and state.queue.find_by_name(data["performer"]) is None)
     ):
         await append_to_queue(room, entry, sid)
         return
@@ -702,9 +697,7 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
 
         if (
             "registration-key" not in data
-            or hashlib.sha256(
-                data["registration-key"].encode()
-            ).hexdigest()
+            or hashlib.sha256(data["registration-key"].encode()).hexdigest()
             not in keys
         ):
             await sio.emit(
@@ -741,9 +734,7 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
     else:
         logger.info("Registerd new client %s", room)
         initial_entries = [Entry(**entry) for entry in data["queue"]]
-        initial_waiting_room = [
-            Entry(**entry) for entry in data["waiting_room"]
-        ]
+        initial_waiting_room = [Entry(**entry) for entry in data["waiting_room"]]
         initial_recent = [Entry(**entry) for entry in data["recent"]]
 
         clients[room] = State(
@@ -760,9 +751,7 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
         )
 
     await sio.enter_room(sid, room)
-    await sio.emit(
-        "client-registered", {"success": True, "room": room}, room=sid
-    )
+    await sio.emit("client-registered", {"success": True, "room": room}, room=sid)
     await send_state(clients[room], sid)
 
 
@@ -833,9 +822,9 @@ async def handle_config_chunk(sid: str, data: dict[str, Any]) -> None:
         return
 
     if data["source"] not in state.client.sources:
-        state.client.sources[data["source"]] = available_sources[
-            data["source"]
-        ](data["config"])
+        state.client.sources[data["source"]] = available_sources[data["source"]](
+            data["config"]
+        )
     else:
         state.client.sources[data["source"]].add_to_config(data["config"])
 
@@ -1079,10 +1068,7 @@ async def cleanup() -> None:
     to_remove: list[str] = []
     for sid, state in clients.items():
         logger.info("Client %s, last seen: %s", sid, str(state.last_seen))
-        if (
-            state.last_seen + datetime.timedelta(hours=4)
-            < datetime.datetime.now()
-        ):
+        if state.last_seen + datetime.timedelta(hours=4) < datetime.datetime.now():
             logger.info("No activity for 4 hours, removing %s", sid)
             to_remove.append(sid)
     for sid in to_remove:
@@ -1101,9 +1087,7 @@ async def cleanup() -> None:
     loop_next = asyncio.get_event_loop().time() + offset
 
     logger.info("Next Cleanup at %s", str(next))
-    asyncio.get_event_loop().call_at(
-        loop_next, lambda: asyncio.create_task(cleanup())
-    )
+    asyncio.get_event_loop().call_at(loop_next, lambda: asyncio.create_task(cleanup()))
 
 
 async def background_tasks(
@@ -8,9 +8,6 @@ from typing import Any
 
 from .source import available_sources as available_sources
 from .source import Source as Source
-from .youtube import YoutubeSource
-from .s3 import S3Source
-from .files import FilesSource
 
 
 def configure_sources(configs: dict[str, Any]) -> dict[str, Source]:
@@ -34,9 +34,7 @@ class FilesSource(Source):
             for path, _, files in os.walk(self.dir):
                 for file in files:
                     if file.endswith(".cdg"):
-                        file_list.append(
-                            os.path.join(path, file)[len(self.dir) :]
-                        )
+                        file_list.append(os.path.join(path, file)[len(self.dir) :])
             return file_list
 
         return await asyncio.to_thread(_get_file_list)
@@ -40,11 +40,7 @@ class S3Source(Source):
         super().__init__(config)
         self.source_name = "s3"
 
-        if (
-            "endpoint" in config
-            and "access_key" in config
-            and "secret_key" in config
-        ):
+        if "endpoint" in config and "access_key" in config and "secret_key" in config:
             self.minio: Minio = Minio(
                 config["endpoint"],
                 access_key=config["access_key"],
@@ -81,9 +77,7 @@ class S3Source(Source):
 
         def _get_file_list() -> list[str]:
             if self.index_file is not None and os.path.isfile(self.index_file):
-                with open(
-                    self.index_file, "r", encoding="utf8"
-                ) as index_file_handle:
+                with open(self.index_file, "r", encoding="utf8") as index_file_handle:
                     return cast(list[str], load(index_file_handle))
 
             file_list = [
@@ -91,12 +85,8 @@ class S3Source(Source):
                 for obj in self.minio.list_objects(self.bucket, recursive=True)
                 if os.path.splitext(obj.object_name)[1] in self.extensions
             ]
-            if self.index_file is not None and not os.path.isfile(
-                self.index_file
-            ):
-                with open(
-                    self.index_file, "w", encoding="utf8"
-                ) as index_file_handle:
+            if self.index_file is not None and not os.path.isfile(self.index_file):
+                with open(self.index_file, "w", encoding="utf8") as index_file_handle:
                     dump(file_list, index_file_handle)
             return file_list
 
@@ -119,16 +109,12 @@ class S3Source(Source):
 
         await self.ensure_playable(entry)
 
-        audio_file_name: Optional[str] = self.downloaded_files[
-            entry.ident
-        ].audio
+        audio_file_name: Optional[str] = self.downloaded_files[entry.ident].audio
 
         if audio_file_name is None:
             duration: int = 180
         else:
-            duration = await asyncio.to_thread(
-                mutagen_wrapped, audio_file_name
-            )
+            duration = await asyncio.to_thread(mutagen_wrapped, audio_file_name)
 
         return {"duration": int(duration)}
 
@@ -201,9 +201,7 @@ class Source:
         filtered: list[str] = self.filter_data_by_query(query, self._index)
         results: list[Result] = []
         for filename in filtered:
-            result: Optional[Result] = Result.from_filename(
-                filename, self.source_name
-            )
+            result: Optional[Result] = Result.from_filename(filename, self.source_name)
             if result is None:
                 continue
             results.append(result)
@@ -364,16 +362,12 @@ class Source:
 
         def contains_all_words(words: list[str], element: str) -> bool:
             for word in words:
-                if not word.lower() in os.path.basename(element).lower():
+                if word.lower() not in os.path.basename(element).lower():
                     return False
             return True
 
         splitquery = shlex.split(query)
-        return [
-            element
-            for element in data
-            if contains_all_words(splitquery, element)
-        ]
+        return [element for element in data if contains_all_words(splitquery, element)]
 
     async def get_file_list(self) -> list[str]:
         """
@@ -411,10 +405,7 @@ class Source:
         self._index = await self.get_file_list()
         print(f"{self.source_name}: done")
         chunked = zip_longest(*[iter(self._index)] * 1000, fillvalue="")
-        return [
-            {"index": list(filter(lambda x: x != "", chunk))}
-            for chunk in chunked
-        ]
+        return [{"index": list(filter(lambda x: x != "", chunk))} for chunk in chunked]
 
     def add_to_config(self, config: dict[str, Any]) -> None:
         """
@@ -57,15 +57,9 @@ class YoutubeSource(Source):
         super().__init__(config)
         self.source_name = "youtube"
 
-        self.innertube_client: innertube.InnerTube = innertube.InnerTube(
-            client="WEB"
-        )
-        self.channels: list[str] = (
-            config["channels"] if "channels" in config else []
-        )
-        self.tmp_dir: str = (
-            config["tmp_dir"] if "tmp_dir" in config else "/tmp/syng"
-        )
+        self.innertube_client: innertube.InnerTube = innertube.InnerTube(client="WEB")
+        self.channels: list[str] = config["channels"] if "channels" in config else []
+        self.tmp_dir: str = config["tmp_dir"] if "tmp_dir" in config else "/tmp/syng"
         self.max_res: int = config["max_res"] if "max_res" in config else 720
         self.start_streaming: bool = (
             config["start_streaming"] if "start_streaming" in config else False
@@ -105,10 +99,7 @@ class YoutubeSource(Source):
         :type entry: Entry
         :rtype: None
         """
-        if (
-            self.start_streaming
-            and not self.downloaded_files[entry.ident].complete
-        ):
+        if self.start_streaming and not self.downloaded_files[entry.ident].complete:
             self.player = await self.play_mpv(
                 entry.ident,
                 None,
@@ -177,9 +168,7 @@ class YoutubeSource(Source):
         """
 
        def _contains_index(query: str, result: YouTube) -> float:
-            compare_string: str = (
-                result.title.lower() + " " + result.author.lower()
-            )
+            compare_string: str = result.title.lower() + " " + result.author.lower()
             hits: int = 0
             queries: list[str] = shlex.split(query.lower())
             for word in queries:
@@ -197,9 +186,7 @@ class YoutubeSource(Source):
             asyncio.to_thread(self._yt_search, query),
         )
         results = [
-            search_result
-            for yt_result in results_lists
-            for search_result in yt_result
+            search_result for yt_result in results_lists for search_result in yt_result
         ]
 
         results.sort(key=partial(_contains_index, query))
@@ -232,9 +219,7 @@ class YoutubeSource(Source):
 
         A lot of black Magic happens here.
         """
-        browse_id: str = Channel(
-            f"https://www.youtube.com{channel}"
-        ).channel_id
+        browse_id: str = Channel(f"https://www.youtube.com{channel}").channel_id
         endpoint: str = f"{self.innertube_client.base_url}/browse"
 
         data: dict[str, str] = {
@@ -248,9 +233,7 @@ class YoutubeSource(Source):
         )
         items: list[dict[str, Any]] = results["contents"][
             "twoColumnBrowseResultsRenderer"
-        ]["tabs"][-1]["expandableTabRenderer"]["content"][
-            "sectionListRenderer"
-        ][
+        ]["tabs"][-1]["expandableTabRenderer"]["content"]["sectionListRenderer"][
             "contents"
         ]
 
@@ -259,14 +242,13 @@ class YoutubeSource(Source):
             try:
                 if (
                     "itemSectionRenderer" in item
-                    and "videoRenderer"
-                    in item["itemSectionRenderer"]["contents"][0]
+                    and "videoRenderer" in item["itemSectionRenderer"]["contents"][0]
                 ):
                     yt_url: str = (
                         "https://youtube.com/watch?v="
-                        + item["itemSectionRenderer"]["contents"][0][
-                            "videoRenderer"
-                        ]["videoId"]
+                        + item["itemSectionRenderer"]["contents"][0]["videoRenderer"][
+                            "videoId"
+                        ]
                     )
                     author: str = item["itemSectionRenderer"]["contents"][0][
                         "videoRenderer"
@@ -283,9 +265,7 @@ class YoutubeSource(Source):
                 pass
         return list_of_videos
 
-    async def _buffer_with_yt_dlp(
-        self, entry: Entry
-    ) -> Tuple[str, Optional[str]]:
+    async def _buffer_with_yt_dlp(self, entry: Entry) -> Tuple[str, Optional[str]]:
         """
         Download the video using yt-dlp.
 
@@ -329,16 +309,13 @@ class YoutubeSource(Source):
 
         video_streams: StreamQuery = streams.filter(
             type="video",
-            custom_filter_functions=[
-                lambda s: int(s.resolution[:-1]) <= self.max_res
-            ],
+            custom_filter_functions=[lambda s: int(s.resolution[:-1]) <= self.max_res],
         )
         audio_streams: StreamQuery = streams.filter(only_audio=True)
 
         best_video_stream: Stream = sorted(
             video_streams,
-            key=lambda s: int(s.resolution[:-1])
-            + (1 if s.is_progressive else 0),
+            key=lambda s: int(s.resolution[:-1]) + (1 if s.is_progressive else 0),
         )[-1]
         best_audio_stream: Stream = sorted(
             audio_streams, key=lambda s: int(s.abr[:-4])
@@ -27,21 +27,15 @@ async def handle_state(data: dict[str, Any]) -> None:
     print("New Queue")
     for raw_item in data["queue"]:
         item = Entry(**raw_item)
-        print(
-            f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})"
-        )
+        print(f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})")
     print("Waiting Room")
     for raw_item in data["shadow_queue"]:
         item = Entry(**raw_item)
-        print(
-            f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})"
-        )
+        print(f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})")
     print("Recent")
     for raw_item in data["recent"]:
         item = Entry(**raw_item)
-        print(
-            f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})"
-        )
+        print(f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})")
 
 
 @sio.on("msg")