Changed to 100 char line limit, changed to ruff
parent 5679dfab67
commit 17cf052c19
13 changed files with 52 additions and 141 deletions
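In short: the pylint job in CI is replaced by ruff, and the line limit moves from black's 79 characters to 100, so many statements that had been wrapped across several lines are joined back into single lines. The resulting linter configuration in pyproject.toml amounts to the following (a minimal sketch taken from the hunks below; other tool sections are assumed unchanged):

    # line limit raised from 79 (black) to 100 (ruff)
    [tool.ruff]
    line-length = 100

The CI job likewise installs ruff and runs "ruff syng" where it previously ran "pylint syng".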
@@ -7,12 +7,12 @@ mypy:
     - pip install . --quiet
     - mypy syng --strict
 
-pylint:
+ruff:
   stage: test
   script:
-    - pip install pylint --quiet
+    - pip install ruff --quiet
     - pip install . --quiet
-    - pylint syng
+    - ruff syng
 
 test:
   stage: test

@@ -38,7 +38,7 @@ module = [
    "pytube",
    "minio",
    "aiocmd",
    "pyqrcode",
    "pyqrcodeng",
    "socketio",
    "pillow",
    "PIL",
@@ -46,5 +46,5 @@ module = [
 ]
 ignore_missing_imports = true
 
-[tool.black]
-line-length = 79
+[tool.ruff]
+line-length = 100

@@ -1,5 +1,4 @@
from typing import Any
from typing import Awaitable
from typing import Callable
from typing import Optional
from typing import TypeVar

@@ -223,9 +223,7 @@ async def handle_get_meta_info(data: dict[str, Any]) -> None:
     :rtype: None
     """
     source: Source = sources[data["source"]]
-    meta_info: dict[str, Any] = await source.get_missing_metadata(
-        Entry(**data)
-    )
+    meta_info: dict[str, Any] = await source.get_missing_metadata(Entry(**data))
     await sio.emit("meta-info", {"uuid": data["uuid"], "meta": meta_info})

@@ -328,16 +326,10 @@ async def handle_client_registered(data: dict[str, Any]) -> None:
     if data["success"]:
         logging.info("Registered")
         print(f"Join here: {state.server}/{data['room']}")
-        print(
-            pyqrcode.create(f"{state.server}/{data['room']}").terminal(
-                quiet_zone=1
-            )
-        )
+        print(pyqrcode.create(f"{state.server}/{data['room']}").terminal(quiet_zone=1))
         state.room = data["room"]
         await sio.emit("sources", {"sources": list(sources.keys())})
-        if (
-            state.current_source is None
-        ): # A possible race condition can occur here
+        if state.current_source is None: # A possible race condition can occur here
             await sio.emit("get-first")
     else:
         logging.warning("Registration failed")

@@ -378,9 +370,7 @@ async def handle_request_config(data: dict[str, Any]) -> None:
             },
         )
     else:
-        await sio.emit(
-            "config", {"source": data["source"], "config": config}
-        )
+        await sio.emit("config", {"source": data["source"], "config": config})


 async def aiomain() -> None:

@@ -426,8 +416,7 @@ async def aiomain() -> None:
         state.secret = args.secret
     else:
         state.secret = "".join(
-            secrets.choice(string.ascii_letters + string.digits)
-            for _ in range(8)
+            secrets.choice(string.ascii_letters + string.digits) for _ in range(8)
         )
         print(f"Generated secret: {state.secret}")

@@ -83,9 +83,7 @@ class Entry:
                 re.sub(
                     r"\s",
                     " ",
-                    performers.lower()
-                    .replace(".", " ")
-                    .replace(",", " "),
+                    performers.lower().replace(".", " ").replace(",", " "),
                 ),
             ).split(" "),
         )

@@ -89,9 +89,7 @@ class Queue:
         """
         return list(self._queue) # [item for item in self._queue]
 
-    def update(
-        self, uuid: UUID | str, updater: Callable[[Entry], None]
-    ) -> None:
+    def update(self, uuid: UUID | str, updater: Callable[[Entry], None]) -> None:
         """
         Update entries in the queue, identified by their uuid.
 

@@ -64,9 +64,7 @@ async def root_handler(request: Any) -> Any:
     :rtype web.FileResponse:
     """
     if request.path.endswith("/favicon.ico"):
-        return web.FileResponse(
-            os.path.join(app["root_folder"], "favicon.ico")
-        )
+        return web.FileResponse(os.path.join(app["root_folder"], "favicon.ico"))
     return web.FileResponse(os.path.join(app["root_folder"], "index.html"))

@@ -220,10 +218,7 @@ async def handle_waiting_room_append(sid: str, data: dict[str, Any]) -> None:
             data["uid"] is not None
             and len(list(state.queue.find_by_uid(data["uid"]))) == 0
         )
-        or (
-            data["uid"] is None
-            and state.queue.find_by_name(data["performer"]) is None
-        )
+        or (data["uid"] is None and state.queue.find_by_name(data["performer"]) is None)
     ):
         await append_to_queue(room, entry, sid)
         return

@@ -702,9 +697,7 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
 
     if (
         "registration-key" not in data
-        or hashlib.sha256(
-            data["registration-key"].encode()
-        ).hexdigest()
+        or hashlib.sha256(data["registration-key"].encode()).hexdigest()
         not in keys
     ):
         await sio.emit(

@@ -741,9 +734,7 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
     else:
         logger.info("Registerd new client %s", room)
         initial_entries = [Entry(**entry) for entry in data["queue"]]
-        initial_waiting_room = [
-            Entry(**entry) for entry in data["waiting_room"]
-        ]
+        initial_waiting_room = [Entry(**entry) for entry in data["waiting_room"]]
         initial_recent = [Entry(**entry) for entry in data["recent"]]
 
         clients[room] = State(

@@ -760,9 +751,7 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
         )
 
         await sio.enter_room(sid, room)
-        await sio.emit(
-            "client-registered", {"success": True, "room": room}, room=sid
-        )
+        await sio.emit("client-registered", {"success": True, "room": room}, room=sid)
         await send_state(clients[room], sid)

@@ -833,9 +822,9 @@ async def handle_config_chunk(sid: str, data: dict[str, Any]) -> None:
         return
 
     if data["source"] not in state.client.sources:
-        state.client.sources[data["source"]] = available_sources[
-            data["source"]
-        ](data["config"])
+        state.client.sources[data["source"]] = available_sources[data["source"]](
+            data["config"]
+        )
     else:
         state.client.sources[data["source"]].add_to_config(data["config"])

@@ -1079,10 +1068,7 @@ async def cleanup() -> None:
     to_remove: list[str] = []
     for sid, state in clients.items():
         logger.info("Client %s, last seen: %s", sid, str(state.last_seen))
-        if (
-            state.last_seen + datetime.timedelta(hours=4)
-            < datetime.datetime.now()
-        ):
+        if state.last_seen + datetime.timedelta(hours=4) < datetime.datetime.now():
             logger.info("No activity for 4 hours, removing %s", sid)
             to_remove.append(sid)
     for sid in to_remove:

@@ -1101,9 +1087,7 @@ async def cleanup() -> None:
     loop_next = asyncio.get_event_loop().time() + offset
 
     logger.info("Next Cleanup at %s", str(next))
-    asyncio.get_event_loop().call_at(
-        loop_next, lambda: asyncio.create_task(cleanup())
-    )
+    asyncio.get_event_loop().call_at(loop_next, lambda: asyncio.create_task(cleanup()))
 
 
 async def background_tasks(

@@ -8,9 +8,6 @@ from typing import Any
 
 from .source import available_sources as available_sources
 from .source import Source as Source
-from .youtube import YoutubeSource
-from .s3 import S3Source
-from .files import FilesSource
 
 
 def configure_sources(configs: dict[str, Any]) -> dict[str, Source]:

@@ -34,9 +34,7 @@ class FilesSource(Source):
             for path, _, files in os.walk(self.dir):
                 for file in files:
                     if file.endswith(".cdg"):
-                        file_list.append(
-                            os.path.join(path, file)[len(self.dir) :]
-                        )
+                        file_list.append(os.path.join(path, file)[len(self.dir) :])
             return file_list
 
         return await asyncio.to_thread(_get_file_list)

@@ -40,11 +40,7 @@ class S3Source(Source):
         super().__init__(config)
         self.source_name = "s3"
 
-        if (
-            "endpoint" in config
-            and "access_key" in config
-            and "secret_key" in config
-        ):
+        if "endpoint" in config and "access_key" in config and "secret_key" in config:
             self.minio: Minio = Minio(
                 config["endpoint"],
                 access_key=config["access_key"],

@@ -81,9 +77,7 @@ class S3Source(Source):
 
         def _get_file_list() -> list[str]:
             if self.index_file is not None and os.path.isfile(self.index_file):
-                with open(
-                    self.index_file, "r", encoding="utf8"
-                ) as index_file_handle:
+                with open(self.index_file, "r", encoding="utf8") as index_file_handle:
                     return cast(list[str], load(index_file_handle))
 
             file_list = [

@@ -91,12 +85,8 @@ class S3Source(Source):
                 for obj in self.minio.list_objects(self.bucket, recursive=True)
                 if os.path.splitext(obj.object_name)[1] in self.extensions
             ]
-            if self.index_file is not None and not os.path.isfile(
-                self.index_file
-            ):
-                with open(
-                    self.index_file, "w", encoding="utf8"
-                ) as index_file_handle:
+            if self.index_file is not None and not os.path.isfile(self.index_file):
+                with open(self.index_file, "w", encoding="utf8") as index_file_handle:
                     dump(file_list, index_file_handle)
             return file_list
 

@@ -119,16 +109,12 @@ class S3Source(Source):
 
         await self.ensure_playable(entry)
 
-        audio_file_name: Optional[str] = self.downloaded_files[
-            entry.ident
-        ].audio
+        audio_file_name: Optional[str] = self.downloaded_files[entry.ident].audio
 
         if audio_file_name is None:
             duration: int = 180
         else:
-            duration = await asyncio.to_thread(
-                mutagen_wrapped, audio_file_name
-            )
+            duration = await asyncio.to_thread(mutagen_wrapped, audio_file_name)
 
         return {"duration": int(duration)}

@@ -201,9 +201,7 @@ class Source:
         filtered: list[str] = self.filter_data_by_query(query, self._index)
         results: list[Result] = []
         for filename in filtered:
-            result: Optional[Result] = Result.from_filename(
-                filename, self.source_name
-            )
+            result: Optional[Result] = Result.from_filename(filename, self.source_name)
             if result is None:
                 continue
             results.append(result)

@@ -364,16 +362,12 @@ class Source:
 
         def contains_all_words(words: list[str], element: str) -> bool:
             for word in words:
-                if not word.lower() in os.path.basename(element).lower():
+                if word.lower() not in os.path.basename(element).lower():
                     return False
             return True
 
         splitquery = shlex.split(query)
-        return [
-            element
-            for element in data
-            if contains_all_words(splitquery, element)
-        ]
+        return [element for element in data if contains_all_words(splitquery, element)]
 
     async def get_file_list(self) -> list[str]:
         """

@@ -411,10 +405,7 @@ class Source:
         self._index = await self.get_file_list()
         print(f"{self.source_name}: done")
         chunked = zip_longest(*[iter(self._index)] * 1000, fillvalue="")
-        return [
-            {"index": list(filter(lambda x: x != "", chunk))}
-            for chunk in chunked
-        ]
+        return [{"index": list(filter(lambda x: x != "", chunk))} for chunk in chunked]
 
     def add_to_config(self, config: dict[str, Any]) -> None:
         """

@@ -57,15 +57,9 @@ class YoutubeSource(Source):
         super().__init__(config)
         self.source_name = "youtube"
 
-        self.innertube_client: innertube.InnerTube = innertube.InnerTube(
-            client="WEB"
-        )
-        self.channels: list[str] = (
-            config["channels"] if "channels" in config else []
-        )
-        self.tmp_dir: str = (
-            config["tmp_dir"] if "tmp_dir" in config else "/tmp/syng"
-        )
+        self.innertube_client: innertube.InnerTube = innertube.InnerTube(client="WEB")
+        self.channels: list[str] = config["channels"] if "channels" in config else []
+        self.tmp_dir: str = config["tmp_dir"] if "tmp_dir" in config else "/tmp/syng"
         self.max_res: int = config["max_res"] if "max_res" in config else 720
         self.start_streaming: bool = (
             config["start_streaming"] if "start_streaming" in config else False

@@ -105,10 +99,7 @@ class YoutubeSource(Source):
         :type entry: Entry
         :rtype: None
         """
-        if (
-            self.start_streaming
-            and not self.downloaded_files[entry.ident].complete
-        ):
+        if self.start_streaming and not self.downloaded_files[entry.ident].complete:
             self.player = await self.play_mpv(
                 entry.ident,
                 None,

@@ -177,9 +168,7 @@ class YoutubeSource(Source):
         """
 
         def _contains_index(query: str, result: YouTube) -> float:
-            compare_string: str = (
-                result.title.lower() + " " + result.author.lower()
-            )
+            compare_string: str = result.title.lower() + " " + result.author.lower()
             hits: int = 0
             queries: list[str] = shlex.split(query.lower())
             for word in queries:

@@ -197,9 +186,7 @@ class YoutubeSource(Source):
             asyncio.to_thread(self._yt_search, query),
         )
         results = [
-            search_result
-            for yt_result in results_lists
-            for search_result in yt_result
+            search_result for yt_result in results_lists for search_result in yt_result
         ]
 
         results.sort(key=partial(_contains_index, query))

@@ -232,9 +219,7 @@ class YoutubeSource(Source):
 
         A lot of black Magic happens here.
         """
-        browse_id: str = Channel(
-            f"https://www.youtube.com{channel}"
-        ).channel_id
+        browse_id: str = Channel(f"https://www.youtube.com{channel}").channel_id
         endpoint: str = f"{self.innertube_client.base_url}/browse"
 
         data: dict[str, str] = {

@@ -248,9 +233,7 @@ class YoutubeSource(Source):
         )
         items: list[dict[str, Any]] = results["contents"][
             "twoColumnBrowseResultsRenderer"
-        ]["tabs"][-1]["expandableTabRenderer"]["content"][
-            "sectionListRenderer"
-        ][
+        ]["tabs"][-1]["expandableTabRenderer"]["content"]["sectionListRenderer"][
             "contents"
         ]

@@ -259,14 +242,13 @@ class YoutubeSource(Source):
             try:
                 if (
                     "itemSectionRenderer" in item
-                    and "videoRenderer"
-                    in item["itemSectionRenderer"]["contents"][0]
+                    and "videoRenderer" in item["itemSectionRenderer"]["contents"][0]
                 ):
                     yt_url: str = (
                         "https://youtube.com/watch?v="
-                        + item["itemSectionRenderer"]["contents"][0][
-                            "videoRenderer"
-                        ]["videoId"]
+                        + item["itemSectionRenderer"]["contents"][0]["videoRenderer"][
+                            "videoId"
+                        ]
                     )
                     author: str = item["itemSectionRenderer"]["contents"][0][
                         "videoRenderer"

@@ -283,9 +265,7 @@ class YoutubeSource(Source):
                 pass
         return list_of_videos
 
-    async def _buffer_with_yt_dlp(
-        self, entry: Entry
-    ) -> Tuple[str, Optional[str]]:
+    async def _buffer_with_yt_dlp(self, entry: Entry) -> Tuple[str, Optional[str]]:
         """
         Download the video using yt-dlp.
 

@@ -329,16 +309,13 @@ class YoutubeSource(Source):
 
         video_streams: StreamQuery = streams.filter(
             type="video",
-            custom_filter_functions=[
-                lambda s: int(s.resolution[:-1]) <= self.max_res
-            ],
+            custom_filter_functions=[lambda s: int(s.resolution[:-1]) <= self.max_res],
         )
         audio_streams: StreamQuery = streams.filter(only_audio=True)
 
         best_video_stream: Stream = sorted(
             video_streams,
-            key=lambda s: int(s.resolution[:-1])
-            + (1 if s.is_progressive else 0),
+            key=lambda s: int(s.resolution[:-1]) + (1 if s.is_progressive else 0),
         )[-1]
         best_audio_stream: Stream = sorted(
             audio_streams, key=lambda s: int(s.abr[:-4])

@@ -27,21 +27,15 @@ async def handle_state(data: dict[str, Any]) -> None:
     print("New Queue")
     for raw_item in data["queue"]:
         item = Entry(**raw_item)
-        print(
-            f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})"
-        )
+        print(f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})")
     print("Waiting Room")
     for raw_item in data["shadow_queue"]:
         item = Entry(**raw_item)
-        print(
-            f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})"
-        )
+        print(f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})")
     print("Recent")
     for raw_item in data["recent"]:
         item = Entry(**raw_item)
-        print(
-            f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})"
-        )
+        print(f"\t{item.performer}: {item.artist} - {item.title} ({item.duration})")
 
 
 @sio.on("msg")