Documented everything. pylint does not complain, mypy is happy, possible race conditions are tweaked a bit, and the code is streamlined a bit
parent db87fafb6c
commit e32457a394
24 changed files with 1381 additions and 394 deletions
docs/source/index.rst
@@ -2,12 +2,16 @@ Welcome to syng's documentation!
================================

.. toctree::
   :maxdepth: 2
   :maxdepth: 3
   :caption: Contents:

   server
   client
   queue
   entry
   result
   json
   sources


Indices and tables
docs/source/json.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
JSON
====

.. automodule:: syng.json
   :members:

docs/source/queue.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
Queue
=====

.. automodule:: syng.queue
   :members:

docs/source/result.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
Result
======

.. automodule:: syng.result
   :members:

docs/source/s3.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
S3
==

.. automodule:: syng.sources.s3
   :members:

docs/source/source.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
Source (Base Class)
===================

.. automodule:: syng.sources.source
   :members:

docs/source/sources.rst (new file, 13 lines)
@@ -0,0 +1,13 @@
Sources
=======

.. toctree::
   :maxdepth: 2
   :caption: Contents:

   source
   youtube
   s3

.. automodule:: syng.sources
   :members:

docs/source/youtube.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
Youtube
=======

.. automodule:: syng.sources.youtube
   :members:
socketio.pyi (28 lines changed)
@@ -1,20 +1,32 @@
from typing import Any, Optional, Awaitable, Callable, TypeVar
from typing import Any
from typing import Awaitable
from typing import Callable
from typing import Optional
from typing import TypeVar

Handler = TypeVar(
    "Handler",
    bound=Callable[[str, dict[str, Any]], Any] | Callable[[str], Any],
)
ClientHandler = TypeVar(
    "ClientHandler", bound=Callable[[dict[str, Any]], Any] | Callable[[], Any]
)

Handler = TypeVar("Handler", bound=Callable[[str, dict[str, Any]], Any])
ClientHandler = TypeVar("ClientHandler", bound=Callable[[dict[str, Any]], Any])


class _session_context_manager:
    async def __aenter__(self) -> dict[str, Any]: ...
    async def __aexit__(self, *args: list[Any]) -> None: ...


class AsyncServer:
    def __init__(
        self, cors_allowed_origins: str, logger: bool, engineio_logger: bool
        self, cors_allowed_origins: str, logger: bool, engineio_logger: bool, json: Any
    ): ...

    async def emit(
        self,
        message: str,
        body: Optional[dict[str, Any]] = None,
        body: Any = None,
        room: Optional[str] = None,
    ) -> None: ...
    def session(self, sid: str) -> _session_context_manager: ...

@@ -23,11 +35,11 @@ class AsyncServer:
    def leave_room(self, sid: str, room: str) -> None: ...
    def attach(self, app: Any) -> None: ...


class AsyncClient:
    def __init__(self, json: Any = None): ...
    def on(self, event: str) -> Callable[[ClientHandler], ClientHandler]: ...
    async def wait(self) -> None: ...
    async def connect(self, server: str) -> None: ...
    async def disconnect(self) -> None: ...
    async def emit(
        self, message: str, data: Optional[dict[str, Any]] = None
    ) -> None: ...
    async def emit(self, message: str, data: Any = None) -> None: ...
syng/PIL.pyi (new file, 0 lines)
syng/client.py
@@ -1,3 +1,33 @@
"""
|
||||
Module for the playback client.
|
||||
|
||||
Excerp from the help::
|
||||
|
||||
usage: client.py [-h] [--room ROOM] [--secret SECRET] [--config-file CONFIG_FILE] server
|
||||
|
||||
positional arguments:
|
||||
server
|
||||
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--room ROOM, -r ROOM
|
||||
--secret SECRET, -s SECRET
|
||||
--config-file CONFIG_FILE, -C CONFIG_FILE
|
||||
|
||||
The config file should be a json file in the following style::
|
||||
|
||||
{
|
||||
"sources": {
|
||||
"SOURCE1": { configuration for SOURCE },
|
||||
"SOURCE2": { configuration for SOURCE },
|
||||
...
|
||||
},
|
||||
},
|
||||
"config": {
|
||||
configuration for the client
|
||||
}
|
||||
}
|
||||
"""
|
||||
import asyncio
|
||||
import datetime
|
||||
import logging
|
||||
|
@ -16,12 +46,13 @@ import pyqrcode
|
|||
import socketio
|
||||
from PIL import Image
|
||||
|
||||
from . import json
|
||||
from .entry import Entry
|
||||
from .sources import configure_sources
|
||||
from .sources import Source
|
||||
|
||||
|
||||
sio: socketio.AsyncClient = socketio.AsyncClient()
|
||||
sio: socketio.AsyncClient = socketio.AsyncClient(json=json)
|
||||
logger: logging.Logger = logging.getLogger(__name__)
|
||||
sources: dict[str, Source] = {}
|
||||
|
||||
|
@ -58,6 +89,8 @@ class State:
|
|||
:type last_song: Optional[datetime.datetime]
|
||||
"""
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
current_source: Optional[Source] = None
|
||||
queue: list[Entry] = field(default_factory=list)
|
||||
recent: list[Entry] = field(default_factory=list)
|
||||
|
@ -88,20 +121,23 @@ state: State = State()
|
|||
|
||||
|
||||
@sio.on("skip-current")
|
||||
async def handle_skip_current(_: dict[str, Any] = {}) -> None:
|
||||
async def handle_skip_current(data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "skip-current" message.
|
||||
|
||||
Skips the song that is currently playing. If playback is currently
waiting for buffering, the buffering is also aborted.
|
||||
|
||||
:param _: Data part of the message, ignored
|
||||
:type _: dict[str, Any]
|
||||
Since the ``queue`` could already be updated when this evaluates, the
first entry in the queue is sent explicitly.
|
||||
|
||||
:param data: An entry, that should be equivalent to the first entry of the
|
||||
queue.
|
||||
:rtype: None
|
||||
"""
|
||||
logger.info("Skipping current")
|
||||
if state.current_source is not None:
|
||||
await state.current_source.skip_current(state.queue[0])
|
||||
await state.current_source.skip_current(Entry(**data))
|
||||
|
||||
|
||||
@sio.on("state")
|
||||
|
@ -129,7 +165,7 @@ async def handle_state(data: dict[str, Any]) -> None:
|
|||
|
||||
|
||||
@sio.on("connect")
|
||||
async def handle_connect(_: dict[str, Any] = {}) -> None:
|
||||
async def handle_connect() -> None:
|
||||
"""
|
||||
Handle the "connect" message.
|
||||
|
||||
|
@ -145,16 +181,14 @@ async def handle_connect(_: dict[str, Any] = {}) -> None:
|
|||
This message will be handled by the
|
||||
:py:func:`syng.server.handle_register_client` function of the server.
|
||||
|
||||
:param _: Data part of the message, ignored
|
||||
:type _: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
logging.info("Connected to server")
|
||||
await sio.emit(
|
||||
"register-client",
|
||||
{
|
||||
"queue": [entry.to_dict() for entry in state.queue],
|
||||
"recent": [entry.to_dict() for entry in state.recent],
|
||||
"queue": state.queue,
|
||||
"recent": state.recent,
|
||||
"room": state.room,
|
||||
"secret": state.secret,
|
||||
"config": state.get_config(),
|
||||
|
@ -167,7 +201,7 @@ async def handle_get_meta_info(data: dict[str, Any]) -> None:
|
|||
"""
|
||||
Handle a "get-meta-info" message.
|
||||
|
||||
Collects the metadata from a given :py:class:`Entry`, from its source, and
|
||||
Collects the metadata for a given :py:class:`Entry`, from its source, and
|
||||
sends them back to the server in a "meta-info" message. On the server side
|
||||
a :py:func:`syng.server.handle_meta_info` function is called.
|
||||
|
||||
|
@ -224,22 +258,33 @@ async def handle_play(data: dict[str, Any]) -> None:
|
|||
:py:attr:`State.preview_duration` is set, it shows a small preview before
|
||||
that.
|
||||
|
||||
When the playback is done, the next song is requested from the server with
|
||||
a "pop-then-get-next" message. This is handled by the
|
||||
:py:func:`syng.server.handle_pop_then_get_next` function on the server.
|
||||
|
||||
If the entry is marked as skipped, emit a "get-first" message instead,
|
||||
because the server already handled the removal of the first entry.
|
||||
|
||||
:param data: A dictionary encoding the entry
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
entry: Entry = Entry(**data)
|
||||
print(
|
||||
f"Playing: {entry.artist} - {entry.title} [{entry.album}] ({entry.source}) for {entry.performer}"
|
||||
f"Playing: {entry.artist} - {entry.title} [{entry.album}] "
|
||||
f"({entry.source}) for {entry.performer}"
|
||||
)
|
||||
try:
|
||||
state.current_source = sources[entry.source]
|
||||
if state.preview_duration > 0:
|
||||
await preview(entry)
|
||||
await sources[entry.source].play(entry)
|
||||
except Exception:
|
||||
except Exception: # pylint: disable=broad-except
|
||||
print_exc()
|
||||
state.current_source = None
|
||||
if entry.skip:
|
||||
await sio.emit("get-first")
|
||||
else:
|
||||
await sio.emit("pop-then-get-next")
|
||||
|
||||
|
||||
|
@ -254,12 +299,12 @@ async def handle_client_registered(data: dict[str, Any]) -> None:
|
|||
|
||||
Start sending the list of all configured
:py:class:`syng.sources.source.Source` objects to the server via a
"sources" message. This message will be handled by the
|
||||
:py:func:`server.handle_sources` function and may request additional
|
||||
:py:func:`syng.server.handle_sources` function and may request additional
|
||||
configuration for each source.
|
||||
|
||||
If there is no song playing, start requesting the first song of the queue
|
||||
with a "get-first" message. This will be handled on the server by the
|
||||
:py:func:`server.handle_get_first` function.
|
||||
:py:func:`syng.server.handle_get_first` function.
|
||||
|
||||
:param data: A dictionary containing a `success` and a `room` entry.
|
||||
:type data: dict[str, Any]
|
||||
|
@ -366,5 +411,12 @@ async def aiomain() -> None:
|
|||
await sio.wait()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
def main() -> None:
|
||||
"""
|
||||
Entry point for the syng-client script.
|
||||
"""
|
||||
asyncio.run(aiomain())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
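For orientation, a minimal sketch (not part of the diff) of the ``@sio.on`` registration pattern this client module relies on throughout; the event name, handler body and server URL are illustrative::

    import asyncio
    import socketio

    from syng import json  # the encoder wrapper added in this commit

    sio = socketio.AsyncClient(json=json)

    @sio.on("play")
    async def handle_play(data: dict) -> None:
        # Called whenever the server emits a "play" message to this client.
        print("would now play:", data)

    async def aiomain() -> None:
        await sio.connect("http://localhost:8080")  # illustrative server URL
        await sio.wait()

    if __name__ == "__main__":
        asyncio.run(aiomain())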
|
|
syng/entry.py
@@ -1,16 +1,52 @@
|
|||
"""
|
||||
Module for the entry of the queue.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, field
|
||||
from uuid import uuid4, UUID
|
||||
from typing import TYPE_CHECKING, Any, Optional
|
||||
from datetime import datetime
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .sources import Source
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import field
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
@dataclass
|
||||
class Entry:
|
||||
id: str
|
||||
"""This represents a song in the queue.
|
||||
|
||||
:param ident: An identifier, that uniquely identifies the song in its source.
|
||||
:type ident: str
|
||||
:param source: The name of the source, this will be played from.
|
||||
:type source: str
|
||||
:param duration: The duration of the song in seconds.
|
||||
:type duration: int
|
||||
:param title: The title of the song.
|
||||
:type title: str
|
||||
:param artist: The name of the original artist.
|
||||
:type artist: str
|
||||
:param album: The name of the album or compilation, this particular
|
||||
version is from.
|
||||
:type album: str
|
||||
:param performer: The person, that will sing this song.
|
||||
:type performer: str
|
||||
:param failed: A flag that indicates that something went wrong, e.g.
    buffering was canceled, the file could not be read from disk, etc.
    The exact meaning can differ from source to source. Default is false.
:type failed: bool
:param skip: A flag indicating that this song is marked for skipping.
:type skip: bool
|
||||
:param uuid: The UUID, that identifies this exact entry in the queue.
|
||||
Will be automatically assigned on creation.
|
||||
:type uuid: UUID
|
||||
:param started_at: The timestamp this entry began playing. ``None``, if it
|
||||
is yet to be played.
|
||||
:type started_at: Optional[float]
|
||||
"""
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
ident: str
|
||||
source: str
|
||||
duration: int
|
||||
title: str
|
||||
|
@ -22,27 +58,12 @@ class Entry:
|
|||
uuid: UUID = field(default_factory=uuid4)
|
||||
started_at: Optional[float] = None
|
||||
|
||||
@staticmethod
|
||||
async def from_source(performer: str, ident: str, source: Source) -> Entry:
|
||||
return await source.get_entry(performer, ident)
|
||||
|
||||
def to_dict(self) -> dict[str, Any]:
|
||||
return {
|
||||
"uuid": str(self.uuid),
|
||||
"id": self.id,
|
||||
"source": self.source,
|
||||
"duration": self.duration,
|
||||
"title": self.title,
|
||||
"artist": self.artist,
|
||||
"album": self.album,
|
||||
"performer": self.performer,
|
||||
"skip": self.skip,
|
||||
"started_at": self.started_at,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def from_dict(entry_dict: dict[str, Any]) -> Entry:
|
||||
return Entry(**entry_dict)
|
||||
|
||||
def update(self, **kwargs: Any) -> None:
|
||||
"""
|
||||
Update the attributes with given substitutions.
|
||||
|
||||
:param \\*\\*kwargs: Keywords taken from the list of attributes.
|
||||
:type \\*\\*kwargs: Any
|
||||
:rtype: None
|
||||
"""
|
||||
self.__dict__.update(kwargs)
|
||||
|
|
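A short usage sketch of the trimmed-down ``Entry`` dataclass; the field values are made up, and it assumes ``failed`` and ``skip`` keep their documented ``False`` defaults::

    from syng.entry import Entry

    # A source would construct an entry like this; uuid/started_at get defaults.
    entry = Entry(
        ident="ABBA - Dancing Queen - Arrival.cdg",
        source="s3",
        duration=180,
        title="Dancing Queen",
        artist="ABBA",
        album="Arrival",
        performer="Alice",
    )

    # Metadata reported later by the playback client is merged in via update().
    entry.update(duration=232)
    print(entry.duration)  # 232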
syng/json.py (new file, 44 lines)
@@ -0,0 +1,44 @@
"""
Wraps the ``json`` module, so that our own classes get encoded.
"""
import json
from dataclasses import asdict
from typing import Any
from uuid import UUID

from .entry import Entry
from .queue import Queue
from .result import Result


class SyngEncoder(json.JSONEncoder):
    """
    Encoder for :py:class:`Entry`, :py:class:`Queue`, :py:class:`Result` and UUID.

    Entry and Result are ``dataclasses``, so they are mapped to their
    dictionary representation.

    UUID is represented by its string, and Queue will be represented by a list.
    """

    def default(self, o: Any) -> Any:
        """Implement the encoding."""
        if isinstance(o, Entry):
            return asdict(o)
        if isinstance(o, UUID):
            return str(o)
        if isinstance(o, Result):
            return asdict(o)
        if isinstance(o, Queue):
            return o.to_list()
        return json.JSONEncoder.default(self, o)


def dumps(obj: Any, **kw: Any) -> str:
    """Wrap around ``json.dumps`` with the :py:class:`SyngEncoder`."""
    return json.dumps(obj, cls=SyngEncoder, **kw)


def loads(string: str, **kw: Any) -> Any:
    """Forward everything to ``json.loads``."""
    return json.loads(string, **kw)
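A quick round-trip sketch of the wrapper (this is what makes it usable as the ``json=`` argument to socketio); the entry values are illustrative::

    from uuid import uuid4

    from syng import json
    from syng.entry import Entry

    entry = Entry(
        ident="some/file.cdg",
        source="s3",
        duration=180,
        title="Title",
        artist="Artist",
        album="Album",
        performer="Bob",
    )

    # dumps() goes through SyngEncoder, so dataclasses and UUIDs serialize
    # transparently instead of raising TypeError.
    payload = json.dumps({"queue": [entry], "token": uuid4()})

    # loads() is plain json.loads; receivers rebuild objects explicitly,
    # e.g. Entry(**data) as the playback client does for "play" messages.
    data = json.loads(payload)
    print(data["queue"][0]["title"])  # "Title"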
syng/queue.py (new file, 159 lines)
@@ -0,0 +1,159 @@
|
|||
"""
|
||||
A async queue with synchronization.
|
||||
"""
|
||||
import asyncio
|
||||
from collections import deque
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from .entry import Entry
|
||||
|
||||
|
||||
class Queue:
|
||||
"""A async queue with synchronization.
|
||||
|
||||
This queue keeps track of the amount of entries by using a semaphore.
|
||||
|
||||
:param initial_entries: Initial list of entries to add to the queue
|
||||
:type initial_entries: list[Entry]
|
||||
"""
|
||||
|
||||
def __init__(self, initial_entries: list[Entry]):
|
||||
"""
|
||||
Construct the queue. And initialize the internal lock and semaphore.
|
||||
|
||||
:param initial_entries: Initial list of entries to add to the queue
|
||||
:type initial_entries: list[Entry]
|
||||
"""
|
||||
self._queue = deque(initial_entries)
|
||||
|
||||
self.num_of_entries_sem = asyncio.Semaphore(len(self._queue))
|
||||
self.readlock = asyncio.Lock()
|
||||
|
||||
def append(self, entry: Entry) -> None:
|
||||
"""
|
||||
Append an entry to the queue, increase the semaphore.
|
||||
|
||||
:param entry: The entry to add
|
||||
:type entry: Entry
|
||||
:rtype: None
|
||||
"""
|
||||
self._queue.append(entry)
|
||||
self.num_of_entries_sem.release()
|
||||
|
||||
def try_peek(self) -> Optional[Entry]:
|
||||
"""Return the first entry in the queue, if it exists."""
|
||||
if len(self._queue) > 0:
|
||||
return self._queue[0]
|
||||
return None
|
||||
|
||||
async def peek(self) -> Entry:
|
||||
"""
|
||||
Return the first entry in the queue.
|
||||
|
||||
If the queue is empty, wait until the queue has at least one entry.
|
||||
|
||||
:returns: First entry of the queue
|
||||
:rtype: Entry
|
||||
"""
|
||||
async with self.readlock:
|
||||
await self.num_of_entries_sem.acquire()
|
||||
item = self._queue[0]
|
||||
self.num_of_entries_sem.release()
|
||||
return item
|
||||
|
||||
async def popleft(self) -> Entry:
|
||||
"""
|
||||
Remove the first entry in the queue and return it.
|
||||
|
||||
Decreases the semaphore. If the queue is empty, wait until the queue
|
||||
has at least one entry.
|
||||
|
||||
:returns: First entry of the queue
|
||||
:rtype: Entry
|
||||
"""
|
||||
async with self.readlock:
|
||||
await self.num_of_entries_sem.acquire()
|
||||
item = self._queue.popleft()
|
||||
return item
|
||||
|
||||
def to_list(self) -> list[Entry]:
|
||||
"""
|
||||
Return all entries in a list.
|
||||
|
||||
This is done, so that the entries can be converted to a JSON object,
|
||||
when sending it to the web or playback client.
|
||||
|
||||
:returns: A list with all the entries.
|
||||
:rtype: list[Entry]
|
||||
"""
|
||||
return list(self._queue) # [item for item in self._queue]
|
||||
|
||||
def update(self, uuid: UUID | str, updater: Callable[[Entry], None]) -> None:
|
||||
"""
|
||||
Update entries in the queue, identified by their uuid.
|
||||
|
||||
:param uuid: The uuid of the entry to update
|
||||
:type uuid: UUID | str
|
||||
:param updater: A function, that updates the entry
|
||||
:type updater: Callable[[Entry], None]
|
||||
:rtype: None
|
||||
"""
|
||||
for item in self._queue:
|
||||
if item.uuid == uuid or str(item.uuid) == uuid:
|
||||
updater(item)
|
||||
|
||||
def find_by_uuid(self, uuid: UUID | str) -> Optional[Entry]:
|
||||
"""
|
||||
Find an entry by its uuid and return it.
|
||||
|
||||
:param uuid: The uuid to search for.
|
||||
:type uuid: UUID | str
|
||||
:returns: The entry with the uuid or `None` if no such entry exists
|
||||
:rtype: Optional[Entry]
|
||||
"""
|
||||
for item in self._queue:
|
||||
if item.uuid == uuid or str(item.uuid) == uuid:
|
||||
return item
|
||||
return None
|
||||
|
||||
def fold(self, func: Callable[[Entry, Any], Any], start_value: Any) -> Any:
|
||||
"""Call ``func`` on each entry and accumulate the result."""
|
||||
for item in self._queue:
|
||||
start_value = func(item, start_value)
|
||||
return start_value
|
||||
|
||||
async def remove(self, entry: Entry) -> None:
|
||||
"""
|
||||
Remove an entry, if it exists. Decrease the semaphore.
|
||||
|
||||
:param entry: The entry to remove
|
||||
:type entry: Entry
|
||||
:rtype: None
|
||||
"""
|
||||
async with self.readlock:
|
||||
await self.num_of_entries_sem.acquire()
|
||||
self._queue.remove(entry)
|
||||
|
||||
async def move_up(self, uuid: str) -> None:
|
||||
"""
|
||||
Move an :py:class:`syng.entry.Entry` with the uuid up in the queue.
|
||||
|
||||
If it is called on the first two elements, nothing will happen.
|
||||
|
||||
:param uuid: The uuid of the entry.
|
||||
:type uuid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with self.readlock:
|
||||
uuid_idx = 0
|
||||
for idx, item in enumerate(self._queue):
|
||||
if item.uuid == uuid or str(item.uuid) == uuid:
|
||||
uuid_idx = idx
|
||||
|
||||
if uuid_idx > 1:
|
||||
tmp = self._queue[uuid_idx]
|
||||
self._queue[uuid_idx] = self._queue[uuid_idx - 1]
|
||||
self._queue[uuid_idx - 1] = tmp
|
|
syng/result.py
@@ -1,28 +1,56 @@
|
|||
"""
|
||||
Module for search results
|
||||
"""
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Any
|
||||
from typing import Optional
|
||||
import os.path
|
||||
|
||||
|
||||
@dataclass
|
||||
class Result:
|
||||
id: str
|
||||
"""This models a search result.
|
||||
|
||||
:param ident: The identifier of the entry in the source
|
||||
:type ident: str
|
||||
:param source: The name of the source of the entry
|
||||
:type source: str
|
||||
:param title: The title of the song
|
||||
:type title: str
|
||||
:param artist: The artist of the song
|
||||
:type artist: str
|
||||
:param album: The name of the album or compilation, this particular
|
||||
version is from.
|
||||
:type album: str
|
||||
"""
|
||||
|
||||
ident: str
|
||||
source: str
|
||||
title: str
|
||||
artist: str
|
||||
album: str
|
||||
|
||||
def to_dict(self) -> dict[str, Any]:
|
||||
return {
|
||||
"id": self.id,
|
||||
"source": self.source,
|
||||
"title": self.title,
|
||||
"artist": self.artist,
|
||||
"album": self.album,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def from_filename(filename: str, source: str) -> Optional[Result]:
|
||||
"""
|
||||
Infer most attributes from the filename.
|
||||
|
||||
The filename must be in this form::
|
||||
|
||||
{artist} - {title} - {album}.cdg
|
||||
|
||||
Although the extension (cdg) is not required
|
||||
|
||||
If parsing fails, ``None`` is returned. Otherwise a Result object with
|
||||
those attributes is created.
|
||||
|
||||
:param filename: The filename to parse
|
||||
:type filename: str
|
||||
:param source: The name of the source
|
||||
:type source: str
|
||||
:return: see above
|
||||
:rtype: Optional[Result]
|
||||
"""
|
||||
try:
|
||||
splitfile = os.path.basename(filename[:-4]).split(" - ")
|
||||
ident = filename
|
||||
|
|
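For intuition, a stand-alone sketch of the ``{artist} - {title} - {album}.cdg`` split that ``from_filename`` performs; the actual implementation is cut off above, so this is only an approximation::

    import os.path
    from typing import Optional, Tuple

    def parse_cdg_name(filename: str) -> Optional[Tuple[str, str, str]]:
        """Split 'Artist - Title - Album.cdg' into its parts, or return None."""
        basename = os.path.basename(filename)
        if basename.lower().endswith(".cdg"):
            basename = basename[:-4]
        parts = basename.split(" - ")
        if len(parts) != 3:
            return None  # mirrors from_filename returning None on failure
        artist, title, album = parts
        return artist, title, album

    print(parse_cdg_name("ABBA - Dancing Queen - Arrival.cdg"))
    # ('ABBA', 'Dancing Queen', 'Arrival')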
syng/server.py (594 lines changed)
@@ -1,3 +1,16 @@
|
|||
"""
|
||||
Module for the Server.
|
||||
|
||||
Starts an async socketio server and serves the web client::
|
||||
|
||||
usage: server.py [-h] [--host HOST] [--port PORT]
|
||||
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--host HOST, -H HOST
|
||||
--port PORT, -p PORT
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
@ -6,22 +19,22 @@ import logging
|
|||
import random
|
||||
import string
|
||||
from argparse import ArgumentParser
|
||||
from collections import deque
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
import socketio
|
||||
from aiohttp import web
|
||||
|
||||
from . import json
|
||||
from .entry import Entry
|
||||
from .queue import Queue
|
||||
from .sources import available_sources
|
||||
from .sources import Source
|
||||
|
||||
sio = socketio.AsyncServer(cors_allowed_origins="*",
|
||||
logger=True, engineio_logger=False)
|
||||
sio = socketio.AsyncServer(
|
||||
cors_allowed_origins="*", logger=True, engineio_logger=False, json=json
|
||||
)
|
||||
app = web.Application()
|
||||
sio.attach(app)
|
||||
|
||||
|
@ -47,145 +60,26 @@ logging.basicConfig(level=logging.INFO)
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Queue:
|
||||
"""A async queue with synchronization.
|
||||
|
||||
This queue keeps track of the amount of entries by using a semaphore.
|
||||
|
||||
:param initial_entries: Initial list of entries to add to the queue
|
||||
:type initial_entries: list[Entry]
|
||||
"""
|
||||
|
||||
def __init__(self, initial_entries: list[Entry]):
|
||||
"""
|
||||
Construct the queue. And initialize the internal lock and semaphore.
|
||||
|
||||
:param initial_entries: Initial list of entries to add to the queue
|
||||
:type initial_entries: list[Entry]
|
||||
"""
|
||||
self._queue = deque(initial_entries)
|
||||
|
||||
self.num_of_entries_sem = asyncio.Semaphore(len(self._queue))
|
||||
self.readlock = asyncio.Lock()
|
||||
|
||||
def append(self, entry: Entry) -> None:
|
||||
"""
|
||||
Append an entry to the queue, increase the semaphore.
|
||||
|
||||
:param entry: The entry to add
|
||||
:type entry: Entry
|
||||
:rtype: None
|
||||
"""
|
||||
self._queue.append(entry)
|
||||
self.num_of_entries_sem.release()
|
||||
|
||||
async def peek(self) -> Entry:
|
||||
"""
|
||||
Return the first entry in the queue.
|
||||
|
||||
If the queue is empty, wait until the queue has at least one entry.
|
||||
|
||||
:returns: First entry of the queue
|
||||
:rtype: Entry
|
||||
"""
|
||||
async with self.readlock:
|
||||
await self.num_of_entries_sem.acquire()
|
||||
item = self._queue[0]
|
||||
self.num_of_entries_sem.release()
|
||||
return item
|
||||
|
||||
async def popleft(self) -> Entry:
|
||||
"""
|
||||
Remove the first entry in the queue and return it.
|
||||
|
||||
Decreases the semaphore. If the queue is empty, wait until the queue
|
||||
has at least one entry.
|
||||
|
||||
:returns: First entry of the queue
|
||||
:rtype: Entry
|
||||
"""
|
||||
async with self.readlock:
|
||||
await self.num_of_entries_sem.acquire()
|
||||
item = self._queue.popleft()
|
||||
return item
|
||||
|
||||
def to_dict(self) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Forward the to_dict request to all entries and return it in a list.
|
||||
|
||||
This is done, so that the entries can be converted to a JSON object,
|
||||
when sending it to the web or playback client.
|
||||
|
||||
:returns: A list with dictionaries, that encode the enties in the
|
||||
queue.
|
||||
:rtype: list[dict[str, Any]]
|
||||
"""
|
||||
return [item.to_dict() for item in self._queue]
|
||||
|
||||
def update(self, uuid: UUID | str, updater: Callable[[Entry], None]) -> None:
|
||||
"""
|
||||
Update entries in the queue, identified by their uuid.
|
||||
|
||||
:param uuid: The uuid of the entry to update
|
||||
:type uuid: UUID | str
|
||||
:param updater: A function, that updates the entry
|
||||
:type updater: Callable[[Entry], None]
|
||||
:rtype: None
|
||||
"""
|
||||
for item in self._queue:
|
||||
if item.uuid == uuid or str(item.uuid) == uuid:
|
||||
updater(item)
|
||||
|
||||
def find_by_uuid(self, uuid: UUID | str) -> Optional[Entry]:
|
||||
"""
|
||||
Find an entry by its uuid and return it.
|
||||
|
||||
:param uuid: The uuid to search for.
|
||||
:type uuid: UUID | str
|
||||
:returns: The entry with the uuid or `None` if no such entry exists
|
||||
:rtype: Optional[Entry]
|
||||
"""
|
||||
for item in self._queue:
|
||||
if item.uuid == uuid or str(item.uuid) == uuid:
|
||||
return item
|
||||
return None
|
||||
|
||||
async def remove(self, entry: Entry) -> None:
|
||||
"""
|
||||
Remove an entry, if it exists. Decrease the semaphore.
|
||||
|
||||
:param entry: The entry to remove
|
||||
:type entry: Entry
|
||||
:rtype: None
|
||||
"""
|
||||
async with self.readlock:
|
||||
await self.num_of_entries_sem.acquire()
|
||||
self._queue.remove(entry)
|
||||
|
||||
async def move_up(self, uuid: str) -> None:
|
||||
"""
|
||||
Move an :py:class:`syng.entry.Entry` with the uuid up in the queue.
|
||||
|
||||
If it is called on the first two elements, nothing will happen.
|
||||
|
||||
:param uuid: The uuid of the entry.
|
||||
:type uuid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with self.readlock:
|
||||
uuid_idx = 0
|
||||
for idx, item in enumerate(self._queue):
|
||||
if item.uuid == uuid or str(item.uuid) == uuid:
|
||||
uuid_idx = idx
|
||||
|
||||
if uuid_idx > 1:
|
||||
tmp = self._queue[uuid_idx]
|
||||
self._queue[uuid_idx] = self._queue[uuid_idx - 1]
|
||||
self._queue[uuid_idx - 1] = tmp
|
||||
|
||||
|
||||
@dataclass
|
||||
class Config:
|
||||
"""This stores the configuration of a specific playback client.
|
||||
|
||||
In case a new playback client connects to a room, these values can be
|
||||
overwritten.
|
||||
|
||||
:param sources: A dictionary mapping the name of the used sources to their
|
||||
instances.
|
||||
:type sources: Source
|
||||
:param sources_prio: A list defining the order of the search results.
|
||||
:type sources_prio: list[str]
|
||||
:param preview_duration: The duration in seconds the playback client shows
|
||||
a preview for the next song. This is accounted for in the calculation
|
||||
of the ETA for songs later in the queue.
|
||||
:type preview_duration: int
|
||||
:param last_song: A timestamp, defining the end of the queue.
|
||||
:type last_song: Optional[float]
|
||||
"""
|
||||
|
||||
sources: dict[str, Source]
|
||||
sources_prio: list[str]
|
||||
preview_duration: int
|
||||
|
@ -194,7 +88,26 @@ class Config:
|
|||
|
||||
@dataclass
|
||||
class State:
|
||||
secret: str | None
|
||||
"""This defines the state of one session/room.
|
||||
|
||||
:param secret: The secret for the room. Used to log in as an admin on the
|
||||
web client or reconnect a playback client
|
||||
:type secret: str
|
||||
:param queue: A queue of :py:class:`syng.entry.Entry` objects. New songs
|
||||
are appended to this, and if a playback client requests a song, it is
|
||||
taken from the top.
|
||||
:type queue: Queue
|
||||
:param recent: A list of already played songs in order.
|
||||
:type recent: list[Entry]
|
||||
:param sid: The socket.io session id of the (unique) playback client. Once
|
||||
a new playback client connects to a room (with the correct secret),
|
||||
this will be swapped with the new sid.
|
||||
:type sid: str
|
||||
:param config: The config for the client
|
||||
:type config: Config
|
||||
"""
|
||||
|
||||
secret: str
|
||||
queue: Queue
|
||||
recent: list[Entry]
|
||||
sid: str
|
||||
|
@ -205,18 +118,41 @@ clients: dict[str, State] = {}
|
|||
|
||||
|
||||
async def send_state(state: State, sid: str) -> None:
|
||||
"""
|
||||
Send the current state (queue and recent-list) to sid.
|
||||
|
||||
This sends a "state" message. This can be received either by the playback
|
||||
client, a web client or the whole room.
|
||||
|
||||
If it is sent to a playback client, it will be handled by the
|
||||
:py:func:`syng.client.handle_state` function.
|
||||
|
||||
:param state: The state to send
|
||||
:type state: State
|
||||
:param sid: The recipient of the "state" message
|
||||
:type sid: str:
|
||||
:rtype: None
|
||||
"""
|
||||
await sio.emit(
|
||||
"state",
|
||||
{
|
||||
"queue": state.queue.to_dict(),
|
||||
"recent": [entry.to_dict() for entry in state.recent],
|
||||
},
|
||||
{"queue": state.queue, "recent": state.recent},
|
||||
room=sid,
|
||||
)
|
||||
|
||||
|
||||
@sio.on("get-state")
|
||||
async def handle_state(sid: str, data: dict[str, Any] = {}) -> None:
|
||||
async def handle_state(sid: str) -> None:
|
||||
"""
|
||||
Handle the "get-state" message.
|
||||
|
||||
Sends the current state to whoever requests it. This fails if the sender
|
||||
is not part of any room.
|
||||
|
||||
:param sid: The initial sender, and therefore recipient of the "state"
|
||||
message
|
||||
:type sid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
@ -226,24 +162,52 @@ async def handle_state(sid: str, data: dict[str, Any] = {}) -> None:
|
|||
|
||||
@sio.on("append")
|
||||
async def handle_append(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "append" message.
|
||||
|
||||
This should be called from a web client. Appends the entry, that is encoded
|
||||
within the data to the room the client is currently connected to. An entry
|
||||
constructed this way, will be given a UUID, to differentiate it from other
|
||||
entries for the same song.
|
||||
|
||||
If the room is configured to no longer accept songs past a certain time
|
||||
(via the :py:attr:`Config.last_song` attribute), it is checked, if the
|
||||
start time of the song would exceed this time. If this is the case, the
|
||||
request is denied and a "msg" message is sent to the client, detailing
|
||||
this.
|
||||
|
||||
Otherwise the song is added to the queue. And all connected clients (web
|
||||
and playback client) are informed of the new state with a "state" message.
|
||||
|
||||
Since some properties of a song can only be accessed on the playback
|
||||
client, a "get-meta-info" message is send to the playback client. This is
|
||||
handled there with the :py:func:`syng.client.handle_get_meta_info`
|
||||
function.
|
||||
|
||||
:param sid: The session id of the client sending this request
|
||||
:type sid: str
|
||||
:param data: A dictionary encoding the entry, that should be added to the
|
||||
queue.
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
||||
source_obj = state.config.sources[data["source"]]
|
||||
entry = await Entry.from_source(data["performer"], data["id"], source_obj)
|
||||
entry = await source_obj.get_entry(data["performer"], data["ident"])
|
||||
|
||||
first_song = state.queue._queue[0] if len(state.queue._queue) > 0 else None
|
||||
first_song = state.queue.try_peek()
|
||||
if first_song is None or first_song.started_at is None:
|
||||
start_time = datetime.datetime.now().timestamp()
|
||||
else:
|
||||
start_time = first_song.started_at
|
||||
|
||||
for item in state.queue._queue:
|
||||
start_time += item.duration + state.config.preview_duration + 1
|
||||
|
||||
print(state.config.last_song)
|
||||
print(start_time)
|
||||
start_time = state.queue.fold(
|
||||
lambda item, time: time + item.duration + state.config.preview_duration + 1,
|
||||
start_time,
|
||||
)
|
||||
|
||||
if state.config.last_song:
|
||||
if state.config.last_song < start_time:
|
||||
|
@ -262,13 +226,29 @@ async def handle_append(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
await sio.emit(
|
||||
"get-meta-info",
|
||||
entry.to_dict(),
|
||||
entry,
|
||||
room=clients[room].sid,
|
||||
)
|
||||
|
||||
|
||||
@sio.on("meta-info")
|
||||
async def handle_meta_info(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "meta-info" message.
|
||||
|
||||
Updates a :py:class:`syng.entry.Entry` (encoded in the data parameter) in
the queue of the room the requesting client belongs to, with new metadata
sent from the playback client.
|
||||
|
||||
Afterwards send the updated queue to all members of the room.
|
||||
|
||||
:param sid: The session id of the client sending this request.
|
||||
:type sid: str
|
||||
:param data: A dictionary encoding the entry to update (already with the
|
||||
new metadata)
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
@ -282,7 +262,24 @@ async def handle_meta_info(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
|
||||
@sio.on("get-first")
|
||||
async def handle_get_first(sid: str, data: dict[str, Any] = {}) -> None:
|
||||
async def handle_get_first(sid: str) -> None:
|
||||
"""
|
||||
Handle the "get-first" message.
|
||||
|
||||
This message is sent by the playback client once it has connected. It
should only be sent for the initial song. Each subsequent song should be
requested with a "pop-then-get-next" message (see
|
||||
:py:func:`handle_pop_then_get_next`).
|
||||
|
||||
If no songs are in the queue for this room, this function waits until one
|
||||
is available, then notes its starting time and sends it back to the
|
||||
playback client in a "play" message. This will be handled by the
|
||||
:py:func:`syng.client.handle_play` function.
|
||||
|
||||
:param sid: The session id of the requesting client
|
||||
:type sid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
@ -290,15 +287,35 @@ async def handle_get_first(sid: str, data: dict[str, Any] = {}) -> None:
|
|||
current = await state.queue.peek()
|
||||
current.started_at = datetime.datetime.now().timestamp()
|
||||
|
||||
await sio.emit("play", current.to_dict(), room=sid)
|
||||
await sio.emit("play", current, room=sid)
|
||||
|
||||
|
||||
@sio.on("pop-then-get-next")
|
||||
async def handle_pop_then_get_next(sid: str, data: dict[str, Any] = {}) -> None:
|
||||
async def handle_pop_then_get_next(sid: str) -> None:
|
||||
"""
|
||||
Handle the "pop-then-get-next" message.
|
||||
|
||||
This function acts similar to the :py:func:`handle_get_first` function. The
|
||||
main difference is, that prior to sending a song to the playback client,
|
||||
the first element of the queue is discarded.
|
||||
|
||||
Afterwards it follows the same steps as the handler for the "play" message,
|
||||
get the first element of the queue, annotate it with the current time,
|
||||
update everyone's state and send the entry to the playback client in a
|
||||
"play" message. This will be handled by the
|
||||
:py:func:`syng.client.handle_play` function.
|
||||
|
||||
:param sid: The session id of the requesting playback client
|
||||
:type sid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
||||
if sid != state.sid:
|
||||
return
|
||||
|
||||
old_entry = await state.queue.popleft()
|
||||
state.recent.append(old_entry)
|
||||
|
||||
|
@ -307,26 +324,59 @@ async def handle_pop_then_get_next(sid: str, data: dict[str, Any] = {}) -> None:
|
|||
current.started_at = datetime.datetime.now().timestamp()
|
||||
await send_state(state, room)
|
||||
|
||||
await sio.emit("play", current.to_dict(), room=sid)
|
||||
|
||||
|
||||
def gen_id(length: int = 4) -> str:
|
||||
client_id = "".join([random.choice(string.ascii_letters)
|
||||
for _ in range(length)])
|
||||
if client_id in clients:
|
||||
client_id = gen_id(length + 1)
|
||||
return client_id
|
||||
await sio.emit("play", current, room=sid)
|
||||
|
||||
|
||||
@sio.on("register-client")
|
||||
async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
[TODO:description]
|
||||
Handle the "register-client" message.
|
||||
|
||||
:param sid str: [TODO:description]
|
||||
:param data dict[str, Any]: [TODO:description]
|
||||
:rtype None: [TODO:description]
|
||||
The data dictionary should have the following keys:
|
||||
- `room` (Optional), the requested room
|
||||
- `config`, a dictionary of initial configurations
|
||||
- `queue`, a list of initial entries for the queue. The entries are
|
||||
encoded as a dictionary.
|
||||
- `recent`, a list of initial entries for the recent list. The entries
|
||||
are encoded as a dictionary.
|
||||
- `secret`, the secret of the room
|
||||
|
||||
This will register a new playback client to a specific room. If there
|
||||
already exists a playback client registered for this room, this
|
||||
playback client will be replaced if and only if, the new playback
|
||||
client has the same secret.
|
||||
|
||||
If no room is provided, a fresh room id is generated.
|
||||
|
||||
If the client provides a new room, or a new room id was generated, the
|
||||
server will create a new :py:class:`State` object and associate it with
|
||||
the room id. The state will be initialized with a queue and recent
|
||||
list, an initial config as well as no sources (yet).
|
||||
|
||||
In any case, the client will be notified of the success or failure, along
|
||||
with its assigned room key via a "client-registered" message. This will be
|
||||
handled by the :py:func:`syng.client.handle_client_registered` function.
|
||||
|
||||
If it was successfully registered, the client will be added to its assigned
or requested room.

Afterwards all clients in the room will be sent the current state.
|
||||
|
||||
:param sid: The session id of the requesting playback client.
|
||||
:type sid: str
|
||||
:param data: A dictionary with the keys described above
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
def gen_id(length: int = 4) -> str:
|
||||
client_id = "".join(
|
||||
[random.choice(string.ascii_letters) for _ in range(length)]
|
||||
)
|
||||
if client_id in clients:
|
||||
client_id = gen_id(length + 1)
|
||||
return client_id
|
||||
|
||||
room: str = data["room"] if "room" in data and data["room"] else gen_id()
|
||||
async with sio.session(sid) as session:
|
||||
session["room"] = room
|
||||
|
@ -372,14 +422,32 @@ async def handle_register_client(sid: str, data: dict[str, Any]) -> None:
|
|||
@sio.on("sources")
|
||||
async def handle_sources(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Get the list of sources the client wants to use.
|
||||
Update internal list of sources, remove unused
|
||||
sources and query for a config for all uninitialized sources
|
||||
Handle the "sources" message.
|
||||
|
||||
Get the list of sources the client wants to use. Update internal list of
|
||||
sources, remove unused sources and query for a config for all uninitialized
|
||||
sources by sending a "request-config" message for each such source to the
|
||||
playback client. This will be handled by the
|
||||
:py:func:`syng.client.request-config` function.
|
||||
|
||||
This will not yet add the sources to the configuration, rather gather what
|
||||
sources need to be configured and request their configuration. The list
|
||||
of sources will set the :py:attr:`Config.sources_prio` attribute.
|
||||
|
||||
:param sid: The session id of the playback client
|
||||
:type sid: str
|
||||
:param data: A dictionary containing a "sources" key, with the list of
|
||||
sources to use.
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
||||
if sid != state.sid:
|
||||
return
|
||||
|
||||
unused_sources = state.config.sources.keys() - data["sources"]
|
||||
new_sources = data["sources"] - state.config.sources.keys()
|
||||
|
||||
|
@ -394,10 +462,29 @@ async def handle_sources(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
@sio.on("config-chunk")
|
||||
async def handle_config_chung(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "config-chunk" message.
|
||||
|
||||
This is called, when a source wants its configuration transmitted in
|
||||
chunks, rather than a single message. If the source already exist
|
||||
(e.g. when this is not the first chunk), the config will be added
|
||||
to the source, otherwise a source will be created with the given
|
||||
configuration.
|
||||
|
||||
:param sid: The session id of the playback client
|
||||
:type sid: str
|
||||
:param data: A dictionary with a "source" (str) and a
|
||||
"config" (dict[str, Any]) entry. The exact content of the config entry
|
||||
depends on the source.
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
||||
if sid != state.sid:
|
||||
return
|
||||
|
||||
if not data["source"] in state.config.sources:
|
||||
state.config.sources[data["source"]] = available_sources[data["source"]](
|
||||
data["config"]
|
||||
|
@ -408,10 +495,28 @@ async def handle_config_chung(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
@sio.on("config")
|
||||
async def handle_config(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "config" message.
|
||||
|
||||
This is called, when a source wants its configuration transmitted in
|
||||
a single message, rather than chunks. A source will be created with the
|
||||
given configuration.
|
||||
|
||||
:param sid: The session id of the playback client
|
||||
:type sid: str
|
||||
:param data: A dictionary with a "source" (str) and a
|
||||
"config" (dict[str, Any]) entry. The exact content of the config entry
|
||||
depends on the source.
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
||||
if sid != state.sid:
|
||||
return
|
||||
|
||||
state.config.sources[data["source"]] = available_sources[data["source"]](
|
||||
data["config"]
|
||||
)
|
||||
|
@ -419,6 +524,19 @@ async def handle_config(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
@sio.on("register-web")
|
||||
async def handle_register_web(sid: str, data: dict[str, Any]) -> bool:
|
||||
"""
|
||||
Handle a "register-web" message.
|
||||
|
||||
Adds a web client to a requested room and sends it the initial state of the
|
||||
queue and recent list.
|
||||
|
||||
:param sid: The session id of the web client.
|
||||
:type sid: str
|
||||
:param data: A dictionary, containing at least a "room" entry.
|
||||
:type data: dict[str, Any]
|
||||
:returns: True, if the room exist, False otherwise
|
||||
:rtype: bool
|
||||
"""
|
||||
if data["room"] in clients:
|
||||
async with sio.session(sid) as session:
|
||||
session["room"] = data["room"]
|
||||
|
@ -430,46 +548,69 @@ async def handle_register_web(sid: str, data: dict[str, Any]) -> bool:
|
|||
|
||||
|
||||
@sio.on("register-admin")
|
||||
async def handle_register_admin(sid: str, data: dict[str, str]) -> None:
|
||||
async def handle_register_admin(sid: str, data: dict[str, Any]) -> bool:
|
||||
"""
|
||||
Handle a "register-admin" message.
|
||||
|
||||
If the client provides the correct secret for its room, the connection is
|
||||
upgraded to an admin connection.
|
||||
|
||||
:param sid: The session id of the client, requesting admin.
|
||||
:type sid: str:
|
||||
:param data: A dictionary with at least a "secret" entry.
|
||||
:type data: dict[str, Any]
|
||||
:returns: True, if the secret is correct, False otherwise
|
||||
:rtype: bool
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
||||
is_admin = data["secret"] == state.secret
|
||||
is_admin: bool = data["secret"] == state.secret
|
||||
async with sio.session(sid) as session:
|
||||
session["admin"] = is_admin
|
||||
await sio.emit("register-admin", {"success": is_admin}, room=sid)
|
||||
|
||||
|
||||
@sio.on("get-config")
|
||||
async def handle_get_config(sid: str, data: dict[str, Any]) -> None:
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
is_admin = session["admin"]
|
||||
state = clients[room]
|
||||
|
||||
if is_admin:
|
||||
await sio.emit(
|
||||
"config",
|
||||
{
|
||||
name: source.get_config()
|
||||
for name, source in state.config.sources.items()
|
||||
},
|
||||
)
|
||||
return is_admin
|
||||
|
||||
|
||||
@sio.on("skip-current")
|
||||
async def handle_skip_current(sid: str, data: dict[str, Any] = {}) -> None:
|
||||
async def handle_skip_current(sid: str) -> None:
|
||||
"""
|
||||
Handle a "skip-current" message.
|
||||
|
||||
If this comes from an admin connection, forward the "skip-current" message
|
||||
to the playback client. This will be handled by the
|
||||
:py:func:`syng.client.handle_skip_current` function.
|
||||
|
||||
:param sid: The session id of the client, requesting.
|
||||
:type sid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
is_admin = session["admin"]
|
||||
state = clients[room]
|
||||
|
||||
if is_admin:
|
||||
await sio.emit("skip-current", room=clients[room].sid)
|
||||
old_entry = await state.queue.popleft()
|
||||
state.recent.append(old_entry)
|
||||
await sio.emit("skip-current", old_entry, room=clients[room].sid)
|
||||
await send_state(state, room)
|
||||
|
||||
|
||||
@sio.on("move-up")
|
||||
async def handle_move_up(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "move-up" message.
|
||||
|
||||
If on an admin connection, moves up the entry specified in the data by one
|
||||
place in the queue.
|
||||
|
||||
:param sid: The session id of the client requesting.
|
||||
:type sid: str
|
||||
:param data: A dictionary with at least an "uuid" entry
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
is_admin = session["admin"]
|
||||
|
@ -481,6 +622,18 @@ async def handle_move_up(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
@sio.on("skip")
|
||||
async def handle_skip(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "skip" message.
|
||||
|
||||
If on an admin connection, removes the entry specified by data["uuid"]
|
||||
from the queue.
|
||||
|
||||
:param sid: The session id of the client requesting.
|
||||
:type sid: str
|
||||
:param data: A dictionary with at least an "uuid" entry.
|
||||
:type data: dict[str, Any]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
is_admin = session["admin"]
|
||||
|
@ -495,13 +648,41 @@ async def handle_skip(sid: str, data: dict[str, Any]) -> None:
|
|||
|
||||
|
||||
@sio.on("disconnect")
|
||||
async def handle_disconnect(sid: str, data: dict[str, Any] = {}) -> None:
|
||||
async def handle_disconnect(sid: str) -> None:
|
||||
"""
|
||||
Handle the "disconnect" message.
|
||||
|
||||
This message is sent automatically when a client disconnects.
|
||||
|
||||
Remove the client from its room.
|
||||
|
||||
:param sid: The session id of the client disconnecting
|
||||
:type sid: str
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
if "room" in session:
|
||||
sio.leave_room(sid, session["room"])
|
||||
|
||||
|
||||
@sio.on("search")
|
||||
async def handle_search(sid: str, data: dict[str, str]) -> None:
|
||||
async def handle_search(sid: str, data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Handle the "search" message.
|
||||
|
||||
Forwards data["query"] to the :py:func:`Source.search` method of every
configured source and executes the searches concurrently. The order is
given by the :py:attr:`Config.sources_prio` attribute of the state.

The results will be sent with a "search-results" message to the (web)
client.
|
||||
|
||||
:param sid: The session id of the client requesting.
|
||||
:type sid: str
|
||||
:param data: A dictionary with at least a "query" entry.
|
||||
:type data: dict[str, str]
|
||||
:rtype: None
|
||||
"""
|
||||
async with sio.session(sid) as session:
|
||||
room = session["room"]
|
||||
state = clients[room]
|
||||
|
@ -513,11 +694,6 @@ async def handle_search(sid: str, data: dict[str, str]) -> None:
|
|||
for source in state.config.sources_prio
|
||||
]
|
||||
)
|
||||
# for source in state.config.sources_prio:
|
||||
# loop = asyncio.get_running_loop()
|
||||
# search_future = loop.create_future()
|
||||
# loop.create_task(state.config.sources[source].search(search_future, query))
|
||||
# result_futures.append(search_future)
|
||||
|
||||
results = [
|
||||
search_result
|
||||
|
@ -526,12 +702,20 @@ async def handle_search(sid: str, data: dict[str, str]) -> None:
|
|||
]
|
||||
await sio.emit(
|
||||
"search-results",
|
||||
{"results": [result.to_dict() for result in results]},
|
||||
{"results": results},
|
||||
room=sid,
|
||||
)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Configure and start the server.
|
||||
|
||||
Parse the command line arguments, register static routes to serve the web
|
||||
client and start the server.
|
||||
|
||||
:rtype: None
|
||||
"""
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument("--host", "-H", default="localhost")
|
||||
parser.add_argument("--port", "-p", default="8080")
|
||||
|
|
|
syng/sources/__init__.py
@@ -1,11 +1,27 @@
|
|||
"""
|
||||
Imports all sources, so that they add themselves to the
|
||||
``available_sources`` dictionary.
|
||||
"""
|
||||
# pylint: disable=useless-import-alias
|
||||
|
||||
from typing import Any
|
||||
|
||||
from .source import Source as Source, available_sources as available_sources
|
||||
from .source import available_sources as available_sources
|
||||
from .source import Source as Source
|
||||
from .youtube import YoutubeSource
|
||||
from .s3 import S3Source
|
||||
|
||||
|
||||
def configure_sources(configs: dict[str, Any]) -> dict[str, Source]:
|
||||
"""
|
||||
Create a Source object for each entry in the given configs dictionary.
|
||||
|
||||
:param configs: Configurations for the sources
|
||||
:type configs: dict[str, Any]
|
||||
:return: A dictionary, mapping the name of the source to the
|
||||
source object
|
||||
:rtype: dict[str, Source]
|
||||
"""
|
||||
configured_sources = {}
|
||||
for source, config in configs.items():
|
||||
if source in available_sources:
|
||||
|
|
|
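A usage sketch tying ``configure_sources`` to the ``sources`` block of the client config file; the endpoint and credentials below are placeholders::

    from syng.sources import configure_sources

    configs = {
        "s3": {
            "endpoint": "minio.example.org",      # placeholder
            "access_key": "ACCESS_KEY",           # placeholder
            "secret_key": "SECRET_KEY",           # placeholder
            "bucket": "karaoke",
            "tmp_dir": "/tmp/syng",
            "index_file": "/tmp/syng/s3_index.json",
        },
        # "youtube": {...} would be configured analogously.
    }

    sources = configure_sources(configs)
    print(list(sources.keys()))  # ["s3"]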
syng/sources/s3.py
@@ -1,20 +1,41 @@
|
|||
# from json import load, dump
|
||||
from itertools import zip_longest
|
||||
"""
|
||||
Construct the S3 source.
|
||||
|
||||
Adds it to the ``available_sources`` with the name ``s3``
|
||||
"""
|
||||
import asyncio
|
||||
import os
|
||||
from typing import Tuple, Optional, Any
|
||||
|
||||
from minio import Minio
|
||||
from itertools import zip_longest
|
||||
from json import load
|
||||
from json import dump
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
|
||||
import mutagen
|
||||
from minio import Minio
|
||||
|
||||
from .source import Source, available_sources
|
||||
from ..result import Result
|
||||
from ..entry import Entry
|
||||
from ..result import Result
|
||||
from .source import available_sources
|
||||
from .source import Source
|
||||
|
||||
|
||||
class S3Source(Source):
|
||||
"""A source for playing songs from a s3 compatible storage.
|
||||
|
||||
Config options are:
|
||||
- ``endpoint``, ``access_key``, ``secret_key``, ``bucket``: These
|
||||
will simply be forwarded to the ``minio`` client.
|
||||
- ``tmp_dir``: The folder, where temporary files are stored. Default
|
||||
is ``/tmp/syng``
|
||||
- ``index_file``: If the file does not exist, saves the list of
|
||||
``cdg``-files from the s3 instance to this file. If it exists, loads
|
||||
the list of files from this file.
|
||||
"""
|
||||
|
||||
def __init__(self, config: dict[str, Any]):
|
||||
"""Create the source."""
|
||||
super().__init__(config)
|
||||
|
||||
if "endpoint" in config and "access_key" in config and "secret_key" in config:
|
||||
|
@ -28,14 +49,33 @@ class S3Source(Source):
|
|||
config["tmp_dir"] if "tmp_dir" in config else "/tmp/syng"
|
||||
)
|
||||
|
||||
self.index: list[str] = [] if "index" not in config else config["index"]
|
||||
self.index: list[str] = []
|
||||
self.index_file: Optional[str] = (
|
||||
config["index_file"] if "index_file" in config else None
|
||||
)
|
||||
self.extra_mpv_arguments = ["--scale=oversample"]
|
||||
|
||||
async def get_entry(self, performer: str, ident: str) -> Entry:
|
||||
"""
|
||||
Create an :py:class:`syng.entry.Entry` for the identifier.
|
||||
|
||||
The identifier should be a ``cdg`` filepath on the s3 server.
|
||||
|
||||
Initially the duration for the generated entry will be set to 180
|
||||
seconds, so the server will ask the client for that missing
|
||||
metadata.
|
||||
|
||||
:param performer: The person singing.
|
||||
:type performer: str
|
||||
:param ident: A path to a ``cdg`` file.
|
||||
:type ident: str
|
||||
:return: An entry with the data.
|
||||
:rtype: Entry
|
||||
"""
|
||||
res: Optional[Result] = Result.from_filename(ident, "s3")
|
||||
if res is not None:
|
||||
return Entry(
|
||||
id=ident,
|
||||
ident=ident,
|
||||
source="s3",
|
||||
duration=180,
|
||||
album=res.album,
|
||||
|
@ -46,8 +86,24 @@ class S3Source(Source):
|
|||
raise RuntimeError(f"Could not parse {ident}")
|
||||
|
||||
async def get_config(self) -> dict[str, Any] | list[dict[str, Any]]:
|
||||
"""
|
||||
Return the list of ``cdg`` files on the s3 instance.
|
||||
|
||||
The list is chunked into 1000 files per entry and wrapped in a dictionary
with key ``index``.
|
||||
|
||||
:return: see above
|
||||
:rtype: list[dict[str, Any]]
|
||||
"""
|
||||
|
||||
def _get_config() -> dict[str, Any] | list[dict[str, Any]]:
|
||||
if not self.index:
|
||||
if self.index_file is not None and os.path.isfile(self.index_file):
|
||||
with open(
|
||||
self.index_file, "r", encoding="utf8"
|
||||
) as index_file_handle:
|
||||
self.index = load(index_file_handle)
|
||||
else:
|
||||
print(f"s3: Indexing '{self.bucket}'")
|
||||
self.index = [
|
||||
obj.object_name
|
||||
|
@ -55,10 +111,13 @@ class S3Source(Source):
|
|||
if obj.object_name.endswith(".cdg")
|
||||
]
|
||||
print("s3: Indexing done")
|
||||
# with open("s3_files", "w") as f:
|
||||
# dump(self.index, f)
|
||||
# with open("s3_files", "r") as f:
|
||||
# self.index = [item for item in load(f) if item.endswith(".cdg")]
|
||||
if self.index_file is not None and not os.path.isfile(
|
||||
self.index_file
|
||||
):
|
||||
with open(
|
||||
self.index_file, "w", encoding="utf8"
|
||||
) as index_file_handle:
|
||||
dump(self.index, index_file_handle)
|
||||
|
||||
chunked = zip_longest(*[iter(self.index)] * 1000, fillvalue="")
|
||||
return [
|
||||
|
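The ``zip_longest(*[iter(self.index)] * 1000, fillvalue="")`` idiom above splits the index into chunks of 1000 filenames by handing the same iterator to ``zip_longest`` a thousand times. A small, self-contained sketch (chunk size 3 for readability, sample filenames made up) shows the effect:

from itertools import zip_longest

index = ["a.cdg", "b.cdg", "c.cdg", "d.cdg", "e.cdg"]
# The same iterator object is consumed three positions at a time; the last
# chunk is padded with the fillvalue so all chunks have the same length.
chunks = list(zip_longest(*[iter(index)] * 3, fillvalue=""))
print(chunks)  # [('a.cdg', 'b.cdg', 'c.cdg'), ('d.cdg', 'e.cdg', '')]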
@@ -68,9 +127,19 @@ class S3Source(Source):
return await asyncio.to_thread(_get_config)

def add_to_config(self, config: dict[str, Any]) -> None:
"""Add the chunk of the index list to the internal index list."""
self.index += config["index"]

async def search(self, query: str) -> list[Result]:
"""
Search the internal index list for the query.

:param query: The query to search for
:type query: str
:return: A list of Results, that need to contain all the words from
the ``query``
:rtype: list[Result]
"""
filtered: list[str] = self.filter_data_by_query(query, self.index)
results: list[Result] = []
for filename in filtered:

@@ -81,13 +150,23 @@ class S3Source(Source):
return results

async def get_missing_metadata(self, entry: Entry) -> dict[str, Any]:
"""
Return the duration for the mp3 file.

:param entry: The entry with the associated mp3 file
:type entry: Entry
:return: A dictionary containing the duration in seconds in the
``duration`` key.
:rtype: dict[str, Any]
"""

def mutagen_wrapped(file: str) -> int:
meta_infos = mutagen.File(file).info
return int(meta_infos.length)

await self.ensure_playable(entry)

audio_file_name: Optional[str] = self.downloaded_files[entry.id].audio
audio_file_name: Optional[str] = self.downloaded_files[entry.ident].audio

if audio_file_name is None:
duration: int = 180

@@ -96,20 +175,28 @@ class S3Source(Source):

return {"duration": int(duration)}

async def doBuffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
cdg_filename: str = os.path.basename(entry.id)
path_to_file: str = os.path.dirname(entry.id)
async def do_buffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
"""
Download the ``cdg`` and the ``mp3`` file from the s3.

:param entry: The entry to download
:type entry: Entry
:return: A tuple with the location of the ``cdg`` and the ``mp3`` file.
:rtype: Tuple[str, Optional[str]]
"""
cdg_filename: str = os.path.basename(entry.ident)
path_to_file: str = os.path.dirname(entry.ident)

cdg_path: str = os.path.join(path_to_file, cdg_filename)
target_file_cdg: str = os.path.join(self.tmp_dir, cdg_path)

ident_mp3: str = entry.id[:-3] + "mp3"
ident_mp3: str = entry.ident[:-3] + "mp3"
target_file_mp3: str = target_file_cdg[:-3] + "mp3"
os.makedirs(os.path.dirname(target_file_cdg), exist_ok=True)

video_task: asyncio.Task[Any] = asyncio.create_task(
asyncio.to_thread(
self.minio.fget_object, self.bucket, entry.id, target_file_cdg
self.minio.fget_object, self.bucket, entry.ident, target_file_cdg
)
)
audio_task: asyncio.Task[Any] = asyncio.create_task(
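The buffering above wraps the blocking ``minio`` downloads in ``asyncio.to_thread`` and schedules both as tasks, so the ``cdg`` and the ``mp3`` transfer concurrently. Reduced to a standalone sketch (the ``fetch`` function below is a stand-in for ``fget_object`` and not part of the codebase):

import asyncio

def fetch(name: str) -> str:
    # Stand-in for a blocking download such as Minio.fget_object.
    return f"/tmp/syng/{name}"

async def buffer_pair() -> tuple[str, str]:
    # Each blocking call runs in a worker thread; create_task starts both
    # immediately, so the downloads overlap instead of running in sequence.
    video_task = asyncio.create_task(asyncio.to_thread(fetch, "song.cdg"))
    audio_task = asyncio.create_task(asyncio.to_thread(fetch, "song.mp3"))
    return await video_task, await audio_task

print(asyncio.run(buffer_pair()))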
@@ -1,12 +1,23 @@
"""
Abstract class for sources.

Also defines the dictionary of available sources. Each source should add itself
to this dictionary in its module.
"""
from __future__ import annotations
import shlex

import asyncio
from typing import Tuple, Optional, Type, Any
import os.path
from collections import defaultdict
from dataclasses import dataclass, field
import logging
import os.path
import shlex
from collections import defaultdict
from dataclasses import dataclass
from dataclasses import field
from traceback import print_exc
from typing import Any
from typing import Optional
from typing import Tuple
from typing import Type

from ..entry import Entry
from ..result import Result

@@ -16,28 +27,105 @@ logger: logging.Logger = logging.getLogger(__name__)

@dataclass
class DLFilesEntry:
"""This represents a song in the context of a source.

:param ready: This event triggers as soon, as all files for the song are
downloaded/buffered.
:type ready: asyncio.Event
:param video: The location of the video part of the song.
:type video: str
:param audio: The location of the audio part of the song, if it is not
incuded in the video file. (Default is ``None``)
:type audio: Optional[str]
:param buffering: True if parts are buffering, False otherwise (Default is
``False``)
:type buffering: bool
:param complete: True if download was completed, False otherwise (Default
is ``False``)
:type complete: bool
:param failed: True if the buffering failed, False otherwise (Default is
``False``)
:type failed: bool
:param skip: True if the next Entry for this file should be skipped
(Default is ``False``)
:param buffer_task: Reference to the task, that downloads the files.
:type buffer_task: Optional[asyncio.Task[Tuple[str, Optional[str]]]]
"""

# pylint: disable=too-many-instance-attributes

ready: asyncio.Event = field(default_factory=asyncio.Event)
video: str = ""
audio: Optional[str] = None
buffering: bool = False
complete: bool = False
failed: bool = False
skip: bool = False
buffer_task: Optional[asyncio.Task[Tuple[str, Optional[str]]]] = None


class Source:
def __init__(self, config: dict[str, Any]):
"""Parentclass for all sources.

A new source should subclass this, and at least implement
:py:func:`Source.get_entry`, :py:func:`Source.search` and
:py:func:`Source.do_buffer`. The sources will be shared between the server
and the playback client.

Source specific tasks will be forwarded to the respective source, like:
- Playing the audio/video
- Buffering the audio/video
- Searching for a query
- Getting an entry from an identifier
- Handling the skipping of currently played song

Each source has a reference to all files, that are currently queued to
download via the :py:attr:`Source.downloaded_files` attribute and a
reference to a ``mpv`` process playing songs for that specific source

:attributes: - ``downloaded_files``, a dictionary mapping
:py:attr:`Entry.ident` to :py:class:`DLFilesEntry`.
- ``player``, the reference to the ``mpv`` process, if it has
started
- ``extra_mpv_arguments``, list of arguments added to the mpv
instance, can be overwritten by a subclass
"""

def __init__(self, _: dict[str, Any]):
"""
Create and initialize a new source.

You should never try to instantiate the Source class directly, rather
you should instantiate a subclass.

:param _: Specific configuration for a Soure, ignored in the base
class
:type _: dict[str, Any]
"""
self.downloaded_files: defaultdict[str, DLFilesEntry] = defaultdict(
DLFilesEntry
)
self.masterlock: asyncio.Lock = asyncio.Lock()
self._masterlock: asyncio.Lock = asyncio.Lock()
self.player: Optional[asyncio.subprocess.Process] = None
self.extra_mpv_arguments: list[str] = []
self._skip_next = False

@staticmethod
async def play_mpv(
video: str, audio: str | None, /, *options: str
video: str, audio: Optional[str], /, *options: str
) -> asyncio.subprocess.Process:
"""
Create a mpv process to play a song in full screen.

:param video: Location of the video part.
:type video: str
:param audio: Location of the audio part, if it exists.
:type audio: Optional[str]
:param options: Extra arguments forwarded to the mpv player
:type options: str
:returns: An async reference to the process
:rtype: asyncio.subprocess.Process
"""
args = ["--fullscreen", *options, video] + (
[f"--audio-file={audio}"] if audio else []
)
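To make the argument assembly above concrete: for a buffered entry with a separate audio file and the s3 source's extra ``--scale=oversample`` option, the list handed to ``mpv`` would come out roughly as follows (the file paths are illustrative only):

video = "/tmp/syng/artists/queen/Bohemian Rhapsody.cdg"
audio = "/tmp/syng/artists/queen/Bohemian Rhapsody.mp3"
options = ("--scale=oversample",)

args = ["--fullscreen", *options, video] + (
    [f"--audio-file={audio}"] if audio else []
)
# ['--fullscreen', '--scale=oversample',
#  '/tmp/syng/artists/queen/Bohemian Rhapsody.cdg',
#  '--audio-file=/tmp/syng/artists/queen/Bohemian Rhapsody.mp3']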
@@ -50,75 +138,186 @@ class Source:
return await mpv_process

async def get_entry(self, performer: str, ident: str) -> Entry:
"""
Create an :py:class:`syng.entry.Entry` from a given identifier.

Abstract, needs to be implemented by subclass.

:param performer: The performer of the song
:type performer: str
:param ident: Unique identifier of the song.
:type ident: str
:returns: New entry for the identifier.
:rtype: Entry
"""
raise NotImplementedError

async def search(self, query: str) -> list[Result]:
"""
Search the songs from the source for a query.

Abstract, needs to be implemented by subclass.

:param query: The query to search for
:type query: str
:returns: A list of Results containing the query.
:rtype: list[Result]
"""
raise NotImplementedError

async def doBuffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
async def do_buffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
"""
Source specific part of buffering.

This should asynchronous download all required files to play the entry,
and return the location of the video and audio file. If the audio is
included in the video file, the location for the audio file should be
`None`.

Abstract, needs to be implemented by subclass.

:param entry: The entry to buffer
:type entry: Entry
:returns: A Tuple of the locations for the video and the audio file.
:rtype: Tuple[str, Optional[str]]
"""
raise NotImplementedError

async def buffer(self, entry: Entry) -> None:
async with self.masterlock:
if self.downloaded_files[entry.id].buffering:
"""
Buffer all necessary files for the entry.

This calls the specific :py:func:`Source.do_buffer` method. It
ensures, that the correct events will be triggered, when the buffer
function ends. Also ensures, that no entry will be buffered multiple
times.

If this is called multiple times for the same song (even if they come
from different entries) This will immediately return.

:param entry: The entry to buffer
:type entry: Entry
:rtype: None
"""
async with self._masterlock:
if self.downloaded_files[entry.ident].buffering:
return
self.downloaded_files[entry.id].buffering = True
self.downloaded_files[entry.ident].buffering = True

try:
buffer_task = asyncio.create_task(self.doBuffer(entry))
self.downloaded_files[entry.id].buffer_task = buffer_task
buffer_task = asyncio.create_task(self.do_buffer(entry))
self.downloaded_files[entry.ident].buffer_task = buffer_task
video, audio = await buffer_task

self.downloaded_files[entry.id].video = video
self.downloaded_files[entry.id].audio = audio
self.downloaded_files[entry.id].complete = True
except Exception:
self.downloaded_files[entry.ident].video = video
self.downloaded_files[entry.ident].audio = audio
self.downloaded_files[entry.ident].complete = True
except Exception: # pylint: disable=broad-except
print_exc()
logger.error("Buffering failed for %s", entry)
self.downloaded_files[entry.id].failed = True
self.downloaded_files[entry.ident].failed = True

self.downloaded_files[entry.id].ready.set()
self.downloaded_files[entry.ident].ready.set()

async def play(self, entry: Entry) -> None:
"""
Play the entry.

This waits until buffering is complete and starts
playing the entry.

:param entry: The entry to play
:type entry: Entry
:rtype: None
"""
await self.ensure_playable(entry)

if self.downloaded_files[entry.id].failed:
del self.downloaded_files[entry.id]
if self.downloaded_files[entry.ident].failed:
del self.downloaded_files[entry.ident]
return

if entry.skip:
del self.downloaded_files[entry.id]
async with self._masterlock:
if self._skip_next:
self._skip_next = False
entry.skip = True
return

self.player = await self.play_mpv(
self.downloaded_files[entry.id].video,
self.downloaded_files[entry.id].audio,
self.downloaded_files[entry.ident].video,
self.downloaded_files[entry.ident].audio,
*self.extra_mpv_arguments,
)
await self.player.wait()
self.player = None
if self._skip_next:
self._skip_next = False
entry.skip = True

async def skip_current(self, entry: Entry) -> None:
entry.skip = True
self.downloaded_files[entry.id].buffering = False
buffer_task = self.downloaded_files[entry.id].buffer_task
"""
Skips first song in the queue.

If it is played, the player is killed, if it is still buffered, the
buffering is aborted. Then a flag is set to keep the player from
playing it.

:param entry: A reference to the first entry of the queue
:type entry: Entry
:rtype: None
"""
async with self._masterlock:
self._skip_next = True
self.downloaded_files[entry.ident].buffering = False
buffer_task = self.downloaded_files[entry.ident].buffer_task
if buffer_task is not None:
buffer_task.cancel()
self.downloaded_files[entry.id].ready.set()
self.downloaded_files[entry.ident].ready.set()

if (
self.player is not None
): # A race condition can occur here. In that case, just press the skip button again
if self.player is not None:
self.player.kill()

async def ensure_playable(self, entry: Entry) -> None:
await self.buffer(entry)
await self.downloaded_files[entry.id].ready.wait()
"""
Guaranties that the given entry can be played.

async def get_missing_metadata(self, entry: Entry) -> dict[str, Any]:
First start buffering, then wait for the buffering to end.

:param entry: The entry to ensure playback for.
:type entry: Entry
:rtype: None
"""
await self.buffer(entry)
await self.downloaded_files[entry.ident].ready.wait()

async def get_missing_metadata(self, _entry: Entry) -> dict[str, Any]:
"""
Read and report missing metadata.

If the source sended a list of filenames to the server, the server can
search these filenames, but has no way to read e.g. the duration. This
method will be called to return the missing metadata.

By default this just returns an empty dict.

:param _entry: The entry to get the metadata for
:type _entry: Entry
:returns: A dictionary with the missing metadata.
:rtype dict[str, Any]
"""
return {}

def filter_data_by_query(self, query: str, data: list[str]) -> list[str]:
"""
Filters the ``data``-list by the ``query``.

:param query: The query to filter
:type query: str
:param data: The list to filter
:type data: list[str]
:return: All entries in the list containing the query.
:rtype: list[str]
"""

def contains_all_words(words: list[str], element: str) -> bool:
for word in words:
if not word.lower() in os.path.basename(element).lower():
@@ -129,10 +328,31 @@ class Source:
return [element for element in data if contains_all_words(splitquery, element)]
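A quick illustration of the filter above: every word of the split query must occur, case-insensitively, in the basename of an entry for it to be kept. The call below assumes a plain ``Source`` instance purely for demonstration (the base class only sets up bookkeeping state) and uses made-up data:

data = [
    "artists/queen/Bohemian Rhapsody.cdg",
    "artists/abba/Waterloo.cdg",
]
src = Source({})  # for illustration only; normally a subclass is used
src.filter_data_by_query("bohemian rhap", data)
# -> ['artists/queen/Bohemian Rhapsody.cdg']
src.filter_data_by_query("waterloo abba", data)
# -> [] because only the basename is searched and "abba" is part of the
#    directory name, not of "Waterloo.cdg"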

async def get_config(self) -> dict[str, Any] | list[dict[str, Any]]:
"""
Return the part of the config, that should be send to the server.

Can be either a dictionary or a list of dictionaries. If it is a
dictionary, a single message will be send. If it is a list, one message
will be send for each entry in the list.

Abstract, needs to be implemented by subclass.

:return: The part of the config, that should be sended to the server.
:rtype: dict[str, Any] | list[dict[str, Any]]
"""
raise NotImplementedError

def add_to_config(self, config: dict[str, Any]) -> None:
pass
"""
Add the config to the own config.

This is called on the server, if :py:func:`Source.get_config` returns a
list.

:param config: The part of the config to add.
:type config: dict[str, Any]
:rtype: None
"""


available_sources: dict[str, Type[Source]] = {}
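The ``available_sources`` dictionary defined above is the registry the dispatch code checks (``if source in available_sources:``). A new backend would subclass ``Source`` and register itself at import time; a hedged sketch, with the ``FolderSource`` name and its trivial behaviour invented purely for illustration:

class FolderSource(Source):
    """Hypothetical source that serves files from a local folder."""

    async def get_entry(self, performer: str, ident: str) -> Entry:
        # Minimal entry; a real source would fill in proper metadata.
        return Entry(ident=ident, source="folder", duration=180, album="folder",
                     title=ident, artist="unknown", performer=performer)

    async def search(self, query: str) -> list[Result]:
        return []

    async def do_buffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
        # The file is already local, nothing to download.
        return entry.ident, None

available_sources["folder"] = FolderSource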
@@ -1,20 +1,51 @@
"""
Construct the YouTube source.

Adds it to the ``available_sources`` with the name ``youtube``.
"""
import asyncio
import shlex
from functools import partial
from typing import Optional, Tuple, Any
from typing import Any
from typing import Optional
from typing import Tuple

from pytube import YouTube, Search, Channel, innertube, Stream, StreamQuery
from pytube import Channel
from pytube import innertube
from pytube import Search
from pytube import Stream
from pytube import StreamQuery
from pytube import YouTube

from .source import Source, available_sources
from ..entry import Entry
from ..result import Result
from .source import available_sources
from .source import Source


class YoutubeSource(Source):
"""A source for playing karaoke files from YouTube.

Config options are:
- ``channels``: A list of all channel this source should search in.
Examples are ``/c/CCKaraoke`` or
``/channel/UCwTRjvjVge51X-ILJ4i22ew``
- ``tmp_dir``: The folder, where temporary files are stored. Default
is ``/tmp/syng``
- ``max_res``: The highest video resolution, that should be
downloaded/streamed. Default is 720.
- ``start_streaming``: If set to ``True``, the client starts streaming
the video, if buffering was not completed. Needs ``youtube-dl`` or
``yt-dlp``.
"""

def __init__(self, config: dict[str, Any]):
"""Create the source."""
super().__init__(config)
self.innertube_client: innertube.InnerTube = innertube.InnerTube(client="WEB")
self.channels: list[str] = config["channels"] if "channels" in config else []
self.innertube_client: innertube.InnerTube = innertube.InnerTube(
client="WEB")
self.channels: list[str] = config["channels"] if "channels" in config else [
]
self.tmp_dir: str = config["tmp_dir"] if "tmp_dir" in config else "/tmp/syng"
self.max_res: int = config["max_res"] if "max_res" in config else 720
self.start_streaming: bool = (
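As with the s3 source, a sketch of what a configuration for this source might look like; the channel path is the example from the docstring above, and the remaining values are placeholders rather than part of this commit:

# Hypothetical example configuration for YoutubeSource.
youtube_config = {
    "channels": ["/c/CCKaraoke"],  # channels searched before YouTube as a whole
    "tmp_dir": "/tmp/syng",        # buffer directory
    "max_res": 720,                # resolution cap for downloads/streams
    "start_streaming": True,       # stream via yt-dlp while buffering lags behind
}
source = YoutubeSource(youtube_config)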
@@ -22,16 +53,36 @@ class YoutubeSource(Source):
)

async def get_config(self) -> dict[str, Any] | list[dict[str, Any]]:
"""
Return the list of channels in a dictionary with key ``channels``.

:return: see above
:rtype: dict[str, Any]]
"""
return {"channels": self.channels}

async def play(self, entry: Entry) -> None:
if self.start_streaming and not self.downloaded_files[entry.id].complete:
"""
Play the given entry.

If ``start_streaming`` is set and buffering is not yet done, starts
immediatly and forwards the url to ``mpv``.

Otherwise wait for buffering and start playing.

:param entry: The entry to play.
:type entry: Entry
:rtype: None
"""
if self.start_streaming and not self.downloaded_files[entry.ident].complete:
print("streaming")
self.player = await self.play_mpv(
entry.id,
entry.ident,
None,
"--script-opts=ytdl_hook-ytdl_path=yt-dlp,ytdl_hook-exclude='%.pls$'",
f"--ytdl-format=bestvideo[height<={self.max_res}]+bestaudio/best[height<={self.max_res}]",
"--script-opts=ytdl_hook-ytdl_path=yt-dlp,"
"ytdl_hook-exclude='%.pls$'",
f"--ytdl-format=bestvideo[height<={self.max_res}]"
f"+bestaudio/best[height<={self.max_res}]",
"--fullscreen",
)
await self.player.wait()

@@ -39,21 +90,54 @@ class YoutubeSource(Source):
await super().play(entry)

async def get_entry(self, performer: str, ident: str) -> Entry:
"""
Create an :py:class:`syng.entry.Entry` for the identifier.

The identifier should be a youtube url. An entry is created with
all available metadata for the video.

:param performer: The persong singing.
:type performer: str
:param ident: A url to a YouTube video.
:type ident: str
:return: An entry with the data.
:rtype: Entry
"""

def _get_entry(performer: str, url: str) -> Entry:
yt = YouTube(url)
yt_song = YouTube(url)
return Entry(
id=url,
ident=url,
source="youtube",
album="YouTube",
duration=yt.length,
title=yt.title,
artist=yt.author,
duration=yt_song.length,
title=yt_song.title,
artist=yt_song.author,
performer=performer,
)

return await asyncio.to_thread(_get_entry, performer, ident)

def _contains_index(self, query: str, result: YouTube) -> float:
async def search(self, query: str) -> list[Result]:
"""
Search YouTube and the configured channels for the query.

The first results are the results of the configured channels. The next
results are the results from youtube as a whole, but the term "Karaoke"
is appended to the search query.

All results are sorted by how good they match to the search query,
respecting their original source (channel or YouTube as a whole).

All searching is done concurrently.

:param query: The query to search for
:type query: str
:return: A list of Results.
:rtype: list[Result]
"""

def _contains_index(query: str, result: YouTube) -> float:
compare_string: str = result.title.lower() + " " + result.author.lower()
hits: int = 0
queries: list[str] = shlex.split(query.lower())

@@ -63,7 +147,6 @@ class YoutubeSource(Source):

return 1 - (hits / len(queries))

async def search(self, query: str) -> list[Result]:
results: list[YouTube] = []
results_lists: list[list[YouTube]] = await asyncio.gather(
*[
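The score computed above is ``1 - hits / len(queries)``, so lower values mean better matches and the subsequent sort puts them first. As a worked example: for the query ``"queen bohemian live"`` and a result titled "Bohemian Rhapsody" by the author "Queen Official", two of the three query words occur in the combined title/author string, giving ``1 - 2/3 ≈ 0.33``; a result matching only one word scores ``1 - 1/3 ≈ 0.67`` and sorts after it.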
@@ -76,11 +159,11 @@ class YoutubeSource(Source):
search_result for yt_result in results_lists for search_result in yt_result
]

results.sort(key=partial(self._contains_index, query))
results.sort(key=partial(_contains_index, query))

return [
Result(
id=result.watch_url,
ident=result.watch_url,
source="youtube",
title=result.title,
artist=result.author,

@@ -90,13 +173,24 @@ class YoutubeSource(Source):
]

def _yt_search(self, query: str) -> list[YouTube]:
"""Search youtube as a whole.

Adds "karaoke" to the query.
"""
results: Optional[list[YouTube]] = Search(f"{query} karaoke").results
if results is not None:
return results
return []

# pylint: disable=protected-access
def _channel_search(self, query: str, channel: str) -> list[YouTube]:
browse_id: str = Channel(f"https://www.youtube.com{channel}").channel_id
"""
Search a channel for a query.

A lot of black Magic happens here.
"""
browse_id: str = Channel(
f"https://www.youtube.com{channel}").channel_id
endpoint: str = f"{self.innertube_client.base_url}/browse"

data: dict[str, str] = {

@@ -133,29 +227,46 @@ class YoutubeSource(Source):
title: str = item["itemSectionRenderer"]["contents"][0][
"videoRenderer"
]["title"]["runs"][0]["text"]
yt: YouTube = YouTube(yt_url)
yt.author = author
yt.title = title
list_of_videos.append(yt)
yt_song: YouTube = YouTube(yt_url)
yt_song.author = author
yt_song.title = title
list_of_videos.append(yt_song)

except KeyError:
pass
return list_of_videos

async def doBuffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
yt: YouTube = YouTube(entry.id)
async def do_buffer(self, entry: Entry) -> Tuple[str, Optional[str]]:
"""
Download the video.

streams: StreamQuery = await asyncio.to_thread(lambda: yt.streams)
Downloads the highest quality stream respecting the ``max_res``.
For higher resolution videos (1080p and above), YouTube will give you
the video and audio seperatly. If that is the case, both will be
downloaded.


:param entry: The entry to download.
:type entry: Entry
:return: The location of the video file and (if applicable) the
location of the audio file.
:rtype: Tuple[str, Optional[str]]
"""
yt_song: YouTube = YouTube(entry.ident)

streams: StreamQuery = await asyncio.to_thread(lambda: yt_song.streams)

video_streams: StreamQuery = streams.filter(
type="video",
custom_filter_functions=[lambda s: int(s.resolution[:-1]) <= self.max_res],
custom_filter_functions=[lambda s: int(
s.resolution[:-1]) <= self.max_res],
)
audio_streams: StreamQuery = streams.filter(only_audio=True)

best_video_stream: Stream = sorted(
video_streams,
key=lambda s: int(s.resolution[:-1]) + (1 if s.is_progressive else 0),
key=lambda s: int(s.resolution[:-1]) +
(1 if s.is_progressive else 0),
)[-1]
best_audio_stream: Stream = sorted(
audio_streams, key=lambda s: int(s.abr[:-4])
File diff suppressed because one or more lines are too long
@@ -5,8 +5,8 @@
<link rel="icon" href="/favicon.ico">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Syng Rocks!</title>
<script type="module" crossorigin src="/assets/index.c3b37c18.js"></script>
<link rel="stylesheet" href="/assets/index.1ff4ae2d.css">
<script type="module" crossorigin src="/assets/index.d57b37cd.js"></script>
<link rel="stylesheet" href="/assets/index.527b8dfc.css">
</head>
<body>
<div id="app"></div>
@@ -1,3 +1,6 @@
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=missing-class-docstring
import asyncio
from typing import Any

@@ -16,7 +19,7 @@ async def handle_search_results(data: dict[str, Any]) -> None:
for raw_item in data["results"]:
item = Result(**raw_item)
print(f"{item.artist} - {item.title} [{item.album}]")
print(f"{item.source}: {item.id}")
print(f"{item.source}: {item.ident}")


@sio.on("state")

@@ -57,7 +60,8 @@ class SyngShell(aiocmd.PromptToolkitCmd):
{
"performer": "Hammy",
"source": "youtube",
"id": "https://www.youtube.com/watch?v=rqZqHXJm-UA", # https://youtube.com/watch?v=x5bM5Bdizi4",
# https://youtube.com/watch?v=x5bM5Bdizi4",
"ident": "https://www.youtube.com/watch?v=rqZqHXJm-UA",
},
)

@@ -65,7 +69,9 @@ class SyngShell(aiocmd.PromptToolkitCmd):
await sio.emit("search", {"query": query})

async def do_append(self, source: str, ident: str) -> None:
await sio.emit("append", {"performer": "Hammy", "source": source, "id": ident})
await sio.emit(
"append", {"performer": "Hammy", "source": source, "ident": ident}
)

async def do_admin(self, data: str) -> None:
await sio.emit("register-admin", {"secret": data})