Python3 Migrate
@@ -0,0 +1,52 @@
import contextlib
import re
import warnings

# Messages used in deprecation warnings are collected here so we can target
# them easily when ignoring warnings.
_MESSAGES = {
    # Deprecated features in core playback:
    "core.playback.play:tl_track_kwargs": (
        'playback.play() with "tl_track" argument is pending deprecation, '
        'use "tlid" instead'
    ),
    # Deprecated features in core tracklist:
    "core.tracklist.add:tracks_arg": (
        'tracklist.add() "tracks" argument is deprecated'
    ),
    "core.tracklist.eot_track": (
        "tracklist.eot_track() is pending deprecation, use "
        "tracklist.get_eot_tlid()"
    ),
    "core.tracklist.next_track": (
        "tracklist.next_track() is pending deprecation, use "
        "tracklist.get_next_tlid()"
    ),
    "core.tracklist.previous_track": (
        "tracklist.previous_track() is pending deprecation, use "
        "tracklist.get_previous_tlid()"
    ),
}


def warn(msg_id, pending=False):
    if pending:
        category = PendingDeprecationWarning
    else:
        category = DeprecationWarning
    warnings.warn(_MESSAGES.get(msg_id, msg_id), category)


@contextlib.contextmanager
def ignore(ids=None):
    with warnings.catch_warnings():
        if isinstance(ids, str):
            ids = [ids]

        if ids:
            for msg_id in ids:
                msg = re.escape(_MESSAGES.get(msg_id, msg_id))
                warnings.filterwarnings("ignore", msg, DeprecationWarning)
        else:
            warnings.filterwarnings("ignore", category=DeprecationWarning)
        yield
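
Example (editor's illustration, not part of the committed file): a minimal sketch of how these helpers are used together, assuming the module above is importable as mopidy.internal.deprecation. warn() emits a registered message by id; ignore() silences matching DeprecationWarnings inside a block.

import warnings

from mopidy.internal import deprecation

# Emit a registered message as a PendingDeprecationWarning.
deprecation.warn("core.tracklist.eot_track", pending=True)

# Silence one specific deprecation message while running legacy code.
with deprecation.ignore("core.tracklist.add:tracks_arg"):
    warnings.warn(
        'tracklist.add() "tracks" argument is deprecated',
        DeprecationWarning,
    )  # filtered out inside this block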
189
venv/lib/python3.7/site-packages/mopidy/internal/deps.py
Normal file
@@ -0,0 +1,189 @@
import functools
import os
import platform
import sys

import pkg_resources

from mopidy.internal import formatting
from mopidy.internal.gi import Gst, gi


def format_dependency_list(adapters=None):
    if adapters is None:
        dist_names = {
            ep.dist.project_name
            for ep in pkg_resources.iter_entry_points("mopidy.ext")
            if ep.dist.project_name != "Mopidy"
        }
        dist_infos = [
            functools.partial(pkg_info, dist_name) for dist_name in dist_names
        ]

        adapters = (
            [
                executable_info,
                platform_info,
                python_info,
                functools.partial(pkg_info, "Mopidy", True),
            ]
            + dist_infos
            + [gstreamer_info]
        )

    return "\n".join([_format_dependency(a()) for a in adapters])


def _format_dependency(dep_info):
    lines = []

    if "version" not in dep_info:
        lines.append(f"{dep_info['name']}: not found")
    else:
        source = f" from {dep_info['path']}" if "path" in dep_info else ""
        lines.append(f"{dep_info['name']}: {dep_info['version']}{source}")

    if "other" in dep_info:
        details = formatting.indent(dep_info["other"], places=4)
        lines.append(f"  Detailed information: {details}")

    if dep_info.get("dependencies", []):
        for sub_dep_info in dep_info["dependencies"]:
            sub_dep_lines = _format_dependency(sub_dep_info)
            lines.append(
                formatting.indent(sub_dep_lines, places=2, singles=True)
            )

    return "\n".join(lines)


def executable_info():
    return {
        "name": "Executable",
        "version": sys.argv[0],
    }


def platform_info():
    return {
        "name": "Platform",
        "version": platform.platform(),
    }


def python_info():
    return {
        "name": "Python",
        "version": (
            f"{platform.python_implementation()} {platform.python_version()}"
        ),
        "path": os.path.dirname(platform.__file__),
    }


def pkg_info(
    project_name=None, include_transitive_deps=True, include_extras=False
):
    if project_name is None:
        project_name = "Mopidy"
    try:
        distribution = pkg_resources.get_distribution(project_name)
        extras = include_extras and distribution.extras or []
        if include_transitive_deps:
            dependencies = [
                pkg_info(
                    d.project_name,
                    include_transitive_deps=d.project_name != "Mopidy",
                )
                for d in distribution.requires(extras)
            ]
        else:
            dependencies = []
        return {
            "name": project_name,
            "version": distribution.version,
            "path": distribution.location,
            "dependencies": dependencies,
        }
    except pkg_resources.ResolutionError:
        return {
            "name": project_name,
        }


def gstreamer_info():
    other = []
    other.append(f"Python wrapper: python-gi {gi.__version__}")

    found_elements = []
    missing_elements = []
    for name, status in _gstreamer_check_elements():
        if status:
            found_elements.append(name)
        else:
            missing_elements.append(name)

    other.append("Relevant elements:")
    other.append("  Found:")
    for element in found_elements:
        other.append(f"    {element}")
    if not found_elements:
        other.append("    none")
    other.append("  Not found:")
    for element in missing_elements:
        other.append(f"    {element}")
    if not missing_elements:
        other.append("    none")

    return {
        "name": "GStreamer",
        "version": ".".join(map(str, Gst.version())),
        "path": os.path.dirname(gi.__file__),
        "other": "\n".join(other),
    }


def _gstreamer_check_elements():
    elements_to_check = [
        # Core playback
        "uridecodebin",
        # External HTTP streams
        "souphttpsrc",
        # Spotify
        "appsrc",
        # Audio sinks
        "alsasink",
        "osssink",
        "oss4sink",
        "pulsesink",
        # MP3 encoding and decoding
        #
        # One of flump3dec, mad, and mpg123audiodec is required for MP3
        # playback.
        "flump3dec",
        "id3demux",
        "id3v2mux",
        "lamemp3enc",
        "mad",
        "mpegaudioparse",
        "mpg123audiodec",
        # Ogg Vorbis encoding and decoding
        "vorbisdec",
        "vorbisenc",
        "vorbisparse",
        "oggdemux",
        "oggmux",
        "oggparse",
        # Flac decoding
        "flacdec",
        "flacparse",
        # Shoutcast output
        "shout2send",
    ]
    known_elements = [
        factory.get_name()
        for factory in Gst.Registry.get().get_feature_list(Gst.ElementFactory)
    ]
    return [
        (element, element in known_elements) for element in elements_to_check
    ]
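
Example (editor's illustration, not part of the committed file): a sketch of how this module is typically called, e.g. by the `mopidy deps` subcommand. Note that the module imports mopidy.internal.gi at import time, so GStreamer and PyGObject must be installed for it to load.

from mopidy.internal import deps

# Full report: executable, platform, Python, Mopidy and extension
# packages, and GStreamer with its relevant elements.
print(deps.format_dependency_list())

# A single package, without walking its transitive dependencies.
info = deps.pkg_info("Mopidy", include_transitive_deps=False)
print(info["name"], info.get("version", "not found"))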
@@ -0,0 +1,28 @@
import re
import unicodedata


def indent(string, places=4, linebreak="\n", singles=False):
    lines = string.split(linebreak)
    if not singles and len(lines) == 1:
        return string
    for i, line in enumerate(lines):
        lines[i] = " " * places + line
    result = linebreak.join(lines)
    if not singles:
        result = linebreak + result
    return result


def slugify(value):
    """
    Converts to lowercase, removes characters that are not word characters
    (alphanumerics and underscores), whitespace, or hyphens, and converts
    spaces to hyphens. Also strips leading and trailing whitespace.

    This function is based on Django's slugify implementation.
    """
    value = unicodedata.normalize("NFKD", value)
    value = value.encode("ascii", "ignore").decode("ascii")
    value = re.sub(r"[^\w\s-]", "", value).strip().lower()
    return re.sub(r"[-\s]+", "-", value)
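
Example (editor's illustration, not part of the committed file): the two helpers in action.

from mopidy.internal import formatting

# Multi-line input gains a leading linebreak plus per-line indentation.
print(formatting.indent("first\nsecond", places=2))

# Accented characters are transliterated, punctuation is dropped.
print(formatting.slugify("Héllo, World!"))  # -> 'hello-world'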
49
venv/lib/python3.7/site-packages/mopidy/internal/gi.py
Normal file
@@ -0,0 +1,49 @@
import sys
import textwrap

try:
    import gi

    gi.require_version("Gst", "1.0")
    from gi.repository import GLib, GObject, Gst
except ImportError:
    print(
        textwrap.dedent(
            """
            ERROR: A GObject based library was not found.

            Mopidy requires GStreamer to work. GStreamer is a C library with a
            number of dependencies itself, and cannot be installed with the
            regular Python tools like pip.

            Please see http://docs.mopidy.com/en/latest/installation/ for
            instructions on how to install the required dependencies.
            """
        )
    )
    raise
else:
    Gst.init([])
    gi.require_version("GstPbutils", "1.0")
    from gi.repository import GstPbutils

GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")

REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))

if Gst.version() < REQUIRED_GST_VERSION:
    sys.exit(
        f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
        f"but found {Gst.version_string()}."
    )


__all__ = [
    "GLib",
    "GObject",
    "Gst",
    "GstPbutils",
    "gi",
]
57
venv/lib/python3.7/site-packages/mopidy/internal/http.py
Normal file
@@ -0,0 +1,57 @@
import logging
import time

import requests

from mopidy import httpclient

logger = logging.getLogger(__name__)


def get_requests_session(proxy_config, user_agent):
    proxy = httpclient.format_proxy(proxy_config)
    full_user_agent = httpclient.format_user_agent(user_agent)

    session = requests.Session()
    session.proxies.update({"http": proxy, "https": proxy})
    session.headers.update({"user-agent": full_user_agent})

    return session


def download(session, uri, timeout=1.0, chunk_size=4096):
    try:
        response = session.get(uri, stream=True, timeout=timeout)
    except requests.exceptions.Timeout:
        logger.warning(
            "Download of %r failed due to connection timeout after %.3fs",
            uri,
            timeout,
        )
        return None
    except requests.exceptions.InvalidSchema:
        logger.warning("Download of %r failed due to unsupported schema", uri)
        return None
    except requests.exceptions.RequestException as exc:
        logger.warning("Download of %r failed: %s", uri, exc)
        logger.debug("Download exception details", exc_info=True)
        return None

    content = []
    deadline = time.time() + timeout
    for chunk in response.iter_content(chunk_size):
        content.append(chunk)
        if time.time() > deadline:
            logger.warning(
                "Download of %r failed due to download taking more than %.3fs",
                uri,
                timeout,
            )
            return None

    if not response.ok:
        logger.warning("Problem downloading %r: %s", uri, response.reason)
        return None

    return b"".join(content)
387
venv/lib/python3.7/site-packages/mopidy/internal/jsonrpc.py
Normal file
@@ -0,0 +1,387 @@
import inspect
import json
import traceback

import pykka


class JsonRpcWrapper:
    """
    Wrap objects and make them accessible through JSON-RPC 2.0 messaging.

    This class takes responsibility for communicating with the objects and
    processing of JSON-RPC 2.0 messages. The transport of the messages over
    HTTP, WebSocket, TCP, or whatever is of no concern to this class.

    The wrapper supports exporting the methods of one or more objects. Either
    way, the objects must be exported with method name prefixes, called
    "mounts".

    To expose objects, add them all to the objects mapping. The key in the
    mapping is used as the object's mounting point in the exposed API::

        jrw = JsonRpcWrapper(objects={
            'foo': foo,
            'hello': lambda: 'Hello, world!',
        })

    This will export the Python callables on the left as the JSON-RPC 2.0
    method names on the right::

        foo.bar() -> foo.bar
        foo.baz() -> foo.baz
        lambda -> hello

    Only the public methods of the mounted objects, or functions/methods
    included directly in the mapping, will be exposed.

    If a method returns a :class:`pykka.Future`, the future will be completed
    and its value unwrapped before the JSON-RPC wrapper returns the response.

    For further details on the JSON-RPC 2.0 spec, see
    http://www.jsonrpc.org/specification

    :param objects: mapping between mounting points and exposed functions or
        class instances
    :type objects: dict
    :param decoders: object builders to be used by :func:`json.loads`
    :type decoders: list of functions taking a dict and returning a dict
    :param encoders: object serializers to be used by :func:`json.dumps`
    :type encoders: list of :class:`json.JSONEncoder` subclasses with the
        method :meth:`default` implemented
    """

    def __init__(self, objects, decoders=None, encoders=None):
        if "" in objects.keys():
            raise AttributeError(
                "The empty string is not allowed as an object mount"
            )
        self.objects = objects
        self.decoder = get_combined_json_decoder(decoders or [])
        self.encoder = get_combined_json_encoder(encoders or [])

    def handle_json(self, request):
        """
        Handles an incoming request encoded as a JSON string.

        Returns a response as a JSON string for commands, and :class:`None`
        for notifications.

        :param request: the serialized JSON-RPC request
        :type request: string
        :rtype: string or :class:`None`
        """
        try:
            request = json.loads(request, object_hook=self.decoder)
        except ValueError:
            response = JsonRpcParseError().get_response()
        else:
            response = self.handle_data(request)
        if response is None:
            return None
        return json.dumps(response, cls=self.encoder)

    def handle_data(self, request):
        """
        Handles an incoming request in the form of a Python data structure.

        Returns a Python data structure for commands, or a :class:`None` for
        notifications.

        :param request: the unserialized JSON-RPC request
        :type request: dict
        :rtype: dict, list, or :class:`None`
        """
        if isinstance(request, list):
            return self._handle_batch(request)
        else:
            return self._handle_single_request(request)

    def _handle_batch(self, requests):
        if not requests:
            return JsonRpcInvalidRequestError(
                data="Batch list cannot be empty"
            ).get_response()

        responses = []
        for request in requests:
            response = self._handle_single_request(request)
            if response:
                responses.append(response)

        return responses or None

    def _handle_single_request(self, request):
        try:
            self._validate_request(request)
            args, kwargs = self._get_params(request)
        except JsonRpcInvalidRequestError as error:
            return error.get_response()

        try:
            method = self._get_method(request["method"])

            try:
                result = method(*args, **kwargs)

                if self._is_notification(request):
                    return None

                result = self._unwrap_result(result)

                return {
                    "jsonrpc": "2.0",
                    "id": request["id"],
                    "result": result,
                }
            except TypeError as error:
                raise JsonRpcInvalidParamsError(
                    data={
                        "type": error.__class__.__name__,
                        "message": str(error),
                        "traceback": traceback.format_exc(),
                    }
                )
            except Exception as error:
                raise JsonRpcApplicationError(
                    data={
                        "type": error.__class__.__name__,
                        "message": str(error),
                        "traceback": traceback.format_exc(),
                    }
                )
        except JsonRpcError as error:
            if self._is_notification(request):
                return None
            return error.get_response(request["id"])

    def _validate_request(self, request):
        if not isinstance(request, dict):
            raise JsonRpcInvalidRequestError(data="Request must be an object")
        if "jsonrpc" not in request:
            raise JsonRpcInvalidRequestError(
                data="'jsonrpc' member must be included"
            )
        if request["jsonrpc"] != "2.0":
            raise JsonRpcInvalidRequestError(
                data="'jsonrpc' value must be '2.0'"
            )
        if "method" not in request:
            raise JsonRpcInvalidRequestError(
                data="'method' member must be included"
            )
        if not isinstance(request["method"], str):
            raise JsonRpcInvalidRequestError(data="'method' must be a string")

    def _get_params(self, request):
        if "params" not in request:
            return [], {}
        params = request["params"]
        if isinstance(params, list):
            return params, {}
        elif isinstance(params, dict):
            return [], params
        else:
            raise JsonRpcInvalidRequestError(
                data="'params', if given, must be an array or an object"
            )

    def _get_method(self, method_path):
        if callable(self.objects.get(method_path, None)):
            # The mounted object is the callable
            return self.objects[method_path]

        # The mounted object contains the callable

        if "." not in method_path:
            raise JsonRpcMethodNotFoundError(
                data=f"Could not find object mount in method name "
                f"{method_path!r}"
            )

        mount, method_name = method_path.rsplit(".", 1)

        if method_name.startswith("_"):
            raise JsonRpcMethodNotFoundError(
                data="Private methods are not exported"
            )

        try:
            obj = self.objects[mount]
        except KeyError:
            raise JsonRpcMethodNotFoundError(
                data=f"No object found at {mount!r}"
            )

        try:
            return getattr(obj, method_name)
        except AttributeError:
            raise JsonRpcMethodNotFoundError(
                data=f"Object mounted at {mount!r} has no member "
                f"{method_name!r}"
            )

    def _is_notification(self, request):
        return "id" not in request

    def _unwrap_result(self, result):
        if isinstance(result, pykka.Future):
            result = result.get()
        return result


class JsonRpcError(Exception):
    code = -32000
    message = "Unspecified server error"

    def __init__(self, data=None):
        self.data = data

    def get_response(self, request_id=None):
        response = {
            "jsonrpc": "2.0",
            "id": request_id,
            "error": {"code": self.code, "message": self.message},
        }
        if self.data:
            response["error"]["data"] = self.data
        return response


class JsonRpcParseError(JsonRpcError):
    code = -32700
    message = "Parse error"


class JsonRpcInvalidRequestError(JsonRpcError):
    code = -32600
    message = "Invalid Request"


class JsonRpcMethodNotFoundError(JsonRpcError):
    code = -32601
    message = "Method not found"


class JsonRpcInvalidParamsError(JsonRpcError):
    code = -32602
    message = "Invalid params"


class JsonRpcApplicationError(JsonRpcError):
    code = 0
    message = "Application error"


def get_combined_json_decoder(decoders):
    def decode(dct):
        for decoder in decoders:
            dct = decoder(dct)
        return dct

    return decode


def get_combined_json_encoder(encoders):
    class JsonRpcEncoder(json.JSONEncoder):
        def default(self, obj):
            for encoder in encoders:
                try:
                    return encoder().default(obj)
                except TypeError:
                    pass  # Try next encoder
            return json.JSONEncoder.default(self, obj)

    return JsonRpcEncoder


class JsonRpcInspector:
    """
    Inspects a group of classes and functions to create a description of what
    methods they can expose over JSON-RPC 2.0.

    To inspect one or more classes, add them all to the objects mapping. The
    key in the mapping is used as the classes' mounting point in the exposed
    API::

        jri = JsonRpcInspector(objects={
            'foo': Foo,
            'hello': lambda: 'Hello, world!',
        })

    Since the inspector is based on inspecting classes and not instances, it
    will not include methods added dynamically. The wrapper works with
    instances, and it will thus export dynamically added methods as well.

    :param objects: mapping between mounts and exposed functions or classes
    :type objects: dict
    """

    def __init__(self, objects):
        if "" in objects.keys():
            raise AttributeError(
                "The empty string is not allowed as an object mount"
            )
        self.objects = objects

    def describe(self):
        """
        Inspects the object and returns a data structure which describes the
        available properties and methods.
        """
        methods = {}
        for mount, obj in self.objects.items():
            if inspect.isroutine(obj):
                methods[mount] = self._describe_method(obj)
            else:
                obj_methods = self._get_methods(obj)
                for name, description in obj_methods.items():
                    if mount:
                        name = f"{mount}.{name}"
                    methods[name] = description
        return methods

    def _get_methods(self, obj):
        methods = {}
        for name, value in inspect.getmembers(obj):
            if name.startswith("_"):
                continue
            if not inspect.isroutine(value):
                continue
            method = self._describe_method(value)
            if method:
                methods[name] = method
        return methods

    def _describe_method(self, method):
        return {
            "description": inspect.getdoc(method),
            "params": self._describe_params(method),
        }

    def _describe_params(self, method):
        argspec = inspect.getfullargspec(method)

        defaults = argspec.defaults and list(argspec.defaults) or []
        num_args_without_default = len(argspec.args) - len(defaults)
        no_defaults = [None] * num_args_without_default
        defaults = no_defaults + defaults

        params = []

        for arg, _default in zip(argspec.args, defaults):
            if arg == "self":
                continue
            params.append({"name": arg})

        if argspec.defaults:
            for i, default in enumerate(reversed(argspec.defaults)):
                params[len(params) - i - 1]["default"] = default

        if argspec.varargs:
            params.append({"name": argspec.varargs, "varargs": True})

        if argspec.varkw:
            params.append({"name": argspec.varkw, "kwargs": True})

        return params
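
Example (editor's illustration, not part of the committed file): a minimal round trip through the wrapper and the inspector, using a hypothetical Calculator class.

from mopidy.internal import jsonrpc


class Calculator:
    def add(self, a, b):
        """Add two numbers."""
        return a + b


jrw = jsonrpc.JsonRpcWrapper(objects={"calc": Calculator()})
request = '{"jsonrpc": "2.0", "id": 1, "method": "calc.add", "params": [2, 3]}'
print(jrw.handle_json(request))
# -> {"jsonrpc": "2.0", "id": 1, "result": 5}

jri = jsonrpc.JsonRpcInspector(objects={"calc": Calculator})
print(jri.describe()["calc.add"]["params"])
# -> [{'name': 'a'}, {'name': 'b'}]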
199
venv/lib/python3.7/site-packages/mopidy/internal/log.py
Normal file
@@ -0,0 +1,199 @@
import logging
import logging.config
import logging.handlers
import platform

LOG_LEVELS = {
    -1: dict(root=logging.ERROR, mopidy=logging.WARNING),
    0: dict(root=logging.ERROR, mopidy=logging.INFO),
    1: dict(root=logging.WARNING, mopidy=logging.DEBUG),
    2: dict(root=logging.INFO, mopidy=logging.DEBUG),
    3: dict(root=logging.DEBUG, mopidy=logging.DEBUG),
    4: dict(root=logging.NOTSET, mopidy=logging.NOTSET),
}

# Custom log level which has even lower priority than DEBUG
TRACE_LOG_LEVEL = 5
logging.addLevelName(TRACE_LOG_LEVEL, "TRACE")

logger = logging.getLogger(__name__)


class DelayedHandler(logging.Handler):
    def __init__(self):
        logging.Handler.__init__(self)
        self._released = False
        self._buffer = []

    def handle(self, record):
        if not self._released:
            self._buffer.append(record)

    def release(self):
        self._released = True
        root = logging.getLogger("")
        while self._buffer:
            root.handle(self._buffer.pop(0))


_delayed_handler = DelayedHandler()


def bootstrap_delayed_logging():
    root = logging.getLogger("")
    root.setLevel(logging.NOTSET)
    root.addHandler(_delayed_handler)


def setup_logging(config, base_verbosity_level, args_verbosity_level):
    logging.captureWarnings(True)

    if config["logging"]["config_file"]:
        # Logging config from file must be read before other handlers are
        # added. If not, the other handlers will have no effect.
        try:
            path = config["logging"]["config_file"]
            logging.config.fileConfig(path, disable_existing_loggers=False)
        except Exception as e:
            # Catch everything as logging does not specify what can go wrong.
            logger.error("Loading logging config %r failed. %s", path, e)

    loglevels = config.get("loglevels", {})

    verbosity_level = get_verbosity_level(
        config, base_verbosity_level, args_verbosity_level
    )
    verbosity_filter = VerbosityFilter(verbosity_level, loglevels)

    formatter = logging.Formatter(config["logging"]["format"])

    if config["logging"]["color"]:
        handler = ColorizingStreamHandler(config.get("logcolors", {}))
    else:
        handler = logging.StreamHandler()
    handler.addFilter(verbosity_filter)
    handler.setFormatter(formatter)

    logging.getLogger("").addHandler(handler)

    _delayed_handler.release()


def get_verbosity_level(config, base_verbosity_level, args_verbosity_level):
    if args_verbosity_level:
        result = base_verbosity_level + args_verbosity_level
    else:
        result = base_verbosity_level + config["logging"]["verbosity"]

    if result < min(LOG_LEVELS.keys()):
        result = min(LOG_LEVELS.keys())
    if result > max(LOG_LEVELS.keys()):
        result = max(LOG_LEVELS.keys())

    return result


class VerbosityFilter(logging.Filter):
    def __init__(self, verbosity_level, loglevels):
        self.verbosity_level = verbosity_level
        self.loglevels = loglevels

    def filter(self, record):
        for name, required_log_level in self.loglevels.items():
            if record.name == name or record.name.startswith(name + "."):
                return record.levelno >= required_log_level

        if record.name.startswith("mopidy"):
            required_log_level = LOG_LEVELS[self.verbosity_level]["mopidy"]
        else:
            required_log_level = LOG_LEVELS[self.verbosity_level]["root"]
        return record.levelno >= required_log_level


#: Available log colors.
COLORS = [
    "black",
    "red",
    "green",
    "yellow",
    "blue",
    "magenta",
    "cyan",
    "white",
]


class ColorizingStreamHandler(logging.StreamHandler):
    """
    Stream handler which colorizes the log using ANSI escape sequences.

    Does nothing on Windows, which doesn't support ANSI escape sequences.

    This implementation is based upon https://gist.github.com/vsajip/758430,
    which is:

        Copyright (C) 2010-2012 Vinay Sajip. All rights reserved.
        Licensed under the new BSD license.
    """

    # Map logging levels to (background, foreground, bold/intense)
    level_map = {
        TRACE_LOG_LEVEL: (None, "blue", False),
        logging.DEBUG: (None, "blue", False),
        logging.INFO: (None, "white", False),
        logging.WARNING: (None, "yellow", False),
        logging.ERROR: (None, "red", False),
        logging.CRITICAL: ("red", "white", True),
    }
    # Map logger name to foreground colors
    logger_map = {}

    csi = "\x1b["
    reset = "\x1b[0m"

    is_windows = platform.system() == "Windows"

    def __init__(self, logger_colors):
        super().__init__()
        self.logger_map = logger_colors

    @property
    def is_tty(self):
        isatty = getattr(self.stream, "isatty", None)
        return isatty and isatty()

    def emit(self, record):
        try:
            message = self.format(record)
            self.stream.write(message)
            self.stream.write(getattr(self, "terminator", "\n"))
            self.flush()
        except Exception:
            self.handleError(record)

    def format(self, record):
        message = logging.StreamHandler.format(self, record)
        if not self.is_tty or self.is_windows:
            return message
        for name, color in self.logger_map.items():
            if record.name.startswith(name):
                return self.colorize(message, fg=color)
        if record.levelno in self.level_map:
            bg, fg, bold = self.level_map[record.levelno]
            return self.colorize(message, bg=bg, fg=fg, bold=bold)
        return message

    def colorize(self, message, bg=None, fg=None, bold=False):
        params = []
        if bg in COLORS:
            params.append(str(COLORS.index(bg) + 40))
        if fg in COLORS:
            params.append(str(COLORS.index(fg) + 30))
        if bold:
            params.append("1")
        if params:
            message = "".join(
                (self.csi, ";".join(params), "m", message, self.reset)
            )
        return message
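
Example (editor's illustration, not part of the committed file): a sketch of the delayed-logging bootstrap. Records emitted before handlers exist are buffered, then flushed once setup completes. setup_logging() normally calls release(); the module-private _delayed_handler is poked directly here only for illustration.

import logging

from mopidy.internal import log

log.bootstrap_delayed_logging()
logging.getLogger("mopidy.example").info("buffered until setup")

logging.getLogger("").addHandler(logging.StreamHandler())
log._delayed_handler.release()  # replays the buffered record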
142
venv/lib/python3.7/site-packages/mopidy/internal/models.py
Normal file
@@ -0,0 +1,142 @@
from mopidy.internal import validation
from mopidy.models import Ref, TlTrack, fields
from mopidy.models.immutable import ValidatedImmutableObject


class HistoryTrack(ValidatedImmutableObject):
    """
    A history track. Wraps a :class:`Ref` and its timestamp.

    :param timestamp: the timestamp
    :type timestamp: int
    :param track: the track reference
    :type track: :class:`Ref`
    """

    # The timestamp. Read-only.
    timestamp = fields.Integer()

    # The track reference. Read-only.
    track = fields.Field(type=Ref)


class HistoryState(ValidatedImmutableObject):
    """
    State of the history controller.
    Internally used for save/load state.

    :param history: the track history
    :type history: list of :class:`HistoryTrack`
    """

    # The tracks. Read-only.
    history = fields.Collection(type=HistoryTrack, container=tuple)


class MixerState(ValidatedImmutableObject):
    """
    State of the mixer controller.
    Internally used for save/load state.

    :param volume: the volume
    :type volume: int
    :param mute: the mute state
    :type mute: int
    """

    # The volume. Read-only.
    volume = fields.Integer(min=0, max=100)

    # The mute state. Read-only.
    mute = fields.Boolean(default=False)


class PlaybackState(ValidatedImmutableObject):
    """
    State of the playback controller.
    Internally used for save/load state.

    :param tlid: current track tlid
    :type tlid: int
    :param time_position: play position
    :type time_position: int
    :param state: playback state
    :type state: :class:`validation.PLAYBACK_STATES`
    """

    # The tlid of the currently playing track. Read-only.
    tlid = fields.Integer(min=1)

    # The playback position. Read-only.
    time_position = fields.Integer(min=0)

    # The playback state. Read-only.
    state = fields.Field(choices=validation.PLAYBACK_STATES)


class TracklistState(ValidatedImmutableObject):
    """
    State of the tracklist controller.
    Internally used for save/load state.

    :param repeat: the repeat mode
    :type repeat: bool
    :param consume: the consume mode
    :type consume: bool
    :param random: the random mode
    :type random: bool
    :param single: the single mode
    :type single: bool
    :param next_tlid: the id for the next added track
    :type next_tlid: int
    :param tl_tracks: the list of tracks
    :type tl_tracks: list of :class:`TlTrack`
    """

    # The repeat mode. Read-only.
    repeat = fields.Boolean()

    # The consume mode. Read-only.
    consume = fields.Boolean()

    # The random mode. Read-only.
    random = fields.Boolean()

    # The single mode. Read-only.
    single = fields.Boolean()

    # The tlid to assign to the next added track. Read-only.
    next_tlid = fields.Integer(min=0)

    # The list of tracks. Read-only.
    tl_tracks = fields.Collection(type=TlTrack, container=tuple)


class CoreState(ValidatedImmutableObject):
    """
    State of all core controllers.
    Internally used for save/load state.

    :param history: state of the history controller
    :type history: :class:`HistoryState`
    :param mixer: state of the mixer controller
    :type mixer: :class:`MixerState`
    :param playback: state of the playback controller
    :type playback: :class:`PlaybackState`
    :param tracklist: state of the tracklist controller
    :type tracklist: :class:`TracklistState`
    """

    # State of the history controller.
    history = fields.Field(type=HistoryState)

    # State of the mixer controller.
    mixer = fields.Field(type=MixerState)

    # State of the playback controller.
    playback = fields.Field(type=PlaybackState)

    # State of the tracklist controller.
    tracklist = fields.Field(type=TracklistState)
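
Example (editor's illustration, not part of the committed file): the field descriptors validate values at construction time, so an invalid snapshot cannot be created.

from mopidy.internal import models

state = models.PlaybackState(tlid=1, time_position=0, state="paused")
print(state.state)  # -> 'paused'

# Raises a validation error: "flying" is not in validation.PLAYBACK_STATES.
# models.PlaybackState(tlid=1, time_position=0, state="flying")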
31
venv/lib/python3.7/site-packages/mopidy/internal/network.py
Normal file
@@ -0,0 +1,31 @@
import logging
import re
import socket

logger = logging.getLogger(__name__)


def try_ipv6_socket():
    """Determine if system really supports IPv6"""
    if not socket.has_ipv6:
        return False
    try:
        socket.socket(socket.AF_INET6).close()
        return True
    except OSError as exc:
        logger.debug(
            f"Platform supports IPv6, but socket creation failed, "
            f"disabling: {exc}"
        )
        return False


#: Boolean value that indicates if creating an IPv6 socket will succeed.
has_ipv6 = try_ipv6_socket()


def format_hostname(hostname):
    """Format hostname for display."""
    if has_ipv6 and re.match(r"\d+\.\d+\.\d+\.\d+", hostname) is not None:
        hostname = f"::ffff:{hostname}"
    return hostname
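
Example (editor's illustration, not part of the committed file): on IPv6-capable systems, IPv4 addresses are rewritten to their IPv4-mapped IPv6 form so a single IPv6 socket can serve both address families.

from mopidy.internal import network

print(network.format_hostname("127.0.0.1"))  # -> '::ffff:127.0.0.1' if has_ipv6
print(network.format_hostname("localhost"))  # unchanged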
104
venv/lib/python3.7/site-packages/mopidy/internal/path.py
Normal file
@@ -0,0 +1,104 @@
import logging
import pathlib
import re
import urllib.parse

from mopidy.internal import xdg

logger = logging.getLogger(__name__)


XDG_DIRS = xdg.get_dirs()


def get_or_create_dir(dir_path):
    dir_path = expand_path(dir_path)
    if dir_path.is_file():
        raise OSError(
            f"A file with the same name as the desired dir, "
            f"{dir_path!r}, already exists."
        )
    elif not dir_path.is_dir():
        logger.info(f"Creating dir {dir_path.as_uri()}")
        dir_path.mkdir(mode=0o755, parents=True)
    return dir_path


def get_or_create_file(file_path, mkdir=True, content=None):
    file_path = expand_path(file_path)
    if isinstance(content, str):
        content = content.encode()
    if mkdir:
        get_or_create_dir(file_path.parent)
    if not file_path.is_file():
        logger.info(f"Creating file {file_path.as_uri()}")
        file_path.touch(exist_ok=False)
        if content is not None:
            file_path.write_bytes(content)
    return file_path


def get_unix_socket_path(socket_path):
    match = re.search("^unix:(.*)", socket_path)
    if not match:
        return None
    return match.group(1)


def path_to_uri(path):
    """
    Convert OS specific path to file:// URI.

    Accepts either unicode strings or bytestrings. The encoding of any
    bytestring will be maintained so that :func:`uri_to_path` can return the
    same bytestring.

    Returns a file:// URI as a unicode string.
    """
    return pathlib.Path(path).as_uri()


def uri_to_path(uri):
    """
    Convert a URI to an OS specific path.
    """
    bytes_path = urllib.parse.unquote_to_bytes(urllib.parse.urlsplit(uri).path)
    unicode_path = bytes_path.decode(errors="surrogateescape")
    return pathlib.Path(unicode_path)


def expand_path(path):
    if isinstance(path, bytes):
        path = path.decode(errors="surrogateescape")
    path = str(pathlib.Path(path))

    for xdg_var, xdg_dir in XDG_DIRS.items():
        path = path.replace("$" + xdg_var, str(xdg_dir))
    if "$" in path:
        return None

    return pathlib.Path(path).expanduser().resolve()


def is_path_inside_base_dir(path, base_path):
    if isinstance(path, bytes):
        path = path.decode(errors="surrogateescape")
    if isinstance(base_path, bytes):
        base_path = base_path.decode(errors="surrogateescape")

    path = pathlib.Path(path).resolve()
    base_path = pathlib.Path(base_path).resolve()

    if path.is_file():
        # Use the dir of the file for the prefix comparison, so we don't
        # accept /tmp/foo.m3u as being inside /tmp/foo, simply because they
        # have a common prefix, /tmp/foo, which matches the base path,
        # /tmp/foo.
        path = path.parent

    # Check if dir of file is the base path or a subdir
    try:
        path.relative_to(base_path)
    except ValueError:
        return False
    else:
        return True
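
Example (editor's illustration, not part of the committed file): the path helpers in action. Printed values depend on the local XDG setup.

from mopidy.internal import path

# XDG variables are substituted; any unresolved variable yields None.
print(path.expand_path("$XDG_CONFIG_DIR/mopidy/mopidy.conf"))
print(path.expand_path("$UNKNOWN_VAR/mopidy.conf"))  # -> None

print(path.path_to_uri("/tmp/test.flac"))  # -> 'file:///tmp/test.flac'
print(path.is_path_inside_base_dir("/tmp/music/a.flac", "/tmp/music"))  # True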
137
venv/lib/python3.7/site-packages/mopidy/internal/playlists.py
Normal file
@@ -0,0 +1,137 @@
import configparser
import io
import xml.etree.ElementTree as elementtree  # noqa: N813

from mopidy.internal import validation


def parse(data):
    handlers = {
        detect_extm3u_header: parse_extm3u,
        detect_pls_header: parse_pls,
        detect_asx_header: parse_asx,
        detect_xspf_header: parse_xspf,
    }
    for detector, parser in handlers.items():
        if detector(data):
            return list(parser(data))
    return list(parse_urilist(data))  # Fallback


def detect_extm3u_header(data):
    return data[0:7].upper() == b"#EXTM3U"


def detect_pls_header(data):
    return data[0:10].lower() == b"[playlist]"


def detect_xspf_header(data):
    data = data[0:150]
    if b"xspf" not in data.lower():
        return False

    try:
        data = io.BytesIO(data)
        for _event, element in elementtree.iterparse(data, events=["start"]):
            return element.tag.lower() == "{http://xspf.org/ns/0/}playlist"
    except elementtree.ParseError:
        pass
    return False


def detect_asx_header(data):
    data = data[0:50]
    if b"asx" not in data.lower():
        return False

    try:
        data = io.BytesIO(data)
        for _event, element in elementtree.iterparse(data, events=["start"]):
            return element.tag.lower() == "asx"
    except elementtree.ParseError:
        pass
    return False


def parse_extm3u(data):
    # TODO: convert non-URIs to file URIs.
    found_header = False
    for line in data.splitlines():
        if found_header or line.startswith(b"#EXTM3U"):
            found_header = True
        else:
            continue

        if not line.strip() or line.startswith(b"#"):
            continue

        try:
            line = line.decode()
        except UnicodeDecodeError:
            continue

        yield line.strip()


def parse_pls(data):
    # TODO: convert non-URIs to file URIs.
    try:
        cp = configparser.RawConfigParser()
        cp.read_string(data.decode())
    except configparser.Error:
        return

    for section in cp.sections():
        if section.lower() != "playlist":
            continue
        for i in range(cp.getint(section, "numberofentries")):
            yield cp.get(section, f"file{i + 1}").strip("\"'")


def parse_xspf(data):
    try:
        # Last element will be root.
        for _event, element in elementtree.iterparse(io.BytesIO(data)):
            element.tag = element.tag.lower()  # normalize
    except elementtree.ParseError:
        return

    ns = "http://xspf.org/ns/0/"
    path = f"{{{ns}}}tracklist/{{{ns}}}track"
    for track in element.iterfind(path):
        yield track.findtext(f"{{{ns}}}location")


def parse_asx(data):
    try:
        # Last element will be root.
        for _event, element in elementtree.iterparse(io.BytesIO(data)):
            element.tag = element.tag.lower()  # normalize
    except elementtree.ParseError:
        return

    for ref in element.findall("entry/ref[@href]"):
        yield ref.get("href", "").strip()

    for entry in element.findall("entry[@href]"):
        yield entry.get("href", "").strip()


def parse_urilist(data):
    for line in data.splitlines():
        if not line.strip() or line.startswith(b"#"):
            continue

        try:
            line = line.decode()
        except UnicodeDecodeError:
            continue

        try:
            validation.check_uri(line)
        except ValueError:
            continue

        yield line.strip()
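
Example (editor's illustration, not part of the committed file): parse() sniffs the playlist format from the header and dispatches to the matching parser.

from mopidy.internal import playlists

data = b"""#EXTM3U
#EXTINF:123,Example stream
http://example.com/stream.mp3
"""
print(playlists.parse(data))
# -> ['http://example.com/stream.mp3']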
53
venv/lib/python3.7/site-packages/mopidy/internal/process.py
Normal file
@@ -0,0 +1,53 @@
import _thread
import logging
import threading

import pykka

logger = logging.getLogger(__name__)


def exit_process():
    logger.debug("Interrupting main...")
    _thread.interrupt_main()
    logger.debug("Interrupted main")


def sigterm_handler(signum, frame):
    """A :mod:`signal` handler which will exit the program on signal.

    This function is not called when the process' main thread is running a
    GLib mainloop. In that case, the GLib mainloop must listen for SIGTERM
    signals and quit itself.

    For Mopidy subcommands that do not run the GLib mainloop, this handler
    ensures a proper shutdown of the process on SIGTERM.
    """
    logger.info("Got SIGTERM signal. Exiting...")
    exit_process()


def stop_actors_by_class(klass):
    actors = pykka.ActorRegistry.get_by_class(klass)
    logger.debug("Stopping %d instance(s) of %s", len(actors), klass.__name__)
    for actor in actors:
        actor.stop()


def stop_remaining_actors():
    num_actors = len(pykka.ActorRegistry.get_all())
    while num_actors:
        logger.error(
            "There are actor threads still running, this is probably a bug"
        )
        logger.debug(
            "Seeing %d actor and %d non-actor thread(s): %s",
            num_actors,
            threading.active_count() - num_actors,
            ", ".join([t.name for t in threading.enumerate()]),
        )
        logger.debug("Stopping %d actor(s)...", num_actors)
        pykka.ActorRegistry.stop_all()
        num_actors = len(pykka.ActorRegistry.get_all())
    logger.debug("All actors stopped.")
59
venv/lib/python3.7/site-packages/mopidy/internal/storage.py
Normal file
@@ -0,0 +1,59 @@
import gzip
import json
import logging
import pathlib
import tempfile

from mopidy import models

logger = logging.getLogger(__name__)


def load(path):
    """
    Deserialize data from file.

    :param path: full path to import file
    :type path: pathlib.Path
    :return: deserialized data
    :rtype: dict
    """

    # TODO: raise an exception in case of error?
    if not path.is_file():
        logger.info("File does not exist: %s", path)
        return {}
    try:
        with gzip.open(str(path), "rb") as fp:
            return json.load(fp, object_hook=models.model_json_decoder)
    except (OSError, ValueError) as exc:
        logger.warning(f"Loading JSON failed: {exc}")
        return {}


def dump(path, data):
    """
    Serialize data to file.

    :param path: full path to export file
    :type path: pathlib.Path
    :param data: dictionary containing data to save
    :type data: dict
    """

    # TODO: clean up directory/basename.* files.
    tmp = tempfile.NamedTemporaryFile(
        prefix=path.name + ".", dir=str(path.parent), delete=False
    )
    tmp_path = pathlib.Path(tmp.name)

    try:
        data_string = json.dumps(
            data, cls=models.ModelJSONEncoder, indent=2, separators=(",", ": ")
        )
        with gzip.GzipFile(fileobj=tmp, mode="wb") as fp:
            fp.write(data_string.encode())
        tmp_path.rename(path)
    finally:
        if tmp_path.exists():
            tmp_path.unlink()
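
Example (editor's illustration, not part of the committed file): a round trip through dump() and load(); models survive serialization via the JSON hooks in mopidy.models. The path is a placeholder.

import pathlib

from mopidy.internal import storage
from mopidy.models import Track

path = pathlib.Path("/tmp/state.json.gz")
storage.dump(path, {"track": Track(uri="dummy:a", name="A")})
print(storage.load(path))  # the Track model round-trips intact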
14
venv/lib/python3.7/site-packages/mopidy/internal/timer.py
Normal file
@@ -0,0 +1,14 @@
import contextlib
import logging
import time

from mopidy.internal import log

logger = logging.getLogger(__name__)


@contextlib.contextmanager
def time_logger(name, level=log.TRACE_LOG_LEVEL):
    start = time.time()
    yield
    logger.log(level, "%s took %dms", name, (time.time() - start) * 1000)
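
Example (editor's illustration, not part of the committed file): timing an arbitrary block of code.

import logging
import time

from mopidy.internal import timer

logging.basicConfig(level=logging.DEBUG)

with timer.time_logger("sleep", level=logging.DEBUG):
    time.sleep(0.05)
# Logs roughly: "sleep took 50ms"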
133
venv/lib/python3.7/site-packages/mopidy/internal/validation.py
Normal file
@@ -0,0 +1,133 @@
import urllib.parse
from collections.abc import Iterable, Mapping

from mopidy import exceptions

PLAYBACK_STATES = {"paused", "stopped", "playing"}

SEARCH_FIELDS = {
    "uri",
    "track_name",
    "album",
    "artist",
    "albumartist",
    "composer",
    "performer",
    "track_no",
    "genre",
    "date",
    "comment",
    "any",
}

PLAYLIST_FIELDS = {"uri", "name"}  # TODO: add length and last_modified?

TRACKLIST_FIELDS = {  # TODO: add bitrate, length, disc_no, track_no, modified?
    "uri",
    "name",
    "genre",
    "date",
    "comment",
    "musicbrainz_id",
}

DISTINCT_FIELDS = {
    "track",
    "artist",
    "albumartist",
    "album",
    "composer",
    "performer",
    "date",
    "genre",
}


# TODO: _check_iterable(check, msg, **kwargs) + [check(a) for a in arg]?
def _check_iterable(arg, msg, **kwargs):
    """Ensure we have an iterable which is not a string or an iterator"""
    if isinstance(arg, str):
        raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
    elif not isinstance(arg, Iterable):
        raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
    elif iter(arg) is iter(arg):
        raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))


def check_choice(arg, choices, msg="Expected one of {choices}, not {arg!r}"):
    if arg not in choices:
        raise exceptions.ValidationError(
            msg.format(arg=arg, choices=tuple(choices))
        )


def check_boolean(arg, msg="Expected a boolean, not {arg!r}"):
    check_instance(arg, bool, msg=msg)


def check_instance(arg, cls, msg="Expected a {name} instance, not {arg!r}"):
    if not isinstance(arg, cls):
        raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__))


def check_instances(arg, cls, msg="Expected a list of {name}, not {arg!r}"):
    _check_iterable(arg, msg, name=cls.__name__)
    if not all(isinstance(instance, cls) for instance in arg):
        raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__))


def check_integer(arg, min=None, max=None):
    if not isinstance(arg, int):
        raise exceptions.ValidationError(f"Expected an integer, not {arg!r}")
    elif min is not None and arg < min:
        raise exceptions.ValidationError(
            f"Expected number larger or equal to {min}, not {arg!r}"
        )
    elif max is not None and arg > max:
        raise exceptions.ValidationError(
            f"Expected number smaller or equal to {max}, not {arg!r}"
        )


def check_query(arg, fields=SEARCH_FIELDS, list_values=True):
    # TODO: normalize name -> track_name
    # TODO: normalize value -> [value]
    # TODO: normalize blank -> [] or just remove field?
    # TODO: remove list_values?

    if not isinstance(arg, Mapping):
        raise exceptions.ValidationError(
            f"Expected a query dictionary, not {arg!r}"
        )

    for key, value in arg.items():
        check_choice(
            key,
            fields,
            msg="Expected query field to be one of {choices}, not {arg!r}",
        )
        if list_values:
            msg = 'Expected "{key}" to be list of strings, not {arg!r}'
            _check_iterable(value, msg, key=key)
            [_check_query_value(key, v, msg) for v in value]
        else:
            _check_query_value(
                key, value, 'Expected "{key}" to be a string, not {arg!r}'
            )


def _check_query_value(key, arg, msg):
    if not isinstance(arg, str) or not arg.strip():
        raise exceptions.ValidationError(msg.format(arg=arg, key=key))


def check_uri(arg, msg="Expected a valid URI, not {arg!r}"):
    if not isinstance(arg, str):
        raise exceptions.ValidationError(msg.format(arg=arg))
    elif urllib.parse.urlparse(arg).scheme == "":
        raise exceptions.ValidationError(msg.format(arg=arg))


def check_uris(arg, msg="Expected a list of URIs, not {arg!r}"):
    _check_iterable(arg, msg)
    [check_uri(a, msg) for a in arg]
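
Example (editor's illustration, not part of the committed file): check_query() accepts a mapping of known fields to lists of non-empty strings and raises ValidationError otherwise.

from mopidy import exceptions
from mopidy.internal import validation

validation.check_query({"artist": ["Beatles"]})  # passes silently

try:
    validation.check_query({"artist": "Beatles"})  # bare string, not a list
except exceptions.ValidationError as error:
    print(error)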
@@ -0,0 +1,29 @@
import os
import subprocess

import mopidy


def get_version():
    try:
        return get_git_version()
    except OSError:
        return mopidy.__version__


def get_git_version():
    project_dir = os.path.abspath(
        os.path.join(os.path.dirname(mopidy.__file__), "..")
    )
    process = subprocess.Popen(
        ["git", "describe"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=project_dir,
    )
    if process.wait() != 0:
        raise OSError('Execution of "git describe" failed')
    version = process.stdout.read().strip().decode()
    if version.startswith("v"):
        version = version[1:]
    return version
68
venv/lib/python3.7/site-packages/mopidy/internal/xdg.py
Normal file
@@ -0,0 +1,68 @@
import configparser
import os
import pathlib


def get_dirs():
    """Returns a dict of all the known XDG Base Directories for the current
    user.

    The keys ``XDG_CACHE_DIR``, ``XDG_CONFIG_DIR``, and ``XDG_DATA_DIR`` are
    always available.

    Additional keys, like ``XDG_MUSIC_DIR``, may be available if the
    ``$XDG_CONFIG_DIR/user-dirs.dirs`` file exists and is parseable.

    See http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
    for the XDG Base Directory specification.
    """

    dirs = {
        "XDG_CACHE_DIR": pathlib.Path(
            os.getenv("XDG_CACHE_HOME", "~/.cache")
        ).expanduser(),
        "XDG_CONFIG_DIR": pathlib.Path(
            os.getenv("XDG_CONFIG_HOME", "~/.config")
        ).expanduser(),
        "XDG_DATA_DIR": pathlib.Path(
            os.getenv("XDG_DATA_HOME", "~/.local/share")
        ).expanduser(),
    }

    dirs.update(_get_user_dirs(dirs["XDG_CONFIG_DIR"]))

    return dirs


def _get_user_dirs(xdg_config_dir):
    """Returns a dict of XDG dirs read from
    ``$XDG_CONFIG_HOME/user-dirs.dirs``.

    This is used at import time for most users of :mod:`mopidy`. By rolling
    our own implementation instead of using :meth:`glib.get_user_special_dir`
    we make it possible for many extensions to run their test suites, which
    are importing parts of :mod:`mopidy`, in a virtualenv with global
    site-packages disabled, and thus no :mod:`glib` available.
    """

    dirs_file = xdg_config_dir / "user-dirs.dirs"

    if not dirs_file.exists():
        return {}

    data = dirs_file.read_bytes()
    data = b"[XDG_USER_DIRS]\n" + data
    data = data.replace(b"$HOME", bytes(pathlib.Path.home()))
    data = data.replace(b'"', b"")

    config = configparser.RawConfigParser()
    config.read_string(data.decode())

    result = {}
    for k, v in config.items("XDG_USER_DIRS"):
        if v is None:
            continue
        if isinstance(k, bytes):
            k = k.decode()
        result[k.upper()] = pathlib.Path(v).resolve()

    return result
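
Example (editor's illustration, not part of the committed file): looking up the resolved XDG directories; printed values depend on the local environment.

from mopidy.internal import xdg

dirs = xdg.get_dirs()
print(dirs["XDG_CONFIG_DIR"])  # e.g. /home/alice/.config
print(dirs.get("XDG_MUSIC_DIR", "not configured"))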