Python 3 migration

This commit is contained in:
MariuszC
2020-01-18 20:01:00 +01:00
parent ea05af2d15
commit 6cd7e0fe44
691 changed files with 201846 additions and 598 deletions

View File

@@ -0,0 +1,15 @@
import platform
import sys
import warnings
import pkg_resources
# Bail out early on unsupported interpreters, before importing anything that
# may rely on Python 3.7+ features.
if sys.version_info < (3, 7):
    sys.exit(
        f"ERROR: Mopidy requires Python >= 3.7, "
        f"but found {platform.python_version()}."
    )

# This warning is noise on headless systems; silence it.
warnings.filterwarnings("ignore", "could not open display")

# Single source of truth for the version: the installed distribution metadata.
__version__ = pkg_resources.get_distribution("Mopidy").version

View File

@@ -0,0 +1,231 @@
import logging
import signal
import sys
import pykka.debug
from mopidy import commands
from mopidy import config as config_lib
from mopidy import ext
from mopidy.internal import log, path, process, versioning
from mopidy.internal.gi import Gst # noqa: F401
# Optional integration with python-dbus: route its callbacks through GLib's
# mainloop so D-Bus events share the event loop Mopidy already runs. If
# python-dbus is not installed this is silently skipped.
try:
    # Make GLib's mainloop the event loop for python-dbus
    import dbus.mainloop.glib

    dbus.mainloop.glib.threads_init()
    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
except ImportError:
    pass

logger = logging.getLogger(__name__)
def main():
    """Mopidy's command line entry point.

    Bootstraps logging and signal handling, loads config and extensions,
    classifies extensions as enabled/disabled, and dispatches to the selected
    (sub)command. Returns the command's exit status; returns 1 for a disabled
    extension's command or an unimplemented command.
    """
    log.bootstrap_delayed_logging()
    logger.info(f"Starting Mopidy {versioning.get_version()}")

    signal.signal(signal.SIGTERM, process.sigterm_handler)
    # Windows does not have signal.SIGUSR1
    if hasattr(signal, "SIGUSR1"):
        signal.signal(signal.SIGUSR1, pykka.debug.log_thread_tracebacks)

    try:
        registry = ext.Registry()

        root_cmd = commands.RootCommand()
        config_cmd = commands.ConfigCommand()
        deps_cmd = commands.DepsCommand()

        root_cmd.set(extension=None, registry=registry)
        root_cmd.add_child("config", config_cmd)
        root_cmd.add_child("deps", deps_cmd)

        extensions_data = ext.load_extensions()

        # Let each extension contribute its own subcommand, if it has one.
        for data in extensions_data:
            if data.command:  # TODO: check isinstance?
                data.command.set(extension=data.extension)
                root_cmd.add_child(data.extension.ext_name, data.command)

        args = root_cmd.parse(sys.argv[1:])

        config, config_errors = config_lib.load(
            args.config_files,
            [d.config_schema for d in extensions_data],
            [d.config_defaults for d in extensions_data],
            args.config_overrides,
        )

        create_core_dirs(config)
        create_initial_config_file(args, extensions_data)

        log.setup_logging(
            config, args.base_verbosity_level, args.verbosity_level
        )

        # Buckets for the four possible fates of an extension.
        extensions = {
            "validate": [],
            "config": [],
            "disabled": [],
            "enabled": [],
        }
        for data in extensions_data:
            extension = data.extension

            # TODO: factor out all of this to a helper that can be tested
            if not ext.validate_extension_data(data):
                config[extension.ext_name] = {"enabled": False}
                config_errors[extension.ext_name] = {
                    "enabled": "extension disabled by self check."
                }
                extensions["validate"].append(extension)
            elif not config[extension.ext_name]["enabled"]:
                config[extension.ext_name] = {"enabled": False}
                config_errors[extension.ext_name] = {
                    "enabled": "extension disabled by user config."
                }
                extensions["disabled"].append(extension)
            elif config_errors.get(extension.ext_name):
                config[extension.ext_name]["enabled"] = False
                config_errors[extension.ext_name][
                    "enabled"
                ] = "extension disabled due to config errors."
                extensions["config"].append(extension)
            else:
                extensions["enabled"].append(extension)

        log_extension_info(
            [d.extension for d in extensions_data], extensions["enabled"]
        )

        # Config and deps commands are simply special cased for now.
        if args.command == config_cmd:
            schemas = [d.config_schema for d in extensions_data]
            return args.command.run(config, config_errors, schemas)
        elif args.command == deps_cmd:
            return args.command.run()

        check_config_errors(config, config_errors, extensions)

        if not extensions["enabled"]:
            logger.error("No extension enabled, exiting...")
            sys.exit(1)

        # Read-only config from here on, please.
        proxied_config = config_lib.Proxy(config)

        if args.extension and args.extension not in extensions["enabled"]:
            logger.error(
                "Unable to run command provided by disabled extension %s",
                args.extension.ext_name,
            )
            return 1

        for extension in extensions["enabled"]:
            try:
                extension.setup(registry)
            except Exception:
                # TODO: would be nice a transactional registry. But sadly this
                # is a bit tricky since our current API is giving out a mutable
                # list. We might however be able to replace this with a
                # collections.Sequence to provide a RO view.
                logger.exception(
                    f"Extension {extension.ext_name} failed during setup. "
                    f"This might have left the registry in a bad state."
                )

        # Anything that wants to exit after this point must use
        # mopidy.internal.process.exit_process as actors can have been started.
        try:
            return args.command.run(args, proxied_config)
        except NotImplementedError:
            print(root_cmd.format_help())
            return 1
    except KeyboardInterrupt:
        pass
    except Exception as ex:
        logger.exception(ex)
        raise
def create_core_dirs(config):
    """Ensure the core cache, config, and data directories exist."""
    for dir_key in ("cache_dir", "config_dir", "data_dir"):
        path.get_or_create_dir(config["core"][dir_key])
def create_initial_config_file(args, extensions_data):
    """Initialize whatever the last config file is with defaults"""
    config_file = path.expand_path(args.config_files[-1])
    if config_file.exists():
        # Never overwrite an existing config file.
        return
    try:
        default = config_lib.format_initial(extensions_data)
        path.get_or_create_file(
            config_file,
            mkdir=False,
            # surrogateescape round-trips any undecodable bytes that ended up
            # in the formatted defaults back to their original byte values.
            content=default.encode(errors="surrogateescape"),
        )
        logger.info(f"Initialized {config_file.as_uri()} with default config")
    except OSError as exc:
        # Best effort only: failing to write the initial config is not fatal.
        logger.warning(
            f"Unable to initialize {config_file.as_uri()} with default config: {exc}"
        )
def log_extension_info(all_extensions, enabled_extensions):
    """Log the names of the enabled and the disabled extensions."""
    # TODO: distinguish disabled vs blocked by env?
    enabled = {extension.ext_name for extension in enabled_extensions}
    disabled = {extension.ext_name for extension in all_extensions} - enabled
    logger.info("Enabled extensions: %s", ", ".join(enabled) or "none")
    logger.info("Disabled extensions: %s", ", ".join(disabled) or "none")
def check_config_errors(config, errors, extensions):
    """Log configuration errors and exit if any of them are fatal.

    Errors in sections not belonging to any known extension are fatal and
    terminate the process via ``sys.exit(1)``. Errors in sections of
    extensions that were auto-disabled due to config errors are only warned
    about. Mutates ``errors``: drops the synthetic "enabled" entry added by
    ``main()`` for config-disabled extensions.
    """
    fatal_errors = []
    extension_names = {}
    all_extension_names = set()

    for state in extensions:
        extension_names[state] = {e.ext_name for e in extensions[state]}
        all_extension_names.update(extension_names[state])

    for section in sorted(errors):
        if not errors[section]:
            continue
        if section not in all_extension_names:
            logger.warning(f"Found fatal {section} configuration errors:")
            fatal_errors.append(section)
        elif section in extension_names["config"]:
            # Remove the synthetic "enabled" error so it is not repeated in
            # the per-field listing below.
            del errors[section]["enabled"]
            logger.warning(
                f"Found {section} configuration errors. "
                f"The extension has been automatically disabled:"
            )
        else:
            # Errors in self-check- or user-disabled extensions are ignored.
            continue
        for field, msg in errors[section].items():
            logger.warning(f"  {section}/{field} {msg}")

    if extensions["config"]:
        logger.warning(
            "Please fix the extension configuration errors or "
            "disable the extensions to silence these messages."
        )

    if fatal_errors:
        logger.error("Please fix fatal configuration errors, exiting...")
        sys.exit(1)
if __name__ == "__main__":
    # Propagate the command's return value as the process exit status.
    sys.exit(main())

View File

@@ -0,0 +1,10 @@
# flake8: noqa
from .actor import Audio
from .constants import PlaybackState
from .listener import AudioListener
from .utils import (
calculate_duration,
create_buffer,
millisecond_to_clocktime,
supported_uri_schemes,
)

View File

@@ -0,0 +1,855 @@
import logging
import os
import threading
import pykka
from mopidy import exceptions
from mopidy.audio import tags as tags_lib
from mopidy.audio import utils
from mopidy.audio.constants import PlaybackState
from mopidy.audio.listener import AudioListener
from mopidy.internal import process
from mopidy.internal.gi import GLib, GObject, Gst, GstPbutils
logger = logging.getLogger(__name__)

# This logger is only meant for debug logging of low level GStreamer info such
# as callbacks, event, messages and direct interaction with GStreamer such as
# set_state() on a pipeline.
gst_logger = logging.getLogger("mopidy.audio.gst")

# Value for playbin's "flags" property selecting audio playback.
_GST_PLAY_FLAGS_AUDIO = 0x02

# Maps GStreamer pipeline states to Mopidy's public playback states. READY
# and VOID_PENDING are deliberately absent; see _Handler for how they are
# filtered out.
_GST_STATE_MAPPING = {
    Gst.State.PLAYING: PlaybackState.PLAYING,
    Gst.State.PAUSED: PlaybackState.PAUSED,
    Gst.State.NULL: PlaybackState.STOPPED,
}
# TODO: expose this as a property on audio?
class _Appsrc:
"""Helper class for dealing with appsrc based playback."""
def __init__(self):
self._signals = utils.Signals()
self.reset()
def reset(self):
"""Reset the helper.
Should be called whenever the source changes and we are not setting up
a new appsrc.
"""
self.prepare(None, None, None, None)
def prepare(self, caps, need_data, enough_data, seek_data):
"""Store info we will need when the appsrc element gets installed."""
self._signals.clear()
self._source = None
self._caps = caps
self._need_data_callback = need_data
self._seek_data_callback = seek_data
self._enough_data_callback = enough_data
def configure(self, source):
"""Configure the supplied source for use.
Should be called whenever we get a new appsrc.
"""
source.set_property("caps", self._caps)
source.set_property("format", "time")
source.set_property("stream-type", "seekable")
source.set_property("max-bytes", 1 << 20) # 1MB
source.set_property("min-percent", 50)
if self._need_data_callback:
self._signals.connect(
source, "need-data", self._on_signal, self._need_data_callback
)
if self._seek_data_callback:
self._signals.connect(
source, "seek-data", self._on_signal, self._seek_data_callback
)
if self._enough_data_callback:
self._signals.connect(
source,
"enough-data",
self._on_signal,
None,
self._enough_data_callback,
)
self._source = source
def push(self, buffer_):
if self._source is None:
return False
if buffer_ is None:
gst_logger.debug("Sending appsrc end-of-stream event.")
result = self._source.emit("end-of-stream")
return result == Gst.FlowReturn.OK
else:
result = self._source.emit("push-buffer", buffer_)
return result == Gst.FlowReturn.OK
def _on_signal(self, element, clocktime, func):
# This shim is used to ensure we always return true, and also handles
# that not all the callbacks have a time argument.
if clocktime is None:
func()
else:
func(utils.clocktime_to_millisecond(clocktime))
return True
# TODO: expose this as a property on audio when #790 gets further along.
class _Outputs(Gst.Bin):

    """Bin fanning one sink pad out to any number of audio output bins.

    Every output added via :meth:`add_output` is attached behind a shared
    ``tee`` element, each with its own ``queue``.
    """

    def __init__(self):
        Gst.Bin.__init__(self)
        # TODO gst1: Set 'outputs' as the Bin name for easier debugging
        self._tee = Gst.ElementFactory.make("tee")
        self.add(self._tee)
        ghost_pad = Gst.GhostPad.new("sink", self._tee.get_static_pad("sink"))
        self.add_pad(ghost_pad)

    def add_output(self, description):
        """Parse ``description`` as a GStreamer bin and attach it as output.

        :param description: gst-launch style pipeline description
        :raises exceptions.AudioException: if the description cannot be parsed
        """
        # XXX This only works for pipelines not in use until #790 gets done.
        try:
            output = Gst.parse_bin_from_description(
                description, ghost_unlinked_pads=True
            )
        except GLib.GError as ex:
            logger.error(
                'Failed to create audio output "%s": %s', description, ex
            )
            # Py3 fix: bytes(ex) raises TypeError on an exception instance,
            # masking the original parse error; use its string form instead.
            raise exceptions.AudioException(str(ex))
        self._add(output)
        logger.info('Audio output set to "%s"', description)

    def _add(self, element):
        # Each output gets its own queue behind the tee so the branches are
        # decoupled from each other.
        queue = Gst.ElementFactory.make("queue")
        self.add(element)
        self.add(queue)
        queue.link(element)
        self._tee.link(queue)
class SoftwareMixer:

    """Adapts a GStreamer ``volume`` element to Mopidy's mixer interface.

    Volume is exposed as an integer percentage (0-100) while the underlying
    element property is a float in the 0.0-1.0 range.
    """

    def __init__(self, mixer):
        self._mixer = mixer
        self._element = None
        self._last_volume = None
        self._last_mute = None
        self._signals = utils.Signals()

    def setup(self, element, mixer_ref):
        self._element = element
        self._mixer.setup(mixer_ref)

    def teardown(self):
        self._signals.clear()
        self._mixer.teardown()

    def get_volume(self):
        raw_volume = self._element.get_property("volume")
        return int(round(raw_volume * 100))

    def set_volume(self, volume):
        self._element.set_property("volume", volume / 100.0)
        self._mixer.trigger_volume_changed(self.get_volume())

    def get_mute(self):
        return self._element.get_property("mute")

    def set_mute(self, mute):
        self._element.set_property("mute", bool(mute))
        self._mixer.trigger_mute_changed(self.get_mute())
class _Handler:

    """Translates GStreamer bus messages and pad events for the Audio actor.

    Subscribes to a playbin's bus and an output pad, and turns low-level
    GStreamer callbacks into updates of the owning :class:`Audio` instance's
    state plus :class:`AudioListener` events.
    """

    def __init__(self, audio):
        self._audio = audio
        self._element = None
        self._pad = None
        self._message_handler_id = None
        self._event_handler_id = None

    def setup_message_handling(self, element):
        """Start receiving bus messages from ``element``."""
        self._element = element
        bus = element.get_bus()
        bus.add_signal_watch()
        self._message_handler_id = bus.connect("message", self.on_message)

    def setup_event_handling(self, pad):
        """Start receiving pad events from ``pad``."""
        self._pad = pad
        self._event_handler_id = pad.add_probe(
            Gst.PadProbeType.EVENT_BOTH, self.on_pad_event
        )

    def teardown_message_handling(self):
        bus = self._element.get_bus()
        bus.remove_signal_watch()
        bus.disconnect(self._message_handler_id)
        self._message_handler_id = None

    def teardown_event_handling(self):
        self._pad.remove_probe(self._event_handler_id)
        self._event_handler_id = None

    def on_message(self, bus, msg):
        # Dispatch each bus message type to its dedicated on_* handler.
        if msg.type == Gst.MessageType.STATE_CHANGED:
            if msg.src != self._element:
                return
            old_state, new_state, pending_state = msg.parse_state_changed()
            self.on_playbin_state_changed(old_state, new_state, pending_state)
        elif msg.type == Gst.MessageType.BUFFERING:
            self.on_buffering(msg.parse_buffering(), msg.get_structure())
        elif msg.type == Gst.MessageType.EOS:
            self.on_end_of_stream()
        elif msg.type == Gst.MessageType.ERROR:
            error, debug = msg.parse_error()
            self.on_error(error, debug)
        elif msg.type == Gst.MessageType.WARNING:
            error, debug = msg.parse_warning()
            self.on_warning(error, debug)
        elif msg.type == Gst.MessageType.ASYNC_DONE:
            self.on_async_done()
        elif msg.type == Gst.MessageType.TAG:
            taglist = msg.parse_tag()
            self.on_tag(taglist)
        elif msg.type == Gst.MessageType.ELEMENT:
            if GstPbutils.is_missing_plugin_message(msg):
                self.on_missing_plugin(msg)
        elif msg.type == Gst.MessageType.STREAM_START:
            self.on_stream_start()

    def on_pad_event(self, pad, pad_probe_info):
        event = pad_probe_info.get_event()
        if event.type == Gst.EventType.SEGMENT:
            self.on_segment(event.parse_segment())
        return Gst.PadProbeReturn.OK

    def on_playbin_state_changed(self, old_state, new_state, pending_state):
        gst_logger.debug(
            "Got STATE_CHANGED bus message: old=%s new=%s pending=%s",
            old_state.value_name,
            new_state.value_name,
            pending_state.value_name,
        )

        if new_state == Gst.State.READY and pending_state == Gst.State.NULL:
            # XXX: We're not called on the last state change when going down to
            # NULL, so we rewrite the second to last call to get the expected
            # behavior.
            new_state = Gst.State.NULL
            pending_state = Gst.State.VOID_PENDING

        if pending_state != Gst.State.VOID_PENDING:
            return  # Ignore intermediate state changes

        if new_state == Gst.State.READY:
            return  # Ignore READY state as it's GStreamer specific

        new_state = _GST_STATE_MAPPING[new_state]
        old_state, self._audio.state = self._audio.state, new_state

        target_state = _GST_STATE_MAPPING.get(self._audio._target_state)
        if target_state is None:
            # XXX: Workaround for #1430, to be fixed properly by #1222.
            # Fix: Logger.warn() is a deprecated alias since Python 3.3;
            # use warning(), as everywhere else in this module.
            logger.warning("Race condition happened. See #1222 and #1430.")
            return
        if target_state == new_state:
            target_state = None

        logger.debug(
            "Audio event: state_changed(old_state=%s, new_state=%s, "
            "target_state=%s)",
            old_state,
            new_state,
            target_state,
        )
        AudioListener.send(
            "state_changed",
            old_state=old_state,
            new_state=new_state,
            target_state=target_state,
        )
        if new_state == PlaybackState.STOPPED:
            logger.debug("Audio event: stream_changed(uri=None)")
            AudioListener.send("stream_changed", uri=None)

        if "GST_DEBUG_DUMP_DOT_DIR" in os.environ:
            Gst.debug_bin_to_dot_file(
                self._audio._playbin, Gst.DebugGraphDetails.ALL, "mopidy"
            )

    def on_buffering(self, percent, structure=None):
        if self._audio._target_state < Gst.State.PAUSED:
            gst_logger.debug("Skip buffering during track change.")
            return

        if structure is not None and structure.has_field("buffering-mode"):
            buffering_mode = structure.get_enum(
                "buffering-mode", Gst.BufferingMode
            )
            if buffering_mode == Gst.BufferingMode.LIVE:
                return  # Live sources stall in paused.

        # "TRACE" is assumed to be registered as a custom level elsewhere in
        # Mopidy's logging setup — TODO confirm.
        level = logging.getLevelName("TRACE")
        if percent < 10 and not self._audio._buffering:
            # Pause while the buffer refills; resume below once it is full.
            self._audio._playbin.set_state(Gst.State.PAUSED)
            self._audio._buffering = True
            level = logging.DEBUG
        if percent == 100:
            self._audio._buffering = False
            if self._audio._target_state == Gst.State.PLAYING:
                self._audio._playbin.set_state(Gst.State.PLAYING)
            level = logging.DEBUG

        gst_logger.log(
            level, "Got BUFFERING bus message: percent=%d%%", percent
        )

    def on_end_of_stream(self):
        gst_logger.debug("Got EOS (end of stream) bus message.")
        logger.debug("Audio event: reached_end_of_stream()")
        self._audio._tags = {}
        AudioListener.send("reached_end_of_stream")

    def on_error(self, error, debug):
        gst_logger.error(f"GStreamer error: {error.message}")
        gst_logger.debug(
            f"Got ERROR bus message: error={error!r} debug={debug!r}"
        )

        # TODO: is this needed?
        self._audio.stop_playback()

    def on_warning(self, error, debug):
        gst_logger.warning(f"GStreamer warning: {error.message}")
        gst_logger.debug(
            f"Got WARNING bus message: error={error!r} debug={debug!r}"
        )

    def on_async_done(self):
        gst_logger.debug("Got ASYNC_DONE bus message.")

    def on_tag(self, taglist):
        tags = tags_lib.convert_taglist(taglist)
        gst_logger.debug("Got TAG bus message: tags=%r", dict(tags))

        # Postpone emitting tags until stream start.
        if self._audio._pending_tags is not None:
            self._audio._pending_tags.update(tags)
            return

        # TODO: Add proper tests for only emitting changed tags.
        # Sentinel guarantees missing keys never compare equal to any value.
        unique = object()
        changed = []
        for key, value in tags.items():
            # Update any tags that changed, and store changed keys.
            if self._audio._tags.get(key, unique) != value:
                self._audio._tags[key] = value
                changed.append(key)

        if changed:
            logger.debug("Audio event: tags_changed(tags=%r)", changed)
            AudioListener.send("tags_changed", tags=changed)

    def on_missing_plugin(self, msg):
        desc = GstPbutils.missing_plugin_message_get_description(msg)
        debug = GstPbutils.missing_plugin_message_get_installer_detail(msg)
        gst_logger.debug("Got missing-plugin bus message: description=%r", desc)
        logger.warning("Could not find a %s to handle media.", desc)
        if GstPbutils.install_plugins_supported():
            logger.info(
                "You might be able to fix this by running: "
                'gst-installer "%s"',
                debug,
            )
        # TODO: store the missing plugins installer info in a file so we can
        # can provide a 'mopidy install-missing-plugins' if the system has the
        # required helper installed?

    def on_stream_start(self):
        gst_logger.debug("Got STREAM_START bus message")
        uri = self._audio._pending_uri
        logger.debug("Audio event: stream_changed(uri=%r)", uri)
        AudioListener.send("stream_changed", uri=uri)

        # Emit any postponed tags that we got after about-to-finish.
        tags, self._audio._pending_tags = self._audio._pending_tags, None
        self._audio._tags = tags or {}

        if tags:
            logger.debug("Audio event: tags_changed(tags=%r)", tags.keys())
            AudioListener.send("tags_changed", tags=tags.keys())

        if self._audio._pending_metadata:
            self._audio._playbin.send_event(self._audio._pending_metadata)
            self._audio._pending_metadata = None

    def on_segment(self, segment):
        gst_logger.debug(
            "Got SEGMENT pad event: "
            "rate=%(rate)s format=%(format)s start=%(start)s stop=%(stop)s "
            "position=%(position)s",
            {
                "rate": segment.rate,
                "format": Gst.Format.get_name(segment.format),
                "start": segment.start,
                "stop": segment.stop,
                "position": segment.position,
            },
        )
        position_ms = segment.position // Gst.MSECOND
        logger.debug("Audio event: position_changed(position=%r)", position_ms)
        AudioListener.send("position_changed", position=position_ms)
# TODO: create a player class which replaces the actors internals
class Audio(pykka.ThreadingActor):

    """
    Audio output through `GStreamer <https://gstreamer.freedesktop.org/>`_.
    """

    #: The GStreamer state mapped to :class:`mopidy.audio.PlaybackState`
    state = PlaybackState.STOPPED

    #: The software mixing interface :class:`mopidy.audio.actor.SoftwareMixer`
    mixer = None

    def __init__(self, config, mixer):
        super().__init__()

        self._config = config
        self._target_state = Gst.State.NULL
        self._buffering = False
        self._live_stream = False
        self._tags = {}
        self._pending_uri = None
        self._pending_tags = None
        self._pending_metadata = None

        self._playbin = None
        self._outputs = None
        self._queue = None
        self._about_to_finish_callback = None

        self._handler = _Handler(self)
        self._appsrc = _Appsrc()
        self._signals = utils.Signals()

        # Software mixing is only enabled when explicitly configured.
        if mixer and self._config["audio"]["mixer"] == "software":
            self.mixer = pykka.traversable(SoftwareMixer(mixer))

    def on_start(self):
        # Remember the actor's own thread so _on_about_to_finish() can detect
        # when it is (wrongly) invoked on it.
        self._thread = threading.current_thread()
        try:
            self._setup_preferences()
            self._setup_playbin()
            self._setup_outputs()
            self._setup_audio_sink()
        except GLib.GError as ex:
            logger.exception(ex)
            process.exit_process()

    def on_stop(self):
        self._teardown_mixer()
        self._teardown_playbin()

    def _setup_preferences(self):
        # TODO: move out of audio actor?
        # Fix for https://github.com/mopidy/mopidy/issues/604
        registry = Gst.Registry.get()
        jacksink = registry.find_feature("jackaudiosink", Gst.ElementFactory)
        if jacksink:
            jacksink.set_rank(Gst.Rank.SECONDARY)

    def _setup_playbin(self):
        playbin = Gst.ElementFactory.make("playbin")
        playbin.set_property("flags", _GST_PLAY_FLAGS_AUDIO)

        # TODO: turn into config values...
        playbin.set_property("buffer-size", 5 << 20)  # 5MB
        playbin.set_property("buffer-duration", 5 * Gst.SECOND)

        self._signals.connect(playbin, "source-setup", self._on_source_setup)
        self._signals.connect(
            playbin, "about-to-finish", self._on_about_to_finish
        )

        self._playbin = playbin
        self._handler.setup_message_handling(playbin)

    def _teardown_playbin(self):
        self._handler.teardown_message_handling()
        self._handler.teardown_event_handling()
        self._signals.disconnect(self._playbin, "about-to-finish")
        self._signals.disconnect(self._playbin, "source-setup")
        self._playbin.set_state(Gst.State.NULL)

    def _setup_outputs(self):
        # We don't want to use outputs for regular testing, so just install
        # an unsynced fakesink when someone asks for a 'testoutput'.
        if self._config["audio"]["output"] == "testoutput":
            self._outputs = Gst.ElementFactory.make("fakesink")
        else:
            self._outputs = _Outputs()
            try:
                self._outputs.add_output(self._config["audio"]["output"])
            except exceptions.AudioException:
                process.exit_process()  # TODO: move this up the chain

        self._handler.setup_event_handling(self._outputs.get_static_pad("sink"))

    def _setup_audio_sink(self):
        audio_sink = Gst.ElementFactory.make("bin", "audio-sink")
        queue = Gst.ElementFactory.make("queue")
        volume = Gst.ElementFactory.make("volume")

        # Queue element to buy us time between the about-to-finish event and
        # the actual switch, i.e. about to switch can block for longer thanks
        # to this queue.

        # TODO: See if settings should be set to minimize latency. Previous
        # setting breaks appsrc, and settings before that broke on a few
        # systems. So leave the default to play it safe.
        buffer_time = self._config["audio"]["buffer_time"]
        if buffer_time is not None and buffer_time > 0:
            queue.set_property("max-size-time", buffer_time * Gst.MSECOND)

        audio_sink.add(queue)
        audio_sink.add(self._outputs)
        audio_sink.add(volume)

        queue.link(volume)
        volume.link(self._outputs)

        if self.mixer:
            self.mixer.setup(volume, self.actor_ref.proxy().mixer)

        ghost_pad = Gst.GhostPad.new("sink", queue.get_static_pad("sink"))
        audio_sink.add_pad(ghost_pad)

        self._playbin.set_property("audio-sink", audio_sink)
        self._queue = queue

    def _teardown_mixer(self):
        if self.mixer:
            self.mixer.teardown()

    def _on_about_to_finish(self, element):
        if self._thread == threading.current_thread():
            logger.error(
                "about-to-finish in actor, aborting to avoid deadlock."
            )
            return

        gst_logger.debug("Got about-to-finish event.")
        if self._about_to_finish_callback:
            logger.debug("Running about-to-finish callback.")
            self._about_to_finish_callback()

    def _on_source_setup(self, element, source):
        gst_logger.debug(
            "Got source-setup signal: element=%s", source.__class__.__name__
        )

        # Only configure the appsrc helper for actual appsrc sources.
        if source.get_factory().get_name() == "appsrc":
            self._appsrc.configure(source)
        else:
            self._appsrc.reset()

        if self._live_stream and hasattr(source.props, "is_live"):
            gst_logger.debug("Enabling live stream mode")
            source.set_live(True)

        utils.setup_proxy(source, self._config["proxy"])

    def set_uri(self, uri, live_stream=False):
        """
        Set URI of audio to be played.

        You *MUST* call :meth:`prepare_change` before calling this method.

        :param uri: the URI to play
        :type uri: string
        :param live_stream: disables buffering, reducing latency for stream,
            and discarding data when paused
        :type live_stream: bool
        """

        # XXX: Hack to workaround issue on Mac OS X where volume level
        # does not persist between track changes. mopidy/mopidy#886
        if self.mixer is not None:
            current_volume = self.mixer.get_volume()
        else:
            current_volume = None

        self._pending_uri = uri
        self._pending_tags = {}
        self._live_stream = live_stream
        self._playbin.set_property("uri", uri)

        if self.mixer is not None and current_volume is not None:
            self.mixer.set_volume(current_volume)

    def set_appsrc(
        self, caps, need_data=None, enough_data=None, seek_data=None
    ):
        """
        Switch to using appsrc for getting audio to be played.

        You *MUST* call :meth:`prepare_change` before calling this method.

        :param caps: GStreamer caps string describing the audio format to
            expect
        :type caps: string
        :param need_data: callback for when appsrc needs data
        :type need_data: callable which takes data length hint in ms
        :param enough_data: callback for when appsrc has enough data
        :type enough_data: callable
        :param seek_data: callback for when data from a new position is needed
            to continue playback
        :type seek_data: callable which takes time position in ms
        """
        self._appsrc.prepare(
            Gst.Caps.from_string(caps), need_data, enough_data, seek_data
        )
        uri = "appsrc://"
        self._pending_uri = uri
        self._playbin.set_property("uri", uri)

    def emit_data(self, buffer_):
        """
        Call this to deliver raw audio data to be played.

        If the buffer is :class:`None`, the end-of-stream token is put on the
        playbin. We will get a GStreamer message when the stream playback
        reaches the token, and can then do any end-of-stream related tasks.

        Note that the URI must be set to ``appsrc://`` for this to work.

        Returns :class:`True` if data was delivered.

        :param buffer_: buffer to pass to appsrc
        :type buffer_: :class:`Gst.Buffer` or :class:`None`
        :rtype: boolean
        """
        return self._appsrc.push(buffer_)

    def set_about_to_finish_callback(self, callback):
        """
        Configure audio to use an about-to-finish callback.

        This should be used to achieve gapless playback. For this to work the
        callback *MUST* call :meth:`set_uri` with the new URI to play and
        block until this call has been made. :meth:`prepare_change` is not
        needed before :meth:`set_uri` in this one special case.

        :param callable callback: Callback to run when we need the next URI.
        """
        self._about_to_finish_callback = callback

    def get_position(self):
        """
        Get position in milliseconds.

        :rtype: int
        """
        success, position = self._playbin.query_position(Gst.Format.TIME)
        if not success:
            # TODO: take state into account for this and possibly also return
            # None as the unknown value instead of zero?
            logger.debug("Position query failed")
            return 0
        return utils.clocktime_to_millisecond(position)

    def set_position(self, position):
        """
        Set position in milliseconds.

        :param position: the position in milliseconds
        :type position: int
        :rtype: :class:`True` if successful, else :class:`False`
        """
        # TODO: double check seek flags in use.
        gst_position = utils.millisecond_to_clocktime(position)
        gst_logger.debug("Sending flushing seek: position=%r", gst_position)
        # Send seek event to the queue not the playbin. The default behavior
        # for bins is to forward this event to all sinks. Which results in
        # duplicate seek events making it to appsrc. Since elements are not
        # allowed to act on the seek event, only modify it, this should be safe
        # to do.
        result = self._queue.seek_simple(
            Gst.Format.TIME, Gst.SeekFlags.FLUSH, gst_position
        )
        return result

    def start_playback(self):
        """
        Notify GStreamer that it should start playback.

        :rtype: :class:`True` if successfull, else :class:`False`
        """
        return self._set_state(Gst.State.PLAYING)

    def pause_playback(self):
        """
        Notify GStreamer that it should pause playback.

        :rtype: :class:`True` if successfull, else :class:`False`
        """
        return self._set_state(Gst.State.PAUSED)

    def prepare_change(self):
        """
        Notify GStreamer that we are about to change state of playback.

        This function *MUST* be called before changing URIs or doing
        changes like updating data that is being pushed. The reason for this
        is that GStreamer will reset all its state when it changes to
        :attr:`Gst.State.READY`.
        """
        return self._set_state(Gst.State.READY)

    def stop_playback(self):
        """
        Notify GStreamer that is should stop playback.

        :rtype: :class:`True` if successfull, else :class:`False`
        """
        return self._set_state(Gst.State.NULL)

    def wait_for_state_change(self):
        """Block until any pending state changes are complete.

        Should only be used by tests.
        """
        self._playbin.get_state(timeout=Gst.CLOCK_TIME_NONE)

    def enable_sync_handler(self):
        """Enable manual processing of messages from bus.

        Should only be used by tests.
        """

        def sync_handler(bus, message):
            self._handler.on_message(bus, message)
            return Gst.BusSyncReply.DROP

        bus = self._playbin.get_bus()
        bus.set_sync_handler(sync_handler)

    def _set_state(self, state):
        """
        Internal method for setting the raw GStreamer state.

        .. digraph:: gst_state_transitions

            graph [rankdir="LR"];
            node [fontsize=10];

            "NULL" -> "READY"
            "PAUSED" -> "PLAYING"
            "PAUSED" -> "READY"
            "PLAYING" -> "PAUSED"
            "READY" -> "NULL"
            "READY" -> "PAUSED"

        :param state: State to set playbin to. One of: `Gst.State.NULL`,
            `Gst.State.READY`, `Gst.State.PAUSED` and `Gst.State.PLAYING`.
        :type state: :class:`Gst.State`
        :rtype: :class:`True` if successfull, else :class:`False`
        """
        if state < Gst.State.PAUSED:
            self._buffering = False
        self._target_state = state
        result = self._playbin.set_state(state)
        gst_logger.debug(
            "Changing state to %s: result=%s",
            state.value_name,
            result.value_name,
        )
        if result == Gst.StateChangeReturn.FAILURE:
            logger.warning(
                "Setting GStreamer state to %s failed", state.value_name
            )
            return False
        # TODO: at this point we could already emit stopped event instead
        # of faking it in the message handling when result=OK
        return True

    # TODO: bake this into setup appsrc perhaps?
    def set_metadata(self, track):
        """
        Set track metadata for currently playing song.

        Only needs to be called by sources such as ``appsrc`` which do not
        already inject tags in playbin, e.g. when using :meth:`emit_data` to
        deliver raw audio data to GStreamer.

        :param track: the current track
        :type track: :class:`mopidy.models.Track`
        """
        taglist = Gst.TagList.new_empty()
        artists = [a for a in (track.artists or []) if a.name]

        def set_value(tag, value):
            gobject_value = GObject.Value()
            gobject_value.init(GObject.TYPE_STRING)
            gobject_value.set_string(value)
            taglist.add_value(Gst.TagMergeMode.REPLACE, tag, gobject_value)

        # Default to blank data to trick shoutcast into clearing any previous
        # values it might have.
        # TODO: Verify if this works at all, likely it doesn't.
        set_value(Gst.TAG_ARTIST, " ")
        set_value(Gst.TAG_TITLE, " ")
        set_value(Gst.TAG_ALBUM, " ")

        if artists:
            set_value(Gst.TAG_ARTIST, ", ".join([a.name for a in artists]))

        if track.name:
            set_value(Gst.TAG_TITLE, track.name)

        if track.album and track.album.name:
            set_value(Gst.TAG_ALBUM, track.album.name)

        gst_logger.debug(
            "Sending TAG event for track %r: %r", track.uri, taglist.to_string()
        )
        event = Gst.Event.new_tag(taglist)
        # If a stream change is pending, hold the tags back until stream
        # start; otherwise send them immediately.
        if self._pending_uri:
            self._pending_metadata = event
        else:
            self._playbin.send_event(event)

    def get_current_tags(self):
        """
        Get the currently playing media's tags.

        If no tags have been found, or nothing is playing this returns an empty
        dictionary. For each set of tags we collect a tags_changed event is
        emitted with the keys of the changes tags. After such calls users may
        call this function to get the updated values.

        :rtype: {key: [values]} dict for the current media.
        """
        # TODO: should this be a (deep) copy? most likely yes
        # TODO: should we return None when stopped?
        # TODO: support only fetching keys we care about?
        return self._tags

View File

@@ -0,0 +1,14 @@
class PlaybackState:

    """
    Enum of playback states.

    The states are plain string constants rather than :class:`enum.Enum`
    members, so they compare equal to their literal string values.
    """

    #: Constant representing the paused state.
    PAUSED = "paused"

    #: Constant representing the playing state.
    PLAYING = "playing"

    #: Constant representing the stopped state.
    STOPPED = "stopped"

View File

@@ -0,0 +1,94 @@
from mopidy import listener
class AudioListener(listener.Listener):

    """
    Marker interface for recipients of events sent by the audio actor.

    Any Pykka actor that mixes in this class will receive calls to the methods
    defined here when the corresponding events happen in the core actor. This
    interface is used both for looking up what actors to notify of the events,
    and for providing default implementations for those listeners that are not
    interested in all events.
    """

    @staticmethod
    def send(event, **kwargs):
        """Helper to allow calling of audio listener events"""
        listener.send(AudioListener, event, **kwargs)

    def reached_end_of_stream(self):
        """
        Called whenever the end of the audio stream is reached.

        *MAY* be implemented by actor.
        """
        pass

    def stream_changed(self, uri):
        """
        Called whenever the audio stream changes.

        *MAY* be implemented by actor.

        :param string uri: URI the stream has started playing.
        """
        pass

    def position_changed(self, position):
        """
        Called whenever the position of the stream changes.

        *MAY* be implemented by actor.

        :param int position: Position in milliseconds.
        """
        pass

    def state_changed(self, old_state, new_state, target_state):
        """
        Called after the playback state have changed.

        Will be called for both immediate and async state changes in
        GStreamer.

        Target state is set when we should be in the target state, but
        temporarily need to switch to another state. A typical example of this
        is buffering. When this happens an event with
        `old=PLAYING, new=PAUSED, target=PLAYING` will be emitted. Once we
        have caught up a `old=PAUSED, new=PLAYING, target=None` event will be
        generated.

        Regular state changes will not have target state set as they are
        final states which should be stable.

        *MAY* be implemented by actor.

        :param old_state: the state before the change
        :type old_state: string from :class:`mopidy.core.PlaybackState` field
        :param new_state: the state after the change
        :type new_state: string from :class:`mopidy.core.PlaybackState` field
        :param target_state: the intended state
        :type target_state: string from :class:`mopidy.core.PlaybackState`
            field or :class:`None` if this is a final state.
        """
        pass

    def tags_changed(self, tags):
        """
        Called whenever the current audio stream's tags change.

        This event signals that some track metadata has been updated. This
        can be metadata such as artists, titles, organization, or details
        about the actual audio such as bit-rates, numbers of channels etc.

        For the available tag keys please refer to GStreamer documentation
        for tags.

        *MAY* be implemented by actor.

        :param tags: The tags that have just been updated.
        :type tags: :class:`set` of strings
        """
        pass

View File

@@ -0,0 +1,302 @@
import collections
import logging
import time
from mopidy import exceptions
from mopidy.audio import tags as tags_lib
from mopidy.audio import utils
from mopidy.internal import log
from mopidy.internal.gi import Gst, GstPbutils
# Bit flags matching GStreamer's GstElementFactoryListType constants
# (GST_ELEMENT_FACTORY_TYPE_*), used to classify factories offered to the
# autoplug-select callback below.
_DECODER = 1 << 0
_AUDIO = 1 << 50  # GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO
_DEMUXER = 1 << 5
_DEPAYLOADER = 1 << 8
_PARSER = 1 << 6

# Return values matching GStreamer's GstAutoplugSelectResult enum:
_SELECT_TRY = 0
_SELECT_EXPOSE = 1

# Result of a completed scan. ``playable`` is the have_audio flag collected
# while pre-rolling the pipeline.
_Result = collections.namedtuple(
    "Result", ("uri", "tags", "duration", "seekable", "mime", "playable")
)

logger = logging.getLogger(__name__)
def _trace(*args, **kwargs):
    """Log a message at Mopidy's custom TRACE log level."""
    logger.log(log.TRACE_LOG_LEVEL, *args, **kwargs)
# TODO: replace with a scan(uri, timeout=1000, proxy_config=None)?
class Scanner:

    """
    Helper to get tags and other relevant info from URIs.

    :param timeout: timeout for scanning a URI in ms
    :type timeout: int
    :param proxy_config: dictionary containing proxy config strings.
    """

    def __init__(self, timeout=1000, proxy_config=None):
        self._timeout_ms = int(timeout)
        self._proxy_config = proxy_config or {}

    def scan(self, uri, timeout=None):
        """
        Scan the given uri collecting relevant metadata.

        :param uri: URI of the resource to scan.
        :type uri: string
        :param timeout: timeout for scanning a URI in ms. Defaults to the
            ``timeout`` value used when creating the scanner.
        :type timeout: int
        :return: A named tuple containing
            ``(uri, tags, duration, seekable, mime, playable)``.
            ``tags`` is a dictionary of lists for all the tags we found.
            ``duration`` is the length of the URI in milliseconds, or
            :class:`None` if the URI has no duration. ``seekable`` is boolean
            indicating if a seek would succeed. ``playable`` is boolean
            indicating whether an audio stream was detected.
        """
        timeout = int(timeout or self._timeout_ms)
        tags, duration, seekable, mime = None, None, None, None
        pipeline, signals = _setup_pipeline(uri, self._proxy_config)
        try:
            _start_pipeline(pipeline)
            tags, mime, have_audio, duration = _process(pipeline, timeout)
            seekable = _query_seekable(pipeline)
        finally:
            # Always tear the pipeline down, even if scanning failed.
            signals.clear()
            pipeline.set_state(Gst.State.NULL)
            del pipeline
        return _Result(uri, tags, duration, seekable, mime, have_audio)
# Turns out it's _much_ faster to just create a new pipeline for every URI as
# decodebins and other elements don't seem to take well to being reused.
def _setup_pipeline(uri, proxy_config=None):
    """Build a scan pipeline for *uri* and return ``(pipeline, signals)``.

    The source element is wired into a decodebin either immediately (static
    source pads) or lazily via its ``pad-added`` signal (dynamic pads).

    :raises exceptions.ScannerError: if GStreamer cannot open the URI, or if
        the source element exposes no usable pads.
    """
    src = Gst.Element.make_from_uri(Gst.URIType.SRC, uri)
    if not src:
        raise exceptions.ScannerError(f"GStreamer can not open: {uri}")

    if proxy_config:
        utils.setup_proxy(src, proxy_config)

    signals = utils.Signals()
    pipeline = Gst.ElementFactory.make("pipeline")
    pipeline.add(src)

    if _has_src_pads(src):
        _setup_decodebin(src, src.get_static_pad("src"), pipeline, signals)
    elif _has_dynamic_src_pad(src):
        signals.connect(src, "pad-added", _setup_decodebin, pipeline, signals)
    else:
        raise exceptions.ScannerError("No pads found in source element.")

    return pipeline, signals
def _has_src_pads(element):
pads = []
element.iterate_src_pads().foreach(pads.append)
return bool(pads)
def _has_dynamic_src_pad(element):
    """Return True if *element* may add a source pad later (SOMETIMES pads)."""
    return any(
        template.direction == Gst.PadDirection.SRC
        and template.presence == Gst.PadPresence.SOMETIMES
        for template in element.get_pad_template_list()
    )
def _setup_decodebin(element, pad, pipeline, signals):
    """Attach a typefind + decodebin chain to *pad* inside *pipeline*.

    Used both directly (static source pads) and as a ``pad-added`` signal
    callback (dynamic pads), which is why *element* is accepted but unused.

    :param element: source element owning *pad* (signal-callback signature)
    :param pad: source pad to link into the typefind element
    :param pipeline: pipeline the new elements are added to
    :param signals: signal-registration tracker used to connect the
        typefind/decodebin callbacks
    """
    typefind = Gst.ElementFactory.make("typefind")
    decodebin = Gst.ElementFactory.make("decodebin")

    # Fix: the original loop variable was named `element`, shadowing the
    # callback parameter above; use a distinct name instead.
    for new_element in (typefind, decodebin):
        pipeline.add(new_element)
        new_element.sync_state_with_parent()

    pad.link(typefind.get_static_pad("sink"))
    typefind.link(decodebin)

    signals.connect(typefind, "have-type", _have_type, decodebin)
    signals.connect(decodebin, "pad-added", _pad_added, pipeline)
    signals.connect(decodebin, "autoplug-select", _autoplug_select)
def _have_type(element, probability, caps, decodebin):
    """``have-type`` signal callback: record the detected caps.

    Forwards the caps to the decodebin and posts them as a ``have-type``
    application message on the bus so :func:`_process` can read the MIME
    type from them.
    """
    decodebin.set_property("sink-caps", caps)
    struct = Gst.Structure.new_empty("have-type")
    struct.set_value("caps", caps.get_structure(0))
    element.get_bus().post(Gst.Message.new_application(element, struct))
def _pad_added(element, pad, pipeline):
    """``pad-added`` signal callback: terminate each new pad in a fakesink.

    Also posts a ``have-audio`` application message on the bus when the pad
    produces raw audio.
    """
    sink = Gst.ElementFactory.make("fakesink")
    sink.set_property("sync", False)
    pipeline.add(sink)
    sink.sync_state_with_parent()
    pad.link(sink.get_static_pad("sink"))

    if pad.query_caps().is_subset(Gst.Caps.from_string("audio/x-raw")):
        # Probably won't happen due to autoplug-select fix, but lets play it
        # safe until we've tested more.
        struct = Gst.Structure.new_empty("have-audio")
        element.get_bus().post(Gst.Message.new_application(element, struct))
def _autoplug_select(element, pad, caps, factory):
    """``autoplug-select`` signal callback: detect audio, limit plugging.

    Posts a ``have-audio`` message when an audio decoder factory is offered.
    Only demuxers, depayloaders and parsers are actually tried; anything
    else causes the pad to be exposed instead of decoded further.
    """
    if factory.list_is_type(_DECODER | _AUDIO):
        struct = Gst.Structure.new_empty("have-audio")
        element.get_bus().post(Gst.Message.new_application(element, struct))
    if factory.list_is_type(_DEMUXER | _DEPAYLOADER | _PARSER):
        return _SELECT_TRY
    return _SELECT_EXPOSE
def _start_pipeline(pipeline):
    """Pre-roll the pipeline.

    Live sources report NO_PREROLL for the PAUSED transition and are pushed
    on to PLAYING so data starts flowing.
    """
    result = pipeline.set_state(Gst.State.PAUSED)
    if result == Gst.StateChangeReturn.NO_PREROLL:
        pipeline.set_state(Gst.State.PLAYING)
def _query_duration(pipeline):
    """Return ``(success, duration_ms)``; duration is None when unknown."""
    success, duration = pipeline.query_duration(Gst.Format.TIME)
    if not success or duration < 0:
        # Failed queries and streams without a duration both map to None.
        return success, None
    return success, int(duration // Gst.MSECOND)
def _query_seekable(pipeline):
    """Return True if a seek on the pipeline would succeed."""
    query = Gst.Query.new_seeking(Gst.Format.TIME)
    pipeline.query(query)
    # parse_seeking() returns (format, seekable, start, end); we only need
    # the seekable flag.
    return query.parse_seeking()[1]
def _process(pipeline, timeout_ms):
    """Drain bus messages until enough scan information has been collected.

    :return: ``(tags, mime, have_audio, duration)`` where ``duration`` is in
        milliseconds or :class:`None`.
    :raises exceptions.ScannerError: on pipeline errors (unless a
        missing-plugin message at least lets us report the MIME type), or
        when *timeout_ms* elapses without a result.
    """
    bus = pipeline.get_bus()
    tags = {}
    mime = None
    have_audio = False
    missing_message = None
    duration = None

    # All message types that can advance the scan:
    types = (
        Gst.MessageType.ELEMENT
        | Gst.MessageType.APPLICATION
        | Gst.MessageType.ERROR
        | Gst.MessageType.EOS
        | Gst.MessageType.ASYNC_DONE
        | Gst.MessageType.DURATION_CHANGED
        | Gst.MessageType.TAG
    )

    timeout = timeout_ms
    start = int(time.time() * 1000)
    while timeout > 0:
        msg = bus.timed_pop_filtered(timeout * Gst.MSECOND, types)
        if msg is None:
            break

        if logger.isEnabledFor(log.TRACE_LOG_LEVEL) and msg.get_structure():
            debug_text = msg.get_structure().to_string()
            if len(debug_text) > 77:
                debug_text = debug_text[:77] + "..."
            _trace("element %s: %s", msg.src.get_name(), debug_text)

        if msg.type == Gst.MessageType.ELEMENT:
            if GstPbutils.is_missing_plugin_message(msg):
                # Remembered so a later ERROR can still report a MIME type.
                missing_message = msg
        elif msg.type == Gst.MessageType.APPLICATION:
            if msg.get_structure().get_name() == "have-type":
                mime = msg.get_structure().get_value("caps").get_name()
                if mime and (
                    mime.startswith("text/") or mime == "application/xml"
                ):
                    # Text documents are never playable; bail out early.
                    return tags, mime, have_audio, duration
            elif msg.get_structure().get_name() == "have-audio":
                have_audio = True
        elif msg.type == Gst.MessageType.ERROR:
            error, _debug = msg.parse_error()
            if missing_message and not mime:
                caps = missing_message.get_structure().get_value("detail")
                mime = caps.get_structure(0).get_name()
                return tags, mime, have_audio, duration
            raise exceptions.ScannerError(str(error))
        elif msg.type == Gst.MessageType.EOS:
            return tags, mime, have_audio, duration
        elif msg.type == Gst.MessageType.ASYNC_DONE:
            success, duration = _query_duration(pipeline)
            if tags and success:
                return tags, mime, have_audio, duration

            # Don't try workaround for non-seekable sources such as mmssrc:
            if not _query_seekable(pipeline):
                return tags, mime, have_audio, duration

            # Workaround for upstream bug which causes tags/duration to
            # arrive after pre-roll. We get around this by starting to play
            # the track and then waiting for a duration change.
            # https://bugzilla.gnome.org/show_bug.cgi?id=763553
            logger.debug("Using workaround for duration missing before play.")
            result = pipeline.set_state(Gst.State.PLAYING)
            if result == Gst.StateChangeReturn.FAILURE:
                return tags, mime, have_audio, duration
        elif msg.type == Gst.MessageType.DURATION_CHANGED and tags:
            # VBR formats sometimes seem to not have a duration by the time
            # we go back to paused. So just try to get it right away.
            success, duration = _query_duration(pipeline)
            pipeline.set_state(Gst.State.PAUSED)
            if success:
                return tags, mime, have_audio, duration
        elif msg.type == Gst.MessageType.TAG:
            taglist = msg.parse_tag()
            # Note that this will only keep the last tag.
            tags.update(tags_lib.convert_taglist(taglist))

        # Budget the remaining time for the next bus poll.
        timeout = timeout_ms - (int(time.time() * 1000) - start)

    raise exceptions.ScannerError(f"Timeout after {timeout_ms:d}ms")
if __name__ == "__main__":
    # Ad-hoc CLI for debugging: scan each URI (or local file path) given on
    # the command line and print the collected metadata.
    import os
    import sys

    from mopidy.internal import path

    logging.basicConfig(
        format="%(asctime)-15s %(levelname)s %(message)s",
        level=log.TRACE_LOG_LEVEL,
    )

    scanner = Scanner(5000)
    for uri in sys.argv[1:]:
        if not Gst.uri_is_valid(uri):
            # Treat non-URI arguments as local file paths.
            uri = path.path_to_uri(os.path.abspath(uri))
        try:
            result = scanner.scan(uri)
            for key in ("uri", "mime", "duration", "playable", "seekable"):
                value = getattr(result, key)
                print(f"{key:<20} {value}")
            print("tags")
            for tag, value in result.tags.items():
                line = f"{tag:<20} {value}"
                if len(line) > 77:
                    # Keep the debug output readable on one line.
                    line = line[:77] + "..."
                print(line)
        except exceptions.ScannerError as error:
            print(f"{uri}: {error}")

View File

@@ -0,0 +1,161 @@
import collections
import datetime
import logging
import numbers
from mopidy.internal import log
from mopidy.internal.gi import GLib, Gst
from mopidy.models import Album, Artist, Track
logger = logging.getLogger(__name__)
def convert_taglist(taglist):
    """Convert a :class:`Gst.TagList` to plain Python types.

    Knows how to convert:

    - Dates
    - Buffers
    - Numbers
    - Strings
    - Booleans

    Unknown types will be ignored and trace logged. Tag keys are all strings
    defined as part of GStreamer under GstTagList_.

    .. _GstTagList: https://developer.gnome.org/gstreamer/stable/\
gstreamer-GstTagList.html

    :param taglist: A GStreamer taglist to be converted.
    :type taglist: :class:`Gst.TagList`
    :rtype: dictionary of tag keys with a list of values.
    """
    result = collections.defaultdict(list)

    for n in range(taglist.n_tags()):
        tag = taglist.nth_tag_name(n)

        for i in range(taglist.get_tag_size(tag)):
            value = taglist.get_value_index(tag, i)

            if isinstance(value, GLib.Date):
                try:
                    date = datetime.date(
                        value.get_year(), value.get_month(), value.get_day()
                    )
                    result[tag].append(date.isoformat())
                except ValueError:
                    # GLib dates are not validated on creation, so invalid
                    # combinations (e.g. day 0) surface here.
                    logger.debug(
                        "Ignoring dodgy date value: %d-%d-%d",
                        value.get_year(),
                        value.get_month(),
                        value.get_day(),
                    )
            elif isinstance(value, Gst.DateTime):
                result[tag].append(value.to_iso8601_string())
            elif isinstance(value, bytes):
                # Decode leniently; tags may carry arbitrary encodings.
                result[tag].append(value.decode(errors="replace"))
            elif isinstance(value, (str, bool, numbers.Number)):
                result[tag].append(value)
            elif isinstance(value, Gst.Sample):
                data = _extract_sample_data(value)
                if data:
                    result[tag].append(data)
            else:
                logger.log(
                    log.TRACE_LOG_LEVEL,
                    "Ignoring unknown tag data: %r = %r",
                    tag,
                    value,
                )

    # TODO: dict(result) to not leak the defaultdict, or just use setdefault?
    return result
def _extract_sample_data(sample):
buf = sample.get_buffer()
if not buf:
return None
return buf.extract_dup(0, buf.get_size())
# TODO: split based on "stream" and "track" based conversion? i.e. handle data
# from radios in it's own helper instead?
def convert_tags_to_track(tags):
    """Convert our normalized tags to a track.

    :param tags: dictionary of tag keys with a list of values
    :type tags: :class:`dict`
    :rtype: :class:`mopidy.models.Track`
    """
    album_kwargs = {}
    track_kwargs = {}

    track_kwargs["composers"] = _artists(tags, Gst.TAG_COMPOSER)
    track_kwargs["performers"] = _artists(tags, Gst.TAG_PERFORMER)
    track_kwargs["artists"] = _artists(
        tags, Gst.TAG_ARTIST, "musicbrainz-artistid", "musicbrainz-sortname"
    )
    album_kwargs["artists"] = _artists(
        tags, Gst.TAG_ALBUM_ARTIST, "musicbrainz-albumartistid"
    )

    track_kwargs["genre"] = "; ".join(tags.get(Gst.TAG_GENRE, []))
    track_kwargs["name"] = "; ".join(tags.get(Gst.TAG_TITLE, []))
    if not track_kwargs["name"]:
        # Fall back to the organization tag when there is no title.
        track_kwargs["name"] = "; ".join(tags.get(Gst.TAG_ORGANIZATION, []))

    # Comment falls back through location and copyright tags.
    track_kwargs["comment"] = "; ".join(tags.get("comment", []))
    if not track_kwargs["comment"]:
        track_kwargs["comment"] = "; ".join(tags.get(Gst.TAG_LOCATION, []))
    if not track_kwargs["comment"]:
        track_kwargs["comment"] = "; ".join(tags.get(Gst.TAG_COPYRIGHT, []))

    track_kwargs["track_no"] = tags.get(Gst.TAG_TRACK_NUMBER, [None])[0]
    track_kwargs["disc_no"] = tags.get(Gst.TAG_ALBUM_VOLUME_NUMBER, [None])[0]
    track_kwargs["bitrate"] = tags.get(Gst.TAG_BITRATE, [None])[0]
    track_kwargs["musicbrainz_id"] = tags.get("musicbrainz-trackid", [None])[0]

    album_kwargs["name"] = tags.get(Gst.TAG_ALBUM, [None])[0]
    album_kwargs["num_tracks"] = tags.get(Gst.TAG_TRACK_COUNT, [None])[0]
    album_kwargs["num_discs"] = tags.get(Gst.TAG_ALBUM_VOLUME_COUNT, [None])[0]
    album_kwargs["musicbrainz_id"] = tags.get("musicbrainz-albumid", [None])[0]

    album_kwargs["date"] = tags.get(Gst.TAG_DATE, [None])[0]
    if not album_kwargs["date"]:
        # Fall back to the full date-time tag, keeping only the date part.
        # Fix: renamed local from `datetime`, which shadowed the module-level
        # `import datetime`.
        date_time = tags.get(Gst.TAG_DATE_TIME, [None])[0]
        if date_time is not None:
            album_kwargs["date"] = date_time.split("T")[0]
    track_kwargs["date"] = album_kwargs["date"]

    # Clear out any empty values we found
    track_kwargs = {k: v for k, v in track_kwargs.items() if v}
    album_kwargs = {k: v for k, v in album_kwargs.items() if v}

    # Only bother with album if we have a name to show.
    if album_kwargs.get("name"):
        track_kwargs["album"] = Album(**album_kwargs)

    return Track(**track_kwargs)
def _artists(tags, artist_name, artist_id=None, artist_sortname=None):
    """Build a list of Artist models from the given tag keys, or None."""
    names = tags.get(artist_name)
    if not names:
        # Name missing, don't set artist
        return None

    has_extra_fields = artist_id in tags or artist_sortname in tags
    if len(names) == 1 and has_extra_fields:
        # One artist name and either id or sortname: include every
        # available field.
        attrs = {"name": names[0]}
        if artist_id in tags:
            attrs["musicbrainz_id"] = tags[artist_id][0]
        if artist_sortname in tags:
            attrs["sortname"] = tags[artist_sortname][0]
        return [Artist(**attrs)]

    # Multiple artists: provide names only to avoid ambiguity.
    return [Artist(name=name) for name in names]

View File

@@ -0,0 +1,100 @@
from mopidy import httpclient
from mopidy.internal.gi import Gst
def calculate_duration(num_samples, sample_rate):
    """Determine duration of samples using GStreamer helper for precise
    math."""
    # util_uint64_scale scales num_samples * SECOND / sample_rate without
    # 64-bit overflow.
    return Gst.util_uint64_scale(num_samples, Gst.SECOND, sample_rate)
def create_buffer(data, timestamp=None, duration=None):
    """Create a new GStreamer buffer based on provided data.

    Mainly intended to keep gst imports out of non-audio modules.

    .. versionchanged:: 2.0
        ``capabilites`` argument was removed.
    """
    if not data:
        raise ValueError("Cannot create buffer without data")
    gst_buffer = Gst.Buffer.new_wrapped(data)
    if timestamp is not None:
        gst_buffer.pts = timestamp
    if duration is not None:
        gst_buffer.duration = duration
    return gst_buffer
def millisecond_to_clocktime(value):
    """Convert a millisecond time to internal GStreamer time (nanoseconds)."""
    return value * Gst.MSECOND
def clocktime_to_millisecond(value):
    """Convert an internal GStreamer time to millisecond time.

    Uses floor division, so sub-millisecond precision is dropped.
    """
    return value // Gst.MSECOND
def supported_uri_schemes(uri_schemes):
    """Determine which URIs we can actually support from provided whitelist.

    :param uri_schemes: list/set of URIs to check support for.
    :type uri_schemes: list or set or URI schemes as strings.
    :rtype: set of URI schemes we can support via this GStreamer install.
    """
    registry = Gst.Registry.get()
    return {
        scheme
        for factory in registry.get_feature_list(Gst.ElementFactory)
        for scheme in factory.get_uri_protocols()
        if scheme in uri_schemes
    }
def setup_proxy(element, config):
    """Configure a GStreamer element with proxy settings.

    No-op when the element has no ``proxy`` property or no proxy hostname
    is configured.

    :param element: element to setup proxy in.
    :type element: :class:`Gst.GstElement`
    :param config: proxy settings to use.
    :type config: :class:`dict`
    """
    if hasattr(element.props, "proxy") and config.get("hostname"):
        element.set_property(
            "proxy", httpclient.format_proxy(config, auth=False)
        )
        element.set_property("proxy-id", config.get("username"))
        element.set_property("proxy-pw", config.get("password"))
class Signals:

    """Helper for tracking gobject signal registrations"""

    def __init__(self):
        # Maps (element, event) -> handler id returned by element.connect().
        self._ids = {}

    def connect(self, element, event, func, *args):
        """Connect a function + args to signal event on an element.

        Each event may only be handled by one callback in this
        implementation.
        """
        key = (element, event)
        assert key not in self._ids
        self._ids[key] = element.connect(event, func, *args)

    def disconnect(self, element, event):
        """Disconnect whatever handler we have for an element+event pair.

        Does nothing if the handler has already been removed.
        """
        handler_id = self._ids.pop((element, event), None)
        if handler_id is None:
            return
        element.disconnect(handler_id)

    def clear(self):
        """Clear all registered signal handlers."""
        while self._ids:
            (element, _event), handler_id = self._ids.popitem()
            element.disconnect(handler_id)

View File

@@ -0,0 +1,445 @@
import logging
import pykka
from mopidy import listener
logger = logging.getLogger(__name__)
class Backend:

    """Backend API

    If the backend has problems during initialization it should raise
    :exc:`mopidy.exceptions.BackendError` with a descriptive error message.
    This will make Mopidy print the error message and exit so that the user
    can fix the issue.

    :param config: the entire Mopidy configuration
    :type config: dict
    :param audio: actor proxy for the audio subsystem
    :type audio: :class:`pykka.ActorProxy` for :class:`mopidy.audio.Audio`
    """

    #: Actor proxy to an instance of :class:`mopidy.audio.Audio`.
    #:
    #: Should be passed to the backend constructor as the kwarg ``audio``,
    #: which will then set this field.
    audio = None

    #: The library provider. An instance of
    #: :class:`~mopidy.backend.LibraryProvider`, or :class:`None` if
    #: the backend doesn't provide a library.
    library = None

    #: The playback provider. An instance of
    #: :class:`~mopidy.backend.PlaybackProvider`, or :class:`None` if
    #: the backend doesn't provide playback.
    playback = None

    #: The playlists provider. An instance of
    #: :class:`~mopidy.backend.PlaylistsProvider`, or :class:`None` if
    #: the backend doesn't provide playlists.
    playlists = None

    #: List of URI schemes this backend can handle.
    uri_schemes = []

    # Because the providers are marked as pykka.traversable(), we can't
    # get() them from another actor, and need helper methods to check if
    # the providers are set or None.

    def has_library(self):
        """Return True when a library provider is set."""
        return self.library is not None

    def has_library_browse(self):
        """Return True when the library provider supports browsing."""
        if not self.has_library():
            return False
        return self.library.root_directory is not None

    def has_playback(self):
        """Return True when a playback provider is set."""
        return self.playback is not None

    def has_playlists(self):
        """Return True when a playlists provider is set."""
        return self.playlists is not None

    def ping(self):
        """Called to check if the actor is still alive."""
        return True
@pykka.traversable
class LibraryProvider:

    """
    Base class for a backend's library provider.

    :param backend: backend the controller is a part of
    :type backend: :class:`mopidy.backend.Backend`
    """

    root_directory = None
    """
    :class:`mopidy.models.Ref.directory` instance with a URI and name set
    representing the root of this library's browse tree. URIs must
    use one of the schemes supported by the backend, and name should
    be set to a human friendly value.

    *MUST be set by any class that implements* :meth:`LibraryProvider.browse`.
    """

    def __init__(self, backend):
        self.backend = backend

    def browse(self, uri):
        """
        See :meth:`mopidy.core.LibraryController.browse`.

        If you implement this method, make sure to also set
        :attr:`root_directory`.

        *MAY be implemented by subclass.*
        """
        return []

    def get_distinct(self, field, query=None):
        """
        See :meth:`mopidy.core.LibraryController.get_distinct`.

        *MAY be implemented by subclass.*

        Default implementation will simply return an empty set.

        Note that backends should always return an empty set for unexpected
        field types.
        """
        return set()

    def get_images(self, uris):
        """
        See :meth:`mopidy.core.LibraryController.get_images`.

        *MAY be implemented by subclass.*

        Default implementation will simply return an empty dictionary.
        """
        return {}

    def lookup(self, uri):
        """
        See :meth:`mopidy.core.LibraryController.lookup`.

        *MUST be implemented by subclass.*
        """
        raise NotImplementedError

    def refresh(self, uri=None):
        """
        See :meth:`mopidy.core.LibraryController.refresh`.

        *MAY be implemented by subclass.*
        """
        pass

    def search(self, query=None, uris=None, exact=False):
        """
        See :meth:`mopidy.core.LibraryController.search`.

        *MAY be implemented by subclass.*

        .. versionadded:: 1.0
            The ``exact`` param which replaces the old ``find_exact``.
        """
        pass
@pykka.traversable
class PlaybackProvider:

    """
    Base class for a backend's playback provider.

    :param audio: the audio actor
    :type audio: actor proxy to an instance of :class:`mopidy.audio.Audio`
    :param backend: the backend
    :type backend: :class:`mopidy.backend.Backend`
    """

    def __init__(self, audio, backend):
        self.audio = audio
        self.backend = backend

    def pause(self):
        """
        Pause playback.

        *MAY be reimplemented by subclass.*

        :rtype: :class:`True` if successful, else :class:`False`
        """
        return self.audio.pause_playback().get()

    def play(self):
        """
        Start playback.

        *MAY be reimplemented by subclass.*

        :rtype: :class:`True` if successful, else :class:`False`
        """
        return self.audio.start_playback().get()

    def prepare_change(self):
        """
        Indicate that an URI change is about to happen.

        *MAY be reimplemented by subclass.*

        It is extremely unlikely it makes sense for any backends to override
        this. For most practical purposes it should be considered an internal
        call between backends and core that backend authors should not touch.
        """
        self.audio.prepare_change().get()

    def translate_uri(self, uri):
        """
        Convert custom URI scheme to real playable URI.

        *MAY be reimplemented by subclass.*

        This is very likely the *only* thing you need to override as a
        backend author. Typically this is where you convert any Mopidy
        specific URI to a real URI and then return it. If you can't convert
        the URI just return :class:`None`.

        :param uri: the URI to translate
        :type uri: string
        :rtype: string or :class:`None` if the URI could not be translated
        """
        return uri

    def is_live(self, uri):
        """
        Decide if the URI should be treated as a live stream or not.

        *MAY be reimplemented by subclass.*

        Playing a source as a live stream disables buffering, which reduces
        latency before playback starts, and discards data when paused.

        :param uri: the URI
        :type uri: string
        :rtype: bool
        """
        return False

    def change_track(self, track):
        """
        Switch to provided track.

        *MAY be reimplemented by subclass.*

        It is unlikely it makes sense for any backends to override
        this. For most practical purposes it should be considered an internal
        call between backends and core that backend authors should not touch.

        The default implementation will call :meth:`translate_uri` which
        is what you want to implement.

        :param track: the track to play
        :type track: :class:`mopidy.models.Track`
        :rtype: :class:`True` if successful, else :class:`False`
        """
        uri = self.translate_uri(track.uri)
        if uri != track.uri:
            logger.debug("Backend translated URI from %s to %s", track.uri, uri)
        if not uri:
            # translate_uri() could not resolve the track.
            return False
        self.audio.set_uri(uri, live_stream=self.is_live(uri)).get()
        return True

    def resume(self):
        """
        Resume playback at the same time position playback was paused.

        *MAY be reimplemented by subclass.*

        :rtype: :class:`True` if successful, else :class:`False`
        """
        return self.audio.start_playback().get()

    def seek(self, time_position):
        """
        Seek to a given time position.

        *MAY be reimplemented by subclass.*

        :param time_position: time position in milliseconds
        :type time_position: int
        :rtype: :class:`True` if successful, else :class:`False`
        """
        return self.audio.set_position(time_position).get()

    def stop(self):
        """
        Stop playback.

        *MAY be reimplemented by subclass.*

        Should not be used for tracking if tracks have been played or when we
        are done playing them.

        :rtype: :class:`True` if successful, else :class:`False`
        """
        return self.audio.stop_playback().get()

    def get_time_position(self):
        """
        Get the current time position in milliseconds.

        *MAY be reimplemented by subclass.*

        :rtype: int
        """
        return self.audio.get_position().get()
@pykka.traversable
class PlaylistsProvider:

    """
    A playlist provider exposes a collection of playlists, methods to
    create/change/delete playlists in this collection, and lookup of any
    playlist the backend knows about.

    :param backend: backend the controller is a part of
    :type backend: :class:`mopidy.backend.Backend` instance
    """

    def __init__(self, backend):
        self.backend = backend

    def as_list(self):
        """
        Get a list of the currently available playlists.

        Returns a list of :class:`~mopidy.models.Ref` objects referring to
        the playlists. In other words, no information about the playlists'
        content is given.

        :rtype: list of :class:`mopidy.models.Ref`

        .. versionadded:: 1.0
        """
        raise NotImplementedError

    def get_items(self, uri):
        """
        Get the items in a playlist specified by ``uri``.

        Returns a list of :class:`~mopidy.models.Ref` objects referring to
        the playlist's items.

        If a playlist with the given ``uri`` doesn't exist, it returns
        :class:`None`.

        :rtype: list of :class:`mopidy.models.Ref`, or :class:`None`

        .. versionadded:: 1.0
        """
        raise NotImplementedError

    def create(self, name):
        """
        Create a new empty playlist with the given name.

        Returns a new playlist with the given name and an URI, or
        :class:`None` on failure.

        *MUST be implemented by subclass.*

        :param name: name of the new playlist
        :type name: string
        :rtype: :class:`mopidy.models.Playlist` or :class:`None`
        """
        raise NotImplementedError

    def delete(self, uri):
        """
        Delete playlist identified by the URI.

        Returns :class:`True` if deleted, :class:`False` otherwise.

        *MUST be implemented by subclass.*

        :param uri: URI of the playlist to delete
        :type uri: string
        :rtype: :class:`bool`

        .. versionchanged:: 2.2
            Return type defined.
        """
        raise NotImplementedError

    def lookup(self, uri):
        """
        Lookup playlist with given URI in both the set of playlists and in
        any other playlist source.

        Returns the playlist or :class:`None` if not found.

        *MUST be implemented by subclass.*

        :param uri: playlist URI
        :type uri: string
        :rtype: :class:`mopidy.models.Playlist` or :class:`None`
        """
        raise NotImplementedError

    def refresh(self):
        """
        Refresh the playlists in :attr:`playlists`.

        *MUST be implemented by subclass.*
        """
        raise NotImplementedError

    def save(self, playlist):
        """
        Save the given playlist.

        The playlist must have an ``uri`` attribute set. To create a new
        playlist with an URI, use :meth:`create`.

        Returns the saved playlist or :class:`None` on failure.

        *MUST be implemented by subclass.*

        :param playlist: the playlist to save
        :type playlist: :class:`mopidy.models.Playlist`
        :rtype: :class:`mopidy.models.Playlist` or :class:`None`
        """
        raise NotImplementedError
class BackendListener(listener.Listener):

    """
    Marker interface for recipients of events sent by the backend actors.

    Any Pykka actor that mixes in this class will receive calls to the
    methods defined here when the corresponding events happen in a backend
    actor. This interface is used both for looking up what actors to notify
    of the events, and for providing default implementations for those
    listeners that are not interested in all events.

    Normally, only the Core actor should mix in this class.
    """

    @staticmethod
    def send(event, **kwargs):
        """Helper to allow calling of backend listener events"""
        listener.send(BackendListener, event, **kwargs)

    def playlists_loaded(self):
        """
        Called when playlists are loaded or refreshed.

        *MAY* be implemented by actor.
        """
        pass

View File

@@ -0,0 +1,489 @@
import argparse
import collections
import contextlib
import logging
import os
import pathlib
import signal
import sys
import pykka
from mopidy import config as config_lib
from mopidy import exceptions
from mopidy.audio import Audio
from mopidy.core import Core
from mopidy.internal import deps, process, timer, versioning
from mopidy.internal.gi import GLib
logger = logging.getLogger(__name__)
# Candidate config file locations: each system config dir, then the user's
# config dir, each with mopidy/mopidy.conf inside.
_default_config = [
    (pathlib.Path(base) / "mopidy" / "mopidy.conf").resolve()
    for base in GLib.get_system_config_dirs() + [GLib.get_user_config_dir()]
]
# Colon-separated string form used as the --config default.
DEFAULT_CONFIG = ":".join(map(str, _default_config))
def config_files_type(value):
    """argparse type: split a ``:``-separated list of config file paths."""
    return value.split(":")
def config_override_type(value):
    """argparse type: parse a ``section/key=value`` config override.

    :param value: raw command line argument
    :return: ``(section, key, value)`` tuple with whitespace stripped
    :raises argparse.ArgumentTypeError: if the argument does not have the
        ``section/key=value`` form
    """
    try:
        section, remainder = value.split("/", 1)
        key, value = remainder.split("=", 1)
        return (section.strip(), key.strip(), value.strip())
    except ValueError:
        # Suppress the unhelpful ValueError context (lint B904); the message
        # below fully describes the problem to the user.
        raise argparse.ArgumentTypeError(
            f"{value} must have the format section/key=value"
        ) from None
class _ParserError(Exception):
def __init__(self, message):
self.message = message
class _HelpError(Exception):

    """Raised to unwind out of argument parsing when help is requested."""

    pass
class _ArgumentParser(argparse.ArgumentParser):

    """ArgumentParser variant that raises instead of printing and exiting."""

    def error(self, message):
        # argparse calls error() on invalid input; raise so the caller
        # decides how to report the problem and exit.
        raise _ParserError(message)
class _HelpAction(argparse.Action):

    """Replacement for argparse's built-in help action.

    Instead of printing help and exiting directly, it raises
    :class:`_HelpError` so the command tree can render its own help output.
    """

    def __init__(self, option_strings, dest=None, help=None):
        super().__init__(
            option_strings=option_strings,
            dest=dest or argparse.SUPPRESS,
            default=argparse.SUPPRESS,
            nargs=0,  # the option consumes no argument values
            help=help,
        )

    def __call__(self, parser, namespace, values, option_string=None):
        raise _HelpError()
class Command:
"""Command parser and runner for building trees of commands.
This class provides a wraper around :class:`argparse.ArgumentParser`
for handling this type of command line application in a better way than
argprases own sub-parser handling.
"""
#: Help text to display in help output.
help = None
def __init__(self):
    # Sub-commands by name, in registration order.
    self._children = collections.OrderedDict()
    # (args, kwargs) tuples queued for the parser's add_argument().
    self._arguments = []
    # Values forced into the final parse result by set().
    self._overrides = {}
def _build(self):
    """Create the argparse parser and return ``(parser, actions)``."""
    parser = _ArgumentParser(add_help=False)
    parser.register("action", "help", _HelpAction)
    actions = [
        parser.add_argument(*args, **kwargs)
        for args, kwargs in self._arguments
    ]
    # Swallow the remainder so child commands can parse it themselves;
    # note this hidden action is deliberately not part of `actions`.
    parser.add_argument(
        "_args", nargs=argparse.REMAINDER, help=argparse.SUPPRESS
    )
    return parser, actions
def add_child(self, name, command):
    """Add a child parser to consider using.

    :param name: name to use for the sub-command that is being added.
    :type name: string
    :param command: the sub-command to register under *name*
    :type command: :class:`Command`
    """
    self._children[name] = command
def add_argument(self, *args, **kwargs):
    """Add an argument to the parser.

    This method takes all the same arguments as the
    :class:`argparse.ArgumentParser` version of this method.

    Arguments are stored and only applied when the parser is built.
    """
    self._arguments.append((args, kwargs))
def set(self, **kwargs):
"""Override a value in the finaly result of parsing."""
self._overrides.update(kwargs)
def exit(self, status_code=0, message=None, usage=None):
"""Optionally print a message and exit."""
print("\n\n".join(m for m in (usage, message) if m))
sys.exit(status_code)
def format_usage(self, prog=None):
"""Format usage for current parser."""
actions = self._build()[1]
prog = prog or os.path.basename(sys.argv[0])
return self._usage(actions, prog) + "\n"
def _usage(self, actions, prog):
formatter = argparse.HelpFormatter(prog)
formatter.add_usage(None, actions, [])
return formatter.format_help().strip()
def format_help(self, prog=None):
"""Format help for current parser and children."""
actions = self._build()[1]
prog = prog or os.path.basename(sys.argv[0])
formatter = argparse.HelpFormatter(prog)
formatter.add_usage(None, actions, [])
if self.help:
formatter.add_text(self.help)
if actions:
formatter.add_text("OPTIONS:")
formatter.start_section(None)
formatter.add_arguments(actions)
formatter.end_section()
subhelp = []
for name, child in self._children.items():
child._subhelp(name, subhelp)
if subhelp:
formatter.add_text("COMMANDS:")
subhelp.insert(0, "")
return formatter.format_help() + "\n".join(subhelp)
def _subhelp(self, name, result):
actions = self._build()[1]
if self.help or actions:
formatter = argparse.HelpFormatter(name)
formatter.add_usage(None, actions, [], "")
formatter.start_section(None)
formatter.add_text(self.help)
formatter.start_section(None)
formatter.add_arguments(actions)
formatter.end_section()
formatter.end_section()
result.append(formatter.format_help())
for childname, child in self._children.items():
child._subhelp(" ".join((name, childname)), result)
def parse(self, args, prog=None):
"""Parse command line arguments.
Will recursively parse commands until a final parser is found or an
error occurs. In the case of errors we will print a message and exit.
Otherwise, any overrides are applied and the current parser stored
in the command attribute of the return value.
:param args: list of arguments to parse
:type args: list of strings
:param prog: name to use for program
:type prog: string
:rtype: :class:`argparse.Namespace`
"""
prog = prog or os.path.basename(sys.argv[0])
try:
return self._parse(
args, argparse.Namespace(), self._overrides.copy(), prog
)
except _HelpError:
self.exit(0, self.format_help(prog))
def _parse(self, args, namespace, overrides, prog):
overrides.update(self._overrides)
parser, actions = self._build()
try:
result = parser.parse_args(args, namespace)
except _ParserError as exc:
self.exit(1, str(exc), self._usage(actions, prog))
if not result._args:
for attr, value in overrides.items():
setattr(result, attr, value)
delattr(result, "_args")
result.command = self
return result
child = result._args.pop(0)
if child not in self._children:
usage = self._usage(actions, prog)
self.exit(1, f"unrecognized command: {child}", usage)
return self._children[child]._parse(
result._args, result, overrides, " ".join([prog, child])
)
def run(self, *args, **kwargs):
"""Run the command.
Must be implemented by sub-classes that are not simply an intermediate
in the command namespace.
"""
raise NotImplementedError
@contextlib.contextmanager
def _actor_error_handling(name):
    """Context manager that logs known actor startup errors without
    re-raising them, so one failing actor does not abort the others.

    :param name: actor class name used in the log messages
    """
    try:
        yield
    except exceptions.BackendError as exc:
        logger.error("Backend (%s) initialization error: %s", name, exc)
    except exceptions.FrontendError as exc:
        logger.error("Frontend (%s) initialization error: %s", name, exc)
    except exceptions.MixerError as exc:
        logger.error("Mixer (%s) initialization error: %s", name, exc)
    except Exception:
        logger.exception("Got un-handled exception from %s", name)
# TODO: move out of this utility class
class RootCommand(Command):
    """Top-level ``mopidy`` command: global options plus the server runner."""

    def __init__(self):
        super().__init__()
        self.set(base_verbosity_level=0)
        self.add_argument(
            "-h", "--help", action="help", help="Show this message and exit"
        )
        self.add_argument(
            "--version",
            action="version",
            version=f"Mopidy {versioning.get_version()}",
        )
        self.add_argument(
            "-q",
            "--quiet",
            action="store_const",
            const=-1,
            dest="verbosity_level",
            help="less output (warning level)",
        )
        self.add_argument(
            "-v",
            "--verbose",
            action="count",
            dest="verbosity_level",
            default=0,
            help="more output (repeat up to 4 times for even more)",
        )
        self.add_argument(
            "--config",
            action="store",
            dest="config_files",
            type=config_files_type,
            default=DEFAULT_CONFIG,
            metavar="FILES",
            help="config files to use, colon seperated, later files override",
        )
        self.add_argument(
            "-o",
            "--option",
            action="append",
            dest="config_overrides",
            type=config_override_type,
            metavar="OPTIONS",
            help="`section/key=value` values to override config options",
        )

    def run(self, args, config):
        """Start mixer/audio/backends/core/frontends, run the GLib mainloop,
        and tear everything down again.

        :returns: process exit status code (0 on success, 1 on init error)
        """
        def on_sigterm(loop):
            logger.info("GLib mainloop got SIGTERM. Exiting...")
            loop.quit()

        loop = GLib.MainLoop()
        GLib.unix_signal_add(
            GLib.PRIORITY_DEFAULT, signal.SIGTERM, on_sigterm, loop
        )
        mixer_class = self.get_mixer_class(config, args.registry["mixer"])
        backend_classes = args.registry["backend"]
        frontend_classes = args.registry["frontend"]
        core = None
        exit_status_code = 0
        try:
            mixer = None
            if mixer_class is not None:
                mixer = self.start_mixer(config, mixer_class)
            if mixer:
                self.configure_mixer(config, mixer)
            audio = self.start_audio(config, mixer)
            backends = self.start_backends(config, backend_classes, audio)
            core = self.start_core(config, mixer, backends, audio)
            self.start_frontends(config, frontend_classes, core)
            logger.info("Starting GLib mainloop")
            loop.run()
        except (
            exceptions.BackendError,
            exceptions.FrontendError,
            exceptions.MixerError,
        ):
            logger.info("Initialization error. Exiting...")
            exit_status_code = 1
        except KeyboardInterrupt:
            logger.info("Interrupted. Exiting...")
        except Exception:
            logger.exception("Uncaught exception")
        finally:
            loop.quit()
            # Stop in reverse start order: frontends first, mixer last.
            self.stop_frontends(frontend_classes)
            self.stop_core(core)
            self.stop_backends(backend_classes)
            self.stop_audio()
            if mixer_class is not None:
                self.stop_mixer(mixer_class)
            process.stop_remaining_actors()
        return exit_status_code

    def get_mixer_class(self, config, mixer_classes):
        """Pick the mixer class named by ``audio/mixer``, or None if the
        mixer is disabled. Exits the process if the name is not unique.
        """
        logger.debug(
            "Available Mopidy mixers: %s",
            ", ".join(m.__name__ for m in mixer_classes) or "none",
        )
        if config["audio"]["mixer"] == "none":
            logger.debug("Mixer disabled")
            return None
        selected_mixers = [
            m for m in mixer_classes if m.name == config["audio"]["mixer"]
        ]
        if len(selected_mixers) != 1:
            logger.error(
                'Did not find unique mixer "%s". Alternatives are: %s',
                config["audio"]["mixer"],
                ", ".join([m.name for m in mixer_classes]) + ", none" or "none",
            )
            process.exit_process()
        return selected_mixers[0]

    def start_mixer(self, config, mixer_class):
        """Start the mixer actor; return its proxy, or None if it died."""
        logger.info("Starting Mopidy mixer: %s", mixer_class.__name__)
        with _actor_error_handling(mixer_class.__name__):
            mixer = mixer_class.start(config=config).proxy()
            try:
                # ping() blocks until the actor has finished starting up.
                mixer.ping().get()
                return mixer
            except pykka.ActorDeadError as exc:
                logger.error("Actor died: %s", exc)
        return None

    def configure_mixer(self, config, mixer):
        """Apply the configured initial volume, if any, to the mixer."""
        volume = config["audio"]["mixer_volume"]
        if volume is not None:
            mixer.set_volume(volume)
            logger.info("Mixer volume set to %d", volume)
        else:
            logger.debug("Mixer volume left unchanged")

    def start_audio(self, config, mixer):
        """Start the audio actor and return its proxy."""
        logger.info("Starting Mopidy audio")
        return Audio.start(config=config, mixer=mixer).proxy()

    def start_backends(self, config, backend_classes, audio):
        """Start all backend actors; return proxies for those that survived."""
        logger.info(
            "Starting Mopidy backends: %s",
            ", ".join(b.__name__ for b in backend_classes) or "none",
        )
        backends = []
        for backend_class in backend_classes:
            with _actor_error_handling(backend_class.__name__):
                with timer.time_logger(backend_class.__name__):
                    backend = backend_class.start(
                        config=config, audio=audio
                    ).proxy()
                    backends.append(backend)
        # Block until all on_starts have finished, letting them run in parallel
        for backend in backends[:]:
            try:
                backend.ping().get()
            except pykka.ActorDeadError as exc:
                backends.remove(backend)
                logger.error("Actor died: %s", exc)
        return backends

    def start_core(self, config, mixer, backends, audio):
        """Start the core actor, run its setup, and return its proxy."""
        logger.info("Starting Mopidy core")
        core = Core.start(
            config=config, mixer=mixer, backends=backends, audio=audio
        ).proxy()
        core.setup().get()
        return core

    def start_frontends(self, config, frontend_classes, core):
        """Start all frontend actors (failures are logged, not raised)."""
        logger.info(
            "Starting Mopidy frontends: %s",
            ", ".join(f.__name__ for f in frontend_classes) or "none",
        )
        for frontend_class in frontend_classes:
            with _actor_error_handling(frontend_class.__name__):
                with timer.time_logger(frontend_class.__name__):
                    frontend_class.start(config=config, core=core)

    def stop_frontends(self, frontend_classes):
        """Stop all actors of every frontend class."""
        logger.info("Stopping Mopidy frontends")
        for frontend_class in frontend_classes:
            process.stop_actors_by_class(frontend_class)

    def stop_core(self, core):
        """Tear down and stop the core actor, if it was started."""
        logger.info("Stopping Mopidy core")
        if core:
            core.teardown().get()
        process.stop_actors_by_class(Core)

    def stop_backends(self, backend_classes):
        """Stop all actors of every backend class."""
        logger.info("Stopping Mopidy backends")
        for backend_class in backend_classes:
            process.stop_actors_by_class(backend_class)

    def stop_audio(self):
        """Stop the audio actor."""
        logger.info("Stopping Mopidy audio")
        process.stop_actors_by_class(Audio)

    def stop_mixer(self, mixer_class):
        """Stop the mixer actor."""
        logger.info("Stopping Mopidy mixer")
        process.stop_actors_by_class(mixer_class)
class ConfigCommand(Command):
    """``mopidy config`` sub-command: print the effective configuration."""

    help = "Show currently active configuration."

    def __init__(self):
        super().__init__()
        self.set(base_verbosity_level=-1)

    def run(self, config, errors, schemas):
        """Format the active config and print it; always returns 0."""
        formatted = config_lib.format(config, schemas, errors)
        # Round-trip through bytes so anything that is not valid UTF-8 is
        # replaced before printing, which otherwise could fail.
        printable = formatted.encode(errors="surrogateescape").decode(
            errors="replace"
        )
        print(printable)
        return 0
class DepsCommand(Command):
    """``mopidy deps`` sub-command: print dependency/debug information."""

    help = "Show dependencies and debug information."

    def __init__(self):
        super().__init__()
        self.set(base_verbosity_level=-1)

    def run(self):
        """Print the formatted dependency list; always returns 0."""
        report = deps.format_dependency_list()
        print(report)
        return 0

View File

@@ -0,0 +1,321 @@
import configparser
import itertools
import logging
import os
import pathlib
import re
from collections.abc import Mapping
from mopidy.config import keyring
from mopidy.config.schemas import ConfigSchema, MapConfigSchema
from mopidy.config.types import (
Boolean,
ConfigValue,
Deprecated,
DeprecatedValue,
Hostname,
Integer,
List,
LogColor,
LogLevel,
Path,
Port,
Secret,
String,
)
from mopidy.internal import path, versioning
__all__ = [
    # TODO List everything that is reexported, not just the unused parts.
    "ConfigValue",
    "List",
]

logger = logging.getLogger(__name__)

# --- Built-in config schemas, one per config section --------------------

_core_schema = ConfigSchema("core")
_core_schema["cache_dir"] = Path()
_core_schema["config_dir"] = Path()
_core_schema["data_dir"] = Path()
# MPD supports at most 10k tracks, some clients segfault when this is exceeded.
_core_schema["max_tracklist_length"] = Integer(minimum=1)
_core_schema["restore_state"] = Boolean(optional=True)

_logging_schema = ConfigSchema("logging")
_logging_schema["verbosity"] = Integer(minimum=-1, maximum=4)
_logging_schema["format"] = String()
_logging_schema["color"] = Boolean()
_logging_schema["console_format"] = Deprecated()
_logging_schema["debug_format"] = Deprecated()
_logging_schema["debug_file"] = Deprecated()
_logging_schema["config_file"] = Path(optional=True)

# These sections map arbitrary logger names to a level/color, so they use a
# map schema instead of a fixed-key one.
_loglevels_schema = MapConfigSchema("loglevels", LogLevel())
_logcolors_schema = MapConfigSchema("logcolors", LogColor())

_audio_schema = ConfigSchema("audio")
_audio_schema["mixer"] = String()
_audio_schema["mixer_track"] = Deprecated()
_audio_schema["mixer_volume"] = Integer(optional=True, minimum=0, maximum=100)
_audio_schema["output"] = String()
_audio_schema["visualizer"] = Deprecated()
_audio_schema["buffer_time"] = Integer(optional=True, minimum=1)

_proxy_schema = ConfigSchema("proxy")
_proxy_schema["scheme"] = String(
    optional=True, choices=["http", "https", "socks4", "socks5"]
)
_proxy_schema["hostname"] = Hostname(optional=True)
_proxy_schema["port"] = Port(optional=True)
_proxy_schema["username"] = String(optional=True)
_proxy_schema["password"] = Secret(optional=True)

# NOTE: if multiple outputs ever comes something like LogLevelConfigSchema
# _outputs_schema = config.AudioOutputConfigSchema()

#: All built-in schemas; extension schemas are appended at load time.
_schemas = [
    _core_schema,
    _logging_schema,
    _loglevels_schema,
    _logcolors_schema,
    _audio_schema,
    _proxy_schema,
]

#: Header template written at the top of a freshly generated config file.
_INITIAL_HELP = """
# For further information about options in this file see:
# https://docs.mopidy.com/
#
# The initial commented out values reflect the defaults as of:
# {versions}
#
# Available options and defaults might have changed since then,
# run `mopidy config` to see the current effective config and
# `mopidy --version` to check the current version.
"""
def read(config_file):
    """Helper to load config defaults in same way across core and extensions.

    Undecodable bytes are preserved via surrogate escapes.
    """
    file_path = pathlib.Path(config_file)
    return file_path.read_text(errors="surrogateescape")
def load(files, ext_schemas, ext_defaults, overrides):
    """Load and validate config from defaults, files, and overrides.

    Keyring-stored secrets and command line overrides are applied on top of
    the file values. Returns a ``(config, errors)`` tuple.
    """
    here = pathlib.Path(__file__).parent
    all_defaults = [read(here / "default.conf"), *ext_defaults]
    effective_overrides = keyring.fetch() + (overrides or [])
    raw_config = _load(files, all_defaults, effective_overrides)
    all_schemas = _schemas + list(ext_schemas)
    return _validate(raw_config, all_schemas)
def format(config, ext_schemas, comments=None, display=True):
    """Render ``config`` as INI text using core plus extension schemas.

    With ``display`` set, secret values are masked for printing.
    """
    all_schemas = _schemas + list(ext_schemas)
    return _format(config, comments or {}, all_schemas, display, False)
def format_initial(extensions_data):
    """Build the contents of a brand new, fully commented-out config file.

    Combines the core defaults with every extension's defaults, prefixed by
    a header listing the versions the defaults were generated from.
    """
    here = pathlib.Path(__file__).parent
    all_defaults = [read(here / "default.conf")]
    all_defaults.extend(
        d.extension.get_default_config() for d in extensions_data
    )
    raw_config = _load([], all_defaults, [])
    all_schemas = _schemas[:]
    all_schemas.extend(
        d.extension.get_config_schema() for d in extensions_data
    )
    config, _errors = _validate(raw_config, all_schemas)
    versions = [f"Mopidy {versioning.get_version()}"]
    for data in sorted(extensions_data, key=lambda d: d.extension.dist_name):
        versions.append(f"{data.extension.dist_name} {data.extension.version}")
    header = _INITIAL_HELP.strip().format(versions="\n# ".join(versions))
    body = _format(
        config=config,
        comments={},
        schemas=all_schemas,
        display=False,
        disable=True,
    )
    return header + "\n\n" + body
def _load(files, defaults, overrides):
    """Merge defaults, config files, and overrides into a raw config dict."""
    parser = configparser.RawConfigParser()
    # TODO: simply return path to config file for defaults so we can load it
    # all in the same way?
    logger.info("Loading config from builtin defaults")
    for default in defaults:
        text = default.decode() if isinstance(default, bytes) else default
        parser.read_string(text)
    # Config files listed later override values from earlier ones.
    for name in files:
        expanded = path.expand_path(name)
        if expanded.is_dir():
            # A directory means: read every *.conf file inside it.
            for candidate in expanded.iterdir():
                if candidate.is_file() and candidate.suffix == ".conf":
                    _load_file(parser, candidate.resolve())
        else:
            _load_file(parser, expanded.resolve())
    raw_config = {
        section: dict(parser.items(section)) for section in parser.sections()
    }
    logger.info("Loading config from command line options")
    for section, key, value in overrides:
        raw_config.setdefault(section, {})[key] = value
    return raw_config
def _load_file(parser, file_path):
    """Read one config file into ``parser``.

    Never raises: missing files, permission problems, and parse errors are
    logged and the file is skipped.

    :param parser: a :class:`configparser.RawConfigParser`
    :param file_path: a :class:`pathlib.Path` to the config file
    """
    if not file_path.exists():
        logger.debug(
            f"Loading config from {file_path.as_uri()} failed; "
            f"it does not exist"
        )
        return
    if not os.access(str(file_path), os.R_OK):
        # Fixed: the file URI was previously logged as the literal text
        # "file_path.as_uri()" because the f-string lacked braces.
        logger.warning(
            f"Loading config from {file_path.as_uri()} failed; "
            f"read permission missing"
        )
        return
    try:
        logger.info(f"Loading config from {file_path.as_uri()}")
        with file_path.open("r") as fh:
            parser.read_file(fh)
    except configparser.MissingSectionHeaderError:
        logger.warning(
            f"Loading config from {file_path.as_uri()} failed; "
            f"it does not have a config section"
        )
    except configparser.ParsingError as e:
        linenos = ", ".join(str(lineno) for lineno, line in e.errors)
        logger.warning(
            f"Config file {file_path.as_uri()} has errors; "
            f"line {linenos} has been ignored"
        )
    except OSError:
        # TODO: if this is the initial load of logging config we might not
        # have a logger at this point, we might want to handle this better.
        logger.debug(f"Config file {file_path.as_uri()} not found; skipping")
def _validate(raw_config, schemas):
# Get validated config
config = {}
errors = {}
sections = set(raw_config)
for schema in schemas:
sections.discard(schema.name)
values = raw_config.get(schema.name, {})
result, error = schema.deserialize(values)
if error:
errors[schema.name] = error
if result:
config[schema.name] = result
for section in sections:
logger.debug(f"Ignoring unknown config section: {section}")
return config, errors
def _format(config, comments, schemas, display, disable):
    """Render the config as INI text.

    With ``display``, secrets are masked; with ``disable``, every line is
    commented out (used when generating an initial config file).
    """
    lines = []
    for schema in schemas:
        serialized = schema.serialize(
            config.get(schema.name, {}), display=display
        )
        if not serialized:
            continue
        lines.append(f"[{schema.name}]")
        for key, value in serialized.items():
            if isinstance(value, DeprecatedValue):
                continue
            entry = f"{key} ="
            if value is not None:
                entry += " " + value
            comment = comments.get(schema.name, {}).get(key, "")
            if comment:
                entry += " ; " + comment.capitalize()
            if disable:
                # Prefix every line of (possibly multi-line) values.
                entry = re.sub(r"^", "#", entry, flags=re.M)
            lines.append(entry)
        lines.append("")
    return "\n".join(lines).strip()
def _preprocess(config_string):
"""Convert a raw config into a form that preserves comments etc."""
results = ["[__COMMENTS__]"]
counter = itertools.count(0)
section_re = re.compile(r"^(\[[^\]]+\])\s*(.+)$")
blank_line_re = re.compile(r"^\s*$")
comment_re = re.compile(r"^(#|;)")
inline_comment_re = re.compile(r" ;")
def newlines(match):
return f"__BLANK{next(counter):d}__ ="
def comments(match):
if match.group(1) == "#":
return f"__HASH{next(counter):d}__ ="
elif match.group(1) == ";":
return f"__SEMICOLON{next(counter):d}__ ="
def inlinecomments(match):
return f"\n__INLINE{next(counter):d}__ ="
def sections(match):
return (
f"{match.group(1)}\n__SECTION{next(counter):d}__ = {match.group(2)}"
)
for line in config_string.splitlines():
line = blank_line_re.sub(newlines, line)
line = section_re.sub(sections, line)
line = comment_re.sub(comments, line)
line = inline_comment_re.sub(inlinecomments, line)
results.append(line)
return "\n".join(results)
def _postprocess(config_string):
"""Converts a preprocessed config back to original form."""
flags = re.IGNORECASE | re.MULTILINE
result = re.sub(r"^\[__COMMENTS__\](\n|$)", "", config_string, flags=flags)
result = re.sub(r"\n__INLINE\d+__ =(.*)$", r" ;\g<1>", result, flags=flags)
result = re.sub(r"^__HASH\d+__ =(.*)$", r"#\g<1>", result, flags=flags)
result = re.sub(r"^__SEMICOLON\d+__ =(.*)$", r";\g<1>", result, flags=flags)
result = re.sub(r"\n__SECTION\d+__ =(.*)$", r"\g<1>", result, flags=flags)
result = re.sub(r"^__BLANK\d+__ =$", "", result, flags=flags)
return result
class Proxy(Mapping):
    """Read-only, recursive view of a config dict.

    Nested dicts are wrapped in :class:`Proxy` as well, so the entire config
    tree is only reachable through the immutable Mapping interface.
    """

    def __init__(self, data):
        self._data = data

    def __getitem__(self, key):
        value = self._data[key]
        # Wrap nested sections so mutation is blocked at every level.
        if isinstance(value, dict):
            value = Proxy(value)
        return value

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __repr__(self):
        return f"Proxy({self._data!r})"

View File

@@ -0,0 +1,25 @@
[core]
cache_dir = $XDG_CACHE_DIR/mopidy
config_dir = $XDG_CONFIG_DIR/mopidy
data_dir = $XDG_DATA_DIR/mopidy
max_tracklist_length = 10000
restore_state = false
[logging]
verbosity = 0
format = %(levelname)-8s %(asctime)s [%(process)d:%(threadName)s] %(name)s\n %(message)s
color = true
config_file =
[audio]
mixer = software
mixer_volume =
output = autoaudiosink
buffer_time =
[proxy]
scheme =
hostname =
port =
username =
password =

View File

@@ -0,0 +1,177 @@
import logging
logger = logging.getLogger(__name__)
try:
import dbus
except ImportError:
dbus = None
# XXX: Hack to workaround introspection bug caused by gnome-keyring, should be
# fixed by version 3.5 per:
# https://git.gnome.org/browse/gnome-keyring/commit/?id=5dccbe88eb94eea9934e2b7
if dbus:
    # Wrapped as a D-Bus variant (see XXX above) so the broken introspection
    # accepts it as the OpenSession() input argument.
    EMPTY_STRING = dbus.String("", variant_level=1)
else:
    # Placeholder only; every entry point bails out early when dbus is
    # missing, so this value is never sent over the bus.
    EMPTY_STRING = ""

# Logged (at debug level) whenever secrets cannot be read from the keyring.
FETCH_ERROR = (
    "Fetching passwords from your keyring failed. Any passwords "
    "stored in the keyring will not be available."
)
def fetch():
    """Fetch Mopidy secrets from the D-Bus Secret Service keyring.

    :returns: list of ``(section, key, value)`` tuples, where ``value`` is
        :class:`bytes`; an empty list when dbus is missing, the secrets
        service is unavailable, or the keyring stays locked.
    """
    if not dbus:
        logger.debug("%s (dbus not installed)", FETCH_ERROR)
        return []
    try:
        bus = dbus.SessionBus()
    except dbus.exceptions.DBusException as e:
        logger.debug("%s (%s)", FETCH_ERROR, e)
        return []
    if not bus.name_has_owner("org.freedesktop.secrets"):
        logger.debug(
            "%s (org.freedesktop.secrets service not running)", FETCH_ERROR
        )
        return []
    service = _service(bus)
    session = service.OpenSession("plain", EMPTY_STRING)[1]
    items, locked = service.SearchItems({"service": "mopidy"})
    if not locked and not items:
        return []
    if locked:
        # There is a chance we can unlock without prompting the users...
        items, prompt = service.Unlock(locked)
        if prompt != "/":
            # "/" means no prompt was needed; anything else would interrupt
            # the user, so dismiss it and give up.
            _prompt(bus, prompt).Dismiss()
            logger.debug("%s (Keyring is locked)", FETCH_ERROR)
            return []
    result = []
    secrets = service.GetSecrets(items, session, byte_arrays=True)
    for item_path, values in secrets.items():
        session_path, parameters, value, content_type = values
        # NOTE(review): these lookups must match the key type returned by
        # _item_attributes() -- verify str vs bytes keys agree.
        attrs = _item_attributes(bus, item_path)
        result.append((attrs["section"], attrs["key"], bytes(value)))
    return result
def set(section, key, value):
    """Store a secret config value for a given section/key.

    Indicates if storage failed or succeeded.

    :param section: config section name
    :param key: config key name
    :param value: the secret; :class:`str` values are encoded as UTF-8
    :returns: :class:`bool` -- :class:`True` on success
    """
    if not dbus:
        logger.debug(
            "Saving %s/%s to keyring failed. (dbus not installed)", section, key
        )
        return False
    try:
        bus = dbus.SessionBus()
    except dbus.exceptions.DBusException as e:
        logger.debug("Saving %s/%s to keyring failed. (%s)", section, key, e)
        return False
    if not bus.name_has_owner("org.freedesktop.secrets"):
        logger.debug(
            "Saving %s/%s to keyring failed. "
            "(org.freedesktop.secrets service not running)",
            section,
            key,
        )
        return False
    service = _service(bus)
    collection = _collection(bus)
    if not collection:
        return False
    if isinstance(value, str):
        value = value.encode()
    session = service.OpenSession("plain", EMPTY_STRING)[1]
    secret = dbus.Struct(
        (session, "", dbus.ByteArray(value), "plain/text; charset=utf8")
    )
    label = f"mopidy: {section}/{key}"
    # The attributes are what fetch() searches on later.
    attributes = {"service": "mopidy", "section": section, "key": key}
    properties = {
        "org.freedesktop.Secret.Item.Label": label,
        "org.freedesktop.Secret.Item.Attributes": attributes,
    }
    try:
        # Replace any existing item with the same attributes (last arg True).
        item, prompt = collection.CreateItem(properties, secret, True)
    except dbus.exceptions.DBusException as e:
        # TODO: catch IsLocked errors etc.
        logger.debug("Saving %s/%s to keyring failed. (%s)", section, key, e)
        return False
    if prompt == "/":
        # "/" means the item was stored without needing a prompt.
        return True
    _prompt(bus, prompt).Dismiss()
    logger.debug(
        "Saving secret %s/%s failed. (Keyring is locked)", section, key
    )
    return False
def _service(bus):
    """Return the Secret Service entry-point interface."""
    service_path = "/org/freedesktop/secrets"
    return _interface(bus, service_path, "org.freedesktop.Secret.Service")
# NOTE: depending on versions and setup 'default' might not exist, so try and
# use it but fall back to the 'login' collection, and finally the 'session'
# one if all else fails. We should probably create a keyring/collection
# setting that allows users to set this so they have control over where
# their secrets get stored.
def _collection(bus):
    """Return the first keyring collection that exists, or None."""
    for name in ("aliases/default", "collection/login", "collection/session"):
        candidate = "/org/freedesktop/secrets/" + name
        if _collection_exists(bus, candidate):
            return _interface(
                bus, candidate, "org.freedesktop.Secret.Collection"
            )
    return None
# NOTE: Hack to probe if a given collection actually exists. Needed to work
# around an introspection bug in setting passwords for non-existent aliases.
def _collection_exists(bus, path):
    """Probe whether a collection object is actually present on the bus."""
    try:
        props = _interface(bus, path, "org.freedesktop.DBus.Properties")
        props.Get("org.freedesktop.Secret.Collection", "Label")
    except dbus.exceptions.DBusException:
        return False
    return True
# NOTE: We could call prompt.Prompt('') to unlock the keyring when it is not
# '/', but we would then also have to arrange to setup signals to wait until
# this has been completed. So for now we just dismiss the prompt and expect
# keyrings to be unlocked.
def _prompt(bus, path):
    """Wrap a prompt object path in the (unqualified) Prompt interface."""
    prompt_interface = "Prompt"
    return _interface(bus, path, prompt_interface)
def _item_attributes(bus, path):
    """Fetch a secret item's attributes as a plain str->str dict."""
    item = _interface(bus, path, "org.freedesktop.DBus.Properties")
    result = item.Get("org.freedesktop.Secret.Item", "Attributes")
    # bytes() on a dbus.String (a str subclass) raises TypeError in Python 3
    # ("string argument without an encoding"), and the caller looks the
    # attributes up with plain str keys -- so coerce with str() instead.
    return {str(k): str(v) for k, v in result.items()}
def _interface(bus, path, interface):
    """Wrap the secrets object at ``path`` in the given D-Bus interface."""
    proxy_obj = bus.get_object("org.freedesktop.secrets", path)
    return dbus.Interface(proxy_obj, interface)

View File

@@ -0,0 +1,125 @@
import collections
from mopidy.config import types
def _did_you_mean(name, choices):
    """Suggest most likely setting based on levenshtein.

    Returns the closest choice when it is within distance 3, else None.
    """
    if not choices:
        return None
    lowered = name.lower()
    # Sorting (distance, choice) tuples breaks ties lexicographically.
    ranked = sorted((_levenshtein(lowered, choice), choice) for choice in choices)
    distance, best = ranked[0]
    return best if distance <= 3 else None
def _levenshtein(a, b):
"""Calculates the Levenshtein distance between a and b."""
n, m = len(a), len(b)
if n > m:
return _levenshtein(b, a)
current = range(n + 1)
for i in range(1, m + 1):
previous, current = current, [i] + [0] * n
for j in range(1, n + 1):
add, delete = previous[j] + 1, current[j - 1] + 1
change = previous[j - 1]
if a[j - 1] != b[i - 1]:
change += 1
current[j] = min(add, delete, change)
return current[n]
class ConfigSchema(collections.OrderedDict):
    """Logical group of config values that correspond to a config section.

    Schemas are set up by assigning config keys with config values to
    instances. Once setup :meth:`deserialize` can be called with a dict of
    values to process. For convenience we also support :meth:`format` method
    that can used for converting the values to a dict that can be printed and
    :meth:`serialize` for converting the values to a form suitable for
    persistence.
    """

    def __init__(self, name):
        super().__init__()
        # Name of the config section this schema covers.
        self.name = name

    def deserialize(self, values):
        """Validates the given ``values`` using the config schema.

        Returns a tuple with cleaned values and errors.
        """
        errors = {}
        result = {}
        for key, value in values.items():
            try:
                result[key] = self[key].deserialize(value)
            except KeyError:  # not in our schema
                errors[key] = "unknown config key."
                suggestion = _did_you_mean(key, self.keys())
                if suggestion:
                    errors[key] += f" Did you mean {suggestion!r}?"
            except ValueError as e:  # deserialization failed
                result[key] = None
                errors[key] = str(e)
        for key in self.keys():
            # Deprecated keys are silently dropped; every other schema key
            # must be present in the input values.
            if isinstance(self[key], types.Deprecated):
                result.pop(key, None)
            elif key not in result and key not in errors:
                result[key] = None
                errors[key] = "config key not found."
        return result, errors

    def serialize(self, values, display=False):
        """Converts the given ``values`` to a format suitable for persistence.

        If ``display`` is :class:`True` secret config values, like passwords,
        will be masked out.

        Returns a dict of config keys and values."""
        result = collections.OrderedDict()
        for key in self.keys():
            if key in values:
                result[key] = self[key].serialize(values[key], display)
        return result
class MapConfigSchema:
    """Schema for handling multiple unknown keys with the same type.

    Does not sub-class :class:`ConfigSchema`, but implements the same
    serialize/deserialize interface.
    """

    def __init__(self, name, value_type):
        self.name = name
        self._value_type = value_type

    def deserialize(self, values):
        """Deserialize every value with the shared type, collecting errors."""
        result, errors = {}, {}
        for key, raw in values.items():
            try:
                result[key] = self._value_type.deserialize(raw)
            except ValueError as exc:  # deserialization failed
                # Keep the key with a None value so callers see it failed.
                result[key] = None
                errors[key] = str(exc)
        return result, errors

    def serialize(self, values, display=False):
        """Serialize all values, ordered by key."""
        result = collections.OrderedDict()
        for key in sorted(values.keys()):
            result[key] = self._value_type.serialize(values[key], display)
        return result

View File

@@ -0,0 +1,323 @@
import logging
import re
import socket
from mopidy.config import validators
from mopidy.internal import log, path
def decode(value):
    """Decode a raw config value, unescaping ``\\n``, ``\\t``, and ``\\\\``.

    Bytes input is decoded as UTF-8 with surrogate escapes first.
    """
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")
    for special in ("\\", "\n", "\t"):
        escaped = special.encode(encoding="unicode-escape").decode()
        value = value.replace(escaped, special)
    return value
def encode(value):
    """Escape backslashes, newlines, and tabs for storage in a config file.

    The inverse of :func:`decode`; bytes input is decoded to str first.
    """
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")
    for special in ("\\", "\n", "\t"):
        escaped = special.encode(encoding="unicode-escape").decode()
        value = value.replace(special, escaped)
    return value
class DeprecatedValue:
    # Marker object: schemas drop config keys whose (de)serialized value is
    # an instance of this class instead of persisting or validating them.
    pass
class ConfigValue:
    """Represents a config key's value and how to handle it.

    Normally you will only be interacting with sub-classes for config values
    that encode either deserialization behavior and/or validation.

    Each config value should be used for the following actions:

    1. Deserializing from a raw string and validating, raising ValueError on
       failure.
    2. Serializing a value back to a string that can be stored in a config.
    3. Formatting a value to a printable form (useful for masking secrets).

    :class:`None` values should not be deserialized, serialized or formatted,
    the code interacting with the config should simply skip None config values.
    """

    def deserialize(self, value):
        """Cast raw string to appropriate type."""
        return decode(value)

    def serialize(self, value, display=False):
        """Convert value back to string for saving."""
        return "" if value is None else str(value)
class Deprecated(ConfigValue):
    """Deprecated value.

    Used for ignoring old config values that are no longer in use, but should
    not cause the config parser to crash.
    """

    def deserialize(self, value):
        # The marker tells schemas to silently drop this key.
        return DeprecatedValue()

    def serialize(self, value, display=False):
        # Never written back out; schemas skip DeprecatedValue results.
        return DeprecatedValue()
class String(ConfigValue):
    """String value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.
    """

    def __init__(self, optional=False, choices=None):
        self._required = not optional
        self._choices = choices

    def deserialize(self, value):
        stripped = decode(value).strip()
        validators.validate_required(stripped, self._required)
        if not stripped:
            return None
        validators.validate_choice(stripped, self._choices)
        return stripped

    def serialize(self, value, display=False):
        return "" if value is None else encode(value)
class Secret(String):
    """Secret string value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.

    Should be used for passwords, auth tokens etc. Will mask value when being
    displayed.
    """

    def __init__(self, optional=False, choices=None):
        self._required = not optional
        # Choices doesn't make sense for secrets
        self._choices = None

    def serialize(self, value, display=False):
        if display and value is not None:
            return "********"
        return super().serialize(value, display)
class Integer(ConfigValue):
    """Integer value, optionally restricted by bounds and/or choices."""

    def __init__(
        self, minimum=None, maximum=None, choices=None, optional=False
    ):
        self._required = not optional
        self._minimum = minimum
        self._maximum = maximum
        self._choices = choices

    def deserialize(self, value):
        raw = decode(value)
        validators.validate_required(raw, self._required)
        if not raw:
            return None
        number = int(raw)
        validators.validate_choice(number, self._choices)
        validators.validate_minimum(number, self._minimum)
        validators.validate_maximum(number, self._maximum)
        return number
class Boolean(ConfigValue):
    """Boolean value.

    Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as
    :class:`True`.

    Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as
    :class:`False`.
    """

    true_values = ("1", "yes", "true", "on")
    false_values = ("0", "no", "false", "off")

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        text = decode(value)
        validators.validate_required(text, self._required)
        if not text:
            return None
        lowered = text.lower()
        if lowered in self.true_values:
            return True
        if lowered in self.false_values:
            return False
        raise ValueError(f"invalid value for boolean: {text!r}")

    def serialize(self, value, display=False):
        if value is True:
            return "true"
        if value in (False, None):
            return "false"
        raise ValueError(f"{value!r} is not a boolean")
class List(ConfigValue):
    """List value.

    Supports elements split by commas or newlines. Newlines take precedence
    and empty list items will be filtered out.
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        text = decode(value)
        # Newline-separated wins over comma-separated when both appear.
        separator = r"\s*\n\s*" if "\n" in text else r"\s*,\s*"
        items = tuple(
            item.strip() for item in re.split(separator, text) if item.strip()
        )
        validators.validate_required(items, self._required)
        return tuple(items)

    def serialize(self, value, display=False):
        if not value:
            return ""
        return "\n " + "\n ".join(encode(item) for item in value if item)
class LogColor(ConfigValue):
    """Log color value; restricted to the colors the log module knows."""

    def deserialize(self, value):
        lowered = decode(value).lower()
        validators.validate_choice(lowered, log.COLORS)
        return lowered

    def serialize(self, value, display=False):
        lowered = value.lower()
        if lowered in log.COLORS:
            return encode(lowered)
        return ""
class LogLevel(ConfigValue):
    """Log level config value.

    Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``,
    ``trace``, or ``all``, with any casing.
    """

    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
        "trace": log.TRACE_LOG_LEVEL,
        "all": logging.NOTSET,
    }

    def deserialize(self, value):
        name = decode(value).lower()
        validators.validate_choice(name, self.levels.keys())
        return self.levels.get(name)

    def serialize(self, value, display=False):
        # Reverse lookup from numeric level back to its config name.
        for name, level in self.levels.items():
            if level == value:
                return encode(name)
        return ""
class Hostname(ConfigValue):
    """Network hostname, IP address, or Unix socket path value."""

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value, display=False):
        hostname = decode(value).strip()
        validators.validate_required(hostname, self._required)
        if not hostname:
            return None
        socket_path = path.get_unix_socket_path(hostname)
        if socket_path is not None:
            # Unix socket "hostnames" are validated/expanded as paths.
            path_str = Path(not self._required).deserialize(socket_path)
            return f"unix:{path_str}"
        try:
            socket.getaddrinfo(hostname, None)
        except OSError:
            raise ValueError("must be a resolveable hostname or valid IP")
        return hostname
class Port(Integer):
    """Network port value.

    An integer in the range 0-65535; zero asks the kernel to allocate a
    port for us.
    """

    def __init__(self, choices=None, optional=False):
        super().__init__(
            minimum=0,
            maximum=2 ** 16 - 1,
            choices=choices,
            optional=optional,
        )
class _ExpandedPath(str):
def __new__(cls, original, expanded):
return super().__new__(cls, expanded)
def __init__(self, original, expanded):
self.original = original
class Path(ConfigValue):
    """File system path value.

    The following expansions of the path will be done:

    - ``~`` to the current user's home directory
    - ``$XDG_CACHE_DIR`` according to the XDG spec
    - ``$XDG_CONFIG_DIR`` according to the XDG spec
    - ``$XDG_DATA_DIR`` according to the XDG spec
    - ``$XDG_MUSIC_DIR`` according to the XDG spec
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        raw = decode(value).strip()
        expanded = path.expand_path(raw)
        validators.validate_required(raw, self._required)
        validators.validate_required(expanded, self._required)
        if not raw or expanded is None:
            return None
        return _ExpandedPath(raw, expanded)

    def serialize(self, value, display=False):
        if isinstance(value, _ExpandedPath):
            # Emit the user's original spelling, not the expanded form.
            value = value.original
        if isinstance(value, bytes):
            value = value.decode(errors="surrogateescape")
        return value

View File

@@ -0,0 +1,39 @@
# TODO: add validate regexp?
def validate_required(value, required):
    """Raise :exc:`ValueError` when ``required`` is set and ``value`` is empty.

    Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize` on
    the raw string, _not_ the converted value.
    """
    if not required:
        return
    if not value:
        raise ValueError("must be set.")
def validate_choice(value, choices):
    """Raise :exc:`ValueError` unless ``value`` is one of ``choices``.

    A ``choices`` of :class:`None` disables the check. Normally called in
    :meth:`~mopidy.config.types.ConfigValue.deserialize`.
    """
    if choices is None:
        return
    if value not in choices:
        names = ", ".join(repr(choice) for choice in choices)
        raise ValueError(f"must be one of {names}, not {value}.")
def validate_minimum(value, minimum):
    """Raise :exc:`ValueError` when ``value`` is below ``minimum``.

    A ``minimum`` of :class:`None` disables the check. Normally called in
    :meth:`~mopidy.config.types.ConfigValue.deserialize`.
    """
    if minimum is None:
        return
    if value < minimum:
        raise ValueError(f"{value!r} must be larger than {minimum!r}.")
def validate_maximum(value, maximum):
    """Raise :exc:`ValueError` when ``value`` is above ``maximum``.

    A ``maximum`` of :class:`None` disables the check. Normally called in
    :meth:`~mopidy.config.types.ConfigValue.deserialize`.
    """
    if maximum is None:
        return
    if value > maximum:
        raise ValueError(f"{value!r} must be smaller than {maximum!r}.")

View File

@@ -0,0 +1,9 @@
# flake8: noqa
from .actor import Core
from .history import HistoryController
from .library import LibraryController
from .listener import CoreListener
from .mixer import MixerController
from .playback import PlaybackController, PlaybackState
from .playlists import PlaylistsController
from .tracklist import TracklistController

View File

@@ -0,0 +1,276 @@
import collections
import itertools
import logging
import pykka
import mopidy
from mopidy import audio, backend, mixer
from mopidy.audio import PlaybackState
from mopidy.core.history import HistoryController
from mopidy.core.library import LibraryController
from mopidy.core.listener import CoreListener
from mopidy.core.mixer import MixerController
from mopidy.core.playback import PlaybackController
from mopidy.core.playlists import PlaylistsController
from mopidy.core.tracklist import TracklistController
from mopidy.internal import path, storage, validation, versioning
from mopidy.internal.models import CoreState
logger = logging.getLogger(__name__)
class Core(
    pykka.ThreadingActor,
    audio.AudioListener,
    backend.BackendListener,
    mixer.MixerListener,
):
    """The core actor that wires Mopidy's controllers together.

    It owns one instance of each controller and forwards events from the
    audio, backend, and mixer layers to the frontends via
    :class:`~mopidy.core.CoreListener`.
    """

    library = None
    """An instance of :class:`~mopidy.core.LibraryController`"""

    history = None
    """An instance of :class:`~mopidy.core.HistoryController`"""

    mixer = None
    """An instance of :class:`~mopidy.core.MixerController`"""

    playback = None
    """An instance of :class:`~mopidy.core.PlaybackController`"""

    playlists = None
    """An instance of :class:`~mopidy.core.PlaylistsController`"""

    tracklist = None
    """An instance of :class:`~mopidy.core.TracklistController`"""

    def __init__(self, config=None, mixer=None, backends=None, audio=None):
        super().__init__()

        self._config = config

        self.backends = Backends(backends)

        # Controllers are wrapped in pykka.traversable() so that their
        # methods are reachable through the core actor proxy.
        self.library = pykka.traversable(
            LibraryController(backends=self.backends, core=self)
        )
        self.history = pykka.traversable(HistoryController())
        self.mixer = pykka.traversable(MixerController(mixer=mixer))
        self.playback = pykka.traversable(
            PlaybackController(audio=audio, backends=self.backends, core=self)
        )
        self.playlists = pykka.traversable(
            PlaylistsController(backends=self.backends, core=self)
        )
        self.tracklist = pykka.traversable(TracklistController(core=self))

        self.audio = audio

    def get_uri_schemes(self):
        """Get list of URI schemes we can handle"""
        futures = [b.uri_schemes for b in self.backends]
        results = pykka.get_all(futures)
        uri_schemes = itertools.chain(*results)
        return sorted(uri_schemes)

    def get_version(self):
        """Get version of the Mopidy core API"""
        return versioning.get_version()

    def reached_end_of_stream(self):
        # Event from mopidy.audio; delegated to the playback controller.
        self.playback._on_end_of_stream()

    def stream_changed(self, uri):
        # Event from mopidy.audio; delegated to the playback controller.
        self.playback._on_stream_changed(uri)

    def position_changed(self, position):
        # Event from mopidy.audio; delegated to the playback controller.
        self.playback._on_position_changed(position)

    def state_changed(self, old_state, new_state, target_state):
        # XXX: This is a temporary fix for issue #232 while we wait for a more
        # permanent solution with the implementation of issue #234. When the
        # Spotify play token is lost, the Spotify backend pauses audio
        # playback, but mopidy.core doesn't know this, so we need to update
        # mopidy.core's state to match the actual state in mopidy.audio. If we
        # don't do this, clients will think that we're still playing.

        # We ignore cases when target state is set as this is buffering
        # updates (at least for now) and we need to get #234 fixed...
        if (
            new_state == PlaybackState.PAUSED
            and not target_state
            and self.playback.get_state() != PlaybackState.PAUSED
        ):
            self.playback.set_state(new_state)
            self.playback._trigger_track_playback_paused()

    def playlists_loaded(self):
        # Forward event from backend to frontends
        CoreListener.send("playlists_loaded")

    def volume_changed(self, volume):
        # Forward event from mixer to frontends
        CoreListener.send("volume_changed", volume=volume)

    def mute_changed(self, mute):
        # Forward event from mixer to frontends
        CoreListener.send("mute_changed", mute=mute)

    def tags_changed(self, tags):
        if not self.audio or "title" not in tags:
            return

        # Replace the event's tag names with the full current tag set.
        tags = self.audio.get_current_tags().get()
        if not tags:
            return

        # TODO: this is a hack to make sure we don't emit stream title changes
        # for plain tracks. We need a better way to decide if something is a
        # stream.
        if "title" in tags and tags["title"]:
            title = tags["title"][0]
            current_track = self.playback.get_current_track()
            if current_track is not None and current_track.name != title:
                self.playback._stream_title = title
                CoreListener.send("stream_title_changed", title=title)

    def setup(self):
        """Do not call this function. It is for internal use at startup."""
        try:
            coverage = []
            if self._config and "restore_state" in self._config["core"]:
                if self._config["core"]["restore_state"]:
                    coverage = [
                        "tracklist",
                        "mode",
                        "play-last",
                        "mixer",
                        "history",
                    ]
            if coverage:
                self._load_state(coverage)
        except Exception as e:
            # logger.warn() is a deprecated alias; use warning().
            logger.warning("Restore state: Unexpected error: %s", str(e))

    def teardown(self):
        """Do not call this function. It is for internal use at shutdown."""
        try:
            if self._config and "restore_state" in self._config["core"]:
                if self._config["core"]["restore_state"]:
                    self._save_state()
        except Exception as e:
            logger.warning("Unexpected error while saving state: %s", str(e))

    def _get_data_dir(self):
        # Get or create the data directory for core.
        data_dir_path = (
            path.expand_path(self._config["core"]["data_dir"]) / "core"
        )
        path.get_or_create_dir(data_dir_path)
        return data_dir_path

    def _get_state_file(self):
        return self._get_data_dir() / "state.json.gz"

    def _save_state(self):
        """
        Save current state to disk.
        """

        state_file = self._get_state_file()
        logger.info("Saving state to %s", state_file)

        data = {}
        data["version"] = mopidy.__version__
        data["state"] = CoreState(
            tracklist=self.tracklist._save_state(),
            history=self.history._save_state(),
            playback=self.playback._save_state(),
            mixer=self.mixer._save_state(),
        )
        storage.dump(state_file, data)
        logger.debug("Saving state done")

    def _load_state(self, coverage):
        """
        Restore state from disk.

        Load state from disk and restore it. Parameter ``coverage``
        limits the amount of data to restore. Possible
        values for ``coverage`` (list of one or more of):

        - 'tracklist' fill the tracklist
        - 'mode' set tracklist properties (consume, random, repeat, single)
        - 'play-last' restore play state ('tracklist' also required)
        - 'mixer' set mixer volume and mute state
        - 'history' restore history

        :param coverage: amount of data to restore
        :type coverage: list of strings
        """

        state_file = self._get_state_file()
        logger.info("Loading state from %s", state_file)

        data = storage.load(state_file)

        try:
            # Try only once. If something goes wrong, the next start is clean.
            state_file.unlink()
        except OSError:
            logger.info("Failed to delete %s", state_file)

        if "state" in data:
            core_state = data["state"]
            validation.check_instance(core_state, CoreState)
            self.history._load_state(core_state.history, coverage)
            self.tracklist._load_state(core_state.tracklist, coverage)
            self.mixer._load_state(core_state.mixer, coverage)
            # Playback must be restored after the tracklist is in place.
            self.playback._load_state(core_state.playback, coverage)
        logger.debug("Loading state done")
class Backends(list):
    """A list of backend proxies with per-capability, by-scheme lookups."""

    def __init__(self, backends):
        super().__init__(backends)

        # URI scheme -> backend, per capability.
        self.with_library = collections.OrderedDict()
        self.with_library_browse = collections.OrderedDict()
        self.with_playback = collections.OrderedDict()
        self.with_playlists = collections.OrderedDict()

        backends_by_scheme = {}

        def name(b):
            return b.actor_ref.actor_class.__name__

        for b in backends:
            try:
                has_library = b.has_library().get()
                has_library_browse = b.has_library_browse().get()
                has_playback = b.has_playback().get()
                has_playlists = b.has_playlists().get()
            except Exception:
                self.remove(b)
                logger.exception(
                    "Fetching backend info for %s failed", name(b)
                )
                # Bug fix: without this `continue`, the code below would
                # run with the has_* locals undefined (NameError) for a
                # backend whose capability lookup failed.
                continue

            for scheme in b.uri_schemes.get():
                assert scheme not in backends_by_scheme, (
                    f"Cannot add URI scheme {scheme!r} for {name(b)}, "
                    f"it is already handled by {name(backends_by_scheme[scheme])}"
                )
                backends_by_scheme[scheme] = b

                if has_library:
                    self.with_library[scheme] = b
                if has_library_browse:
                    self.with_library_browse[scheme] = b
                if has_playback:
                    self.with_playback[scheme] = b
                if has_playlists:
                    self.with_playlists[scheme] = b

View File

@@ -0,0 +1,73 @@
import copy
import logging
import time
from mopidy import models
from mopidy.internal.models import HistoryState, HistoryTrack
logger = logging.getLogger(__name__)
class HistoryController:
    """Keeps a record of played tracks, most recent first."""

    def __init__(self):
        self._history = []

    def _add_track(self, track):
        """Add track to the playback history.

        Internal method for :class:`mopidy.core.PlaybackController`.

        :param track: track to add
        :type track: :class:`mopidy.models.Track`
        """
        if not isinstance(track, models.Track):
            raise TypeError("Only Track objects can be added to the history")

        timestamp = int(time.time() * 1000)

        # Display name: "artist1, artist2 - trackname", with missing
        # parts left out.
        name_parts = []
        if track.artists:
            name_parts.append(
                ", ".join(artist.name for artist in track.artists)
            )
        if track.name is not None:
            name_parts.append(track.name)
        name = " - ".join(name_parts)
        ref = models.Ref.track(uri=track.uri, name=name)

        self._history.insert(0, (timestamp, ref))

    def get_length(self):
        """Get the number of tracks in the history.

        :returns: the history length
        :rtype: int
        """
        return len(self._history)

    def get_history(self):
        """Get the track history.

        The timestamps are milliseconds since epoch.

        :returns: the track history
        :rtype: list of (timestamp, :class:`mopidy.models.Ref`) tuples
        """
        return copy.copy(self._history)

    def _save_state(self):
        # 500 tracks at roughly 3 minutes each gives about 24 hours of
        # history, which is plenty.
        count_max = 500
        entries = [
            HistoryTrack(timestamp=timestamp, track=track)
            for timestamp, track in self._history[:count_max]
        ]
        if len(self._history) >= count_max:
            logger.info("Limiting history to %s tracks", count_max)
        return HistoryState(history=entries)

    def _load_state(self, state, coverage):
        if not state or "history" not in coverage:
            return
        self._history = [(h.timestamp, h.track) for h in state.history]

View File

@@ -0,0 +1,351 @@
import collections
import contextlib
import logging
import operator
import urllib
from collections.abc import Mapping
from mopidy import exceptions, models
from mopidy.internal import validation
logger = logging.getLogger(__name__)
@contextlib.contextmanager
def _backend_error_handling(backend, reraise=None):
try:
yield
except exceptions.ValidationError as e:
logger.error(
"%s backend returned bad data: %s",
backend.actor_ref.actor_class.__name__,
e,
)
except Exception as e:
if reraise and isinstance(e, reraise):
raise
logger.exception(
"%s backend caused an exception.",
backend.actor_ref.actor_class.__name__,
)
class LibraryController:
    """Routes library operations (browse, lookup, search, ...) to backends."""

    def __init__(self, backends, core):
        self.backends = backends
        self.core = core

    def _get_backend(self, uri):
        # Route by URI scheme to the backend that provides a library.
        uri_scheme = urllib.parse.urlparse(uri).scheme
        return self.backends.with_library.get(uri_scheme, None)

    def _get_backends_to_uris(self, uris):
        # Map each backend to the URIs it should handle; with no URIs given,
        # every library backend is addressed with None (meaning "all").
        if uris:
            backends_to_uris = collections.defaultdict(list)
            for uri in uris:
                backend = self._get_backend(uri)
                if backend is not None:
                    backends_to_uris[backend].append(uri)
        else:
            backends_to_uris = {
                b: None for b in self.backends.with_library.values()
            }
        return backends_to_uris

    def browse(self, uri):
        """
        Browse directories and tracks at the given ``uri``.

        ``uri`` is a string which represents some directory belonging to a
        backend. To get the intial root directories for backends pass
        :class:`None` as the URI.

        Returns a list of :class:`mopidy.models.Ref` objects for the
        directories and tracks at the given ``uri``.

        The :class:`~mopidy.models.Ref` objects representing tracks keep the
        track's original URI. A matching pair of objects can look like this::

            Track(uri='dummy:/foo.mp3', name='foo', artists=..., album=...)
            Ref.track(uri='dummy:/foo.mp3', name='foo')

        The :class:`~mopidy.models.Ref` objects representing directories have
        backend specific URIs. These are opaque values, so no one but the
        backend that created them should try and derive any meaning from them.
        The only valid exception to this is checking the scheme, as it is used
        to route browse requests to the correct backend.

        For example, the dummy library's ``/bar`` directory could be returned
        like this::

            Ref.directory(uri='dummy:directory:/bar', name='bar')

        :param string uri: URI to browse
        :rtype: list of :class:`mopidy.models.Ref`

        .. versionadded:: 0.18
        """
        if uri is None:
            return self._roots()
        elif not uri.strip():
            return []
        validation.check_uri(uri)
        return self._browse(uri)

    def _roots(self):
        directories = set()
        backends = self.backends.with_library_browse.values()
        futures = {b: b.library.root_directory for b in backends}
        for backend, future in futures.items():
            with _backend_error_handling(backend):
                root = future.get()
                validation.check_instance(root, models.Ref)
                directories.add(root)
        return sorted(directories, key=operator.attrgetter("name"))

    def _browse(self, uri):
        scheme = urllib.parse.urlparse(uri).scheme
        backend = self.backends.with_library_browse.get(scheme)

        if not backend:
            return []

        with _backend_error_handling(backend):
            result = backend.library.browse(uri).get()
            validation.check_instances(result, models.Ref)
            return result

        return []

    def get_distinct(self, field, query=None):
        """
        List distinct values for a given field from the library.

        This has mainly been added to support the list commands the MPD
        protocol supports in a more sane fashion. Other frontends are not
        recommended to use this method.

        :param string field: One of ``track``, ``artist``, ``albumartist``,
            ``album``, ``composer``, ``performer``, ``date`` or ``genre``.
        :param dict query: Query to use for limiting results, see
            :meth:`search` for details about the query format.
        :rtype: set of values corresponding to the requested field type.

        .. versionadded:: 1.0
        """
        validation.check_choice(field, validation.DISTINCT_FIELDS)
        query is None or validation.check_query(query)  # TODO: normalize?

        result = set()
        futures = {
            b: b.library.get_distinct(field, query)
            for b in self.backends.with_library.values()
        }
        for backend, future in futures.items():
            with _backend_error_handling(backend):
                values = future.get()
                if values is not None:
                    validation.check_instances(values, str)
                    result.update(values)
        return result

    def get_images(self, uris):
        """Lookup the images for the given URIs

        Backends can use this to return image URIs for any URI they know about
        be it tracks, albums, playlists. The lookup result is a dictionary
        mapping the provided URIs to lists of images.

        Unknown URIs or URIs the corresponding backend couldn't find anything
        for will simply return an empty list for that URI.

        :param uris: list of URIs to find images for
        :type uris: list of string
        :rtype: {uri: tuple of :class:`mopidy.models.Image`}

        .. versionadded:: 1.0
        """
        validation.check_uris(uris)

        futures = {
            backend: backend.library.get_images(backend_uris)
            for (backend, backend_uris) in self._get_backends_to_uris(
                uris
            ).items()
            if backend_uris
        }

        results = {uri: tuple() for uri in uris}
        for backend, future in futures.items():
            with _backend_error_handling(backend):
                # Fetch the future's result once instead of calling get()
                # repeatedly for the None check, validation, and iteration.
                result = future.get()
                if result is None:
                    continue
                validation.check_instance(result, Mapping)
                for uri, images in result.items():
                    if uri not in uris:
                        raise exceptions.ValidationError(
                            f"Got unknown image URI: {uri}"
                        )
                    validation.check_instances(images, models.Image)
                    results[uri] += tuple(images)
        return results

    def lookup(self, uris):
        """
        Lookup the given URIs.

        If the URI expands to multiple tracks, the returned list will contain
        them all.

        :param uris: track URIs
        :type uris: list of string
        :rtype: {uri: list of :class:`mopidy.models.Track`}
        """
        validation.check_uris(uris)
        futures = {}
        results = {u: [] for u in uris}

        # TODO: lookup(uris) to backend APIs
        for backend, backend_uris in self._get_backends_to_uris(uris).items():
            if backend_uris:
                for u in backend_uris:
                    futures[(backend, u)] = backend.library.lookup(u)

        for (backend, u), future in futures.items():
            with _backend_error_handling(backend):
                result = future.get()
                if result is not None:
                    validation.check_instances(result, models.Track)
                    # TODO Consider making Track.uri field mandatory, and
                    # then remove this filtering of tracks without URIs.
                    results[u] = [r for r in result if r.uri]

        return results

    def refresh(self, uri=None):
        """
        Refresh library. Limit to URI and below if an URI is given.

        :param uri: directory or track URI
        :type uri: string
        """
        uri is None or validation.check_uri(uri)

        futures = {}
        backends = {}
        uri_scheme = urllib.parse.urlparse(uri).scheme if uri else None

        for backend_scheme, backend in self.backends.with_library.items():
            backends.setdefault(backend, set()).add(backend_scheme)

        for backend, backend_schemes in backends.items():
            if uri_scheme is None or uri_scheme in backend_schemes:
                futures[backend] = backend.library.refresh(uri)

        for backend, future in futures.items():
            with _backend_error_handling(backend):
                future.get()

    def search(self, query, uris=None, exact=False):
        """
        Search the library for tracks where ``field`` contains ``values``.

        ``field`` can be one of ``uri``, ``track_name``, ``album``, ``artist``,
        ``albumartist``, ``composer``, ``performer``, ``track_no``, ``genre``,
        ``date``, ``comment``, or ``any``.

        If ``uris`` is given, the search is limited to results from within the
        URI roots. For example passing ``uris=['file:']`` will limit the search
        to the local backend.

        Examples::

            # Returns results matching 'a' in any backend
            search({'any': ['a']})

            # Returns results matching artist 'xyz' in any backend
            search({'artist': ['xyz']})

            # Returns results matching 'a' and 'b' and artist 'xyz' in any
            # backend
            search({'any': ['a', 'b'], 'artist': ['xyz']})

            # Returns results matching 'a' if within the given URI roots
            # "file:///media/music" and "spotify:"
            search({'any': ['a']}, uris=['file:///media/music', 'spotify:'])

            # Returns results matching artist 'xyz' and 'abc' in any backend
            search({'artist': ['xyz', 'abc']})

        :param query: one or more queries to search for
        :type query: dict
        :param uris: zero or more URI roots to limit the search to
        :type uris: list of string or :class:`None`
        :param exact: if the search should use exact matching
        :type exact: :class:`bool`
        :rtype: list of :class:`mopidy.models.SearchResult`

        .. versionadded:: 1.0
            The ``exact`` keyword argument.
        """
        query = _normalize_query(query)

        uris is None or validation.check_uris(uris)
        validation.check_query(query)
        validation.check_boolean(exact)

        if not query:
            return []

        futures = {}
        for backend, backend_uris in self._get_backends_to_uris(uris).items():
            futures[backend] = backend.library.search(
                query=query, uris=backend_uris, exact=exact
            )

        # Some of our tests check for LookupError to catch bad queries. This is
        # silly and should be replaced with query validation before passing it
        # to the backends.
        reraise = (TypeError, LookupError)

        results = []
        for backend, future in futures.items():
            try:
                with _backend_error_handling(backend, reraise=reraise):
                    result = future.get()
                    if result is not None:
                        validation.check_instance(result, models.SearchResult)
                        results.append(result)
            except TypeError:
                backend_name = backend.actor_ref.actor_class.__name__
                logger.warning(
                    '%s does not implement library.search() with "exact" '
                    "support. Please upgrade it.",
                    backend_name,
                )

        return results
def _normalize_query(query):
broken_client = False
# TODO: this breaks if query is not a dictionary like object...
for (field, values) in query.items():
if isinstance(values, str):
broken_client = True
query[field] = [values]
if broken_client:
logger.warning(
"A client or frontend made a broken library search. Values in "
"queries must be lists of strings, not a string. Please check what"
" sent this query and file a bug. Query: %s",
query,
)
if not query:
logger.warning(
"A client or frontend made a library search with an empty query. "
"This is strongly discouraged. Please check what sent this query "
"and file a bug."
)
return query

View File

@@ -0,0 +1,187 @@
from mopidy import listener
class CoreListener(listener.Listener):
    """
    Marker interface for recipients of events sent by the core actor.

    Any Pykka actor that mixes in this class will receive calls to the methods
    defined here when the corresponding events happen in the core actor. This
    interface is used both for looking up what actors to notify of the events,
    and for providing default implementations for those listeners that are not
    interested in all events.
    """

    @staticmethod
    def send(event, **kwargs):
        """Dispatch ``event`` to all registered core listeners."""
        listener.send(CoreListener, event, **kwargs)

    def on_event(self, event, **kwargs):
        """
        Called on all events.

        *MAY* be implemented by actor. The default delegates to the parent
        class, which forwards the event to the specific handlers below.

        :param event: the event name
        :type event: string
        :param kwargs: any other arguments to the specific event handlers
        """
        super().on_event(event, **kwargs)

    def track_playback_paused(self, tl_track, time_position):
        """
        Event handler: track playback was paused. *MAY* be implemented.

        :param tl_track: the track that was playing when playback paused
        :type tl_track: :class:`mopidy.models.TlTrack`
        :param time_position: the time position in milliseconds
        :type time_position: int
        """

    def track_playback_resumed(self, tl_track, time_position):
        """
        Event handler: track playback was resumed. *MAY* be implemented.

        :param tl_track: the track that was playing when playback resumed
        :type tl_track: :class:`mopidy.models.TlTrack`
        :param time_position: the time position in milliseconds
        :type time_position: int
        """

    def track_playback_started(self, tl_track):
        """
        Event handler: a new track started playing. *MAY* be implemented.

        :param tl_track: the track that just started playing
        :type tl_track: :class:`mopidy.models.TlTrack`
        """

    def track_playback_ended(self, tl_track, time_position):
        """
        Event handler: playback of a track ended. *MAY* be implemented.

        :param tl_track: the track that was played before playback stopped
        :type tl_track: :class:`mopidy.models.TlTrack`
        :param time_position: the time position in milliseconds
        :type time_position: int
        """

    def playback_state_changed(self, old_state, new_state):
        """
        Event handler: the playback state changed. *MAY* be implemented.

        :param old_state: the state before the change
        :type old_state: string from :class:`mopidy.core.PlaybackState` field
        :param new_state: the state after the change
        :type new_state: string from :class:`mopidy.core.PlaybackState` field
        """

    def tracklist_changed(self):
        """
        Event handler: the tracklist changed. *MAY* be implemented.
        """

    def playlists_loaded(self):
        """
        Event handler: playlists were loaded or refreshed. *MAY* be
        implemented.
        """

    def playlist_changed(self, playlist):
        """
        Event handler: a playlist changed. *MAY* be implemented.

        :param playlist: the changed playlist
        :type playlist: :class:`mopidy.models.Playlist`
        """

    def playlist_deleted(self, uri):
        """
        Event handler: a playlist was deleted. *MAY* be implemented.

        :param uri: the URI of the deleted playlist
        :type uri: string
        """

    def options_changed(self):
        """
        Event handler: a playback option changed. *MAY* be implemented.
        """

    def volume_changed(self, volume):
        """
        Event handler: the volume changed. *MAY* be implemented.

        :param volume: the new volume in the range [0..100]
        :type volume: int
        """

    def mute_changed(self, mute):
        """
        Event handler: the mute state changed. *MAY* be implemented.

        :param mute: the new mute state
        :type mute: boolean
        """

    def seeked(self, time_position):
        """
        Event handler: the time position changed by an unexpected amount,
        e.g. at a seek to a new time position. *MAY* be implemented.

        :param time_position: the position that was seeked to in milliseconds
        :type time_position: int
        """

    def stream_title_changed(self, title):
        """
        Event handler: the currently playing stream's title changed. *MAY*
        be implemented.

        :param title: the new stream title
        :type title: string
        """

View File

@@ -0,0 +1,111 @@
import contextlib
import logging
from mopidy import exceptions
from mopidy.internal import validation
from mopidy.internal.models import MixerState
logger = logging.getLogger(__name__)
@contextlib.contextmanager
def _mixer_error_handling(mixer):
try:
yield
except exceptions.ValidationError as e:
logger.error(
"%s mixer returned bad data: %s",
mixer.actor_ref.actor_class.__name__,
e,
)
except Exception:
logger.exception(
"%s mixer caused an exception.",
mixer.actor_ref.actor_class.__name__,
)
class MixerController:
    """Wraps the (optional) mixer actor behind a defensive core API."""

    def __init__(self, mixer):
        # May be None when Mopidy runs without any mixer.
        self._mixer = mixer

    def get_volume(self):
        """Get the volume.

        Integer in range [0..100] or :class:`None` if unknown.

        The volume scale is linear.
        """
        if self._mixer is None:
            return None

        with _mixer_error_handling(self._mixer):
            volume = self._mixer.get_volume().get()
            volume is None or validation.check_integer(volume, min=0, max=100)
            return volume

        return None

    def set_volume(self, volume):
        """Set the volume.

        The volume is defined as an integer in range [0..100].

        The volume scale is linear.

        Returns :class:`True` if call is successful, otherwise :class:`False`.
        """
        validation.check_integer(volume, min=0, max=100)

        if self._mixer is None:
            return False  # TODO: 2.0 return None

        with _mixer_error_handling(self._mixer):
            result = self._mixer.set_volume(volume).get()
            validation.check_instance(result, bool)
            return result

        return False

    def get_mute(self):
        """Get mute state.

        :class:`True` if muted, :class:`False` unmuted, :class:`None` if
        unknown.
        """
        if self._mixer is None:
            return None

        with _mixer_error_handling(self._mixer):
            mute = self._mixer.get_mute().get()
            mute is None or validation.check_instance(mute, bool)
            return mute

        return None

    def set_mute(self, mute):
        """Set mute state.

        :class:`True` to mute, :class:`False` to unmute.

        Returns :class:`True` if call is successful, otherwise :class:`False`.
        """
        validation.check_boolean(mute)

        if self._mixer is None:
            return False  # TODO: 2.0 return None

        with _mixer_error_handling(self._mixer):
            result = self._mixer.set_mute(bool(mute)).get()
            validation.check_instance(result, bool)
            return result

        return False

    def _save_state(self):
        return MixerState(volume=self.get_volume(), mute=self.get_mute())

    def _load_state(self, state, coverage):
        if state and "mixer" in coverage:
            self.set_mute(state.mute)
            # Bug fix: a saved volume of 0 is falsy; compare against None
            # so that volume 0 is restored too.
            if state.volume is not None:
                self.set_volume(state.volume)

View File

@@ -0,0 +1,558 @@
import logging
import urllib
from pykka.messages import ProxyCall
from mopidy.audio import PlaybackState
from mopidy.core import listener
from mopidy.internal import deprecation, models, validation
logger = logging.getLogger(__name__)
class PlaybackController:
def __init__(self, audio, backends, core):
    # TODO: these should be internal
    self.backends = backends
    self.core = core
    self._audio = audio

    # Title reported by the current stream, or None for plain tracks.
    self._stream_title = None
    # One of the PlaybackState string constants.
    self._state = PlaybackState.STOPPED

    # Currently playing/selected track, and the track queued to become
    # current when the audio layer reports a stream change.
    self._current_tl_track = None
    self._pending_tl_track = None

    # Seek target not yet confirmed by the audio layer, and the last
    # observed position — presumably captured around a stop; TODO confirm.
    self._pending_position = None
    self._last_position = None
    # NOTE(review): looks like a "moving backwards in the tracklist"
    # flag — confirm against the rest of this controller.
    self._previous = False

    # Playback position/pause state to restore after state load.
    self._start_at_position = None
    self._start_paused = False

    if self._audio:
        # Let the audio layer ask us for the next track just before the
        # current one finishes (gapless playback hook).
        self._audio.set_about_to_finish_callback(
            self._on_about_to_finish_callback
        )
def _get_backend(self, tl_track):
if tl_track is None:
return None
uri_scheme = urllib.parse.urlparse(tl_track.track.uri).scheme
return self.backends.with_playback.get(uri_scheme, None)
def get_current_tl_track(self):
    """Get the currently playing or selected track.

    Returns a :class:`mopidy.models.TlTrack` or :class:`None` when nothing
    is playing or selected.
    """
    return self._current_tl_track
def _set_current_tl_track(self, value):
    """Set the currently playing or selected track.

    *Internal:* This is only for use by Mopidy's test suite.
    """
    self._current_tl_track = value
def get_current_track(self):
    """
    Get the currently playing or selected track.

    Extracted from :meth:`get_current_tl_track` for convenience.

    Returns a :class:`mopidy.models.Track` or :class:`None`.
    """
    tl_track = self.get_current_tl_track()
    return None if tl_track is None else tl_track.track
def get_current_tlid(self):
    """
    Get the currently playing or selected TLID.

    Extracted from :meth:`get_current_tl_track` for convenience.

    Returns a :class:`int` or :class:`None`.

    .. versionadded:: 1.1
    """
    tl_track = self.get_current_tl_track()
    return None if tl_track is None else tl_track.tlid
def get_stream_title(self):
    """Get the current stream title or :class:`None` for plain tracks."""
    return self._stream_title
def get_state(self):
"""Get The playback state."""
return self._state
def set_state(self, new_state):
"""Set the playback state.
Must be :attr:`PLAYING`, :attr:`PAUSED`, or :attr:`STOPPED`.
Possible states and transitions:
.. digraph:: state_transitions
"STOPPED" -> "PLAYING" [ label="play" ]
"STOPPED" -> "PAUSED" [ label="pause" ]
"PLAYING" -> "STOPPED" [ label="stop" ]
"PLAYING" -> "PAUSED" [ label="pause" ]
"PLAYING" -> "PLAYING" [ label="play" ]
"PAUSED" -> "PLAYING" [ label="resume" ]
"PAUSED" -> "STOPPED" [ label="stop" ]
"""
validation.check_choice(new_state, validation.PLAYBACK_STATES)
(old_state, self._state) = (self.get_state(), new_state)
logger.debug("Changing state: %s -> %s", old_state, new_state)
self._trigger_playback_state_changed(old_state, new_state)
def get_time_position(self):
"""Get time position in milliseconds."""
if self._pending_position is not None:
return self._pending_position
backend = self._get_backend(self.get_current_tl_track())
if backend:
# TODO: Wrap backend call in error handling.
return backend.playback.get_time_position().get()
else:
return 0
    def _on_end_of_stream(self):
        """Audio event handler: stop and clear state when the stream ends."""
        self.set_state(PlaybackState.STOPPED)
        if self._current_tl_track:
            self._trigger_track_playback_ended(self.get_time_position())
        self._set_current_tl_track(None)
    def _on_stream_changed(self, uri):
        """Audio event handler: promote the pending track to current.

        Emits playback-ended for the outgoing track, then either starts
        the newly current track or applies a pending seek.
        """
        if self._last_position is None:
            position = self.get_time_position()
        else:
            # This code path handles the stop() case, uri should be none.
            position, self._last_position = self._last_position, None
        if self._pending_position is None:
            self._trigger_track_playback_ended(position)
        self._stream_title = None
        if self._pending_tl_track:
            self._set_current_tl_track(self._pending_tl_track)
            self._pending_tl_track = None
            if self._pending_position is None:
                self.set_state(PlaybackState.PLAYING)
                self._trigger_track_playback_started()
                # Restore saved position/pause state on the first track
                # after startup (see _load_state()).
                seek_ok = False
                if self._start_at_position:
                    seek_ok = self.seek(self._start_at_position)
                    self._start_at_position = None
                if not seek_ok and self._start_paused:
                    self.pause()
                    self._start_paused = False
            else:
                self._seek(self._pending_position)
    def _on_position_changed(self, position):
        """Audio event handler: confirm a pending seek has been applied."""
        if self._pending_position is not None:
            self._trigger_seeked(self._pending_position)
            self._pending_position = None
            if self._start_paused:
                self._start_paused = False
                self.pause()
    def _on_about_to_finish_callback(self):
        """Callback that performs a blocking actor call to the real callback.
        This is passed to audio, which is allowed to call this code from the
        audio thread. We pass execution into the core actor to ensure that
        there is no unsafe access of state in core. This must block until
        we get a response.
        """
        self.core.actor_ref.ask(
            ProxyCall(
                attr_path=["playback", "_on_about_to_finish"],
                args=[],
                kwargs={},
            )
        )
    def _on_about_to_finish(self):
        """Pick and preload the next (end-of-track) track gaplessly.

        Runs inside the core actor (see
        :meth:`_on_about_to_finish_callback`). Tracks whose backend cannot
        load them are marked unplayable and skipped.
        """
        if self._state == PlaybackState.STOPPED:
            return
        # Unless overridden by other calls (e.g. next / previous / stop) this
        # will be the last position recorded until the track gets reassigned.
        # TODO: Check if case when track.length isn't populated needs to be
        # handled.
        self._last_position = self._current_tl_track.track.length
        pending = self.core.tracklist.eot_track(self._current_tl_track)
        # avoid endless loop if 'repeat' is 'true' and no track is playable
        # * 2 -> second run to get all playable track in a shuffled playlist
        count = self.core.tracklist.get_length() * 2
        while pending:
            backend = self._get_backend(pending)
            if backend:
                try:
                    if backend.playback.change_track(pending.track).get():
                        self._pending_tl_track = pending
                        break
                except Exception:
                    logger.exception(
                        "%s backend caused an exception.",
                        backend.actor_ref.actor_class.__name__,
                    )
            self.core.tracklist._mark_unplayable(pending)
            pending = self.core.tracklist.eot_track(pending)
            count -= 1
            if not count:
                logger.info("No playable track in the list.")
                break
    def _on_tracklist_change(self):
        """
        Tell the playback controller that the current playlist has changed.
        Used by :class:`mopidy.core.TracklistController`.
        """
        tl_tracks = self.core.tracklist.get_tl_tracks()
        if not tl_tracks:
            # Tracklist emptied: nothing left to play.
            self.stop()
            self._set_current_tl_track(None)
        elif self.get_current_tl_track() not in tl_tracks:
            # Current track was removed; keep playing but drop the pointer.
            self._set_current_tl_track(None)
    def next(self):
        """
        Change to the next track.
        The current playback state will be kept. If it was playing, playing
        will continue. If it was paused, it will still be paused, etc.
        """
        state = self.get_state()
        current = self._pending_tl_track or self._current_tl_track
        # avoid endless loop if 'repeat' is 'true' and no track is playable
        # * 2 -> second run to get all playable track in a shuffled playlist
        count = self.core.tracklist.get_length() * 2
        while current:
            pending = self.core.tracklist.next_track(current)
            if self._change(pending, state):
                break
            else:
                self.core.tracklist._mark_unplayable(pending)
            # TODO: this could be needed to prevent a loop in rare cases
            # if current == pending:
            #     break
            current = pending
            count -= 1
            if not count:
                logger.info("No playable track in the list.")
                break
        # TODO return result?
    def pause(self):
        """Pause playback."""
        backend = self._get_backend(self.get_current_tl_track())
        # TODO: Wrap backend call in error handling.
        if not backend or backend.playback.pause().get():
            # TODO: switch to:
            # backend.track(pause)
            # wait for state change?
            self.set_state(PlaybackState.PAUSED)
            self._trigger_track_playback_paused()
    def play(self, tl_track=None, tlid=None):
        """
        Play the given track, or if the given tl_track and tlid is
        :class:`None`, play the currently active track.
        Note that the track **must** already be in the tracklist.
        .. deprecated:: 3.0
            The ``tl_track`` argument. Use ``tlid`` instead.
        :param tl_track: track to play
        :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
        :param tlid: TLID of the track to play
        :type tlid: :class:`int` or :class:`None`
        """
        if sum(o is not None for o in [tl_track, tlid]) > 1:
            raise ValueError('At most one of "tl_track" and "tlid" may be set')
        tl_track is None or validation.check_instance(tl_track, models.TlTrack)
        tlid is None or validation.check_integer(tlid, min=1)
        if tl_track:
            deprecation.warn("core.playback.play:tl_track_kwarg")
        # Resolve a tlid to its tracklist entry (for-else: None if absent).
        if tl_track is None and tlid is not None:
            for tl_track in self.core.tracklist.get_tl_tracks():
                if tl_track.tlid == tlid:
                    break
            else:
                tl_track = None
        if tl_track is not None:
            # TODO: allow from outside tracklist, would make sense given refs?
            assert tl_track in self.core.tracklist.get_tl_tracks()
        elif tl_track is None and self.get_state() == PlaybackState.PAUSED:
            # play() with no arguments while paused acts as resume.
            self.resume()
            return
        current = self._pending_tl_track or self._current_tl_track
        pending = tl_track or current or self.core.tracklist.next_track(None)
        # avoid endless loop if 'repeat' is 'true' and no track is playable
        # * 2 -> second run to get all playable track in a shuffled playlist
        count = self.core.tracklist.get_length() * 2
        while pending:
            if self._change(pending, PlaybackState.PLAYING):
                break
            else:
                self.core.tracklist._mark_unplayable(pending)
            current = pending
            pending = self.core.tracklist.next_track(current)
            count -= 1
            if not count:
                logger.info("No playable track in the list.")
                break
        # TODO return result?
    def _change(self, pending_tl_track, state):
        """Load *pending_tl_track* in its backend and enter *state*.

        Returns :class:`True` on success and :class:`False` when the track
        could not be loaded/played, so callers can try the next track.
        """
        self._pending_tl_track = pending_tl_track
        if not pending_tl_track:
            self.stop()
            self._on_end_of_stream()  # pretend an EOS happened for cleanup
            return True
        backend = self._get_backend(pending_tl_track)
        if not backend:
            return False
        # This must happen before prepare_change gets called, otherwise the
        # backend flushes the information of the track.
        self._last_position = self.get_time_position()
        # TODO: Wrap backend call in error handling.
        backend.playback.prepare_change()
        try:
            if not backend.playback.change_track(pending_tl_track.track).get():
                return False
        except Exception:
            logger.exception(
                "%s backend caused an exception.",
                backend.actor_ref.actor_class.__name__,
            )
            return False
        # TODO: Wrap backend calls in error handling.
        if state == PlaybackState.PLAYING:
            try:
                return backend.playback.play().get()
            except TypeError:
                # TODO: check by binding against underlying play method using
                # inspect and otherwise re-raise?
                logger.error(
                    "%s needs to be updated to work with this "
                    "version of Mopidy.",
                    backend,
                )
                return False
        elif state == PlaybackState.PAUSED:
            return backend.playback.pause().get()
        elif state == PlaybackState.STOPPED:
            # TODO: emit some event now?
            self._current_tl_track = self._pending_tl_track
            self._pending_tl_track = None
            return True
        raise Exception(f"Unknown state: {state}")
    def previous(self):
        """
        Change to the previous track.
        The current playback state will be kept. If it was playing, playing
        will continue. If it was paused, it will still be paused, etc.
        """
        # Flag read by _trigger_track_playback_ended() to skip "mark played".
        self._previous = True
        state = self.get_state()
        current = self._pending_tl_track or self._current_tl_track
        # avoid endless loop if 'repeat' is 'true' and no track is playable
        # * 2 -> second run to get all playable track in a shuffled playlist
        count = self.core.tracklist.get_length() * 2
        while current:
            pending = self.core.tracklist.previous_track(current)
            if self._change(pending, state):
                break
            else:
                self.core.tracklist._mark_unplayable(pending)
            # TODO: this could be needed to prevent a loop in rare cases
            # if current == pending:
            #     break
            current = pending
            count -= 1
            if not count:
                logger.info("No playable track in the list.")
                break
        # TODO: no return value?
    def resume(self):
        """If paused, resume playing the current track."""
        if self.get_state() != PlaybackState.PAUSED:
            return
        backend = self._get_backend(self.get_current_tl_track())
        # TODO: Wrap backend call in error handling.
        if backend and backend.playback.resume().get():
            self.set_state(PlaybackState.PLAYING)
            # TODO: trigger via gst messages
            self._trigger_track_playback_resumed()
        # TODO: switch to:
        # backend.resume()
        # wait for state change?
def seek(self, time_position):
"""
Seeks to time position given in milliseconds.
:param time_position: time position in milliseconds
:type time_position: int
:rtype: :class:`True` if successful, else :class:`False`
"""
# TODO: seek needs to take pending tracks into account :(
validation.check_integer(time_position)
if time_position < 0:
logger.debug("Client seeked to negative position. Seeking to zero.")
time_position = 0
if not self.core.tracklist.get_length():
return False
if self.get_state() == PlaybackState.STOPPED:
self.play()
# We need to prefer the still playing track, but if nothing is playing
# we fall back to the pending one.
tl_track = self._current_tl_track or self._pending_tl_track
if tl_track and tl_track.track.length is None:
return False
if time_position < 0:
time_position = 0
elif time_position > tl_track.track.length:
# TODO: GStreamer will trigger a about-to-finish for us, use that?
self.next()
return True
# Store our target position.
self._pending_position = time_position
# Make sure we switch back to previous track if we get a seek while we
# have a pending track.
if self._current_tl_track and self._pending_tl_track:
self._change(self._current_tl_track, self.get_state())
else:
return self._seek(time_position)
def _seek(self, time_position):
backend = self._get_backend(self.get_current_tl_track())
if not backend:
return False
# TODO: Wrap backend call in error handling.
return backend.playback.seek(time_position).get()
    def stop(self):
        """Stop playing."""
        if self.get_state() != PlaybackState.STOPPED:
            # Record position before the backend flushes it; consumed by
            # _on_stream_changed().
            self._last_position = self.get_time_position()
            backend = self._get_backend(self.get_current_tl_track())
            # TODO: Wrap backend call in error handling.
            if not backend or backend.playback.stop().get():
                self.set_state(PlaybackState.STOPPED)
    def _trigger_track_playback_paused(self):
        """Emit ``track_playback_paused`` to core listeners."""
        logger.debug("Triggering track playback paused event")
        if self.get_current_tl_track() is None:
            return
        listener.CoreListener.send(
            "track_playback_paused",
            tl_track=self.get_current_tl_track(),
            time_position=self.get_time_position(),
        )
    def _trigger_track_playback_resumed(self):
        """Emit ``track_playback_resumed`` to core listeners."""
        logger.debug("Triggering track playback resumed event")
        if self.get_current_tl_track() is None:
            return
        listener.CoreListener.send(
            "track_playback_resumed",
            tl_track=self.get_current_tl_track(),
            time_position=self.get_time_position(),
        )
    def _trigger_track_playback_started(self):
        """Emit ``track_playback_started`` and update play bookkeeping."""
        if self.get_current_tl_track() is None:
            return
        logger.debug("Triggering track playback started event")
        tl_track = self.get_current_tl_track()
        self.core.tracklist._mark_playing(tl_track)
        self.core.history._add_track(tl_track.track)
        listener.CoreListener.send("track_playback_started", tl_track=tl_track)
    def _trigger_track_playback_ended(self, time_position_before_stop):
        """Emit ``track_playback_ended`` for the current track."""
        tl_track = self.get_current_tl_track()
        if tl_track is None:
            return
        logger.debug("Triggering track playback ended event")
        # Skip "mark played" when the track ended due to previous().
        if not self._previous:
            self.core.tracklist._mark_played(self._current_tl_track)
        self._previous = False
        # TODO: Use the lowest of track duration and position.
        listener.CoreListener.send(
            "track_playback_ended",
            tl_track=tl_track,
            time_position=time_position_before_stop,
        )
    def _trigger_playback_state_changed(self, old_state, new_state):
        """Emit ``playback_state_changed`` to core listeners."""
        logger.debug("Triggering playback state change event")
        listener.CoreListener.send(
            "playback_state_changed", old_state=old_state, new_state=new_state
        )
    def _trigger_seeked(self, time_position):
        """Emit ``seeked`` to core listeners."""
        # TODO: Trigger this from audio events?
        logger.debug("Triggering seeked event")
        listener.CoreListener.send("seeked", time_position=time_position)
    def _save_state(self):
        """Snapshot playback state for persistence across restarts."""
        return models.PlaybackState(
            tlid=self.get_current_tlid(),
            time_position=self.get_time_position(),
            state=self.get_state(),
        )
    def _load_state(self, state, coverage):
        """Restore a saved playback state if ``"play-last"`` is in *coverage*."""
        if state and "play-last" in coverage and state.tlid is not None:
            if state.state == PlaybackState.PAUSED:
                self._start_paused = True
            if state.state in (PlaybackState.PLAYING, PlaybackState.PAUSED):
                self._start_at_position = state.time_position
                self.play(tlid=state.tlid)

View File

@@ -0,0 +1,280 @@
import contextlib
import logging
import urllib
from mopidy import exceptions
from mopidy.core import listener
from mopidy.internal import validation
from mopidy.models import Playlist, Ref
logger = logging.getLogger(__name__)
@contextlib.contextmanager
def _backend_error_handling(backend, reraise=None):
    """Context manager that logs backend failures instead of propagating.

    :param backend: the backend actor proxy the wrapped calls talk to
    :param reraise: exception type(s) that should propagate to the caller
        instead of being logged
    """
    try:
        yield
    except exceptions.ValidationError as e:
        logger.error(
            "%s backend returned bad data: %s",
            backend.actor_ref.actor_class.__name__,
            e,
        )
    except Exception as e:
        if reraise and isinstance(e, reraise):
            raise
        logger.exception(
            "%s backend caused an exception.",
            backend.actor_ref.actor_class.__name__,
        )
class PlaylistsController:
    def __init__(self, backends, core):
        # Registry of backend actor proxies, keyed by capability/URI scheme.
        self.backends = backends
        self.core = core
def get_uri_schemes(self):
"""
Get the list of URI schemes that support playlists.
:rtype: list of string
.. versionadded:: 2.0
"""
return list(sorted(self.backends.with_playlists.keys()))
    def as_list(self):
        """
        Get a list of the currently available playlists.
        Returns a list of :class:`~mopidy.models.Ref` objects referring to the
        playlists. In other words, no information about the playlists' content
        is given.
        :rtype: list of :class:`mopidy.models.Ref`
        .. versionadded:: 1.0
        """
        # Fan out to all playlist backends first, then collect the futures,
        # so the backends work concurrently.
        futures = {
            backend: backend.playlists.as_list()
            for backend in set(self.backends.with_playlists.values())
        }
        results = []
        for b, future in futures.items():
            try:
                with _backend_error_handling(b, reraise=NotImplementedError):
                    playlists = future.get()
                    if playlists is not None:
                        validation.check_instances(playlists, Ref)
                        results.extend(playlists)
            except NotImplementedError:
                backend_name = b.actor_ref.actor_class.__name__
                logger.warning(
                    "%s does not implement playlists.as_list(). "
                    "Please upgrade it.",
                    backend_name,
                )
        return results
    def get_items(self, uri):
        """
        Get the items in a playlist specified by ``uri``.
        Returns a list of :class:`~mopidy.models.Ref` objects referring to the
        playlist's items.
        If a playlist with the given ``uri`` doesn't exist, it returns
        :class:`None`.
        :rtype: list of :class:`mopidy.models.Ref`, or :class:`None`
        .. versionadded:: 1.0
        """
        validation.check_uri(uri)
        uri_scheme = urllib.parse.urlparse(uri).scheme
        backend = self.backends.with_playlists.get(uri_scheme, None)
        if not backend:
            return None
        with _backend_error_handling(backend):
            items = backend.playlists.get_items(uri).get()
            items is None or validation.check_instances(items, Ref)
            return items
        # Reached only when the context manager swallowed an exception.
        return None
    def create(self, name, uri_scheme=None):
        """
        Create a new playlist.
        If ``uri_scheme`` matches an URI scheme handled by a current backend,
        that backend is asked to create the playlist. If ``uri_scheme`` is
        :class:`None` or doesn't match a current backend, the first backend is
        asked to create the playlist.
        All new playlists must be created by calling this method, and **not**
        by creating new instances of :class:`mopidy.models.Playlist`.
        :param name: name of the new playlist
        :type name: string
        :param uri_scheme: use the backend matching the URI scheme
        :type uri_scheme: string
        :rtype: :class:`mopidy.models.Playlist` or :class:`None`
        """
        if uri_scheme in self.backends.with_playlists:
            backends = [self.backends.with_playlists[uri_scheme]]
        else:
            backends = self.backends.with_playlists.values()
        # Try each candidate backend until one succeeds.
        for backend in backends:
            with _backend_error_handling(backend):
                result = backend.playlists.create(name).get()
                if result is None:
                    continue
                validation.check_instance(result, Playlist)
                listener.CoreListener.send("playlist_changed", playlist=result)
                return result
        return None
    def delete(self, uri):
        """
        Delete playlist identified by the URI.
        If the URI doesn't match the URI schemes handled by the current
        backends, nothing happens.
        Returns :class:`True` if deleted, :class:`False` otherwise.
        :param uri: URI of the playlist to delete
        :type uri: string
        :rtype: :class:`bool`
        .. versionchanged:: 2.2
            Return type defined.
        """
        validation.check_uri(uri)
        uri_scheme = urllib.parse.urlparse(uri).scheme
        backend = self.backends.with_playlists.get(uri_scheme, None)
        if not backend:
            return False
        success = False
        with _backend_error_handling(backend):
            success = backend.playlists.delete(uri).get()
        if success is None:
            # Return type was defined in Mopidy 2.2. Assume everything went
            # well if the backend doesn't report otherwise.
            success = True
        if success:
            listener.CoreListener.send("playlist_deleted", uri=uri)
        return success
    def lookup(self, uri):
        """
        Lookup playlist with given URI in both the set of playlists and in any
        other playlist sources. Returns :class:`None` if not found.
        :param uri: playlist URI
        :type uri: string
        :rtype: :class:`mopidy.models.Playlist` or :class:`None`
        """
        uri_scheme = urllib.parse.urlparse(uri).scheme
        backend = self.backends.with_playlists.get(uri_scheme, None)
        if not backend:
            return None
        with _backend_error_handling(backend):
            playlist = backend.playlists.lookup(uri).get()
            playlist is None or validation.check_instance(playlist, Playlist)
            return playlist
        # Reached only when the context manager swallowed an exception.
        return None
    # TODO: there is an inconsistency between library.refresh(uri) and this
    # call, not sure how to sort this out.
    def refresh(self, uri_scheme=None):
        """
        Refresh the playlists in :attr:`playlists`.
        If ``uri_scheme`` is :class:`None`, all backends are asked to refresh.
        If ``uri_scheme`` is an URI scheme handled by a backend, only that
        backend is asked to refresh. If ``uri_scheme`` doesn't match any
        current backend, nothing happens.
        :param uri_scheme: limit to the backend matching the URI scheme
        :type uri_scheme: string
        """
        # TODO: check: uri_scheme is None or uri_scheme?
        futures = {}
        backends = {}
        playlists_loaded = False
        # A backend may serve several schemes; group schemes per backend so
        # each backend is only asked to refresh once.
        for backend_scheme, backend in self.backends.with_playlists.items():
            backends.setdefault(backend, set()).add(backend_scheme)
        for backend, backend_schemes in backends.items():
            if uri_scheme is None or uri_scheme in backend_schemes:
                futures[backend] = backend.playlists.refresh()
        for backend, future in futures.items():
            with _backend_error_handling(backend):
                future.get()
                playlists_loaded = True
        if playlists_loaded:
            listener.CoreListener.send("playlists_loaded")
    def save(self, playlist):
        """
        Save the playlist.
        For a playlist to be saveable, it must have the ``uri`` attribute set.
        You must not set the ``uri`` attribute yourself, but use playlist
        objects returned by :meth:`create` or retrieved from :attr:`playlists`,
        which will always give you saveable playlists.
        The method returns the saved playlist. The returned playlist may differ
        from the saved playlist. E.g. if the playlist name was changed, the
        returned playlist may have a different URI. The caller of this method
        must throw away the playlist sent to this method, and use the
        returned playlist instead.
        If the playlist's URI isn't set or doesn't match the URI scheme of a
        current backend, nothing is done and :class:`None` is returned.
        :param playlist: the playlist
        :type playlist: :class:`mopidy.models.Playlist`
        :rtype: :class:`mopidy.models.Playlist` or :class:`None`
        """
        validation.check_instance(playlist, Playlist)
        if playlist.uri is None:
            return  # TODO: log this problem?
        uri_scheme = urllib.parse.urlparse(playlist.uri).scheme
        backend = self.backends.with_playlists.get(uri_scheme, None)
        if not backend:
            return None
        # TODO: we let AssertionError error through due to legacy tests :/
        with _backend_error_handling(backend, reraise=AssertionError):
            playlist = backend.playlists.save(playlist).get()
            playlist is None or validation.check_instance(playlist, Playlist)
            if playlist:
                listener.CoreListener.send(
                    "playlist_changed", playlist=playlist
                )
            return playlist
        # Reached only when the context manager swallowed an exception.
        return None

View File

@@ -0,0 +1,620 @@
import logging
import random
from mopidy import exceptions
from mopidy.core import listener
from mopidy.internal import deprecation, validation
from mopidy.internal.models import TracklistState
from mopidy.models import TlTrack, Track
logger = logging.getLogger(__name__)
class TracklistController:
    def __init__(self, core):
        self.core = core
        # Monotonically increasing ID assigned to tracks as they are added.
        self._next_tlid = 1
        self._tl_tracks = []
        self._version = 0
        # Playback option flags (see the get_*/set_* accessors below).
        self._consume = False
        self._random = False
        # Remaining shuffled play order, consumed when random is enabled.
        self._shuffled = []
        self._repeat = False
        self._single = False
    def get_tl_tracks(self):
        """Get tracklist as list of :class:`mopidy.models.TlTrack`."""
        # Return a copy so callers cannot mutate internal state.
        return self._tl_tracks[:]
    def get_tracks(self):
        """Get tracklist as list of :class:`mopidy.models.Track`."""
        return [tl_track.track for tl_track in self._tl_tracks]
    def get_length(self):
        """Get length of the tracklist."""
        return len(self._tl_tracks)
    def get_version(self):
        """
        Get the tracklist version.
        Integer which is increased every time the tracklist is changed. Is not
        reset before Mopidy is restarted.
        """
        return self._version
    def _increase_version(self):
        """Bump the tracklist version and notify playback and listeners."""
        self._version += 1
        self.core.playback._on_tracklist_change()
        self._trigger_tracklist_changed()
    def get_consume(self):
        """Get consume mode.
        :class:`True`
            Tracks are removed from the tracklist when they have been played.
        :class:`False`
            Tracks are not removed from the tracklist.
        """
        return self._consume
    def set_consume(self, value):
        """Set consume mode.
        :class:`True`
            Tracks are removed from the tracklist when they have been played.
        :class:`False`
            Tracks are not removed from the tracklist.
        """
        validation.check_boolean(value)
        # NOTE(review): the options-changed event is emitted before the
        # attribute is updated, so listeners reading the option in their
        # handler see the old value -- confirm this ordering is intended.
        if self.get_consume() != value:
            self._trigger_options_changed()
        self._consume = value
    def get_random(self):
        """Get random mode.
        :class:`True`
            Tracks are selected at random from the tracklist.
        :class:`False`
            Tracks are played in the order of the tracklist.
        """
        return self._random
    def set_random(self, value):
        """Set random mode.
        :class:`True`
            Tracks are selected at random from the tracklist.
        :class:`False`
            Tracks are played in the order of the tracklist.
        """
        validation.check_boolean(value)
        if self.get_random() != value:
            self._trigger_options_changed()
        if value:
            # Precompute the shuffled play order consumed by next_track().
            self._shuffled = self.get_tl_tracks()
            random.shuffle(self._shuffled)
        self._random = value
    def get_repeat(self):
        """
        Get repeat mode.
        :class:`True`
            The tracklist is played repeatedly.
        :class:`False`
            The tracklist is played once.
        """
        return self._repeat
    def set_repeat(self, value):
        """
        Set repeat mode.
        To repeat a single track, set both ``repeat`` and ``single``.
        :class:`True`
            The tracklist is played repeatedly.
        :class:`False`
            The tracklist is played once.
        """
        validation.check_boolean(value)
        if self.get_repeat() != value:
            self._trigger_options_changed()
        self._repeat = value
    def get_single(self):
        """
        Get single mode.
        :class:`True`
            Playback is stopped after current song, unless in ``repeat`` mode.
        :class:`False`
            Playback continues after current song.
        """
        return self._single
    def set_single(self, value):
        """
        Set single mode.
        :class:`True`
            Playback is stopped after current song, unless in ``repeat`` mode.
        :class:`False`
            Playback continues after current song.
        """
        validation.check_boolean(value)
        if self.get_single() != value:
            self._trigger_options_changed()
        self._single = value
    def index(self, tl_track=None, tlid=None):
        """
        The position of the given track in the tracklist.
        If neither *tl_track* or *tlid* is given we return the index of
        the currently playing track.
        :param tl_track: the track to find the index of
        :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
        :param tlid: TLID of the track to find the index of
        :type tlid: :class:`int` or :class:`None`
        :rtype: :class:`int` or :class:`None`
        .. versionadded:: 1.1
            The *tlid* parameter
        """
        # Short-circuit idiom: validate only when the argument is given.
        tl_track is None or validation.check_instance(tl_track, TlTrack)
        tlid is None or validation.check_integer(tlid, min=1)
        if tl_track is None and tlid is None:
            tl_track = self.core.playback.get_current_tl_track()
        if tl_track is not None:
            try:
                return self._tl_tracks.index(tl_track)
            except ValueError:
                pass
        elif tlid is not None:
            for i, tl_track in enumerate(self._tl_tracks):
                if tl_track.tlid == tlid:
                    return i
        return None
    def get_eot_tlid(self):
        """
        The TLID of the track that will be played after the current track.
        Not necessarily the same TLID as returned by :meth:`get_next_tlid`.
        :rtype: :class:`int` or :class:`None`
        .. versionadded:: 1.1
        """
        current_tl_track = self.core.playback.get_current_tl_track()
        # Suppress the deprecation warning for this internal use.
        with deprecation.ignore("core.tracklist.eot_track"):
            eot_tl_track = self.eot_track(current_tl_track)
        return getattr(eot_tl_track, "tlid", None)
    def eot_track(self, tl_track):
        """
        The track that will be played after the given track.
        Not necessarily the same track as :meth:`next_track`.
        .. deprecated:: 3.0
            Use :meth:`get_eot_tlid` instead.
        :param tl_track: the reference track
        :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
        :rtype: :class:`mopidy.models.TlTrack` or :class:`None`
        """
        deprecation.warn("core.tracklist.eot_track")
        tl_track is None or validation.check_instance(tl_track, TlTrack)
        if self.get_single() and self.get_repeat():
            return tl_track
        elif self.get_single():
            return None
        # Current difference between next and EOT handling is that EOT needs to
        # handle "single", with that out of the way the rest of the logic is
        # shared.
        return self.next_track(tl_track)
    def get_next_tlid(self):
        """
        The tlid of the track that will be played if calling
        :meth:`mopidy.core.PlaybackController.next()`.
        For normal playback this is the next track in the tracklist. If repeat
        is enabled the next track can loop around the tracklist. When random is
        enabled this should be a random track, all tracks should be played once
        before the tracklist repeats.
        :rtype: :class:`int` or :class:`None`
        .. versionadded:: 1.1
        """
        current_tl_track = self.core.playback.get_current_tl_track()
        # Suppress the deprecation warning for this internal use.
        with deprecation.ignore("core.tracklist.next_track"):
            next_tl_track = self.next_track(current_tl_track)
        return getattr(next_tl_track, "tlid", None)
    def next_track(self, tl_track):
        """
        The track that will be played if calling
        :meth:`mopidy.core.PlaybackController.next()`.
        For normal playback this is the next track in the tracklist. If repeat
        is enabled the next track can loop around the tracklist. When random is
        enabled this should be a random track, all tracks should be played once
        before the tracklist repeats.
        .. deprecated:: 3.0
            Use :meth:`get_next_tlid` instead.
        :param tl_track: the reference track
        :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
        :rtype: :class:`mopidy.models.TlTrack` or :class:`None`
        """
        deprecation.warn("core.tracklist.next_track")
        tl_track is None or validation.check_instance(tl_track, TlTrack)
        if not self._tl_tracks:
            return None
        # Reshuffle when the shuffled order is exhausted (repeat) or when
        # starting from scratch.
        if self.get_random() and not self._shuffled:
            if self.get_repeat() or not tl_track:
                logger.debug("Shuffling tracks")
                self._shuffled = self._tl_tracks[:]
                random.shuffle(self._shuffled)
        if self.get_random():
            if self._shuffled:
                return self._shuffled[0]
            return None
        next_index = self.index(tl_track)
        if next_index is None:
            next_index = 0
        else:
            next_index += 1
        if self.get_repeat():
            if self.get_consume() and len(self._tl_tracks) == 1:
                return None
            else:
                # Wrap around the end of the tracklist.
                next_index %= len(self._tl_tracks)
        elif next_index >= len(self._tl_tracks):
            return None
        return self._tl_tracks[next_index]
    def get_previous_tlid(self):
        """
        Returns the TLID of the track that will be played if calling
        :meth:`mopidy.core.PlaybackController.previous()`.
        For normal playback this is the previous track in the tracklist. If
        random and/or consume is enabled it should return the current track
        instead.
        :rtype: :class:`int` or :class:`None`
        .. versionadded:: 1.1
        """
        current_tl_track = self.core.playback.get_current_tl_track()
        # Suppress the deprecation warning for this internal use.
        with deprecation.ignore("core.tracklist.previous_track"):
            previous_tl_track = self.previous_track(current_tl_track)
        return getattr(previous_tl_track, "tlid", None)
    def previous_track(self, tl_track):
        """
        Returns the track that will be played if calling
        :meth:`mopidy.core.PlaybackController.previous()`.
        For normal playback this is the previous track in the tracklist. If
        random and/or consume is enabled it should return the current track
        instead.
        .. deprecated:: 3.0
            Use :meth:`get_previous_tlid` instead.
        :param tl_track: the reference track
        :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None`
        :rtype: :class:`mopidy.models.TlTrack` or :class:`None`
        """
        deprecation.warn("core.tracklist.previous_track")
        tl_track is None or validation.check_instance(tl_track, TlTrack)
        if self.get_repeat() or self.get_consume() or self.get_random():
            return tl_track
        position = self.index(tl_track)
        if position in (None, 0):
            return None
        # Since we know we are not at zero we have to be somewhere in the range
        # 1 - len(tracks) Thus 'position - 1' will always be within the list.
        return self._tl_tracks[position - 1]
    def add(self, tracks=None, at_position=None, uris=None):
        """
        Add tracks to the tracklist.
        If ``uris`` is given instead of ``tracks``, the URIs are
        looked up in the library and the resulting tracks are added to the
        tracklist.
        If ``at_position`` is given, the tracks are inserted at the given
        position in the tracklist. If ``at_position`` is not given, the tracks
        are appended to the end of the tracklist.
        Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event.
        :param tracks: tracks to add
        :type tracks: list of :class:`mopidy.models.Track` or :class:`None`
        :param at_position: position in tracklist to add tracks
        :type at_position: int or :class:`None`
        :param uris: list of URIs for tracks to add
        :type uris: list of string or :class:`None`
        :rtype: list of :class:`mopidy.models.TlTrack`
        .. versionadded:: 1.0
            The ``uris`` argument.
        .. deprecated:: 1.0
            The ``tracks`` argument. Use ``uris``.
        """
        if sum(o is not None for o in [tracks, uris]) != 1:
            raise ValueError('Exactly one of "tracks" or "uris" must be set')
        tracks is None or validation.check_instances(tracks, Track)
        uris is None or validation.check_uris(uris)
        validation.check_integer(at_position or 0)
        if tracks:
            deprecation.warn("core.tracklist.add:tracks_arg")
        if tracks is None:
            # Resolve URIs through the library, preserving the order given.
            tracks = []
            track_map = self.core.library.lookup(uris=uris)
            for uri in uris:
                tracks.extend(track_map[uri])
        tl_tracks = []
        max_length = self.core._config["core"]["max_tracklist_length"]
        for track in tracks:
            if self.get_length() >= max_length:
                raise exceptions.TracklistFull(
                    f"Tracklist may contain at most {max_length:d} tracks."
                )
            tl_track = TlTrack(self._next_tlid, track)
            self._next_tlid += 1
            if at_position is not None:
                self._tl_tracks.insert(at_position, tl_track)
                # Advance so consecutive tracks keep their relative order.
                at_position += 1
            else:
                self._tl_tracks.append(tl_track)
            tl_tracks.append(tl_track)
        if tl_tracks:
            self._increase_version()
        return tl_tracks
    def clear(self):
        """
        Clear the tracklist.
        Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event.
        """
        self._tl_tracks = []
        self._increase_version()
    def filter(self, criteria):
        """
        Filter the tracklist by the given criteria.
        Each rule in the criteria consists of a model field and a list of
        values to compare it against. If the model field matches any of the
        values, it may be returned.
        Only tracks that match all the given criteria are returned.
        Examples::
            # Returns tracks with TLIDs 1, 2, 3, or 4 (tracklist ID)
            filter({'tlid': [1, 2, 3, 4]})
            # Returns track with URIs 'xyz' or 'abc'
            filter({'uri': ['xyz', 'abc']})
            # Returns track with a matching TLIDs (1, 3 or 6) and a
            # matching URI ('xyz' or 'abc')
            filter({'tlid': [1, 3, 6], 'uri': ['xyz', 'abc']})
        :param criteria: one or more rules to match by
        :type criteria: dict, of (string, list) pairs
        :rtype: list of :class:`mopidy.models.TlTrack`
        """
        # NOTE: pop() mutates the caller's criteria dict (removes 'tlid').
        tlids = criteria.pop("tlid", [])
        validation.check_query(criteria, validation.TRACKLIST_FIELDS)
        validation.check_instances(tlids, int)
        matches = self._tl_tracks
        # Narrow the match set one criterion at a time (AND semantics).
        for (key, values) in criteria.items():
            matches = [ct for ct in matches if getattr(ct.track, key) in values]
        if tlids:
            matches = [ct for ct in matches if ct.tlid in tlids]
        return matches
def move(self, start, end, to_position):
"""
Move the tracks in the slice ``[start:end]`` to ``to_position``.
Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event.
:param start: position of first track to move
:type start: int
:param end: position after last track to move
:type end: int
:param to_position: new position for the tracks
:type to_position: int
"""
if start == end:
end += 1
tl_tracks = self._tl_tracks
# TODO: use validation helpers?
assert start < end, "start must be smaller than end"
assert start >= 0, "start must be at least zero"
assert end <= len(
tl_tracks
), "end can not be larger than tracklist length"
assert to_position >= 0, "to_position must be at least zero"
assert to_position <= len(
tl_tracks
), "to_position can not be larger than tracklist length"
new_tl_tracks = tl_tracks[:start] + tl_tracks[end:]
for tl_track in tl_tracks[start:end]:
new_tl_tracks.insert(to_position, tl_track)
to_position += 1
self._tl_tracks = new_tl_tracks
self._increase_version()
def remove(self, criteria):
"""
Remove the matching tracks from the tracklist.
Uses :meth:`filter()` to lookup the tracks to remove.
Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event.
:param criteria: one or more rules to match by
:type criteria: dict, of (string, list) pairs
:rtype: list of :class:`mopidy.models.TlTrack` that were removed
"""
tl_tracks = self.filter(criteria)
for tl_track in tl_tracks:
position = self._tl_tracks.index(tl_track)
del self._tl_tracks[position]
self._increase_version()
return tl_tracks
def shuffle(self, start=None, end=None):
"""
Shuffles the entire tracklist. If ``start`` and ``end`` is given only
shuffles the slice ``[start:end]``.
Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event.
:param start: position of first track to shuffle
:type start: int or :class:`None`
:param end: position after last track to shuffle
:type end: int or :class:`None`
"""
tl_tracks = self._tl_tracks
# TOOD: use validation helpers?
if start is not None and end is not None:
assert start < end, "start must be smaller than end"
if start is not None:
assert start >= 0, "start must be at least zero"
if end is not None:
assert end <= len(tl_tracks), (
"end can not be larger than " + "tracklist length"
)
before = tl_tracks[: start or 0]
shuffled = tl_tracks[start:end]
after = tl_tracks[end or len(tl_tracks) :]
random.shuffle(shuffled)
self._tl_tracks = before + shuffled + after
self._increase_version()
def slice(self, start, end):
"""
Returns a slice of the tracklist, limited by the given start and end
positions.
:param start: position of first track to include in slice
:type start: int
:param end: position after last track to include in slice
:type end: int
:rtype: :class:`mopidy.models.TlTrack`
"""
# TODO: validate slice?
return self._tl_tracks[start:end]
def _mark_playing(self, tl_track):
"""Internal method for :class:`mopidy.core.PlaybackController`."""
if self.get_random() and tl_track in self._shuffled:
self._shuffled.remove(tl_track)
def _mark_unplayable(self, tl_track):
"""Internal method for :class:`mopidy.core.PlaybackController`."""
logger.warning("Track is not playable: %s", tl_track.track.uri)
if self.get_consume() and tl_track is not None:
self.remove({"tlid": [tl_track.tlid]})
if self.get_random() and tl_track in self._shuffled:
self._shuffled.remove(tl_track)
def _mark_played(self, tl_track):
"""Internal method for :class:`mopidy.core.PlaybackController`."""
if self.get_consume() and tl_track is not None:
self.remove({"tlid": [tl_track.tlid]})
return True
return False
def _trigger_tracklist_changed(self):
if self.get_random():
self._shuffled = self._tl_tracks[:]
random.shuffle(self._shuffled)
else:
self._shuffled = []
logger.debug("Triggering event: tracklist_changed()")
listener.CoreListener.send("tracklist_changed")
    def _trigger_options_changed(self):
        # Notify CoreListener subscribers that a tracklist option
        # (consume/random/repeat/single) changed.
        logger.debug("Triggering options changed event")
        listener.CoreListener.send("options_changed")
def _save_state(self):
return TracklistState(
tl_tracks=self._tl_tracks,
next_tlid=self._next_tlid,
consume=self.get_consume(),
random=self.get_random(),
repeat=self.get_repeat(),
single=self.get_single(),
)
def _load_state(self, state, coverage):
if state:
if "mode" in coverage:
self.set_consume(state.consume)
self.set_random(state.random)
self.set_repeat(state.repeat)
self.set_single(state.single)
if "tracklist" in coverage:
self._next_tlid = max(state.next_tlid, self._next_tlid)
self._tl_tracks = list(state.tl_tracks)
self._increase_version()

View File

@@ -0,0 +1,53 @@
class MopidyException(Exception):
    """Base exception for Mopidy, with a mutable ``message`` attribute."""

    def __init__(self, message, *args, **kwargs):
        super().__init__(message, *args, **kwargs)
        self._message = message

    @property
    def message(self):
        """Reimplement message field that was deprecated in Python 2.6"""
        return self._message

    @message.setter  # noqa
    def message(self, value):
        self._message = value
class BackendError(MopidyException):
    """Raised on errors in backend providers."""

    pass
class CoreError(MopidyException):
    """Base class for errors raised by the core API.

    ``errno`` is stored both in ``args`` (via the superclass) and as an
    attribute for direct access.
    """

    def __init__(self, message, errno=None):
        super().__init__(message, errno)
        self.errno = errno
class ExtensionError(MopidyException):
    """Raised when an extension cannot be loaded or cannot run."""

    pass
class FrontendError(MopidyException):
    """Raised on errors in frontends, e.g. failed server startup."""

    pass
class MixerError(MopidyException):
    """Raised on errors in audio mixers."""

    pass
class ScannerError(MopidyException):
    """Raised when scanning a media file for metadata fails."""

    pass
class TracklistFull(CoreError):
    """Raised when an operation would grow the tracklist beyond the
    configured ``core/max_tracklist_length``.

    The ``(message, errno=None)`` constructor is inherited from
    :class:`CoreError`; the previous byte-identical ``__init__`` override
    was redundant and has been removed.
    """
class AudioException(MopidyException):
    """Raised on errors in the audio subsystem."""

    pass
class ValidationError(ValueError):
    """Raised by the internal validation helpers on invalid input.

    Subclasses :exc:`ValueError` (not :exc:`MopidyException`) so callers
    can catch it with plain ``except ValueError``.
    """

    pass

View File

@@ -0,0 +1,340 @@
import collections
import logging
from collections.abc import Mapping
import pkg_resources
from mopidy import config as config_lib
from mopidy import exceptions
from mopidy.internal import path
logger = logging.getLogger(__name__)
# Record bundling everything Mopidy tracks about one discovered extension:
# - extension: the Extension instance
# - entry_point: the pkg_resources entry point it was loaded from
# - config_schema / config_defaults: results of get_config_schema() and
#   get_default_config()
# - command: optional CLI command from get_command()
_extension_data_fields = [
    "extension",
    "entry_point",
    "config_schema",
    "config_defaults",
    "command",
]
ExtensionData = collections.namedtuple("ExtensionData", _extension_data_fields)
class Extension:
    """Base class for Mopidy extensions"""

    dist_name = None
    """The extension's distribution name, as registered on PyPI

    Example: ``Mopidy-Soundspot``
    """

    ext_name = None
    """The extension's short name, as used in setup.py and as config section
    name

    Example: ``soundspot``
    """

    version = None
    """The extension's version

    Should match the :attr:`__version__` attribute on the extension's main
    Python module and the version registered on PyPI.
    """

    def get_default_config(self):
        """The extension's default config as a bytestring

        :returns: bytes or unicode
        """
        raise NotImplementedError(
            'Add at least a config section with "enabled = true"'
        )

    def get_config_schema(self):
        """The extension's config validation schema

        :returns: :class:`~mopidy.config.schemas.ConfigSchema`
        """
        schema = config_lib.ConfigSchema(self.ext_name)
        schema["enabled"] = config_lib.Boolean()
        return schema

    @classmethod
    def _get_or_create_dir(cls, config, config_key):
        """Resolve ``config["core"][config_key] / ext_name`` and create it.

        Shared implementation for the cache/config/data directory helpers
        below, which previously triplicated this logic.
        """
        assert cls.ext_name is not None
        dir_path = path.expand_path(config["core"][config_key]) / cls.ext_name
        path.get_or_create_dir(dir_path)
        return dir_path

    @classmethod
    def get_cache_dir(cls, config):
        """Get or create cache directory for the extension.

        Use this directory to cache data that can safely be thrown away.

        :param config: the Mopidy config object
        :return: pathlib.Path
        """
        return cls._get_or_create_dir(config, "cache_dir")

    @classmethod
    def get_config_dir(cls, config):
        """Get or create configuration directory for the extension.

        :param config: the Mopidy config object
        :return: pathlib.Path
        """
        return cls._get_or_create_dir(config, "config_dir")

    @classmethod
    def get_data_dir(cls, config):
        """Get or create data directory for the extension.

        Use this directory to store data that should be persistent.

        :param config: the Mopidy config object
        :returns: pathlib.Path
        """
        return cls._get_or_create_dir(config, "data_dir")

    def get_command(self):
        """Command to expose to command line users running ``mopidy``.

        :returns:
          Instance of a :class:`~mopidy.commands.Command` class.
        """
        pass

    def validate_environment(self):
        """Checks if the extension can run in the current environment.

        Dependencies described by :file:`setup.py` are checked by Mopidy, so
        you should not check their presence here.

        If a problem is found, raise :exc:`~mopidy.exceptions.ExtensionError`
        with a message explaining the issue.

        :raises: :exc:`~mopidy.exceptions.ExtensionError`
        :returns: :class:`None`
        """
        pass

    def setup(self, registry):
        """
        Register the extension's components in the extension :class:`Registry`.

        For example, to register a backend::

            def setup(self, registry):
                from .backend import SoundspotBackend
                registry.add('backend', SoundspotBackend)

        See :class:`Registry` for a list of registry keys with a special
        meaning. Mopidy will instantiate and start any classes registered under
        the ``frontend`` and ``backend`` registry keys.

        This method can also be used for other setup tasks not involving the
        extension registry.

        :param registry: the extension registry
        :type registry: :class:`Registry`
        """
        raise NotImplementedError
class Registry(Mapping):
    """Registry of components provided by Mopidy extensions.

    Passed to the :meth:`~Extension.setup` method of all extensions. The
    registry can be used like a dict of string keys and lists.

    Some keys have a special meaning, including, but not limited to:

    - ``backend`` is used for Mopidy backend classes.
    - ``frontend`` is used for Mopidy frontend classes.

    Extensions can use the registry for allow other to extend the extension
    itself. For example the ``Mopidy-Local`` historically used the
    ``local:library`` key to allow other extensions to register library
    providers for ``Mopidy-Local`` to use. Extensions should namespace
    custom keys with the extension's :attr:`~Extension.ext_name`,
    e.g. ``local:foo`` or ``http:bar``.
    """

    def __init__(self):
        self._registry = {}

    def add(self, name, cls):
        """Add a component to the registry.

        Multiple classes can be registered to the same name.
        """
        # __getitem__ creates the list on first access, so this covers
        # both the first and subsequent registrations under a name.
        self[name].append(cls)

    def __getitem__(self, name):
        # Looking up an unknown name materializes an empty slot for it.
        if name not in self._registry:
            self._registry[name] = []
        return self._registry[name]

    def __iter__(self):
        return iter(self._registry)

    def __len__(self):
        return len(self._registry)
def load_extensions():
    """Find all installed extensions.

    Iterates the ``mopidy.ext`` entry points, resolves each one, verifies
    it is an :class:`Extension` subclass, instantiates it, and collects its
    config schema, default config, and optional CLI command. Entry points
    that fail any step are logged and skipped instead of aborting startup.

    :returns: list of installed extensions
    """
    installed_extensions = []

    for entry_point in pkg_resources.iter_entry_points("mopidy.ext"):
        logger.debug("Loading entry point: %s", entry_point)

        try:
            extension_class = entry_point.resolve()
        except Exception as e:
            logger.exception(
                f"Failed to load extension {entry_point.name}: {e}"
            )
            continue

        try:
            if not issubclass(extension_class, Extension):
                raise TypeError  # issubclass raises TypeError on non-class
        except TypeError:
            # Fixed: the two implicitly-concatenated string parts lacked a
            # separating space and logged "...valid extensionclass: ...".
            logger.error(
                "Entry point %s did not contain a valid extension class: %r",
                entry_point.name,
                extension_class,
            )
            continue

        try:
            extension = extension_class()
            config_schema = extension.get_config_schema()
            default_config = extension.get_default_config()
            command = extension.get_command()
        except Exception:
            logger.exception(
                "Setup of extension from entry point %s failed, "
                "ignoring extension.",
                entry_point.name,
            )
            continue

        installed_extensions.append(
            ExtensionData(
                extension, entry_point, config_schema, default_config, command
            )
        )
        logger.debug(
            "Loaded extension: %s %s", extension.dist_name, extension.version
        )

    names = (ed.extension.ext_name for ed in installed_extensions)
    logger.debug("Discovered extensions: %s", ", ".join(names))
    return installed_extensions
def validate_extension_data(data):
    """Verify extension's dependencies and environment.

    :param data: an :class:`ExtensionData` to check
    :returns: if extension should be run
    """
    logger.debug("Validating extension: %s", data.extension.ext_name)

    # The entry point name and the extension's declared name must agree.
    if data.extension.ext_name != data.entry_point.name:
        logger.warning(
            "Disabled extension %(ep)s: entry point name (%(ep)s) "
            "does not match extension name (%(ext)s)",
            {"ep": data.entry_point.name, "ext": data.extension.ext_name},
        )
        return False

    # Check the extension's declared package dependencies.
    try:
        data.entry_point.require()
    except pkg_resources.DistributionNotFound as exc:
        logger.info(
            "Disabled extension %s: Dependency %s not found",
            data.extension.ext_name,
            exc,
        )
        return False
    except pkg_resources.VersionConflict as exc:
        # A two-element args tuple carries (found_dist, required_spec);
        # otherwise fall back to the exception's own message.
        if len(exc.args) == 2:
            found, required = exc.args
            logger.info(
                "Disabled extension %s: %s required, but found %s at %s",
                data.extension.ext_name,
                required,
                found,
                found.location,
            )
        else:
            logger.info(
                "Disabled extension %s: %s", data.extension.ext_name, exc
            )
        return False

    # Let the extension run its own environment checks.
    try:
        data.extension.validate_environment()
    except exceptions.ExtensionError as exc:
        logger.info("Disabled extension %s: %s", data.extension.ext_name, exc)
        return False
    except Exception:
        logger.exception(
            "Validating extension %s failed with an exception.",
            data.extension.ext_name,
        )
        return False

    # The config schema must exist and include a Boolean "enabled" option.
    if not data.config_schema:
        logger.error(
            "Extension %s does not have a config schema, disabling.",
            data.extension.ext_name,
        )
        return False
    elif not isinstance(data.config_schema.get("enabled"), config_lib.Boolean):
        logger.error(
            'Extension %s does not have the required "enabled" config'
            " option, disabling.",
            data.extension.ext_name,
        )
        return False
    # Every schema entry must be a proper ConfigValue.
    for key, value in data.config_schema.items():
        if not isinstance(value, config_lib.ConfigValue):
            logger.error(
                "Extension %s config schema contains an invalid value"
                ' for the option "%s", disabling.',
                data.extension.ext_name,
                key,
            )
            return False

    # A default config is required as well.
    if not data.config_defaults:
        logger.error(
            "Extension %s does not have a default config, disabling.",
            data.extension.ext_name,
        )
        return False

    return True

View File

@@ -0,0 +1,32 @@
import logging
import os
import mopidy
from mopidy import config, ext
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
    """The Mopidy-File bundled extension: browse and play local files."""

    dist_name = "Mopidy-File"
    ext_name = "file"
    version = mopidy.__version__

    def get_default_config(self):
        """Read the bundled ``ext.conf`` as this extension's defaults."""
        conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
        return config.read(conf_file)

    def get_config_schema(self):
        """Extend the base schema with the file extension's options."""
        schema = super().get_config_schema()
        for option, value_type in [
            ("media_dirs", config.List),
            ("excluded_file_extensions", config.List),
            ("show_dotfiles", config.Boolean),
            ("follow_symlinks", config.Boolean),
            ("metadata_timeout", config.Integer),
        ]:
            schema[option] = value_type(optional=True)
        return schema

    def setup(self, registry):
        """Register the file backend."""
        from .backend import FileBackend

        registry.add("backend", FileBackend)

View File

@@ -0,0 +1,18 @@
import logging
import pykka
from mopidy import backend
from mopidy.file import library
logger = logging.getLogger(__name__)
class FileBackend(pykka.ThreadingActor, backend.Backend):
    """Backend actor serving ``file://`` URIs from local media dirs."""

    uri_schemes = ["file"]

    def __init__(self, config, audio):
        super().__init__()
        # The file backend offers no playlist support.
        self.playlists = None
        self.playback = backend.PlaybackProvider(audio=audio, backend=self)
        self.library = library.FileLibraryProvider(backend=self, config=config)

View File

@@ -0,0 +1,19 @@
[file]
enabled = true
media_dirs =
$XDG_MUSIC_DIR|Music
~/|Home
show_dotfiles = false
excluded_file_extensions =
.directory
.html
.jpeg
.jpg
.log
.nfo
.pdf
.png
.txt
.zip
follow_symlinks = false
metadata_timeout = 1000

View File

@@ -0,0 +1,148 @@
import logging
import os
from mopidy import backend, exceptions, models
from mopidy.audio import scan, tags
from mopidy.internal import path
logger = logging.getLogger(__name__)
class FileLibraryProvider(backend.LibraryProvider):
    """Library for browsing local files."""

    # TODO: get_images that can pull from metadata and/or .folder.png etc?
    # TODO: handle playlists?

    @property
    def root_directory(self):
        """The browse root :class:`~mopidy.models.Ref`, or ``None``.

        ``None`` when no media dirs are configured, a direct ref when there
        is exactly one, and the virtual ``file:root`` listing otherwise.
        """
        if not self._media_dirs:
            return None
        elif len(self._media_dirs) == 1:
            uri = path.path_to_uri(self._media_dirs[0]["path"])
        else:
            uri = "file:root"
        return models.Ref.directory(name="Files", uri=uri)

    def __init__(self, backend, config):
        super().__init__(backend)
        self._media_dirs = list(self._get_media_dirs(config))
        self._show_dotfiles = config["file"]["show_dotfiles"]
        # Stored lowercased; any comparison must lowercase the other side.
        self._excluded_file_extensions = tuple(
            file_ext.lower()
            for file_ext in config["file"]["excluded_file_extensions"]
        )
        self._follow_symlinks = config["file"]["follow_symlinks"]
        self._scanner = scan.Scanner(timeout=config["file"]["metadata_timeout"])

    def browse(self, uri):
        """Return directory/track refs for the directory at ``uri``."""
        logger.debug("Browsing files at: %s", uri)
        result = []
        local_path = path.uri_to_path(uri)

        if str(local_path) == "root":
            return list(self._get_media_dirs_refs())

        if not self._is_in_basedir(local_path):
            logger.warning(
                "Rejected attempt to browse path (%s) outside dirs defined "
                "in file/media_dirs config.",
                uri,
            )
            return []

        for dir_entry in local_path.iterdir():
            child_path = dir_entry.resolve()
            child_uri = path.path_to_uri(child_path)

            if not self._show_dotfiles and dir_entry.name.startswith("."):
                continue

            # Fixed: the exclusion list is lowercased in __init__, so the
            # suffix must be lowercased too or e.g. ".JPG" slipped through.
            if (
                self._excluded_file_extensions
                and dir_entry.suffix.lower() in self._excluded_file_extensions
            ):
                continue

            # Fixed: test the directory entry itself. The old check ran
            # is_symlink() on the *resolved* path, which is never a symlink,
            # so this filter could not trigger.
            if dir_entry.is_symlink() and not self._follow_symlinks:
                logger.debug("Ignoring symlink: %s", child_uri)
                continue

            if not self._is_in_basedir(child_path):
                logger.debug(
                    "Ignoring symlink to outside base dir: %s", child_uri
                )
                continue

            if child_path.is_dir():
                result.append(
                    models.Ref.directory(name=dir_entry.name, uri=child_uri)
                )
            elif child_path.is_file():
                result.append(
                    models.Ref.track(name=dir_entry.name, uri=child_uri)
                )

        def order(item):
            # Directories first, then by name.
            return (item.type != models.Ref.DIRECTORY, item.name)

        result.sort(key=order)
        return result

    def lookup(self, uri):
        """Scan the file at ``uri``; fall back to a bare track on failure."""
        logger.debug("Looking up file URI: %s", uri)
        local_path = path.uri_to_path(uri)

        try:
            result = self._scanner.scan(uri)
            track = tags.convert_tags_to_track(result.tags).replace(
                uri=uri, length=result.duration
            )
        except exceptions.ScannerError as e:
            logger.warning("Failed looking up %s: %s", uri, e)
            track = models.Track(uri=uri)

        if not track.name:
            # Fall back to the filename when the tags carry no title.
            track = track.replace(name=local_path.name)

        return [track]

    def _get_media_dirs(self, config):
        """Yield ``{"path": Path, "name": str}`` per valid media_dirs entry.

        Entries are ``<path>`` or ``<path>|<name>``; unexpandable or
        non-directory paths are skipped with a log message.
        """
        for entry in config["file"]["media_dirs"]:
            media_dir = {}

            media_dir_split = entry.split("|", 1)
            local_path = path.expand_path(media_dir_split[0])

            if local_path is None:
                logger.debug(
                    "Failed expanding path (%s) from file/media_dirs config "
                    "value.",
                    media_dir_split[0],
                )
                continue
            elif not local_path.is_dir():
                logger.warning(
                    "%s is not a directory. Please create the directory or "
                    "update the file/media_dirs config value.",
                    local_path,
                )
                continue

            media_dir["path"] = local_path
            if len(media_dir_split) == 2:
                media_dir["name"] = media_dir_split[1]
            else:
                # TODO Mpd client should accept / in dir name
                media_dir["name"] = media_dir_split[0].replace(os.sep, "+")

            yield media_dir

    def _get_media_dirs_refs(self):
        """Yield a directory :class:`Ref` for each configured media dir."""
        for media_dir in self._media_dirs:
            yield models.Ref.directory(
                name=media_dir["name"], uri=path.path_to_uri(media_dir["path"])
            )

    def _is_in_basedir(self, local_path):
        """True if ``local_path`` lies inside any configured media dir."""
        return any(
            path.is_path_inside_base_dir(local_path, media_dir["path"])
            for media_dir in self._media_dirs
        )

View File

@@ -0,0 +1,53 @@
import logging
import os
import mopidy
from mopidy import config as config_lib
from mopidy import exceptions, ext
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
    """The Mopidy-HTTP bundled extension: HTTP/WebSocket API frontend."""

    dist_name = "Mopidy-HTTP"
    ext_name = "http"
    version = mopidy.__version__

    def get_default_config(self):
        """Read the bundled ``ext.conf`` as this extension's defaults."""
        conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
        return config_lib.read(conf_file)

    def get_config_schema(self):
        """Extend the base schema with the HTTP extension's options."""
        schema = super().get_config_schema()
        for option, value in [
            ("hostname", config_lib.Hostname()),
            ("port", config_lib.Port()),
            ("static_dir", config_lib.Deprecated()),
            ("zeroconf", config_lib.String(optional=True)),
            ("allowed_origins", config_lib.List(optional=True)),
            ("csrf_protection", config_lib.Boolean(optional=True)),
            ("default_app", config_lib.String(optional=True)),
        ]:
            schema[option] = value
        return schema

    def validate_environment(self):
        """Fail with ExtensionError when Tornado is not importable."""
        try:
            import tornado.web  # noqa
        except ImportError as e:
            raise exceptions.ExtensionError("tornado library not found", e)

    def setup(self, registry):
        """Register the HTTP frontend and the default ``mopidy`` web app."""
        from .actor import HttpFrontend
        from .handlers import make_mopidy_app_factory

        # Expose the registered apps/statics to the frontend class before
        # the actor is started.
        HttpFrontend.apps = registry["http:app"]
        HttpFrontend.statics = registry["http:static"]

        registry.add("frontend", HttpFrontend)
        registry.add(
            "http:app",
            {
                "name": "mopidy",
                "factory": make_mopidy_app_factory(
                    registry["http:app"], registry["http:static"]
                ),
            },
        )

View File

@@ -0,0 +1,210 @@
import json
import logging
import secrets
import threading
import pykka
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
import tornado.websocket
from mopidy import exceptions, models, zeroconf
from mopidy.core import CoreListener
from mopidy.http import Extension, handlers
from mopidy.internal import formatting, network
# Mopidy requires Python >= 3.7 (enforced in mopidy/__init__.py), where
# asyncio is always part of the standard library, so the old try/except
# fallback that set ``asyncio = None`` on ImportError was dead code.
import asyncio
logger = logging.getLogger(__name__)
class HttpFrontend(pykka.ThreadingActor, CoreListener):
    """Pykka actor owning the HTTP server thread and Zeroconf publishing."""

    # Class-level registries, populated by Extension.setup() before the
    # actor is started.
    apps = []
    statics = []

    def __init__(self, config, core):
        super().__init__()
        self.hostname = network.format_hostname(config["http"]["hostname"])
        self.port = config["http"]["port"]
        tornado_hostname = config["http"]["hostname"]
        # "::" means all interfaces; Tornado expects None for that.
        if tornado_hostname == "::":
            tornado_hostname = None
        try:
            logger.debug("Starting HTTP server")
            # Bind the sockets here so startup failures surface as a
            # FrontendError instead of dying inside the server thread.
            sockets = tornado.netutil.bind_sockets(self.port, tornado_hostname)
            self.server = HttpServer(
                config=config,
                core=core,
                sockets=sockets,
                apps=self.apps,
                statics=self.statics,
            )
        except OSError as exc:
            raise exceptions.FrontendError(f"HTTP server startup failed: {exc}")
        self.zeroconf_name = config["http"]["zeroconf"]
        self.zeroconf_http = None
        self.zeroconf_mopidy_http = None

    def on_start(self):
        # Start the server thread, then announce both service types over
        # Zeroconf when a service name is configured.
        logger.info("HTTP server running at [%s]:%s", self.hostname, self.port)
        self.server.start()
        if self.zeroconf_name:
            self.zeroconf_http = zeroconf.Zeroconf(
                name=self.zeroconf_name, stype="_http._tcp", port=self.port
            )
            self.zeroconf_mopidy_http = zeroconf.Zeroconf(
                name=self.zeroconf_name,
                stype="_mopidy-http._tcp",
                port=self.port,
            )
            self.zeroconf_http.publish()
            self.zeroconf_mopidy_http.publish()

    def on_stop(self):
        # Unpublish Zeroconf services before stopping the server thread.
        if self.zeroconf_http:
            self.zeroconf_http.unpublish()
        if self.zeroconf_mopidy_http:
            self.zeroconf_mopidy_http.unpublish()
        self.server.stop()

    def on_event(self, name, **data):
        # Forwards to the module-level on_event() helper, which this method
        # intentionally shadows by name.
        on_event(name, self.server.io_loop, **data)
def on_event(name, io_loop, **data):
    """Broadcast a core event as JSON to every connected WebSocket client."""
    payload = dict(data)
    payload["event"] = name
    message = json.dumps(payload, cls=models.ModelJSONEncoder)
    handlers.WebSocketHandler.broadcast(message, io_loop)
class HttpServer(threading.Thread):
    """Runs the Tornado HTTP server in a dedicated thread."""

    name = "HttpServer"

    def __init__(self, config, core, sockets, apps, statics):
        super().__init__()
        self.config = config
        self.core = core
        self.sockets = sockets  # pre-bound by HttpFrontend
        self.apps = apps
        self.statics = statics
        # Populated by run() once the thread starts.
        self.app = None
        self.server = None
        self.io_loop = None

    def run(self):
        """Thread body: build the Tornado app and block on its ioloop."""
        if asyncio:
            # If asyncio is available, Tornado uses it as its IO loop. Since we
            # start Tornado in a another thread than the main thread, we must
            # explicitly create an asyncio loop for the current thread.
            asyncio.set_event_loop(asyncio.new_event_loop())
        self.app = tornado.web.Application(
            self._get_request_handlers(),
            cookie_secret=self._get_cookie_secret(),
        )
        self.server = tornado.httpserver.HTTPServer(self.app)
        self.server.add_sockets(self.sockets)
        self.io_loop = tornado.ioloop.IOLoop.current()
        self.io_loop.start()
        logger.debug("Stopped HTTP server")

    def stop(self):
        """Stop the ioloop; safe to call from any thread via add_callback."""
        logger.debug("Stopping HTTP server")
        self.io_loop.add_callback(self.io_loop.stop)

    def _get_request_handlers(self):
        # Route precedence: extension apps, then static dirs, then the
        # default "/" redirect.
        request_handlers = []
        request_handlers.extend(self._get_app_request_handlers())
        request_handlers.extend(self._get_static_request_handlers())
        request_handlers.extend(self._get_default_request_handlers())
        logger.debug(
            "HTTP routes from extensions: %s",
            formatting.indent(
                "\n".join(
                    f"{path!r}: {handler!r}"
                    for (path, handler, *_) in request_handlers
                )
            ),
        )
        return request_handlers

    def _get_app_request_handlers(self):
        """Mount each ``http:app`` under ``/<name>``, prefixing its routes."""
        result = []
        for app in self.apps:
            try:
                request_handlers = app["factory"](self.config, self.core)
            except Exception:
                # A broken app must not take the whole server down.
                logger.exception("Loading %s failed.", app["name"])
                continue
            result.append((f"/{app['name']}", handlers.AddSlashHandler))
            for handler in request_handlers:
                handler = list(handler)
                handler[0] = f"/{app['name']}{handler[0]}"
                result.append(tuple(handler))
            logger.debug("Loaded HTTP extension: %s", app["name"])
        return result

    def _get_static_request_handlers(self):
        """Serve each ``http:static`` directory under ``/<name>/``."""
        result = []
        for static in self.statics:
            result.append((f"/{static['name']}", handlers.AddSlashHandler))
            result.append(
                (
                    f"/{static['name']}/(.*)",
                    handlers.StaticFileHandler,
                    {"path": static["path"], "default_filename": "index.html"},
                )
            )
            logger.debug("Loaded static HTTP extension: %s", static["name"])
        return result

    def _get_default_request_handlers(self):
        """Redirect ``/`` to the configured default app (fallback: mopidy)."""
        sites = [app["name"] for app in self.apps + self.statics]
        default_app = self.config["http"]["default_app"]
        if default_app not in sites:
            logger.warning(
                f"HTTP server's default app {default_app!r} not found"
            )
            default_app = "mopidy"
        logger.debug(f"Default webclient is {default_app}")
        return [
            (
                r"/",
                tornado.web.RedirectHandler,
                {"url": f"/{default_app}/", "permanent": False},
            )
        ]

    def _get_cookie_secret(self):
        """Load or create the persistent secret used to sign cookies."""
        # NOTE(review): the secret file is created with default permissions;
        # confirm whether it should be restricted to 0600.
        file_path = Extension.get_data_dir(self.config) / "cookie_secret"
        if not file_path.is_file():
            cookie_secret = secrets.token_hex(32)
            file_path.write_text(cookie_secret)
        else:
            cookie_secret = file_path.read_text().strip()
            if not cookie_secret:
                # Fixed: this previously called logging.error(), sending the
                # message to the root logger instead of this module's logger.
                logger.error(
                    f"HTTP server could not find cookie secret in {file_path}"
                )
        return cookie_secret

View File

@@ -0,0 +1,31 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Mopidy</title>
<link rel="stylesheet" type="text/css" href="mopidy.css">
</head>
<body>
<div class="box focus">
<h1>Mopidy</h1>
<p>This web server is a part of the Mopidy music server. To learn more
about Mopidy, please visit
<a href="http://www.mopidy.com/">www.mopidy.com</a>.</p>
</div>
<div class="box">
<h2>Web clients</h2>
<ul>
{% for app in apps %}
<li><a href="/{{ url_escape(app) }}/">{{ escape(app) }}</a></li>
{% end %}
</ul>
<p>Web clients which are installed as Mopidy extensions will
automatically appear here.</p>
</div>
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.9 KiB

View File

@@ -0,0 +1,43 @@
html {
background: #f8f8f8;
color: #555;
font-family: Geneva, Tahoma, Verdana, sans-serif;
line-height: 1.4em;
}
body {
max-width: 600px;
margin: 0 auto;
}
h1, h2 {
font-weight: 500;
line-height: 1.1em;
}
a {
color: #555;
text-decoration: none;
border-bottom: 1px dotted;
}
img {
border: 0;
}
.box {
background: white;
box-shadow: 0px 5px 5px #f0f0f0;
margin: 1em;
padding: 1em;
}
.box.focus {
background: #465158;
color: #e8ecef;
}
.box a {
color: #465158;
}
.box a:hover {
opacity: 0.8;
}
.box.focus a {
color: #e8ecef;
}

View File

@@ -0,0 +1,8 @@
[http]
enabled = true
hostname = 127.0.0.1
port = 6680
zeroconf = Mopidy HTTP server on $hostname
allowed_origins =
csrf_protection = true
default_app = mopidy

View File

@@ -0,0 +1,273 @@
import functools
import logging
import os
import urllib
import urllib.parse

import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.websocket

import mopidy
from mopidy import core, models
from mopidy.internal import jsonrpc
logger = logging.getLogger(__name__)
def make_mopidy_app_factory(apps, statics):
    """Return an ``http:app`` factory building the ``mopidy`` app's routes.

    :param apps: list of registered ``http:app`` dicts (for the client list)
    :param statics: list of registered ``http:static`` dicts
    """

    def mopidy_app_factory(config, core):
        # Routes: WebSocket endpoint, JSON-RPC endpoint, bundled static
        # assets, and the client-list landing page at "/".
        if not config["http"]["csrf_protection"]:
            logger.warning(
                "HTTP Cross-Site Request Forgery protection is disabled"
            )
        # Origins are matched lowercased; empty config entries are dropped.
        allowed_origins = {
            x.lower() for x in config["http"]["allowed_origins"] if x
        }
        return [
            (
                r"/ws/?",
                WebSocketHandler,
                {
                    "core": core,
                    "allowed_origins": allowed_origins,
                    "csrf_protection": config["http"]["csrf_protection"],
                },
            ),
            (
                r"/rpc",
                JsonRpcHandler,
                {
                    "core": core,
                    "allowed_origins": allowed_origins,
                    "csrf_protection": config["http"]["csrf_protection"],
                },
            ),
            (
                r"/(.+)",
                StaticFileHandler,
                {"path": os.path.join(os.path.dirname(__file__), "data")},
            ),
            (r"/", ClientListHandler, {"apps": apps, "statics": statics}),
        ]

    return mopidy_app_factory
def make_jsonrpc_wrapper(core_actor):
    """Build the JSON-RPC wrapper exposing the core API.

    The inspector describes the core *classes* (served via the
    ``core.describe`` method), while the wrapper dispatches actual calls to
    the given *actor proxy* objects.
    """
    inspector = jsonrpc.JsonRpcInspector(
        objects={
            "core.get_uri_schemes": core.Core.get_uri_schemes,
            "core.get_version": core.Core.get_version,
            "core.history": core.HistoryController,
            "core.library": core.LibraryController,
            "core.mixer": core.MixerController,
            "core.playback": core.PlaybackController,
            "core.playlists": core.PlaylistsController,
            "core.tracklist": core.TracklistController,
        }
    )
    return jsonrpc.JsonRpcWrapper(
        objects={
            "core.describe": inspector.describe,
            "core.get_uri_schemes": core_actor.get_uri_schemes,
            "core.get_version": core_actor.get_version,
            "core.history": core_actor.history,
            "core.library": core_actor.library,
            "core.mixer": core_actor.mixer,
            "core.playback": core_actor.playback,
            "core.playlists": core_actor.playlists,
            "core.tracklist": core_actor.tracklist,
        },
        # Round-trip Mopidy model objects through JSON.
        decoders=[models.model_json_decoder],
        encoders=[models.ModelJSONEncoder],
    )
def _send_broadcast(client, msg):
    """Write ``msg`` to one WebSocket client, swallowing delivery failures."""
    # We could check for client.ws_connection, but we don't really
    # care why the broadcast failed, we just want the rest of them
    # to succeed, so catch everything.
    try:
        client.write_message(msg)
    except Exception as exc:
        logger.debug(
            f"Broadcast of WebSocket message to "
            f"{client.request.remote_ip} failed: {exc}"
        )
        # TODO: should this do the same cleanup as the on_message code?
class WebSocketHandler(tornado.websocket.WebSocketHandler):
    """Bridges WebSocket clients to the core JSON-RPC API and event stream."""

    # XXX This set is shared by all WebSocketHandler objects. This isn't
    # optimal, but there's currently no use case for having more than one of
    # these anyway.
    clients = set()

    @classmethod
    def broadcast(cls, msg, io_loop):
        # Deliver msg to every connected client.
        # This can be called from outside the Tornado ioloop, so we need to
        # safely cross the thread boundary by adding a callback to the loop.
        for client in cls.clients:
            # One callback per client to keep time we hold up the loop short
            io_loop.add_callback(
                functools.partial(_send_broadcast, client, msg)
            )

    def initialize(self, core, allowed_origins, csrf_protection):
        # Called by Tornado per connection with the kwargs from the route.
        self.jsonrpc = make_jsonrpc_wrapper(core)
        self.allowed_origins = allowed_origins
        self.csrf_protection = csrf_protection

    def open(self):
        # Disable Nagle's algorithm for lower event latency, then register
        # this connection for broadcasts.
        self.set_nodelay(True)
        self.clients.add(self)
        logger.debug("New WebSocket connection from %s", self.request.remote_ip)

    def on_close(self):
        self.clients.discard(self)
        logger.debug(
            "Closed WebSocket connection from %s", self.request.remote_ip
        )

    def on_message(self, message):
        # Handle one JSON-RPC request; any error closes the connection.
        if not message:
            return
        logger.debug(
            "Received WebSocket message from %s: %r",
            self.request.remote_ip,
            message,
        )
        try:
            response = self.jsonrpc.handle_json(
                tornado.escape.native_str(message)
            )
            # NOTE(review): write_message() returns a Future, which is always
            # truthy, so this condition effectively means "a response was
            # produced and sent" -- confirm against the Tornado version used.
            if response and self.write_message(response):
                logger.debug(
                    "Sent WebSocket message to %s: %r",
                    self.request.remote_ip,
                    response,
                )
        except Exception as exc:
            logger.error(f"WebSocket request error: {exc}")
            self.close()

    def check_origin(self, origin):
        # Delegate to the module-level helper unless CSRF protection is off.
        if not self.csrf_protection:
            return True
        return check_origin(origin, self.request.headers, self.allowed_origins)
def set_mopidy_headers(request_handler):
    """Attach Mopidy's standard response headers to ``request_handler``."""
    headers = {
        "Cache-Control": "no-cache",
        "X-Mopidy-Version": mopidy.__version__.encode(),
    }
    for header_name, header_value in headers.items():
        request_handler.set_header(header_name, header_value)
def check_origin(origin, request_headers, allowed_origins):
    """Decide whether a request from *origin* should be allowed.

    The request's own ``Host`` header is always added to the allowed set,
    so same-host requests pass. A missing Origin header is rejected, while
    origins that parse to an empty netloc (e.g. ``file://`` or ``null``
    sent for local files) are allowed.
    """
    if origin is None:
        logger.warning("HTTP request denied for missing Origin header")
        return False
    # Serving the API and the client from the same host is always fine.
    allowed_origins.add(request_headers.get("Host"))
    netloc = urllib.parse.urlparse(origin).netloc.lower()
    # Some frameworks (e.g. Apache Cordova) use local files. Requests from
    # these files don't really have a sensible Origin so the browser sets the
    # header to something like 'file://' or 'null'. This results here in an
    # empty netloc which we choose to allow.
    if not netloc:
        return True
    if netloc in allowed_origins:
        return True
    logger.warning('HTTP request denied for Origin "%s"', origin)
    return False
class JsonRpcHandler(tornado.web.RequestHandler):
    """HTTP endpoint exposing Mopidy's JSON-RPC 2.0 API over POST.

    Also answers HEAD (header probe) and OPTIONS (CORS preflight) requests.
    """

    def initialize(self, core, allowed_origins, csrf_protection):
        # Called by Tornado with the kwargs given when the app was built.
        self.jsonrpc = make_jsonrpc_wrapper(core)
        self.allowed_origins = allowed_origins
        self.csrf_protection = csrf_protection

    def head(self):
        self.set_extra_headers()
        self.finish()

    def post(self):
        """Handle one JSON-RPC request body and write back the response."""
        if self.csrf_protection:
            content_type = self.request.headers.get("Content-Type", "")
            # NOTE(review): the exact match rejects values such as
            # "application/json; charset=utf-8" — kept as-is on purpose.
            if content_type != "application/json":
                self.set_status(415, "Content-Type must be application/json")
                return
        data = self.request.body
        if not data:
            return
        logger.debug(
            "Received RPC message from %s: %r", self.request.remote_ip, data
        )
        try:
            self.set_extra_headers()
            response = self.jsonrpc.handle_json(
                tornado.escape.native_str(data)
            )
            if response:
                self.write(response)
                # Bug fix: this logging used to be guarded by the return
                # value of write(), which is always None, so it never ran
                # even though the response had been written.
                logger.debug(
                    "Sent RPC message to %s: %r",
                    self.request.remote_ip,
                    response,
                )
        except Exception as e:
            logger.error("HTTP JSON-RPC request error: %s", e)
            self.write_error(500)

    def set_extra_headers(self):
        set_mopidy_headers(self)
        self.set_header("Accept", "application/json")
        self.set_header("Content-Type", "application/json; utf-8")

    def options(self):
        """Answer CORS preflight requests when CSRF protection is enabled."""
        if self.csrf_protection:
            origin = self.request.headers.get("Origin")
            if not check_origin(
                origin, self.request.headers, self.allowed_origins
            ):
                self.set_status(403, f"Access denied for origin {origin}")
                return
            self.set_header("Access-Control-Allow-Origin", f"{origin}")
            self.set_header("Access-Control-Allow-Headers", "Content-Type")
        self.set_status(204)
        self.finish()
class ClientListHandler(tornado.web.RequestHandler):
    """Serves an HTML page listing the installed HTTP clients."""
    def initialize(self, apps, statics):
        self.apps = apps
        self.statics = statics
    def get_template_path(self):
        # Templates live next to this module (see data/clients.html below).
        return os.path.dirname(__file__)
    def get(self):
        set_mopidy_headers(self)
        # Collect the unique names of all registered apps and static dirs.
        names = set()
        for app in self.apps:
            names.add(app["name"])
        for static in self.statics:
            names.add(static["name"])
        # Mopidy's own app is not a client; don't list it.
        names.discard("mopidy")
        self.render("data/clients.html", apps=sorted(list(names)))
class StaticFileHandler(tornado.web.StaticFileHandler):
    """Static file handler that also sends Mopidy's standard headers."""
    def set_extra_headers(self, path):
        set_mopidy_headers(self)
class AddSlashHandler(tornado.web.RequestHandler):
    """Redirects requests missing a trailing slash to the slashed URL."""
    @tornado.web.addslash
    def prepare(self):
        return super().prepare()

View File

@@ -0,0 +1,50 @@
import platform
import mopidy
"Helpers for configuring HTTP clients used in Mopidy extensions."
def format_proxy(proxy_config, auth=True):
    """Convert a Mopidy proxy config to the commonly used proxy string format.

    Outputs ``scheme://host:port``, ``scheme://user:pass@host:port`` or
    :class:`None` depending on the proxy config provided.

    You can also opt out of getting the basic auth by setting ``auth`` to
    :class:`False`.

    .. versionadded:: 1.1
    """
    hostname = proxy_config.get("hostname")
    if not hostname:
        return None

    scheme = proxy_config.get("scheme") or "http"
    port = proxy_config.get("port")
    if not port or port < 0:
        port = 80  # Fall back to plain HTTP's default port

    username = proxy_config.get("username")
    password = proxy_config.get("password")
    if auth and username and password:
        credentials = f"{username}:{password}@"
    else:
        credentials = ""
    return f"{scheme}://{credentials}{hostname}:{port}"
def format_user_agent(name=None):
    """Construct a User-Agent suitable for use in client code.

    This will identify use by the provided ``name`` (which should be on the
    format ``dist_name/version``), Mopidy version and Python version.

    .. versionadded:: 1.1
    """
    python = f"{platform.python_implementation()}/{platform.python_version()}"
    agent = f"Mopidy/{mopidy.__version__} {python}"
    if name:
        agent = f"{name} {agent}"
    return agent

View File

@@ -0,0 +1,52 @@
import contextlib
import re
import warnings
# Messages used in deprecation warnings are collected here so we can target
# them easily when ignoring warnings.
# Messages used in deprecation warnings are collected here so we can target
# them easily when ignoring warnings.
# The keys are message IDs accepted by warn() and ignore() below; unknown
# IDs are used verbatim as the warning text.
_MESSAGES = {
    # Deprecated features in core playback:
    "core.playback.play:tl_track_kwargs": (
        'playback.play() with "tl_track" argument is pending deprecation use '
        '"tlid" instead'
    ),
    # Deprecated features in core tracklist:
    "core.tracklist.add:tracks_arg": (
        'tracklist.add() "tracks" argument is deprecated'
    ),
    "core.tracklist.eot_track": (
        "tracklist.eot_track() is pending deprecation, use "
        "tracklist.get_eot_tlid()"
    ),
    "core.tracklist.next_track": (
        "tracklist.next_track() is pending deprecation, use "
        "tracklist.get_next_tlid()"
    ),
    "core.tracklist.previous_track": (
        "tracklist.previous_track() is pending deprecation, use "
        "tracklist.get_previous_tlid()"
    ),
}
def warn(msg_id, pending=False):
    """Emit a (pending) deprecation warning for the given message ID.

    IDs not found in ``_MESSAGES`` are used verbatim as the warning text.
    """
    category = PendingDeprecationWarning if pending else DeprecationWarning
    warnings.warn(_MESSAGES.get(msg_id, msg_id), category)
@contextlib.contextmanager
def ignore(ids=None):
    """Context manager suppressing deprecation warnings.

    With no *ids*, every :class:`DeprecationWarning` is ignored; otherwise
    only warnings matching the given message ID(s) are ignored.
    """
    with warnings.catch_warnings():
        if isinstance(ids, str):
            ids = [ids]
        if not ids:
            warnings.filterwarnings("ignore", category=DeprecationWarning)
        else:
            for msg_id in ids:
                pattern = re.escape(_MESSAGES.get(msg_id, msg_id))
                warnings.filterwarnings("ignore", pattern, DeprecationWarning)
        yield

View File

@@ -0,0 +1,189 @@
import functools
import os
import platform
import sys
import pkg_resources
from mopidy.internal import formatting
from mopidy.internal.gi import Gst, gi
def format_dependency_list(adapters=None):
    """Build a human-readable report of Mopidy's runtime dependencies.

    :param adapters: optional list of zero-arg callables, each returning a
        dep-info dict (see e.g. :func:`platform_info`); defaults to
        executable/platform/Python/Mopidy info, every distribution that
        provides a ``mopidy.ext`` entry point, and GStreamer.
    """
    if adapters is None:
        # All distributions providing a "mopidy.ext" entry point, except
        # Mopidy itself, which is reported separately below.
        dist_names = {
            ep.dist.project_name
            for ep in pkg_resources.iter_entry_points("mopidy.ext")
            if ep.dist.project_name != "Mopidy"
        }
        dist_infos = [
            functools.partial(pkg_info, dist_name) for dist_name in dist_names
        ]
        adapters = (
            [
                executable_info,
                platform_info,
                python_info,
                functools.partial(pkg_info, "Mopidy", True),
            ]
            + dist_infos
            + [gstreamer_info]
        )
    return "\n".join([_format_dependency(a()) for a in adapters])
def _format_dependency(dep_info):
    """Render one dependency info dict (and its children) as indented text."""
    name = dep_info["name"]
    lines = []
    if "version" not in dep_info:
        lines.append(f"{name}: not found")
    else:
        if "path" in dep_info:
            source = f" from {dep_info['path']}"
        else:
            source = ""
        lines.append(f"{name}: {dep_info['version']}{source}")
    if "other" in dep_info:
        details = formatting.indent(dep_info["other"], places=4)
        lines.append(f"  Detailed information: {details}")
    # Recurse into transitive dependencies, indenting each child report.
    for child in dep_info.get("dependencies", []):
        child_text = _format_dependency(child)
        lines.append(formatting.indent(child_text, places=2, singles=True))
    return "\n".join(lines)
def executable_info():
    """Describe the executable used to start this process."""
    return {"name": "Executable", "version": sys.argv[0]}
def platform_info():
    """Describe the operating system platform."""
    return {"name": "Platform", "version": platform.platform()}
def python_info():
    """Describe the Python implementation running this process."""
    implementation = platform.python_implementation()
    version = platform.python_version()
    return {
        "name": "Python",
        "version": f"{implementation} {version}",
        "path": os.path.dirname(platform.__file__),
    }
def pkg_info(
    project_name=None, include_transitive_deps=True, include_extras=False
):
    """Describe an installed distribution and, optionally, its dependencies.

    Returns a dict with name/version/path/dependencies; if the distribution
    cannot be resolved, a dict with only the name is returned.

    NOTE(review): pkg_resources is deprecated upstream in favour of
    importlib.metadata — consider migrating when dropping old setuptools.
    """
    if project_name is None:
        project_name = "Mopidy"
    try:
        distribution = pkg_resources.get_distribution(project_name)
        extras = include_extras and distribution.extras or []
        if include_transitive_deps:
            # Don't expand Mopidy's own dependencies again when reached as a
            # transitive dependency, to keep the report finite.
            dependencies = [
                pkg_info(
                    d.project_name,
                    include_transitive_deps=d.project_name != "Mopidy",
                )
                for d in distribution.requires(extras)
            ]
        else:
            dependencies = []
        return {
            "name": project_name,
            "version": distribution.version,
            "path": distribution.location,
            "dependencies": dependencies,
        }
    except pkg_resources.ResolutionError:
        # Distribution (or one of its requirements) is not installed.
        return {
            "name": project_name,
        }
def gstreamer_info():
    """Describe the GStreamer installation, incl. which elements are found."""
    other = []
    other.append(f"Python wrapper: python-gi {gi.__version__}")
    # Partition the elements Mopidy cares about into found/missing.
    found_elements = []
    missing_elements = []
    for name, status in _gstreamer_check_elements():
        if status:
            found_elements.append(name)
        else:
            missing_elements.append(name)
    other.append("Relevant elements:")
    other.append("  Found:")
    for element in found_elements:
        other.append(f"    {element}")
    if not found_elements:
        other.append("    none")
    other.append("  Not found:")
    for element in missing_elements:
        other.append(f"    {element}")
    if not missing_elements:
        other.append("    none")
    return {
        "name": "GStreamer",
        "version": ".".join(map(str, Gst.version())),
        "path": os.path.dirname(gi.__file__),
        "other": "\n".join(other),
    }
def _gstreamer_check_elements():
    """Return ``(element_name, available)`` pairs for elements Mopidy uses."""
    elements_to_check = [
        # Core playback
        "uridecodebin",
        # External HTTP streams
        "souphttpsrc",
        # Spotify
        "appsrc",
        # Audio sinks
        "alsasink",
        "osssink",
        "oss4sink",
        "pulsesink",
        # MP3 encoding and decoding
        #
        # One of flump3dec, mad, and mpg123audiodec is required for MP3
        # playback.
        "flump3dec",
        "id3demux",
        "id3v2mux",
        "lamemp3enc",
        "mad",
        "mpegaudioparse",
        "mpg123audiodec",
        # Ogg Vorbis encoding and decoding
        "vorbisdec",
        "vorbisenc",
        "vorbisparse",
        "oggdemux",
        "oggmux",
        "oggparse",
        # Flac decoding
        "flacdec",
        "flacparse",
        # Shoutcast output
        "shout2send",
    ]
    # Query the registry once and use a set for O(1) membership tests.
    registry = Gst.Registry.get()
    known_names = {
        factory.get_name()
        for factory in registry.get_feature_list(Gst.ElementFactory)
    }
    return [(name, name in known_names) for name in elements_to_check]

View File

@@ -0,0 +1,28 @@
import re
import unicodedata
def indent(string, places=4, linebreak="\n", singles=False):
    """Indent each line of *string* by *places* spaces.

    By default a single-line string is returned untouched and multi-line
    results get a leading *linebreak*; pass ``singles=True`` to indent
    single lines too, without the leading break.
    """
    lines = string.split(linebreak)
    if len(lines) == 1 and not singles:
        return string
    pad = " " * places
    indented = linebreak.join(pad + line for line in lines)
    if singles:
        return indented
    return linebreak + indented
def slugify(value):
    """
    Converts to lowercase, removes non-word characters (alphanumerics and
    underscores) and converts spaces to hyphens. Also strips leading and
    trailing whitespace.

    This function is based on Django's slugify implementation.
    """
    # Decompose accented characters and drop anything outside ASCII.
    ascii_value = (
        unicodedata.normalize("NFKD", value)
        .encode("ascii", "ignore")
        .decode("ascii")
    )
    cleaned = re.sub(r"[^\w\s-]", "", ascii_value).strip().lower()
    return re.sub(r"[-\s]+", "-", cleaned)

View File

@@ -0,0 +1,49 @@
import sys
import textwrap
# Import-time bootstrap of the GObject/GStreamer bindings. Failing fast here
# with a helpful message beats a cryptic ImportError deeper in the stack.
try:
    import gi
    gi.require_version("Gst", "1.0")
    from gi.repository import GLib, GObject, Gst
except ImportError:
    print(
        textwrap.dedent(
            """
            ERROR: A GObject based library was not found.
            Mopidy requires GStreamer to work. GStreamer is a C library with a
            number of dependencies itself, and cannot be installed with the regular
            Python tools like pip.
            Please see http://docs.mopidy.com/en/latest/installation/ for
            instructions on how to install the required dependencies.
            """
        )
    )
    raise
else:
    # Initialize GStreamer before any other Gst API is used.
    Gst.init([])
    gi.require_version("GstPbutils", "1.0")
    from gi.repository import GstPbutils
# Name the process for GLib-based tooling (e.g. PulseAudio stream names).
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
# Minimum GStreamer version Mopidy supports.
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
    sys.exit(
        f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
        f"but found {Gst.version_string()}."
    )
__all__ = [
    "GLib",
    "GObject",
    "Gst",
    "GstPbutils",
    "gi",
]

View File

@@ -0,0 +1,57 @@
import logging
import time
import requests
from mopidy import httpclient
logger = logging.getLogger(__name__)
def get_requests_session(proxy_config, user_agent):
    """Build a :class:`requests.Session` with Mopidy's proxy and UA config."""
    session = requests.Session()
    proxy = httpclient.format_proxy(proxy_config)
    session.proxies.update({"http": proxy, "https": proxy})
    session.headers.update(
        {"user-agent": httpclient.format_user_agent(user_agent)}
    )
    return session
def download(session, uri, timeout=1.0, chunk_size=4096):
    """Download the resource at *uri* using the given requests *session*.

    Returns the response body as bytes, or :class:`None` if the download
    failed or did not complete within *timeout* seconds.

    :param session: a :class:`requests.Session`, e.g. from
        :func:`get_requests_session`
    :param uri: the URI to download
    :param timeout: overall deadline in seconds for the whole download
    :param chunk_size: number of bytes to read per chunk
    """
    try:
        response = session.get(uri, stream=True, timeout=timeout)
    except requests.exceptions.Timeout:
        logger.warning(
            "Download of %r failed due to connection timeout after %.3fs",
            uri,
            timeout,
        )
        return None
    except requests.exceptions.InvalidSchema:
        logger.warning("Download of %r failed due to unsupported schema", uri)
        return None
    except requests.exceptions.RequestException as exc:
        logger.warning("Download of %r failed: %s", uri, exc)
        logger.debug("Download exception details", exc_info=True)
        return None

    try:
        content = []
        deadline = time.time() + timeout
        for chunk in response.iter_content(chunk_size):
            content.append(chunk)
            if time.time() > deadline:
                logger.warning(
                    "Download of %r failed due to download taking more "
                    "than %.3fs",
                    uri,
                    timeout,
                )
                return None
        if not response.ok:
            logger.warning("Problem downloading %r: %s", uri, response.reason)
            return None
        return b"".join(content)
    finally:
        # Bug fix: streamed responses hold on to the pooled connection until
        # fully consumed or closed; always release it, also on the timeout
        # and error early returns above.
        response.close()

View File

@@ -0,0 +1,387 @@
import inspect
import json
import traceback
import pykka
class JsonRpcWrapper:
    """
    Wrap objects and make them accessible through JSON-RPC 2.0 messaging.
    This class takes responsibility of communicating with the objects and
    processing of JSON-RPC 2.0 messages. The transport of the messages over
    HTTP, WebSocket, TCP, or whatever is of no concern to this class.
    The wrapper supports exporting the methods of one or more objects. Either
    way, the objects must be exported with method name prefixes, called
    "mounts".
    To expose objects, add them all to the objects mapping. The key in the
    mapping is used as the object's mounting point in the exposed API::
        jrw = JsonRpcWrapper(objects={
            'foo': foo,
            'hello': lambda: 'Hello, world!',
        })
    This will export the Python callables on the left as the JSON-RPC 2.0
    method names on the right::
        foo.bar() -> foo.bar
        foo.baz() -> foo.baz
        lambda -> hello
    Only the public methods of the mounted objects, or functions/methods
    included directly in the mapping, will be exposed.
    If a method returns a :class:`pykka.Future`, the future will be completed
    and its value unwrapped before the JSON-RPC wrapper returns the response.
    For further details on the JSON-RPC 2.0 spec, see
    http://www.jsonrpc.org/specification
    :param objects: mapping between mounting points and exposed functions or
        class instances
    :type objects: dict
    :param decoders: object builders to be used by :func:`json.loads`
    :type decoders: list of functions taking a dict and returning a dict
    :param encoders: object serializers to be used by :func:`json.dumps`
    :type encoders: list of :class:`json.JSONEncoder` subclasses with the
        method :meth:`default` implemented
    """
    def __init__(self, objects, decoders=None, encoders=None):
        if "" in objects.keys():
            raise AttributeError(
                "The empty string is not allowed as an object mount"
            )
        self.objects = objects
        self.decoder = get_combined_json_decoder(decoders or [])
        self.encoder = get_combined_json_encoder(encoders or [])
    def handle_json(self, request):
        """
        Handles an incoming request encoded as a JSON string.
        Returns a response as a JSON string for commands, and :class:`None` for
        notifications.
        :param request: the serialized JSON-RPC request
        :type request: string
        :rtype: string or :class:`None`
        """
        try:
            request = json.loads(request, object_hook=self.decoder)
        except ValueError:
            response = JsonRpcParseError().get_response()
        else:
            response = self.handle_data(request)
        if response is None:
            return None
        return json.dumps(response, cls=self.encoder)
    def handle_data(self, request):
        """
        Handles an incoming request in the form of a Python data structure.
        Returns a Python data structure for commands, or a :class:`None` for
        notifications.
        :param request: the unserialized JSON-RPC request
        :type request: dict
        :rtype: dict, list, or :class:`None`
        """
        if isinstance(request, list):
            return self._handle_batch(request)
        else:
            return self._handle_single_request(request)
    def _handle_batch(self, requests):
        # Per the spec, an empty batch is an error, and a batch consisting
        # only of notifications yields no response at all.
        if not requests:
            return JsonRpcInvalidRequestError(
                data="Batch list cannot be empty"
            ).get_response()
        responses = []
        for request in requests:
            response = self._handle_single_request(request)
            if response:
                responses.append(response)
        return responses or None
    def _handle_single_request(self, request):
        # Validate, dispatch, and map any failure onto the JSON-RPC error
        # response format. Notifications (no "id" member) get no response.
        try:
            self._validate_request(request)
            args, kwargs = self._get_params(request)
        except JsonRpcInvalidRequestError as error:
            return error.get_response()
        try:
            method = self._get_method(request["method"])
            try:
                result = method(*args, **kwargs)
                if self._is_notification(request):
                    return None
                result = self._unwrap_result(result)
                return {
                    "jsonrpc": "2.0",
                    "id": request["id"],
                    "result": result,
                }
            except TypeError as error:
                # A TypeError from the call is assumed to be a signature
                # mismatch, i.e. invalid params.
                raise JsonRpcInvalidParamsError(
                    data={
                        "type": error.__class__.__name__,
                        "message": str(error),
                        "traceback": traceback.format_exc(),
                    }
                )
            except Exception as error:
                # Anything else raised by the application is wrapped as an
                # application error, with the traceback for debugging.
                raise JsonRpcApplicationError(
                    data={
                        "type": error.__class__.__name__,
                        "message": str(error),
                        "traceback": traceback.format_exc(),
                    }
                )
        except JsonRpcError as error:
            if self._is_notification(request):
                return None
            return error.get_response(request["id"])
    def _validate_request(self, request):
        # Raises JsonRpcInvalidRequestError on any structural problem.
        if not isinstance(request, dict):
            raise JsonRpcInvalidRequestError(data="Request must be an object")
        if "jsonrpc" not in request:
            raise JsonRpcInvalidRequestError(
                data="'jsonrpc' member must be included"
            )
        if request["jsonrpc"] != "2.0":
            raise JsonRpcInvalidRequestError(
                data="'jsonrpc' value must be '2.0'"
            )
        if "method" not in request:
            raise JsonRpcInvalidRequestError(
                data="'method' member must be included"
            )
        if not isinstance(request["method"], str):
            raise JsonRpcInvalidRequestError(data="'method' must be a string")
    def _get_params(self, request):
        # Returns (args, kwargs) from the optional "params" member.
        if "params" not in request:
            return [], {}
        params = request["params"]
        if isinstance(params, list):
            return params, {}
        elif isinstance(params, dict):
            return [], params
        else:
            raise JsonRpcInvalidRequestError(
                data="'params', if given, must be an array or an object"
            )
    def _get_method(self, method_path):
        # Resolves a "mount.method" path to a callable on a mounted object.
        if callable(self.objects.get(method_path, None)):
            # The mounted object is the callable
            return self.objects[method_path]
        # The mounted object contains the callable
        if "." not in method_path:
            raise JsonRpcMethodNotFoundError(
                data=f"Could not find object mount in method name {method_path!r}"
            )
        mount, method_name = method_path.rsplit(".", 1)
        if method_name.startswith("_"):
            raise JsonRpcMethodNotFoundError(
                data="Private methods are not exported"
            )
        try:
            obj = self.objects[mount]
        except KeyError:
            raise JsonRpcMethodNotFoundError(
                data=f"No object found at {mount!r}"
            )
        try:
            return getattr(obj, method_name)
        except AttributeError:
            raise JsonRpcMethodNotFoundError(
                data=f"Object mounted at {mount!r} has no member {method_name!r}"
            )
    def _is_notification(self, request):
        # A request without an "id" member is a notification.
        return "id" not in request
    def _unwrap_result(self, result):
        # Block on pykka futures so the caller always gets a plain value.
        if isinstance(result, pykka.Future):
            result = result.get()
        return result
class JsonRpcError(Exception):
    """Base class for JSON-RPC 2.0 error responses.

    Subclasses override :attr:`code` and :attr:`message` per the spec.
    """

    code = -32000
    message = "Unspecified server error"

    def __init__(self, data=None):
        self.data = data

    def get_response(self, request_id=None):
        """Build the JSON-RPC 2.0 error response object for this error."""
        error = {"code": self.code, "message": self.message}
        if self.data:
            error["data"] = self.data
        return {"jsonrpc": "2.0", "id": request_id, "error": error}
class JsonRpcParseError(JsonRpcError):
    """Invalid JSON was received by the server (spec code -32700)."""
    code = -32700
    message = "Parse error"
class JsonRpcInvalidRequestError(JsonRpcError):
    """The JSON sent is not a valid request object (spec code -32600)."""
    code = -32600
    message = "Invalid Request"
class JsonRpcMethodNotFoundError(JsonRpcError):
    """The method does not exist or is not exported (spec code -32601)."""
    code = -32601
    message = "Method not found"
class JsonRpcInvalidParamsError(JsonRpcError):
    """Invalid method parameter(s) were supplied (spec code -32602)."""
    code = -32602
    message = "Invalid params"
class JsonRpcApplicationError(JsonRpcError):
    """An exception raised by the exposed application code itself."""
    code = 0
    message = "Application error"
def get_combined_json_decoder(decoders):
    """Chain *decoders* into a single ``object_hook`` for ``json.loads``.

    Each decoder receives the output of the previous one.
    """

    def combined_hook(mapping):
        result = mapping
        for decoder in decoders:
            result = decoder(result)
        return result

    return combined_hook
def get_combined_json_encoder(encoders):
    """Build a ``json.JSONEncoder`` subclass that tries each of *encoders*.

    The first encoder whose ``default()`` accepts the object wins; if none
    do, the standard encoder raises :class:`TypeError` as usual.
    """

    class JsonRpcEncoder(json.JSONEncoder):
        def default(self, obj):
            for encoder in encoders:
                try:
                    return encoder().default(obj)
                except TypeError:
                    continue  # This encoder can't handle obj; try the next
            return json.JSONEncoder.default(self, obj)

    return JsonRpcEncoder
class JsonRpcInspector:
    """
    Inspects a group of classes and functions to create a description of what
    methods they can expose over JSON-RPC 2.0.
    To inspect one or more classes, add them all to the objects mapping. The
    key in the mapping is used as the classes' mounting point in the exposed
    API::
        jri = JsonRpcInspector(objects={
            'foo': Foo,
            'hello': lambda: 'Hello, world!',
        })
    Since the inspector is based on inspecting classes and not instances, it
    will not include methods added dynamically. The wrapper works with
    instances, and it will thus export dynamically added methods as well.
    :param objects: mapping between mounts and exposed functions or classes
    :type objects: dict
    """
    def __init__(self, objects):
        if "" in objects.keys():
            raise AttributeError(
                "The empty string is not allowed as an object mount"
            )
        self.objects = objects
    def describe(self):
        """
        Inspects the object and returns a data structure which describes the
        available properties and methods.
        """
        methods = {}
        for mount, obj in self.objects.items():
            if inspect.isroutine(obj):
                # The mounted object is itself a function/lambda.
                methods[mount] = self._describe_method(obj)
            else:
                obj_methods = self._get_methods(obj)
                for name, description in obj_methods.items():
                    if mount:
                        name = f"{mount}.{name}"
                    methods[name] = description
        return methods
    def _get_methods(self, obj):
        # Collect descriptions of all public methods on obj.
        methods = {}
        for name, value in inspect.getmembers(obj):
            if name.startswith("_"):
                continue
            if not inspect.isroutine(value):
                continue
            method = self._describe_method(value)
            if method:
                methods[name] = method
        return methods
    def _describe_method(self, method):
        # Describe a single callable: docstring plus parameter list.
        return {
            "description": inspect.getdoc(method),
            "params": self._describe_params(method),
        }
    def _describe_params(self, method):
        # Build a param list with names, defaults, and varargs/kwargs flags;
        # "self" is omitted since it is never part of the exposed API.
        argspec = inspect.getfullargspec(method)
        defaults = argspec.defaults and list(argspec.defaults) or []
        num_args_without_default = len(argspec.args) - len(defaults)
        no_defaults = [None] * num_args_without_default
        defaults = no_defaults + defaults
        params = []
        for arg, _default in zip(argspec.args, defaults):
            if arg == "self":
                continue
            params.append({"name": arg})
        if argspec.defaults:
            # Defaults align with the tail of the parameter list.
            for i, default in enumerate(reversed(argspec.defaults)):
                params[len(params) - i - 1]["default"] = default
        if argspec.varargs:
            params.append({"name": argspec.varargs, "varargs": True})
        if argspec.varkw:
            params.append({"name": argspec.varkw, "kwargs": True})
        return params

View File

@@ -0,0 +1,199 @@
import logging
import logging.config
import logging.handlers
import platform
# Maps Mopidy's verbosity level (-1 through 4) to log thresholds, with
# separate thresholds for Mopidy's own loggers and all other ("root")
# loggers. Used by VerbosityFilter below.
LOG_LEVELS = {
    -1: dict(root=logging.ERROR, mopidy=logging.WARNING),
    0: dict(root=logging.ERROR, mopidy=logging.INFO),
    1: dict(root=logging.WARNING, mopidy=logging.DEBUG),
    2: dict(root=logging.INFO, mopidy=logging.DEBUG),
    3: dict(root=logging.DEBUG, mopidy=logging.DEBUG),
    4: dict(root=logging.NOTSET, mopidy=logging.NOTSET),
}
# Custom log level which has even lower priority than DEBUG
TRACE_LOG_LEVEL = 5
logging.addLevelName(TRACE_LOG_LEVEL, "TRACE")
logger = logging.getLogger(__name__)
class DelayedHandler(logging.Handler):
    """Log handler that buffers records until :meth:`release` is called.

    Lets records emitted before logging is configured be collected and
    replayed through the root logger afterwards.
    """

    def __init__(self):
        logging.Handler.__init__(self)
        self._released = False
        self._buffer = []

    def handle(self, record):
        # Before release, queue the record; afterwards drop it, since the
        # handlers attached to the root logger have taken over.
        if not self._released:
            self._buffer.append(record)

    def release(self):
        """Replay all buffered records through the root logger."""
        self._released = True
        root = logging.getLogger("")
        pending, self._buffer = self._buffer, []
        for record in pending:
            root.handle(record)
# Module-level singleton that buffers records until setup_logging() runs.
_delayed_handler = DelayedHandler()
def bootstrap_delayed_logging():
    """Install the buffering handler so early log records aren't lost."""
    root = logging.getLogger("")
    root.setLevel(logging.NOTSET)
    root.addHandler(_delayed_handler)
def setup_logging(config, base_verbosity_level, args_verbosity_level):
    """Configure logging from Mopidy's config plus command-line verbosity."""
    logging.captureWarnings(True)
    if config["logging"]["config_file"]:
        # Logging config from file must be read before other handlers are
        # added. If not, the other handlers will have no effect.
        try:
            path = config["logging"]["config_file"]
            logging.config.fileConfig(path, disable_existing_loggers=False)
        except Exception as e:
            # Catch everything as logging does not specify what can go wrong.
            logger.error("Loading logging config %r failed. %s", path, e)
    loglevels = config.get("loglevels", {})
    verbosity_level = get_verbosity_level(
        config, base_verbosity_level, args_verbosity_level
    )
    verbosity_filter = VerbosityFilter(verbosity_level, loglevels)
    formatter = logging.Formatter(config["logging"]["format"])
    if config["logging"]["color"]:
        handler = ColorizingStreamHandler(config.get("logcolors", {}))
    else:
        handler = logging.StreamHandler()
    handler.addFilter(verbosity_filter)
    handler.setFormatter(formatter)
    logging.getLogger("").addHandler(handler)
    # Flush records buffered since bootstrap_delayed_logging().
    _delayed_handler.release()
def get_verbosity_level(config, base_verbosity_level, args_verbosity_level):
    """Combine base, config, and CLI verbosity, clamped to LOG_LEVELS keys."""
    if args_verbosity_level:
        level = base_verbosity_level + args_verbosity_level
    else:
        level = base_verbosity_level + config["logging"]["verbosity"]
    lowest = min(LOG_LEVELS.keys())
    highest = max(LOG_LEVELS.keys())
    return max(lowest, min(level, highest))
class VerbosityFilter(logging.Filter):
    """Filter records by Mopidy's verbosity level and per-logger overrides."""

    def __init__(self, verbosity_level, loglevels):
        self.verbosity_level = verbosity_level
        self.loglevels = loglevels

    def filter(self, record):
        # Explicit per-logger overrides take precedence; an override for
        # "foo" also covers its children like "foo.bar".
        for name, threshold in self.loglevels.items():
            if record.name == name or record.name.startswith(name + "."):
                return record.levelno >= threshold
        # Otherwise Mopidy's own loggers and third-party loggers get
        # separate thresholds from LOG_LEVELS.
        key = "mopidy" if record.name.startswith("mopidy") else "root"
        return record.levelno >= LOG_LEVELS[self.verbosity_level][key]
#: Available log colors.
#: The index of a color in this list, plus 30 (foreground) or 40
#: (background), gives the matching ANSI SGR color code — see
#: ColorizingStreamHandler.colorize().
COLORS = [
    "black",
    "red",
    "green",
    "yellow",
    "blue",
    "magenta",
    "cyan",
    "white",
]
class ColorizingStreamHandler(logging.StreamHandler):
    """
    Stream handler which colorizes the log using ANSI escape sequences.
    Does nothing on Windows, which doesn't support ANSI escape sequences.
    This implementation is based upon https://gist.github.com/vsajip/758430,
    which is:
        Copyright (C) 2010-2012 Vinay Sajip. All rights reserved.
        Licensed under the new BSD license.
    """
    # Map logging levels to (background, foreground, bold/intense)
    level_map = {
        TRACE_LOG_LEVEL: (None, "blue", False),
        logging.DEBUG: (None, "blue", False),
        logging.INFO: (None, "white", False),
        logging.WARNING: (None, "yellow", False),
        logging.ERROR: (None, "red", False),
        logging.CRITICAL: ("red", "white", True),
    }
    # Map logger name to foreground colors
    logger_map = {}
    # ANSI Control Sequence Introducer and reset code.
    csi = "\x1b["
    reset = "\x1b[0m"
    is_windows = platform.system() == "Windows"
    def __init__(self, logger_colors):
        super().__init__()
        # Instance-level override of the class-level logger_map.
        self.logger_map = logger_colors
    @property
    def is_tty(self):
        # Only colorize when the stream is a real terminal.
        isatty = getattr(self.stream, "isatty", None)
        return isatty and isatty()
    def emit(self, record):
        try:
            message = self.format(record)
            self.stream.write(message)
            self.stream.write(getattr(self, "terminator", "\n"))
            self.flush()
        except Exception:
            self.handleError(record)
    def format(self, record):
        message = logging.StreamHandler.format(self, record)
        if not self.is_tty or self.is_windows:
            return message
        # Per-logger colors take precedence over per-level colors.
        for name, color in self.logger_map.items():
            if record.name.startswith(name):
                return self.colorize(message, fg=color)
        if record.levelno in self.level_map:
            bg, fg, bold = self.level_map[record.levelno]
            return self.colorize(message, bg=bg, fg=fg, bold=bold)
        return message
    def colorize(self, message, bg=None, fg=None, bold=False):
        # Wrap message in ANSI SGR codes for the given colors/boldness.
        params = []
        if bg in COLORS:
            params.append(str(COLORS.index(bg) + 40))
        if fg in COLORS:
            params.append(str(COLORS.index(fg) + 30))
        if bold:
            params.append("1")
        if params:
            message = "".join(
                (self.csi, ";".join(params), "m", message, self.reset)
            )
        return message

View File

@@ -0,0 +1,142 @@
from mopidy.internal import validation
from mopidy.models import Ref, TlTrack, fields
from mopidy.models.immutable import ValidatedImmutableObject
class HistoryTrack(ValidatedImmutableObject):
    """
    A history track. Wraps a :class:`Ref` and its timestamp.
    :param timestamp: the timestamp
    :type timestamp: int
    :param track: the track reference
    :type track: :class:`Ref`
    """
    # The timestamp. Read-only.
    # NOTE(review): the unit is not enforced here; presumably milliseconds
    # since epoch — confirm against the history controller.
    timestamp = fields.Integer()
    # The track reference. Read-only.
    track = fields.Field(type=Ref)
class HistoryState(ValidatedImmutableObject):
    """
    State of the history controller.
    Internally used for save/load state.
    :param history: the track history
    :type history: list of :class:`HistoryTrack`
    """
    # The tracks. Read-only. Stored as an immutable tuple.
    history = fields.Collection(type=HistoryTrack, container=tuple)
class MixerState(ValidatedImmutableObject):
    """
    State of the mixer controller.
    Internally used for save/load state.
    :param volume: the volume
    :type volume: int
    :param mute: the mute state
    :type mute: bool
    """
    # The volume, in the range 0..100. Read-only.
    volume = fields.Integer(min=0, max=100)
    # The mute state. Read-only.
    mute = fields.Boolean(default=False)
class PlaybackState(ValidatedImmutableObject):
    """
    State of the playback controller.
    Internally used for save/load state.
    :param tlid: current track tlid
    :type tlid: int
    :param time_position: play position
    :type time_position: int
    :param state: playback state
    :type state: :class:`validation.PLAYBACK_STATES`
    """
    # The tlid of current playing track (>= 1). Read-only.
    tlid = fields.Integer(min=1)
    # The playback position (>= 0). Read-only.
    time_position = fields.Integer(min=0)
    # The playback state, one of validation.PLAYBACK_STATES. Read-only.
    state = fields.Field(choices=validation.PLAYBACK_STATES)
class TracklistState(ValidatedImmutableObject):
    """
    State of the tracklist controller.
    Internally used for save/load state.
    :param repeat: the repeat mode
    :type repeat: bool
    :param consume: the consume mode
    :type consume: bool
    :param random: the random mode
    :type random: bool
    :param single: the single mode
    :type single: bool
    :param next_tlid: the id for the next added track
    :type next_tlid: int
    :param tl_tracks: the list of tracks
    :type tl_tracks: list of :class:`TlTrack`
    """
    # The repeat mode. Read-only.
    repeat = fields.Boolean()
    # The consume mode. Read-only.
    consume = fields.Boolean()
    # The random mode. Read-only.
    random = fields.Boolean()
    # The single mode. Read-only.
    single = fields.Boolean()
    # The tlid to assign to the next added track (>= 0). Read-only.
    next_tlid = fields.Integer(min=0)
    # The list of tracks, stored as an immutable tuple. Read-only.
    tl_tracks = fields.Collection(type=TlTrack, container=tuple)
class CoreState(ValidatedImmutableObject):
    """
    State of all Core controller.
    Internally used for save/load state.
    :param history: State of the history controller
    :type history: :class:`HistoryState`
    :param mixer: State of the mixer controller
    :type mixer: :class:`MixerState`
    :param playback: State of the playback controller
    :type playback: :class:`PlaybackState`
    :param tracklist: State of the tracklist controller
    :type tracklist: :class:`TracklistState`
    """
    # State of the history controller.
    history = fields.Field(type=HistoryState)
    # State of the mixer controller.
    mixer = fields.Field(type=MixerState)
    # State of the playback controller.
    playback = fields.Field(type=PlaybackState)
    # State of the tracklist controller.
    tracklist = fields.Field(type=TracklistState)

View File

@@ -0,0 +1,31 @@
import logging
import re
import socket
logger = logging.getLogger(__name__)
def try_ipv6_socket():
    """Determine if system really supports IPv6"""
    if not socket.has_ipv6:
        return False
    try:
        sock = socket.socket(socket.AF_INET6)
    except OSError as exc:
        logger.debug(
            f"Platform supports IPv6, but socket creation failed, "
            f"disabling: {exc}"
        )
        return False
    sock.close()
    return True
#: Boolean value that indicates if creating an IPv6 socket will succeed.
#: Evaluated once at import time.
has_ipv6 = try_ipv6_socket()
def format_hostname(hostname):
    """Format hostname for display.

    If the system supports IPv6 and *hostname* looks like an IPv4 address,
    return the equivalent IPv4-mapped IPv6 form so it works with IPv6
    sockets.
    """
    # Bug fix: the dots must be escaped. The previous pattern used bare '.'
    # (any character), so hostnames such as "1a2b3c4" would also match and
    # get wrongly rewritten.
    if has_ipv6 and re.match(r"\d+\.\d+\.\d+\.\d+", hostname) is not None:
        hostname = f"::ffff:{hostname}"
    return hostname

View File

@@ -0,0 +1,104 @@
import logging
import pathlib
import re
import urllib
from mopidy.internal import xdg
logger = logging.getLogger(__name__)
# Mapping of XDG variable names to resolved paths, computed once at import;
# used by expand_path() to substitute "$<XDG_VAR>" placeholders.
XDG_DIRS = xdg.get_dirs()
def get_or_create_dir(dir_path):
    """Expand *dir_path* and ensure it exists as a directory.

    Raises :class:`OSError` if a regular file already occupies the path.
    Returns the expanded :class:`pathlib.Path`.
    """
    target = expand_path(dir_path)
    if target.is_file():
        raise OSError(
            f"A file with the same name as the desired dir, "
            f"{target!r}, already exists."
        )
    if not target.is_dir():
        logger.info(f"Creating dir {target.as_uri()}")
        target.mkdir(mode=0o755, parents=True)
    return target
def get_or_create_file(file_path, mkdir=True, content=None):
    """Expand *file_path* and ensure the file exists.

    Optionally creates parent dirs first, and writes *content* (a str is
    encoded to bytes) when the file is newly created. Returns the expanded
    :class:`pathlib.Path`.
    """
    target = expand_path(file_path)
    if isinstance(content, str):
        content = content.encode()
    if mkdir:
        get_or_create_dir(target.parent)
    if not target.is_file():
        logger.info(f"Creating file {target.as_uri()}")
        target.touch(exist_ok=False)
        if content is not None:
            target.write_bytes(content)
    return target
def get_unix_socket_path(socket_path):
    """Return the filesystem path from a ``unix:<path>`` socket string.

    Returns :class:`None` for anything not starting with ``unix:``.
    """
    match = re.match(r"unix:(.*)", socket_path)
    return match.group(1) if match else None
def path_to_uri(path):
    """
    Convert OS specific path to file:// URI.

    Accepts either unicode strings or bytestrings. The encoding of any
    bytestring will be maintained so that :func:`uri_to_path` can return the
    same bytestring.

    Returns a file:// URI as an unicode string.
    """
    path_obj = pathlib.Path(path)
    return path_obj.as_uri()
def uri_to_path(uri):
    """
    Convert an URI to a OS specific path.
    """
    # Percent-decode to bytes first, then decode with surrogateescape so
    # undecodable bytes survive a later round-trip back to a URI.
    raw = urllib.parse.unquote_to_bytes(urllib.parse.urlsplit(uri).path)
    return pathlib.Path(raw.decode(errors="surrogateescape"))
def expand_path(path):
    """Expand ``$XDG_*`` variables and ``~``; return a resolved Path.

    Returns :class:`None` when the path still contains an unknown ``$``
    variable after substitution.
    """
    if isinstance(path, bytes):
        path = path.decode(errors="surrogateescape")
    text = str(pathlib.Path(path))
    for xdg_var, xdg_dir in XDG_DIRS.items():
        text = text.replace("$" + xdg_var, str(xdg_dir))
    if "$" in text:
        return None
    return pathlib.Path(text).expanduser().resolve()
def is_path_inside_base_dir(path, base_path):
    """Return True if ``path`` lies inside (or is) ``base_path``.

    For an existing file, its parent directory is compared instead, so
    ``/tmp/foo.m3u`` is not considered inside ``/tmp/foo`` merely because
    the strings share a prefix.
    """
    def _resolve(value):
        if isinstance(value, bytes):
            value = value.decode(errors="surrogateescape")
        return pathlib.Path(value).resolve()

    candidate = _resolve(path)
    base = _resolve(base_path)
    if candidate.is_file():
        candidate = candidate.parent
    try:
        candidate.relative_to(base)
    except ValueError:
        return False
    return True

View File

@@ -0,0 +1,137 @@
import configparser
import io
from mopidy.internal import validation
import xml.etree.ElementTree as elementtree # noqa: N813
def parse(data):
    """Parse playlist bytes, dispatching on the detected format.

    Content matching no known header is treated as a plain URI list.
    """
    if detect_extm3u_header(data):
        return list(parse_extm3u(data))
    if detect_pls_header(data):
        return list(parse_pls(data))
    if detect_asx_header(data):
        return list(parse_asx(data))
    if detect_xspf_header(data):
        return list(parse_xspf(data))
    return list(parse_urilist(data))  # Fallback
def detect_extm3u_header(data):
    """True if ``data`` begins with the ``#EXTM3U`` header (any case)."""
    return data[:7].upper() == b"#EXTM3U"
def detect_pls_header(data):
    """True if ``data`` begins with the ``[playlist]`` header (any case)."""
    return data[:10].lower() == b"[playlist]"
def detect_xspf_header(data):
    """True if ``data`` looks like the start of an XSPF document."""
    head = data[:150]
    # Cheap substring check first; only parse XML if it might be XSPF.
    if b"xspf" not in head.lower():
        return False
    try:
        for _event, element in elementtree.iterparse(
            io.BytesIO(head), events=["start"]
        ):
            # Decide from the document's root element only.
            return element.tag.lower() == "{http://xspf.org/ns/0/}playlist"
    except elementtree.ParseError:
        pass
    return False
def detect_asx_header(data):
    """True if ``data`` looks like the start of an ASX document."""
    head = data[:50]
    # Cheap substring check first; only parse XML if it might be ASX.
    if b"asx" not in head.lower():
        return False
    try:
        for _event, element in elementtree.iterparse(
            io.BytesIO(head), events=["start"]
        ):
            # Decide from the document's root element only.
            return element.tag.lower() == "asx"
    except elementtree.ParseError:
        pass
    return False
def parse_extm3u(data):
    """Yield stripped URI lines from extended M3U ``data``.

    Lines before the ``#EXTM3U`` header, blank lines, comment lines, and
    undecodable lines are all skipped.
    """
    # TODO: convert non URIs to file URIs.
    in_body = False
    for raw in data.splitlines():
        if not in_body:
            if raw.startswith(b"#EXTM3U"):
                in_body = True
            continue
        if not raw.strip() or raw.startswith(b"#"):
            continue
        try:
            text = raw.decode()
        except UnicodeDecodeError:
            continue
        yield text.strip()
def parse_pls(data):
    """Yield file entries from a PLS playlist, in numbered order."""
    # TODO: convert non URIs to file URIs.
    cp = configparser.RawConfigParser()
    try:
        cp.read_string(data.decode())
    except configparser.Error:
        return
    for section in cp.sections():
        if section.lower() != "playlist":
            continue
        entries = cp.getint(section, "numberofentries")
        for i in range(1, entries + 1):
            yield cp.get(section, f"file{i}").strip("\"'")
def parse_xspf(data):
    """Yield track locations from an XSPF playlist; nothing on bad XML."""
    root = None
    try:
        # iterparse yields elements bottom-up; the last one is the root.
        for _event, root in elementtree.iterparse(io.BytesIO(data)):
            root.tag = root.tag.lower()  # normalize
    except elementtree.ParseError:
        return
    ns = "http://xspf.org/ns/0/"
    for track in root.iterfind(f"{{{ns}}}tracklist/{{{ns}}}track"):
        yield track.findtext(f"{{{ns}}}location")
def parse_asx(data):
    """Yield entry hrefs from an ASX playlist; nothing on bad XML."""
    root = None
    try:
        # iterparse yields elements bottom-up; the last one is the root.
        for _event, root in elementtree.iterparse(io.BytesIO(data)):
            root.tag = root.tag.lower()  # normalize
    except elementtree.ParseError:
        return
    # v3-style <entry><ref href=...> entries first, then v2-style
    # <entry href=...> entries.
    for ref in root.findall("entry/ref[@href]"):
        yield ref.get("href", "").strip()
    for entry in root.findall("entry[@href]"):
        yield entry.get("href", "").strip()
def parse_urilist(data):
    """Yield valid URIs from newline-separated ``data``.

    Blank lines, comment lines, undecodable lines, and invalid URIs are
    skipped.
    """
    for raw in data.splitlines():
        if not raw.strip() or raw.startswith(b"#"):
            continue
        try:
            line = raw.decode()
            validation.check_uri(line)
        except ValueError:
            # Covers both UnicodeDecodeError (a ValueError subclass) from
            # decode() and validation failures from check_uri().
            continue
        yield line.strip()

View File

@@ -0,0 +1,53 @@
import logging
import threading
import pykka
import _thread
logger = logging.getLogger(__name__)
def exit_process():
    """Terminate the process by raising KeyboardInterrupt in the main thread."""
    logger.debug("Interrupting main...")
    _thread.interrupt_main()
    logger.debug("Interrupted main")
def sigterm_handler(signum, frame):
    """A :mod:`signal` handler which will exit the program on signal.

    This function is not called when the process' main thread is running a GLib
    mainloop. In that case, the GLib mainloop must listen for SIGTERM signals
    and quit itself.

    For Mopidy subcommands that does not run the GLib mainloop, this handler
    ensures a proper shutdown of the process on SIGTERM.

    :param signum: the received signal number (unused)
    :param frame: the interrupted stack frame (unused)
    """
    logger.info("Got SIGTERM signal. Exiting...")
    exit_process()
def stop_actors_by_class(klass):
    """Stop every running Pykka actor instance of ``klass``."""
    actors = pykka.ActorRegistry.get_by_class(klass)
    logger.debug("Stopping %d instance(s) of %s", len(actors), klass.__name__)
    for actor in actors:
        actor.stop()
def stop_remaining_actors():
    """Stop all actors still in the registry, logging them as probable bugs.

    The loop repeats until the registry is empty, so actors that appear
    while stopping others are handled too.
    """
    num_actors = len(pykka.ActorRegistry.get_all())
    while num_actors:
        logger.error(
            "There are actor threads still running, this is probably a bug"
        )
        logger.debug(
            "Seeing %d actor and %d non-actor thread(s): %s",
            num_actors,
            threading.active_count() - num_actors,
            ", ".join([t.name for t in threading.enumerate()]),
        )
        logger.debug("Stopping %d actor(s)...", num_actors)
        pykka.ActorRegistry.stop_all()
        num_actors = len(pykka.ActorRegistry.get_all())
    logger.debug("All actors stopped.")

View File

@@ -0,0 +1,59 @@
import gzip
import json
import logging
import pathlib
import tempfile
from mopidy import models
logger = logging.getLogger(__name__)
def load(path):
    """
    Deserialize data from file.

    :param path: full path to import file
    :type path: pathlib.Path
    :return: deserialized data, or an empty dict if the file is missing,
        unreadable, or not valid gzipped JSON
    :rtype: dict
    """
    # TODO: raise an exception in case of error?
    if not path.is_file():
        logger.info("File does not exist: %s", path)
        return {}
    try:
        with gzip.open(str(path), "rb") as fp:
            return json.load(fp, object_hook=models.model_json_decoder)
    except (OSError, ValueError) as exc:
        # OSError covers I/O and gzip failures; ValueError is the base of
        # json.JSONDecodeError.
        logger.warning(f"Loading JSON failed: {exc}")
        return {}
def dump(path, data):
    """
    Serialize data to file.

    The data is written to a temporary file in the same directory and then
    renamed into place, so readers never observe a partially written file.

    :param path: full path to export file
    :type path: pathlib.Path
    :param data: dictionary containing data to save
    :type data: dict
    """
    # TODO: cleanup directory/basename.* files.
    tmp = tempfile.NamedTemporaryFile(
        prefix=path.name + ".", dir=str(path.parent), delete=False
    )
    tmp_path = pathlib.Path(tmp.name)
    try:
        data_string = json.dumps(
            data, cls=models.ModelJSONEncoder, indent=2, separators=(",", ": ")
        )
        # Close the temp file *before* renaming it into place. The previous
        # code left `tmp` open, leaking its file descriptor and renaming a
        # file whose final bytes could still sit in the process' buffer.
        with tmp:
            with gzip.GzipFile(fileobj=tmp, mode="wb") as fp:
                fp.write(data_string.encode())
        tmp_path.rename(path)
    finally:
        # Only reached with tmp_path still present if something failed
        # before the rename.
        if tmp_path.exists():
            tmp_path.unlink()

View File

@@ -0,0 +1,14 @@
import contextlib
import logging
import time
from mopidy.internal import log
logger = logging.getLogger(__name__)
@contextlib.contextmanager
def time_logger(name, level=log.TRACE_LOG_LEVEL):
    """Context manager logging how many milliseconds its body took.

    :param name: label used in the log message
    :param level: log level to emit at, defaulting to Mopidy's TRACE level
    """
    start = time.time()
    yield
    logger.log(level, "%s took %dms", name, (time.time() - start) * 1000)

View File

@@ -0,0 +1,133 @@
import urllib
from collections.abc import Iterable, Mapping
from mopidy import exceptions
PLAYBACK_STATES = {"paused", "stopped", "playing"}
SEARCH_FIELDS = {
"uri",
"track_name",
"album",
"artist",
"albumartist",
"composer",
"performer",
"track_no",
"genre",
"date",
"comment",
"any",
}
PLAYLIST_FIELDS = {"uri", "name"} # TODO: add length and last_modified?
TRACKLIST_FIELDS = { # TODO: add bitrate, length, disc_no, track_no, modified?
"uri",
"name",
"genre",
"date",
"comment",
"musicbrainz_id",
}
DISTINCT_FIELDS = {
"track",
"artist",
"albumartist",
"album",
"composer",
"performer",
"date",
"genre",
}
# TODO: _check_iterable(check, msg, **kwargs) + [check(a) for a in arg]?
def _check_iterable(arg, msg, **kwargs):
    """Ensure we have an iterable which is not a string or an iterator"""
    if isinstance(arg, str):
        raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
    if not isinstance(arg, Iterable):
        raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
    if iter(arg) is iter(arg):
        # An iterator returns itself from iter(); a re-iterable container
        # produces a fresh iterator each call.
        raise exceptions.ValidationError(msg.format(arg=arg, **kwargs))
def check_choice(arg, choices, msg="Expected one of {choices}, not {arg!r}"):
    """Validate that ``arg`` is one of ``choices``."""
    if arg in choices:
        return
    raise exceptions.ValidationError(
        msg.format(arg=arg, choices=tuple(choices))
    )
def check_boolean(arg, msg="Expected a boolean, not {arg!r}"):
    """Validate that ``arg`` is a :class:`bool` instance."""
    check_instance(arg, bool, msg=msg)
def check_instance(arg, cls, msg="Expected a {name} instance, not {arg!r}"):
    """Validate that ``arg`` is an instance of ``cls``."""
    if isinstance(arg, cls):
        return
    raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__))
def check_instances(arg, cls, msg="Expected a list of {name}, not {arg!r}"):
    """Validate that ``arg`` is a re-iterable collection of ``cls`` instances."""
    _check_iterable(arg, msg, name=cls.__name__)
    if any(not isinstance(item, cls) for item in arg):
        raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__))
def check_integer(arg, min=None, max=None):
    """Validate that ``arg`` is an int within the optional [min, max] range."""
    if not isinstance(arg, int):
        raise exceptions.ValidationError(f"Expected an integer, not {arg!r}")
    if min is not None and arg < min:
        raise exceptions.ValidationError(
            f"Expected number larger or equal to {min}, not {arg!r}"
        )
    if max is not None and arg > max:
        raise exceptions.ValidationError(
            f"Expected number smaller or equal to {max}, not {arg!r}"
        )
def check_query(arg, fields=SEARCH_FIELDS, list_values=True):
    """Validate a search query mapping of field name to value(s).

    :param arg: the query to validate; must be a :class:`Mapping`
    :param fields: the set of allowed field names
    :param list_values: when True each value must be a list of non-empty
        strings, when False each value must be a single non-empty string
    :raises exceptions.ValidationError: on any violation
    """
    # TODO: normalize name -> track_name
    # TODO: normalize value -> [value]
    # TODO: normalize blank -> [] or just remove field?
    # TODO: remove list_values?
    if not isinstance(arg, Mapping):
        raise exceptions.ValidationError(
            f"Expected a query dictionary, not {arg!r}"
        )
    for key, value in arg.items():
        check_choice(
            key,
            fields,
            msg="Expected query field to be one of {choices}, not {arg!r}",
        )
        if list_values:
            msg = 'Expected "{key}" to be list of strings, not {arg!r}'
            _check_iterable(value, msg, key=key)
            # Plain loop instead of the previous list comprehension that was
            # built only for its side effects and immediately discarded.
            for v in value:
                _check_query_value(key, v, msg)
        else:
            _check_query_value(
                key, value, 'Expected "{key}" to be a string, not {arg!r}'
            )
def _check_query_value(key, arg, msg):
    """Validate that ``arg`` is a non-blank string."""
    if isinstance(arg, str) and arg.strip():
        return
    raise exceptions.ValidationError(msg.format(arg=arg, key=key))
def check_uri(arg, msg="Expected a valid URI, not {arg!r}"):
    """Validate that ``arg`` is a string carrying a URI scheme."""
    if not isinstance(arg, str):
        raise exceptions.ValidationError(msg.format(arg=arg))
    if not urllib.parse.urlparse(arg).scheme:
        raise exceptions.ValidationError(msg.format(arg=arg))
def check_uris(arg, msg="Expected a list of URIs, not {arg!r}"):
    """Validate that ``arg`` is a re-iterable collection of valid URIs.

    :raises exceptions.ValidationError: on any violation
    """
    _check_iterable(arg, msg)
    # Plain loop instead of the previous list comprehension that was built
    # only for its side effects and immediately discarded.
    for a in arg:
        check_uri(a, msg)

View File

@@ -0,0 +1,29 @@
import os
import subprocess
import mopidy
def get_version():
    """Return ``git describe`` output in a source checkout, otherwise the
    installed package version."""
    try:
        return get_git_version()
    except OSError:
        return mopidy.__version__
def get_git_version():
    """Return the version reported by ``git describe`` in the project dir.

    A leading ``v`` tag prefix is stripped.

    :raises OSError: if git is missing or ``git describe`` fails
    """
    project_dir = os.path.abspath(
        os.path.join(os.path.dirname(mopidy.__file__), "..")
    )
    process = subprocess.Popen(
        ["git", "describe"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=project_dir,
    )
    # communicate() drains both pipes before waiting for exit. The previous
    # wait()-then-read order can deadlock if a pipe buffer fills up, and it
    # never read stderr at all.
    stdout, _stderr = process.communicate()
    if process.returncode != 0:
        raise OSError('Execution of "git describe" failed')
    version = stdout.strip().decode()
    if version.startswith("v"):
        version = version[1:]
    return version

View File

@@ -0,0 +1,68 @@
import configparser
import os
import pathlib
def get_dirs():
    """Returns a dict of all the known XDG Base Directories for the current user.

    The keys ``XDG_CACHE_DIR``, ``XDG_CONFIG_DIR``, and ``XDG_DATA_DIR`` is
    always available.

    Additional keys, like ``XDG_MUSIC_DIR``, may be available if the
    ``$XDG_CONFIG_DIR/user-dirs.dirs`` file exists and is parseable.

    See http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
    for the XDG Base Directory specification.
    """
    base = {
        "XDG_CACHE_DIR": ("XDG_CACHE_HOME", "~/.cache"),
        "XDG_CONFIG_DIR": ("XDG_CONFIG_HOME", "~/.config"),
        "XDG_DATA_DIR": ("XDG_DATA_HOME", "~/.local/share"),
    }
    dirs = {
        key: pathlib.Path(os.getenv(env_var, default)).expanduser()
        for key, (env_var, default) in base.items()
    }
    dirs.update(_get_user_dirs(dirs["XDG_CONFIG_DIR"]))
    return dirs
def _get_user_dirs(xdg_config_dir):
    """Returns a dict of XDG dirs read from
    ``$XDG_CONFIG_HOME/user-dirs.dirs``.

    This is used at import time for most users of :mod:`mopidy`. By rolling our
    own implementation instead of using :meth:`glib.get_user_special_dir` we
    make it possible for many extensions to run their test suites, which are
    importing parts of :mod:`mopidy`, in a virtualenv with global site-packages
    disabled, and thus no :mod:`glib` available.
    """
    dirs_file = xdg_config_dir / "user-dirs.dirs"
    if not dirs_file.exists():
        return {}
    data = dirs_file.read_bytes()
    # The file is shell-like (XDG_MUSIC_DIR="$HOME/Music"); prepend an INI
    # section header and strip quoting so configparser can parse it.
    data = b"[XDG_USER_DIRS]\n" + data
    data = data.replace(b"$HOME", bytes(pathlib.Path.home()))
    data = data.replace(b'"', b"")
    config = configparser.RawConfigParser()
    config.read_string(data.decode())
    # RawConfigParser.read_string always yields str keys and str values, so
    # the previous bytes-key decode and None-value guard were dead code.
    return {
        key.upper(): pathlib.Path(value).resolve()
        for key, value in config.items("XDG_USER_DIRS")
    }

View File

@@ -0,0 +1,45 @@
import logging
import pykka
from pykka.messages import ProxyCall
logger = logging.getLogger(__name__)
def send(cls, event, **kwargs):
    """Notify every running actor of ``cls`` about ``event``."""
    listeners = pykka.ActorRegistry.get_by_class(cls)
    logger.debug("Sending %s to %s: %s", event, cls.__name__, kwargs)
    # Save time by calling methods on Pykka actor without creating a
    # throwaway actor proxy.
    #
    # Because we use `.tell()` there is no return channel for any errors,
    # so Pykka logs them immediately. The alternative would be to use
    # `.ask()` and `.get()` the returned futures to block for the listeners
    # to react and return their exceptions to us. Since emitting events in
    # practise is making calls upwards in the stack, blocking here would
    # quickly deadlock.
    call = ProxyCall(attr_path=["on_event"], args=[event], kwargs=kwargs)
    for actor_ref in listeners:
        actor_ref.tell(call)
class Listener:
    """Base class for event listeners that dispatches events by name."""

    def on_event(self, event, **kwargs):
        """
        Called on all events.

        *MAY* be implemented by actor. By default, this method forwards the
        event to the specific event methods.

        :param event: the event name
        :type event: string
        :param kwargs: any other arguments to the specific event handlers
        """
        try:
            handler = getattr(self, event)
            handler(**kwargs)
        except Exception:
            # Ensure we don't crash the actor due to "bad" events.
            logger.exception(
                "Triggering event failed: %s(%s)", event, ", ".join(kwargs)
            )

View File

@@ -0,0 +1,13 @@
# XXX This module is only here to ease the migration from Mopidy-Local being
# bundled with Mopidy to being an independent extension. This file should
# probably be removed before Mopidy 3.0 final ships.
import warnings
from mopidy_local import * # noqa
warnings.warn(
"Mopidy-Local has been moved to its own project. "
"Update any imports from `mopidy.local` to use `mopidy_local` instead.",
DeprecationWarning,
)

View File

@@ -0,0 +1,31 @@
import logging
import os
import mopidy
from mopidy import config, ext
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
    """Mopidy extension adding M3U playlist support."""

    dist_name = "Mopidy-M3U"
    ext_name = "m3u"
    version = mopidy.__version__

    def get_default_config(self):
        """Read the default configuration from the bundled ``ext.conf``."""
        conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
        return config.read(conf_file)

    def get_config_schema(self):
        """Declare the m3u config values and their types."""
        schema = super().get_config_schema()
        schema["base_dir"] = config.Path(optional=True)
        schema["default_encoding"] = config.String()
        schema["default_extension"] = config.String(choices=[".m3u", ".m3u8"])
        schema["playlists_dir"] = config.Path(optional=True)
        return schema

    def setup(self, registry):
        """Register the M3U backend with Mopidy's extension registry."""
        from .backend import M3UBackend

        registry.add("backend", M3UBackend)

View File

@@ -0,0 +1,13 @@
import pykka
from mopidy import backend
from . import playlists
class M3UBackend(pykka.ThreadingActor, backend.Backend):
    """Backend actor serving ``m3u:`` URIs via the playlists provider."""

    uri_schemes = ["m3u"]

    def __init__(self, config, audio):
        # ``audio`` is unused here: this backend only manages playlists.
        super().__init__()
        self.playlists = playlists.M3UPlaylistsProvider(self, config)

View File

@@ -0,0 +1,6 @@
[m3u]
enabled = true
playlists_dir =
base_dir = $XDG_MUSIC_DIR
default_encoding = latin-1
default_extension = .m3u8

View File

@@ -0,0 +1,174 @@
import contextlib
import locale
import logging
import operator
import os
import pathlib
import tempfile
from mopidy import backend
from mopidy.internal import path
from . import Extension, translator
logger = logging.getLogger(__name__)
def log_environment_error(message, error):
    """Log an OS-level failure, decoding a bytes ``strerror`` if needed."""
    strerror = error.strerror
    if isinstance(strerror, bytes):
        strerror = strerror.decode(locale.getpreferredencoding())
    logger.error("%s: %s", message, strerror)
@contextlib.contextmanager
def replace(path, mode="w+b", encoding=None, errors=None):
    """Atomically replace ``path`` with whatever is written to the yielded
    file object.

    Data is written to a temp file in the same directory; on success it is
    flushed, fsynced, and renamed over ``path``; on failure it is removed.
    """
    (fd, tempname) = tempfile.mkstemp(dir=str(path.parent))
    tempname = pathlib.Path(tempname)
    try:
        fp = open(fd, mode, encoding=encoding, errors=errors)
    except Exception:
        # open() failed, so the fd was not consumed; clean up both.
        tempname.unlink()
        os.close(fd)
        raise
    try:
        yield fp
        fp.flush()
        os.fsync(fd)
        tempname.rename(path)
    except Exception:
        # Failure in the body or during flush/fsync/rename: drop the temp
        # file, leaving the original ``path`` untouched.
        tempname.unlink()
        raise
    finally:
        fp.close()
class M3UPlaylistsProvider(backend.PlaylistsProvider):
    """Playlists provider storing playlists as M3U files on disk.

    Playlist files live in ``playlists_dir``; relative entries inside a
    playlist are resolved against ``base_dir``.
    """

    def __init__(self, backend, config):
        super().__init__(backend)
        ext_config = config[Extension.ext_name]
        if ext_config["playlists_dir"] is None:
            self._playlists_dir = Extension.get_data_dir(config)
        else:
            self._playlists_dir = path.expand_path(ext_config["playlists_dir"])
        if ext_config["base_dir"] is None:
            self._base_dir = self._playlists_dir
        else:
            self._base_dir = path.expand_path(ext_config["base_dir"])
        self._default_encoding = ext_config["default_encoding"]
        self._default_extension = ext_config["default_extension"]

    def as_list(self):
        """Return refs for all .m3u/.m3u8 files, sorted by name."""
        result = []
        for entry in self._playlists_dir.iterdir():
            if entry.suffix not in [".m3u", ".m3u8"]:
                continue
            if not entry.is_file():
                continue
            playlist_path = entry.relative_to(self._playlists_dir)
            result.append(translator.path_to_ref(playlist_path))
        result.sort(key=operator.attrgetter("name"))
        return result

    def create(self, name):
        """Create an empty playlist; return its model, or None on error."""
        path = translator.path_from_name(name.strip(), self._default_extension)
        try:
            with self._open(path, "w"):
                pass
            mtime = self._abspath(path).stat().st_mtime
        except OSError as e:
            log_environment_error(f"Error creating playlist {name!r}", e)
        else:
            return translator.playlist(path, [], mtime)

    def delete(self, uri):
        """Delete the playlist file; return True on success."""
        path = translator.uri_to_path(uri)
        if not self._is_in_basedir(path):
            logger.debug("Ignoring path outside playlist dir: %s", uri)
            return False
        try:
            self._abspath(path).unlink()
        except OSError as e:
            log_environment_error(f"Error deleting playlist {uri!r}", e)
            return False
        else:
            return True

    def get_items(self, uri):
        """Return the playlist's track refs, or None on error."""
        path = translator.uri_to_path(uri)
        if not self._is_in_basedir(path):
            logger.debug("Ignoring path outside playlist dir: %s", uri)
            return None
        try:
            with self._open(path, "r") as fp:
                items = translator.load_items(fp, self._base_dir)
        except OSError as e:
            # Falls through to an implicit None return on error.
            log_environment_error(f"Error reading playlist {uri!r}", e)
        else:
            return items

    def lookup(self, uri):
        """Return the full playlist model, or None on error."""
        path = translator.uri_to_path(uri)
        if not self._is_in_basedir(path):
            logger.debug("Ignoring path outside playlist dir: %s", uri)
            return None
        try:
            with self._open(path, "r") as fp:
                items = translator.load_items(fp, self._base_dir)
            mtime = self._abspath(path).stat().st_mtime
        except OSError as e:
            log_environment_error(f"Error reading playlist {uri!r}", e)
        else:
            return translator.playlist(path, items, mtime)

    def refresh(self):
        pass  # nothing to do

    def save(self, playlist):
        """Write the playlist's tracks, renaming the file if the playlist
        was renamed; return the saved model, or None on error."""
        path = translator.uri_to_path(playlist.uri)
        if not self._is_in_basedir(path):
            logger.debug("Ignoring path outside playlist dir: %s", playlist.uri)
            return None
        name = translator.name_from_path(path)
        try:
            with self._open(path, "w") as fp:
                translator.dump_items(playlist.tracks, fp)
            if playlist.name and playlist.name != name:
                orig_path = path
                path = translator.path_from_name(playlist.name.strip())
                path = path.with_suffix(orig_path.suffix)
                self._abspath(orig_path).rename(self._abspath(path))
            mtime = self._abspath(path).stat().st_mtime
        except OSError as e:
            log_environment_error(f"Error saving playlist {playlist.uri!r}", e)
        else:
            return translator.playlist(path, playlist.tracks, mtime)

    def _abspath(self, path):
        # Resolve a playlist-relative path against the playlists dir.
        if not path.is_absolute():
            return self._playlists_dir / path
        return path

    def _is_in_basedir(self, local_path):
        local_path = self._abspath(local_path)
        return path.is_path_inside_base_dir(local_path, self._playlists_dir)

    def _open(self, path, mode="r"):
        # .m3u8 files are UTF-8 by definition; plain .m3u uses the
        # configured default encoding.
        if path.suffix == ".m3u8":
            encoding = "utf-8"
        else:
            encoding = self._default_encoding
        if not path.is_absolute():
            path = self._abspath(path)
        if not self._is_in_basedir(path):
            # Bug fix: this previously read `self._playlist_dir` (missing
            # "s"), raising AttributeError instead of the intended error.
            raise Exception(
                f"Path {path!r} is not inside playlist dir "
                f"{self._playlists_dir!r}"
            )
        if "w" in mode:
            return replace(path, mode, encoding=encoding, errors="replace")
        else:
            return path.open(mode, encoding=encoding, errors="replace")

View File

@@ -0,0 +1,87 @@
import os
import pathlib
import urllib
from mopidy import models
from mopidy.internal import path
from . import Extension
def path_to_uri(path, scheme=Extension.ext_name):
    """Convert file path to URI."""
    # Percent-encode the raw normalized bytes so undecodable names survive.
    quoted = urllib.parse.quote_from_bytes(os.path.normpath(bytes(path)))
    return urllib.parse.urlunsplit((scheme, None, quoted, None, None))
def uri_to_path(uri):
    """Convert URI to file path."""
    # Delegates to the shared helper so percent-decoding and surrogateescape
    # handling stay consistent across Mopidy.
    return path.uri_to_path(uri)
def name_from_path(path):
    """Extract name from file path."""
    raw = bytes(pathlib.Path(path.stem))
    try:
        return raw.decode(errors="replace")
    except UnicodeError:
        # Defensive: errors="replace" should never raise for UTF-8 input.
        return None
def path_from_name(name, ext=None, sep="|"):
    """Convert name with optional extension to file path."""
    # Path separators in the name would escape the playlists dir; replace
    # them with a harmless separator character.
    safe = name.replace(os.sep, sep)
    if ext:
        safe += ext
    return pathlib.Path(safe)
def path_to_ref(path):
    # Build a playlist Ref whose URI and display name both derive from path.
    return models.Ref.playlist(uri=path_to_uri(path), name=name_from_path(path))
def load_items(fp, basedir):
    """Parse an (extended) M3U file object into a list of track Refs.

    An ``#EXTINF`` comment names the *following* entry; ``name`` carries it
    from one loop iteration to the next.
    """
    refs = []
    name = None
    for line in filter(None, (line.strip() for line in fp)):
        if line.startswith("#"):
            if line.startswith("#EXTINF:"):
                name = line.partition(",")[2]
            continue
        elif not urllib.parse.urlsplit(line).scheme:
            # Bare path: resolve it relative to the playlist's base dir.
            path = basedir / line
            if not name:
                name = name_from_path(path)
            uri = path_to_uri(path, scheme="file")
        else:
            # TODO: ensure this is urlencoded
            uri = line  # do *not* extract name from (stream?) URI path
        refs.append(models.Ref.track(uri=uri, name=name))
        name = None
    return refs
def dump_items(items, fp):
    """Write ``items`` to ``fp`` in (extended) M3U format."""
    # Only emit the EXTM3U header when at least one item carries a name.
    if any(item.name for item in items):
        print("#EXTM3U", file=fp)
    for item in items:
        if item.name:
            print(f"#EXTINF:-1,{item.name}", file=fp)
        # TODO: convert file URIs to (relative) paths?
        uri = item.uri
        if isinstance(uri, bytes):
            uri = uri.decode()
        print(uri, file=fp)
def playlist(path, items=None, mtime=None):
    """Build a Playlist model for ``path``, wrapping ``items`` as tracks."""
    if items is None:
        items = []
    tracks = [models.Track(uri=item.uri, name=item.name) for item in items]
    last_modified = int(mtime * 1000) if mtime else None
    return models.Playlist(
        uri=path_to_uri(path),
        name=name_from_path(path),
        tracks=tracks,
        last_modified=last_modified,
    )

View File

@@ -0,0 +1,152 @@
import logging
from mopidy import listener
logger = logging.getLogger(__name__)
class Mixer:
    """
    Audio mixer API.

    If the mixer has problems during initialization it should raise
    :exc:`mopidy.exceptions.MixerError` with a descriptive error message. This
    will make Mopidy print the error message and exit so that the user can fix
    the issue.

    :param config: the entire Mopidy configuration
    :type config: dict
    """

    #: Name of the mixer. Used when configuring what mixer to use; should
    #: match the :attr:`~mopidy.ext.Extension.ext_name` of the extension
    #: providing the mixer.
    name = None

    def __init__(self, config):
        pass

    def get_volume(self):
        """
        Get volume level of the mixer on a linear scale from 0 to 100.

        0 is the minimum volume (usually silent), 100 the maximum, and
        :class:`None` means the volume is unknown.

        *MAY be implemented by subclass.*

        :rtype: int in range [0..100] or :class:`None`
        """
        return None

    def set_volume(self, volume):
        """
        Set volume level of the mixer.

        *MAY be implemented by subclass.*

        :param volume: Volume in the range [0..100]
        :type volume: int
        :rtype: :class:`True` if success, :class:`False` if failure
        """
        return False

    def trigger_volume_changed(self, volume):
        """
        Send ``volume_changed`` event to all mixer listeners.

        Subclasses should call this whenever the volume changes, whether
        through :meth:`set_volume` or through an external entity.
        """
        logger.debug("Mixer event: volume_changed(volume=%d)", volume)
        MixerListener.send("volume_changed", volume=volume)

    def get_mute(self):
        """
        Get mute state of the mixer.

        *MAY be implemented by subclass.*

        :rtype: :class:`True` if muted, :class:`False` if unmuted,
            :class:`None` if unknown.
        """
        return None

    def set_mute(self, mute):
        """
        Mute or unmute the mixer.

        *MAY be implemented by subclass.*

        :param mute: :class:`True` to mute, :class:`False` to unmute
        :type mute: bool
        :rtype: :class:`True` if success, :class:`False` if failure
        """
        return False

    def trigger_mute_changed(self, mute):
        """
        Send ``mute_changed`` event to all mixer listeners.

        Subclasses should call this whenever the mute state changes, whether
        through :meth:`set_mute` or through an external entity.
        """
        logger.debug("Mixer event: mute_changed(mute=%s)", mute)
        MixerListener.send("mute_changed", mute=mute)

    def ping(self):
        """Called to check if the actor is still alive."""
        return True
class MixerListener(listener.Listener):
    """
    Marker interface for recipients of events sent by the mixer actor.

    Any Pykka actor mixing in this class receives calls to the methods below
    when the corresponding events happen in the mixer actor. The interface is
    used both to look up which actors to notify and to provide no-op default
    implementations for listeners not interested in every event.
    """

    @staticmethod
    def send(event, **kwargs):
        """Helper to allow calling of mixer listener events"""
        listener.send(MixerListener, event, **kwargs)

    def volume_changed(self, volume):
        """
        Called after the volume has changed.

        *MAY* be implemented by actor.

        :param volume: the new volume
        :type volume: int in range [0..100]
        """
        pass

    def mute_changed(self, mute):
        """
        Called after the mute state has changed.

        *MAY* be implemented by actor.

        :param mute: :class:`True` if muted, :class:`False` if not muted
        :type mute: bool
        """
        pass

View File

@@ -0,0 +1,361 @@
from mopidy.models import fields
from mopidy.models.immutable import ImmutableObject, ValidatedImmutableObject
from mopidy.models.serialize import ModelJSONEncoder, model_json_decoder
__all__ = [
"ImmutableObject",
"Ref",
"Image",
"Artist",
"Album",
"Track",
"TlTrack",
"Playlist",
"SearchResult",
"model_json_decoder",
"ModelJSONEncoder",
"ValidatedImmutableObject",
]
class Ref(ValidatedImmutableObject):
    """
    Model to represent URI references with a human friendly name and type
    attached. This is intended for use a lightweight object "free" of metadata
    that can be passed around instead of using full blown models.

    :param uri: object URI
    :type uri: string
    :param name: object name
    :type name: string
    :param type: object type
    :type type: string
    """

    #: The object URI. Read-only.
    uri = fields.URI()
    #: The object name. Read-only.
    name = fields.String()
    #: The object type, e.g. "artist", "album", "track", "playlist",
    #: "directory". Read-only.
    type = fields.Identifier()  # TODO: consider locking this down.
    # type = fields.Field(choices=(ALBUM, ARTIST, DIRECTORY, PLAYLIST, TRACK))

    #: Constant used for comparison with the :attr:`type` field.
    ALBUM = "album"
    #: Constant used for comparison with the :attr:`type` field.
    ARTIST = "artist"
    #: Constant used for comparison with the :attr:`type` field.
    DIRECTORY = "directory"
    #: Constant used for comparison with the :attr:`type` field.
    PLAYLIST = "playlist"
    #: Constant used for comparison with the :attr:`type` field.
    TRACK = "track"

    @classmethod
    def album(cls, **kwargs):
        """Create a :class:`Ref` with ``type`` :attr:`ALBUM`."""
        return cls(**dict(kwargs, type=Ref.ALBUM))

    @classmethod
    def artist(cls, **kwargs):
        """Create a :class:`Ref` with ``type`` :attr:`ARTIST`."""
        return cls(**dict(kwargs, type=Ref.ARTIST))

    @classmethod
    def directory(cls, **kwargs):
        """Create a :class:`Ref` with ``type`` :attr:`DIRECTORY`."""
        return cls(**dict(kwargs, type=Ref.DIRECTORY))

    @classmethod
    def playlist(cls, **kwargs):
        """Create a :class:`Ref` with ``type`` :attr:`PLAYLIST`."""
        return cls(**dict(kwargs, type=Ref.PLAYLIST))

    @classmethod
    def track(cls, **kwargs):
        """Create a :class:`Ref` with ``type`` :attr:`TRACK`."""
        return cls(**dict(kwargs, type=Ref.TRACK))
class Image(ValidatedImmutableObject):
    """
    Model to represent an image referenced by URI, with optional dimensions.

    :param string uri: URI of the image
    :param int width: Optional width of image or :class:`None`
    :param int height: Optional height of image or :class:`None`
    """

    #: The image URI. Read-only.
    uri = fields.URI()
    #: Optional width of the image or :class:`None`. Read-only.
    width = fields.Integer(min=0)
    #: Optional height of the image or :class:`None`. Read-only.
    height = fields.Integer(min=0)
class Artist(ValidatedImmutableObject):
    """
    Model to represent an artist.

    :param uri: artist URI
    :type uri: string
    :param name: artist name
    :type name: string
    :param sortname: artist name for sorting
    :type sortname: string
    :param musicbrainz_id: MusicBrainz ID
    :type musicbrainz_id: string
    """

    #: The artist URI. Read-only.
    uri = fields.URI()
    #: The artist name. Read-only.
    name = fields.String()
    #: Artist name for better sorting, e.g. with articles stripped. Read-only.
    sortname = fields.String()
    #: The MusicBrainz ID of the artist. Read-only.
    musicbrainz_id = fields.Identifier()
class Album(ValidatedImmutableObject):
    """
    Model to represent an album.

    :param uri: album URI
    :type uri: string
    :param name: album name
    :type name: string
    :param artists: album artists
    :type artists: list of :class:`Artist`
    :param num_tracks: number of tracks in album
    :type num_tracks: integer or :class:`None` if unknown
    :param num_discs: number of discs in album
    :type num_discs: integer or :class:`None` if unknown
    :param date: album release date (YYYY or YYYY-MM-DD)
    :type date: string
    :param musicbrainz_id: MusicBrainz ID
    :type musicbrainz_id: string
    """

    #: The album URI. Read-only.
    uri = fields.URI()
    #: The album name. Read-only.
    name = fields.String()
    #: A set of album artists. Read-only.
    artists = fields.Collection(type=Artist, container=frozenset)
    #: The number of tracks in the album. Read-only.
    num_tracks = fields.Integer(min=0)
    #: The number of discs in the album. Read-only.
    num_discs = fields.Integer(min=0)
    #: The album release date. Read-only.
    date = fields.Date()
    #: The MusicBrainz ID of the album. Read-only.
    musicbrainz_id = fields.Identifier()
class Track(ValidatedImmutableObject):
    """
    Model to represent a track.

    :param uri: track URI
    :type uri: string
    :param name: track name
    :type name: string
    :param artists: track artists
    :type artists: list of :class:`Artist`
    :param album: track album
    :type album: :class:`Album`
    :param composers: track composers
    :type composers: list of :class:`Artist`
    :param performers: track performers
    :type performers: list of :class:`Artist`
    :param genre: track genre
    :type genre: string
    :param track_no: track number in album
    :type track_no: integer or :class:`None` if unknown
    :param disc_no: disc number in album
    :type disc_no: integer or :class:`None` if unknown
    :param date: track release date (YYYY or YYYY-MM-DD)
    :type date: string
    :param length: track length in milliseconds
    :type length: integer or :class:`None` if there is no duration
    :param bitrate: bitrate in kbit/s
    :type bitrate: integer
    :param comment: track comment
    :type comment: string
    :param musicbrainz_id: MusicBrainz ID
    :type musicbrainz_id: string
    :param last_modified: Represents last modification time
    :type last_modified: integer or :class:`None` if unknown
    """

    #: The track URI. Read-only.
    uri = fields.URI()
    #: The track name. Read-only.
    name = fields.String()
    #: A set of track artists. Read-only.
    artists = fields.Collection(type=Artist, container=frozenset)
    #: The track :class:`Album`. Read-only.
    album = fields.Field(type=Album)
    #: A set of track composers. Read-only.
    composers = fields.Collection(type=Artist, container=frozenset)
    #: A set of track performers. Read-only.
    performers = fields.Collection(type=Artist, container=frozenset)
    #: The track genre. Read-only.
    genre = fields.String()
    #: The track number in the album. Read-only.
    track_no = fields.Integer(min=0)
    #: The disc number in the album. Read-only.
    disc_no = fields.Integer(min=0)
    #: The track release date. Read-only.
    date = fields.Date()
    #: The track length in milliseconds. Read-only.
    length = fields.Integer(min=0)
    #: The track's bitrate in kbit/s. Read-only.
    bitrate = fields.Integer(min=0)
    #: The track comment. Read-only.
    comment = fields.String()
    #: The MusicBrainz ID of the track. Read-only.
    musicbrainz_id = fields.Identifier()
    #: Integer representing when the track was last modified. Exact meaning
    #: depends on source of track. For local files this is the modification
    #: time in milliseconds since Unix epoch. For other backends it could be an
    #: equivalent timestamp or simply a version counter.
    last_modified = fields.Integer(min=0)
class TlTrack(ValidatedImmutableObject):
    """
    A tracklist track. Wraps a regular track and its tracklist ID.

    The use of :class:`TlTrack` allows the same track to appear multiple times
    in the tracklist.

    This class also accepts its parameters as positional arguments. Both
    arguments must be provided, and they must appear in the order they are
    listed here.

    This class also supports iteration, so you can extract its values like
    this::

        (tlid, track) = tl_track

    :param tlid: tracklist ID
    :type tlid: int
    :param track: the track
    :type track: :class:`Track`
    """

    #: The tracklist ID. Read-only.
    tlid = fields.Integer(min=0)

    #: The track. Read-only.
    track = fields.Field(type=Track)

    def __init__(self, *args, **kwargs):
        # Support the positional form TlTrack(tlid, track) by mapping the two
        # positional arguments onto their keyword equivalents.
        if not kwargs and len(args) == 2:
            kwargs = {"tlid": args[0], "track": args[1]}
            args = []
        super().__init__(*args, **kwargs)

    def __iter__(self):
        # Allows tuple-style unpacking: (tlid, track) = tl_track
        yield self.tlid
        yield self.track
class Playlist(ValidatedImmutableObject):
    """
    A playlist model: an ordered collection of tracks with a URI and a name.

    :param uri: playlist URI
    :type uri: string
    :param name: playlist name
    :type name: string
    :param tracks: playlist's tracks
    :type tracks: list of :class:`Track` elements
    :param last_modified:
        playlist's modification time in milliseconds since Unix epoch
    :type last_modified: int
    """

    #: The playlist URI. Read-only.
    uri = fields.URI()

    #: The playlist name. Read-only.
    name = fields.String()

    #: The playlist's tracks. Read-only.
    tracks = fields.Collection(type=Track, container=tuple)

    #: The playlist modification time in milliseconds since Unix epoch.
    #: Read-only.
    #:
    #: Integer, or :class:`None` if unknown.
    last_modified = fields.Integer(min=0)

    # TODO: def insert(self, pos, track): ... ?

    @property
    def length(self):
        """The number of tracks in the playlist. Read-only."""
        return len(self.tracks)
class SearchResult(ValidatedImmutableObject):
    """
    The result of a library search: the matching tracks, artists and albums.

    :param uri: search result URI
    :type uri: string
    :param tracks: matching tracks
    :type tracks: list of :class:`Track` elements
    :param artists: matching artists
    :type artists: list of :class:`Artist` elements
    :param albums: matching albums
    :type albums: list of :class:`Album` elements
    """

    #: The search result URI. Read-only.
    uri = fields.URI()

    #: The tracks matching the search query. Read-only.
    tracks = fields.Collection(type=Track, container=tuple)

    #: The artists matching the search query. Read-only.
    artists = fields.Collection(type=Artist, container=tuple)

    #: The albums matching the search query. Read-only.
    albums = fields.Collection(type=Album, container=tuple)

View File

@@ -0,0 +1,179 @@
import sys
class Field:
    """
    Base field for use in
    :class:`~mopidy.models.immutable.ValidatedImmutableObject`. These fields
    are responsible for type checking and other data sanitation in our models.

    For simplicity fields use the Python descriptor protocol to store the
    values in the instance dictionary. Also note that fields are mutable if
    the object they are attached to allow it.

    Default values will be validated with the exception of :class:`None`.

    :param default: default value for field
    :param type: if set the field value must be of this type
    :param choices: if set the field value must be one of these
    """

    def __init__(self, default=None, type=None, choices=None):
        self._name = None  # Set by ValidatedImmutableObjectMeta
        self._choices = choices
        self._default = default
        self._type = type

        if self._default is not None:
            self.validate(self._default)

    def validate(self, value):
        """Validate and possibly modify the field value before assignment.

        :raises TypeError: if the value has the wrong type or is not one of
            the allowed choices
        :returns: the (possibly normalized) value
        """
        if self._type and not isinstance(value, self._type):
            raise TypeError(
                f"Expected {self._name} to be a {self._type}, not {value!r}"
            )
        if self._choices and value not in self._choices:
            raise TypeError(
                f"Expected {self._name} to be one of {self._choices}, not {value!r}"
            )
        return value

    def __get__(self, instance, owner):
        # Use an identity check per the descriptor protocol: access through
        # the class passes instance=None, while a *falsy* model instance
        # (e.g. one defining __len__/__bool__) must still return its field
        # value. The previous truthiness test ("if not instance") returned
        # the Field object itself for any falsy instance.
        if instance is None:
            return self
        return getattr(instance, "_" + self._name, self._default)

    def __set__(self, instance, value):
        if value is not None:
            value = self.validate(value)

        # Values equal to the default are not stored, keeping instances small.
        if value is None or value == self._default:
            self.__delete__(instance)
        else:
            setattr(instance, "_" + self._name, value)

    def __delete__(self, instance):
        if hasattr(instance, "_" + self._name):
            delattr(instance, "_" + self._name)
class String(Field):
    """
    Specialized :class:`Field` which is wired up for bytes and unicode.

    :param default: default value for field
    """

    def __init__(self, default=None):
        # TODO: normalize to unicode?
        # TODO: only allow unicode?
        # TODO: disallow empty strings?
        super().__init__(default=default, type=str)
class Date(String):
    """
    :class:`Field` for storing ISO 8601 dates as a string.

    Supported formats are ``YYYY-MM-DD``, ``YYYY-MM`` and ``YYYY``, currently
    not validated.

    :param default: default value for field
    """

    pass  # TODO: make this check for YYYY-MM-DD, YYYY-MM, YYYY using strptime.
class Identifier(String):
    """
    :class:`Field` for storing values such as GUIDs or other identifiers.

    Values will be interned.

    :param default: default value for field
    """

    def validate(self, value):
        """Validate the value and return an interned copy of it.

        ``super().validate()`` already enforces ``str`` (``String`` sets
        ``type=str`` and raises :exc:`TypeError` for anything else), so the
        previous ``bytes`` decoding branch here was unreachable and has been
        removed. Interning makes equality checks and dict lookups on repeated
        identifiers cheap and lets duplicates share memory.
        """
        value = super().validate(value)
        return sys.intern(value)
class URI(Identifier):
    """
    :class:`Field` for storing URIs

    Values will be interned, currently not validated.

    :param default: default value for field
    """

    pass  # TODO: validate URIs?
class Integer(Field):
    """
    :class:`Field` for storing integer numbers.

    :param default: default value for field
    :param min: field value must be larger or equal to this value when set
    :param max: field value must be smaller or equal to this value when set
    """

    def __init__(self, default=None, min=None, max=None):
        # Bounds must be stored before Field.__init__ runs, as it validates
        # any non-None default through our validate() below.
        self._min = min
        self._max = max
        super().__init__(default=default, type=int)

    def validate(self, value):
        """Type-check via the base class, then enforce the optional bounds."""
        value = super().validate(value)
        lower, upper = self._min, self._max
        if lower is not None and value < lower:
            raise ValueError(
                f"Expected {self._name} to be at least {lower}, not {value:d}"
            )
        if upper is not None and value > upper:
            raise ValueError(
                f"Expected {self._name} to be at most {upper}, not {value:d}"
            )
        return value
class Boolean(Field):
    """
    :class:`Field` for storing boolean values

    :param default: default value for field
    """

    def __init__(self, default=None):
        super().__init__(default=default, type=bool)
class Collection(Field):
    """
    :class:`Field` for storing collections of a given type.

    :param type: all items stored in the collection must be of this type
    :param container: the type to store the items in
    """

    def __init__(self, type, container=tuple):
        # The default is an empty instance of the container, which also
        # records which container class validate() should normalize into.
        super().__init__(type=type, default=container())

    def validate(self, value):
        """Check every item's type and normalize into the container type."""
        # A plain string is iterable, but almost certainly a caller mistake
        # rather than a collection of items, so reject it outright.
        if isinstance(value, str):
            raise TypeError(
                f"Expected {self._name} to be a collection of "
                f"{self._type.__name__}, not {value!r}"
            )
        for item in value:
            if not isinstance(item, self._type):
                raise TypeError(
                    f"Expected {self._name} to be a collection of "
                    f"{self._type.__name__}, not {value!r}"
                )
        # An empty collection collapses to None so that Field.__set__ falls
        # back to the default instead of storing it.
        return self._default.__class__(value) or None

View File

@@ -0,0 +1,219 @@
import copy
import itertools
import weakref
from mopidy.models.fields import Field
# Registered models for automatic deserialization
_models = {}
class ImmutableObject:
    """
    Superclass for immutable objects whose fields can only be modified via the
    constructor.

    This version of this class has been retained to avoid breaking any clients
    relying on its behavior. Internally in Mopidy we now use
    :class:`ValidatedImmutableObject` for type safety and its much smaller
    memory footprint.

    :param kwargs: kwargs to set as fields on the object
    :type kwargs: any
    """

    # Any sub-classes that don't set slots won't be affected by the base using
    # slots as they will still get an instance dict.
    __slots__ = ["__weakref__"]

    def __init__(self, *args, **kwargs):
        for key, value in kwargs.items():
            if not self._is_valid_field(key):
                raise TypeError(
                    f"__init__() got an unexpected keyword argument {key!r}"
                )
            self._set_field(key, value)

    def __setattr__(self, name, value):
        # Only private attributes may be set after construction; everything
        # else is frozen.
        if not name.startswith("_"):
            raise AttributeError("Object is immutable.")
        object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if not name.startswith("_"):
            raise AttributeError("Object is immutable.")
        object.__delattr__(self, name)

    def _is_valid_field(self, name):
        # A field is any non-callable attribute already present on the class.
        if not hasattr(self, name):
            return False
        return not callable(getattr(self, name))

    def _set_field(self, name, value):
        # Values equal to the class-level default are not stored per-instance.
        default = getattr(self.__class__, name)
        if value == default:
            self.__dict__.pop(name, None)
        else:
            self.__dict__[name] = value

    def _items(self):
        return self.__dict__.items()

    def __repr__(self):
        parts = []
        for key, value in sorted(self._items()):
            if isinstance(value, (frozenset, tuple)):
                if not value:
                    continue  # Skip empty collections entirely.
                value = list(value)
            parts.append(f"{key}={value!r}")
        return f"{self.__class__.__name__}({', '.join(parts)})"

    def __hash__(self):
        # Order-independent combination of all stored fields.
        return sum(hash(key) + hash(value) for key, value in self._items())

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        # zip_longest with a unique fillvalue makes differing lengths unequal.
        pairs = itertools.zip_longest(
            self._items(), other._items(), fillvalue=object()
        )
        return all(mine == theirs for mine, theirs in pairs)

    def __ne__(self, other):
        return not self.__eq__(other)

    def replace(self, **kwargs):
        """
        Replace the fields in the model and return a new instance

        Examples::

            # Returns a track with a new name
            Track(name='foo').replace(name='bar')
            # Return an album with a new number of tracks
            Album(num_tracks=2).replace(num_tracks=5)

        :param kwargs: kwargs to set as fields on the object
        :type kwargs: any
        :rtype: instance of the model with replaced fields
        """
        other = copy.copy(self)
        for key, value in kwargs.items():
            if not self._is_valid_field(key):
                raise TypeError(
                    f"replace() got an unexpected keyword argument {key!r}"
                )
            other._set_field(key, value)
        return other

    def serialize(self):
        """Return a JSON-friendly dict representation of this model."""
        data = {"__model__": self.__class__.__name__}
        for key, value in self._items():
            if isinstance(value, (set, frozenset, list, tuple)):
                value = [
                    item.serialize() if isinstance(item, ImmutableObject) else item
                    for item in value
                ]
            elif isinstance(value, ImmutableObject):
                value = value.serialize()
            if isinstance(value, list) and not value:
                continue  # Empty collections are omitted from the output.
            data[key] = value
        return data
class _ValidatedImmutableObjectMeta(type):
    """Helper that initializes fields, slots and memoizes instance creation."""

    def __new__(cls, name, bases, attrs):
        # Collect Field descriptors from the whole inheritance chain into a
        # single mapping of public name -> private storage attribute name.
        fields = {}
        for base in bases:  # Copy parent fields over to our state
            fields.update(getattr(base, "_fields", {}))
        for key, value in attrs.items():  # Add our own fields
            if isinstance(value, Field):
                fields[key] = "_" + key
                value._name = key  # Tell the descriptor its attribute name.
        attrs["_fields"] = fields
        # Per-class weak cache used by __call__ to memoize instances.
        attrs["_instances"] = weakref.WeakValueDictionary()
        # Reserve one slot per field so instances don't need a __dict__.
        attrs["__slots__"] = list(attrs.get("__slots__", [])) + list(
            fields.values()
        )
        clsc = super().__new__(cls, name, bases, attrs)
        if clsc.__name__ != "ValidatedImmutableObject":
            # Register concrete models for automatic JSON deserialization.
            _models[clsc.__name__] = clsc
        return clsc

    def __call__(cls, *args, **kwargs):  # noqa: N805
        # Memoize instance creation: if an equal instance is already cached,
        # return it instead of the newly built one. weakref.ref objects hash
        # and compare via their (live) referents, so equal instances map to
        # the same cache entry while dead entries expire automatically.
        instance = super().__call__(*args, **kwargs)
        return cls._instances.setdefault(weakref.ref(instance), instance)
class ValidatedImmutableObject(
    ImmutableObject, metaclass=_ValidatedImmutableObjectMeta
):
    """
    Superclass for immutable objects whose fields can only be modified via the
    constructor. Fields should be :class:`Field` instances to ensure type
    safety in our models.

    Note that since these models can not be changed, we heavily memoize them
    to save memory. So constructing a class with the same arguments twice will
    give you the same instance twice.
    """

    __slots__ = ["_hash"]

    def __hash__(self):
        # Hashing walks every field, so cache the result on first use.
        try:
            return self._hash
        except AttributeError:
            object.__setattr__(self, "_hash", super().__hash__())
            return self._hash

    def _is_valid_field(self, name):
        return name in self._fields

    def _set_field(self, name, value):
        # Bypass the immutability guard in __setattr__.
        object.__setattr__(self, name, value)

    def _items(self):
        # Yield (public name, value) for every field that is actually stored.
        for public_name, slot_name in self._fields.items():
            if hasattr(self, slot_name):
                yield public_name, getattr(self, slot_name)

    def replace(self, **kwargs):
        """
        Replace the fields in the model and return a new instance

        Examples::

            # Returns a track with a new name
            Track(name='foo').replace(name='bar')
            # Return an album with a new number of tracks
            Album(num_tracks=2).replace(num_tracks=5)

        Note that internally we memoize heavily to keep memory usage down given
        our overly repetitive data structures. So you might get an existing
        instance if it contains the same values.

        :param kwargs: kwargs to set as fields on the object
        :type kwargs: any
        :rtype: instance of the model with replaced fields
        """
        if not kwargs:
            return self
        other = super().replace(**kwargs)
        # The copy may carry this instance's cached hash; drop it so it is
        # recomputed for the new field values.
        if hasattr(self, "_hash"):
            object.__delattr__(other, "_hash")
        return self._instances.setdefault(weakref.ref(other), other)

View File

@@ -0,0 +1,43 @@
import json
from mopidy.models import immutable
class ModelJSONEncoder(json.JSONEncoder):
    """
    Automatically serialize Mopidy models to JSON.

    Usage::

        >>> import json
        >>> json.dumps({'a_track': Track(name='name')}, cls=ModelJSONEncoder)
        '{"a_track": {"__model__": "Track", "name": "name"}}'

    """

    def default(self, obj):
        # Models know how to serialize themselves; everything else goes to
        # the stock encoder, which raises TypeError for unsupported types.
        if not isinstance(obj, immutable.ImmutableObject):
            return super().default(obj)
        return obj.serialize()
def model_json_decoder(dct):
    """
    Automatically deserialize Mopidy models from JSON.

    Usage::

        >>> import json
        >>> json.loads(
        ...     '{"a_track": {"__model__": "Track", "name": "name"}}',
        ...     object_hook=model_json_decoder)
        {u'a_track': Track(artists=[], name=u'name')}

    """
    if "__model__" not in dct:
        return dct
    model_name = dct.pop("__model__")
    model_cls = immutable._models.get(model_name)
    if model_cls is not None:
        return model_cls(**dct)
    # Unknown model name: fall through to the plain dict (marker removed).
    return dct

View File

@@ -0,0 +1,24 @@
import os
import mopidy
from mopidy import config, ext
class Extension(ext.Extension):
    """Bundled extension wiring up the software (GStreamer volume) mixer."""

    dist_name = "Mopidy-SoftwareMixer"
    ext_name = "softwaremixer"
    version = mopidy.__version__

    def get_default_config(self):
        """Read the bundled ext.conf next to this module."""
        ext_conf = os.path.join(os.path.dirname(__file__), "ext.conf")
        return config.read(ext_conf)

    def get_config_schema(self):
        # No extension-specific options beyond the common ones.
        return super().get_config_schema()

    def setup(self, registry):
        from .mixer import SoftwareMixer

        registry.add("mixer", SoftwareMixer)

View File

@@ -0,0 +1,2 @@
[softwaremixer]
enabled = true

View File

@@ -0,0 +1,58 @@
import logging
import pykka
from mopidy import mixer
logger = logging.getLogger(__name__)
class SoftwareMixer(pykka.ThreadingActor, mixer.Mixer):
    """Mixer that delegates volume/mute control to the audio actor's mixer."""

    name = "software"

    def __init__(self, config):
        super().__init__(config)

        # Injected by setup(); None until then.
        self._audio_mixer = None
        # Requests received before setup(), replayed once the mixer arrives.
        self._initial_volume = None
        self._initial_mute = None

    def setup(self, mixer_ref):
        self._audio_mixer = mixer_ref

        # The Mopidy startup procedure will set the initial volume of a
        # mixer, but this happens before the audio actor's mixer is injected
        # into the software mixer actor and has no effect. Thus, we need to set
        # the initial volume again.
        if self._initial_volume is not None:
            self.set_volume(self._initial_volume)
        if self._initial_mute is not None:
            self.set_mute(self._initial_mute)

    def teardown(self):
        self._audio_mixer = None

    def get_volume(self):
        audio_mixer = self._audio_mixer
        if audio_mixer is None:
            return None
        return audio_mixer.get_volume().get()

    def set_volume(self, volume):
        if self._audio_mixer is None:
            # Remember the request until setup() injects the audio mixer.
            self._initial_volume = volume
            return False
        self._audio_mixer.set_volume(volume)
        return True

    def get_mute(self):
        audio_mixer = self._audio_mixer
        if audio_mixer is None:
            return None
        return audio_mixer.get_mute().get()

    def set_mute(self, mute):
        if self._audio_mixer is None:
            # Remember the request until setup() injects the audio mixer.
            self._initial_mute = mute
            return False
        self._audio_mixer.set_mute(mute)
        return True

View File

@@ -0,0 +1,30 @@
import os
import mopidy
from mopidy import config, ext
class Extension(ext.Extension):
    """Bundled extension providing playback of network streams."""

    dist_name = "Mopidy-Stream"
    ext_name = "stream"
    version = mopidy.__version__

    def get_default_config(self):
        """Read the bundled ext.conf next to this module."""
        ext_conf = os.path.join(os.path.dirname(__file__), "ext.conf")
        return config.read(ext_conf)

    def get_config_schema(self):
        schema = super().get_config_schema()
        schema["protocols"] = config.List()
        schema["metadata_blacklist"] = config.List(optional=True)
        # Timeout in milliseconds, from one second up to one hour.
        schema["timeout"] = config.Integer(minimum=1000, maximum=1000 * 60 * 60)
        return schema

    def validate_environment(self):
        # Nothing to check beyond what Mopidy core already verifies.
        pass

    def setup(self, registry):
        from .actor import StreamBackend

        registry.add("backend", StreamBackend)

View File

@@ -0,0 +1,187 @@
import fnmatch
import logging
import re
import time
import urllib
import pykka
from mopidy import audio as audio_lib
from mopidy import backend, exceptions, stream
from mopidy.audio import scan, tags
from mopidy.internal import http, playlists
from mopidy.models import Track
logger = logging.getLogger(__name__)
class StreamBackend(pykka.ThreadingActor, backend.Backend):
    """Backend playing arbitrary network streams and unwrapping playlists."""

    def __init__(self, config, audio):
        super().__init__()

        stream_config = config["stream"]
        proxy_config = config["proxy"]

        self._scanner = scan.Scanner(
            timeout=stream_config["timeout"], proxy_config=proxy_config
        )

        user_agent = f"{stream.Extension.dist_name}/{stream.Extension.version}"
        self._session = http.get_requests_session(
            proxy_config=proxy_config, user_agent=user_agent
        )

        # URIs matching any of these fnmatch patterns skip metadata lookup.
        patterns = "|".join(
            fnmatch.translate(u)
            for u in stream_config["metadata_blacklist"]
        )
        self._blacklist_re = re.compile(r"^(%s)$" % patterns)

        self._timeout = stream_config["timeout"]

        self.library = StreamLibraryProvider(backend=self)
        self.playback = StreamPlaybackProvider(audio=audio, backend=self)
        self.playlists = None

        self.uri_schemes = audio_lib.supported_uri_schemes(
            stream_config["protocols"]
        )

        if "file" in self.uri_schemes and config["file"]["enabled"]:
            logger.warning(
                'The stream/protocols config value includes the "file" '
                'protocol. "file" playback is now handled by Mopidy-File. '
                "Please remove it from the stream/protocols config."
            )
            self.uri_schemes -= {"file"}
class StreamLibraryProvider(backend.LibraryProvider):
    def lookup(self, uri):
        """Return a single-element track list for *uri*, scanning it for
        metadata unless the URI scheme is unsupported or blacklisted."""
        scheme = urllib.parse.urlsplit(uri).scheme
        if scheme not in self.backend.uri_schemes:
            return []

        if self.backend._blacklist_re.match(uri):
            logger.debug("URI matched metadata lookup blacklist: %s", uri)
            return [Track(uri=uri)]

        _, scan_result = _unwrap_stream(
            uri,
            timeout=self.backend._timeout,
            scanner=self.backend._scanner,
            requests_session=self.backend._session,
        )

        if not scan_result:
            logger.warning("Problem looking up %s", uri)
            return [Track(uri=uri)]

        track = tags.convert_tags_to_track(scan_result.tags).replace(
            uri=uri, length=scan_result.duration
        )
        return [track]
class StreamPlaybackProvider(backend.PlaybackProvider):
    def translate_uri(self, uri):
        """Resolve *uri* to a directly playable stream URI, or None."""
        scheme = urllib.parse.urlsplit(uri).scheme
        if scheme not in self.backend.uri_schemes:
            return None

        if self.backend._blacklist_re.match(uri):
            logger.debug("URI matched metadata lookup blacklist: %s", uri)
            return uri

        playable_uri, _ = _unwrap_stream(
            uri,
            timeout=self.backend._timeout,
            scanner=self.backend._scanner,
            requests_session=self.backend._session,
        )
        return playable_uri
# TODO: cleanup the return value of this.
def _unwrap_stream(uri, timeout, scanner, requests_session):
    """
    Get a stream URI from a playlist URI, ``uri``.

    Unwraps nested playlists until something that's not a playlist is found or
    the ``timeout`` is reached.

    :param uri: the (possibly playlist) URI to resolve
    :param timeout: total time budget for the whole unwrapping process.
        NOTE(review): the log messages treat this as milliseconds, but the
        value is added directly to ``time.time()`` seconds below — confirm
        the intended unit.
    :param scanner: scanner used to probe each candidate URI
    :param requests_session: HTTP session used to download playlists
    :returns: an ``(uri, scan_result)`` pair; both elements are :class:`None`
        on failure, and ``scan_result`` is :class:`None` when the URI was
        found by playlist parsing rather than by a successful scan
    """
    original_uri = uri
    seen_uris = set()  # Guards against playlists that reference themselves.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if uri in seen_uris:
            logger.info(
                "Unwrapping stream from URI (%s) failed: "
                "playlist referenced itself",
                uri,
            )
            return None, None
        else:
            seen_uris.add(uri)

        logger.debug("Unwrapping stream from URI: %s", uri)

        try:
            scan_timeout = deadline - time.time()
            if scan_timeout < 0:
                logger.info(
                    "Unwrapping stream from URI (%s) failed: "
                    "timed out in %sms",
                    uri,
                    timeout,
                )
                return None, None
            scan_result = scanner.scan(uri, timeout=scan_timeout)
        except exceptions.ScannerError as exc:
            logger.debug("GStreamer failed scanning URI (%s): %s", uri, exc)
            scan_result = None

        if scan_result is not None:
            # text/* and application/* MIME types usually indicate a playlist
            # rather than an actual audio stream.
            has_interesting_mime = (
                scan_result.mime is not None
                and not scan_result.mime.startswith("text/")
                and not scan_result.mime.startswith("application/")
            )
            if scan_result.playable or has_interesting_mime:
                logger.debug(
                    "Unwrapped potential %s stream: %s", scan_result.mime, uri
                )
                return uri, scan_result

        download_timeout = deadline - time.time()
        if download_timeout < 0:
            logger.info(
                "Unwrapping stream from URI (%s) failed: timed out in %sms",
                uri,
                timeout,
            )
            return None, None
        content = http.download(
            requests_session, uri, timeout=download_timeout / 1000
        )

        if content is None:
            logger.info(
                "Unwrapping stream from URI (%s) failed: "
                "error downloading URI %s",
                original_uri,
                uri,
            )
            return None, None

        uris = playlists.parse(content)
        if not uris:
            logger.debug(
                "Failed parsing URI (%s) as playlist; found potential stream.",
                uri,
            )
            return uri, None

        # TODO Test streams and return first that seems to be playable
        new_uri = uris[0]
        logger.debug("Parsed playlist (%s) and found new URI: %s", uri, new_uri)
        uri = urllib.parse.urljoin(uri, new_uri)

    # BUGFIX: previously the loop could terminate via its condition (clock
    # passing the deadline between the inner checks and the loop test),
    # falling off the end and implicitly returning None. Both callers unpack
    # the result as a 2-tuple, so that raised TypeError instead of failing
    # gracefully. Return an explicit pair instead.
    logger.info(
        "Unwrapping stream from URI (%s) failed: timed out in %sms",
        original_uri,
        timeout,
    )
    return None, None

View File

@@ -0,0 +1,11 @@
[stream]
enabled = true
protocols =
http
https
mms
rtmp
rtmps
rtsp
timeout = 5000
metadata_blacklist =

View File

@@ -0,0 +1,150 @@
import logging
import string
logger = logging.getLogger(__name__)
try:
import dbus
except ImportError:
dbus = None
_AVAHI_IF_UNSPEC = -1
_AVAHI_PROTO_UNSPEC = -1
_AVAHI_PUBLISHFLAGS_NONE = 0
def _is_loopback_address(host):
return (
host.startswith("127.")
or host.startswith("::ffff:127.")
or host == "::1"
)
def _convert_text_list_to_dbus_format(text_list):
    """Encode a list of strings as a D-Bus array of byte arrays ("aay")."""
    array = dbus.Array(signature="ay")
    for text in text_list:
        # Each string becomes a list of dbus.Byte values, one per character.
        array.append([dbus.Byte(ord(char)) for char in text])
    return array
class Zeroconf:
"""Publish a network service with Zeroconf.
Currently, this only works on Linux using Avahi via D-Bus.
:param str name: human readable name of the service, e.g. 'MPD on neptune'
:param str stype: service type, e.g. '_mpd._tcp'
:param int port: TCP port of the service, e.g. 6600
:param str domain: local network domain name, defaults to ''
:param str host: interface to advertise the service on, defaults to ''
:param text: extra information depending on ``stype``, defaults to empty
list
:type text: list of str
"""
    def __init__(self, name, stype, port, domain="", host="", text=None):
        self.stype = stype
        self.port = port
        self.domain = domain
        self.host = host
        self.text = text or []

        # D-Bus state; stays None when dbus is unavailable or Avahi can't be
        # reached, in which case publish() will bail out early.
        self.bus = None
        self.server = None
        self.group = None
        self.display_hostname = None
        self.name = None

        if dbus:
            try:
                self.bus = dbus.SystemBus()
                self.server = dbus.Interface(
                    self.bus.get_object("org.freedesktop.Avahi", "/"),
                    "org.freedesktop.Avahi.Server",
                )
                self.display_hostname = f"{self.server.GetHostName()}"
                # Expand $hostname and $port placeholders in the service name.
                self.name = string.Template(name).safe_substitute(
                    hostname=self.display_hostname, port=port
                )
            except dbus.exceptions.DBusException as e:
                logger.debug("%s: Server failed: %s", self, e)
def __str__(self):
return (
f"Zeroconf service {self.name!r} "
f"({self.stype} at [{self.host}]:{self.port:d})"
)
    def publish(self):
        """Publish the service.

        Call when your service starts.

        :returns: :class:`True` if the service was registered with Avahi,
            :class:`False` on any failure (loopback-only host, missing dbus,
            Avahi not running, or a D-Bus error).
        """
        # Avahi only announces services on real network interfaces, so a
        # loopback-bound service has nothing useful to publish.
        if _is_loopback_address(self.host):
            logger.debug(
                "%s: Publish on loopback interface is not supported.", self
            )
            return False

        if not dbus:
            logger.debug("%s: dbus not installed; publish failed.", self)
            return False

        if not self.bus:
            logger.debug("%s: Bus not available; publish failed.", self)
            return False

        if not self.server:
            logger.debug("%s: Server not available; publish failed.", self)
            return False

        try:
            if not self.bus.name_has_owner("org.freedesktop.Avahi"):
                logger.debug(
                    "%s: Avahi service not running; publish failed.", self
                )
                return False

            # Register our service inside a fresh Avahi entry group; the
            # group handle is kept so unpublish() can remove it again.
            self.group = dbus.Interface(
                self.bus.get_object(
                    "org.freedesktop.Avahi", self.server.EntryGroupNew()
                ),
                "org.freedesktop.Avahi.EntryGroup",
            )

            self.group.AddService(
                _AVAHI_IF_UNSPEC,
                _AVAHI_PROTO_UNSPEC,
                dbus.UInt32(_AVAHI_PUBLISHFLAGS_NONE),
                self.name,
                self.stype,
                self.domain,
                self.host,
                dbus.UInt16(self.port),
                _convert_text_list_to_dbus_format(self.text),
            )

            self.group.Commit()
            logger.debug("%s: Published", self)
            return True
        except dbus.exceptions.DBusException as e:
            logger.debug("%s: Publish failed: %s", self, e)
            return False
def unpublish(self):
"""Unpublish the service.
Call when your service shuts down.
"""
if self.group:
try:
self.group.Reset()
logger.debug("%s: Unpublished", self)
except dbus.exceptions.DBusException as e:
logger.debug("%s: Unpublish failed: %s", self, e)
finally:
self.group = None