Init: mediaserver

This commit is contained in:
2023-02-08 12:13:28 +01:00
parent 848bc9739c
commit f7c23d4ba9
31914 changed files with 6175775 additions and 0 deletions

View File

@@ -0,0 +1,64 @@
import enum
import os
from ..const import IS_WINDOWS_PLATFORM
# Base ANSI foreground color names, ordered to match the standard SGR
# codes 30-37: index i in this list maps to escape code 30 + i.
NAMES = [
    'grey',
    'red',
    'green',
    'yellow',
    'blue',
    'magenta',
    'cyan',
    'white'
]
@enum.unique
class AnsiMode(enum.Enum):
    """Enumeration for when to output ANSI colors."""

    NEVER = "never"
    ALWAYS = "always"
    AUTO = "auto"

    def use_ansi_codes(self, stream):
        """Decide whether ANSI escapes should be written to *stream*."""
        if self is AnsiMode.ALWAYS:
            return True
        # Honour the CLICOLOR=0 convention for both NEVER and AUTO.
        if self is AnsiMode.NEVER or os.environ.get('CLICOLOR') == '0':
            return False
        # AUTO: color only when writing to an interactive terminal.
        return stream.isatty()
def get_pairs():
    """Yield (color name, ANSI code) pairs, normal then intense variants."""
    for index, color_name in enumerate(NAMES):
        base_code = str(30 + index)
        yield (color_name, base_code)
        # ';1' selects the bold/bright rendition of the same color.
        yield ('intense_' + color_name, base_code + ';1')
def ansi(code):
    """Wrap *code* in an ANSI SGR escape sequence (ESC [ code m)."""
    return '\033[' + str(code) + 'm'
def ansi_color(code, s):
    """Return *s* wrapped in the given ANSI color code plus a reset."""
    reset = ansi(0)
    return ansi(code) + s + reset
def make_color_fn(code):
    """Build a function that colors its single string argument with *code*."""
    def colorize(s):
        return ansi_color(code, s)
    return colorize
# On Windows, ANSI escapes only work once colorama wraps the output
# streams; strip=False keeps the escape codes in the output intact.
if IS_WINDOWS_PLATFORM:
    import colorama
    colorama.init(strip=False)
# Expose one module-level coloring helper per name from get_pairs(),
# e.g. red(), yellow(), intense_cyan().
for (name, code) in get_pairs():
    globals()[name] = make_color_fn(code)
def rainbow():
    """Yield a fixed palette of color functions (for multiplexed log output)."""
    palette = [
        'cyan', 'yellow', 'green', 'magenta', 'blue',
        'intense_cyan', 'intense_yellow', 'intense_green',
        'intense_magenta', 'intense_blue',
    ]
    for color_name in palette:
        # The color functions are installed into globals() at import time.
        yield globals()[color_name]

View File

@@ -0,0 +1,210 @@
import logging
import os
import re
from . import errors
from .. import config
from .. import parallel
from ..config.environment import Environment
from ..const import LABEL_CONFIG_FILES
from ..const import LABEL_ENVIRONMENT_FILE
from ..const import LABEL_WORKING_DIR
from ..project import Project
from .docker_client import get_client
from .docker_client import load_context
from .docker_client import make_context
from .errors import UserError
log = logging.getLogger(__name__)
# Commands whose output is commonly piped/parsed; when one of these is the
# active COMMAND, environment loading is put into silent mode (see
# project_from_options) so warnings don't pollute the output.
SILENT_COMMANDS = {
    'events',
    'exec',
    'kill',
    'logs',
    'pause',
    'ps',
    'restart',
    'rm',
    'start',
    'stop',
    'top',
    'unpause',
}
def project_from_options(project_dir, options, additional_options=None):
    """Build a Project from parsed command-line ``options``.

    Resolves the working directory, environment file, Docker context/host
    and compose file paths, then delegates to :func:`get_project`.
    Raises UserError for an unknown --context or conflicting --host/--context.
    """
    additional_options = additional_options or {}
    override_dir = get_project_dir(options)
    environment_file = options.get('--env-file')
    environment = Environment.from_env_file(override_dir or project_dir, environment_file)
    # Keep environment warnings quiet for commands whose output is parsed.
    environment.silent = options.get('COMMAND', None) in SILENT_COMMANDS
    set_parallel_limit(environment)
    # get the context for the run
    context = None
    context_name = options.get('--context', None)
    if context_name:
        context = load_context(context_name)
        if not context:
            raise UserError("Context '{}' not found".format(context_name))
    host = options.get('--host', None)
    if host is not None:
        if context:
            raise UserError(
                "-H, --host and -c, --context are mutually exclusive. Only one should be set.")
        # docopt can leave a leading '=' on `--host=tcp://...`; strip it.
        host = host.lstrip('=')
        context = make_context(host, options, environment)
    return get_project(
        project_dir,
        get_config_path_from_options(options, environment),
        project_name=options.get('--project-name'),
        verbose=options.get('--verbose'),
        context=context,
        environment=environment,
        override_dir=override_dir,
        interpolate=(not additional_options.get('--no-interpolate')),
        environment_file=environment_file,
        enabled_profiles=get_profiles_from_options(options, environment)
    )
def set_parallel_limit(environment):
    """Apply COMPOSE_PARALLEL_LIMIT from the environment, after validation."""
    raw_limit = environment.get('COMPOSE_PARALLEL_LIMIT')
    if not raw_limit:
        return
    try:
        limit = int(raw_limit)
    except ValueError:
        raise errors.UserError(
            'COMPOSE_PARALLEL_LIMIT must be an integer (found: "{}")'.format(
                environment.get('COMPOSE_PARALLEL_LIMIT')
            )
        )
    # A limit of 1 would serialize everything and deadlock dependency waits.
    if limit <= 1:
        raise errors.UserError('COMPOSE_PARALLEL_LIMIT can not be less than 2')
    parallel.GlobalLimit.set_global_limit(limit)
def get_project_dir(options):
    """Return the effective project directory implied by the CLI options."""
    files = get_config_path_from_options(options, os.environ)
    derived = None
    if files:
        # Config streamed on stdin: anchor the project at the current dir.
        if files[0] == '-':
            return '.'
        derived = os.path.dirname(files[0])
    return options.get('--project-directory') or derived
def get_config_from_options(base_dir, options, additional_options=None):
    """Locate, load and return the parsed compose configuration.

    Mirrors the resolution done by project_from_options but stops at the
    config level (no client/Project is constructed).
    """
    additional_options = additional_options or {}
    override_dir = get_project_dir(options)
    environment_file = options.get('--env-file')
    environment = Environment.from_env_file(override_dir or base_dir, environment_file)
    config_path = get_config_path_from_options(options, environment)
    return config.load(
        config.find(base_dir, config_path, environment, override_dir),
        not additional_options.get('--no-interpolate')
    )
def get_config_path_from_options(options, environment):
    """Return the list of compose file paths, or None when unspecified.

    Precedence: the ``--file`` option, then the COMPOSE_FILE environment
    variable split on COMPOSE_PATH_SEPARATOR (default: os.pathsep).
    """
    def as_text(paths):
        return [p.decode('utf-8') if isinstance(p, bytes) else p for p in paths]

    from_cli = options.get('--file')
    if from_cli:
        return as_text(from_cli)
    from_env = environment.get('COMPOSE_FILE')
    if from_env:
        separator = environment.get('COMPOSE_PATH_SEPARATOR', os.pathsep)
        return as_text(from_env.split(separator))
    return None
def get_profiles_from_options(options, environment):
    """Return enabled profiles from --profile, else COMPOSE_PROFILES, else []."""
    cli_profiles = options.get('--profile')
    if cli_profiles:
        return cli_profiles
    env_profiles = environment.get('COMPOSE_PROFILES')
    return env_profiles.split(',') if env_profiles else []
def get_project(project_dir, config_path=None, project_name=None, verbose=False,
                context=None, environment=None, override_dir=None,
                interpolate=True, environment_file=None, enabled_profiles=None):
    """Load the compose config and build a Project around a docker client.

    Connection problems while constructing the project are translated into
    user-friendly errors by errors.handle_connection_errors.
    """
    if not environment:
        environment = Environment.from_env_file(project_dir)
    config_details = config.find(project_dir, config_path, environment, override_dir)
    project_name = get_project_name(
        config_details.working_dir, project_name, environment
    )
    config_data = config.load(config_details, interpolate)
    api_version = environment.get('COMPOSE_API_VERSION')
    client = get_client(
        verbose=verbose, version=api_version, context=context, environment=environment
    )
    with errors.handle_connection_errors(client):
        return Project.from_config(
            project_name,
            config_data,
            client,
            environment.get('DOCKER_DEFAULT_PLATFORM'),
            execution_context_labels(config_details, environment_file),
            enabled_profiles,
        )
def execution_context_labels(config_details, environment_file):
    """Build container labels recording how/where this project was invoked."""
    labels = [
        '{}={}'.format(LABEL_WORKING_DIR, os.path.abspath(config_details.working_dir))
    ]
    # A config streamed via stdin has no stable path worth recording.
    if not use_config_from_stdin(config_details):
        labels.append('{}={}'.format(LABEL_CONFIG_FILES, config_files_label(config_details)))
    if environment_file is not None:
        labels.append('{}={}'.format(
            LABEL_ENVIRONMENT_FILE,
            os.path.normpath(environment_file))
        )
    return labels
def use_config_from_stdin(config_details):
    """True when any config file was streamed in (i.e. has no filename)."""
    return any(not c.filename for c in config_details.config_files)
def config_files_label(config_details):
    """Comma-joined, normalized config-file paths for use in a label value."""
    normalized = [os.path.normpath(c.filename) for c in config_details.config_files]
    return ",".join(normalized)
def get_project_name(working_dir, project_name=None, environment=None):
    """Derive the compose project name, normalized to [-_a-z0-9] characters.

    Precedence: explicit name, COMPOSE_PROJECT_NAME, working dir basename,
    then the literal 'default'.
    """
    def normalize_name(name):
        return re.sub(r'[^-_a-z0-9]', '', name.lower())

    if not environment:
        environment = Environment.from_env_file(working_dir)
    candidate = project_name or environment.get('COMPOSE_PROJECT_NAME')
    if candidate:
        return normalize_name(candidate)
    dir_name = os.path.basename(os.path.abspath(working_dir))
    if dir_name:
        return normalize_name(dir_name)
    return 'default'

View File

@@ -0,0 +1,173 @@
import logging
import os.path
import ssl
from docker import APIClient
from docker import Context
from docker import ContextAPI
from docker import TLSConfig
from docker.errors import TLSParameterError
from docker.utils import kwargs_from_env
from docker.utils.config import home_dir
from . import verbose_proxy
from ..config.environment import Environment
from ..const import HTTP_TIMEOUT
from ..utils import unquote_path
from .errors import UserError
from .utils import generate_user_agent
from .utils import get_version_info
log = logging.getLogger(__name__)
def default_cert_path():
    """Return the default Docker TLS certificate directory (~/.docker)."""
    base = home_dir()
    return os.path.join(base, '.docker')
def make_context(host, options, environment):
    """Create an ad-hoc docker Context named "compose" pointing at ``host``.

    TLS settings come from the CLI options/environment; when TLS is in
    play, the "docker" endpoint is configured explicitly on the context.
    """
    tls = tls_config_from_options(options, environment)
    # tls may be a TLSConfig, True (plain --tls) or None; only a TLSConfig
    # carries a .verify attribute.
    ctx = Context("compose", host=host, tls=tls.verify if tls else False)
    if tls:
        ctx.set_endpoint("docker", host, tls, skip_tls_verify=not tls.verify)
    return ctx
def load_context(name=None):
    """Fetch a docker context by name via docker-py's ContextAPI."""
    return ContextAPI.get_context(name)
def get_client(environment, verbose=False, version=None, context=None):
    """Build the docker API client used by compose.

    With ``verbose``, version details are logged once and the client is
    wrapped in a VerboseProxy so every API call logs its args and result.
    """
    client = docker_client(
        version=version, context=context,
        environment=environment, tls_version=get_tls_version(environment)
    )
    if verbose:
        version_info = client.version().items()
        log.info(get_version_info('full'))
        log.info("Docker base_url: %s", client.base_url)
        log.info("Docker version: %s",
                 ", ".join("%s=%s" % item for item in version_info))
        return verbose_proxy.VerboseProxy('docker', client)
    return client
def get_tls_version(environment):
    """Map COMPOSE_TLS_VERSION to the matching ssl.PROTOCOL_* constant.

    Returns None (library default) when the variable is unset or names a
    protocol not available in this Python/OpenSSL build.
    """
    requested = environment.get('COMPOSE_TLS_VERSION', None)
    if not requested:
        return None
    attr_name = "PROTOCOL_{}".format(requested)
    protocol = getattr(ssl, attr_name, None)
    if protocol is None:
        log.warning(
            'The "{}" protocol is unavailable. You may need to update your '
            'version of Python or OpenSSL. Falling back to TLSv1 (default).'
            .format(requested)
        )
    return protocol
def tls_config_from_options(options, environment=None):
    """Translate the --tls* CLI options (plus environment) into a TLS setting.

    Returns a TLSConfig when any advanced option is involved, True when only
    a plain --tls was requested, and None when TLS is not configured.
    """
    environment = environment or Environment()
    cert_path = environment.get('DOCKER_CERT_PATH') or None
    tls = options.get('--tls', False)
    ca_cert = unquote_path(options.get('--tlscacert'))
    cert = unquote_path(options.get('--tlscert'))
    key = unquote_path(options.get('--tlskey'))
    # verify is a special case - with docopt `--tlsverify` = False means it
    # wasn't used, so we set it if either the environment or the flag is True
    # see https://github.com/docker/compose/issues/5632
    verify = options.get('--tlsverify') or environment.get_boolean('DOCKER_TLS_VERIFY')
    skip_hostname_check = options.get('--skip-hostname-check', False)
    # DOCKER_CERT_PATH supplies all three files unless overridden per-file.
    if cert_path is not None and not any((ca_cert, cert, key)):
        # FIXME: Modify TLSConfig to take a cert_path argument and do this internally
        cert = os.path.join(cert_path, 'cert.pem')
        key = os.path.join(cert_path, 'key.pem')
        ca_cert = os.path.join(cert_path, 'ca.pem')
    if verify and not any((ca_cert, cert, key)):
        # Default location for cert files is ~/.docker
        ca_cert = os.path.join(default_cert_path(), 'ca.pem')
        cert = os.path.join(default_cert_path(), 'cert.pem')
        key = os.path.join(default_cert_path(), 'key.pem')
    tls_version = get_tls_version(environment)
    advanced_opts = any([ca_cert, cert, key, verify, tls_version])
    if tls is True and not advanced_opts:
        return True
    elif advanced_opts:  # --tls is a noop
        client_cert = None
        if cert or key:
            client_cert = (cert, key)
        return TLSConfig(
            client_cert=client_cert, verify=verify, ca_cert=ca_cert,
            assert_hostname=False if skip_hostname_check else None,
            ssl_version=tls_version
        )
    return None
def docker_client(environment, version=None, context=None, tls_version=None):
    """
    Returns a docker-py client configured using environment variables
    according to the same logic as the official Docker client.
    """
    try:
        kwargs = kwargs_from_env(environment=environment, ssl_version=tls_version)
    except TLSParameterError:
        raise UserError(
            "TLS configuration is invalid - make sure your DOCKER_TLS_VERIFY "
            "and DOCKER_CERT_PATH are set correctly.\n"
            "You might need to run `eval \"$(docker-machine env default)\"`")
    if not context:
        # check env for DOCKER_HOST and certs path
        host = kwargs.get("base_url", None)
        tls = kwargs.get("tls", None)
        verify = False if not tls else tls.verify
        if host:
            # DOCKER_HOST set: synthesize a one-off context around it.
            context = Context("compose", host=host, tls=verify)
        else:
            # Otherwise use the docker CLI's currently selected context.
            context = ContextAPI.get_current_context()
        if tls:
            context.set_endpoint("docker", host=host, tls_cfg=tls, skip_tls_verify=not verify)
    if not context.is_docker_host():
        raise UserError(
            "The platform targeted with the current context is not supported.\n"
            "Make sure the context in use targets a Docker Engine.\n")
    kwargs['base_url'] = context.Host
    if context.TLSConfig:
        kwargs['tls'] = context.TLSConfig
    if version:
        kwargs['version'] = version
    timeout = environment.get('COMPOSE_HTTP_TIMEOUT')
    if timeout:
        kwargs['timeout'] = int(timeout)
    else:
        kwargs['timeout'] = HTTP_TIMEOUT
    kwargs['user_agent'] = generate_user_agent()
    # Workaround for
    # https://pyinstaller.readthedocs.io/en/v3.3.1/runtime-information.html#ld-library-path-libpath-considerations
    if 'LD_LIBRARY_PATH_ORIG' in environment:
        kwargs['credstore_env'] = {
            'LD_LIBRARY_PATH': environment.get('LD_LIBRARY_PATH_ORIG'),
        }
    # COMPOSE_PARAMIKO_SSH=1 opts back into paramiko instead of the ssh CLI.
    use_paramiko_ssh = int(environment.get('COMPOSE_PARAMIKO_SSH', 0))
    client = APIClient(use_ssh_client=not use_paramiko_ssh, **kwargs)
    # Keep the resolved base URL around for later inspection (set here,
    # consumed outside this file).
    client._original_base_url = kwargs.get('base_url')
    return client

View File

@@ -0,0 +1,62 @@
from inspect import getdoc
from docopt import docopt
from docopt import DocoptExit
def docopt_full_help(docstring, *args, **kwargs):
    """Run docopt, but on a usage error exit showing the full docstring
    rather than docopt's abbreviated usage summary."""
    try:
        return docopt(docstring, *args, **kwargs)
    except DocoptExit:
        raise SystemExit(docstring)
class DocoptDispatcher:
    """Parse argv with docopt and resolve the handler for a subcommand."""

    def __init__(self, command_class, options):
        # command_class: object whose methods implement the subcommands.
        # options: keyword arguments forwarded to docopt for the top parse.
        self.command_class = command_class
        self.options = options

    @classmethod
    def get_command_and_options(cls, doc_entity, argv, options):
        """Top-level parse; returns (usage text, parsed opts, command name)."""
        command_help = getdoc(doc_entity)
        opt = docopt_full_help(command_help, argv, **options)
        command = opt['COMMAND']
        return command_help, opt, command

    def parse(self, argv):
        """Return (global options, handler, command options) for ``argv``.

        Raises SystemExit with the usage text when no command was given,
        and NoSuchCommand when the command has no documented handler.
        """
        command_help, options, command = DocoptDispatcher.get_command_and_options(
            self.command_class, argv, self.options)
        if command is None:
            raise SystemExit(command_help)
        handler = get_handler(self.command_class, command)
        docstring = getdoc(handler)
        if docstring is None:
            raise NoSuchCommand(command, self)
        # Second-level parse against the subcommand's own docstring.
        command_options = docopt_full_help(docstring, options['ARGS'], options_first=True)
        return options, handler, command_options
def get_handler(command_class, command):
    """Resolve the attribute on ``command_class`` implementing ``command``."""
    # CLI commands use dashes; Python attributes use underscores.
    attr_name = command.replace('-', '_')
    # we certainly want to have "exec" command, since that's what docker client has
    # but in python exec is a keyword
    if attr_name == "exec":
        attr_name = "exec_command"
    if not hasattr(command_class, attr_name):
        raise NoSuchCommand(attr_name, command_class)
    return getattr(command_class, attr_name)
class NoSuchCommand(Exception):
    """Raised when a dispatched command has no matching handler."""

    def __init__(self, command, supercommand):
        message = "No such command: %s" % command
        super().__init__(message)
        self.command = command
        self.supercommand = supercommand

View File

@@ -0,0 +1,165 @@
import contextlib
import logging
import socket
from distutils.spawn import find_executable
from textwrap import dedent
from docker.errors import APIError
from requests.exceptions import ConnectionError as RequestsConnectionError
from requests.exceptions import ReadTimeout
from requests.exceptions import SSLError
from requests.packages.urllib3.exceptions import ReadTimeoutError
from ..const import API_VERSION_TO_ENGINE_VERSION
from .utils import binarystr_to_unicode
from .utils import is_docker_for_mac_installed
from .utils import is_mac
from .utils import is_ubuntu
from .utils import is_windows
log = logging.getLogger(__name__)
class UserError(Exception):
    """An error caused by user input, rendered as a clean one-piece message."""

    def __init__(self, msg):
        # Normalize indentation/whitespace from triple-quoted messages.
        self.msg = dedent(msg).strip()

    def __str__(self):
        return self.msg
class ConnectionError(Exception):
    """Raised after a daemon-connection failure has already been logged.

    NOTE(review): intentionally shadows the builtin ``ConnectionError``
    within this module.
    """
    pass
@contextlib.contextmanager
def handle_connection_errors(client):
    """Context manager translating transport errors into friendly log output.

    Known failure modes are logged and re-raised as the module-level
    ConnectionError; anything unrecognized propagates unchanged.
    """
    try:
        yield
    except SSLError as e:
        log.error('SSL error: %s' % e)
        raise ConnectionError()
    except RequestsConnectionError as e:
        # requests wraps read timeouts inside its ConnectionError; unwrap.
        if e.args and isinstance(e.args[0], ReadTimeoutError):
            log_timeout_error(client.timeout)
            raise ConnectionError()
        exit_with_error(get_conn_error_message(client.base_url))
    except APIError as e:
        log_api_error(e, client.api_version)
        raise ConnectionError()
    except (ReadTimeout, socket.timeout):
        log_timeout_error(client.timeout)
        raise ConnectionError()
    except Exception as e:
        # Windows named-pipe failures surface as pywintypes.error; import
        # lazily so non-Windows platforms never need pywin32.
        if is_windows():
            import pywintypes
            if isinstance(e, pywintypes.error):
                log_windows_pipe_error(e)
                raise ConnectionError()
        raise
def log_windows_pipe_error(exc):
    """Log a friendly message for a Windows named-pipe error."""
    if exc.winerror == 2:
        log.error("Couldn't connect to Docker daemon. You might need to start Docker for Windows.")
        return
    if exc.winerror == 232:  # https://github.com/docker/compose/issues/5005
        log.error(
            "The current Compose file version is not compatible with your engine version. "
            "Please upgrade your Compose file to a more recent version, or set "
            "a COMPOSE_API_VERSION in your environment."
        )
        return
    log.error(
        "Windows named pipe error: {} (code: {})".format(
            binarystr_to_unicode(exc.strerror), exc.winerror
        )
    )
def log_timeout_error(timeout):
    """Log guidance after an HTTP timeout talking to the Docker daemon."""
    message = (
        "An HTTP request took too long to complete. Retry with --verbose to "
        "obtain debug information.\n"
        "If you encounter this issue regularly because of slow network "
        "conditions, consider setting COMPOSE_HTTP_TIMEOUT to a higher "
        "value (current value: %s)." % timeout
    )
    log.error(message)
def log_api_error(e, client_version):
    """Log a docker APIError, with a clearer message for version skew."""
    explanation = binarystr_to_unicode(e.explanation)
    if 'client is newer than server' not in explanation:
        log.error(explanation)
        return
    # Map the API version to the minimum engine release; unknown versions
    # fall back to the raw server explanation.
    version = API_VERSION_TO_ENGINE_VERSION.get(client_version)
    if not version:
        # They've set a custom API version
        log.error(explanation)
        return
    log.error(
        "The Docker Engine version is less than the minimum required by "
        "Compose. Your current project requires a Docker Engine of "
        "version {version} or greater.".format(version=version)
    )
def exit_with_error(msg):
    """Log ``msg`` (dedented, stripped) and abort via ConnectionError."""
    log.error(dedent(msg).strip())
    raise ConnectionError()
def get_conn_error_message(url):
    """Return the most helpful connection-error message for this system.

    Checks for missing binaries / Docker for Mac / docker-machine, falling
    back to a generic message mentioning ``url``.
    """
    # shutil.which replaces distutils.spawn.find_executable, which is
    # deprecated and removed in Python 3.12.
    from shutil import which
    try:
        if which('docker') is None:
            return docker_not_found_msg("Couldn't connect to Docker daemon.")
        if is_docker_for_mac_installed():
            return conn_error_docker_for_mac
        if which('docker-machine') is not None:
            return conn_error_docker_machine
    except UnicodeDecodeError:
        # https://github.com/docker/compose/issues/5442
        # Ignore the error and print the generic message instead.
        pass
    return conn_error_generic.format(url=url)
def docker_not_found_msg(problem):
    """Append platform install instructions to a 'docker missing' problem."""
    url = docker_install_url()
    return "{} You might need to install Docker:\n\n{}".format(problem, url)
def docker_install_url():
    """Pick the Docker installation URL matching the current platform."""
    if is_mac():
        return docker_install_url_mac
    if is_ubuntu():
        return docker_install_url_ubuntu
    if is_windows():
        return docker_install_url_windows
    return docker_install_url_generic
# Platform-specific installation guides, selected by docker_install_url().
docker_install_url_mac = "https://docs.docker.com/engine/installation/mac/"
docker_install_url_ubuntu = "https://docs.docker.com/engine/installation/ubuntulinux/"
docker_install_url_windows = "https://docs.docker.com/engine/installation/windows/"
docker_install_url_generic = "https://docs.docker.com/engine/installation/"
# Connection-failure message templates used by get_conn_error_message().
conn_error_docker_machine = """
Couldn't connect to Docker daemon - you might need to run `docker-machine start default`.
"""
conn_error_docker_for_mac = """
Couldn't connect to Docker daemon. You might need to start Docker for Mac.
"""
conn_error_generic = """
Couldn't connect to Docker daemon at {url} - is it running?
If it's at a non-standard location, specify the URL with the DOCKER_HOST environment variable.
"""

View File

@@ -0,0 +1,54 @@
import logging
from shutil import get_terminal_size
import texttable
from compose.cli import colors
def get_tty_width():
    """Terminal width used for table layout; 0 when it can't be determined."""
    try:
        # get_terminal_size can't determine the size if compose is piped
        # to another command. But in such case it doesn't make sense to
        # try format the output by terminal size as this output is consumed
        # by another command. So let's pretend we have a huge terminal so
        # output is single-lined
        columns, _ = get_terminal_size(fallback=(999, 0))
    except OSError:
        return 0
    return int(columns)
class Formatter:
    """Format tabular data for printing."""

    @staticmethod
    def table(headers, rows):
        """Render ``headers`` + ``rows`` as an ASCII table sized to the tty."""
        drawer = texttable.Texttable(max_width=get_tty_width())
        # All columns are treated as plain text.
        drawer.set_cols_dtype(['t'] * len(headers))
        drawer.add_rows([headers] + rows)
        drawer.set_deco(drawer.HEADER)
        drawer.set_chars(['-', '|', '+', '-'])
        return drawer.draw()
class ConsoleWarningFormatter(logging.Formatter):
    """A logging.Formatter which prints WARNING and ERROR messages with
    a prefix of the log level colored appropriate for the log level.
    """

    def get_level_message(self, record):
        """Return a colored 'LEVEL: ' prefix, or '' below WARNING."""
        separator = ': '
        if record.levelno >= logging.ERROR:
            return colors.red(record.levelname) + separator
        if record.levelno >= logging.WARNING:
            return colors.yellow(record.levelname) + separator
        return ''

    def format(self, record):
        # Log records may carry bytes; normalize so formatting can't fail.
        if isinstance(record.msg, bytes):
            record.msg = record.msg.decode('utf-8')
        formatted = super().format(record)
        return '{}{}'.format(self.get_level_message(record), formatted)

View File

@@ -0,0 +1,255 @@
import _thread as thread
import sys
from collections import namedtuple
from itertools import cycle
from operator import attrgetter
from queue import Empty
from queue import Queue
from threading import Thread
from docker.errors import APIError
from . import colors
from compose.cli.signals import ShutdownException
from compose.utils import split_buffer
class LogPresenter:
    """Render one container's log lines, optionally with a colored prefix."""

    def __init__(self, prefix_width, color_func, keep_prefix=True):
        # prefix_width: pad container names so the '|' separators line up.
        self.prefix_width = prefix_width
        self.color_func = color_func
        self.keep_prefix = keep_prefix

    def present(self, container, line):
        """Return ``line``, prefixed with the container's padded name."""
        if not self.keep_prefix:
            return '{line}'.format(line=line)
        padded = container.name_without_project.ljust(self.prefix_width)
        colored = self.color_func(padded + ' |')
        return '{prefix} '.format(prefix=colored) + '{line}'.format(line=line)
def build_log_presenters(service_names, monochrome, keep_prefix=True):
    """Return an iterable of functions.

    Each function can be used to format the logs output of a container.
    """
    width = max_name_width(service_names)

    def no_color(text):
        return text

    # Cycle forever so every newly started container gets a presenter.
    palette = [no_color] if monochrome else colors.rainbow()
    for color_func in cycle(palette):
        yield LogPresenter(width, color_func, keep_prefix)
def max_name_width(service_names, max_index_width=3):
    """Calculate the maximum width of container names so we can make the log
    prefixes line up like so:
    db_1  | Listening
    web_1 | Listening

    Returns just ``max_index_width`` for an empty ``service_names``
    (previously this raised ValueError from ``max()`` on an empty sequence).
    """
    return max((len(name) for name in service_names), default=0) + max_index_width
class LogPrinter:
    """Print logs from many containers to a single output stream."""

    def __init__(self,
                 containers,
                 presenters,
                 event_stream,
                 output=sys.stdout,
                 cascade_stop=False,
                 log_args=None):
        # containers: initial containers to tail; more may appear later via
        #   event_stream.
        # presenters: infinite iterator of LogPresenter (one per container).
        # cascade_stop: stop printing as soon as any tracked container stops.
        self.containers = containers
        self.presenters = presenters
        self.event_stream = event_stream
        self.output = output
        self.cascade_stop = cascade_stop
        self.log_args = log_args or {}

    def run(self):
        """Tail all containers until finished.

        Returns the name of the container that triggered cascade_stop (so
        the caller can propagate its exit code), otherwise None.
        """
        if not self.containers:
            return
        queue = Queue()
        thread_args = queue, self.log_args
        # One tailer thread per container, plus a producer thread that
        # watches docker events to attach/detach tailers dynamically.
        thread_map = build_thread_map(self.containers, self.presenters, thread_args)
        start_producer_thread((
            thread_map,
            self.event_stream,
            self.presenters,
            thread_args))
        for line in consume_queue(queue, self.cascade_stop):
            remove_stopped_threads(thread_map)
            if self.cascade_stop:
                matching_container = [cont.name for cont in self.containers if cont.name == line]
                if line in matching_container:
                    # Returning the name of the container that started the
                    # the cascade_stop so we can return the correct exit code
                    return line
            if not line:
                if not thread_map:
                    # There are no running containers left to tail, so exit
                    return
                # We got an empty line because of a timeout, but there are still
                # active containers to tail, so continue
                continue
            self.write(line)

    def write(self, line):
        """Write one formatted line, degrading gracefully on encoding errors."""
        try:
            self.output.write(line)
        except UnicodeEncodeError:
            # This may happen if the user's locale settings don't support UTF-8
            # and UTF-8 characters are present in the log line. The following
            # will output a "degraded" log with unsupported characters
            # replaced by `?`
            self.output.write(line.encode('ascii', 'replace').decode())
        self.output.flush()
def remove_stopped_threads(thread_map):
    """Drop map entries whose tailer thread has finished."""
    finished = [cid for cid, tailer in thread_map.items() if not tailer.is_alive()]
    for cid in finished:
        thread_map.pop(cid, None)
def build_thread(container, presenter, queue, log_args):
    """Start (and return) a daemon thread tailing one container's logs."""
    tailer = Thread(
        target=tail_container_logs,
        args=(container, presenter, queue, log_args),
        daemon=True,
    )
    tailer.start()
    return tailer
def build_thread_map(initial_containers, presenters, thread_args):
    """Return a map of container id -> tailer thread, one per container."""
    # Container order is unspecified, so they are sorted by name in order to make
    # container:presenter (log color) assignment deterministic when given a list of containers
    # with the same names.
    ordered = sorted(initial_containers, key=attrgetter('name'))
    return {
        container.id: build_thread(container, next(presenters), *thread_args)
        for container in ordered
    }
class QueueItem(namedtuple('_QueueItem', 'item is_stop exc')):
    """Message passed from tailer threads to the printer's consumer loop."""

    @classmethod
    def new(cls, item):
        """A normal log line."""
        return cls(item=item, is_stop=None, exc=None)

    @classmethod
    def exception(cls, exc):
        """An error raised while tailing; re-raised by the consumer."""
        return cls(item=None, is_stop=None, exc=exc)

    @classmethod
    def stop(cls, item=None):
        """A sentinel telling the consumer that a container stopped."""
        return cls(item=item, is_stop=True, exc=None)
def tail_container_logs(container, presenter, queue, log_args):
    """Feed one container's formatted log lines into ``queue``.

    Runs in its own thread; any exception is forwarded as a QueueItem so
    the consumer can re-raise it on the main thread.
    """
    try:
        for chunk in build_log_generator(container, log_args):
            queue.put(QueueItem.new(presenter.present(container, chunk)))
    except Exception as e:
        queue.put(QueueItem.exception(e))
        return
    if log_args.get('follow'):
        exit_message = presenter.color_func(wait_on_exit(container))
        queue.put(QueueItem.new(exit_message))
    queue.put(QueueItem.stop(container.name))
def build_log_generator(container, log_args):
    """Return an iterator over the container's buffered log lines."""
    # if the container doesn't have a log_stream we need to attach to container
    # before log printer starts running
    stream = container.log_stream
    if stream is None:
        stream = container.logs(stdout=True, stderr=True, stream=True, **log_args)
    return split_buffer(stream)
def wait_on_exit(container):
    """Block until the container exits; return a human-readable summary."""
    try:
        exit_code = container.wait()
    except APIError as e:
        return "Unexpected API error for {} (HTTP code {})\nResponse body:\n{}\n".format(
            container.name, e.response.status_code,
            e.response.text or '[empty]'
        )
    return "{} exited with code {}\n".format(container.name, exit_code)
def start_producer_thread(thread_args):
    """Start the daemon thread that feeds docker events to the printer."""
    producer = Thread(target=watch_events, args=thread_args, daemon=True)
    producer.start()
def watch_events(thread_map, event_stream, presenters, thread_args):
    """Producer loop keeping ``thread_map`` in sync with container events.

    Mutates thread_map in place (it is shared with LogPrinter.run):
    stopped/dead containers are removed; (re)started ones get a fresh
    tailer thread.
    """
    crashed_containers = set()
    for event in event_stream:
        if event['action'] == 'stop':
            thread_map.pop(event['id'], None)
        if event['action'] == 'die':
            thread_map.pop(event['id'], None)
            crashed_containers.add(event['id'])
        # Everything below only applies to 'start' events.
        if event['action'] != 'start':
            continue
        if event['id'] in thread_map:
            if thread_map[event['id']].is_alive():
                continue
            # Container was stopped and started, we need a new thread
            thread_map.pop(event['id'], None)
        # Container crashed so we should reattach to it
        if event['id'] in crashed_containers:
            container = event['container']
            if not container.is_restarting:
                try:
                    container.attach_log_stream()
                except APIError:
                    # Just ignore errors when reattaching to already crashed containers
                    pass
            crashed_containers.remove(event['id'])
        thread_map[event['id']] = build_thread(
            event['container'],
            next(presenters),
            *thread_args
        )
def consume_queue(queue, cascade_stop):
    """Consume the queue by reading lines off of it and yielding them."""
    while True:
        try:
            entry = queue.get(timeout=0.1)
        except Empty:
            # Timed out with nothing to print; yield a sentinel so callers
            # can prune finished threads and decide whether to keep waiting.
            yield None
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()
        if entry.exc:
            raise entry.exc
        if entry.is_stop and not cascade_stop:
            # One container stopped, but we keep tailing the others.
            continue
        yield entry.item

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,41 @@
import signal
from ..const import IS_WINDOWS_PLATFORM
class ShutdownException(Exception):
    """Raised (from a signal handler) to request an orderly shutdown."""
    pass
class HangUpException(Exception):
    """Raised when SIGHUP (terminal hang-up) is received."""
    pass
def shutdown(signal, frame):
    # Signal-handler signature (signum, frame): converts the signal into an
    # exception so cleanup can happen in normal control flow.
    raise ShutdownException()
def set_signal_handler(handler):
    """Install ``handler`` for both SIGINT and SIGTERM."""
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, handler)
def set_signal_handler_to_shutdown():
    """Route SIGINT/SIGTERM to the shutdown handler."""
    set_signal_handler(shutdown)
def hang_up(signal, frame):
    # Signal-handler signature (signum, frame): surface SIGHUP as an exception.
    raise HangUpException()
def set_signal_handler_to_hang_up():
    """Route SIGHUP to the hang_up handler (no-op on Windows)."""
    # on Windows a ValueError will be raised if trying to set signal handler for SIGHUP
    if not IS_WINDOWS_PLATFORM:
        signal.signal(signal.SIGHUP, hang_up)
def ignore_sigpipe():
    """Make broken pipes terminate quietly instead of raising (POSIX only)."""
    # Restore default behavior for SIGPIPE instead of raising
    # an exception when encountered.
    if not IS_WINDOWS_PLATFORM:
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

View File

@@ -0,0 +1,144 @@
import math
import os
import platform
import ssl
import subprocess
import sys
import distro
import docker
import compose
from ..const import IS_WINDOWS_PLATFORM
def yesno(prompt, default=None):
    """
    Prompt the user for a yes or no.

    Can optionally specify a default value, which will only be
    used if they enter a blank line.

    Unrecognised input (anything other than "y", "n", "yes",
    "no" or "") will return None.
    """
    answer = input(prompt).strip().lower()
    if answer in ("y", "yes"):
        return True
    if answer in ("n", "no"):
        return False
    if answer == "":
        return default
    return None
def input(prompt):
    """
    Version of input (raw_input in Python 2) which forces a flush of sys.stdout
    to avoid problems where the prompt fails to appear due to line buffering
    """
    sys.stdout.write(prompt)
    sys.stdout.flush()
    line = sys.stdin.readline()
    return line.rstrip('\n')
def call_silently(*args, **kwargs):
    """
    Like subprocess.call(), but redirects stdout and stderr to /dev/null.
    """
    with open(os.devnull, 'w') as devnull:
        try:
            return subprocess.call(*args, stdout=devnull, stderr=devnull, **kwargs)
        except OSError:
            # On Windows, subprocess.call() can still raise exceptions. Normalize
            # to POSIXy behaviour by returning a nonzero exit code.
            return 1
def is_mac():
    # platform.system() reports 'Darwin' on macOS.
    return platform.system() == 'Darwin'
def is_ubuntu():
    # distro.linux_distribution() returns (name, version, codename).
    return platform.system() == 'Linux' and distro.linux_distribution()[0] == 'Ubuntu'
def is_windows():
    # Delegates to the shared constant so platform checks stay consistent.
    return IS_WINDOWS_PLATFORM
def get_version_info(scope):
    """Return version information at the requested level of detail.

    scope: 'compose' for just the compose version line, 'full' to also
    include docker-py, Python implementation and OpenSSL versions.
    Raises ValueError for any other scope.
    """
    base = 'docker-compose version {}, build {}'.format(
        compose.__version__,
        get_build_version())
    if scope == 'compose':
        return base
    if scope == 'full':
        return (
            "{}\n"
            "docker-py version: {}\n"
            "{} version: {}\n"
            "OpenSSL version: {}"
        ).format(
            base,
            docker.version,
            platform.python_implementation(),
            platform.python_version(),
            ssl.OPENSSL_VERSION)
    raise ValueError("{} is not a valid version scope".format(scope))
def get_build_version():
    """Read the git SHA baked into the distribution, or 'unknown'."""
    sha_file = os.path.join(os.path.dirname(compose.__file__), 'GITSHA')
    if not os.path.exists(sha_file):
        return 'unknown'
    with open(sha_file) as fh:
        return fh.read().strip()
def is_docker_for_mac_installed():
    # Presence of the app bundle is used as the installation signal.
    return is_mac() and os.path.isdir('/Applications/Docker.app')
def generate_user_agent():
    """Build the User-Agent string: compose, docker-py and platform info."""
    parts = [
        "docker-compose/{}".format(compose.__version__),
        "docker-py/{}".format(docker.__version__),
    ]
    try:
        platform_pair = "{}/{}".format(platform.system(), platform.release())
    except OSError:
        # Platform queries can fail on some systems; just omit that segment.
        pass
    else:
        parts.append(platform_pair)
    return " ".join(parts)
def human_readable_file_size(size):
    """Format a byte count using decimal (SI) units, e.g. '1.235 MB'."""
    suffixes = ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', ]
    order = int(math.log(size, 1000)) if size else 0
    # Clamp to the largest suffix we know about.
    order = min(order, len(suffixes) - 1)
    scaled = size / pow(10, order * 3)
    return '{:.4g} {}'.format(scaled, suffixes[order])
def binarystr_to_unicode(s):
    """Decode bytes to str, tolerating Windows codepages and bad bytes."""
    if not isinstance(s, bytes):
        return s
    if IS_WINDOWS_PLATFORM:
        # Windows error strings arrive in the ANSI codepage, not UTF-8.
        try:
            return s.decode('windows-1250')
        except UnicodeDecodeError:
            pass
    # Fall back to UTF-8, replacing anything undecodable.
    return s.decode('utf-8', 'replace')

View File

@@ -0,0 +1,55 @@
import functools
import logging
import pprint
from itertools import chain
def format_call(args, kwargs):
    """Render positional and keyword arguments as a call-site string."""
    rendered_args = [repr(value) for value in args]
    rendered_kwargs = ["{!s}={!r}".format(key, value) for key, value in kwargs.items()]
    return "({})".format(", ".join(rendered_args + rendered_kwargs))
def format_return(result, max_lines):
    """Summarize a return value, truncating long pretty-printed output."""
    if isinstance(result, (list, tuple, set)):
        return "({} with {} items)".format(type(result).__name__, len(result))
    if not result:
        # Falsy results (None, 0, '', {}) are passed through untouched.
        return result
    lines = pprint.pformat(result).split('\n')
    suffix = '\n...' if len(lines) > max_lines else ''
    return '\n'.join(lines[:max_lines]) + suffix
class VerboseProxy:
    """Proxy all function calls to another class and log method name, arguments
    and return values for each call.
    """

    def __init__(self, obj_name, obj, log_name=None, max_lines=10):
        # obj_name: label used in log lines.
        # max_lines: cap on pretty-printed result lines (see format_return).
        self.obj_name = obj_name
        self.obj = obj
        self.max_lines = max_lines
        self.log = logging.getLogger(log_name or __name__)

    def __getattr__(self, name):
        # Only invoked for attributes not found on the proxy itself; plain
        # attributes pass through, callables get a logging wrapper.
        attr = getattr(self.obj, name)
        if not callable(attr):
            return attr
        # Defer the call so its arguments can be logged alongside it.
        return functools.partial(self.proxy_callable, name)

    def proxy_callable(self, call_name, *args, **kwargs):
        """Invoke ``call_name`` on the wrapped object, logging args and result."""
        self.log.info("%s %s <- %s",
                      self.obj_name,
                      call_name,
                      format_call(args, kwargs))
        result = getattr(self.obj, call_name)(*args, **kwargs)
        self.log.info("%s %s -> %s",
                      self.obj_name,
                      call_name,
                      format_return(result, self.max_lines))
        return result