Remove fig.packages, replace with real deps.

Signed-off-by: Daniel Nephin <dnephin@gmail.com>
Daniel Nephin 2014-07-30 13:11:11 -07:00 committed by Ben Firshman
parent 8d3c9dccc5
commit 7fd37c89b9
30 changed files with 47 additions and 1775 deletions
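In short: fig previously vendored copies of docker-py and six under fig.packages; this commit deletes those copies and depends on the real PyPI packages instead, so import sites change throughout the codebase. A minimal sketch of the new style (the base_url value is illustrative):

# Old style, via the vendored tree removed by this commit:
#   from fig.packages.docker import Client
#   from fig.packages import six

# New style, using the PyPI packages now listed in requirements.txt:
from docker import Client
import six

client = Client(base_url='unix://var/run/docker.sock')  # illustrative URL
assert isinstance(client.base_url, six.string_types)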

View File

@ -3,9 +3,9 @@ python:
- '2.6'
- '2.7'
install:
- pip install .
- pip install -r requirements.txt
- pip install -r requirements-dev.txt
- pip install .
script:
- script/validate-dco
- flake8 fig

View File

@ -1,12 +1,17 @@
FROM ubuntu:14.04
RUN apt-get update -qq && apt-get install -qy python python-pip python-dev
ADD requirements.txt /code/
RUN apt-get update -qq && apt-get install -qy python python-pip python-dev git
RUN useradd -d /home/user -m -s /bin/bash user
WORKDIR /code/
ADD requirements.txt /code/
RUN pip install -r requirements.txt
ADD requirements-dev.txt /code/
RUN pip install -r requirements-dev.txt
ADD . /code/
RUN python setup.py develop
RUN useradd -d /home/user -m -s /bin/bash user
RUN python setup.py install
RUN chown -R user /code/
USER user

View File

@ -1,13 +1,13 @@
from __future__ import unicode_literals
from __future__ import absolute_import
from ..packages.docker import Client
from docker import Client
from requests.exceptions import ConnectionError
import errno
import logging
import os
import re
import yaml
from ..packages import six
import six
from ..project import Project
from ..service import ConfigError

View File

@ -16,7 +16,7 @@ from .formatter import Formatter
from .log_printer import LogPrinter
from .utils import yesno
from ..packages.docker.errors import APIError
from docker.errors import APIError
from .errors import UserError
from .docopt_command import NoSuchCommand

View File

@ -4,7 +4,7 @@ from itertools import chain
import logging
import pprint
from fig.packages import six
import six
def format_call(args, kwargs):

View File

@ -1,20 +0,0 @@
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .version import version
__version__ = version
__title__ = 'docker-py'
from .client import Client # flake8: noqa

View File

@ -1,7 +0,0 @@
from .auth import (
INDEX_URL,
encode_header,
load_config,
resolve_authconfig,
resolve_repository_name
) # flake8: noqa

View File

@ -1,167 +0,0 @@
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import fileinput
import json
import os
from fig.packages import six
from ..utils import utils
from .. import errors
INDEX_URL = 'https://index.docker.io/v1/'
DOCKER_CONFIG_FILENAME = '.dockercfg'
def swap_protocol(url):
if url.startswith('http://'):
return url.replace('http://', 'https://', 1)
if url.startswith('https://'):
return url.replace('https://', 'http://', 1)
return url
def expand_registry_url(hostname):
if hostname.startswith('http:') or hostname.startswith('https:'):
if '/' not in hostname[9:]:
hostname = hostname + '/v1/'
return hostname
if utils.ping('https://' + hostname + '/v1/_ping'):
return 'https://' + hostname + '/v1/'
return 'http://' + hostname + '/v1/'
def resolve_repository_name(repo_name):
if '://' in repo_name:
raise errors.InvalidRepository(
'Repository name cannot contain a scheme ({0})'.format(repo_name))
parts = repo_name.split('/', 1)
if '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost':
# This is a docker index repo (ex: foo/bar or ubuntu)
return INDEX_URL, repo_name
if len(parts) < 2:
raise errors.InvalidRepository(
'Invalid repository name ({0})'.format(repo_name))
if 'index.docker.io' in parts[0]:
raise errors.InvalidRepository(
'Invalid repository name, try "{0}" instead'.format(parts[1]))
return expand_registry_url(parts[0]), parts[1]
def resolve_authconfig(authconfig, registry=None):
"""Return the authentication data from the given auth configuration for a
specific registry. We'll do our best to infer the correct URL for the
registry, trying both http and https schemes. Returns an empty dictionary
if no data exists."""
# Default to the public index server
registry = registry or INDEX_URL
# If it's not the index server there are three cases:
#
# 1. this is a full config url -> it should be used as is
# 2. it could be a full url, but with the wrong protocol
# 3. it can be the hostname optionally with a port
#
# as there is only one auth entry which is fully qualified we need to start
# parsing and matching
if '/' not in registry:
registry = registry + '/v1/'
if not registry.startswith('http:') and not registry.startswith('https:'):
registry = 'https://' + registry
if registry in authconfig:
return authconfig[registry]
return authconfig.get(swap_protocol(registry), None)
def encode_auth(auth_info):
return base64.b64encode(auth_info.get('username', '') + b':' +
auth_info.get('password', ''))
def decode_auth(auth):
if isinstance(auth, six.string_types):
auth = auth.encode('ascii')
s = base64.b64decode(auth)
login, pwd = s.split(b':')
return login.decode('ascii'), pwd.decode('ascii')
def encode_header(auth):
auth_json = json.dumps(auth).encode('ascii')
return base64.b64encode(auth_json)
def encode_full_header(auth):
""" Returns the given auth block encoded for the X-Registry-Config header.
"""
return encode_header({'configs': auth})
def load_config(root=None):
"""Loads authentication data from a Docker configuration file in the given
root directory."""
conf = {}
data = None
config_file = os.path.join(root or os.environ.get('HOME', '.'),
DOCKER_CONFIG_FILENAME)
# First try as JSON
try:
with open(config_file) as f:
conf = {}
for registry, entry in six.iteritems(json.load(f)):
username, password = decode_auth(entry['auth'])
conf[registry] = {
'username': username,
'password': password,
'email': entry['email'],
'serveraddress': registry,
}
return conf
except:
pass
# If that fails, we assume the configuration file contains a single
# authentication token for the public registry in the following format:
#
# auth = AUTH_TOKEN
# email = email@domain.com
try:
data = []
for line in fileinput.input(config_file):
data.append(line.strip().split(' = ')[1])
if len(data) < 2:
# Not enough data
raise errors.InvalidConfigFile(
'Invalid or empty configuration file!')
username, password = decode_auth(data[0])
conf[INDEX_URL] = {
'username': username,
'password': password,
'email': data[1],
'serveraddress': INDEX_URL,
}
return conf
except:
pass
# If all fails, return an empty config
return {}
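The helpers above (encode_auth/decode_auth, load_config) handle the legacy ~/.dockercfg format, where each registry entry stores base64("username:password"). They are not lost: the same code ships in docker-py itself, which fig now installs from PyPI. A self-contained sketch of the encoding the removed decode_auth() expects (credentials are made up):

import base64

token = base64.b64encode(b'alice:s3cret')   # the value of the "auth" key
login, password = base64.b64decode(token).split(b':', 1)
assert (login, password) == (b'alice', b's3cret')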

View File

@ -1,860 +0,0 @@
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import re
import shlex
import struct
import warnings
import requests
import requests.exceptions
from fig.packages import six
from .auth import auth
from .unixconn import unixconn
from .utils import utils
from . import errors
if not six.PY3:
import websocket
DEFAULT_DOCKER_API_VERSION = '1.12'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
class Client(requests.Session):
def __init__(self, base_url=None, version=DEFAULT_DOCKER_API_VERSION,
timeout=DEFAULT_TIMEOUT_SECONDS):
super(Client, self).__init__()
if base_url is None:
base_url = "http+unix://var/run/docker.sock"
if 'unix:///' in base_url:
base_url = base_url.replace('unix:/', 'unix:')
if base_url.startswith('unix:'):
base_url = "http+" + base_url
if base_url.startswith('tcp:'):
base_url = base_url.replace('tcp:', 'http:')
if base_url.endswith('/'):
base_url = base_url[:-1]
self.base_url = base_url
self._version = version
self._timeout = timeout
self._auth_configs = auth.load_config()
self.mount('http+unix://', unixconn.UnixAdapter(base_url, timeout))
def _set_request_timeout(self, kwargs):
"""Prepare the kwargs for an HTTP request by inserting the timeout
parameter, if not already present."""
kwargs.setdefault('timeout', self._timeout)
return kwargs
def _post(self, url, **kwargs):
return self.post(url, **self._set_request_timeout(kwargs))
def _get(self, url, **kwargs):
return self.get(url, **self._set_request_timeout(kwargs))
def _delete(self, url, **kwargs):
return self.delete(url, **self._set_request_timeout(kwargs))
def _url(self, path):
return '{0}/v{1}{2}'.format(self.base_url, self._version, path)
def _raise_for_status(self, response, explanation=None):
"""Raises stored :class:`APIError`, if one occurred."""
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
raise errors.APIError(e, response, explanation=explanation)
def _result(self, response, json=False, binary=False):
assert not (json and binary)
self._raise_for_status(response)
if json:
return response.json()
if binary:
return response.content
return response.text
def _container_config(self, image, command, hostname=None, user=None,
detach=False, stdin_open=False, tty=False,
mem_limit=0, ports=None, environment=None, dns=None,
volumes=None, volumes_from=None,
network_disabled=False, entrypoint=None,
cpu_shares=None, working_dir=None, domainname=None,
memswap_limit=0):
if isinstance(command, six.string_types):
command = shlex.split(str(command))
if isinstance(environment, dict):
environment = [
'{0}={1}'.format(k, v) for k, v in environment.items()
]
if isinstance(ports, list):
exposed_ports = {}
for port_definition in ports:
port = port_definition
proto = 'tcp'
if isinstance(port_definition, tuple):
if len(port_definition) == 2:
proto = port_definition[1]
port = port_definition[0]
exposed_ports['{0}/{1}'.format(port, proto)] = {}
ports = exposed_ports
if isinstance(volumes, list):
volumes_dict = {}
for vol in volumes:
volumes_dict[vol] = {}
volumes = volumes_dict
if volumes_from:
if not isinstance(volumes_from, six.string_types):
volumes_from = ','.join(volumes_from)
else:
# Force None, an empty list or dict causes client.start to fail
volumes_from = None
attach_stdin = False
attach_stdout = False
attach_stderr = False
stdin_once = False
if not detach:
attach_stdout = True
attach_stderr = True
if stdin_open:
attach_stdin = True
stdin_once = True
if utils.compare_version('1.10', self._version) >= 0:
message = ('{0!r} parameter has no effect on create_container().'
' It has been moved to start()')
if dns is not None:
raise errors.DockerException(message.format('dns'))
if volumes_from is not None:
raise errors.DockerException(message.format('volumes_from'))
return {
'Hostname': hostname,
'Domainname': domainname,
'ExposedPorts': ports,
'User': user,
'Tty': tty,
'OpenStdin': stdin_open,
'StdinOnce': stdin_once,
'Memory': mem_limit,
'AttachStdin': attach_stdin,
'AttachStdout': attach_stdout,
'AttachStderr': attach_stderr,
'Env': environment,
'Cmd': command,
'Dns': dns,
'Image': image,
'Volumes': volumes,
'VolumesFrom': volumes_from,
'NetworkDisabled': network_disabled,
'Entrypoint': entrypoint,
'CpuShares': cpu_shares,
'WorkingDir': working_dir,
'MemorySwap': memswap_limit
}
def _post_json(self, url, data, **kwargs):
# Go <1.1 can't unserialize null to a string
# so we do this disgusting thing here.
data2 = {}
if data is not None:
for k, v in six.iteritems(data):
if v is not None:
data2[k] = v
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['Content-Type'] = 'application/json'
return self._post(url, data=json.dumps(data2), **kwargs)
def _attach_params(self, override=None):
return override or {
'stdout': 1,
'stderr': 1,
'stream': 1
}
def _attach_websocket(self, container, params=None):
if six.PY3:
raise NotImplementedError("This method is not currently supported "
"under python 3")
url = self._url("/containers/{0}/attach/ws".format(container))
req = requests.Request("POST", url, params=self._attach_params(params))
full_url = req.prepare().url
full_url = full_url.replace("http://", "ws://", 1)
full_url = full_url.replace("https://", "wss://", 1)
return self._create_websocket_connection(full_url)
def _create_websocket_connection(self, url):
return websocket.create_connection(url)
def _get_raw_response_socket(self, response):
self._raise_for_status(response)
if six.PY3:
return response.raw._fp.fp.raw._sock
else:
return response.raw._fp.fp._sock
def _stream_helper(self, response):
"""Generator for data coming from a chunked-encoded HTTP response."""
socket_fp = self._get_raw_response_socket(response)
socket_fp.setblocking(1)
socket = socket_fp.makefile()
while True:
# Because Docker introduced newlines at the end of chunks in v0.9,
# and only on some API endpoints, we have to cater for both cases.
size_line = socket.readline()
if size_line == '\r\n':
size_line = socket.readline()
size = int(size_line, 16)
if size <= 0:
break
data = socket.readline()
if not data:
break
yield data
def _multiplexed_buffer_helper(self, response):
"""A generator of multiplexed data blocks read from a buffered
response."""
buf = self._result(response, binary=True)
walker = 0
while True:
if len(buf[walker:]) < 8:
break
_, length = struct.unpack_from('>BxxxL', buf[walker:])
start = walker + STREAM_HEADER_SIZE_BYTES
end = start + length
walker = end
yield buf[start:end]
def _multiplexed_socket_stream_helper(self, response):
"""A generator of multiplexed data blocks coming from a response
socket."""
socket = self._get_raw_response_socket(response)
def recvall(socket, size):
blocks = []
while size > 0:
block = socket.recv(size)
if not block:
return None
blocks.append(block)
size -= len(block)
sep = bytes() if six.PY3 else str()
data = sep.join(blocks)
return data
while True:
socket.settimeout(None)
header = recvall(socket, STREAM_HEADER_SIZE_BYTES)
if not header:
break
_, length = struct.unpack('>BxxxL', header)
if not length:
break
data = recvall(socket, length)
if not data:
break
yield data
def attach(self, container, stdout=True, stderr=True,
stream=False, logs=False):
if isinstance(container, dict):
container = container.get('Id')
params = {
'logs': logs and 1 or 0,
'stdout': stdout and 1 or 0,
'stderr': stderr and 1 or 0,
'stream': stream and 1 or 0,
}
u = self._url("/containers/{0}/attach".format(container))
response = self._post(u, params=params, stream=stream)
# Stream multiplexing was only introduced in API v1.6. Anything before
# that needs old-style streaming.
if utils.compare_version('1.6', self._version) < 0:
def stream_result():
self._raise_for_status(response)
for line in response.iter_lines(chunk_size=1,
decode_unicode=True):
# filter out keep-alive new lines
if line:
yield line
return stream_result() if stream else \
self._result(response, binary=True)
sep = bytes() if six.PY3 else str()
return stream and self._multiplexed_socket_stream_helper(response) or \
sep.join([x for x in self._multiplexed_buffer_helper(response)])
def attach_socket(self, container, params=None, ws=False):
if params is None:
params = {
'stdout': 1,
'stderr': 1,
'stream': 1
}
if ws:
return self._attach_websocket(container, params)
if isinstance(container, dict):
container = container.get('Id')
u = self._url("/containers/{0}/attach".format(container))
return self._get_raw_response_socket(self.post(
u, None, params=self._attach_params(params), stream=True))
def build(self, path=None, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, stream=False, timeout=None,
custom_context=False, encoding=None):
remote = context = headers = None
if path is None and fileobj is None:
raise TypeError("Either path or fileobj needs to be provided.")
if custom_context:
if not fileobj:
raise TypeError("You must specify fileobj with custom_context")
context = fileobj
elif fileobj is not None:
context = utils.mkbuildcontext(fileobj)
elif path.startswith(('http://', 'https://',
'git://', 'github.com/')):
remote = path
else:
context = utils.tar(path)
if utils.compare_version('1.8', self._version) >= 0:
stream = True
u = self._url('/build')
params = {
't': tag,
'remote': remote,
'q': quiet,
'nocache': nocache,
'rm': rm
}
if context is not None:
headers = {'Content-Type': 'application/tar'}
if encoding:
headers['Content-Encoding'] = encoding
if utils.compare_version('1.9', self._version) >= 0:
# If we don't have any auth data so far, try reloading the config
# file one more time in case anything showed up in there.
if not self._auth_configs:
self._auth_configs = auth.load_config()
# Send the full auth configuration (if any exists), since the build
# could use any (or all) of the registries.
if self._auth_configs:
headers['X-Registry-Config'] = auth.encode_full_header(
self._auth_configs
)
response = self._post(
u,
data=context,
params=params,
headers=headers,
stream=stream,
timeout=timeout,
)
if context is not None:
context.close()
if stream:
return self._stream_helper(response)
else:
output = self._result(response)
srch = r'Successfully built ([0-9a-f]+)'
match = re.search(srch, output)
if not match:
return None, output
return match.group(1), output
def commit(self, container, repository=None, tag=None, message=None,
author=None, conf=None):
params = {
'container': container,
'repo': repository,
'tag': tag,
'comment': message,
'author': author
}
u = self._url("/commit")
return self._result(self._post_json(u, data=conf, params=params),
json=True)
def containers(self, quiet=False, all=False, trunc=True, latest=False,
since=None, before=None, limit=-1, size=False):
params = {
'limit': 1 if latest else limit,
'all': 1 if all else 0,
'size': 1 if size else 0,
'trunc_cmd': 1 if trunc else 0,
'since': since,
'before': before
}
u = self._url("/containers/json")
res = self._result(self._get(u, params=params), True)
if quiet:
return [{'Id': x['Id']} for x in res]
return res
def copy(self, container, resource):
if isinstance(container, dict):
container = container.get('Id')
res = self._post_json(
self._url("/containers/{0}/copy".format(container)),
data={"Resource": resource},
stream=True
)
self._raise_for_status(res)
return res.raw
def create_container(self, image, command=None, hostname=None, user=None,
detach=False, stdin_open=False, tty=False,
mem_limit=0, ports=None, environment=None, dns=None,
volumes=None, volumes_from=None,
network_disabled=False, name=None, entrypoint=None,
cpu_shares=None, working_dir=None, domainname=None,
memswap_limit=0):
config = self._container_config(
image, command, hostname, user, detach, stdin_open, tty, mem_limit,
ports, environment, dns, volumes, volumes_from, network_disabled,
entrypoint, cpu_shares, working_dir, domainname, memswap_limit
)
return self.create_container_from_config(config, name)
def create_container_from_config(self, config, name=None):
u = self._url("/containers/create")
params = {
'name': name
}
res = self._post_json(u, data=config, params=params)
return self._result(res, True)
def diff(self, container):
if isinstance(container, dict):
container = container.get('Id')
return self._result(self._get(self._url("/containers/{0}/changes".
format(container))), True)
def events(self):
return self._stream_helper(self.get(self._url('/events'), stream=True))
def export(self, container):
if isinstance(container, dict):
container = container.get('Id')
res = self._get(self._url("/containers/{0}/export".format(container)),
stream=True)
self._raise_for_status(res)
return res.raw
def get_image(self, image):
res = self._get(self._url("/images/{0}/get".format(image)),
stream=True)
self._raise_for_status(res)
return res.raw
def history(self, image):
res = self._get(self._url("/images/{0}/history".format(image)))
self._raise_for_status(res)
return self._result(res)
def images(self, name=None, quiet=False, all=False, viz=False):
if viz:
if utils.compare_version('1.7', self._version) >= 0:
raise Exception('Viz output is not supported in API >= 1.7!')
return self._result(self._get(self._url("images/viz")))
params = {
'filter': name,
'only_ids': 1 if quiet else 0,
'all': 1 if all else 0,
}
res = self._result(self._get(self._url("/images/json"), params=params),
True)
if quiet:
return [x['Id'] for x in res]
return res
def import_image(self, src=None, repository=None, tag=None, image=None):
u = self._url("/images/create")
params = {
'repo': repository,
'tag': tag
}
if src:
try:
# XXX: this is far from optimal, but it's the only way
# for now to import tarballs through the API
fic = open(src)
data = fic.read()
fic.close()
src = "-"
except IOError:
# file does not exist or is not a file (URL)
data = None
if isinstance(src, six.string_types):
params['fromSrc'] = src
return self._result(self._post(u, data=data, params=params))
return self._result(self._post(u, data=src, params=params))
if image:
params['fromImage'] = image
return self._result(self._post(u, data=None, params=params))
raise Exception("Must specify a src or image")
def info(self):
return self._result(self._get(self._url("/info")),
True)
def insert(self, image, url, path):
if utils.compare_version('1.12', self._version) >= 0:
raise errors.DeprecatedMethod(
'insert is not available for API version >=1.12'
)
api_url = self._url("/images/" + image + "/insert")
params = {
'url': url,
'path': path
}
return self._result(self._post(api_url, params=params))
def inspect_container(self, container):
if isinstance(container, dict):
container = container.get('Id')
return self._result(
self._get(self._url("/containers/{0}/json".format(container))),
True)
def inspect_image(self, image_id):
return self._result(
self._get(self._url("/images/{0}/json".format(image_id))),
True
)
def kill(self, container, signal=None):
if isinstance(container, dict):
container = container.get('Id')
url = self._url("/containers/{0}/kill".format(container))
params = {}
if signal is not None:
params['signal'] = signal
res = self._post(url, params=params)
self._raise_for_status(res)
def load_image(self, data):
res = self._post(self._url("/images/load"), data=data)
self._raise_for_status(res)
def login(self, username, password=None, email=None, registry=None,
reauth=False):
# If we don't have any auth data so far, try reloading the config file
# one more time in case anything showed up in there.
if not self._auth_configs:
self._auth_configs = auth.load_config()
registry = registry or auth.INDEX_URL
authcfg = auth.resolve_authconfig(self._auth_configs, registry)
# If we found an existing auth config for this registry and username
# combination, we can return it immediately unless reauth is requested.
if authcfg and authcfg.get('username', None) == username \
and not reauth:
return authcfg
req_data = {
'username': username,
'password': password,
'email': email,
'serveraddress': registry,
}
response = self._post_json(self._url('/auth'), data=req_data)
if response.status_code == 200:
self._auth_configs[registry] = req_data
return self._result(response, json=True)
def logs(self, container, stdout=True, stderr=True, stream=False,
timestamps=False):
if isinstance(container, dict):
container = container.get('Id')
if utils.compare_version('1.11', self._version) >= 0:
params = {'stderr': stderr and 1 or 0,
'stdout': stdout and 1 or 0,
'timestamps': timestamps and 1 or 0,
'follow': stream and 1 or 0}
url = self._url("/containers/{0}/logs".format(container))
res = self._get(url, params=params, stream=stream)
if stream:
return self._multiplexed_socket_stream_helper(res)
elif six.PY3:
return bytes().join(
[x for x in self._multiplexed_buffer_helper(res)]
)
else:
return str().join(
[x for x in self._multiplexed_buffer_helper(res)]
)
return self.attach(
container,
stdout=stdout,
stderr=stderr,
stream=stream,
logs=True
)
def ping(self):
return self._result(self._get(self._url('/_ping')))
def port(self, container, private_port):
if isinstance(container, dict):
container = container.get('Id')
res = self._get(self._url("/containers/{0}/json".format(container)))
self._raise_for_status(res)
json_ = res.json()
s_port = str(private_port)
h_ports = None
h_ports = json_['NetworkSettings']['Ports'].get(s_port + '/udp')
if h_ports is None:
h_ports = json_['NetworkSettings']['Ports'].get(s_port + '/tcp')
return h_ports
def pull(self, repository, tag=None, stream=False):
if not tag:
repository, tag = utils.parse_repository_tag(repository)
registry, repo_name = auth.resolve_repository_name(repository)
if repo_name.count(":") == 1:
repository, tag = repository.rsplit(":", 1)
params = {
'tag': tag,
'fromImage': repository
}
headers = {}
if utils.compare_version('1.5', self._version) >= 0:
# If we don't have any auth data so far, try reloading the config
# file one more time in case anything showed up in there.
if not self._auth_configs:
self._auth_configs = auth.load_config()
authcfg = auth.resolve_authconfig(self._auth_configs, registry)
# Do not fail here if no authentication exists for this specific
# registry as we can have a readonly pull. Just put the header if
# we can.
if authcfg:
headers['X-Registry-Auth'] = auth.encode_header(authcfg)
response = self._post(self._url('/images/create'), params=params,
headers=headers, stream=stream, timeout=None)
if stream:
return self._stream_helper(response)
else:
return self._result(response)
def push(self, repository, stream=False):
registry, repo_name = auth.resolve_repository_name(repository)
u = self._url("/images/{0}/push".format(repository))
headers = {}
if utils.compare_version('1.5', self._version) >= 0:
# If we don't have any auth data so far, try reloading the config
# file one more time in case anything showed up in there.
if not self._auth_configs:
self._auth_configs = auth.load_config()
authcfg = auth.resolve_authconfig(self._auth_configs, registry)
# Do not fail here if no authentication exists for this specific
# registry as we can have a readonly pull. Just put the header if
# we can.
if authcfg:
headers['X-Registry-Auth'] = auth.encode_header(authcfg)
response = self._post_json(u, None, headers=headers, stream=stream)
else:
response = self._post_json(u, None, stream=stream)
return stream and self._stream_helper(response) \
or self._result(response)
def remove_container(self, container, v=False, link=False, force=False):
if isinstance(container, dict):
container = container.get('Id')
params = {'v': v, 'link': link, 'force': force}
res = self._delete(self._url("/containers/" + container),
params=params)
self._raise_for_status(res)
def remove_image(self, image, force=False, noprune=False):
params = {'force': force, 'noprune': noprune}
res = self._delete(self._url("/images/" + image), params=params)
self._raise_for_status(res)
def restart(self, container, timeout=10):
if isinstance(container, dict):
container = container.get('Id')
params = {'t': timeout}
url = self._url("/containers/{0}/restart".format(container))
res = self._post(url, params=params)
self._raise_for_status(res)
def search(self, term):
return self._result(self._get(self._url("/images/search"),
params={'term': term}),
True)
def start(self, container, binds=None, port_bindings=None, lxc_conf=None,
publish_all_ports=False, links=None, privileged=False,
dns=None, dns_search=None, volumes_from=None, network_mode=None):
if isinstance(container, dict):
container = container.get('Id')
if isinstance(lxc_conf, dict):
formatted = []
for k, v in six.iteritems(lxc_conf):
formatted.append({'Key': k, 'Value': str(v)})
lxc_conf = formatted
start_config = {
'LxcConf': lxc_conf
}
if binds:
start_config['Binds'] = utils.convert_volume_binds(binds)
if port_bindings:
start_config['PortBindings'] = utils.convert_port_bindings(
port_bindings
)
start_config['PublishAllPorts'] = publish_all_ports
if links:
if isinstance(links, dict):
links = six.iteritems(links)
formatted_links = [
'{0}:{1}'.format(k, v) for k, v in sorted(links)
]
start_config['Links'] = formatted_links
start_config['Privileged'] = privileged
if utils.compare_version('1.10', self._version) >= 0:
if dns is not None:
start_config['Dns'] = dns
if volumes_from is not None:
if isinstance(volumes_from, six.string_types):
volumes_from = volumes_from.split(',')
start_config['VolumesFrom'] = volumes_from
else:
warning_message = ('{0!r} parameter is discarded. It is only'
' available for API version greater or equal'
' than 1.10')
if dns is not None:
warnings.warn(warning_message.format('dns'),
DeprecationWarning)
if volumes_from is not None:
warnings.warn(warning_message.format('volumes_from'),
DeprecationWarning)
if dns_search:
start_config['DnsSearch'] = dns_search
if network_mode:
start_config['NetworkMode'] = network_mode
url = self._url("/containers/{0}/start".format(container))
res = self._post_json(url, data=start_config)
self._raise_for_status(res)
def resize(self, container, height, width):
if isinstance(container, dict):
container = container.get('Id')
params = {'h': height, 'w': width}
url = self._url("/containers/{0}/resize".format(container))
res = self._post(url, params=params)
self._raise_for_status(res)
def stop(self, container, timeout=10):
if isinstance(container, dict):
container = container.get('Id')
params = {'t': timeout}
url = self._url("/containers/{0}/stop".format(container))
res = self._post(url, params=params,
timeout=max(timeout, self._timeout))
self._raise_for_status(res)
def tag(self, image, repository, tag=None, force=False):
params = {
'tag': tag,
'repo': repository,
'force': 1 if force else 0
}
url = self._url("/images/{0}/tag".format(image))
res = self._post(url, params=params)
self._raise_for_status(res)
return res.status_code == 201
def top(self, container):
u = self._url("/containers/{0}/top".format(container))
return self._result(self._get(u), True)
def version(self):
return self._result(self._get(self._url("/version")), True)
def wait(self, container):
if isinstance(container, dict):
container = container.get('Id')
url = self._url("/containers/{0}/wait".format(container))
res = self._post(url, timeout=None)
self._raise_for_status(res)
json_ = res.json()
if 'StatusCode' in json_:
return json_['StatusCode']
return -1
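This file was the vendored copy of the docker-py 0.3.2 client; fig now constructs the equivalent Client from the docker-py package pinned in requirements.txt (docker-py==0.4.0). A minimal sketch with illustrative values, assuming a local daemon is reachable:

from docker import Client

client = Client(base_url='unix://var/run/docker.sock',
                version='1.12', timeout=60)
print(client.version())  # talks to the daemon; requires Docker to be running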

View File

@ -1,65 +0,0 @@
# Copyright 2014 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
class APIError(requests.exceptions.HTTPError):
def __init__(self, message, response, explanation=None):
# requests 1.2 supports response as a keyword argument, but
# requests 1.1 doesn't
super(APIError, self).__init__(message)
self.response = response
self.explanation = explanation
if self.explanation is None and response.content:
self.explanation = response.content.strip()
def __str__(self):
message = super(APIError, self).__str__()
if self.is_client_error():
message = '%s Client Error: %s' % (
self.response.status_code, self.response.reason)
elif self.is_server_error():
message = '%s Server Error: %s' % (
self.response.status_code, self.response.reason)
if self.explanation:
message = '%s ("%s")' % (message, self.explanation)
return message
def is_client_error(self):
return 400 <= self.response.status_code < 500
def is_server_error(self):
return 500 <= self.response.status_code < 600
class DockerException(Exception):
pass
class InvalidRepository(DockerException):
pass
class InvalidConfigFile(DockerException):
pass
class DeprecatedMethod(DockerException):
pass
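The APIError hierarchy above also ships with docker-py proper, and the rest of this commit points fig's imports at docker.errors (see the surrounding hunks). A hedged sketch of a typical call site, assuming a running daemon and a container name that does not exist:

from docker import Client
from docker.errors import APIError

client = Client()
try:
    client.inspect_container('no-such-container')  # hypothetical name
except APIError as e:
    # e.is_client_error() is True for a 404; str(e) includes the server's
    # explanation, built exactly as in the class removed above
    print(str(e))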

View File

@ -1 +0,0 @@
from .unixconn import UnixAdapter # flake8: noqa

View File

@ -1,71 +0,0 @@
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fig.packages import six
if six.PY3:
import http.client as httplib
else:
import httplib
import requests.adapters
import socket
try:
import requests.packages.urllib3.connectionpool as connectionpool
except ImportError:
import urllib3.connectionpool as connectionpool
class UnixHTTPConnection(httplib.HTTPConnection, object):
def __init__(self, base_url, unix_socket, timeout=60):
httplib.HTTPConnection.__init__(self, 'localhost', timeout=timeout)
self.base_url = base_url
self.unix_socket = unix_socket
self.timeout = timeout
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.timeout)
sock.connect(self.base_url.replace("http+unix:/", ""))
self.sock = sock
def _extract_path(self, url):
# remove the base_url entirely..
return url.replace(self.base_url, "")
def request(self, method, url, **kwargs):
url = self._extract_path(self.unix_socket)
super(UnixHTTPConnection, self).request(method, url, **kwargs)
class UnixHTTPConnectionPool(connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60):
connectionpool.HTTPConnectionPool.__init__(self, 'localhost',
timeout=timeout)
self.base_url = base_url
self.socket_path = socket_path
self.timeout = timeout
def _new_conn(self):
return UnixHTTPConnection(self.base_url, self.socket_path,
self.timeout)
class UnixAdapter(requests.adapters.HTTPAdapter):
def __init__(self, base_url, timeout=60):
self.base_url = base_url
self.timeout = timeout
super(UnixAdapter, self).__init__()
def get_connection(self, socket_path, proxies=None):
return UnixHTTPConnectionPool(self.base_url, socket_path, self.timeout)

View File

@ -1,4 +0,0 @@
from .utils import (
compare_version, convert_port_bindings, convert_volume_binds,
mkbuildcontext, ping, tar, parse_repository_tag
) # flake8: noqa

View File

@ -1,147 +0,0 @@
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import tarfile
import tempfile
from distutils.version import StrictVersion
import requests
from fig.packages import six
def mkbuildcontext(dockerfile):
f = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w', fileobj=f)
if isinstance(dockerfile, io.StringIO):
dfinfo = tarfile.TarInfo('Dockerfile')
if six.PY3:
raise TypeError('Please use io.BytesIO to create in-memory '
'Dockerfiles with Python 3')
else:
dfinfo.size = len(dockerfile.getvalue())
elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue())
else:
dfinfo = t.gettarinfo(fileobj=dockerfile, arcname='Dockerfile')
t.addfile(dfinfo, dockerfile)
t.close()
f.seek(0)
return f
def tar(path):
f = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w', fileobj=f)
t.add(path, arcname='.')
t.close()
f.seek(0)
return f
def compare_version(v1, v2):
"""Compare docker versions
>>> v1 = '1.9'
>>> v2 = '1.10'
>>> compare_version(v1, v2)
1
>>> compare_version(v2, v1)
-1
>>> compare_version(v2, v2)
0
"""
s1 = StrictVersion(v1)
s2 = StrictVersion(v2)
if s1 == s2:
return 0
elif s1 > s2:
return -1
else:
return 1
def ping(url):
try:
res = requests.get(url)
except Exception:
return False
else:
return res.status_code < 400
def _convert_port_binding(binding):
result = {'HostIp': '', 'HostPort': ''}
if isinstance(binding, tuple):
if len(binding) == 2:
result['HostPort'] = binding[1]
result['HostIp'] = binding[0]
elif isinstance(binding[0], six.string_types):
result['HostIp'] = binding[0]
else:
result['HostPort'] = binding[0]
elif isinstance(binding, dict):
if 'HostPort' in binding:
result['HostPort'] = binding['HostPort']
if 'HostIp' in binding:
result['HostIp'] = binding['HostIp']
else:
raise ValueError(binding)
else:
result['HostPort'] = binding
if result['HostPort'] is None:
result['HostPort'] = ''
else:
result['HostPort'] = str(result['HostPort'])
return result
def convert_port_bindings(port_bindings):
result = {}
for k, v in six.iteritems(port_bindings):
key = str(k)
if '/' not in key:
key = key + '/tcp'
if isinstance(v, list):
result[key] = [_convert_port_binding(binding) for binding in v]
else:
result[key] = [_convert_port_binding(v)]
return result
def convert_volume_binds(binds):
result = []
for k, v in binds.items():
if isinstance(v, dict):
result.append('%s:%s:%s' % (
k, v['bind'], 'ro' if v.get('ro', False) else 'rw'
))
else:
result.append('%s:%s:rw' % (k, v))
return result
def parse_repository_tag(repo):
column_index = repo.rfind(':')
if column_index < 0:
return repo, None
tag = repo[column_index+1:]
slash_index = tag.find('/')
if slash_index < 0:
return repo[:column_index], tag
return repo, None
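The removed helpers are likewise available from docker-py as docker.utils. Their behaviour, as defined by the code above (note that compare_version returns 1 when the second argument is the newer version):

from docker.utils import compare_version, parse_repository_tag

assert parse_repository_tag('ubuntu') == ('ubuntu', None)
assert parse_repository_tag('ubuntu:14.04') == ('ubuntu', '14.04')
assert compare_version('1.9', '1.10') == 1
assert compare_version('1.10', '1.9') == -1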

View File

@ -1 +0,0 @@
version = "0.3.2"

View File

@ -1,404 +0,0 @@
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2013 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.3.0"
# True if we are running on Python 3.
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
_iterlists = "lists"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
_iterlists = "iterlists"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
def iterkeys(d, **kw):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)(**kw))
def itervalues(d, **kw):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)(**kw))
def iteritems(d, **kw):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)(**kw))
def iterlists(d, **kw):
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
return iter(getattr(d, _iterlists)(**kw))
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
int2byte = chr
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
import builtins
exec_ = getattr(builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
def print_(*args, **kwargs):
"""The new-style print function."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("NewBase", (base,), {})
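With the bundled copy gone, fig depends on the PyPI six package (six==1.7.3 in requirements.txt, 'six >= 1.3.0, < 2' in setup.py). Typical usage in the codebase, shown as a small self-contained sketch:

import six

options = {'image': 'ubuntu', 'command': 'true'}
for key, value in six.iteritems(options):
    assert isinstance(key, six.string_types)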

View File

@ -4,7 +4,7 @@ import logging
from .service import Service
from .container import Container
from .packages.docker.errors import APIError
from docker.errors import APIError
log = logging.getLogger(__name__)

View File

@ -1,7 +1,7 @@
from __future__ import unicode_literals
from __future__ import absolute_import
from collections import namedtuple
from .packages.docker.errors import APIError
from docker.errors import APIError
import logging
import re
import os

View File

@ -1,5 +1,5 @@
mock==1.0.1
nose==1.3.0
pyinstaller==2.1
mock >= 1.0.1
nose
git+https://github.com/pyinstaller/pyinstaller.git@12e40471c77f588ea5be352f7219c873ddaae056#egg=pyinstaller
unittest2
flake8

View File

@ -1,6 +1,8 @@
docopt==0.6.1
PyYAML==3.10
docker-py==0.4.0
dockerpty==0.2.3
docopt==0.6.1
requests==2.2.1
six==1.7.3
texttable==0.8.1
websocket-client==0.11.0
dockerpty==0.2.3

View File

@ -2,7 +2,8 @@
set -ex
rm -rf venv
virtualenv venv
venv/bin/pip install pyinstaller==2.1
venv/bin/pip install -r requirements.txt
venv/bin/pip install -r requirements-dev.txt
venv/bin/pip install .
venv/bin/pyinstaller -F bin/fig
dist/fig --version

View File

@ -3,9 +3,10 @@
from __future__ import unicode_literals
from __future__ import absolute_import
from setuptools import setup, find_packages
import re
import os
import codecs
import os
import re
import sys
def read(*parts):
@ -30,11 +31,21 @@ install_requires = [
'texttable >= 0.8.1, < 0.9',
'websocket-client >= 0.11.0, < 0.12',
'dockerpty >= 0.2.3, < 0.3',
'docker-py >= 0.3.2, < 0.5',
'six >= 1.3.0, < 2',
]
tests_require = [
'mock >= 1.0.1',
'nose',
'pyinstaller',
'flake8',
]
with open('requirements-dev.txt') as f:
tests_require = f.read().splitlines()
if sys.version_info < (2, 7):
tests_require.append('unittest2')
setup(
name='fig',

View File

@ -1,7 +1,7 @@
from __future__ import absolute_import
import sys
from fig.packages.six import StringIO
from six import StringIO
from mock import patch
from .testcases import DockerClientTestCase

View File

@ -5,7 +5,7 @@ import os
from fig import Service
from fig.service import CannotBeScaledError
from fig.container import Container
from fig.packages.docker.errors import APIError
from docker.errors import APIError
from .testcases import DockerClientTestCase

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from .. import unittest

View File

@ -8,7 +8,7 @@ import mock
from fig.cli import main
from fig.cli.main import TopLevelCommand
from fig.packages.six import StringIO
from six import StringIO
class CLITestCase(unittest.TestCase):

View File

@ -2,7 +2,7 @@ from __future__ import unicode_literals
from .. import unittest
import mock
from fig.packages import docker
import docker
from fig.container import Container

View File

@ -5,7 +5,7 @@ import os
from .. import unittest
import mock
from fig.packages import docker
import docker
from fig import Service
from fig.service import (

View File

@ -2,6 +2,7 @@
envlist = py26,py27,py32,py33,pypy
[testenv]
usedevelop=True
deps =
-rrequirements.txt
-rrequirements-dev.txt
@ -12,4 +13,3 @@ commands =
[flake8]
# ignore line-length for now
ignore = E501,E203
exclude = fig/packages/