dockercloud
This commit is contained in:
parent
86f4a232ff
commit
2d9586a8a1
41 changed files with 2701 additions and 2 deletions
49
dockercloud/__init__.py
Normal file
49
dockercloud/__init__.py
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
import base64
import logging
import os

import requests
from future.standard_library import install_aliases

install_aliases()

from dockercloud.api import auth
from dockercloud.api.service import Service
from dockercloud.api.container import Container
from dockercloud.api.repository import Repository
from dockercloud.api.node import Node
from dockercloud.api.action import Action
from dockercloud.api.nodecluster import NodeCluster
from dockercloud.api.nodetype import NodeType
from dockercloud.api.nodeprovider import Provider
from dockercloud.api.noderegion import Region
from dockercloud.api.tag import Tag
from dockercloud.api.trigger import Trigger
from dockercloud.api.stack import Stack
from dockercloud.api.exceptions import ApiError, AuthError, ObjectNotFound, NonUniqueIdentifier
from dockercloud.api.utils import Utils
from dockercloud.api.events import Events
from dockercloud.api.nodeaz import AZ

__version__ = '1.0.0'


def _b64_basic_auth(user, secret):
    """Return the base64 'user:secret' token as a native str.

    base64.b64encode requires bytes on Python 3 (passing a str raises
    TypeError), so encode first and decode the result back to str.
    """
    return base64.b64encode(("%s:%s" % (user, secret)).encode("utf-8")).decode("ascii")


# Credential sources, later ones overriding earlier ones:
# docker CLI config file, then DOCKERCLOUD_USER + DOCKERCLOUD_PASS,
# then DOCKERCLOUD_USER + DOCKERCLOUD_APIKEY. A pre-built DOCKERCLOUD_AUTH
# header value is kept separately and preferred by the auth module.
dockercloud_auth = os.environ.get('DOCKERCLOUD_AUTH')
basic_auth = auth.load_from_file("~/.docker/config.json")

if os.environ.get('DOCKERCLOUD_USER') and os.environ.get('DOCKERCLOUD_PASS'):
    basic_auth = _b64_basic_auth(os.environ.get('DOCKERCLOUD_USER'), os.environ.get('DOCKERCLOUD_PASS'))
if os.environ.get('DOCKERCLOUD_USER') and os.environ.get('DOCKERCLOUD_APIKEY'):
    basic_auth = _b64_basic_auth(os.environ.get('DOCKERCLOUD_USER'), os.environ.get('DOCKERCLOUD_APIKEY'))

# API endpoints; overridable via the environment for testing/staging.
rest_host = os.environ.get("DOCKERCLOUD_REST_HOST") or 'https://cloud.docker.com/'
stream_host = os.environ.get("DOCKERCLOUD_STREAM_HOST") or 'wss://ws.cloud.docker.com/'

# Optional extra User-Agent prefix set by embedding applications.
user_agent = None

logging.basicConfig()
logger = logging.getLogger("python-dockercloud")

try:
    # Quiet urllib3's TLS warnings; best effort — the attribute path
    # varies between requests versions, so failures are ignored.
    requests.packages.urllib3.disable_warnings()
except Exception:
    pass
|
||||
0
dockercloud/api/__init__.py
Normal file
0
dockercloud/api/__init__.py
Normal file
23
dockercloud/api/action.py
Normal file
23
dockercloud/api/action.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Immutable, StreamingLog
|
||||
|
||||
|
||||
class Action(Immutable):
    """Read-only record of an API action in the audit subsystem."""

    subsystem = 'audit'
    endpoint = "/action"

    @classmethod
    def _pk_key(cls):
        """Actions are addressed by uuid."""
        return 'uuid'

    def logs(self, tail, follow, log_handler=StreamingLog.default_log_handler):
        """Stream this action's log, feeding every message to ``log_handler``."""
        stream = StreamingLog(self.subsystem, self.endpoint, self.pk, tail, follow)
        stream.on_message(log_handler)
        stream.run_forever()

    def cancel(self):
        """Ask the API to cancel this action."""
        return self._perform_action("cancel")

    def retry(self):
        """Ask the API to retry this action."""
        return self._perform_action("retry")
|
||||
67
dockercloud/api/auth.py
Normal file
67
dockercloud/api/auth.py
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
import dockercloud
|
||||
from .http import send_request
|
||||
|
||||
|
||||
def authenticate(username, password):
    """Verify the credentials against the API and store them for later requests.

    :param username: Docker Cloud user name
    :param password: Docker Cloud password
    :raises AuthError: (via verify_credential) when the credentials are rejected
    """
    verify_credential(username, password)
    # b64encode requires bytes on Python 3; decode the result back to a
    # native str so it can be interpolated into the Authorization header.
    token = ("%s:%s" % (username, password)).encode("utf-8")
    dockercloud.basic_auth = base64.b64encode(token).decode("ascii")
|
||||
|
||||
|
||||
def verify_credential(username, password):
    """Probe the /auth endpoint with HTTP basic auth; raises on bad credentials."""
    send_request("GET", "/auth", auth=HTTPBasicAuth(username, password))
|
||||
|
||||
|
||||
def is_authenticated():
    """Return a truthy value when some form of credentials is available.

    ``dockercloud.user`` / ``password`` / ``apikey`` are optional module
    attributes; the AttributeError raised when they are absent is
    deliberately swallowed and basic_auth is left as it was.
    """
    try:
        # b64encode requires bytes on Python 3; keep the stored token a str.
        token = ("%s:%s" % (dockercloud.user, dockercloud.password)).encode("utf-8")
        dockercloud.basic_auth = base64.b64encode(token).decode("ascii")
    except Exception:
        pass

    try:
        token = ("%s:%s" % (dockercloud.user, dockercloud.apikey)).encode("utf-8")
        dockercloud.basic_auth = base64.b64encode(token).decode("ascii")
    except Exception:
        pass

    return dockercloud.dockercloud_auth or dockercloud.basic_auth
|
||||
|
||||
|
||||
def logout():
    """Forget any stored credentials (both the auth token and basic auth)."""
    dockercloud.dockercloud_auth = None
    dockercloud.basic_auth = None
|
||||
|
||||
|
||||
def load_from_file(f="~/.docker/config.json"):
    """Read the Docker Hub basic-auth token from a docker CLI config file.

    :param f: path to the config file (``~`` is expanded)
    :returns: str or None -- the stored "auth" token for the Docker Hub
        registry, or None when the file is missing, unreadable, or has no
        such entry
    """
    path = os.path.expanduser(f)
    try:
        with open(path) as config_file:
            config = json.load(config_file)
        hub_entry = config.get("auths", {}).get("https://index.docker.io/v1/", {})
        return hub_entry.get("auth", None)
    except Exception:
        # Missing file, bad JSON, unexpected structure: no token available.
        return None
|
||||
|
||||
|
||||
def get_auth_header():
    """Build the Authorization header dict for API requests.

    Refreshes ``dockercloud.basic_auth`` from ``dockercloud.user`` plus
    ``password`` / ``apikey`` when those optional module attributes exist
    (AttributeError for missing ones is deliberately swallowed), then
    prefers the DOCKERCLOUD_AUTH token over basic auth.

    :returns: dict -- ``{'Authorization': ...}`` or ``{}`` when no
        credentials are available
    """
    try:
        # b64encode requires bytes on Python 3; decode back to str so the
        # 'Basic %s' interpolation below does not produce "b'...'".
        token = ("%s:%s" % (dockercloud.user, dockercloud.password)).encode("utf-8")
        dockercloud.basic_auth = base64.b64encode(token).decode("ascii")
    except Exception:
        pass

    try:
        token = ("%s:%s" % (dockercloud.user, dockercloud.apikey)).encode("utf-8")
        dockercloud.basic_auth = base64.b64encode(token).decode("ascii")
    except Exception:
        pass

    if dockercloud.dockercloud_auth:
        return {'Authorization': dockercloud.dockercloud_auth}
    if dockercloud.basic_auth:
        return {'Authorization': 'Basic %s' % dockercloud.basic_auth}
    return {}
|
||||
351
dockercloud/api/base.py
Normal file
351
dockercloud/api/base.py
Normal file
|
|
@ -0,0 +1,351 @@
|
|||
from __future__ import absolute_import, print_function
|
||||
|
||||
import json as json_parser
|
||||
import logging
|
||||
import urllib
|
||||
|
||||
import websocket
|
||||
|
||||
import dockercloud
|
||||
from .exceptions import ApiError, AuthError
|
||||
from .http import send_request
|
||||
|
||||
logger = logging.getLogger("python-dockercloud")
|
||||
|
||||
|
||||
class BasicObject(object):
    """Root of the API object hierarchy; pins the API version used in URIs."""

    _api_version = 'v1'

    def __init__(self, **kwargs):
        # Accepts (and ignores) arbitrary keyword arguments so that
        # subclasses can freely forward construction kwargs.
        pass
|
||||
|
||||
|
||||
class Restful(BasicObject):
    """Base for REST-backed models with dirty-attribute tracking.

    Every attribute write is intercepted by __setattr__ and recorded in the
    hidden __changedattrs__ list, so subclasses (see Mutable.save) can send
    only the attributes that actually changed.
    """

    # URI of this specific object once it is known to exist remotely;
    # None means the object has never been saved/fetched.
    _detail_uri = None

    def __init__(self, **kwargs):
        """Simply reflect all the values in kwargs"""
        for k, v in list(kwargs.items()):
            setattr(self, k, v)

    def __addchanges__(self, name):
        # Internal. Record `name` as changed (at most once).
        changed_attrs = self.__getchanges__()
        if not name in changed_attrs:
            changed_attrs.append(name)
        self.__setchanges__(changed_attrs)

    def __setattr__(self, name, value):
        """Keeps track of what attributes have been set"""
        # Only writes that actually change the value are recorded as dirty.
        current_value = getattr(self, name, None)
        if value != current_value:
            self.__addchanges__(name)
        super(Restful, self).__setattr__(name, value)

    def __getchanges__(self):
        """Internal. Convenience method to get the changed attrs list"""
        return getattr(self, '__changedattrs__', [])

    def __setchanges__(self, val):
        """Internal. Convenience method to set the changed attrs list"""
        # Use the super implementation to prevent infinite recursion
        super(Restful, self).__setattr__('__changedattrs__', val)

    def _loaddict(self, dict):
        """Internal. Sets the model attributes to the dictionary values passed"""
        endpoint = getattr(self, 'endpoint', None)
        subsystem = getattr(self, 'subsystem', None)
        assert endpoint, "Endpoint not specified for %s" % self.__class__.__name__
        assert subsystem, "Subsystem not specified for %s" % self.__class__.__name__
        for k, v in list(dict.items()):
            setattr(self, k, v)
        # The object now mirrors the server copy: remember its detail URI
        # and reset the dirty-attribute list.
        self._detail_uri = "/".join(["api", subsystem, self._api_version, endpoint.strip("/"), self.pk])
        self.__setchanges__([])

    @property
    def pk(self):
        """Returns the primary key for the object.

        :returns: str -- the primary key for the object
        """
        return getattr(self, self._pk_key(), None)

    @classmethod
    def _pk_key(cls):
        """Internal. Returns the attribute name that acts as primary key of the model. Can be overridden by subclasses.

        :returns: str -- the name of the primary key attribute for the model
        """
        return 'uuid'

    @property
    def is_dirty(self):
        """Returns whether or not the object has unsaved changes

        :returns: bool -- whether or not the object has unsaved changes
        """
        return len(self.__getchanges__()) > 0

    def _perform_action(self, action, params=None, data={}):
        """Internal. Performs the specified action on the object remotely"""
        # NOTE(review): `data={}` is a shared mutable default; the dict is
        # never mutated here, but confirm before relying on it elsewhere.
        success = False
        if not self._detail_uri:
            raise ApiError("You must save the object before performing this operation")
        path = "/".join([self._detail_uri.rstrip("/"), action.lstrip("/")])
        json = send_request("POST", path, params=params, data=data)
        if json:
            # The API returned the updated object: mirror it locally.
            self._loaddict(json)
            success = True
        return success

    def _expand_attribute(self, attribute):
        """Internal. Expands the given attribute from remote information"""
        if not self._detail_uri:
            raise ApiError("You must save the object before performing this operation")
        path = "/".join([self._detail_uri, attribute])
        json = send_request("GET", path)
        if json:
            return json[attribute]
        return None

    def get_all_attributes(self):
        """Returns a dict with all object attributes

        :returns: dict -- all object attributes as a dict
        """
        # Underscore-prefixed attributes (internal bookkeeping) are skipped.
        attributes = {}
        for attr in [attr for attr in vars(self) if not attr.startswith('_')]:
            attributes[attr] = getattr(self, attr, None)
        return attributes
|
||||
|
||||
|
||||
class Immutable(Restful):
    """REST model that can be fetched and listed but not created or updated."""

    @classmethod
    def fetch(cls, pk):
        """Fetch one object by primary key.

        :param pk: primary key value (see _pk_key)
        :returns: model instance, or None when the API returned no body
        """
        instance = None
        endpoint = getattr(cls, 'endpoint', None)
        subsystem = getattr(cls, 'subsystem', None)
        assert endpoint, "Endpoint not specified for %s" % cls.__name__
        assert subsystem, "Subsystem not specified for %s" % cls.__name__
        detail_uri = "/".join(["api", subsystem, cls._api_version, endpoint.strip("/"), pk])
        json = send_request('GET', detail_uri)
        if json:
            instance = cls()
            instance._loaddict(json)
        return instance

    @classmethod
    def list(cls, limit=None, **kwargs):
        """List objects, following server-side pagination.

        :param limit: optional client-side cap on the number of objects
        :param kwargs: extra query parameters passed through to the API
        :returns: list -- model instances
        """
        restful = []
        endpoint = getattr(cls, 'endpoint', None)
        subsystem = getattr(cls, 'subsystem', None)
        assert endpoint, "Endpoint not specified for %s" % cls.__name__
        assert subsystem, "Subsystem not specified for %s" % cls.__name__

        detail_uri = "/".join(["api", subsystem, cls._api_version, endpoint.strip("/")])
        objects = []
        while True:
            if limit and len(objects) >= limit:
                break
            json = send_request('GET', detail_uri, params=kwargs)
            objs = json.get('objects', [])
            meta = json.get('meta', {})
            next_url = meta.get('next', '')
            offset = meta.get('offset', 0)
            api_limit = meta.get('limit', 0)
            objects.extend(objs)
            if next_url:
                # More pages: advance offset by the server's page size.
                kwargs['offset'] = offset + api_limit
                kwargs['limit'] = api_limit
            else:
                break
        if limit:
            # The last page may overshoot the requested cap.
            objects = objects[:limit]
        for obj in objects:
            instance = cls()
            instance._loaddict(obj)
            restful.append(instance)
        return restful

    def refresh(self, force=False):
        """Re-fetch this object from the API.

        :param force: refresh even when there are unsaved local changes
        :returns: bool -- True when the object was reloaded
        :raises ApiError: when the object has never been saved/fetched
        """
        success = False
        if self.is_dirty and not force:
            # We have local non-committed changes - rejecting the refresh
            success = False
        elif not self._detail_uri:
            raise ApiError("You must save the object before performing this operation")
        else:
            json = send_request("GET", self._detail_uri)
            if json:
                self._loaddict(json)
                success = True
        return success
|
||||
|
||||
|
||||
class Mutable(Immutable):
    """REST model that additionally supports create, update (save) and delete."""

    @classmethod
    def create(cls, **kwargs):
        """Returns a new instance of the model (without saving it) with the attributes specified in ``kwargs``

        :returns: RESTModel -- a new local instance of the model
        """
        return cls(**kwargs)

    def delete(self):
        """Delete the object remotely.

        :returns: bool -- always True when the request succeeded
        :raises ApiError: when the object has never been saved
        """
        if not self._detail_uri:
            raise ApiError("You must save the object before performing this operation")
        action = "DELETE"
        url = self._detail_uri
        json = send_request(action, url)
        if json:
            # The API answered with an updated representation: mirror it.
            self._loaddict(json)
        else:
            # Object deleted successfully and nothing came back - deleting PK reference.
            self._detail_uri = None
            # setattr(self, self._pk_key(), None) -- doesn't work
            self.__setchanges__([])
        return True

    def save(self):
        """Create (POST) or update (PATCH) the object, sending only changed attributes.

        :returns: bool -- True when the server accepted the change (or
            there was nothing to save)
        """
        success = False
        if not self.is_dirty:
            # No changes
            success = True
        else:
            cls = self.__class__
            endpoint = getattr(cls, 'endpoint', None)
            subsystem = getattr(cls, 'subsystem', None)
            assert endpoint, "Endpoint not specified for %s" % self.__class__.__name__
            assert subsystem, "Subsystem not specified for %s" % self.__class__.__name__
            # Figure out whether we should do a create or update
            if not self._detail_uri:
                action = "POST"
                path = "/".join(["api", subsystem, self._api_version, endpoint.lstrip("/")])
            else:
                action = "PATCH"
                path = self._detail_uri
            # Construct the necessary params
            params = {}
            for attr in self.__getchanges__():
                value = getattr(self, attr, None)
                params[attr] = value
            # Construct the json body
            payload = None
            if params:
                payload = json_parser.dumps(params)
            if not payload:
                payload = json_parser.dumps({})
            # Make the request
            success = False
            json = send_request(action, path, data=payload)
            if json:
                # _loaddict also clears the dirty-attribute list.
                self._loaddict(json)
                success = True
        return success
|
||||
|
||||
|
||||
class Taggable(BasicObject):
    """Marker base: subclasses expose a ``tags`` list manageable via ``Tag``."""
    pass
|
||||
|
||||
|
||||
class Triggerable(BasicObject):
    """Marker base: subclasses can have webhooks managed via ``Trigger``."""
    pass
|
||||
|
||||
|
||||
class StreamingAPI(BasicObject):
    """Base class for websocket endpoints (logs, exec, events).

    Builds the handshake headers (user agent + auth) once at construction
    time and exposes on_open/on_message/on_error/on_close hooks for callers.
    """

    def __init__(self, url):
        self._ws_init(url)

    def _ws_init(self, url):
        # Internal. Prepare connection state; no socket is opened until
        # run_forever() is called.
        self.url = url

        user_agent = 'python-dockercloud/%s' % dockercloud.__version__
        if dockercloud.user_agent:
            # Embedding applications may prepend their own identifier.
            user_agent = "%s %s" % (dockercloud.user_agent, user_agent)
        header = {'User-Agent': user_agent}
        header.update(dockercloud.auth.get_auth_header())
        # websocket-client expects headers as a list of "Key: value" strings.
        self.header = [": ".join([key, value]) for key, value in header.items()]
        logger.info("websocket: %s %s" % (self.url, self.header))
        self.open_handler = None
        self.message_handler = None
        self.error_handler = None
        self.close_handler = None
        # Set by subclasses when the server rejects our credentials, to
        # stop the reconnect loop in run_forever().
        self.auth_error = False

    def _on_open(self, ws):
        # Internal websocket callback: forward to the registered handler.
        if self.open_handler:
            self.open_handler()

    def _on_message(self, ws, message):
        # Internal websocket callback: forward to the registered handler.
        if self.message_handler:
            self.message_handler(message)

    def _on_error(self, ws, error):
        # Internal websocket callback: forward to the registered handler.
        if self.error_handler:
            self.error_handler(error)

    def _on_close(self, ws):
        # Internal websocket callback: forward to the registered handler.
        if self.close_handler:
            self.close_handler()

    def on_open(self, handler):
        """Register a callback invoked when the connection opens."""
        self.open_handler = handler

    def on_message(self, handler):
        """Register a callback invoked with every incoming message."""
        self.message_handler = handler

    def on_error(self, handler):
        """Register a callback invoked with every connection error."""
        self.error_handler = handler

    def on_close(self, handler):
        """Register a callback invoked when the connection closes."""
        self.close_handler = handler

    def run_forever(self, *args, **kwargs):
        """Connect and reconnect forever; raises AuthError once auth_error is set."""
        while True:
            if getattr(self, "auth_error", False):
                raise AuthError("Not authorized")
            ws = websocket.WebSocketApp(self.url, header=self.header,
                                        on_open=self._on_open,
                                        on_message=self._on_message,
                                        on_error=self._on_error,
                                        on_close=self._on_close)
            # Keep-alive pings every 5s; extra args go to websocket-client.
            ws.run_forever(ping_interval=5, ping_timeout=5, *args, **kwargs)
|
||||
|
||||
|
||||
class StreamingLog(StreamingAPI):
    """Websocket stream of log lines for one API object (service/container/action)."""

    def __init__(self, subsystem, resource, uuid, tail, follow):
        """
        :param subsystem: API subsystem, e.g. "app" or "audit"
        :param resource: endpoint of the logged resource, e.g. "/container"
        :param uuid: primary key of the logged object
        :param tail: number of trailing lines to request (falsy = no tail param)
        :param follow: whether to keep the stream open for new lines
        """
        endpoint = "%s/%s/logs/?follow=%s" % (resource, uuid, str(follow).lower())
        if tail:
            endpoint = "%s&tail=%d" % (endpoint, tail)
        url = "/".join([dockercloud.stream_host.rstrip("/"), "api", subsystem, self._api_version, endpoint.lstrip("/")])
        # Name the base class explicitly: super(self.__class__, ...) recurses
        # infinitely as soon as StreamingLog itself is subclassed.
        super(StreamingLog, self).__init__(url)

    @staticmethod
    def default_log_handler(message):
        """Default handler: echo each log message to stdout."""
        print(message)

    def run_forever(self, *args, **kwargs):
        """Open the stream once (no reconnect loop, unlike StreamingAPI)."""
        ws = websocket.WebSocketApp(self.url, header=self.header,
                                    on_open=self._on_open,
                                    on_message=self._on_message,
                                    on_error=self._on_error,
                                    on_close=self._on_close)
        ws.run_forever(ping_interval=5, ping_timeout=5, *args, **kwargs)
|
||||
|
||||
|
||||
class Exec(StreamingAPI):
    """Interactive websocket session running a command inside a container."""

    def __init__(self, uuid, cmd='sh'):
        """
        :param uuid: container uuid
        :param cmd: command to execute inside the container (default 'sh')
        """
        # ``urllib.quote_plus`` only exists on Python 2; ``urllib.parse``
        # is the portable location (the ``future`` aliases provide it on
        # py2 as well), so import it locally here.
        from urllib.parse import quote_plus
        endpoint = "container/%s/exec/?command=%s" % (uuid, quote_plus(cmd))
        url = "/".join([dockercloud.stream_host.rstrip("/"), "api", "app", self._api_version, endpoint.lstrip("/")])
        # Name the base class explicitly: super(self.__class__, ...)
        # recurses infinitely in any subclass.
        super(Exec, self).__init__(url)

    @staticmethod
    def default_message_handler(message):
        """Default handler: echo each message to stdout."""
        print(message)

    def run_forever(self, *args, **kwargs):
        """Open the session once (no reconnect loop, unlike StreamingAPI)."""
        ws = websocket.WebSocketApp(self.url, header=self.header,
                                    on_open=self._on_open,
                                    on_message=self._on_message,
                                    on_error=self._on_error,
                                    on_close=self._on_close)
        ws.run_forever(ping_interval=5, ping_timeout=5, *args, **kwargs)
|
||||
33
dockercloud/api/container.py
Normal file
33
dockercloud/api/container.py
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Mutable, StreamingLog, Exec
|
||||
|
||||
|
||||
class Container(Mutable):
    """A single container of a service (app subsystem)."""

    subsystem = "app"
    endpoint = "/container"

    def save(self):
        """Containers cannot be edited directly; change the owning Service."""
        raise AttributeError("'save' is not supported in 'Container' object. "
                             "Please use the related 'Service' object instead.")

    def start(self):
        """Start the container."""
        return self._perform_action("start")

    def stop(self):
        """Stop the container."""
        return self._perform_action("stop")

    def redeploy(self, reuse_volumes=True):
        """Redeploy the container, optionally reusing its volumes."""
        return self._perform_action("redeploy", params={'reuse_volumes': reuse_volumes})

    def logs(self, tail, follow, log_handler=StreamingLog.default_log_handler):
        """Stream the container log, feeding every message to ``log_handler``."""
        stream = StreamingLog(self.subsystem, self.endpoint, self.pk, tail, follow)
        stream.on_message(log_handler)
        stream.run_forever()

    def execute(self, cmd, handler=Exec.default_message_handler):
        """Run ``cmd`` inside the container over a websocket.

        Silently does nothing when the container has no uuid attribute.
        """
        if hasattr(self, "uuid"):
            session = Exec(self.uuid, cmd)
            session.on_message(handler)
            session.run_forever()
|
||||
42
dockercloud/api/events.py
Normal file
42
dockercloud/api/events.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
|
||||
import websocket
|
||||
|
||||
import dockercloud
|
||||
from .base import StreamingAPI
|
||||
from .exceptions import AuthError
|
||||
|
||||
|
||||
class Events(StreamingAPI):
    """Websocket stream of audit events for the whole account."""

    def __init__(self):
        endpoint = "events"
        url = "/".join([dockercloud.stream_host.rstrip("/"), "api", "audit", self._api_version, endpoint.lstrip("/")])
        # Name the class explicitly: super(self.__class__, ...) recurses
        # infinitely as soon as Events is subclassed.
        super(Events, self).__init__(url)

    def _on_message(self, ws, message):
        """Decode one frame; flag auth failures, skip auth acks, dispatch the rest."""
        try:
            event = json.loads(message)
        except ValueError:
            # Ignore frames that are not valid JSON.
            return

        if event.get("type") == "error" and event.get("data", {}).get("errorMessage") == "UNAUTHORIZED":
            # Remember the failure so run_forever stops reconnecting.
            self.auth_error = True
            raise AuthError("Not authorized")
        if event.get("type") == "auth":
            # Authentication acknowledgement; not a user-visible event.
            return

        if self.message_handler:
            self.message_handler(event)

    def run_forever(self, *args, **kwargs):
        """Reconnect forever, aborting only on an authentication failure."""
        while True:
            if self.auth_error:
                raise AuthError("Not authorized")
            ws = websocket.WebSocketApp(self.url, header=self.header,
                                        on_open=self._on_open,
                                        on_message=self._on_message,
                                        on_error=self._on_error,
                                        on_close=self._on_close)
            ws.run_forever(ping_interval=5, ping_timeout=5, *args, **kwargs)
|
||||
16
dockercloud/api/exceptions.py
Normal file
16
dockercloud/api/exceptions.py
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
class ApiError(Exception):
    """An error status code was returned when querying the HTTP API"""
    pass


class AuthError(ApiError):
    """An 401 Unauthorized status code was returned when querying the API"""
    pass


class NonUniqueIdentifier(ApiError):
    """More than one API object matched the given identifier."""
    pass


class ObjectNotFound(ApiError):
    """No API object matched the given identifier."""
    pass
|
||||
64
dockercloud/api/http.py
Normal file
64
dockercloud/api/http.py
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
|
||||
from requests import Request, Session
|
||||
from requests import utils
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import dockercloud
|
||||
from .exceptions import ApiError, AuthError
|
||||
|
||||
logger = logging.getLogger("python-dockercloud")
|
||||
|
||||
|
||||
def send_request(method, path, inject_header=True, **kwargs):
    """Send one authenticated request to the REST API and return the parsed JSON.

    :param method: HTTP verb, e.g. "GET" or "POST"
    :param path: API path relative to dockercloud.rest_host
    :param inject_header: when True, copy the X-DockerCloud-Action-URI
        response header into the returned dict
    :param kwargs: forwarded to requests.Request (params, data, auth, ...)
    :returns: dict or None -- the decoded JSON body (None for 204 responses)
    :raises ApiError: on network failure, unparsable JSON, or a non-2xx status
    :raises AuthError: on a 401 response
    """
    json = None
    # Sanitize the path to ASCII but keep it a native str: passing bytes to
    # urljoin alongside a str base raises TypeError on Python 3.
    clean_path = path.strip("/").encode("ascii", "ignore").decode("ascii")
    url = urljoin(dockercloud.rest_host.rstrip("/"), clean_path)
    if not url.endswith("/"):
        url = "%s/" % url
    user_agent = 'python-dockercloud/%s' % dockercloud.__version__
    if dockercloud.user_agent:
        user_agent = "%s %s" % (dockercloud.user_agent, user_agent)

    # construct headers
    headers = {'Content-Type': 'application/json', 'User-Agent': user_agent}
    headers.update(dockercloud.auth.get_auth_header())
    logger.info("Request: %s, %s, %s, %s" % (method, url, headers, kwargs))

    # construct request
    s = Session()
    req = Request(method, url, headers=headers, **kwargs)

    # get environment proxies
    env_proxies = utils.get_environ_proxies(url) or {}
    kw_args = {'proxies': env_proxies}

    # make the request
    response = s.send(req.prepare(), **kw_args)
    status_code = getattr(response, 'status_code', None)
    logger.info("Response: Status %s, %s, %s" % (str(status_code), response.headers, response.text))

    # handle the response
    if not status_code:
        # Most likely network trouble
        raise ApiError("No Response (%s %s)" % (method, url))
    elif 200 <= status_code <= 299:
        # Success
        if status_code != 204:
            # Try to parse the response.
            try:
                json = response.json()
                if response.headers and inject_header:
                    json["dockercloud_action_uri"] = response.headers.get("X-DockerCloud-Action-URI", "")
            except (TypeError, ValueError):
                # ValueError: the body was not valid JSON. TypeError: the
                # body decoded to a non-dict (e.g. a list), so the header
                # injection above failed.
                raise ApiError("JSON Parse Error (%s %s). Response: %s" % (method, url, response.text))
        else:
            # 204 No Content carries no body by definition.
            json = None
    else:
        # Server returned an error
        if status_code == 401:
            raise AuthError("Not authorized")
        else:
            raise ApiError("Status %s (%s %s). Response: %s" % (str(status_code), method, url, response.text))
    return json
|
||||
19
dockercloud/api/node.py
Normal file
19
dockercloud/api/node.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Mutable, Taggable
|
||||
|
||||
|
||||
class Node(Mutable, Taggable):
    """A node (host machine) in the infra subsystem."""

    subsystem = "infra"
    endpoint = "/node"

    def save(self):
        """Update the node remotely.

        :returns: bool -- success flag from Mutable.save
        :raises AttributeError: when the node has never been saved (creation
            through this method is not supported)
        """
        if not self._detail_uri:
            raise AttributeError("Adding a new node is not supported via 'save' method")
        # Propagate the success flag; the original dropped the return value.
        return super(Node, self).save()

    def deploy(self, tag=None):
        """Deploy the node.

        NOTE(review): the ``tag`` parameter is accepted but never forwarded
        to the API call — confirm whether that is intended.
        """
        return self._perform_action("deploy")

    def upgrade_docker(self):
        """Upgrade the docker daemon running on the node."""
        return self._perform_action("docker-upgrade")
|
||||
12
dockercloud/api/nodeaz.py
Normal file
12
dockercloud/api/nodeaz.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Immutable
|
||||
|
||||
|
||||
class AZ(Immutable):
    """Read-only availability-zone resource in the infra subsystem."""

    subsystem = "infra"
    endpoint = "/az"

    @classmethod
    def _pk_key(cls):
        """Availability zones are addressed by name rather than uuid."""
        return 'name'
|
||||
22
dockercloud/api/nodecluster.py
Normal file
22
dockercloud/api/nodecluster.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Mutable, Taggable
|
||||
from .noderegion import Region
|
||||
from .nodetype import NodeType
|
||||
|
||||
|
||||
class NodeCluster(Mutable, Taggable):
    """A cluster of nodes in the infra subsystem."""

    subsystem = "infra"
    endpoint = "/nodecluster"

    def deploy(self, tag=None):
        """Deploy the node cluster."""
        return self._perform_action("deploy")

    @classmethod
    def create(cls, **kwargs):
        """Build a local (unsaved) instance.

        ``node_type`` and ``region`` may be passed as NodeType / Region
        objects; they are replaced by their ``resource_uri`` strings.
        """
        for key, expected in (("node_type", NodeType), ("region", Region)):
            value = kwargs.get(key)
            if isinstance(value, expected):
                kwargs[key] = getattr(value, "resource_uri", "")
        return cls(**kwargs)
|
||||
18
dockercloud/api/nodeprovider.py
Normal file
18
dockercloud/api/nodeprovider.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Immutable
|
||||
|
||||
|
||||
class Provider(Immutable):
    """Read-only node-provider resource in the infra subsystem."""

    subsystem = "infra"
    endpoint = "/provider"

    @classmethod
    def _pk_key(cls):
        """Providers are addressed by name rather than uuid."""
        return 'name'

    def delete(self):
        """Providers are read-only; deletion is disallowed."""
        raise AttributeError("'delete' is not supported in 'Provider'")

    def save(self):
        """Providers are read-only; saving is disallowed."""
        raise AttributeError("'save' is not supported in 'Provider'")
|
||||
12
dockercloud/api/noderegion.py
Normal file
12
dockercloud/api/noderegion.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Immutable
|
||||
|
||||
|
||||
class Region(Immutable):
    """Read-only node-region resource in the infra subsystem."""

    subsystem = "infra"
    endpoint = "/region"

    @classmethod
    def _pk_key(cls):
        """Regions are addressed by name rather than uuid."""
        return 'name'
|
||||
12
dockercloud/api/nodetype.py
Normal file
12
dockercloud/api/nodetype.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Immutable
|
||||
|
||||
|
||||
class NodeType(Immutable):
    """Read-only node-type resource in the infra subsystem."""

    subsystem = "infra"
    endpoint = "/nodetype"

    @classmethod
    def _pk_key(cls):
        """Node types are addressed by name rather than uuid."""
        return 'name'
|
||||
12
dockercloud/api/repository.py
Normal file
12
dockercloud/api/repository.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Mutable, Taggable
|
||||
|
||||
|
||||
class Repository(Mutable, Taggable):
    """An image repository in the repo subsystem."""

    subsystem = "repo"
    endpoint = "/repository"

    @classmethod
    def _pk_key(cls):
        """Repositories are addressed by name rather than uuid."""
        return 'name'
|
||||
26
dockercloud/api/service.py
Normal file
26
dockercloud/api/service.py
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Mutable, Taggable, Triggerable, StreamingLog
|
||||
|
||||
|
||||
class Service(Mutable, Taggable, Triggerable):
    """A service (group of containers) in the app subsystem."""

    subsystem = "app"
    endpoint = "/service"

    def start(self):
        """Start the service."""
        return self._perform_action("start")

    def stop(self):
        """Stop the service."""
        return self._perform_action("stop")

    def redeploy(self, reuse_volumes=True):
        """Redeploy the service, optionally reusing container volumes."""
        return self._perform_action("redeploy", params={'reuse_volumes': reuse_volumes})

    def scale(self):
        """Apply the service's current target number of containers."""
        return self._perform_action("scale")

    def logs(self, tail, follow, log_handler=StreamingLog.default_log_handler):
        """Stream the service log, feeding every message to ``log_handler``."""
        stream = StreamingLog(self.subsystem, self.endpoint, self.pk, tail, follow)
        stream.on_message(log_handler)
        stream.run_forever()
|
||||
26
dockercloud/api/stack.py
Normal file
26
dockercloud/api/stack.py
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Mutable
|
||||
from .exceptions import ApiError
|
||||
from .http import send_request
|
||||
|
||||
|
||||
class Stack(Mutable):
    """A stack of services in the app subsystem."""

    subsystem = "app"
    endpoint = "/stack"

    def start(self):
        """Start every service in the stack."""
        return self._perform_action("start")

    def stop(self):
        """Stop every service in the stack."""
        return self._perform_action("stop")

    def redeploy(self, reuse_volumes=True):
        """Redeploy the stack, optionally reusing container volumes."""
        return self._perform_action("redeploy", params={'reuse_volumes': reuse_volumes})

    def export(self):
        """Return the stack definition as exported by the API.

        :raises ApiError: when the stack has never been saved
        """
        if not self._detail_uri:
            raise ApiError("You must save the object before performing this operation")
        export_url = "/".join([self._detail_uri, "export"])
        return send_request("GET", export_url, inject_header=False)
|
||||
78
dockercloud/api/tag.py
Normal file
78
dockercloud/api/tag.py
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from .base import Taggable, BasicObject
|
||||
from .exceptions import ApiError
|
||||
|
||||
|
||||
class Tag(BasicObject):
    """Helper for manipulating the ``tags`` list of a Taggable API object.

    Instances are normally obtained via ``Tag.fetch(taggable)``, which binds
    the helper to a saved Taggable. Most methods read ``self.taggable``,
    which only exists after fetch(); on a bare ``Tag()`` they raise
    AttributeError rather than the ApiError in the guard message.
    """

    def __init__(self):
        # NOTE(review): this list is never read again -- every method below
        # operates on self.taggable.tags instead; confirm whether self.tags
        # is dead state.
        self.tags = []

    def add(self, tagname):
        """Append one tag name, or a list of names, to the bound taggable's local tag list."""
        if isinstance(tagname, list):
            for t in tagname:
                self.taggable.tags.append({"name": t})
        else:
            self.taggable.tags.append({"name": tagname})

        # Mark 'tags' dirty so the next save() sends the updated list.
        self.taggable.__addchanges__('tags')

    @classmethod
    def create(cls, **kwargs):
        # NOTE(review): __init__ accepts no keyword arguments, so any kwargs
        # passed here raise TypeError -- confirm the intended call sites.
        return cls(**kwargs)

    def remove(self, tagname):
        """Drop one tag name (or a list of names) from the bound taggable's local tag list."""
        if not self.taggable:
            raise ApiError("You must initialize the tag object before performing this operation")

        _tags = []
        tagnames = []
        if isinstance(tagname, list):
            for n in tagname:
                tagnames.append(n)
        else:
            tagnames.append(tagname)

        # Collect matches first, then remove, so the list is not mutated
        # while it is being scanned.
        for t in self.taggable.tags:
            for tagname in tagnames:
                if t.get("name", "") == tagname:
                    _tags.append(t)

        if _tags:
            for _tag in _tags:
                self.taggable.tags.remove(_tag)
            self.taggable.__addchanges__('tags')

    def delete(self, tagname):
        """Remove the given tag name(s) locally and persist the change remotely."""
        if not self.taggable:
            raise ApiError("You must initialize the tag object before performing this operation")

        # The taggable must have no other pending changes first.
        if self.taggable.is_dirty:
            raise ApiError("You must save the tab object before performing this operation")

        self.remove(tagname)
        return self.save()

    @classmethod
    def fetch(cls, taggable):
        """Return a Tag helper bound to a saved Taggable object.

        :raises ApiError: when the object is not Taggable or was never saved
        """
        if not isinstance(taggable, Taggable):
            raise ApiError("The object does not support tag")
        if not taggable._detail_uri:
            raise ApiError("You must save the taggable object before performing this operation")

        tag = cls()
        tag.taggable = taggable

        return tag

    def list(self, **kwargs):
        """Return the bound taggable's current (local) tag dicts."""
        if not self.taggable:
            raise ApiError("You must initialize the tag object before performing this operation")

        return self.taggable.tags

    def save(self):
        """Persist the bound taggable (and thus its tag list) via its own save()."""
        if not self.taggable:
            raise ApiError("You must initialize the tag object before performing this operation")

        return self.taggable.save()
|
||||
102
dockercloud/api/trigger.py
Normal file
102
dockercloud/api/trigger.py
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
import json as json_parser
|
||||
|
||||
from .base import Triggerable, BasicObject
|
||||
from .exceptions import ApiError
|
||||
from .http import send_request
|
||||
|
||||
|
||||
class Trigger(BasicObject):
    """Accessor for the trigger sub-resource of a triggerable API object.

    Obtain an instance via :meth:`fetch`, which binds it to the trigger
    endpoint of a saved triggerable object. A pending trigger definition is
    staged with :meth:`add` and sent to the API with :meth:`save`.
    """

    def __init__(self):
        # Pending (unsaved) trigger definition; None means nothing to save.
        self.trigger = None
        # REST endpoint ("<detail_uri>/trigger"), set by fetch(). Initialized
        # here so the guard checks in delete/list/save/call raise ApiError
        # instead of AttributeError on an unbound instance.
        self.endpoint = None

    def add(self, name=None, operation=None):
        """Stage a new trigger definition to be sent on the next save().

        :param name: optional trigger name
        :param operation: optional trigger operation
        :raises ApiError: if a staged trigger has not yet been saved
        """
        if self.trigger is not None:
            raise ApiError("You must save the object before performing this operation")

        trigger = {}
        if name:
            trigger['name'] = name
        if operation:
            trigger['operation'] = operation
        self.trigger = trigger

    @classmethod
    def create(cls, **kwargs):
        """Returns a new instance of the model (without saving it) with the attributes specified in ``kwargs``

        :returns: trigger -- a new local instance of the Trigger
        """
        # __init__ accepts no arguments, so apply kwargs as plain attributes;
        # the original ``cls(**kwargs)`` raised TypeError for any kwargs,
        # contradicting this docstring.
        instance = cls()
        for key, value in kwargs.items():
            setattr(instance, key, value)
        return instance

    def delete(self, uuid):
        """Delete the remote trigger identified by *uuid*.

        :returns: True on success
        :raises ApiError: if this Trigger object is uninitialized
        """
        if not self.endpoint:
            raise ApiError("You must initialize the Trigger object before performing this operation")

        action = "DELETE"
        url = "/".join([self.endpoint, uuid])
        send_request(action, url)
        return True

    @classmethod
    def fetch(cls, triggerable):
        """Build a Trigger accessor bound to an already-saved triggerable object.

        :param triggerable: an object supporting triggers (a Triggerable subclass)
        :returns: a new Trigger instance bound to *triggerable*
        :raises ApiError: if *triggerable* is not a Triggerable, or has never been saved
        """
        if not isinstance(triggerable, Triggerable):
            raise ApiError("The object does not support trigger")

        if not triggerable._detail_uri:
            raise ApiError("You must save the triggerable object before performing this operation")

        trigger = cls()
        trigger.endpoint = "/".join([triggerable._detail_uri, "trigger"])
        # The original built an unused ``handlers`` list here via a live
        # trigger.list() API call; that dead code (and its network round-trip)
        # has been removed.
        return trigger

    def list(self, **kwargs):
        """Return all remote trigger objects, following API pagination.

        :raises ApiError: if this Trigger object is uninitialized
        """
        if not self.endpoint:
            raise ApiError("You must initialize the Trigger object before performing this operation")

        objects = []
        while True:
            json = send_request('GET', self.endpoint, params=kwargs)
            objs = json.get('objects', [])
            meta = json.get('meta', {})
            next_url = meta.get('next', '')
            offset = meta.get('offset', 0)
            limit = meta.get('limit', 0)
            objects.extend(objs)
            if next_url:
                # Advance to the next page using the server-reported window.
                kwargs['offset'] = offset + limit
                kwargs['limit'] = limit
            else:
                break

        return objects

    def save(self):
        """POST the staged trigger definition to the API.

        :returns: True (a no-op if nothing is staged)
        :raises ApiError: if this Trigger object is uninitialized
        """
        if not self.endpoint:
            raise ApiError("You must initialize the Trigger object before performing this operation")

        if self.trigger is None:
            return True

        json = send_request("POST", self.endpoint, data=json_parser.dumps(self.trigger))
        if json:
            # The original called clear() twice in a row; once is enough.
            self.clear()
        return True

    def call(self, uuid):
        """Invoke the remote trigger identified by *uuid*.

        :returns: True if the API returned a response body, False otherwise
        :raises ApiError: if this Trigger object is uninitialized
        """
        if not self.endpoint:
            raise ApiError("You must initialize the Trigger object before performing this operation")

        json = send_request("POST", "/".join([self.endpoint, uuid + "/call"]))
        if json:
            return True
        return False

    def clear(self):
        """Discard any staged (unsaved) trigger definition."""
        self.trigger = None
||||
193
dockercloud/api/utils.py
Normal file
193
dockercloud/api/utils.py
Normal file
|
|
@ -0,0 +1,193 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
|
||||
from .action import Action
|
||||
from .container import Container
|
||||
from .exceptions import ApiError, ObjectNotFound, NonUniqueIdentifier
|
||||
from .node import Node
|
||||
from .nodecluster import NodeCluster
|
||||
from .service import Service
|
||||
from .stack import Stack
|
||||
|
||||
|
||||
def is_uuid4(identifier):
    """Return True if *identifier* is exactly a canonical UUID4 string.

    The original regex was anchored only at the start, so any string merely
    *starting* with a valid UUID4 (e.g. a UUID followed by trailing junk) was
    accepted; ``fullmatch`` requires the whole string to be a UUID4.

    :param identifier: string to validate (case-insensitive)
    :returns: bool
    """
    uuid4_pattern = r'[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}'
    return re.fullmatch(uuid4_pattern, identifier, re.I) is not None
|
||||
|
||||
|
||||
class Utils:
    """Helpers for resolving API objects from resource URIs and loose identifiers."""

    @staticmethod
    def fetch_by_resource_uri(uri):
        """Fetch the API object addressed by a resource URI.

        :param uri: resource URI such as ``/api/app/v1/service/<uuid>/``
        :returns: the fetched object (Container, Service, Stack, Node,
            NodeCluster or Action)
        :raises ApiError: if the URI is malformed or the resource type is
            unsupported
        """
        # The original checked ``isinstance(uri, basestring)``, which is a
        # NameError on Python 3 and crashed every call. ``str`` is correct on
        # Python 3; on Python 2 this would reject ``unicode`` URIs.
        if not isinstance(uri, str):
            raise ApiError("Uri format is invalid")
        terms = uri.strip("/").split("/")
        if len(terms) < 2:
            raise ApiError("Uri format is invalid")

        # Renamed from ``id`` to avoid shadowing the builtin.
        resource_id = terms[-1]
        resource_type = terms[-2].lower()

        # Dispatch table replaces the original if/elif chain.
        fetchers = {
            "container": Container.fetch,
            "service": Service.fetch,
            "stack": Stack.fetch,
            "node": Node.fetch,
            "nodecluster": NodeCluster.fetch,
            "action": Action.fetch,
        }
        fetcher = fetchers.get(resource_type)
        if fetcher is None:
            raise ApiError(
                "Unsupported resource type. Only support: action, container, node, nodecluster, service, stack")
        return fetcher(resource_id)

    @staticmethod
    def _fetch_remote(model, article, noun, identifier, raise_exceptions,
                      stack_filter=None, match_by_name=True):
        """Shared resolver behind the ``fetch_remote_*`` helpers.

        Resolution order: full UUID4 -> ``name.stackname`` (when *stack_filter*
        is given and the identifier contains a dot) -> short-uuid prefix ->
        plain name (when *match_by_name* is True).

        :param model: API model class providing ``fetch`` and ``list``
        :param article: "a" or "an", used in the not-found message
        :param noun: human-readable resource noun for error messages
        :param identifier: uuid, short uuid, name, or ``name.stackname``
        :param raise_exceptions: if False, lookup errors are returned instead
            of raised (preserving the original behavior)
        :param stack_filter: query key for the stack-name part of a dotted
            identifier, or None when dotted lookup is unsupported
        :param match_by_name: whether to fall back to a ``name=`` lookup
        """
        not_found = "Cannot find %s %s with the identifier '%s'" % (article, noun, identifier)
        try:
            if is_uuid4(identifier):
                try:
                    return model.fetch(identifier)
                except Exception:
                    raise ObjectNotFound(not_found)

            if stack_filter and "." in identifier:
                terms = identifier.split(".", 2)
                candidates = model.list(**{"name": terms[0], stack_filter: terms[1]})
            else:
                candidates = model.list(uuid__startswith=identifier)
                if not candidates and match_by_name:
                    candidates = model.list(name=identifier)

            if len(candidates) == 1:
                return model.fetch(candidates[0].uuid)
            if not candidates:
                raise ObjectNotFound(not_found)
            raise NonUniqueIdentifier(
                "More than one %s has the same identifier, please use the long uuid" % noun)
        except (NonUniqueIdentifier, ObjectNotFound) as e:
            if not raise_exceptions:
                return e
            raise

    @staticmethod
    def fetch_remote_container(identifier, raise_exceptions=True):
        """Resolve a Container by uuid, short uuid, name, or ``name.stackname``."""
        return Utils._fetch_remote(Container, "a", "container", identifier, raise_exceptions,
                                   stack_filter="service__stack__name")

    @staticmethod
    def fetch_remote_service(identifier, raise_exceptions=True):
        """Resolve a Service by uuid, short uuid, name, or ``name.stackname``."""
        return Utils._fetch_remote(Service, "a", "service", identifier, raise_exceptions,
                                   stack_filter="stack__name")

    @staticmethod
    def fetch_remote_stack(identifier, raise_exceptions=True):
        """Resolve a Stack by uuid, short uuid, or name."""
        return Utils._fetch_remote(Stack, "a", "stack", identifier, raise_exceptions)

    @staticmethod
    def fetch_remote_node(identifier, raise_exceptions=True):
        """Resolve a Node by uuid or short uuid (names are not supported)."""
        return Utils._fetch_remote(Node, "a", "node", identifier, raise_exceptions,
                                   match_by_name=False)

    @staticmethod
    def fetch_remote_nodecluster(identifier, raise_exceptions=True):
        """Resolve a NodeCluster by uuid, short uuid, or name."""
        return Utils._fetch_remote(NodeCluster, "a", "node cluster", identifier, raise_exceptions)

    @staticmethod
    def fetch_remote_action(identifier, raise_exceptions=True):
        """Resolve an Action by uuid or short uuid (names are not supported).

        Note: the original not-found message for the list path wrongly said
        "action cluster"; it now consistently says "action".
        """
        return Utils._fetch_remote(Action, "an", "action", identifier, raise_exceptions,
                                   match_by_name=False)
|
||||
Loading…
Add table
Add a link
Reference in a new issue