# -*- coding: utf-8 -*-

# Copyright © 2012 Humbug, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import simplejson
import requests
import time
import traceback
import urlparse
import sys
import os
import optparse
from distutils.version import LooseVersion

from ConfigParser import SafeConfigParser

__version__ = "0.1.5"

# Check that we have a recent enough version of requests;
# older versions don't provide the 'json' attribute on responses.
assert(LooseVersion(requests.__version__) >= LooseVersion('0.12.1'))
# In newer versions, the 'json' attribute is a function, not a property.
requests_json_is_function = not isinstance(requests.Response.json, property)

API_VERSTRING = "/api/v1/"

# Command-line options shared by scripts built on this API client.
def generate_option_group(parser):
    group = optparse.OptionGroup(parser, 'API configuration')
    group.add_option('--site',
                     default=None,
                     help=optparse.SUPPRESS_HELP)
    group.add_option('--api-key',
                     action='store')
    group.add_option('--user',
                     dest='email',
                     help='Email address of the calling user.')
    group.add_option('--config-file',
                     action='store',
                     help='Location of an ini file containing the above information.')
    group.add_option('-v', '--verbose',
                     action='store_true',
                     help='Provide detailed output.')

    return group

def init_from_options(options):
    return Client(email=options.email, api_key=options.api_key, config_file=options.config_file,
                  verbose=options.verbose, site=options.site)
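
# A minimal usage sketch (illustrative only): a script can reuse the options
# above and build a Client from the parsed values.  Importing this module as
# `humbug` is an assumption about how the file is packaged.
#
#   import optparse
#   import humbug
#
#   parser = optparse.OptionParser()
#   parser.add_option_group(humbug.generate_option_group(parser))
#   (options, args) = parser.parse_args()
#   client = humbug.init_from_options(options)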

class Client(object):
    def __init__(self, email=None, api_key=None, config_file=None,
                 verbose=False, retry_on_errors=True,
                 site=None, client="API: Python"):
        # If the caller did not supply credentials, fall back to the config
        # file (~/.humbugrc by default).
        if None in (api_key, email):
            if config_file is None:
                config_file = os.path.join(os.environ["HOME"], ".humbugrc")
            if not os.path.exists(config_file):
                raise RuntimeError("api_key or email not specified and %s does not exist"
                                   % (config_file,))
            config = SafeConfigParser()
            with open(config_file, 'r') as f:
                config.readfp(f, config_file)
            if api_key is None:
                api_key = config.get("api", "key")
            if email is None:
                email = config.get("api", "email")
            if site is None and config.has_option("api", "site"):
                site = config.get("api", "site")

        self.api_key = api_key
        self.email = email
        self.verbose = verbose
        if site is not None:
            self.base_url = site
        else:
            self.base_url = "https://humbughq.com"
        self.retry_on_errors = retry_on_errors
        self.client_name = client

    def do_api_query(self, orig_request, url, method="POST", longpolling=False):
        # Issue one API request, retrying on transient failures, and return
        # the decoded JSON response as a dict.
        request = {}
        request["client"] = self.client_name

        # Values that aren't strings are JSON-encoded so they survive the
        # form-encoded request.
        for (key, val) in orig_request.iteritems():
            if not (isinstance(val, str) or isinstance(val, unicode)):
                request[key] = simplejson.dumps(val)
            else:
                request[key] = val

        query_state = {
            'had_error_retry': False,
            'request': request,
            'failures': 0,
        }

        def error_retry(error_string):
            if not self.retry_on_errors or query_state["failures"] >= 10:
                return False
            if self.verbose:
                if not query_state["had_error_retry"]:
                    sys.stdout.write("humbug API(%s): connection error%s -- retrying." %
                                     (url.split(API_VERSTRING, 2)[1], error_string,))
                    query_state["had_error_retry"] = True
                else:
                    sys.stdout.write(".")
                sys.stdout.flush()
            query_state["request"]["dont_block"] = simplejson.dumps(True)
            time.sleep(1)
            query_state["failures"] += 1
            return True

        def end_error_retry(succeeded):
            if query_state["had_error_retry"] and self.verbose:
                if succeeded:
                    print "Success!"
                else:
                    print "Failed!"

        while True:
            try:
                if method == "GET":
                    kwarg = "params"
                else:
                    kwarg = "data"
                kwargs = {kwarg: query_state["request"]}
                res = requests.request(
                    method,
                    urlparse.urljoin(self.base_url, url),
                    auth=requests.auth.HTTPBasicAuth(self.email,
                                                     self.api_key),
                    verify=True, timeout=90,
                    **kwargs)

                # On 50x errors, try again after a short sleep
                if str(res.status_code).startswith('5'):
                    if error_retry(" (server %s)" % (res.status_code,)):
                        continue
                    # Otherwise fall through and process the python-requests error normally
            except (requests.exceptions.Timeout, requests.exceptions.SSLError) as e:
                # Timeouts show up as either a Timeout or an SSLError; we want
                # the later exception handlers to deal with any non-timeout
                # SSLErrors.
                if (isinstance(e, requests.exceptions.SSLError) and
                        str(e) != "The read operation timed out"):
                    raise
                if longpolling:
                    # When longpolling, we expect the timeout to fire, and the
                    # correct response is to just retry.
                    continue
                else:
                    end_error_retry(False)
                    return {'msg': "Connection error:\n%s" % traceback.format_exc(),
                            "result": "connection-error"}
            except requests.exceptions.ConnectionError:
                if error_retry(""):
                    continue
                end_error_retry(False)
                return {'msg': "Connection error:\n%s" % traceback.format_exc(),
                        "result": "connection-error"}
            except Exception:
                # We'll split this out into more cases as we encounter new bugs.
                return {'msg': "Unexpected error:\n%s" % traceback.format_exc(),
                        "result": "unexpected-error"}

            if requests_json_is_function:
                json_result = res.json()
            else:
                json_result = res.json
            if json_result is not None:
                end_error_retry(True)
                return json_result
            end_error_retry(False)
            return {'msg': res.text, "result": "http-error",
                    "status_code": res.status_code}

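    # For reference: every error path above returns a dict carrying 'result'
    # and 'msg' keys, for example
    #   {'result': 'connection-error', 'msg': '(traceback text)'}
    #   {'result': 'http-error', 'msg': '(response body)', 'status_code': 502}
    # while a successful call returns the server's decoded JSON response.
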
    @classmethod
    def _register(cls, name, url=None, make_request=(lambda request={}: request),
                  method="POST", **query_kwargs):
        # Attach a method named `name` to Client: it builds the request dict
        # with make_request and sends it to API_VERSTRING + url.
        if url is None:
            url = name
        def call(self, *args, **kwargs):
            request = make_request(*args, **kwargs)
            return self.do_api_query(request, API_VERSTRING + url, method=method, **query_kwargs)
        call.func_name = name
        setattr(cls, name, call)

    def call_on_each_event(self, callback, event_types=None):
        def do_register():
            while True:
                if event_types is None:
                    res = self.register()
                else:
                    res = self.register(event_types=event_types)

                if 'error' in res.get('result'):
                    if self.verbose:
                        print "Server returned error:\n%s" % res['msg']
                    time.sleep(1)
                else:
                    return (res['queue_id'], res['last_event_id'])

        queue_id = None
        while True:
            if queue_id is None:
                (queue_id, last_event_id) = do_register()

            res = self.get_events(queue_id=queue_id, last_event_id=last_event_id)
            if 'error' in res.get('result'):
                if res["result"] == "http-error":
                    if self.verbose:
                        print "HTTP error fetching events -- probably a server restart"
                elif res["result"] == "connection-error":
                    if self.verbose:
                        print "Connection error fetching events -- probably the server is temporarily down?"
                else:
                    if self.verbose:
                        print "Server returned error:\n%s" % res["msg"]
                    if res["msg"].startswith("Bad event queue id:"):
                        # Our event queue went away, probably because we were
                        # asleep or the server restarted abnormally.  We may
                        # have missed some events while the network was down
                        # or something, but there's not really anything we can
                        # do about it other than resuming getting new ones.
                        #
                        # Reset queue_id to register a new event queue.
                        queue_id = None
                # TODO: Make this back off once it's more reliable
                time.sleep(1)
                continue

            for event in res['events']:
                last_event_id = max(last_event_id, int(event['id']))
                callback(event)

    def call_on_each_message(self, callback):
        def event_callback(event):
            if event['type'] == 'message':
                callback(event['message'])

        self.call_on_each_event(event_callback, ['message'])

def _mk_subs(streams):
    return {'subscriptions': streams}

def _mk_del_subs(streams):
    return {'delete': streams}

def _mk_events(event_types=None):
    if event_types is None:
        return dict()
    return dict(event_types=event_types)

Client._register('send_message', url='messages', make_request=(lambda request: request))
Client._register('get_messages', method='GET', url='messages/latest', longpolling=True)
Client._register('get_events', url='events', method='GET', longpolling=True, make_request=(lambda **kwargs: kwargs))
Client._register('register', make_request=_mk_events)
Client._register('get_profile', method='GET', url='users/me')
Client._register('get_public_streams', method='GET', url='streams')
Client._register('get_members', method='GET', url='users')
Client._register('list_subscriptions', method='GET', url='users/me/subscriptions')
Client._register('add_subscriptions', url='users/me/subscriptions', make_request=_mk_subs)
Client._register('delete_subscriptions', method='PATCH', url='users/me/subscriptions', make_request=_mk_del_subs)
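
# A minimal end-to-end sketch (illustrative only).  It assumes ~/.humbugrc has
# an [api] section with `email` and `key`, that the configured site is
# reachable, that a stream named "test" exists, and that this module is
# importable as `humbug`; the message fields shown are assumptions about what
# the messages endpoint expects.
#
#   import humbug
#
#   client = humbug.Client()
#   client.send_message({'type': 'stream',
#                        'to': 'test',
#                        'subject': 'api',
#                        'content': 'Hello from the Python client'})
#
#   def print_message(msg):
#       print msg['content']
#   client.call_on_each_message(print_message)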