Ryan Williams
2010-02-25 02:45:41 -05:00
23 changed files with 461 additions and 417 deletions

View File

@@ -13,3 +13,6 @@ cover
 nosetests*.xml
 .coverage
 *,cover
+syntax: re
+^.ropeproject/.*$

View File

@@ -1,5 +1,4 @@
 import collections
-import time
 import traceback
 import warnings
@@ -148,7 +147,8 @@ class Queue(object):
         return len(self.items)
     def __repr__(self):
-        params = (self.__class__.__name__, hex(id(self)), len(self.items), len(self._waiters))
+        params = (self.__class__.__name__, hex(id(self)),
+                  len(self.items), len(self._waiters))
         return '<%s at %s items[%d] _waiters[%s]>' % params
     def send(self, result=None, exc=None):
@@ -221,7 +221,9 @@ class Channel(object):
         return len(self.items)
     def __repr__(self):
-        params = (self.__class__.__name__, hex(id(self)), self.max_size, len(self.items), len(self._waiters), len(self._senders))
+        params = (self.__class__.__name__, hex(id(self)),
+                  self.max_size, len(self.items),
+                  len(self._waiters), len(self._senders))
         return '<%s at %s max=%s items[%d] _w[%s] _s[%s]>' % params
     def send(self, result=None, exc=None):
@@ -397,4 +399,3 @@ class Actor(object):
         >>> eventlet.kill(a._killer) # test cleanup
         """
         raise NotImplementedError()
-

View File

@@ -4,6 +4,7 @@ from eventlet.green import SimpleHTTPServer
 from eventlet.green import urllib
 from eventlet.green import select
+test = None # bind prior to patcher.inject to silence pyflakes warning below
 patcher.inject('CGIHTTPServer',
     globals(),
     ('BaseHTTPServer', BaseHTTPServer),
@@ -14,4 +15,4 @@ patcher.inject('CGIHTTPServer',
 del patcher
 if __name__ == '__main__':
-    test()
+    test() # pyflakes false alarm here unless test = None above

View File

@@ -21,6 +21,3 @@ def _patch_main_thread(mod):
     curthread = mod._active.pop(mod._get_ident(), None)
     if curthread:
         mod._active[thread.get_ident()] = curthread
-if __name__ == '__main__':
-    _test()

View File

@@ -3,3 +3,4 @@ for var in dir(__time):
     exec "%s = __time.%s" % (var, var)
 __patched__ = ['sleep']
 from eventlet.greenthread import sleep
+sleep # silence pyflakes
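
Note: the bare `sleep` reference added above, the `test = None` binding in the CGIHTTPServer file, and the tuple reference added later in the greenlet support module are all the same idiom: mention a name once so pyflakes counts it as used. A minimal standalone sketch of the pattern (illustrative module contents, not taken from this commit):

# A wrapper module that imports a function only to re-export it; naming the
# symbol once keeps pyflakes from flagging the import as unused.
from time import sleep

__patched__ = ['sleep']   # names this module intends to expose/override

sleep  # silence pyflakes: the import exists purely for re-export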

View File

@@ -1,7 +1,4 @@
-import eventlet
 from eventlet.hubs import trampoline
-from eventlet.hubs import get_hub
 BUFFER_SIZE = 4096
 import errno
@@ -12,10 +9,6 @@ import sys
 import time
 import warnings
-from errno import EWOULDBLOCK, EAGAIN
 __all__ = ['GreenSocket', 'GreenPipe', 'shutdown_safe']
 CONNECT_ERR = set((errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK))
@@ -195,11 +188,11 @@ class GreenSocket(object):
         else:
             end = time.time() + self.gettimeout()
             while True:
-                if socket_connect(fd, address):
-                    return 0
-                if time.time() >= end:
-                    raise socket.timeout(errno.EAGAIN)
                 try:
+                    if socket_connect(fd, address):
+                        return 0
+                    if time.time() >= end:
+                        raise socket.timeout(errno.EAGAIN)
                     trampoline(fd, write=True, timeout=end-time.time(),
                                timeout_exc=socket.timeout(errno.EAGAIN))
                 except socket.error, ex:
@@ -302,7 +295,6 @@ class GreenSocket(object):
         return total_sent
     def sendall(self, data, flags=0):
-        fd = self.fd
         tail = self.send(data, flags)
         len_data = len(data)
         while tail < len_data:
@@ -375,7 +367,7 @@ class GreenPipe(object):
         try:
             return fd.read(buflen)
         except IOError, e:
-            if e[0] != EAGAIN:
+            if e[0] != errno.EAGAIN:
                 return ''
         except socket.error, e:
             if e[0] == errno.EPIPE:
@@ -407,7 +399,7 @@ class GreenPipe(object):
             fd.flush()
             return len(data)
         except IOError, e:
-            if e[0] != EAGAIN:
+            if e[0] != errno.EAGAIN:
                 raise
         except ValueError, e:
             # what's this for?
@@ -589,4 +581,3 @@ def serve(sock, handle, concurrency=1000):
     connections until the existing ones complete.
     """
     pass
-
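
Note: the connect hunk above moves the progress and deadline checks inside the try, so the same except clause that guards the trampoline wait also covers them. A rough standalone sketch of that control flow, using plain select instead of eventlet's trampoline (illustrative names, not eventlet's API):

import errno
import select
import socket
import time

def connect_ex_with_deadline(sock, address, timeout):
    # Keep retrying a non-blocking connect until it succeeds, hard-fails,
    # or the deadline passes; the checks and the wait share one except.
    sock.setblocking(False)
    end = time.time() + timeout
    while True:
        try:
            code = sock.connect_ex(address)
            if code in (0, errno.EISCONN):
                return 0
            if code not in (errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK):
                return code                   # hard failure, e.g. ECONNREFUSED
            if time.time() >= end:
                raise socket.timeout(errno.EAGAIN)
            # stand-in for trampoline(fd, write=True, ...): wait until the
            # socket is writable or the remaining time runs out
            select.select([], [sock], [], max(end - time.time(), 0))
        except socket.error, ex:
            return ex.args[0]                 # connect_ex-style errno result

# e.g. connect_ex_with_deadline(socket.socket(), ('192.0.2.1', 80), 2.0)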

View File

@@ -30,11 +30,12 @@ class DebugListener(FdListener):
         self.greenlet = greenlet.getcurrent()
         super(DebugListener, self).__init__(evtype, fileno, cb)
     def __repr__(self):
-        return "DebugListener(%r, %r, %r, %r)\n%sEndDebugFdListener" % (self.evtype,
-                                                                        self.fileno,
-                                                                        self.cb,
-                                                                        self.greenlet,
-                                                                        ''.join(self.where_called))
+        return "DebugListener(%r, %r, %r, %r)\n%sEndDebugFdListener" % (
+            self.evtype,
+            self.fileno,
+            self.cb,
+            self.greenlet,
+            ''.join(self.where_called))
     __str__ = __repr__
@@ -231,8 +232,6 @@ class BaseHub(object):
         t = self.timers
         heappop = heapq.heappop
-        i = 0
         while t:
             next = t[0]

View File

@@ -43,7 +43,9 @@ class Hub(BaseHub):
         event.init()
         self.signal_exc_info = None
-        self.signal(2, lambda signalnum, frame: self.greenlet.parent.throw(KeyboardInterrupt))
+        self.signal(
+            2,
+            lambda signalnum, frame: self.greenlet.parent.throw(KeyboardInterrupt))
         self.events_to_add = []
     def dispatch(self):
@@ -76,7 +78,8 @@ class Hub(BaseHub):
                 raise
             except:
                 if self.signal_exc_info is not None:
-                    self.schedule_call_global(0, greenlet.getcurrent().parent.throw, *self.signal_exc_info)
+                    self.schedule_call_global(
+                        0, greenlet.getcurrent().parent.throw, *self.signal_exc_info)
                     self.signal_exc_info = None
                 else:
                     self.squelch_timer_exception(None, sys.exc_info())
@@ -128,7 +131,7 @@ class Hub(BaseHub):
         for listener in l_list:
             try:
                 listener.cb.delete()
-            except SYSTEM_EXCEPTIONS:
+            except self.SYSTEM_EXCEPTIONS:
                 raise
             except:
                 traceback.print_exc()
@@ -169,4 +172,3 @@ def _scheduled_call_local(event_impl, handle, evtype, arg):
         cb(*args, **kwargs)
     finally:
         event_impl.delete()
-

View File

@@ -74,8 +74,8 @@ class socket_rwdescriptor(FdListener):
         # to the mainloop occurs, twisted will not re-evaluate the delayed calls
         # because it assumes that none were scheduled since no client code was executed
         # (it has no idea it was switched away). So, we restart the mainloop.
-        # XXX this is not enough, pollreactor prints the traceback for this and epollreactor
-        # times out. see test__hub.TestCloseSocketWhilePolling
+        # XXX this is not enough, pollreactor prints the traceback for
+        # this and epollreactor times out. see test__hub.TestCloseSocketWhilePolling
         raise greenlet.GreenletExit
     logstr = "twistedr"
@@ -103,9 +103,10 @@ class BaseTwistedHub(object):
         self.greenlet = mainloop_greenlet
     def switch(self):
-        assert api.getcurrent() is not self.greenlet, "Cannot switch from MAINLOOP to MAINLOOP"
+        assert getcurrent() is not self.greenlet, \
+               "Cannot switch from MAINLOOP to MAINLOOP"
         try:
-            api.getcurrent().parent = self.greenlet
+            getcurrent().parent = self.greenlet
         except ValueError:
             pass
         return self.greenlet.switch()
@@ -134,7 +135,8 @@ class BaseTwistedHub(object):
             if timer.greenlet.dead:
                 return
             return func(*args1, **kwargs1)
-        timer = callLater(LocalDelayedCall, reactor, seconds, call_if_greenlet_alive, *args, **kwargs)
+        timer = callLater(LocalDelayedCall, reactor, seconds,
+                          call_if_greenlet_alive, *args, **kwargs)
         return timer
     schedule_call = schedule_call_local
@@ -189,18 +191,22 @@ class TwistedHub(BaseTwistedHub):
     installSignalHandlers = False
     def __init__(self):
-        assert Hub.state==0, ('%s hub can only be instantiated once' % type(self).__name__, Hub.state)
+        assert Hub.state==0, ('%s hub can only be instantiated once'%type(self).__name__,
+                              Hub.state)
         Hub.state = 1
-        make_twisted_threadpool_daemonic() # otherwise the program would hang after the main greenlet exited
-        g = api.Greenlet(self.run)
+        make_twisted_threadpool_daemonic() # otherwise the program
+                                           # would hang after the main
+                                           # greenlet exited
+        g = greenlet.greenlet(self.run)
         BaseTwistedHub.__init__(self, g)
     def switch(self):
-        assert api.getcurrent() is not self.greenlet, "Cannot switch from MAINLOOP to MAINLOOP"
+        assert getcurrent() is not self.greenlet, \
+               "Cannot switch from MAINLOOP to MAINLOOP"
         if self.greenlet.dead:
-            self.greenlet = api.Greenlet(self.run)
+            self.greenlet = greenlet.greenlet(self.run)
         try:
-            api.getcurrent().parent = self.greenlet
+            getcurrent().parent = self.greenlet
         except ValueError:
             pass
         return self.greenlet.switch()
@@ -255,5 +261,3 @@ def make_twisted_threadpool_daemonic():
     from twisted.python.threadpool import ThreadPool
     if ThreadPool.threadFactory != DaemonicThread:
         ThreadPool.threadFactory = DaemonicThread
-
-

View File

@@ -35,7 +35,8 @@ def inject(module_name, new_globals, *additional_modules):
         saved[name] = sys.modules.get(name, None)
         sys.modules[name] = mod
-    ## Remove the old module from sys.modules and reimport it while the specified modules are in place
+    ## Remove the old module from sys.modules and reimport it while
+    ## the specified modules are in place
     old_module = sys.modules.pop(module_name, None)
     try:
         module = __import__(module_name, {}, {}, module_name.split('.')[:-1])
@@ -73,9 +74,9 @@ def import_patched(module_name, *additional_modules, **kw_additional_modules):
     The only required argument is the name of the module to be imported.
     """
     return inject(
-        module_name,
-        None,
-        *additional_modules + tuple(kw_additional_modules.items()))
+        module_name,
+        None,
+        *additional_modules + tuple(kw_additional_modules.items()))
 def patch_function(func, *additional_modules):
@@ -159,11 +160,12 @@ def monkey_patch(all=True, os=False, select=False,
     for name, mod in modules_to_patch:
         orig_mod = sys.modules.get(name)
-        for attr in mod.__patched__:
-            orig_attr = getattr(orig_mod, attr, None)
-            patched_attr = getattr(mod, attr, None)
+        for attr_name in mod.__patched__:
+            #orig_attr = getattr(orig_mod, attr_name, None)
+            # @@tavis: line above wasn't used, not sure what author intended
+            patched_attr = getattr(mod, attr_name, None)
             if patched_attr is not None:
-                setattr(orig_mod, attr, patched_attr)
+                setattr(orig_mod, attr_name, patched_attr)
 def _green_os_modules():
     from eventlet.green import os
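
Note: the monkey_patch hunk above only renames the loop variable and comments out an unused getattr; the underlying pattern is copying every name listed in a green module's __patched__ onto the already-imported original module. A minimal standalone sketch of that pattern (illustrative helper, not the real patcher module):

import sys

def copy_patched_attrs(module_name, green_mod):
    # Copy each attribute named in __patched__ from the green replacement
    # module onto the original module, if the original has been imported.
    orig_mod = sys.modules.get(module_name)
    if orig_mod is None:
        return
    for attr_name in getattr(green_mod, '__patched__', ()):
        patched_attr = getattr(green_mod, attr_name, None)
        if patched_attr is not None:
            setattr(orig_mod, attr_name, patched_attr)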

View File

@@ -165,5 +165,3 @@ class TokenPool(Pool):
     """
     def create(self):
         return Token()
-
-

View File

@@ -11,7 +11,6 @@ import popen2
 import signal
 from eventlet import api
-from eventlet import coros
 from eventlet import pools
 from eventlet import greenio
@@ -69,7 +68,9 @@ class Process(object):
         greenio.set_nonblocking(child_stdout_stderr)
         greenio.set_nonblocking(child_stdin)
         self.child_stdout_stderr = greenio.GreenPipe(child_stdout_stderr)
-        self.child_stdout_stderr.newlines = '\n' # the default is \r\n, which aren't sent over pipes
+        self.child_stdout_stderr.newlines = '\n' # the default is
+                                                 # \r\n, which aren't sent over
+                                                 # pipes
         self.child_stdin = greenio.GreenPipe(child_stdin)
         self.child_stdin.newlines = '\n'

View File

@@ -36,7 +36,10 @@ def wrap(obj, dead_callback = None):
         return wrap_module(obj.__name__, dead_callback)
     pythonpath_sync()
     if _g_debug_mode:
-        p = Process(sys.executable, ["-W", "ignore", __file__, '--child', '--logfile', os.path.join(tempfile.gettempdir(), 'saranwrap.log')], dead_callback)
+        p = Process(sys.executable,
+                    ["-W", "ignore", __file__, '--child',
+                     '--logfile', os.path.join(tempfile.gettempdir(), 'saranwrap.log')],
+                    dead_callback)
     else:
         p = Process(sys.executable, ["-W", "ignore", __file__, '--child'], dead_callback)
     prox = Proxy(ChildProcess(p, p))
@@ -53,9 +56,13 @@ def wrap_module(fqname, dead_callback = None):
     pythonpath_sync()
     global _g_debug_mode
     if _g_debug_mode:
-        p = Process(sys.executable, ["-W", "ignore", __file__, '--module', fqname, '--logfile', os.path.join(tempfile.gettempdir(), 'saranwrap.log')], dead_callback)
+        p = Process(sys.executable,
+                    ["-W", "ignore", __file__, '--module', fqname,
+                     '--logfile', os.path.join(tempfile.gettempdir(), 'saranwrap.log')],
+                    dead_callback)
     else:
-        p = Process(sys.executable, ["-W", "ignore", __file__, '--module', fqname,], dead_callback)
+        p = Process(sys.executable,
+                    ["-W", "ignore", __file__, '--module', fqname,], dead_callback)
     prox = Proxy(ChildProcess(p,p))
     return prox
@@ -140,7 +147,8 @@ def _write_request(param, output):
 def _is_local(attribute):
     "Return ``True`` if the attribute should be handled locally"
-#    return attribute in ('_in', '_out', '_id', '__getattribute__', '__setattr__', '__dict__')
+#    return attribute in ('_in', '_out', '_id', '__getattribute__',
+#                         '__setattr__', '__dict__')
     # good enough for now. :)
     if '__local_dict' in attribute:
         return True
@@ -266,7 +274,8 @@ class Proxy(object):
         my_cp = self.__local_dict['_cp']
         my_id = self.__local_dict['_id']
         # Pass the set attribute across
-        request = Request('setattr', {'id':my_id, 'attribute':attribute, 'value':value})
+        request = Request('setattr',
+                          {'id':my_id, 'attribute':attribute, 'value':value})
         return my_cp.make_request(request, attribute=attribute)
 class ObjectProxy(Proxy):
@@ -324,7 +333,8 @@ class ObjectProxy(Proxy):
         return self.__str__()
     def __nonzero__(self):
-        # bool(obj) is another method that skips __getattribute__. There's no good way to just pass
+        # bool(obj) is another method that skips __getattribute__.
+        # There's no good way to just pass
         # the method on, so we use a special message.
         my_cp = self.__local_dict['_cp']
         my_id = self.__local_dict['_id']
@@ -395,7 +405,9 @@ class CallableProxy(object):
         # having already checked if the method starts with '_' so we
         # can safely pass this one to the remote object.
         #_prnt("calling %s %s" % (self._object_id, self._name)
-        request = Request('call', {'id':self._object_id, 'name':self._name, 'args':args, 'kwargs':kwargs})
+        request = Request('call', {'id':self._object_id,
+                                   'name':self._name,
+                                   'args':args, 'kwargs':kwargs})
         return self._cp.make_request(request, attribute=self._name)
 class Server(object):
@@ -444,14 +456,15 @@ class Server(object):
     def handle_setitem(self, obj, req):
         obj[req['key']] = req['value']
-        return None # *TODO figure out what the actual return value of __setitem__ should be
+        return None # *TODO figure out what the actual return value
+                    # of __setitem__ should be
     def handle_eq(self, obj, req):
         #_log("__eq__ %s %s" % (obj, req))
         rhs = None
         try:
             rhs = self._objects[req['rhs']]
-        except KeyError, e:
+        except KeyError:
             return False
         return (obj == rhs)
@@ -565,7 +578,7 @@ class Server(object):
         #_log("objects: %s" % self._objects)
         s = Pickle.dumps(body)
         _log(`s`)
-        str_ = _write_lp_hunk(self._out, s)
+        _write_lp_hunk(self._out, s)
     def write_exception(self, e):
         """Helper method to respond with an exception."""
@@ -621,14 +634,16 @@ def named(name):
             import_err_strings.append(err.__str__())
             toimport = '.'.join(toimport.split('.')[:-1])
     if obj is None:
-        raise ImportError('%s could not be imported. Import errors: %r' % (name, import_err_strings))
+        raise ImportError(
+            '%s could not be imported. Import errors: %r' % (name, import_err_strings))
     for seg in name.split('.')[1:]:
         try:
             obj = getattr(obj, seg)
         except AttributeError:
             dirobj = dir(obj)
             dirobj.sort()
-            raise AttributeError('attribute %r missing from %r (%r) %r. Import errors: %r' % (
+            raise AttributeError(
+                'attribute %r missing from %r (%r) %r. Import errors: %r' % (
                 seg, obj, dirobj, name, import_err_strings))
     return obj

View File

@@ -17,5 +17,6 @@ except ImportError, e:
     except ImportError:
         try:
             from support.stacklesss import greenlet, getcurrent, GreenletExit
+            (greenlet, getcurrent, GreenletExit) # silence pyflakes
         except ImportError, e:
             raise ImportError("Unable to find an implementation of greenlet.")

View File

@@ -30,7 +30,7 @@ class FirstSwitch(object):
         gr.t = t
         tasklet_to_greenlet[t] = gr
         t.setup(*args, **kw)
-        result = t.run()
+        t.run()
 class greenlet(object):
@@ -75,10 +75,10 @@ def emulate():
     module.getcurrent = getcurrent
     module.GreenletExit = GreenletExit
-    caller = t = stackless.getcurrent()
-    tasklet_to_greenlet[t] = None
+    caller = stackless.getcurrent()
+    tasklet_to_greenlet[caller] = None
     main_coro = greenlet()
-    tasklet_to_greenlet[t] = main_coro
-    main_coro.t = t
+    tasklet_to_greenlet[caller] = main_coro
+    main_coro.t = caller
     del main_coro.switch ## It's already running
     coro_args[main_coro] = None

View File

@@ -74,9 +74,13 @@ def tworker():
             rv = meth(*args,**kwargs)
         except SYS_EXCS:
             raise
-        except Exception,exn:
+        except Exception:
             rv = sys.exc_info()
-        _rspq.put((e,rv))
+        _rspq.put((e,rv)) # @@tavis: not supposed to
+                          # keep references to
+                          # sys.exc_info() so it would
+                          # be worthwhile testing
+                          # if this leads to memory leaks
         meth = args = kwargs = e = rv = None
         _signal_t2e()
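
Note: the @@tavis comment above is about holding on to sys.exc_info() tuples, which keep a traceback and all of its frames alive. A simplified sketch of the worker loop's shape, with the reference-clearing step called out (standalone illustration, not eventlet.tpool itself):

import sys
import Queue

_reqq = Queue.Queue()
_rspq = Queue.Queue()

def tworker_sketch():
    while True:
        msg = _reqq.get()
        if msg is None:
            return
        e, meth, args, kwargs = msg
        try:
            rv = meth(*args, **kwargs)
        except Exception:
            rv = sys.exc_info()   # the traceback travels back as the result
        _rspq.put((e, rv))
        # drop every local that could pin the exc_info traceback and frames
        meth = args = kwargs = e = rv = None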

View File

@@ -4,10 +4,10 @@ You generally don't have to use it unless you need to call reactor.run()
 yourself.
 """
 from eventlet.hubs.twistedr import BaseTwistedHub
-from eventlet.api import use_hub, _threadlocal
+from eventlet import use_hub
 from eventlet.support import greenlets as greenlet
+from eventlet.hubs import _threadlocal
 use_hub(BaseTwistedHub)
 assert not hasattr(_threadlocal, 'hub')
 hub = _threadlocal.hub = _threadlocal.Hub(greenlet.getcurrent())

View File

@@ -1,12 +1,11 @@
-import os
 import socket
-import errno
 import warnings
-from eventlet import greenio
 def g_log(*args):
-    warnings.warn("eventlet.util.g_log is deprecated because we're pretty sure no one uses it. Send mail to eventletdev@lists.secondlife.com if you are actually using it.",
+    warnings.warn("eventlet.util.g_log is deprecated because "
+        "we're pretty sure no one uses it. "
+        "Send mail to eventletdev@lists.secondlife.com "
+        "if you are actually using it.",
         DeprecationWarning, stacklevel=2)
     import sys
     from eventlet.support import greenlets as greenlet
@@ -49,7 +48,8 @@ except ImportError:
     try:
         from eventlet.green.OpenSSL import SSL
     except ImportError:
-        raise ImportError("To use SSL with Eventlet, you must install PyOpenSSL or use Python 2.6 or later.")
+        raise ImportError("To use SSL with Eventlet, "
+                          "you must install PyOpenSSL or use Python 2.6 or later.")
     context = SSL.Context(SSL.SSLv23_METHOD)
     if certificate is not None:
         context.use_certificate_file(certificate)
@@ -126,4 +126,3 @@ def set_reuse_addr(descriptor):
                              descriptor.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) | 1)
     except socket.error:
         pass
-

View File

@@ -15,7 +15,8 @@ DEFAULT_MAX_SIMULTANEOUS_REQUESTS = 1024
 DEFAULT_MAX_HTTP_VERSION = 'HTTP/1.1'
 MAX_REQUEST_LINE = 8192
 MINIMUM_CHUNK_SIZE = 4096
-DEFAULT_LOG_FORMAT='%(client_ip)s - - [%(date_time)s] "%(request_line)s" %(status_code)s %(body_length)s %(wall_seconds).6f'
+DEFAULT_LOG_FORMAT= ('%(client_ip)s - - [%(date_time)s] "%(request_line)s"'
+                     ' %(status_code)s %(body_length)s %(wall_seconds).6f')
 __all__ = ['server', 'format_date_time']
@@ -173,7 +174,8 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
             self.raw_requestline = self.rfile.readline(MAX_REQUEST_LINE)
             if len(self.raw_requestline) == MAX_REQUEST_LINE:
                 self.wfile.write(
-                    "HTTP/1.0 414 Request URI Too Long\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
+                    "HTTP/1.0 414 Request URI Too Long\r\n"
+                    "Connection: close\r\nContent-length: 0\r\n\r\n")
                 self.close_connection = 1
                 return
         except greenio.SSL.ZeroReturnError:
@@ -277,7 +279,8 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
                 _writelines(towrite)
                 length[0] = length[0] + sum(map(len, towrite))
             except UnicodeEncodeError:
-                print "Encountered unicode while attempting to write wsgi response: ", [x for x in towrite if isinstance(x, unicode)]
+                print "Encountered unicode while attempting to write wsgi response: ", \
+                      [x for x in towrite if isinstance(x, unicode)]
                 traceback.print_exc()
                 _writelines(
                     ["HTTP/1.0 500 Internal Server Error\r\n",
@@ -285,7 +288,8 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
                      "Content-type: text/plain\r\n",
                      "Content-length: 98\r\n",
                      "\r\n",
-                     "Internal Server Error: wsgi application passed a unicode object to the server instead of a string."])
+                     ("Internal Server Error: wsgi application passed "
+                      "a unicode object to the server instead of a string.")])
         def start_response(status, response_headers, exc_info=None):
             status_code[0] = status.split()[0]
@@ -298,7 +302,8 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
                         # Avoid dangling circular ref
                         exc_info = None
-            capitalized_headers = [('-'.join([x.capitalize() for x in key.split('-')]), value)
+            capitalized_headers = [('-'.join([x.capitalize()
+                                              for x in key.split('-')]), value)
                                    for key, value in response_headers]
             headers_set[:] = [status, capitalized_headers]
@@ -329,17 +334,19 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
                 write(''.join(towrite))
                 if not headers_sent or (use_chunked[0] and just_written_size):
                     write('')
-        except Exception, e:
+        except Exception:
            self.close_connection = 1
            exc = traceback.format_exc()
            print exc
            if not headers_set:
-               start_response("500 Internal Server Error", [('Content-type', 'text/plain')])
+               start_response("500 Internal Server Error",
+                              [('Content-type', 'text/plain')])
                write(exc)
        finally:
            if hasattr(result, 'close'):
                result.close()
-           if self.environ['eventlet.input'].position < self.environ.get('CONTENT_LENGTH', 0):
+           if (self.environ['eventlet.input'].position
+                   < self.environ.get('CONTENT_LENGTH', 0)):
                ## Read and discard body if there was no pending 100-continue
                if not self.environ['eventlet.input'].wfile:
                    while self.environ['eventlet.input'].read(MINIMUM_CHUNK_SIZE):
@@ -454,7 +461,6 @@ class Server(BaseHTTPServer.HTTPServer):
         self.log_format = log_format
     def get_environ(self):
-        socket = self.socket
         d = {
             'wsgi.errors': sys.stderr,
             'wsgi.version': (1, 0),
@@ -543,7 +549,8 @@ def server(sock, site,
         if port == ':80':
             port = ''
-        serv.log.write("(%s) wsgi starting up on %s://%s%s/\n" % (os.getpid(), scheme, host, port))
+        serv.log.write("(%s) wsgi starting up on %s://%s%s/\n" % (
+            os.getpid(), scheme, host, port))
         while True:
             try:
                 client_socket = sock.accept()
@@ -572,4 +579,3 @@ def server(sock, site,
             except socket.error, e:
                 if get_errno(e) not in BROKEN_SOCK:
                     traceback.print_exc()
-
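
Note: one small illustration of the header-normalization expression that the start_response hunk above reformats: it title-cases each dash-separated piece of the header name and leaves the value alone. A standalone sketch (illustrative helper, not part of the wsgi module):

def capitalize_headers(response_headers):
    # 'content-TYPE' -> 'Content-Type'; values pass through untouched
    return [('-'.join([x.capitalize() for x in key.split('-')]), value)
            for key, value in response_headers]

# example: capitalize_headers([('content-length', '0'), ('x-foo-bar', 'y')])
# == [('Content-Length', '0'), ('X-Foo-Bar', 'y')]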

View File

@@ -1,11 +1,14 @@
 "Test cases for db_pool"
+import sys
+import os
+import traceback
+from unittest import TestCase, main
 from tests import skipped, skip_unless, skip_with_pyevent
-from unittest import TestCase, main
 from eventlet import event
 from eventlet import db_pool
 import eventlet
-import os
 class DBTester(object):
     __test__ = False # so that nose doesn't try to execute this directly
@@ -26,7 +29,7 @@ class DBTester(object):
             self.connection.close()
         self.drop_db()
-    def set_up_dummy_table(self, connection = None):
+    def set_up_dummy_table(self, connection=None):
         close_connection = False
         if connection is None:
             close_connection = True
@@ -84,7 +87,7 @@ class DBConnectionPool(DBTester):
             self.assert_(False)
         except AssertionError:
             raise
-        except Exception, e:
+        except Exception:
             pass
         cursor.close()
@@ -144,7 +147,6 @@ class DBConnectionPool(DBTester):
             curs.execute(SHORT_QUERY)
             results.append(2)
             evt.send()
-        evt2 = event.Event()
         eventlet.spawn(a_query)
         results.append(1)
         self.assertEqual([1], results)
@@ -299,7 +301,10 @@ class DBConnectionPool(DBTester):
     @skipped
     def test_max_idle(self):
-        # This test is timing-sensitive. Rename the function without the "dont" to run it, but beware that it could fail or take a while.
+        # This test is timing-sensitive. Rename the function without
+        # the "dont" to run it, but beware that it could fail or take
+        # a while.
         self.pool = self.create_pool(max_size=2, max_idle=0.02)
         self.connection = self.pool.get()
         self.connection.close()
@@ -319,7 +324,10 @@ class DBConnectionPool(DBTester):
     @skipped
     def test_max_idle_many(self):
-        # This test is timing-sensitive. Rename the function without the "dont" to run it, but beware that it could fail or take a while.
+        # This test is timing-sensitive. Rename the function without
+        # the "dont" to run it, but beware that it could fail or take
+        # a while.
         self.pool = self.create_pool(max_size=2, max_idle=0.02)
         self.connection, conn2 = self.pool.get(), self.pool.get()
         self.connection.close()
@@ -332,7 +340,10 @@ class DBConnectionPool(DBTester):
     @skipped
     def test_max_age(self):
-        # This test is timing-sensitive. Rename the function without the "dont" to run it, but beware that it could fail or take a while.
+        # This test is timing-sensitive. Rename the function without
+        # the "dont" to run it, but beware that it could fail or take
+        # a while.
         self.pool = self.create_pool(max_size=2, max_age=0.05)
         self.connection = self.pool.get()
         self.connection.close()
@@ -347,7 +358,10 @@ class DBConnectionPool(DBTester):
     @skipped
     def test_max_age_many(self):
-        # This test is timing-sensitive. Rename the function without the "dont" to run it, but beware that it could fail or take a while.
+        # This test is timing-sensitive. Rename the function without
+        # the "dont" to run it, but beware that it could fail or take
+        # a while.
         self.pool = self.create_pool(max_size=2, max_age=0.15)
         self.connection, conn2 = self.pool.get(), self.pool.get()
         self.connection.close()
@@ -424,7 +438,8 @@ class RaisingDBModule(object):
 class TpoolConnectionPool(DBConnectionPool):
     __test__ = False # so that nose doesn't try to execute this directly
-    def create_pool(self, max_size = 1, max_idle = 10, max_age = 10, connect_timeout=0.5, module=None):
+    def create_pool(self, max_size=1, max_idle=10, max_age=10,
+                    connect_timeout=0.5, module=None):
         if module is None:
             module = self._dbmodule
         return db_pool.TpooledConnectionPool(module,
@@ -447,7 +462,8 @@ class TpoolConnectionPool(DBConnectionPool):
 class RawConnectionPool(DBConnectionPool):
     __test__ = False # so that nose doesn't try to execute this directly
-    def create_pool(self, max_size = 1, max_idle = 10, max_age = 10, connect_timeout= 0.5, module=None):
+    def create_pool(self, max_size=1, max_idle=10, max_age=10,
+                    connect_timeout=0.5, module=None):
         if module is None:
             module = self._dbmodule
         return db_pool.RawConnectionPool(module,
@@ -458,8 +474,9 @@ class RawConnectionPool(DBConnectionPool):
 def get_auth():
-    """Looks in the local directory and in the user's home directory for a file named ".test_dbauth",
-    which contains a json map of parameters to the connect function.
+    """Looks in the local directory and in the user's home directory
+    for a file named ".test_dbauth", which contains a json map of
+    parameters to the connect function.
     """
     files = [os.path.join(os.path.dirname(__file__), '.test_dbauth'),
              os.path.join(os.path.expanduser('~'), '.test_dbauth')]
@@ -473,13 +490,14 @@ def get_auth():
         return dict([(str(modname), dict([(str(k), str(v))
                                           for k, v in connectargs.items()]))
                      for modname, connectargs in auth_utf8.items()])
-    except (IOError, ImportError), e:
+    except (IOError, ImportError):
         pass
     return {'MySQLdb':{'host': 'localhost','user': 'root','passwd': ''},
             'psycopg2':{'user':'test'}}
 def mysql_requirement(_f):
+    verbose = os.environ.get('eventlet_test_mysql_verbose')
     try:
         import MySQLdb
         try:
@@ -487,12 +505,13 @@ def mysql_requirement(_f):
             MySQLdb.connect(**auth)
             return True
         except MySQLdb.OperationalError:
-            print "Skipping mysql tests, error when connecting"
-            import traceback
-            traceback.print_exc()
+            if verbose:
+                print >> sys.stderr, ">> Skipping mysql tests, error when connecting:"
+                traceback.print_exc()
             return False
     except ImportError:
-        print "Skipping mysql tests, MySQLdb not importable"
+        if verbose:
+            print >> sys.stderr, ">> Skipping mysql tests, MySQLdb not importable"
         return False
 class MysqlConnectionPool(object):
@@ -600,7 +619,6 @@ class Psycopg2ConnectionPool(object):
     def drop_db(self):
         auth = self._auth.copy()
-        dbname = auth.pop('database')
         conn = self._dbmodule.connect(**auth)
         conn.set_isolation_level(0)
         db = conn.cursor()

View File

@@ -40,7 +40,7 @@ class TestGreenIo(LimitedTestCase):
             self.assertEqual(e.args[0], 'timed out')
         except socket.error, e:
             # unreachable is also a valid outcome
-            if e[0] != errno.EHOSTUNREACH:
+            if not e[0] in (errno.EHOSTUNREACH, errno.ENETUNREACH):
                 raise
     def test_accept_timeout(self):
@@ -62,7 +62,8 @@ class TestGreenIo(LimitedTestCase):
         s.settimeout(0.1)
         gs = greenio.GreenSocket(s)
         e = gs.connect_ex(('192.0.2.1', 80))
-        self.assertEquals(e, errno.EAGAIN)
+        if not e in (errno.EHOSTUNREACH, errno.ENETUNREACH):
+            self.assertEquals(e, errno.EAGAIN)
     def test_recv_timeout(self):
         listener = greenio.GreenSocket(socket.socket())