Python 3 compat: Fix all Travis test failures

This patch consists of the following changes:

* Splitting eventlet.greenio into base, py2 and py3 parts
  (eventlet.greenio should keep exporting the same public objects). This
  change is motivated by the size and the number of conditionals in the
  current greenio code
* Connected to the first point: implementing an almost completely new
  GreenPipe callable that reuses parts of the old GreenPipe code but drops
  the _fileobject/SocketIO inheritance in favour of io.FileIO, and makes
  use of a patched _pyio.open function which wraps the raw file-like
  object in the various readers and writers (these take care of buffering,
  encoding/decoding etc.)
* Implementing green versions of the following modules, either from
  scratch or by updating existing ones (a short usage sketch follows this
  list):

  * http.* (needed by Python 3's urllib)
  * selectors (Python >= 3.4, used by the subprocess module)
  * urllib.* (needed by various tests; we were already exposing a green
    urllib)

* Modifying some tests to make them pass, which includes:

  * unicode/bytestring issues
  * modifying wsgi_test_conntimeout.py to not pass the bufsize and close
    arguments to ExplodingSocketFile: on Python 3 it inherits from
    SocketIO, which, as far as I can see, doesn't deal with buffering at
    all

* Assorted cleanup and reorganization
* Requiring Python 3.x tests to pass for the whole build to pass
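
For illustration, a minimal sketch (not part of the patch) of how the new
green modules are meant to be used on Python 3; the URL below is a
placeholder:

    # urlopen comes from the stdlib urllib.request code injected with the
    # green socket module, so the call cooperates with other greenlets
    # instead of blocking the hub.
    from eventlet.green.urllib import request

    resp = request.urlopen('http://example.com/')
    body = resp.read()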

Known issues:

* code repetition
* naming inconsistencies
* possibly breaking external code that relies on private eventlet.greenio attributes

Closes https://github.com/eventlet/eventlet/issues/108

Affects https://github.com/eventlet/eventlet/issues/6 (I'd call this
experimental support)

Should help for https://github.com/eventlet/eventlet/issues/145
Should help for https://github.com/eventlet/eventlet/issues/157
Author: Jakub Stasiak
Date:   2015-02-12 00:30:28 +01:00
Parent: 4e1804af31
Commit: 449c90a509

24 changed files with 662 additions and 320 deletions

View File

@ -20,12 +20,6 @@ env:
  matrix:
    fast_finish: true
    allow_failures:
-     - env: TOX_ENV=py33-selects
-     - env: TOX_ENV=py33-poll
-     - env: TOX_ENV=py33-epolls
-     - env: TOX_ENV=py34-selects
-     - env: TOX_ENV=py34-poll
-     - env: TOX_ENV=py34-epolls
      - env: TOX_ENV=pypy-selects
      - env: TOX_ENV=pypy-poll
      - env: TOX_ENV=pypy-epolls

View File

@ -0,0 +1,2 @@
from eventlet.support import six
assert six.PY3, 'This is a Python 3 module'

View File

@ -0,0 +1,9 @@
from eventlet import patcher
from eventlet.green import os, socket
from eventlet.green.urllib import parse as urllib_parse
patcher.inject('http.client', globals(),
('os', os), ('socket', socket), ('urllib.parse', urllib_parse))
del patcher
del urllib_parse
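
As an illustration (not part of the patch), the injected module keeps the
stdlib http.client API while its sockets cooperate with the hub, so requests
can be multiplexed with a GreenPool; the host names below are placeholders:

    import eventlet
    from eventlet.green.http import client

    def fetch_status(host):
        # Plain http.client calls; the blocking I/O inside them yields to
        # the hub because the module was injected with the green socket.
        conn = client.HTTPConnection(host, timeout=10)
        try:
            conn.request('GET', '/')
            return host, conn.getresponse().status
        finally:
            conn.close()

    pool = eventlet.GreenPool()
    for host, status in pool.imap(fetch_status, ['example.com', 'example.org']):
        print(host, status)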

View File

@ -0,0 +1,13 @@
from eventlet.green import threading, time
from eventlet.green.http import client
from eventlet.green.urllib import parse as urllib_parse, request as urllib_request
from eventlet import patcher
patcher.inject('http.cookiejar', globals(),
('http.client', client), ('threading', threading),
('urllib.parse', urllib_parse), ('urllib.request', urllib_request),
('time', time))
del urllib_request
del urllib_parse
del patcher

View File

@ -0,0 +1,7 @@
from eventlet import patcher
from eventlet.green import time
patcher.inject('http.cookies', globals())
_getdate = patcher.patch_function(_getdate, ('time', time))
del patcher
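
For context, my reading of the patcher API used above (not code from this
patch): patch_function wraps a callable so that the listed green modules are
swapped in only for the duration of each call, which covers functions that
import their dependencies lazily. A hypothetical sketch:

    from eventlet import patcher
    from eventlet.green import time as green_time

    def lazy_timestamp():
        # An import inside the body would bypass module-level injection.
        import time
        return time.time()

    # Hypothetical usage: while it runs, 'time' resolves to the green module.
    green_timestamp = patcher.patch_function(lazy_timestamp, ('time', green_time))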

View File

@ -0,0 +1,17 @@
from eventlet import patcher
from eventlet.green import os, time, select, socket, SocketServer, subprocess
from eventlet.green.http import client
from eventlet.green.urllib import parse as urllib_parse
patcher.inject('http.server', globals(),
('http.client', client), ('os', os), ('select', select),
('socket', socket), ('socketserver', SocketServer), ('time', time),
('urllib.parse', urllib_parse))
CGIHTTPRequestHandler.run_cgi = patcher.patch_function(
CGIHTTPRequestHandler.run_cgi, ('subprocess', subprocess))
del urllib_parse
del client
del patcher

View File

@ -0,0 +1,11 @@
import sys
from eventlet import patcher
from eventlet.green import select
patcher.inject('selectors', globals(), ('select', select))
del patcher
if sys.platform != 'win32':
SelectSelector._select = staticmethod(select.select)
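
A hedged usage sketch (not from the patch): the injected module keeps the
stdlib selectors interface, with the underlying select call replaced by
eventlet's, so waiting in a selector yields to other greenlets:

    from eventlet.green import selectors, socket

    server = socket.socket()
    server.bind(('127.0.0.1', 0))
    server.listen(5)
    server.setblocking(False)

    # SelectSelector is the variant patched above; its _select is green.
    sel = selectors.SelectSelector()
    sel.register(server, selectors.EVENT_READ, data='accept')
    for key, events in sel.select(timeout=1):
        print(key.data, events)
    sel.close()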

View File

@ -1,15 +1,21 @@
  import errno
- import time
+ import sys
  from types import FunctionType

  import eventlet
  from eventlet import greenio
  from eventlet import patcher
- from eventlet.green import select
+ from eventlet.green import select, threading, time
  from eventlet.support import six

- patcher.inject('subprocess', globals(), ('select', select))
+ to_patch = [('select', select), ('threading', threading), ('time', time)]
+ if sys.version_info > (3, 4):
+     from eventlet.green import selectors
+     to_patch.append(('selectors', selectors))
+ patcher.inject('subprocess', globals(), *to_patch)

  subprocess_orig = __import__("subprocess")
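
A brief, hedged example of the intent (commands are placeholders): with the
green selectors, threading and time modules injected, Popen.communicate() on
newer Python 3 versions waits cooperatively instead of blocking the hub:

    import eventlet
    from eventlet.green import subprocess

    def run(cmd):
        # communicate() polls the pipes via selectors on Python 3; with the
        # green module injected that wait yields to the hub.
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        out, _ = proc.communicate()
        return out

    pool = eventlet.GreenPool()
    print(list(pool.imap(run, [['echo', 'one'], ['echo', 'two']])))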

View File

@ -1,38 +0,0 @@
from eventlet import patcher
from eventlet.green import socket
from eventlet.green import time
from eventlet.green import httplib
from eventlet.green import ftplib
to_patch = [('socket', socket), ('httplib', httplib),
('time', time), ('ftplib', ftplib)]
try:
from eventlet.green import ssl
to_patch.append(('ssl', ssl))
except ImportError:
pass
patcher.inject('urllib', globals(), *to_patch)
try:
URLopener
except NameError:
patcher.inject('urllib.request', globals(), *to_patch)
# patch a bunch of things that have imports inside the
# function body; this is lame and hacky but I don't feel
# too bad because urllib is a hacky pile of junk that no
# one should be using anyhow
URLopener.open_http = patcher.patch_function(URLopener.open_http, ('httplib', httplib))
if hasattr(URLopener, 'open_https'):
URLopener.open_https = patcher.patch_function(URLopener.open_https, ('httplib', httplib))
URLopener.open_ftp = patcher.patch_function(URLopener.open_ftp, ('ftplib', ftplib))
ftpwrapper.init = patcher.patch_function(ftpwrapper.init, ('ftplib', ftplib))
ftpwrapper.retrfile = patcher.patch_function(ftpwrapper.retrfile, ('ftplib', ftplib))
del patcher
# Run test program when run as a script
if __name__ == '__main__':
main()

View File

@ -0,0 +1,40 @@
from eventlet import patcher
from eventlet.green import socket
from eventlet.green import time
from eventlet.green import httplib
from eventlet.green import ftplib
from eventlet.support import six
if six.PY2:
to_patch = [('socket', socket), ('httplib', httplib),
('time', time), ('ftplib', ftplib)]
try:
from eventlet.green import ssl
to_patch.append(('ssl', ssl))
except ImportError:
pass
patcher.inject('urllib', globals(), *to_patch)
try:
URLopener
except NameError:
patcher.inject('urllib.request', globals(), *to_patch)
# patch a bunch of things that have imports inside the
# function body; this is lame and hacky but I don't feel
# too bad because urllib is a hacky pile of junk that no
# one should be using anyhow
URLopener.open_http = patcher.patch_function(URLopener.open_http, ('httplib', httplib))
if hasattr(URLopener, 'open_https'):
URLopener.open_https = patcher.patch_function(URLopener.open_https, ('httplib', httplib))
URLopener.open_ftp = patcher.patch_function(URLopener.open_ftp, ('ftplib', ftplib))
ftpwrapper.init = patcher.patch_function(ftpwrapper.init, ('ftplib', ftplib))
ftpwrapper.retrfile = patcher.patch_function(ftpwrapper.retrfile, ('ftplib', ftplib))
del patcher
# Run test program when run as a script
if __name__ == '__main__':
main()

View File

@ -0,0 +1,4 @@
from eventlet import patcher
from eventlet.green.urllib import response
patcher.inject('urllib.error', globals(), ('urllib.response', response))
del patcher
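
A hedged usage sketch (not from the patch) combining the green request and
error modules; the address below is a placeholder chosen to fail quickly:

    from eventlet.green.urllib import error, request

    try:
        request.urlopen('http://127.0.0.1:1/')
    except error.HTTPError as exc:
        # The server answered, but with an error status.
        print('HTTP error:', exc.code)
    except error.URLError as exc:
        # Connection-level failure (HTTPError subclasses URLError,
        # so it must be caught first).
        print('request failed:', exc.reason)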

View File

@ -0,0 +1,3 @@
from eventlet import patcher
patcher.inject('urllib.parse', globals())
del patcher

View File

@ -0,0 +1,44 @@
from eventlet import patcher
from eventlet.green import ftplib, os, socket, time
from eventlet.green.http import client as http_client
from eventlet.green.urllib import error, parse, response
# TODO should we also have green email version?
# import email
to_patch = [
('http.client', http_client),
('os', os),
('socket', socket),
('time', time),
('urllib.error', error),
('urllib.parse', parse),
('urllib.response', response),
]
try:
from eventlet.green import ssl
except ImportError:
pass
else:
to_patch.append(('ssl', ssl))
patcher.inject('urllib.request', globals(), *to_patch)
del to_patch
to_patch_in_functions = [('ftplib', ftplib)]
del ftplib
FTPHandler.ftp_open = patcher.patch_function(FTPHandler.ftp_open, *to_patch_in_functions)
URLopener.open_ftp = patcher.patch_function(URLopener.open_ftp, *to_patch_in_functions)
ftperrors = patcher.patch_function(ftperrors, *to_patch_in_functions)
ftpwrapper.init = patcher.patch_function(ftpwrapper.init, *to_patch_in_functions)
ftpwrapper.retrfile = patcher.patch_function(ftpwrapper.retrfile, *to_patch_in_functions)
del error
del parse
del response
del to_patch_in_functions

View File

@ -0,0 +1,3 @@
from eventlet import patcher
patcher.inject('urllib.response', globals())
del patcher

View File

@ -0,0 +1,8 @@
from eventlet.support import six
from eventlet.greenio.base import * # noqa
if six.PY2:
from eventlet.greenio.py2 import * # noqa
else:
from eventlet.greenio.py3 import * # noqa
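
Illustration of the intended effect (my reading of the split, not code from
the patch): callers keep importing through eventlet.greenio and get the
right implementation for their interpreter:

    import os

    from eventlet import greenio

    # Same public entry point on Python 2 and 3; the py2/py3 submodule
    # supplies the actual GreenPipe behind the star import above.
    r, w = os.pipe()
    reader = greenio.GreenPipe(r, 'rb')
    writer = greenio.GreenPipe(w, 'wb', 0)
    writer.write(b'ping\n')
    writer.close()
    print(reader.readline())
    reader.close()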

View File

@ -7,9 +7,13 @@ import time
  import warnings

  from eventlet.support import get_errno, six
- from eventlet.hubs import trampoline, notify_close, notify_opened, IOClosed
+ from eventlet.hubs import trampoline, notify_opened, IOClosed

- __all__ = ['GreenSocket', 'GreenPipe', 'shutdown_safe']
+ __all__ = [
+     'GreenSocket', '_GLOBAL_DEFAULT_TIMEOUT', 'set_nonblocking',
+     'SOCKET_CLOSED', 'CONNECT_ERR', 'CONNECT_SUCCESS',
+     'shutdown_safe', 'SSL',
+ ]

  BUFFER_SIZE = 4096
  CONNECT_ERR = set((errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK))
@ -17,11 +21,8 @@ CONNECT_SUCCESS = set((0, errno.EISCONN))
  if sys.platform[:3] == "win":
      CONNECT_ERR.add(errno.WSAEINVAL)  # Bug 67

- if six.PY3:
-     from io import IOBase as file
-     _fileobject = socket.SocketIO
- elif six.PY2:
-     _fileobject = socket._fileobject
+ if six.PY2:
+     _python2_fileobject = socket._fileobject

  def socket_connect(descriptor, address):
@ -293,7 +294,7 @@ class GreenSocket(object):
      else:
          def makefile(self, *args, **kwargs):
              dupped = self.dup()
-             res = _fileobject(dupped, *args, **kwargs)
+             res = _python2_fileobject(dupped, *args, **kwargs)
              if hasattr(dupped, "_drop"):
                  dupped._drop()
              return res
@ -418,119 +419,11 @@ class GreenSocket(object):
        getattr(self.fd, '_sock', self.fd)._drop()

class _SocketDuckForFd(object):
"""Class implementing all socket method used by _fileobject
in cooperative manner using low level os I/O calls.
"""
_refcount = 0
def __init__(self, fileno):
self._fileno = fileno
notify_opened(fileno)
self._closed = False
def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None):
if self._closed:
# Don't trampoline if we're already closed.
raise IOClosed()
try:
return trampoline(fd, read=read, write=write, timeout=timeout,
timeout_exc=timeout_exc,
mark_as_closed=self._mark_as_closed)
except IOClosed:
# Our fileno has been obsoleted. Defang ourselves to
# prevent spurious closes.
self._mark_as_closed()
raise
def _mark_as_closed(self):
self._closed = True
@property
def _sock(self):
return self
def fileno(self):
return self._fileno
def recv(self, buflen):
while True:
try:
data = os.read(self._fileno, buflen)
return data
except OSError as e:
if get_errno(e) not in SOCKET_BLOCKING:
raise IOError(*e.args)
self._trampoline(self, read=True)
def recv_into(self, buf, nbytes=0, flags=0):
if nbytes == 0:
nbytes = len(buf)
data = self.recv(nbytes)
buf[:nbytes] = data
return len(data)
def send(self, data):
while True:
try:
return os.write(self._fileno, data)
except OSError as e:
if get_errno(e) not in SOCKET_BLOCKING:
raise IOError(*e.args)
else:
trampoline(self, write=True)
def sendall(self, data):
len_data = len(data)
os_write = os.write
fileno = self._fileno
try:
total_sent = os_write(fileno, data)
except OSError as e:
if get_errno(e) != errno.EAGAIN:
raise IOError(*e.args)
total_sent = 0
while total_sent < len_data:
self._trampoline(self, write=True)
try:
total_sent += os_write(fileno, data[total_sent:])
except OSError as e:
if get_errno(e) != errno. EAGAIN:
raise IOError(*e.args)
def __del__(self):
self._close()
def _close(self):
notify_close(self._fileno)
self._mark_as_closed()
try:
os.close(self._fileno)
except:
# os.close may fail if __init__ didn't complete
# (i.e file dscriptor passed to popen was invalid
pass
def __repr__(self):
return "%s:%d" % (self.__class__.__name__, self._fileno)
def _reuse(self):
self._refcount += 1
def _drop(self):
self._refcount -= 1
if self._refcount == 0:
self._close()
# Python3
_decref_socketios = _drop
-def _operationOnClosedFile(*args, **kwargs):
+def _operation_on_closed_file(*args, **kwargs):
     raise ValueError("I/O operation on closed file")

-class GreenPipe(_fileobject):
-    """
+greenpipe_doc = """
     GreenPipe is a cooperative replacement for file class.
     It will cooperate on pipes. It will block on regular file.
     Differneces from file class:
@ -542,115 +435,6 @@ class GreenPipe(_fileobject):
    - file argument can be descriptor, file name or file object.
    """
def __init__(self, f, mode='r', bufsize=-1):
if not isinstance(f, six.string_types + (int, file)):
raise TypeError('f(ile) should be int, str, unicode or file, not %r' % f)
if isinstance(f, six.string_types):
f = open(f, mode, 0)
if isinstance(f, int):
fileno = f
self._name = "<fd:%d>" % fileno
else:
fileno = os.dup(f.fileno())
self._name = f.name
if not (
f.mode == mode or
six.PY3 and f.mode == 'rb+' and mode == 'wb+'
):
raise ValueError('file.mode %r does not match mode parameter %r' % (f.mode, mode))
self._name = f.name
f.close()
if six.PY3:
mode = mode.rstrip('+')
super(GreenPipe, self).__init__(_SocketDuckForFd(fileno), mode)
set_nonblocking(self)
self.softspace = 0
if six.PY3:
def write(self, data):
while data:
sent = _fileobject.write(self, data)
data = data[sent:]
@property
def name(self):
return self._name
def __repr__(self):
return "<%s %s %r, mode %r at 0x%x>" % (
self.closed and 'closed' or 'open',
self.__class__.__name__,
self.name,
self.mode,
(id(self) < 0) and (sys.maxint + id(self)) or id(self))
def close(self):
super(GreenPipe, self).close()
for method in [
'fileno', 'flush', 'isatty', 'next', 'read', 'readinto',
'readline', 'readlines', 'seek', 'tell', 'truncate',
'write', 'xreadlines', '__iter__', '__next__', 'writelines']:
setattr(self, method, _operationOnClosedFile)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def _get_readahead_len(self):
return len(self._rbuf.getvalue())
def _clear_readahead_buf(self):
len = self._get_readahead_len()
if len > 0:
self.read(len)
def tell(self):
self.flush()
try:
return os.lseek(self.fileno(), 0, 1) - self._get_readahead_len()
except OSError as e:
raise IOError(*e.args)
def seek(self, offset, whence=0):
self.flush()
if whence == 1 and offset == 0: # tell synonym
return self.tell()
if whence == 1: # adjust offset by what is read ahead
offset -= self._get_readahead_len()
try:
rv = os.lseek(self.fileno(), offset, whence)
except OSError as e:
raise IOError(*e.args)
else:
self._clear_readahead_buf()
return rv
if getattr(file, "truncate", None): # not all OSes implement truncate
def truncate(self, size=-1):
self.flush()
if size == -1:
size = self.tell()
try:
rv = os.ftruncate(self.fileno(), size)
except OSError as e:
raise IOError(*e.args)
else:
self.seek(size) # move position&clear buffer
return rv
def isatty(self):
try:
return os.isatty(self.fileno())
except OSError as e:
raise IOError(*e.args)
# import SSL module here so we can refer to greenio.SSL.exceptionclass
try:
    from OpenSSL import SSL

eventlet/greenio/py2.py (new file, 222 lines)

View File

@ -0,0 +1,222 @@
import errno
import os
from eventlet.greenio.base import (
_operation_on_closed_file,
greenpipe_doc,
set_nonblocking,
socket,
SOCKET_BLOCKING,
)
from eventlet.hubs import trampoline, notify_close, notify_opened, IOClosed
from eventlet.support import get_errno, six
__all__ = ['_fileobject', 'GreenPipe']
_fileobject = socket._fileobject
class GreenPipe(_fileobject):
__doc__ = greenpipe_doc
def __init__(self, f, mode='r', bufsize=-1):
if not isinstance(f, six.string_types + (int, file)):
raise TypeError('f(ile) should be int, str, unicode or file, not %r' % f)
if isinstance(f, six.string_types):
f = open(f, mode, 0)
if isinstance(f, int):
fileno = f
self._name = "<fd:%d>" % fileno
else:
fileno = os.dup(f.fileno())
self._name = f.name
if f.mode != mode:
raise ValueError('file.mode %r does not match mode parameter %r' % (f.mode, mode))
self._name = f.name
f.close()
super(GreenPipe, self).__init__(_SocketDuckForFd(fileno), mode)
set_nonblocking(self)
self.softspace = 0
@property
def name(self):
return self._name
def __repr__(self):
return "<%s %s %r, mode %r at 0x%x>" % (
self.closed and 'closed' or 'open',
self.__class__.__name__,
self.name,
self.mode,
(id(self) < 0) and (sys.maxint + id(self)) or id(self))
def close(self):
super(GreenPipe, self).close()
for method in [
'fileno', 'flush', 'isatty', 'next', 'read', 'readinto',
'readline', 'readlines', 'seek', 'tell', 'truncate',
'write', 'xreadlines', '__iter__', '__next__', 'writelines']:
setattr(self, method, _operation_on_closed_file)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def _get_readahead_len(self):
return len(self._rbuf.getvalue())
def _clear_readahead_buf(self):
len = self._get_readahead_len()
if len > 0:
self.read(len)
def tell(self):
self.flush()
try:
return os.lseek(self.fileno(), 0, 1) - self._get_readahead_len()
except OSError as e:
raise IOError(*e.args)
def seek(self, offset, whence=0):
self.flush()
if whence == 1 and offset == 0: # tell synonym
return self.tell()
if whence == 1: # adjust offset by what is read ahead
offset -= self._get_readahead_len()
try:
rv = os.lseek(self.fileno(), offset, whence)
except OSError as e:
raise IOError(*e.args)
else:
self._clear_readahead_buf()
return rv
if getattr(file, "truncate", None): # not all OSes implement truncate
def truncate(self, size=-1):
self.flush()
if size == -1:
size = self.tell()
try:
rv = os.ftruncate(self.fileno(), size)
except OSError as e:
raise IOError(*e.args)
else:
self.seek(size) # move position&clear buffer
return rv
def isatty(self):
try:
return os.isatty(self.fileno())
except OSError as e:
raise IOError(*e.args)
class _SocketDuckForFd(object):
"""Class implementing all socket method used by _fileobject
in cooperative manner using low level os I/O calls.
"""
_refcount = 0
def __init__(self, fileno):
self._fileno = fileno
notify_opened(fileno)
self._closed = False
def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None):
if self._closed:
# Don't trampoline if we're already closed.
raise IOClosed()
try:
return trampoline(fd, read=read, write=write, timeout=timeout,
timeout_exc=timeout_exc,
mark_as_closed=self._mark_as_closed)
except IOClosed:
# Our fileno has been obsoleted. Defang ourselves to
# prevent spurious closes.
self._mark_as_closed()
raise
def _mark_as_closed(self):
self._closed = True
@property
def _sock(self):
return self
def fileno(self):
return self._fileno
def recv(self, buflen):
while True:
try:
data = os.read(self._fileno, buflen)
return data
except OSError as e:
if get_errno(e) not in SOCKET_BLOCKING:
raise IOError(*e.args)
self._trampoline(self, read=True)
def recv_into(self, buf, nbytes=0, flags=0):
if nbytes == 0:
nbytes = len(buf)
data = self.recv(nbytes)
buf[:nbytes] = data
return len(data)
def send(self, data):
while True:
try:
return os.write(self._fileno, data)
except OSError as e:
if get_errno(e) not in SOCKET_BLOCKING:
raise IOError(*e.args)
else:
trampoline(self, write=True)
def sendall(self, data):
len_data = len(data)
os_write = os.write
fileno = self._fileno
try:
total_sent = os_write(fileno, data)
except OSError as e:
if get_errno(e) != errno.EAGAIN:
raise IOError(*e.args)
total_sent = 0
while total_sent < len_data:
self._trampoline(self, write=True)
try:
total_sent += os_write(fileno, data[total_sent:])
except OSError as e:
if get_errno(e) != errno. EAGAIN:
raise IOError(*e.args)
def __del__(self):
self._close()
def _close(self):
notify_close(self._fileno)
self._mark_as_closed()
try:
os.close(self._fileno)
except:
# os.close may fail if __init__ didn't complete
# (i.e file dscriptor passed to popen was invalid
pass
def __repr__(self):
return "%s:%d" % (self.__class__.__name__, self._fileno)
def _reuse(self):
self._refcount += 1
def _drop(self):
self._refcount -= 1
if self._refcount == 0:
self._close()

eventlet/greenio/py3.py (new file, 191 lines)

View File

@ -0,0 +1,191 @@
import _pyio as _original_pyio
import errno
import os as _original_os
import socket as _original_socket
from io import (
BufferedRandom as _OriginalBufferedRandom,
BufferedReader as _OriginalBufferedReader,
BufferedWriter as _OriginalBufferedWriter,
DEFAULT_BUFFER_SIZE,
TextIOWrapper as _OriginalTextIOWrapper,
IOBase as _OriginalIOBase,
)
from types import FunctionType
from eventlet.greenio.base import (
_operation_on_closed_file,
greenpipe_doc,
set_nonblocking,
SOCKET_BLOCKING,
)
from eventlet.hubs import notify_close, notify_opened, IOClosed, trampoline
from eventlet.support import get_errno, six
__all__ = ['_fileobject', 'GreenPipe']
# TODO get rid of this, it only seems like the original _fileobject
_fileobject = _original_socket.SocketIO
# Large part of the following code is copied from the original
# eventlet.greenio module
class GreenFileIO(_OriginalIOBase):
def __init__(self, name, mode='r', closefd=True, opener=None):
if isinstance(name, int):
fileno = name
self._name = "<fd:%d>" % fileno
else:
assert isinstance(name, six.string_types)
with open(name, mode) as fd:
self._name = fd.name
fileno = _original_os.dup(fd.fileno())
notify_opened(fileno)
self._fileno = fileno
self._mode = mode
self._closed = False
set_nonblocking(self)
self._seekable = None
@property
def closed(self):
return self._closed
def seekable(self):
if self._seekable is None:
try:
_original_os.lseek(self._fileno, 0, _original_os.SEEK_CUR)
except IOError as e:
if get_errno(e) == errno.ESPIPE:
self._seekable = False
else:
raise
else:
self._seekable = True
return self._seekable
def readable(self):
return 'r' in self._mode or '+' in self._mode
def writable(self):
return 'w' in self._mode or '+' in self._mode
def fileno(self):
return self._fileno
def read(self, buflen):
while True:
try:
return _original_os.read(self._fileno, buflen)
except OSError as e:
if get_errno(e) not in SOCKET_BLOCKING:
raise IOError(*e.args)
self._trampoline(self, read=True)
def readinto(self, b):
up_to = len(b)
data = self.read(up_to)
bytes_read = len(data)
b[:bytes_read] = data
return bytes_read
def isatty(self):
try:
return _original_os.isatty(self.fileno())
except OSError as e:
raise IOError(*e.args)
def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None):
if self._closed:
# Don't trampoline if we're already closed.
raise IOClosed()
try:
return trampoline(fd, read=read, write=write, timeout=timeout,
timeout_exc=timeout_exc,
mark_as_closed=self._mark_as_closed)
except IOClosed:
# Our fileno has been obsoleted. Defang ourselves to
# prevent spurious closes.
self._mark_as_closed()
raise
def _mark_as_closed(self):
""" Mark this socket as being closed """
self._closed = True
def write(self, data):
while True:
try:
return _original_os.write(self._fileno, data)
except OSError as e:
if get_errno(e) not in SOCKET_BLOCKING:
raise IOError(*e.args)
else:
trampoline(self, write=True)
def close(self):
_original_os.close(self._fileno)
notify_close(self._fileno)
self._closed = True
for method in [
'fileno', 'flush', 'isatty', 'next', 'read', 'readinto',
'readline', 'readlines', 'seek', 'tell', 'truncate',
'write', 'xreadlines', '__iter__', '__next__', 'writelines']:
setattr(self, method, _operation_on_closed_file)
def truncate(self, size=-1):
if size == -1:
size = self.tell()
try:
rv = _original_os.ftruncate(self._fileno, size)
except OSError as e:
raise IOError(*e.args)
else:
self.seek(size) # move position&clear buffer
return rv
def seek(self, offset, whence=_original_os.SEEK_SET):
try:
return _original_os.lseek(self._fileno, offset, whence)
except OSError as e:
raise IOError(*e.args)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
_open_environment = dict(globals())
_open_environment.update(dict(
BufferedRandom=_OriginalBufferedRandom,
BufferedWriter=_OriginalBufferedWriter,
BufferedReader=_OriginalBufferedReader,
TextIOWrapper=_OriginalTextIOWrapper,
FileIO=GreenFileIO,
os=_original_os,
))
_open = FunctionType(
six.get_function_code(_original_pyio.open),
_open_environment,
)
def GreenPipe(name, mode="r", buffering=-1, encoding=None, errors=None,
newline=None, closefd=True, opener=None):
try:
fileno = name.fileno()
except AttributeError:
pass
else:
fileno = _original_os.dup(fileno)
name.close()
name = fileno
return _open(name, mode, buffering, encoding, errors, newline, closefd, opener)
GreenPipe.__doc__ = greenpipe_doc
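
A note on the construction above (my reading): FunctionType re-executes the
stdlib's pure-Python _pyio.open against a globals dict whose FileIO is
GreenFileIO and whose os is the original os module, so GreenPipe gets the
usual buffering and text-wrapping layers on top of a cooperative raw file.
A hedged, self-contained sketch of the same rebinding trick (names here are
illustrative only):

    from types import FunctionType

    def report():
        # GREETING is a free variable looked up in the function's globals.
        return GREETING

    # Same code object, different globals: the body is unchanged, only the
    # names it resolves are swapped out.
    patched = FunctionType(report.__code__, {'GREETING': 'hello, green world'})
    print(patched())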

View File

@ -8,12 +8,13 @@ import warnings
  from eventlet.green import BaseHTTPServer
  from eventlet.green import socket
- from eventlet.green import urllib
  from eventlet import greenio
  from eventlet import greenpool
  from eventlet import support
  from eventlet.support import six
+ from eventlet.support.six.moves import urllib

  DEFAULT_MAX_SIMULTANEOUS_REQUESTS = 1024
  DEFAULT_MAX_HTTP_VERSION = 'HTTP/1.1'
@ -395,24 +396,8 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
            towrite.append(six.b("%x" % (len(data),)) + b"\r\n" + data + b"\r\n")
        else:
            towrite.append(data)
-       try:
-           _writelines(towrite)
-           length[0] = length[0] + sum(map(len, towrite))
-       except UnicodeEncodeError:
-           self.server.log_message(
-               "Encountered non-ascii unicode while attempting to write"
-               "wsgi response: %r" %
-               [x for x in towrite if isinstance(x, six.text_type)])
-           self.server.log_message(traceback.format_exc())
-           _writelines(
-               ["HTTP/1.1 500 Internal Server Error\r\n",
-                "Connection: close\r\n",
-                "Content-type: text/plain\r\n",
-                "Content-length: 98\r\n",
-                "Date: %s\r\n" % format_date_time(time.time()),
-                "\r\n",
-                ("Internal Server Error: wsgi application passed "
-                 "a unicode object to the server instead of a string.")])
+       _writelines(towrite)
+       length[0] = length[0] + sum(map(len, towrite))

    def start_response(status, response_headers, exc_info=None):
        status_code[0] = status.split()[0]
@ -456,6 +441,9 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
            minimum_write_chunk_size = int(self.environ.get(
                'eventlet.minimum_write_chunk_size', self.minimum_chunk_size))
            for data in result:
+               if isinstance(data, six.text_type):
+                   data = data.encode('ascii')
                towrite.append(data)
                towrite_size += len(data)
                if towrite_size >= minimum_write_chunk_size:
@ -472,7 +460,7 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
            self.close_connection = 1
            tb = traceback.format_exc()
            self.server.log_message(tb)
-           if not headers_set:
+           if not headers_sent:
                err_body = six.b(tb) if self.server.debug else b''
                start_response("500 Internal Server Error",
                               [('Content-type', 'text/plain'),
@ -522,7 +510,7 @@ class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler):
        pq = self.path.split('?', 1)
        env['RAW_PATH_INFO'] = pq[0]
-       env['PATH_INFO'] = urllib.unquote(pq[0])
+       env['PATH_INFO'] = urllib.parse.unquote(pq[0])

        if len(pq) > 1:
            env['QUERY_STRING'] = pq[1]
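
Implication for applications (illustrative sketch, not part of the patch):
the server now encodes ASCII-only text chunks itself and responds with a 500
when an app yields non-ASCII text, so portable WSGI apps should yield bytes;
the port below is arbitrary:

    import eventlet
    from eventlet import wsgi

    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        # Encode explicitly; text is only tolerated when it is pure ASCII.
        return [u'hello wsgi\n'.encode('utf-8')]

    wsgi.server(eventlet.listen(('127.0.0.1', 8090)), app)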

View File

@ -667,8 +667,8 @@ class TestGreenPipe(LimitedTestCase):
    def test_pipe(self):
        r, w = os.pipe()
-       rf = greenio.GreenPipe(r, 'r')
-       wf = greenio.GreenPipe(w, 'w', 0)
+       rf = greenio.GreenPipe(r, 'rb')
+       wf = greenio.GreenPipe(w, 'wb', 0)

        def sender(f, content):
            for ch in map(six.int2byte, six.iterbytes(content)):
@ -690,8 +690,8 @@ class TestGreenPipe(LimitedTestCase):
        # also ensures that readline() terminates on '\n' and '\r\n'
        r, w = os.pipe()
-       r = greenio.GreenPipe(r)
-       w = greenio.GreenPipe(w, 'w')
+       r = greenio.GreenPipe(r, 'rb')
+       w = greenio.GreenPipe(w, 'wb')

        def writer():
            eventlet.sleep(.1)
@ -717,8 +717,8 @@ class TestGreenPipe(LimitedTestCase):
    def test_pipe_writes_large_messages(self):
        r, w = os.pipe()
-       r = greenio.GreenPipe(r)
-       w = greenio.GreenPipe(w, 'w')
+       r = greenio.GreenPipe(r, 'rb')
+       w = greenio.GreenPipe(w, 'wb')

        large_message = b"".join([1024 * six.int2byte(i) for i in range(65)])

View File

@ -9,10 +9,7 @@ from tests import LimitedTestCase, main, run_python, skip_with_pyevent
  base_module_contents = """
  import socket
- try:
-     import urllib.request as urllib
- except ImportError:
-     import urllib
+ import urllib
  print("base {0} {1}".format(socket, urllib))
  """
@ -86,7 +83,14 @@ class ImportPatched(ProcessBase):
      assert 'eventlet.green.httplib' not in lines[2], repr(output)

  def test_import_patched_defaults(self):
-     self.write_to_tempfile("base", base_module_contents)
+     self.write_to_tempfile("base", """
+ import socket
+ try:
+     import urllib.request as urllib
+ except ImportError:
+     import urllib
+ print("base {0} {1}".format(socket, urllib))""")

      new_mod = """
  from eventlet import patcher
  base = patcher.import_patched('base')

View File

@ -4,8 +4,15 @@ If either operation blocked the whole script would block and timeout.
""" """
import unittest import unittest
from eventlet.green import urllib2, BaseHTTPServer from eventlet.green import BaseHTTPServer
from eventlet import spawn, kill from eventlet import spawn, kill
from eventlet.support import six
if six.PY2:
from eventlet.green.urllib2 import HTTPError, urlopen
else:
from eventlet.green.urllib.request import urlopen
from eventlet.green.urllib.error import HTTPError
class QuietHandler(BaseHTTPServer.BaseHTTPRequestHandler): class QuietHandler(BaseHTTPServer.BaseHTTPRequestHandler):
@ -40,12 +47,12 @@ class TestGreenness(unittest.TestCase):
self.server.server_close() self.server.server_close()
kill(self.gthread) kill(self.gthread)
def test_urllib2(self): def test_urllib(self):
self.assertEqual(self.server.request_count, 0) self.assertEqual(self.server.request_count, 0)
try: try:
urllib2.urlopen('http://127.0.0.1:%s' % self.port) urlopen('http://127.0.0.1:%s' % self.port)
assert False, 'should not get there' assert False, 'should not get there'
except urllib2.HTTPError as ex: except HTTPError as ex:
assert ex.code == 501, repr(ex) assert ex.code == 501, repr(ex)
self.assertEqual(self.server.request_count, 1) self.assertEqual(self.server.request_count, 1)

View File

@ -1330,11 +1330,24 @@ class TestHttpd(_TestBase):
        self.assertEqual(result.headers_lower['connection'], 'close')
        assert 'transfer-encoding' not in result.headers_lower

-   def test_unicode_raises_error(self):
+   def test_unicode_with_only_ascii_characters_works(self):
        def wsgi_app(environ, start_response):
            start_response("200 OK", [])
-           yield u"oh hai"
-           yield u"non-encodable unicode: \u0230"
+           yield b"oh hai, "
+           yield u"xxx"
+       self.site.application = wsgi_app
+       sock = eventlet.connect(('localhost', self.port))
+       fd = sock.makefile('rwb')
+       fd.write(b'GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n')
+       fd.flush()
+       result = read_http(sock)
+       assert b'xxx' in result.body
+
+   def test_unicode_with_nonascii_characters_raises_error(self):
+       def wsgi_app(environ, start_response):
+           start_response("200 OK", [])
+           yield b"oh hai, "
+           yield u"xxx \u0230"
        self.site.application = wsgi_app
        sock = eventlet.connect(('localhost', self.port))
        fd = sock.makefile('rwb')
@ -1343,7 +1356,6 @@ class TestHttpd(_TestBase):
        result = read_http(sock)
        self.assertEqual(result.status, 'HTTP/1.1 500 Internal Server Error')
        self.assertEqual(result.headers_lower['connection'], 'close')
-       assert b'unicode' in result.body

    def test_path_info_decoding(self):
        def wsgi_app(environ, start_response):
@ -1454,11 +1466,11 @@ class TestHttpd(_TestBase):
        # (if eventlet stops using file.readline() to read HTTP headers,
        # for instance)
        for runlog in sections[1:]:
-           debug = False if "debug set to: False" in runlog else True
+           debug = False if b"debug set to: False" in runlog else True
            if debug:
-               self.assertTrue("timed out" in runlog)
-           self.assertTrue("BOOM" in runlog)
-           self.assertFalse("Traceback" in runlog)
+               self.assertTrue(b"timed out" in runlog)
+           self.assertTrue(b"BOOM" in runlog)
+           self.assertFalse(b"Traceback" in runlog)

    def test_server_socket_timeout(self):
        self.spawn_server(socket_timeout=0.1)
@ -1743,7 +1755,16 @@ class TestChunkedInput(_TestBase):
            fd = self.connect()
            fd.sendall(req.encode())
            fd.close()
-           eventlet.sleep(0.0)
+           eventlet.sleep(0)
+
+           # This is needed because on Python 3 GreenSocket.recv_into is called
+           # rather than recv; recv_into right now (git 5ec3a3c) trampolines to
+           # the hub *before* attempting to read anything from a file descriptor
+           # therefore we need one extra context switch to let it notice closed
+           # socket, die and leave the hub empty
+           if six.PY3:
+               eventlet.sleep(0)
        finally:
            signal.alarm(0)
            signal.signal(signal.SIGALRM, signal.SIG_DFL)

View File

@ -25,6 +25,7 @@ connection makefile() file objects - ExplodingSocketFile <-- these raise
  from __future__ import print_function

  import eventlet
+ from eventlet.support import six
  import socket
  import sys
@ -96,7 +97,8 @@ class ExplodingConnectionWrap(object):
class ExplodingSocketFile(eventlet.greenio._fileobject):

    def __init__(self, sock, mode='rb', bufsize=-1, close=False):
-       super(self.__class__, self).__init__(sock, mode, bufsize, close)
+       args = [bufsize, close] if six.PY2 else []
+       super(self.__class__, self).__init__(sock, mode, *args)
        self.armed = False

    def arm(self):
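
For reference (my summary of the relevant constructor signatures, not part
of the diff):

    # Python 2: socket._fileobject(sock, mode='rb', bufsize=-1, close=False)
    #   handles its own buffering, so bufsize/close are meaningful.
    # Python 3: socket.SocketIO(sock, mode)
    #   is a raw io object; buffering is added by the io wrappers, so only
    #   (sock, mode) can be forwarded.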