From e66baf2e143ed83f17239756a4b9a1bd4f830e96 Mon Sep 17 00:00:00 2001
From: Ryan Williams
Date: Sat, 8 Aug 2009 00:36:03 -0700
Subject: [PATCH] Thanks to Marcus for the SSL patch for 2.6 compatibility.
 Added greenlet and pyopenssl dependencies to setup.py.

---
 eventlet/green/urllib2.py |  2 +-
 eventlet/util.py          |  6 ++++-
 examples/webcrawler.py    | 52 +++++++++++++++++++++++++++++++++++++++
 3 files changed, 58 insertions(+), 2 deletions(-)
 create mode 100644 examples/webcrawler.py

diff --git a/eventlet/green/urllib2.py b/eventlet/green/urllib2.py
index e87b38f..642e1c0 100644
--- a/eventlet/green/urllib2.py
+++ b/eventlet/green/urllib2.py
@@ -51,7 +51,7 @@ def build_opener(*handlers):
     default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                        HTTPDefaultErrorHandler, HTTPRedirectHandler,
                        FTPHandler, FileHandler, HTTPErrorProcessor]
-    if hasattr(httplib, 'HTTPS'):
+    if hasattr(urllib2, 'HTTPSHandler'):
         default_classes.append(HTTPSHandler)
     skip = set()
     for klass in default_classes:
diff --git a/eventlet/util.py b/eventlet/util.py
index 1fd755c..6fe408c 100644
--- a/eventlet/util.py
+++ b/eventlet/util.py
@@ -59,7 +59,11 @@ def tcp_socket():
 
 
 try:
-    __original_ssl__ = socket.ssl
+    try:
+        import ssl
+        __original_ssl__ = ssl.wrap_socket
+    except ImportError:
+        __original_ssl__ = socket.ssl
 except AttributeError:
     __original_ssl__ = None
 
diff --git a/examples/webcrawler.py b/examples/webcrawler.py
new file mode 100644
index 0000000..e2866bb
--- /dev/null
+++ b/examples/webcrawler.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+"""\
+@file webcrawler.py
+
+This is a simple web "crawler" that fetches a bunch of urls using a coroutine pool. It fetches as
+many urls at a time as there are coroutines in the pool.
+
+Copyright (c) 2007, Linden Research, Inc.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+urls = ["http://www.google.com/intl/en_ALL/images/logo.gif",
+        "http://us.i1.yimg.com/us.yimg.com/i/ww/beta/y3.gif",
+        "http://eventlet.net"]
+
+import time
+from eventlet.green import urllib2
+from eventlet import coros
+
+def fetch(url):
+    # we could do something interesting with the result, but this is
+    # example code, so we'll just report that we did it
+    print "%s fetching %s" % (time.asctime(), url)
+    req = urllib2.urlopen(url)
+    print "%s fetched %s (%s)" % (time.asctime(), url, len(req.read()))
+
+pool = coros.CoroutinePool(max_size=4)
+waiters = []
+for url in urls:
+    waiters.append(pool.execute(fetch, url))
+
+# wait for all the coroutines to come back before exiting the process
+for waiter in waiters:
+    waiter.wait()
+
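
Since the point of the urllib2 and util hunks above is getting HTTPS working on Python 2.6, a quick smoke test of that path is a two-line fetch through the green urllib2 (a sketch only, not part of the patch; the https URL is an arbitrary placeholder):

    #! /usr/bin/env python
    # Fetch one https url through eventlet's green urllib2. With the patch
    # applied this should work on Python 2.6 (ssl module) as well as on
    # older interpreters that still expose socket.ssl.
    from eventlet.green import urllib2

    body = urllib2.urlopen("https://www.google.com/").read()
    print "fetched %s bytes over https" % len(body)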