diff --git a/eventlet/greenpool.py b/eventlet/greenpool.py
index 223dcbc..f3a791a 100644
--- a/eventlet/greenpool.py
+++ b/eventlet/greenpool.py
@@ -18,7 +18,7 @@ except NameError:
 class GreenPool(object):
     """ The GreenPool class is a pool of green threads.
     """
-    def __init__(self, size):
+    def __init__(self, size=1000):
         self.size = size
         self.coroutines_running = set()
         self.sem = semaphore.Semaphore(size)
@@ -159,7 +159,7 @@ class GreenPile(object):
     GreenPool. To do this, construct it with an integer size parameter
     instead of a GreenPool
     """
-    def __init__(self, size_or_pool):
+    def __init__(self, size_or_pool=1000):
         if isinstance(size_or_pool, GreenPool):
             self.pool = size_or_pool
         else:
diff --git a/examples/connect.py b/examples/connect.py
index 0980b0b..3d11b9f 100644
--- a/examples/connect.py
+++ b/examples/connect.py
@@ -14,7 +14,7 @@ def geturl(url):
     return c.recv(1024)
 
 urls = ['www.google.com', 'www.yandex.ru', 'www.python.org']
-pile = eventlet.GreenPile(200)
+pile = eventlet.GreenPile()
 for x in urls:
     pile.spawn(geturl, x)
 
diff --git a/examples/echoserver.py b/examples/echoserver.py
index 0e1f482..41c0b57 100644
--- a/examples/echoserver.py
+++ b/examples/echoserver.py
@@ -29,7 +29,7 @@ server = socket.socket()
 server.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR, 1)
 server.bind(('0.0.0.0', 6000))
 server.listen(50)
-pool = eventlet.GreenPool(10000)
+pool = eventlet.GreenPool()
 while True:
     try:
         new_sock, address = server.accept()
diff --git a/examples/webcrawler.py b/examples/webcrawler.py
index 53c6e8f..a4f5e2a 100644
--- a/examples/webcrawler.py
+++ b/examples/webcrawler.py
@@ -1,7 +1,5 @@
 #! /usr/bin/env python
-"""\
-@file webcrawler.py
-
+"""
 This is a simple web "crawler" that fetches a bunch of urls
 using a pool to control the number of outbound connections. It has as many
 simultaneously open connections as coroutines in the pool.
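
A minimal usage sketch (not part of the patch) illustrating the new defaults: after this change, GreenPool() and GreenPile() can be constructed with no argument and fall back to a size of 1000, which is what the updated examples rely on. The fetch() worker below is hypothetical and only stands in for real work.

#!/usr/bin/env python
# Illustrative sketch only; assumes eventlet is installed and reflects the
# size=1000 defaults introduced by this patch.
import eventlet
from eventlet.green import socket

def fetch(url):
    # hypothetical worker: open a green socket and read the first KB of the response
    c = socket.socket()
    c.connect((socket.gethostbyname(url), 80))
    c.sendall(b'GET /\r\n\r\n')
    return c.recv(1024)

pool = eventlet.GreenPool()        # no size argument needed; defaults to 1000
pile = eventlet.GreenPile(pool)    # shares the pool; GreenPile() alone also works now
for url in ['www.google.com', 'www.python.org']:
    pile.spawn(fetch, url)
for result in pile:                # iterating the pile yields results in spawn order
    print(len(result))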