Downloading a few URLs sequentially:

import requests
urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']
for url in urls:
    print('Downloading: ' + url)
    requests.get(url)
    print('Done: ' + url)

Output:

Downloading: http://python.org/
Done: http://python.org/
Downloading: http://www.pocketplaylab.com/
Done: http://www.pocketplaylab.com/
Downloading: http://github.com/
Done: http://github.com/

The same downloads with a thread pool:

from concurrent.futures import ThreadPoolExecutor
import requests
urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']
def download(url):
    print('Downloading: ' + url)
    requests.get(url)
    print('Done: ' + url)
with ThreadPoolExecutor(max_workers=10) as thread_pool:
    for url in urls:
        thread_pool.submit(download, url)

Output:

Downloading: http://python.org/
Downloading: http://www.pocketplaylab.com/
Downloading: http://github.com/
Done: http://www.pocketplaylab.com/
Done: http://github.com/
Done: http://python.org/
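
Note that the downloads now overlap, so completion order no longer matches submission order. submit also returns a concurrent.futures.Future, so the pool can hand results back; a minimal sketch using as_completed (download is changed to return the response, which the original version did not):

from concurrent.futures import ThreadPoolExecutor, as_completed
import requests

urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']

def download(url):
    # Return the response so the future carries a result
    return requests.get(url)

with ThreadPoolExecutor(max_workers=10) as thread_pool:
    # Map each future back to its URL
    futures = {thread_pool.submit(download, url): url for url in urls}
    for future in as_completed(futures):
        print('Done:', futures[future], future.result().status_code)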
The same with a process pool:

from concurrent.futures import ProcessPoolExecutor
import requests
urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']
def download(url):
    print('Downloading: ' + url)
    requests.get(url)
    print('Done: ' + url)
# Note: under the spawn start method (Windows, and macOS on recent
# Pythons) this needs an if __name__ == '__main__' guard
with ProcessPoolExecutor(max_workers=10) as process_pool:
    for url in urls:
        process_pool.submit(download, url)

The same with gevent:

# Monkey-patch the standard library
from gevent import monkey; monkey.patch_all()
from gevent.pool import Pool
import requests
urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']
def download(url):
    print('Downloading: ' + url)
    requests.get(url)
    print('Done: ' + url)
pool = Pool(size=10)
for url in urls:
    pool.apply_async(download, args=[url])
pool.join()

...
for url in urls:
    pool.apply_async(download, args=[url])
pool.join()
# OR
# (for the responses to be useful, download would need to return them)
responses = list(pool.imap_unordered(download, urls))

Output:

Downloading: http://python.org/
Downloading: http://www.pocketplaylab.com/
Downloading: http://github.com/
Done: http://www.pocketplaylab.com/
Done: http://github.com/
Done: http://python.org/
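
What patch_all() actually does is swap gevent's cooperative implementations into the standard library, so the blocking calls inside requests yield to other greenlets. A quick sketch (mine, not from the slides) to see the swap:

from gevent import monkey; monkey.patch_all()

import socket
import gevent.socket

# After patch_all(), the stdlib socket class is gevent's cooperative one
print(socket.socket is gevent.socket.socket)  # True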
To serve a WSGI app on gevent, just run gunicorn -k gevent <app>:

# coding=utf-8
import time
def hello(environ, start_response):
    # Do some work...
    time.sleep(1)
    # Send the response
    response = u'สวัสดีครับ\n'.encode('utf-8')
    status = '200 OK'
    headers = [
        ('Content-Type', 'text/plain; charset=utf-8'),
        ('Content-Length', str(len(response))),
    ]
    start_response(status, headers)
    # WSGI expects an iterable of bytestrings
    return [response]

Sync:
gunicorn server:hello

$ ab -c 10 -n 10 http://localhost:8000/
Percentage of the requests served within a certain time (ms)
  50%   6010
  66%   7011
  75%   8012
  80%   9013
  90%  10015
  95%  10015
  98%  10015
  99%  10015
 100%  10015 (longest request)

Async:
gunicorn -k gevent server:hello

$ ab -c 10 -n 10 http://localhost:8000/
Percentage of the requests served within a certain time (ms)
  50%   1005
  66%   1005
  75%   1005
  80%   1005
  90%   1006
  95%   1006
  98%   1006
  99%   1006
 100%   1006 (longest request)

From a Mixpanel benchmark (2010):

[figure: Mixpanel gevent benchmark results]
Celery workers can use gevent too: just run celery worker -P gevent.
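
For example, a minimal hypothetical tasks.py (the broker URL and the task are mine, not from the slides):

from celery import Celery
import requests

# Hypothetical app; any broker URL Celery supports works here
app = Celery('tasks', broker='redis://localhost:6379/0')

@app.task
def download(url):
    # I/O-bound work: each task runs in its own greenlet
    return requests.get(url).status_code

Then start the worker with celery -A tasks worker -P gevent -c 100, where -c is the number of greenlets.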
The same downloads with asyncio and aiohttp (original Python 3.4-era coroutine syntax):

import asyncio
import aiohttp
@asyncio.coroutine
def download(url):
    print('Downloading: ' + url)
    yield from aiohttp.request('GET', url)
    print('Done: ' + url)
@asyncio.coroutine
def download_parallel(urls):
    tasks = [asyncio.Task(download(url)) for url in urls]
    yield from asyncio.gather(*tasks)
urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']
loop = asyncio.get_event_loop()
loop.run_until_complete(download_parallel(urls))
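
On Python 3.5+ the same thing reads more naturally with async/await; a sketch against the current aiohttp client API (asyncio.run needs 3.7+, and the session handling is mine):

import asyncio
import aiohttp

async def download(session, url):
    print('Downloading: ' + url)
    async with session.get(url) as response:
        await response.read()
    print('Done: ' + url)

async def download_parallel(urls):
    # One shared session; gather runs the downloads concurrently
    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(download(session, url) for url in urls))

urls = ['http://python.org/',
        'http://www.pocketplaylab.com/',
        'http://github.com/']
asyncio.run(download_parallel(urls))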
And an asyncio HTTP server, using the old aiohttp.server API of the time:

import asyncio
import aiohttp.server
class HelloServer(aiohttp.server.ServerHttpProtocol):
    @asyncio.coroutine
    def handle_request(self, message, payload):
        # Do some work...
        yield from asyncio.sleep(1)
        # Send the response
        response = aiohttp.Response(self.writer, 200,
                                    http_version=message.version)
        body = u'สวัสดีครับ\n'.encode('utf-8')
        response.add_header('Content-Type', 'text/plain; charset=utf-8')
        response.add_header('Content-Length', str(len(body)))
        response.send_headers()
        response.write(body)
        yield from response.write_eof()

...
loop = asyncio.get_event_loop()
create_server = loop.create_server(HelloServer, '0.0.0.0', 8000)
server = loop.run_until_complete(create_server)
print('Serving on', server.sockets[0].getsockname())
try:
    loop.run_forever()
except KeyboardInterrupt:
    pass
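
Today the same server is a few lines with aiohttp.web (a sketch of mine, not the original slides'):

import asyncio
from aiohttp import web

async def hello(request):
    # Do some work...
    await asyncio.sleep(1)
    # web.Response(text=...) sets text/plain with charset=utf-8
    return web.Response(text='สวัสดีครับ\n')

app = web.Application()
app.add_routes([web.get('/', hello)])
web.run_app(app, host='0.0.0.0', port=8000)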