How to create an asynchronous generator in Python?

I am trying to rewrite this Python 2.7 code in the new async world order:

import multiprocessing

def get_api_results(func, iterable):
    pool = multiprocessing.Pool(5)
    for res in pool.map(func, iterable):
        yield res

map() blocks until all results are computed, so I am trying to rewrite this as an async implementation that yields results as soon as they are ready. Like map(), the return values must come back in the same order as the iterable. I tried this (I need requests because of legacy authorization requirements):

import asyncio

import requests

def get(i):
    r = requests.get('https://example.com/api/items/%s' % i)
    return i, r.json()

async def get_api_results():
    loop = asyncio.get_event_loop()
    futures = []
    for n in range(1, 11):
        futures.append(loop.run_in_executor(None, get, n))
    async for f in futures:
        k, v = await f
        yield k, v

for r in get_api_results():
    print(r)

but with Python 3.6 I get:

  File "scratch.py", line 16, in <module>
    for r in get_api_results():
TypeError: 'async_generator' object is not iterable

How can I do this?

+4
2 answers

Your old (2.7) code uses multiprocessing, i.e. separate worker processes, only to run blocking calls in parallel. The real work here is just waiting on HTTP responses, which is IO-bound, so threads are a better fit than processes.

For threads, Python 3+ ships concurrent.futures with its Executor API, and the same package is available for Python 2.7 as the futures backport.

The example below works under both Python 2 and Python 3:

# For python 2, first run:
#
#    pip install futures
#
from __future__ import print_function

import requests
from concurrent import futures

URLS = [
    'http://httpbin.org/delay/1',
    'http://httpbin.org/delay/3',
    'http://httpbin.org/delay/6',
    'http://www.foxnews.com/',
    'http://www.cnn.com/',
    'http://europe.wsj.com/',
    'http://www.bbc.co.uk/',
    'http://some-made-up-domain.coooom/',
]


def fetch(url):
    r = requests.get(url)
    r.raise_for_status()
    return r.content


def fetch_all(urls):
    with futures.ThreadPoolExecutor(max_workers=5) as executor:
        future_to_url = {executor.submit(fetch, url): url for url in urls}
        print("All URLs submitted.")
        for future in futures.as_completed(future_to_url):
            url = future_to_url[future]
            if future.exception() is None:
                yield url, future.result()
            else:
                # print('%r generated an exception: %s' % (
                # url, future.exception()))
                yield url, None


for url, s in fetch_all(URLS):
    status = "{:,.0f} bytes".format(len(s)) if s is not None else "Failed"
    print('{}: {}'.format(url, status))

This uses futures.ThreadPoolExecutor, i.e. threads. as_completed() yields each (url, result) pair as soon as its future finishes, which is not necessarily the order in which the URLs were submitted.
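
If you also need the ordering the question asks for (results in the same order as the input iterable), a minimal sketch along the same lines is to keep the futures in a list and consume them in submission order. fetch_all_in_order() below is a hypothetical variant of fetch_all(), not part of the original answer:

def fetch_all_in_order(urls):
    with futures.ThreadPoolExecutor(max_workers=5) as executor:
        # Submit everything up front so the requests run concurrently...
        ordered = [executor.submit(fetch, url) for url in urls]
        # ...but yield the results in the same order as `urls`.
        for url, future in zip(urls, ordered):
            try:
                yield url, future.result()
            except Exception:
                yield url, None

It is consumed exactly like fetch_all(): for url, s in fetch_all_in_order(URLS): ...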

Your Python 3.6 attempt goes through the same Executor machinery via run_in_executor(); you could even hand it a futures.ProcessPoolExecutor(), but spawning processes is overkill when the work is just waiting on IO!
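
If you do stay with run_in_executor(), you can pass it an explicit ThreadPoolExecutor instead of relying on the default. A rough sketch, assuming the blocking get() from the question is defined; fetch_api_results() is a hypothetical name:

import asyncio
from concurrent import futures

async def fetch_api_results(loop, executor):
    # Schedule every blocking get() call on the thread pool up front,
    # then await the futures in submission order, which also preserves
    # the ordering the original map()-based code guaranteed.
    tasks = [loop.run_in_executor(executor, get, n) for n in range(1, 11)]
    for task in tasks:
        k, v = await task
        print(k, v)

loop = asyncio.get_event_loop()
with futures.ThreadPoolExecutor(max_workers=5) as executor:
    loop.run_until_complete(fetch_api_results(loop, executor))
loop.close()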

If you want to go all-in on asyncio, then instead of requests you need an HTTP client built on asyncio, such as aiohttp. For example:

import asyncio

import aiohttp


async def fetch(session, url):
    print("Getting {}...".format(url))
    async with session.get(url) as resp:
        text = await resp.text()
    return "{}: Got {} bytes".format(url, len(text))


async def fetch_all():
    async with aiohttp.ClientSession() as session:
        tasks = [fetch(session, "http://httpbin.org/delay/{}".format(delay))
                 for delay in (1, 1, 2, 3, 3)]
        for task in asyncio.as_completed(tasks):
            print(await task)
    return "Done."


loop = asyncio.get_event_loop()
resp = loop.run_until_complete(fetch_all())
print(resp)
loop.close()

Note that asyncio also has its own as_completed(); like the concurrent.futures version, it hands back whichever task finishes first, so the output order is not necessarily the order the tasks were created in.
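
If ordered results matter (as with the question's map()-based code), asyncio.gather() runs the tasks concurrently but returns their results in the order the task list was built. A small sketch reusing the fetch() coroutine above; fetch_all_gathered() is a hypothetical variant:

async def fetch_all_gathered():
    async with aiohttp.ClientSession() as session:
        tasks = [fetch(session, "http://httpbin.org/delay/{}".format(delay))
                 for delay in (1, 1, 2, 3, 3)]
        # gather() preserves the order of `tasks` in its result list,
        # even though the requests still run concurrently.
        for line in await asyncio.gather(*tasks):
            print(line)
    return "Done."

Run it the same way: loop.run_until_complete(fetch_all_gathered()).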

+4

The error comes from the last loop: an "async generator" cannot be consumed with a plain for. It has to be iterated with async for, and async for is only allowed inside a coroutine that the event loop drives.

There is a second problem inside get_api_results() itself: futures is an ordinary list, not an async iterable, so async for f in futures will also fail; a regular for with an await on each future is what you want there.
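
For completeness, here is a sketch of the question's code with both changes applied (a plain for over the futures list inside the generator, async for outside), assuming get() from the question is defined:

import asyncio

async def get_api_results():
    loop = asyncio.get_event_loop()
    futures = [loop.run_in_executor(None, get, n) for n in range(1, 11)]
    for f in futures:  # plain for: `futures` is an ordinary list
        k, v = await f
        yield k, v

async def main():
    # async generators can only be driven with `async for`,
    # and `async for` is only legal inside a coroutine
    async for k, v in get_api_results():
        print(k, v)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())

All the futures are scheduled before the first await, so the requests still run concurrently even though the results come back in order.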

That said, if all you do is consume the results one at a time, you may not need a generator at all; you can simply do:

async def main(loop): 
    for n in range(1, 11):
        future = loop.run_in_executor(None, get, n)
        k, v = await future
        # do something with the result

loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))

Here get() is still a blocking function run in the default executor; if you made it async (for example with aiohttp), you could await it directly and drop run_in_executor().

+5

Source: https://habr.com/ru/post/1665090/

