Asynchronous DNS lookup, but results go to the same file
I have to do a large number of DNS NAPTR lookups (thousands per minute).
I run a Python script using dnspython that reads one file and writes the results back to another. The request rate is about 300 requests/sec.
I tried asynchronous DNS with Python aiodns, but the numbers are the same.
My script is perhaps flawed. Please see below. This is Python 3.4.
import asyncio
import aiodns
...
loop = asyncio.get_event_loop()
resolver = aiodns.DNSResolver(loop=loop)
resolver.nameservers = ['x.y.w.z']
...
@asyncio.coroutine
def getsip(number):
    try:
        strQuery = str(dns.e164.from_e164("+" + number))
        answer = yield from resolver.query(strQuery, 'NAPTR')
        for rdata in answer:
            return rdata.regex
    except:
        return ""

with open(filename, 'r') as fread, open(filenameOut, 'w') as fwrite:
    reader = csv.DictReader(fread, delimiter='|', quoting=csv.QUOTE_NONE)
    reader.fieldnames = fieldnamesIn
    writer = csv.DictWriter(fwrite, fieldnames=fieldnamesOut, delimiter='|')
    for row in reader:
        sys.stdout.write("Processing record number: %d \r" % (total))
        sys.stdout.flush()
        total += 1
        answer = loop.run_until_complete(getsip(row['NUM']))
        if answer == "":
            missingAnswers += 1
        writer.writerow({'NUM': row['NUM'], 'SIP': answer})

print("Records not found: " + str(missingAnswers) + " of total " + str(total) + " records.")
But if results have to go back to one file, is it even possible to do lookups asynchronously?
Your script is effectively synchronous: loop.run_until_complete(getsip(...)) waits for each lookup to finish before the next row is even read, so aiodns never gets a chance to overlap queries. If you don't care about the order of the results, doing the lookups asynchronously is straightforward. For example, you can use asyncio.as_completed to schedule all the coroutines to run in parallel and be notified as each of them completes. Since completion order no longer matches row order, a small wrapper returns the number together with its answer so each result is written against the right record:
@asyncio.coroutine
def getsip_with_number(number):
    # Results arrive out of order, so pair each answer with the
    # number it belongs to.
    answer = yield from getsip(number)
    return number, answer

@asyncio.coroutine
def process():
    with open(filename, 'r') as fread:
        reader = csv.DictReader(fread, delimiter='|', quoting=csv.QUOTE_NONE)
        reader.fieldnames = fieldnamesIn
        rows = list(reader)
    with open(filenameOut, 'w') as fwrite:
        writer = csv.DictWriter(fwrite, fieldnames=fieldnamesOut, delimiter='|')
        missingAnswers = 0
        loop = asyncio.get_event_loop()
        tasks = [loop.create_task(getsip_with_number(row['NUM'])) for row in rows]
        for done_coro in asyncio.as_completed(tasks):
            number, answer = yield from done_coro
            if answer == "":
                missingAnswers += 1
            writer.writerow({'NUM': number, 'SIP': answer})
    print("Records not found: %d of total %d records"
          % (missingAnswers, len(rows)))

loop = asyncio.get_event_loop()
loop.run_until_complete(process())
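
If the output file does have to preserve the input order, asyncio.gather is a better fit: it also runs all the coroutines concurrently, but returns their results in the order the coroutines were passed in. A minimal sketch, reusing the filename and fieldname variables from above:

@asyncio.coroutine
def process_in_order():
    with open(filename, 'r') as fread:
        reader = csv.DictReader(fread, delimiter='|', quoting=csv.QUOTE_NONE)
        reader.fieldnames = fieldnamesIn
        rows = list(reader)
    # gather() runs the lookups concurrently but yields the answers
    # in the same order as the rows they were created from.
    answers = yield from asyncio.gather(*[getsip(row['NUM']) for row in rows])
    missingAnswers = 0
    with open(filenameOut, 'w') as fwrite:
        writer = csv.DictWriter(fwrite, fieldnames=fieldnamesOut, delimiter='|')
        for row, answer in zip(rows, answers):
            if answer == "":
                missingAnswers += 1
            writer.writerow({'NUM': row['NUM'], 'SIP': answer})
    print("Records not found: %d of total %d records" % (missingAnswers, len(rows)))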
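
One caveat: scheduling thousands of tasks at once fires every query at the DNS server more or less simultaneously, which can overload it or exhaust local resources. A minimal sketch of capping the number of in-flight lookups with asyncio.Semaphore (the wrapper name and the limit of 200 are assumptions, not part of the original script):

sem = asyncio.Semaphore(200)  # assumed limit; tune for your resolver

@asyncio.coroutine
def getsip_limited(number):
    # At most 200 lookups run concurrently; the rest wait here.
    with (yield from sem):
        return (yield from getsip(number))

Schedule getsip_limited instead of getsip in either version above and the request rate is throttled without changing anything else.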