forked from organicmaps/organicmaps
[crawler] Catch all exceptions when downloading pages. Decrease delay between downloads to 0.3s.
This commit is contained in:
parent
626981b8f2
commit
c97750449b
1 changed file with 2 additions and 2 deletions
|
@@ -25,7 +25,7 @@ for i, line in enumerate(sys.stdin):
|
|||
finally:
|
||||
remoteFile.close()
|
||||
break
|
||||
except IOError as error:
|
||||
except Exception as error:
|
||||
sys.stderr.write('Try {0}, error: {1}\n'.format(tryCount, error))
|
||||
if tryCount >= 5:
|
||||
raise
|
||||
|
@@ -36,4 +36,4 @@ for i, line in enumerate(sys.stdin):
|
|||
localFile.write(data)
|
||||
localFile.close()
|
||||
|
||||
time.sleep(1)
|
||||
time.sleep(0.3)
|
||||
|
|
Loading…
Add table
Reference in a new issue