From c97750449bb772135dacf5bafeec28f05d9a8a45 Mon Sep 17 00:00:00 2001
From: Yury Melnichek
Date: Mon, 17 Sep 2012 12:10:34 +0200
Subject: [PATCH] [crawler] Catch all exceptions when downloading pages.
 Decrease delay between downloads to 0.3s.

---
 crawler/wikitravel-download-pages.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/crawler/wikitravel-download-pages.py b/crawler/wikitravel-download-pages.py
index 9634ae594e..7a658bc6d6 100755
--- a/crawler/wikitravel-download-pages.py
+++ b/crawler/wikitravel-download-pages.py
@@ -25,7 +25,7 @@ for i, line in enumerate(sys.stdin):
       finally:
         remoteFile.close()
       break
-    except IOError as error:
+    except Exception as error:
       sys.stderr.write('Try {0}, error: {1}\n'.format(tryCount, error))
       if tryCount >= 5:
         raise
@@ -36,4 +36,4 @@ for i, line in enumerate(sys.stdin):
   localFile.write(data)
   localFile.close()

-  time.sleep(1)
+  time.sleep(0.3)
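
For context, below is a minimal, self-contained sketch of the retry loop this patch modifies: a broad `except Exception` handler that retries up to five times, and a 0.3-second throttle between downloads. The function name `fetch_with_retries`, the `MAX_TRIES` and `DOWNLOAD_DELAY` constants, the output filenames, and the use of `urllib.request` (the 2012 script would have used `urllib2`) are illustrative assumptions, not the original script's code.

```python
import sys
import time
import urllib.request

MAX_TRIES = 5          # matches the patch's "tryCount >= 5" check
DOWNLOAD_DELAY = 0.3   # the patch lowers this from 1s to 0.3s


def fetch_with_retries(url):
    # Hypothetical helper: retry the download, catching any exception
    # (the patch widens the handler from IOError to Exception) and
    # re-raising only after MAX_TRIES failed attempts.
    tryCount = 0
    while True:
        try:
            tryCount += 1
            remoteFile = urllib.request.urlopen(url)
            try:
                return remoteFile.read()
            finally:
                remoteFile.close()
        except Exception as error:
            sys.stderr.write('Try {0}, error: {1}\n'.format(tryCount, error))
            if tryCount >= MAX_TRIES:
                raise


# Like the original script, read one URL per line from stdin and
# sleep between downloads so the server is not hammered.
for i, line in enumerate(sys.stdin):
    data = fetch_with_retries(line.strip())
    with open('page{0}.html'.format(i), 'wb') as localFile:  # illustrative naming
        localFile.write(data)
    time.sleep(DOWNLOAD_DELAY)
```

Widening the handler to `Exception` trades precision for robustness: transient non-I/O failures (e.g. HTTP-level errors surfacing as other exception types) no longer abort the whole crawl, while the `tryCount >= 5` re-raise still stops the script on persistent failures.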