Collect crawler warnings/errors and include them in the report

I-Al-Istannen
2021-11-07 21:40:22 +01:00
parent 90cb6e989b
commit a82a0b19c2
3 changed files with 37 additions and 3 deletions


@@ -47,10 +47,12 @@ def noncritical(f: Wrapped) -> Wrapped:
         try:
             f(*args, **kwargs)
         except (CrawlWarning, OutputDirError, MarkDuplicateError, MarkConflictError) as e:
+            crawler.report.add_warning(str(e))
             log.warn(str(e))
             crawler.error_free = False
-        except:  # noqa: E722 do not use bare 'except'
+        except Exception as e:
             crawler.error_free = False
+            crawler.report.add_error(str(e))
             raise
     return wrapper  # type: ignore
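
The hunk above changes the synchronous noncritical decorator: a swallowed warning is now stored via crawler.report.add_warning() in addition to being logged, and the bare except is narrowed to except Exception so the unexpected error can be recorded via crawler.report.add_error() before it is re-raised. A minimal sketch of that pattern, assuming invented stand-ins (Crawler, Report, CrawlWarning and stdlib logging) rather than the project's real classes:

import functools
import logging
from typing import Any, Callable, List

log = logging.getLogger(__name__)


class CrawlWarning(Exception):
    """Stand-in for the crawler's recoverable warning type."""


class Report:
    """Stand-in report that just collects message strings."""

    def __init__(self) -> None:
        self.warnings: List[str] = []
        self.errors: List[str] = []

    def add_warning(self, message: str) -> None:
        self.warnings.append(message)

    def add_error(self, message: str) -> None:
        self.errors.append(message)


class Crawler:
    """Stand-in crawler carrying a report and an error_free flag."""

    def __init__(self) -> None:
        self.report = Report()
        self.error_free = True


def noncritical(f: Callable[..., None]) -> Callable[..., None]:
    """Swallow known warnings, but record everything in the report."""

    @functools.wraps(f)
    def wrapper(*args: Any, **kwargs: Any) -> None:
        crawler: Crawler = args[0]
        try:
            f(*args, **kwargs)
        except CrawlWarning as e:
            # Known, recoverable problem: remember it and keep going.
            crawler.report.add_warning(str(e))
            log.warning(str(e))
            crawler.error_free = False
        except Exception as e:
            # Unexpected problem: remember it, then let it propagate.
            crawler.error_free = False
            crawler.report.add_error(str(e))
            raise

    return wrapper


@noncritical
def crawl_one(crawler: Crawler, name: str) -> None:
    # Hypothetical crawl step that only produces a warning.
    raise CrawlWarning(f"Could not download {name}")


crawler = Crawler()
crawl_one(crawler, "lecture_01.pdf")
print(crawler.error_free)       # False
print(crawler.report.warnings)  # ['Could not download lecture_01.pdf']

Recording the message in the report before re-raising means failures that abort the surrounding task still show up in the final summary.
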
@@ -83,8 +85,10 @@ def anoncritical(f: AWrapped) -> AWrapped:
         except (CrawlWarning, OutputDirError, MarkDuplicateError, MarkConflictError) as e:
             log.warn(str(e))
             crawler.error_free = False
-        except:  # noqa: E722 do not use bare 'except'
+            crawler.report.add_warning(str(e))
+        except Exception as e:
             crawler.error_free = False
+            crawler.report.add_error(str(e))
             raise
         return None
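
The second hunk applies the same change to the async anoncritical decorator. Below is a rough, self-contained usage sketch under the same assumptions (invented Crawler/Report/CrawlWarning stand-ins and stdlib logging, not the project's actual types), showing how a swallowed warning ends up in the report rather than only in the log output:

import asyncio
import functools
import logging
from typing import Any, Callable, List, Optional

log = logging.getLogger(__name__)


class CrawlWarning(Exception):
    pass


class Report:
    def __init__(self) -> None:
        self.warnings: List[str] = []
        self.errors: List[str] = []

    def add_warning(self, message: str) -> None:
        self.warnings.append(message)

    def add_error(self, message: str) -> None:
        self.errors.append(message)


class Crawler:
    def __init__(self) -> None:
        self.report = Report()
        self.error_free = True


def anoncritical(f: Callable[..., Any]) -> Callable[..., Any]:
    # Async counterpart: the same bookkeeping wrapped around an awaited call.
    @functools.wraps(f)
    async def wrapper(*args: Any, **kwargs: Any) -> Optional[Any]:
        crawler: Crawler = args[0]
        try:
            return await f(*args, **kwargs)
        except CrawlWarning as e:
            log.warning(str(e))
            crawler.error_free = False
            crawler.report.add_warning(str(e))
        except Exception as e:
            crawler.error_free = False
            crawler.report.add_error(str(e))
            raise
        return None

    return wrapper


@anoncritical
async def crawl_one(crawler: Crawler, url: str) -> None:
    # Hypothetical crawl step that only raises a warning.
    raise CrawlWarning(f"Skipping {url}: not modified")


async def main() -> None:
    crawler = Crawler()
    await crawl_one(crawler, "https://example.com/a")
    # The warning now survives in the report instead of only appearing in the log.
    print(crawler.error_free)       # False
    print(crawler.report.warnings)  # ['Skipping https://example.com/a: not modified']


asyncio.run(main())

As in the diff's context line, the async wrapper falls through to return None when a warning was swallowed, so callers can still tell a skipped step from a successful one.
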