
Ignore redirects

Branch: master
JustAnotherArchivist, 3 years ago
Commit: 7a999c9b0a
1 changed file with 2 additions and 1 deletion
wpull2-log-extract-errors (+2, -1)

@@ -18,6 +18,7 @@ then
 2020-09-10 23:54:25,000 - wpull.processor.web - INFO - Fetched ‘https://example.org/ok-404’: 404 OK. Length: 1234 [text/html; charset=utf-8].
 2020-09-10 23:54:25,000 - wpull.processor.web - INFO - Fetched ‘https://example.org/ok-405’: 405 OK. Length: 1234 [text/html; charset=utf-8].
 2020-09-10 23:54:25,000 - wpull.processor.web - INFO - Fetched ‘https://example.org/ok-410’: 410 OK. Length: 1234 [text/html; charset=utf-8].
+2020-09-10 23:54:25,000 - wpull.processor.web - INFO - Fetched ‘https://example.org/ok-301’: 301 OK. Length: 1234 [text/html; charset=utf-8].
 2020-09-10 23:54:25,000 - wpull.processor.web - INFO - Fetched ‘https://example.org/error-429’: 429 OK. Length: 1234 [text/html; charset=utf-8].
 2020-09-10 23:54:25,000 - wpull.processor.web - INFO - Fetched ‘https://example.org/error-418’: 418 OK. Length: 1234 [text/html; charset=utf-8].
 2020-09-10 23:54:25,000 - wpull.processor.base - ERROR - Fetching ‘https://example.org/error-dns’ encountered an error: DNS resolution failed: [Errno -2] Name or service not known
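
For illustration only: feeding just the seven sample lines above to the updated pipeline further down (which, as written, reads the log on standard input) should classify the 404/405/410 responses as successes, drop the 301 line before classification rather than reporting it as an error, and leave only the genuinely failed URLs, roughly:

    https://example.org/error-429
    https://example.org/error-418
    https://example.org/error-dns
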
@@ -54,6 +55,6 @@ fi

 # Logic: extract all lines of interest, process them such that they only contain a + or - indicating success or error plus the URL, filter the errors with the successes in awk.
 # The output order is as each URL appears for the first time in the log. Since awk doesn't preserve the insertion order on iteration, keep the line number and sort the output on that.
-grep -F -e ' - ERROR - Fetching ‘' -e ' - INFO - Fetched ‘' | sed 's,^.*‘\(.*\)’: \(200\|204\|304\|401\|403\|404\|405\|410\) .*$,+ \1,; s,^.*‘\(.*\)’.*$,- \1,' | awk '/^\+ / { successes[$2] = 1; } /^- / && ! ($2 in successes) { errors[$2] = NR; } END { for (url in errors) { if (! (url in successes)) { print errors[url] " " url; } } }' | sort -n | cut -d' ' -f2-
+grep -F -e ' - ERROR - Fetching ‘' -e ' - INFO - Fetched ‘' | grep -Fv '’: 30' | sed 's,^.*‘\(.*\)’: \(200\|204\|304\|401\|403\|404\|405\|410\) .*$,+ \1,; s,^.*‘\(.*\)’.*$,- \1,' | awk '/^\+ / { successes[$2] = 1; } /^- / && ! ($2 in successes) { errors[$2] = NR; } END { for (url in errors) { if (! (url in successes)) { print errors[url] " " url; } } }' | sort -n | cut -d' ' -f2-

 # Faster version without preserving order: grep -F -e ' - ERROR - Fetching ‘' -e ' - INFO - Fetched ‘' | sed 's,^.*‘\(.*\)’: \(200\|204\|304\|401\|403\|404\|405\|410\) .*$,+ \1,; s,^.*‘\(.*\)’.*$,- \1,' | awk '/^\+ / { successes[$2] = 1; } /^- / && ! ($2 in successes) { errors[$2] = 1; } END { for (url in errors) { if (! (url in successes)) { print url; } } }'
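
For readability, here is a sketch of the same classification logic as the new one-liner, split into stages with comments. It is not part of the script; like the original it reads the log on standard input and should behave identically.

grep -F -e ' - ERROR - Fetching ‘' -e ' - INFO - Fetched ‘' |
  # Drop every fetch whose status code starts with 30 (redirects) before classifying.
  grep -Fv '’: 30' |
  # Rewrite each remaining line to "+ URL" (acceptable status) or "- URL" (anything else).
  sed 's,^.*‘\(.*\)’: \(200\|204\|304\|401\|403\|404\|405\|410\) .*$,+ \1,; s,^.*‘\(.*\)’.*$,- \1,' |
  awk '
    /^\+ / { successes[$2] = 1 }                      # URL got an acceptable status at least once
    /^- / && ! ($2 in successes) { errors[$2] = NR }  # remember the error line number for ordering
    END {
      for (url in errors)
        if (! (url in successes))                     # URL never succeeded at all -> report it
          print errors[url] " " url
    }' |
  sort -n | cut -d' ' -f2-                            # order by recorded line number, then strip it

One consequence worth noting: the fixed-string filter '’: 30' matches any status code beginning with 30, so 304 responses are also dropped here instead of being counted via the 304 entry in the sed success list.
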
