Index: Validate External Links/validate_external_links.command
===================================================================
--- Validate External Links/validate_external_links.command	(revision 1141)
+++ Validate External Links/validate_external_links.command	(revision 1142)
@@ -23,5 +23,5 @@
 
 # Run with start/end URLs, record OK codes, and don't upload
-#bash "$VALEXTLINKS" --links "$LINKS_ONLINE" --exceptions "$EXCEPT_ONLINE" --output "$REPORT_DIR" --record-ok-links --suggest-snapshots --start-url 1920 --end-url 1930
+#bash "$VALEXTLINKS" --links "$LINKS_ONLINE" --exceptions "$EXCEPT_ONLINE" --output "$REPORT_DIR" --record-ok-links --suggest-snapshots --start-url 2560 --end-url 2570
 
 # Run with local extlinks and exceptions, start/end URLs, record OK codes, and don't upload
@@ -32,6 +32,6 @@
 
 # Normal run with no upload
-#bash "$VALEXTLINKS" --links "$LINKS_ONLINE" --exceptions "$EXCEPT_ONLINE" --output "$REPORT_DIR" --suggest-snapshots --skip-archive-links --timeout 20
+#bash "$VALEXTLINKS" --links "$LINKS_ONLINE" --exceptions "$EXCEPT_ONLINE" --output "$REPORT_DIR" --suggest-snapshots --skip-archive-links --timeout 10
 
 # Normal run
-bash "$VALEXTLINKS" --links "$LINKS_ONLINE" --exceptions "$EXCEPT_ONLINE" --output "$REPORT_DIR" --suggest-snapshots --skip-archive-links --timeout 20 --upload "$UPLOAD_INFO"
+bash "$VALEXTLINKS" --links "$LINKS_ONLINE" --exceptions "$EXCEPT_ONLINE" --output "$REPORT_DIR" --suggest-snapshots --skip-archive-links --timeout 10 --upload "$UPLOAD_INFO"
Index: Validate External Links/validate_external_links.sh
===================================================================
--- Validate External Links/validate_external_links.sh	(revision 1141)
+++ Validate External Links/validate_external_links.sh	(revision 1142)
@@ -7,5 +7,5 @@
 # - RTF (for reading as a local file with clickable links)
 # - HTML (for uploading as a web page).
-# Call script with "--help" argument for documentation. Also see Read Me First.rtf for critical notes.
+# Call script with "--help" argument for documentation. Also see Read Me.rtf for critical notes.
 #
 # Recommended rule:
@@ -42,10 +42,10 @@
 TIMEOUT=10           # time to wait for a response when querying a site
 CHROME_PATH=""       # path to a copy of Google Chrome that has the command-line screenshot feature
-URL_START=1          # start at this URL in LINKS_FILE (1 by default)
+URL_START=1          # start at this URL in LINKS_FILE
 URL_LIMIT=0          # if non-zero, stop at this URL in LINKS_FILE
 UPLOAD_INFO=""       # path to a file on your hard drive with the login info needed to upload a report
 
 # Fixed strings -- see the occurrences of these variables to learn their purpose
-AGENT="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36 OPR/69.0.3686.77"
+AGENT="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36 OPR/70.0.3728.154"
 ARCHIVE_API="http://archive.org/wayback/available"
 ARCHIVE_GENERIC="https://web.archive.org/web/*"
@@ -104,4 +104,5 @@
 SKIP_UNK_CODE=0
 SKIP_EXPECT_NG=0
+SKIP_EXPECT_RD=0
 SKIP_EXPECT_EI=0
 SKIP_EXPECT_IW=0
@@ -181,5 +182,8 @@
                                take screenshots of each "OK" page.
        --timeout NUM           Wait this many seconds for a site to respond. The
-                               default is 10.
+                               default is 10. Important note: Val will attempt
+                               to reach each URL three times, so the time taken
+                               to ping an unresponsive site will be three times
+                               this setting.
        --start-url NUM         Start at this link in the links CSV file.
        --end-url NUM           Stop at this link in the links CSV file.
@@ -480,9 +484,13 @@
    # Do some math on results of session
    LINKS_PROCESSED=$((LINK_NUM-URL_START+1))
-   LINK_PROBLEMS=$((EI_LINKS+IW_LINKS+RD_LINKS+NG_LINKS))
+   TRIVIAL_RDS=$((SKIP_SLASH_ADD+SKIP_HTTPS_UP+SKIP_YOUTU_BE))
    LINK_ERRORS=$((SKIP_UNK_NS+SKIP_JS_PAGE+SKIP_BAD_URL+SKIP_NON_ASCII+SKIP_UNK_SUFFIX+SKIP_UNK_CODE))
-   LINKS_EXCEPTED=$((SKIP_EXPECT_NG+SKIP_EXPECT_EI+SKIP_EXPECT_IW))
-   TRIVIAL_RDS=$((SKIP_SLASH_ADD+SKIP_HTTPS_UP+SKIP_YOUTU_BE))
-   LINKS_CHECKED=$((LINKS_PROCESSED-LINK_ERRORS))
+   LINKS_EXCEPTED=$((SKIP_EXPECT_NG+SKIP_EXPECT_RD+SKIP_EXPECT_EI+SKIP_EXPECT_IW))
+   LINK_PROBLEMS_TOTAL=$((NG_LINKS+RD_LINKS+EI_LINKS+IW_LINKS))
+   LINK_PROBLEMS_NG=$((NG_LINKS-SKIP_EXPECT_NG))
+   LINK_PROBLEMS_RD=$((RD_LINKS-SKIP_EXPECT_RD))
+   LINK_PROBLEMS_EI=$((EI_LINKS-SKIP_EXPECT_EI))
+   LINK_PROBLEMS_IW=$((IW_LINKS-SKIP_EXPECT_IW))
+   LINK_PROBLEMS_NET=$((LINK_PROBLEMS_NG+LINK_PROBLEMS_RD+LINK_PROBLEMS_EI+LINK_PROBLEMS_IW))
 
    ## SUMMARY OUTPUT ##
@@ -496,14 +504,8 @@
    if [ $LINK_ERRORS -gt 0 ]; then valPrint ctrh "- $LINK_ERRORS $(pluralCheckNoun link $LINK_ERRORS) could not be processed"; fi
    if [ $SKIP_ARCHIVE_ORG -gt 0 ]; then valPrint ctrh "- $SKIP_ARCHIVE_ORG Archive.org $(pluralCheckNoun link $SKIP_ARCHIVE_ORG) were not checked"; fi
-   if [ $LINK_PROBLEMS -gt 0 ]; then valPrint ctrh "- $LINK_PROBLEMS processed $(pluralCheckNoun link $LINK_PROBLEMS) had $(pluralCheckAn $LINK_PROBLEMS)$(pluralCheckNoun issue $LINK_PROBLEMS)"; fi
-   if [ $LINKS_EXCEPTED -gt 0 ]; then valPrint ctr "  (excepted $LINKS_EXCEPTED $(pluralCheckNoun link $LINKS_EXCEPTED) from report)"; valPrint h "&nbsp;&nbsp;(excepted $LINKS_EXCEPTED $(pluralCheckNoun link $LINKS_EXCEPTED) from report)"; fi
+   if [ $LINK_PROBLEMS_TOTAL -gt 0 ]; then valPrint ctrh "- $LINK_PROBLEMS_TOTAL processed $(pluralCheckNoun link $LINK_PROBLEMS_TOTAL) had $(pluralCheckAn $LINK_PROBLEMS_TOTAL)$(pluralCheckNoun issue $LINK_PROBLEMS_TOTAL)"; fi
+   if [ $LINKS_EXCEPTED -gt 0 ]; then valPrint ctr "  (excepted $LINKS_EXCEPTED link $(pluralCheckNoun issue $LINKS_EXCEPTED) from report)"; valPrint h "&nbsp;&nbsp;(excepted $LINKS_EXCEPTED link $(pluralCheckNoun issue $LINKS_EXCEPTED) from report)"; fi
    if [ $OK_LINKS -gt 0 ]; then valPrint ctrh "- $OK_LINKS processed $(pluralCheckNoun link $OK_LINKS) $(pluralCheckWas $OK_LINKS) OK"; fi
    if [ $TRIVIAL_RDS -gt 0 ]; then valPrint ctr "  (counted $TRIVIAL_RDS trivial $(pluralCheckNoun redirection $TRIVIAL_RDS) as OK)"; valPrint h "&nbsp;&nbsp;(counted $TRIVIAL_RDS trivial $(pluralCheckNoun redirection $TRIVIAL_RDS) as OK)"; fi
-
-   # Print excepted link totals
-   if [ $LINKS_EXCEPTED -gt 0 ]; then valPrint ctrh "$LINKS_EXCEPTED $(pluralCheckNoun link $LINKS_EXCEPTED) excepted (see RTF or TXT report for specific links):"; fi
-   if [ $SKIP_EXPECT_NG -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_NG/$NG_LINKS NG $(pluralCheckNoun link $NG_LINKS)"; fi
-   if [ $SKIP_EXPECT_EI -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_EI/$EI_LINKS external internal $(pluralCheckNoun link $EI_LINKS)"; fi
-   if [ $SKIP_EXPECT_IW -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_IW/$IW_LINKS potential intrawiki $(pluralCheckNoun link $IW_LINKS)"; fi
 
    # Print errored link totals
@@ -516,10 +518,17 @@
    if [ $SKIP_UNK_CODE -gt 0 ]; then valPrint ctrh "- $SKIP_UNK_CODE unknown response $(pluralCheckNoun code $SKIP_UNK_CODE)"; fi
 
+   # Print excepted link totals
+   if [ $LINKS_EXCEPTED -gt 0 ]; then valPrint ctrh "$LINKS_EXCEPTED link $(pluralCheckNoun problem $LINKS_EXCEPTED) excepted (see RTF or TXT report for specific links):"; fi
+   if [ $SKIP_EXPECT_NG -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_NG/$NG_LINKS NG $(pluralCheckNoun link $NG_LINKS)"; fi
+   if [ $SKIP_EXPECT_RD -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_RD/$RD_LINKS $(pluralCheckNoun redirection $RD_LINKS)"; fi
+   if [ $SKIP_EXPECT_EI -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_EI/$EI_LINKS external internal $(pluralCheckNoun link $EI_LINKS)"; fi
+   if [ $SKIP_EXPECT_IW -gt 0 ]; then valPrint ctrh "- $SKIP_EXPECT_IW/$IW_LINKS potential intrawiki $(pluralCheckNoun link $IW_LINKS)"; fi
+
    # Print checked link totals
-   if [ $LINK_PROBLEMS -gt 0 ]; then valPrint ctrh "$LINK_PROBLEMS link $(pluralCheckNoun issue $LINK_PROBLEMS):"; fi
-   if [ $NG_LINKS -gt 0 ]; then valPrint ctrh "- $NG_LINKS NG $(pluralCheckNoun link $NG_LINKS)"; fi
-   if [ $RD_LINKS -gt 0 ]; then valPrint ctrh "- $RD_LINKS $(pluralCheckNoun redirection $RD_LINKS)"; fi
-   if [ $EI_LINKS -gt 0 ]; then valPrint ctrh "- $EI_LINKS $(pluralCheckNoun link $EI_LINKS) that could be intrawiki"; fi
-   if [ $IW_LINKS -gt 0 ]; then valPrint ctrh "- $IW_LINKS $(pluralCheckNoun link $IW_LINKS) that could be interwiki"; fi
+   if [ $LINK_PROBLEMS_NET -gt 0 ]; then valPrint ctrh "$LINK_PROBLEMS_NET link $(pluralCheckNoun issue $LINK_PROBLEMS_NET):"; fi
+   if [ $LINK_PROBLEMS_NG -gt 0 ]; then valPrint ctrh "- $LINK_PROBLEMS_NG NG $(pluralCheckNoun link $LINK_PROBLEMS_NG)"; fi
+   if [ $LINK_PROBLEMS_RD -gt 0 ]; then valPrint ctrh "- $LINK_PROBLEMS_RD $(pluralCheckNoun redirection $LINK_PROBLEMS_RD)"; fi
+   if [ $LINK_PROBLEMS_EI -gt 0 ]; then valPrint ctrh "- $LINK_PROBLEMS_EI $(pluralCheckNoun link $LINK_PROBLEMS_EI) that could be intrawiki"; fi
+   if [ $LINK_PROBLEMS_IW -gt 0 ]; then valPrint ctrh "- $LINK_PROBLEMS_IW $(pluralCheckNoun link $LINK_PROBLEMS_IW) that could be interwiki"; fi
 
    # Close the log files' markup
@@ -616,5 +625,5 @@
 if [ $TAKE_PAGE_SHOT -eq 1 ]; then valPrint ctrh "Yes"; else valPrint ctrh "No"; fi
 
-valPrint ctrhn "Suggest Archive.org snapshots: "
+valPrint ctrhn "Suggest archive.org snapshots: "
 if [ $SUGGEST_SNAPSHOTS -eq 1 ]; then valPrint ctrh "Yes"; else valPrint ctrh "No"; fi
 
@@ -862,5 +871,5 @@
    # Get response code using 'curl' to see if this link is valid; the --insecure option avoids an
    # issue with sites that require HTTPS
-   CURL_CODE=$(curl -o /dev/null --silent --insecure --head --user-agent '$AGENT' --max-time $TIMEOUT --write-out '%{http_code}\n' $URL)
+   CURL_CODE=$(curl -o /dev/null --silent --insecure --head --user-agent "$AGENT" --max-time $TIMEOUT --retry 2 --write-out '%{http_code}\n' "$URL")
    CURL_ERR=$(echo $?)
    CURL_RESULT=$CURL_CODE
@@ -998,4 +1007,8 @@
          EXCEPT_LINE="${EXCEPT_ARRAY[$i]}"
 
+         # Undo any HTML-encoding from the wiki page; for now we just worry about the ampersand, as most
+         # other HTML-encoded characters are not found in URLs
+         EXCEPT_LINE=$(echo "$EXCEPT_LINE" | sed 's/\&amp;/\&/g')
+
          # Match URL
          EXCEPT_URL="${EXCEPT_LINE#*,}"
@@ -1017,4 +1030,6 @@
                elif [ $STATUS == "IW" ]; then
                   let SKIP_EXPECT_IW+=1
+               elif [ $STATUS == "RD" ]; then
+                  let SKIP_EXPECT_RD+=1
                else
                   let SKIP_EXPECT_NG+=1
