Index: ValBot/Python/check_interwiki_links.py
===================================================================
--- ValBot/Python/check_interwiki_links.py	(revision 1205)
+++ ValBot/Python/check_interwiki_links.py	(revision 1207)
@@ -10,4 +10,5 @@
 import re
 import requests # for listing members with dir() when debugging
+import time
 
 from bs4 import BeautifulSoup
@@ -20,5 +21,5 @@
 class IWLink:
    def __init__(self, iw_prefix, prefix_url, full_url, page_name, page_name_only, page_slug, hosting_page, curl_response):
-      self.iw_prefix = iw_prefix # e.g. "wp"
+      self.iw_prefix = iw_prefix # e.g. "wp" as in [[wp:Marathon (series)#Rampancy]]
       self.prefix_url = prefix_url # e.g. "https://en.wikipedia.org/wiki/"
       self.full_url = full_url # e.g. "https://en.wikipedia.org/wiki/Marathon_(series)#Rampancy"
@@ -41,4 +42,7 @@
 unintended_redirects_found = 0
 name_printed = 0
+request_delay = 1.5
+max_retries = 3
+backoff_factor = 2
 
 # Prints the name of a page on which something occurred, if it has not been printed before
@@ -116,5 +120,23 @@
    global unintended_redirects_found
    
-   the_link.curl_response = fetch(the_link.full_url)
+   # We have to carefully throttle requests because otherwise we will get hit with a 429: Too Many Requests
+   attempt = 0
+   delay = request_delay
+   while True:
+       time.sleep(delay)
+   
+       the_link.curl_response = fetch(the_link.full_url)
+   
+       if the_link.curl_response.status_code != 429:
+           break
+   
+       attempt += 1
+       if attempt > max_retries:
+          pywikibot.stdout(f'   ERROR: Maximum retries afer error 429 exceeded for "{the_link.page_slug}". Aborting script.')
+          raise SystemExit(1)
+   
+       # Back off: increase the delay between requests after a 429
+       delay *= backoff_factor
+       pywikibot.stdout(f'   WARNING: Received error 429 for "{the_link.page_slug}". Retrying in {delay:.1f}s...')
 
    # One way we tell that a redirect occurred is by checking fetch's history, as it automatically follows redirects. This will catch formal redirects which come from
