source: ValBot/check_interwiki_links.py @ 1153

Last change on this file since 1153 was 1152, checked in by iritscen, 4 years ago

ValBot: Now checks that the section link (if there is one) within an interwiki link is valid.

File size: 6.3 KB
import re

import pywikibot
from pywikibot import pagegenerators
from pywikibot.comms.http import fetch
from bs4 import BeautifulSoup

# Parallel arrays based on https://wiki.oni2.net/Special:Interwiki
interwiki_prefixes = ('acronym', 'cache', 'commons', 'dictionary', 'google', 'metawikimedia', 'mw', 'wikibooks', 'wikidata', 'wikimedia', 'wikinews', 'wikipedia', 'wikiquote', 'wikisource', 'wikispecies', 'wikiversity', 'wikivoyage', 'wikt', 'wiktionary', 'wp')

interwiki_urls = ('http://www.acronymfinder.com/~/search/af.aspx?string=exact&Acronym=', 'http://www.google.com/search?q=cache:', 'https://commons.wikimedia.org/wiki/', 'http://www.dict.org/bin/Dict?Database=*&Form=Dict1&Strategy=*&Query=', 'http://www.google.com/search?q=', 'https://meta.wikimedia.org/wiki/', 'https://www.mediawiki.org/wiki/', 'https://en.wikibooks.org/wiki/', 'https://www.wikidata.org/wiki/', 'https://foundation.wikimedia.org/wiki/', 'https://en.wikinews.org/wiki/', 'https://en.wikipedia.org/wiki/', 'https://en.wikiquote.org/wiki/', 'https://wikisource.org/wiki/', 'https://species.wikimedia.org/wiki/', 'https://en.wikiversity.org/wiki/', 'https://en.wikivoyage.org/wiki/', 'https://en.wiktionary.org/wiki/', 'https://en.wiktionary.org/wiki/', 'https://en.wikipedia.org/wiki/')

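# Sanity check: the two tuples above are parallel arrays, so they must stay
# the same length or a prefix will be paired with the wrong URL
assert len(interwiki_prefixes) == len(interwiki_urls)
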
pages_checked = 0
iw_found = 0
problems_found = 0

# Searches the given page text for interwiki links and checks each one it finds
def scan_for_iw_links(page_text):
    global pages_checked
    global iw_found
    global problems_found
    pages_checked += 1
    cur = 0

    for prefix in interwiki_prefixes:
        # Isolate strings that start with "[[prefix:" and end with "|" or "]"
        iw_link = r"\[\[" + prefix + r":[^|\]]*(\||\])"
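        # e.g. for prefix "wp" this matches "[[wp:Oni|the game]]" up through
        # the "|", and "[[wp:Oni]]" up through the first "]"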
        for match in re.finditer(iw_link, page_text):
            # Extract just the page title from this regex match
            s = match.start() + 2 + len(prefix) + 1
            e = match.end() - 1

            # Sometimes we used a space character instead of a '_', so fix that before querying
            page_title = page_text[s:e].replace(' ', '_')
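            # e.g. "[[wp:Main Page]]" yields the page_title "Main_Page"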

            # Construct the full URL for the particular wiki
            iw_url = interwiki_urls[cur] + page_title
            pywikibot.output('Found {0} link {1}.'.format(prefix, page_title))
            iw_found += 1

            # Adjust the URL if this is a foreign-language WP link
            if re.match("^[a-zA-Z]{2}:", page_title):
                lang_code = page_title[0:2] + "."
                # "wp:" is the Wikipedia: namespace, not a language
                if lang_code.lower() != "wp.":
                    # Replace only the first occurrence so that a matching
                    # substring later in the URL is left alone
                    iw_url = iw_url.replace('en.', lang_code, 1)
                    iw_url = iw_url.replace(page_title[0:3], '', 1)
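                    # e.g. "fr:Paris" turns "https://en.wikipedia.org/wiki/fr:Paris"
                    # into "https://fr.wikipedia.org/wiki/Paris"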

            # Test the URL
            #pywikibot.output('Testing URL {}...'.format(iw_url))
            response = fetch(iw_url)

            # Redirects are followed automatically by fetch() and treated as "200"s, so
            # the way we tell that a redirect occurred is by checking the history
            if response.history:
                pywikibot.output('WARNING: Redirected from {}.'.format(response.history))
                problems_found += 1
            elif response.status_code != 200:
                #pywikibot.output('WARNING: Got response code {}.'.format(response.status_code)) # commented out because fetch() already prints such a msg
                problems_found += 1
            elif '#' in page_title:
                # Isolate the section link
                pywikibot.output('Detected section link on page {0}.'.format(page_title))
                page_name, anchor_name = page_title.split('#', 1)

                # Convert dot-notation hex entities to proper characters
                anchor_name = anchor_name.replace('.22', '"')
                anchor_name = anchor_name.replace('.27', '\'')
                anchor_name = anchor_name.replace('.28', '(')
                anchor_name = anchor_name.replace('.29', ')')
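                # These ".XX" sequences are MediaWiki's legacy anchor encoding:
                # '.' plus the character's hex code, i.e. percent-encoding with
                # '.' in place of '%'; only these four common cases are handled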

                # Read the linked page to see if it really has this anchor
                soup = BeautifulSoup(response.text, 'html.parser')
                found_section = False
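                # Note that this only sees <a href="#..."> links such as the
                # page's table of contents; heading id= attributes are not
                # examined, so a section lacking a TOC entry may be missed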
                for tag in soup.find_all('a'):
                    link = tag.get('href')
                    if not link:
                        #pywikibot.output('It is not a link.')
                        continue
                    #pywikibot.output('Got link {0}.'.format(link))
                    if not link.startswith('#'):
                        continue

                    if link == '#' + anchor_name:
                        pywikibot.output('Found section link!')
                        found_section = True
                        break
                if not found_section:
                    pywikibot.output('Could not find section {0} on page {1}.'.format(anchor_name, page_name))
                    problems_found += 1
        cur += 1

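# For example, scan_for_iw_links("See [[wp:Oni (video game)|Oni]].") would print
# "Found wp link Oni_(video_game)." and then fetch
# https://en.wikipedia.org/wiki/Oni_(video_game) to validate the link
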
def main(*args):
    cat_name = ''
    page_name = ''

    local_args = pywikibot.handle_args(args)

    for arg in local_args:
        if arg.startswith('-cat:'):
            cat_name = arg[5:]
        elif arg.startswith('-page:'):
            page_name = arg[6:]
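
    # Example invocations (assuming pywikibot is already configured with a
    # user-config.py for the target wiki):
    #   python check_interwiki_links.py -cat:"Some category"
    #   python check_interwiki_links.py -page:"Some page"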

    site = pywikibot.Site()

    # This line of code enumerates the methods in the 'page' class
    #pywikibot.stdout(format(dir(page)))

    if cat_name != '':
        cat_obj = pywikibot.Category(site, cat_name)
        generator = pagegenerators.CategorizedPageGenerator(cat_obj, recurse=True)
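        # PreloadingGenerator fetches the pages' text in batches (100 at a
        # time here), cutting down on the number of API round-trips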
        for page in pagegenerators.PreloadingGenerator(generator, 100):
            pywikibot.stdout('Checking page {0}'.format(page.title()))
            scan_for_iw_links(page.text)
    elif page_name != '':
        page = pywikibot.Page(site, page_name)
        pywikibot.stdout('Checking page {0}'.format(page.title()))
        scan_for_iw_links(page.text)

    global pages_checked, iw_found, problems_found
    pywikibot.stdout('Checked {0} page(s) and found {1} interwiki link(s) with {2} problem(s).'.format(pages_checked, iw_found, problems_found))

if __name__ == '__main__':
    main()