-
Notifications
You must be signed in to change notification settings - Fork 0
/
fixing_redirects.py
230 lines (205 loc) · 7.8 KB
/
fixing_redirects.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This script has the intention to correct all redirect
links in featured pages or only one page of each wiki.

Can be used with:

&params;

-featured         Run over featured pages

Run fixing_redirects.py -help to see all the command-line
options -file, -ref, -links, ...
"""
#
# This script based on disambredir.py and solve_disambiguation.py
#
# (C) Pywikipedia team, 2004-2012
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
#
import re
import sys

import wikipedia as pywikibot
import pagegenerators
from pywikibot import i18n

# This is required for the text that is shown when you run this script
# with the parameter -help: '&params;' in the module docstring is
# expanded to the shared page-generator option help.
# (The placeholder had been HTML-mangled to u'\xb6ms;' in both the
# docstring and this key; restored to '&params;' consistently.)
docuReplacements = {
    '&params;': pagegenerators.parameterHelp,
}
# Per-language title of the page listing each wiki's featured articles.
# Used by the -featured option: pywikibot.translate() picks the entry for
# the current site, and all pages linking to it are processed.
featured_articles = {
    'ar': u'ويكيبيديا:مقالات مختارة',
    'cs': u'Wikipedie:Nejlepší články',
    'de': u'Wikipedia:Exzellente_Artikel',
    'en': u'Wikipedia:Featured_articles',
    'es': u'Wikipedia:Artículos_destacados',
    'fa': u'ویکیپدیا:نوشتارهای برگزیده',
    'fr': u'Wikipédia:Articles_de_qualité',
    'he': u'פורטל:ערכים_מומלצים',
    'is': u'Wikipedia:Úrvalsgreinar',
    'it': u'Wikipedia:Articoli_in_vetrina',
    'ja': u'Wikipedia:秀逸な記事',
    'nl': u'Wikipedia:Etalage',
    'nn': u'Wikipedia:Gode artiklar',
    'no': u'Wikipedia:Anbefalte artikler',
    'pl': u'Wikipedia:Artykuły_na_medal',
    'pt': u'Wikipedia:Os_melhores_artigos',
    'sv': u'Wikipedia:Utvalda_artiklar',
    'vi': u'Wikipedia:Bài_viết_chọn_lọc',
    'zh': u'Wikipedia:特色条目',
}
def firstcap(string):
    """Return *string* with only its first character uppercased.

    Unlike str.capitalize(), the rest of the string is left untouched
    (MediaWiki titles are case-sensitive beyond the first letter).
    An empty string is returned unchanged instead of raising IndexError,
    which the original `string[0]` indexing did.
    """
    if not string:
        return string
    return string[0].upper() + string[1:]
def treat(text, linkedPage, targetPage):
    """
    Based on the method of the same name in solve_disambiguation.py.

    Rewrite every wikilink in *text* that points to *linkedPage* (a
    redirect) so that it points directly to *targetPage*, preserving the
    visible link label, any #section anchor, and trailing link characters.
    Returns the modified text; links to other pages are left untouched.
    """
    mysite = pywikibot.getSite()
    # Site-specific regex fragment matching characters that MediaWiki
    # appends to the rendered link text (e.g. plural endings).
    linktrail = mysite.linktrail()
    # make a backup of the original text so we can show the changes later
    linkR = re.compile(r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
    curpos = 0
    # This loop will run until we have finished the current page
    while True:
        m = linkR.search(text, pos = curpos)
        if not m:
            break
        # Make sure that next time around we will not find this same hit.
        # (Advancing only one char also re-scans text after in-place
        # replacements, whose length may differ from the original link.)
        curpos = m.start() + 1
        # ignore interwiki links and links to sections of the same page
        if m.group('title').strip() == '' or \
           mysite.isInterwikiLink(m.group('title')):
            continue
        else:
            actualLinkPage = pywikibot.Page(targetPage.site(), m.group('title'))
            # Check whether the link found is to page.
            if actualLinkPage != linkedPage:
                continue
        # how many bytes should be displayed around the current link
        context = 15
        # at the beginning of the link, start red color.
        # at the end of the link, reset the color to default
        #pywikibot.output(text[max(0, m.start() - context) : m.start()] + '\03{lightred}' + text[m.start() : m.end()] + '\03{default}' + text[m.end() : m.end() + context])
        # NOTE(review): choice is hard-coded to 'y', so the unlink ('u'/'U')
        # and replace ('r'/'R') branches below are currently dead code —
        # presumably a leftover from the interactive prompt in
        # solve_disambiguation.py. Confirm before removing.
        choice = 'y'
        # The link looks like this:
        # [[page_title|link_text]]trailing_chars
        page_title = m.group('title')
        link_text = m.group('label')
        if not link_text:
            # or like this: [[page_title]]trailing_chars
            link_text = page_title
        if m.group('section') == None:
            section = ''
        else:
            section = m.group('section')
        trailing_chars = m.group('linktrail')
        if trailing_chars:
            link_text += trailing_chars
        if choice in "uU":
            # unlink - we remove the section if there's any
            text = text[:m.start()] + link_text + text[m.end():]
            continue
        replaceit = choice in "rR"
        # remove preleading ":"
        if link_text[0]==':':
            link_text = link_text[1:]
        # Preserve the original link's capitalization: if the label started
        # lowercase, link to the lowercase form of the target title too.
        if link_text[0].isupper():
            new_page_title = targetPage.title()
        else:
            new_page_title = targetPage.title()[0].lower() + \
                             targetPage.title()[1:]
        # remove preleading ":"
        if new_page_title[0]==':':
            new_page_title = new_page_title[1:]
        if replaceit and trailing_chars:
            newlink = "[[%s%s]]%s" % (new_page_title, section, trailing_chars)
        elif replaceit or (new_page_title == link_text and not section):
            newlink = "[[%s]]" % new_page_title
        # check if we can create a link with trailing characters instead of a
        # pipelink
        elif len(new_page_title) <= len(link_text) and \
             firstcap(link_text[:len(new_page_title)]) == \
             firstcap(new_page_title) and \
             re.sub(re.compile(linktrail), '', link_text[len(new_page_title):]) == '' and not section:
            newlink = "[[%s]]%s" % (link_text[:len(new_page_title)],
                                    link_text[len(new_page_title):])
        else:
            # Fall back to a piped link that keeps the visible label.
            newlink = "[[%s%s|%s]]" % (new_page_title, section, link_text)
        text = text[:m.start()] + newlink + text[m.end():]
        continue
    return text
# NOTE(review): pageCache appears unused anywhere in this file — likely a
# leftover from the scripts this one was based on. Confirm before removing.
pageCache = []
def workon(page):
    """Fix links to redirected or moved pages in *page* and save it.

    Skips pages that are redirects themselves or do not exist. For every
    page linked from *page*, resolves its redirect target (or, failing
    that, its move target) and rewrites the links via treat(). Saves with
    a translated edit summary only when the text actually changed; a
    failed save is logged, not raised.
    """
    mysite = pywikibot.getSite()
    try:
        text = page.get()
    except pywikibot.IsRedirectPage:
        pywikibot.output(u'%s is a redirect page. Skipping' % page)
        return
    except pywikibot.NoPage:
        pywikibot.output(u'%s does not exist. Skipping' % page)
        return
    pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                     % page.title())
    links = page.linkedPages()
    if links:
        # Pre-fetch all linked pages in a single request.
        pywikibot.getall(mysite, links)
    else:
        pywikibot.output('Nothing left to do.')
        return
    for page2 in links:
        try:
            target = page2.getRedirectTarget()
        except pywikibot.NoPage:
            # Not a redirect — the page may have been moved instead.
            # BUG FIX: the original code then overwrote `target` with
            # pywikibot.Page(mysite, lastmove['new_title']), but
            # `lastmove` is undefined in this scope and raised NameError
            # whenever getMovedTarget() succeeded; getMovedTarget()
            # already returns the destination page, so that line is gone.
            try:
                target = page2.getMovedTarget()
            except pywikibot.NoPage:
                continue
        except (pywikibot.Error, pywikibot.SectionError):
            continue
        # no fix to user namespaces
        if target.namespace() in [0, 1] and not page2.namespace() in [0, 1]:
            continue
        text = treat(text, page2, target)
    if text != page.get():
        comment = i18n.twtranslate(mysite, 'fixing_redirects-fixing')
        pywikibot.showDiff(page.get(), text)
        try:
            page.put(text, comment)
        except pywikibot.Error:
            pywikibot.error('unable to put %s' % page)
def main():
    """Parse the command line, build a page generator, and run workon().

    Supports -featured (process all pages linking to this wiki's
    featured-article list, main namespace only) in addition to the
    standard pagegenerators options. Refuses to run on the Dutch
    Wikipedia, where community consensus forbids bot redirect fixing.
    Shows the script help when no generator could be built.
    """
    # Factory handling the generator arguments shared with other scripts,
    # which determine on which pages to work.
    factory = pagegenerators.GeneratorFactory()
    use_featured = False
    for argument in pywikibot.handleArgs():
        if argument == '-featured':
            use_featured = True
        else:
            factory.handleArg(argument)

    site = pywikibot.getSite()
    if site.sitename() == 'wikipedia:nl':
        pywikibot.output(
            u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}')
        sys.exit()

    generator = None
    if use_featured:
        list_title = pywikibot.translate(site, featured_articles)
        list_page = pywikibot.Page(pywikibot.getSite(), list_title)
        generator = pagegenerators.NamespaceFilterPageGenerator(
            pagegenerators.ReferringPageGenerator(list_page), [0])
    if not generator:
        generator = factory.getCombinedGenerator()

    if generator:
        for page in pagegenerators.PreloadingGenerator(generator):
            workon(page)
    else:
        pywikibot.showHelp('fixing_redirects')
if __name__ == "__main__":
    try:
        main()
    finally:
        # Always release the bot framework's resources (site lock/throttle),
        # even if main() raised or was interrupted.
        pywikibot.stopme()