requestedpages.py
#!/usr/bin/python
import pywikibot
import config
from pywikibot import pagegenerators

'''
might be useful to include what page it came from
set up to run automatically
'''
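
# The script imports a local config module and reads the attributes used
# below. A minimal sketch of such a config.py, assuming illustrative values
# (only the attribute names come from this file; every value shown here is an
# example, not from the original repository):
#
#   bot_user_name = 'User:ExampleBot'          # prefix for the bot's list pages (concatenated directly into titles)
#   max_catalog_pages = None                   # or an int to cap how many category members get scanned
#   article_namespace_only = True              # only count redlinks in the main (article) namespace
#   actually_edit = False                      # when False, print the redlink counts instead of editing
#   allow_target_pages = True                  # also copy each list to the project's target page
#   users_to_notify_on_error = ['ExampleUser'] # bare usernames pinged on their talk pages if the bot fails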


def create_request_list(site, project_name, category_name, threshold, target_page):
    '''
    Scans the category and creates the requested links page.
    project_name is purely to decide what page to put the list on.
    '''
    cat = pywikibot.Category(site, category_name)
    gen = pagegenerators.CategorizedPageGenerator(cat)
    redlinks = {}
    for i, page in enumerate(gen):
        # work on the article itself when the category holds talk pages
        if page.title()[:5] == 'Talk:':
            page = pywikibot.Page(site, page.title()[5:])
        print(page.title())
        if config.max_catalog_pages is not None and i > config.max_catalog_pages:
            break
        linkgen = page.linkedPages()
        for link in linkgen:
            if not link.exists():
                # if configured to only include articles, skip links that point
                # to any namespace other than the main (article) namespace
                if config.article_namespace_only and link.namespace() != 0:
                    continue
                if link.title() in redlinks:
                    redlinks[link.title()] = redlinks[link.title()] + 1
                else:
                    redlinks[link.title()] = 1
    if config.actually_edit:
        write_listed_links(site, redlinks, target_page, project_name, threshold)
    else:
        print(redlinks)
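
# The redlinks dict built above maps each missing page title to the number of
# category members linking to it, e.g. (illustrative values):
#   {'Some requested article': 7, 'Another missing page': 2}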


def write_listed_links(site, redlinks, target_page, project_name, threshold):
    '''
    Given a dictionary of the requested links and their frequencies,
    write them as a sorted list to the appropriate page.
    '''
    # write to the bot's own list page, and optionally to the project's target page
    requested_links_page_name = config.bot_user_name + '/Most Requested ' + project_name + ' Pages'
    listpage = pywikibot.Page(site, requested_links_page_name)
    if len(redlinks) < 1:
        text = 'No redlinks found'
        listpage.put(text, summary='No redlinks found', minorEdit=False)
        if target_page != '' and config.allow_target_pages:
            listpage = pywikibot.Page(site, target_page)
            listpage.put(text, summary='No redlinks found', minorEdit=False)
    else:
        text = create_page_text(redlinks, threshold)
        summary = 'Adding the {} most requested articles in the {} scope'
        summary = summary.format(str(len(redlinks)), project_name)
        listpage.put(text, summary=summary, minorEdit=False)
        if target_page != '' and config.allow_target_pages:
            listpage = pywikibot.Page(site, target_page)
            listpage.put(text, summary=summary, minorEdit=False)
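
# For example, with bot_user_name = 'User:ExampleBot' and project_name =
# 'Physics' (both illustrative), the list is written to
# 'User:ExampleBot/Most Requested Physics Pages' and, when allowed, mirrored
# to the project's target_page.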


def create_page_text(dictionary, thresh):
    '''
    Returns the text for the requested link page as a numbered wikitext list
    '''
    text = ''
    entries = sort_dict(dictionary)
    for entry in entries:
        # only include an entry if its number of incoming links is greater
        # than the project-specified threshold
        if entry[1] > thresh:
            text = text + '# [[' + entry[0] + ']] — ' + str(entry[1]) + '\n'
    return text


def sort_dict(dictionary):
    '''
    Returns a list of [title, count] pairs ordered by number of requests,
    most requested first
    '''
    entries = []
    for key in dictionary:
        entries.append([key, dictionary[key]])
    sorted_entries = list(reversed(sort(entries)))
    return sorted_entries


def sort(array):
    '''
    Quicksort the entries by link count.
    Could just use Python's built-in sort, but leaving this because it is cooler.
    '''
    ls = []
    eq = []
    gr = []
    if len(array) > 1:
        pivot = array[0][1]
        for x in array:
            if x[1] < pivot:
                ls.append(x)
            elif x[1] == pivot:
                eq.append(x)
            elif x[1] > pivot:
                gr.append(x)
        return sort(ls) + eq + sort(gr)
    else:
        return array
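
# The two helpers above give roughly the same ordering as Python's built-in
# sort (ties between equal counts may come out in a different order), e.g.
# this sketch, which is not part of the original script:
#   sorted_entries = sorted(entries, key=lambda entry: entry[1], reverse=True)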


def get_projects(site):
    projects = list()
    master = pywikibot.Page(site, config.bot_user_name + '/Master')
    mastertext = master.get()
    for line in mastertext.splitlines():
        if line != '':
            line = line.split(',')
            projects.append(line)
    return projects
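
# create_request_lists() below reads the comma-separated fields positionally,
# so each non-empty line of the Master page is expected to look like this
# (illustrative values):
#   Physics,Category:WikiProject Physics articles,3,Wikipedia:WikiProject Physics/Requested articles
# i.e. project name, category to scan, minimum link count, target page.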


def create_request_lists(site):
    projects = get_projects(site)
    for project in projects:
        project_name = project[0]    # first item on a line is the name of the wikiproject
        category_name = project[1]   # second item on a line is the category name
        given_threshold = project[2]
        target_page = project[3]
        # make sure the threshold is an actual integer and that it is positive
        if given_threshold.isdigit() and int(given_threshold) > 0:
            threshold = int(given_threshold)
        else:
            threshold = 0  # defaults to listing every entry
        create_request_list(site, project_name, category_name, threshold, target_page)


def notify_error(exception, user, site):
    '''
    Appends the exception to the given user's talk page and pings them
    '''
    notify_page_name = 'User_talk:' + user
    talk_page = pywikibot.Page(site, notify_page_name)
    talk_page_text = talk_page.get()
    error_text = '{{ping|' + user + '}} ' + str(exception) + ' ~~~~'
    separator = '\n\n' + '== bot errors ==' + '\n\n '
    new_text = talk_page_text + separator + error_text
    summary = 'bot error happened'
    talk_page.put(new_text, summary=summary, minorEdit=False)


def main():
    site = pywikibot.Site()
    try:
        create_request_lists(site)
    except Exception as exception:
        # notify the configured users on-wiki if anything goes wrong
        for user in config.users_to_notify_on_error:
            notify_error(exception, user, site)


if __name__ == '__main__':
    main()
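
# Typical invocation, assuming pywikibot is already configured with a
# user-config.py and a config.py sits alongside this script:
#   python requestedpages.py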