Talk:AllPages: Difference between revisions

From Heroes 3 wiki
Jump to navigation Jump to search
No edit summary
No edit summary
Line 1: Line 1:
Created using the media wiki api:
== List Generation ==
<br>
<pre>
http://heroes.thelazy.net/wiki/api.php?action=query&list=allpages&aplimit=500&apfilterredir=nonredirects&apto=Main_Page
#!/usr/bin/env python3
<br>
 
http://heroes.thelazy.net/wiki/api.php?action=query&list=allpages&aplimit=500&apfilterredir=nonredirects&apfrom=Main_Page
import urllib.request
<br>
import json
 
base_url = 'http://heroes.thelazy.net/wiki/'
base_query = 'http://heroes.thelazy.net/wiki/api.php?action=query&list=allpages&aplimit=500&apfilterredir=nonredirects&format=json&apfrom='
query_title = ''
titles = {}
 
while True:
    request = urllib.request.urlopen(base_query + query_title)
    response = request.read()
    results = json.loads(response.decode())
    for page in results['query']['allpages']:
        title = page['title']
        titles[title] = base_url + urllib.parse.quote(title.replace(' ', '_'))
    if 'query-continue' in results:
        query_title = results['query-continue']['allpages']['apcontinue']
    else:
        break
 
output_file = open('output.mediawiki', 'w')
 
for title, url in sorted(titles.items()):
    output_file.write('[' + url + ' ' + title + ']\n<br>\n')
</pre>
--[[User:imahero|imahero]] 04:19, 6 September 2016 (CEST)
--[[User:imahero|imahero]] 04:19, 6 September 2016 (CEST)



Revision as of 06:22, 8 September 2016

List Generation

#!/usr/bin/env python3
"""Dump a MediaWiki link list of every non-redirect page on the Heroes 3 wiki.

Walks the MediaWiki ``list=allpages`` API in batches of 500, following the
``query-continue`` cursor until the list is exhausted, then writes one
``[url title]`` external-link line per page (sorted by title) to
``output.mediawiki``.
"""

import json
# Explicit import: the script uses urllib.parse.quote below. Previously this
# only worked because urllib.request happens to import urllib.parse internally,
# which is an implementation detail and not guaranteed.
import urllib.parse
import urllib.request

base_url = 'http://heroes.thelazy.net/wiki/'
base_query = 'http://heroes.thelazy.net/wiki/api.php?action=query&list=allpages&aplimit=500&apfilterredir=nonredirects&format=json&apfrom='
query_title = ''
titles = {}

while True:
    # Fetch one 500-page batch starting at the current continuation title.
    # `with` guarantees the HTTP response is closed (was leaked before).
    with urllib.request.urlopen(base_query + query_title) as response:
        results = json.loads(response.read().decode())
    for page in results['query']['allpages']:
        title = page['title']
        # Wiki page URLs replace spaces with underscores; percent-encode the rest.
        titles[title] = base_url + urllib.parse.quote(title.replace(' ', '_'))
    if 'query-continue' in results:
        # Pre-MediaWiki-1.26 continuation format: 'apcontinue' is the next
        # apfrom cursor for the following request.
        query_title = results['query-continue']['allpages']['apcontinue']
    else:
        break

# Write one external-link line per page, sorted alphabetically by title.
# UTF-8 is explicit so non-ASCII titles don't depend on the locale encoding;
# `with` guarantees the file is flushed and closed (was leaked before).
with open('output.mediawiki', 'w', encoding='utf-8') as output_file:
    for title, url in sorted(titles.items()):
        output_file.write('[' + url + ' ' + title + ']\n<br>\n')

--imahero 04:19, 6 September 2016 (CEST)

Nice technical page, but I must ask is there any use for it? At least for me this seems quite useless, causing hindrance rather than help. –Kapteeni Ruoska (talk) 06:11, 7 September 2016 (CEST)
I wanted to make sure there wasn't anything I was missing. When I'm browsing through the list it's easier to click these links than copy pasting the auto-generated titles from the api query.
--imahero 03:14, 8 September 2016 (CEST)
Sure, just wondering, as the wiki already has Special:AllPages, but perhaps there is a use for that. –Kapteeni Ruoska (talk) 07:27, 8 September 2016 (CEST)