mirror of
https://github.com/WikiTeam/wikiteam
synced 2024-11-15 00:15:00 +00:00
Merge pull request #438 from Pokechu22/getXMLHeader-session
Use `session.get` instead of `requests.get` in `getXMLHeader`
This change is contained in commit 5d83703d50.
@ -430,22 +430,22 @@ def getXMLHeader(config={}, session=None):
|
||||
try:
|
||||
print 'Getting the XML header from the API'
|
||||
# Export and exportnowrap exist from MediaWiki 1.15, allpages from 1.18
|
||||
r = requests.get(config['api'] + '?action=query&export=1&exportnowrap=1&list=allpages&aplimit=1', timeout=10)
|
||||
r = session.get(config['api'] + '?action=query&export=1&exportnowrap=1&list=allpages&aplimit=1', timeout=10)
|
||||
xml = r.text
|
||||
# Otherwise try without exportnowrap, e.g. Wikia returns a blank page on 1.19
|
||||
if not re.match(r"\s*<mediawiki", xml):
|
||||
r = requests.get(config['api'] + '?action=query&export=1&list=allpages&aplimit=1&format=json', timeout=10)
|
||||
r = session.get(config['api'] + '?action=query&export=1&list=allpages&aplimit=1&format=json', timeout=10)
|
||||
try:
|
||||
xml = r.json()['query']['export']['*']
|
||||
except KeyError:
|
||||
pass
|
||||
if not re.match(r"\s*<mediawiki", xml):
|
||||
# Do without a generator, use our usual trick of a random page title
|
||||
r = requests.get(config['api'] + '?action=query&export=1&exportnowrap=1&titles=' + randomtitle, timeout=10)
|
||||
r = session.get(config['api'] + '?action=query&export=1&exportnowrap=1&titles=' + randomtitle, timeout=10)
|
||||
xml = r.text
|
||||
# Again try without exportnowrap
|
||||
if not re.match(r"\s*<mediawiki", xml):
|
||||
r = requests.get(config['api'] + '?action=query&export=1&format=json&titles=' + randomtitle, timeout=10)
|
||||
r = session.get(config['api'] + '?action=query&export=1&format=json&titles=' + randomtitle, timeout=10)
|
||||
try:
|
||||
xml = r.json()['query']['export']['*']
|
||||
except KeyError:
|
||||
|
Loading…
Reference in New Issue
Block a user