|
|
@ -23,35 +23,18 @@ import re
|
|
|
|
import sys
|
|
|
|
import sys
|
|
|
|
import urllib
|
|
|
|
import urllib
|
|
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
"""
|
|
|
|
# Usage notes recovered from the (garbled) module docstring:
#   it requires a list of wikia wikis; there is one in the repository
#   (listofwikis directory).
#   run it:                python wikiadownloader.py
#   if you want to resume: python wikiadownloader.py wikitostart
#   where wikitostart is the last downloaded wiki in the previous session.

def month2num(month=''):
    """Convert a month name to its zero-padded two-digit number string.

    Parameters:
        month -- month name; surrounding whitespace and letter case are
                 ignored. A few localized synonyms (e.g. 'sty', 'ocak',
                 'desember') are accepted, exactly as in the original
                 elif chain.

    Returns:
        The month number as a string, '01' through '12'.

    Side effects:
        On an unrecognised name, prints an error message and terminates
        the whole program via sys.exit() (original behaviour preserved).
    """
    month = month.strip().lower()
    # Table lookup replaces the original 12-branch elif chain; the
    # synonym lists are copied verbatim from the original branches.
    synonyms = {
        '01': ('january', 'gen', 'janwoar', 'januarie', 'ocak',
               'taneaksat', 'sty', 'yanvar'),
        '02': ('february',),
        '03': ('march',),
        '04': ('april',),
        '05': ('may',),
        '06': ('june',),
        '07': ('july',),
        '08': ('august',),
        '09': ('september',),
        '10': ('october',),
        '11': ('november',),
        '12': ('december', 'desember'),
    }
    for num in sorted(synonyms):
        if month in synonyms[num]:
            return num
    # Unknown month: report and abort, as the original script did.
    # print() with a single pre-formatted argument produces the same
    # output under both Python 2 and Python 3.
    print('Error. I do not understand this month: %s' % month)
    sys.exit()
|
|
|
|
|
|
|
|
"""
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
f = open('wikia.com', 'r')
|
|
|
|
f = open('wikia.com', 'r')
|
|
|
@ -79,13 +62,7 @@ for wiki in wikia:
|
|
|
|
for i in m:
|
|
|
|
for i in m:
|
|
|
|
urldump = i.group("urldump")
|
|
|
|
urldump = i.group("urldump")
|
|
|
|
dump = i.group("dump")
|
|
|
|
dump = i.group("dump")
|
|
|
|
"""hour = i.group("hour")
|
|
|
|
|
|
|
|
month = i.group("month")
|
|
|
|
|
|
|
|
day = i.group("day")
|
|
|
|
|
|
|
|
year = i.group("year")"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#date = datetime.datetime(year=int(year), month=int(month2num(month=month)), day=int(day))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
print 'Downloading', wiki
|
|
|
|
print 'Downloading', wiki
|
|
|
|
if not os.path.exists(path):
|
|
|
|
if not os.path.exists(path):
|
|
|
|
os.makedirs(path)
|
|
|
|
os.makedirs(path)
|
|
|
@ -98,6 +75,5 @@ for wiki in wikia:
|
|
|
|
date = re.findall(r'{"name":"pages_%s.xml.gz","timestamp":\d+,"mwtimestamp":"(\d{8})\d{6}"}' % (dump.lower()), json)[0]
|
|
|
|
date = re.findall(r'{"name":"pages_%s.xml.gz","timestamp":\d+,"mwtimestamp":"(\d{8})\d{6}"}' % (dump.lower()), json)[0]
|
|
|
|
print urldump, dump, date #, hour, month, day, year
|
|
|
|
print urldump, dump, date #, hour, month, day, year
|
|
|
|
|
|
|
|
|
|
|
|
#os.system('wget -c "%s" -O %s/%s-%s-pages-meta-%s.gz' % (urldump, path, prefix, date.strftime('%Y%m%d'), dump.lower() == 'current' and 'current' or 'history'))
|
|
|
|
|
|
|
|
#-q, turn off verbose
|
|
|
|
#-q, turn off verbose
|
|
|
|
os.system('wget -q -c "%s" -O %s/%s-%s-pages-meta-%s.gz' % (urldump, path, prefix, date, dump.lower() == 'current' and 'current' or 'history'))
|
|
|
|
os.system('wget -q -c "%s" -O %s/%s-%s-pages-meta-%s.gz' % (urldump, path, prefix, date, dump.lower() == 'current' and 'current' or 'history'))
|
|
|
|