#!/usr/bin/env python3
#
# Copyright (C) 2016,2017 The University of Sheffield, UK
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
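
"""Crawler core for the Chrome Web Store: fetch an extension's overview
page, CRX package, reviews, and support-forum posts, and archive them in
a per-extension tar file with one dated subdirectory per crawl."""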

import os
import sys
import glob
import re
import requests
from time import sleep
from random import randint
import datetime
from ExtensionCrawler.config import *
from ExtensionCrawler.util import *
from ExtensionCrawler.archive import *
import dateutil
import dateutil.parser
from multiprocessing import Pool
from functools import partial
import shutil
import tarfile
from fs.tarfs import ReadTarFS


class Error(Exception):
    pass


class CrawlError(Error):
    def __init__(self, extid, message, pagecontent=""):
        self.extid = extid
        self.message = message
        self.pagecontent = pagecontent


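# Outcome of a single HTTP request: records the response status code (if a
# response arrived at all) and the exception raised (if any).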
class RequestResult:
    def __init__(self, response=None, exception=None):
        if response is not None:
            self.http_status = response.status_code
        self.exception = exception

    def is_ok(self):
        return (self.exception is None) and (self.http_status == 200)

    def not_authorized(self):
        return (self.exception is None) and (self.http_status == 401)

    def not_found(self):
        return (self.exception is None) and (self.http_status == 404)

    def has_exception(self):
        return self.exception is not None

    def not_available(self):
        return (self.exception is None) and (self.http_status == 503)

    def not_modified(self):
        return (self.exception is None) and (self.http_status == 304)


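# Aggregated outcome of updating one extension: combines the RequestResults
# of the overview page, the CRX download, and (when forums are crawled) the
# reviews and support pages.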
class UpdateResult:
    def __init__(self, id, is_new, res_overview, res_crx, res_reviews,
                 res_support):
        self.id = id
        self.new = is_new
        self.res_overview = res_overview
        self.res_crx = res_crx
        self.res_reviews = res_reviews
        self.res_support = res_support

    def is_new(self):
        return self.new

    def is_ok(self):
        return (self.res_overview.is_ok() and
                (self.res_crx.is_ok() or self.res_crx.not_modified()) and
                ((self.res_reviews is None) or self.res_reviews.is_ok()) and
                ((self.res_support is None) or self.res_support.is_ok()))

    def not_authorized(self):
        return (self.res_overview.not_authorized() or
                self.res_crx.not_authorized() or
                (self.res_reviews is not None and
                 self.res_reviews.not_authorized()) or
                (self.res_support is not None and
                 self.res_support.not_authorized()))

    def not_in_store(self):
        return (
            self.res_overview.not_found() or self.res_crx.not_found() or
            (self.res_reviews is not None and self.res_reviews.not_found()) or
            (self.res_support is not None and self.res_support.not_found()))

    def has_exception(self):
        return (
            self.res_overview.has_exception() or
            self.res_crx.has_exception() or
            (self.res_reviews is not None and self.res_reviews.has_exception())
            or (self.res_support is not None and
                self.res_support.has_exception()))

    def raised_google_ddos(self):
        return (
            (self.res_reviews is not None and self.res_reviews.not_available())
            or (self.res_support is not None and
                self.res_support.not_available()))

    def not_modified(self):
        return self.res_crx.not_modified()


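# The archive is sharded into subdirectories named after the first three
# characters of the extension ID.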
def get_local_archive_dir(id):
    return id[:3]


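# Write text into <tar basename>/<date>/<fname>, creating the dated snapshot
# directory on demand; update_extension later packs this tree into the tar.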
def write_text(tar, date, fname, text):
    dir = os.path.join(os.path.splitext(tar)[0], date)
    os.makedirs(dir, exist_ok=True)
    with open(os.path.join(dir, fname), 'w') as f:
        f.write(text)


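# Persist the metadata of an HTTP response (headers, status code, and the
# final URL after redirects) next to the downloaded resource.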
def store_request_metadata(tar, date, fname, request):
    write_text(tar, date, fname + ".headers", str(request.headers))
    write_text(tar, date, fname + ".status", str(request.status_code))
    write_text(tar, date, fname + ".url", str(request.url))


def store_request_text(tar, date, fname, request):
    write_text(tar, date, fname, request.text)
    store_request_metadata(tar, date, fname, request)


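# Format a datetime as an HTTP (RFC 1123) date, e.g.
# "Mon, 13 Mar 2017 07:02:17 GMT". Day and month names are spelled out
# explicitly so the result does not depend on the process locale.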
def httpdate(dt):
    weekday = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][dt.weekday()]
    month = [
        "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct",
        "Nov", "Dec"
    ][dt.month - 1]
    return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
        weekday, dt.day, month, dt.year, dt.hour, dt.minute, dt.second)


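# The crawl date is encoded in the path of every archived file
# (.../<extid>/<date>/<fname>), so the last-modified date of a stored CRX
# can be recovered from its parent directory name.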
def last_modified_utc_date(path):
    if path == "":
        return ""
    return os.path.split(os.path.dirname(path))[1]


def last_modified_http_date(path):
    if path == "":
        return ""
    return httpdate(dateutil.parser.parse(last_modified_utc_date(path)))


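# Return the path of the most recent .crx inside the extension's tar archive,
# or "" if the extension has not been crawled before.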
def last_crx(archivedir, extid):
    last_crx = ""
    tar = os.path.join(archivedir, get_local_archive_dir(extid),
                       extid + ".tar")
    if os.path.exists(tar):
        archive = ReadTarFS(tar)
        old_crxs = sorted(archive.walk.files(filter=['*.crx']))
        if old_crxs:
            last_crx = old_crxs[-1]
    return last_crx


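# Fetch the extension's public overview page and archive it, together with
# the request metadata, under the current date.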
def update_overview(tar, date, verbose, ext_id):
    logtxt = logmsg(verbose, "", " * overview page: ")
    res = None
    try:
        res = requests.get(const_overview_url(ext_id), timeout=10)
        logtxt = logmsg(verbose, logtxt, "{}".format(str(res.status_code)))
        store_request_text(tar, date, 'overview.html', res)
    except Exception as e:
        logtxt = logmsg(verbose, logtxt, " / Exception: {}\n".format(str(e)))
        write_text(tar, date, 'overview.html.exception', str(e))
        return RequestResult(res, e), logtxt
    logtxt = logmsg(verbose, logtxt, "\n")
    return RequestResult(res), logtxt


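# Sanity checks for a CRX download: the server must declare Content-Type
# application/x-chrome-extension and the file name must match
# extension[_0-9]+.crx; anything else raises a CrawlError.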
def validate_crx_response(res, extid, extfilename):
    regex_extfilename = re.compile(r'^extension[_0-9]+\.crx$')
    if 'Content-Type' not in res.headers:
        text = [line.decode('utf-8') for line in res.iter_lines()]
        raise CrawlError(extid, 'Did not find Content-Type header.',
                         '\n'.join(text))
    if not res.headers['Content-Type'] == 'application/x-chrome-extension':
        text = [line.decode('utf-8') for line in res.iter_lines()]
        raise CrawlError(
            extid,
            'Expected Content-Type header to be application/x-chrome-extension, but got {}.'.
            format(res.headers['Content-Type']), '\n'.join(text))
    if not regex_extfilename.match(extfilename):
        raise CrawlError(
            extid, '{} is not a valid extension file name, skipping...'.format(
                extfilename))


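# Download the CRX package, but only if it changed: If-Modified-Since is set
# to the date of the last archived version, and on a 304 response only a
# relative link to that version is recorded instead of a second copy.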
def update_crx(archive_dir, verbose, ext_id, date):
    res = None
    extfilename = "default_ext_archive.crx"
    last_crx_file = last_crx(archive_dir, ext_id)
    last_crx_http_date = last_modified_http_date(last_crx_file)
    logtxt = logmsg(verbose, "",
                    " * crx archive (Last: {}): ".format(
                        valueOf(last_crx_http_date, "n/a")))
    # requests expects a dict here; only send If-Modified-Since when a
    # previous version exists.
    headers = {}
    if last_crx_file != "":
        headers = {'If-Modified-Since': last_crx_http_date}
    # Compute the target paths before the request so that the except branch
    # below can always write the exception file.
    tar = os.path.join(archive_dir, get_local_archive_dir(ext_id),
                       ext_id + ".tar")
    dir = os.path.join(os.path.splitext(tar)[0], date)
    try:
        res = requests.get(const_download_url().format(ext_id),
                           stream=True,
                           headers=headers,
                           timeout=10)
        logtxt = logmsg(verbose, logtxt, "{}".format(str(res.status_code)))
        extfilename = os.path.basename(res.url)
        if re.search('&', extfilename):
            extfilename = "default.crx"

        store_request_metadata(tar, date, extfilename, res)

        if res.status_code == 304:
            # Unchanged since the last crawl: store a relative link to the
            # previously archived version instead of a duplicate.
            write_text(tar, date, extfilename + ".link",
                       os.path.join("..",
                                    last_modified_utc_date(last_crx_file),
                                    extfilename) + "\n")
        elif res.status_code == 200:
            validate_crx_response(res, ext_id, extfilename)
            with open(os.path.join(dir, extfilename), 'wb') as f:
                for chunk in res.iter_content(chunk_size=512 * 1024):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
    except Exception as e:
        logtxt = logmsg(verbose, logtxt, " / Exception: {}\n".format(str(e)))
        write_text(tar, date, extfilename + ".exception", str(e))
        return RequestResult(res, e), logtxt
    logtxt = logmsg(verbose, logtxt, "\n")
    return RequestResult(res), logtxt


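# Fetch the first two pages of user reviews (100 entries each) through the
# store's review endpoint; google_dos_protection() (from
# ExtensionCrawler.util) is expected to pause between requests so the
# crawler is not mistaken for a denial-of-service attack.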
def update_reviews(tar, date, verbose, ext_id):
    logtxt = logmsg(verbose, "", " * review page: ")
    res = None
    try:
        google_dos_protection()
        res = requests.post(
            const_review_url(),
            data=const_review_payload(ext_id, "0", "100"),
            timeout=10)
        logtxt = logmsg(verbose, logtxt, "{}/".format(str(res.status_code)))
        store_request_text(tar, date, 'reviews000-099.text', res)
        google_dos_protection()
        # Second page starts at offset 100 (the original request repeated
        # offset "0", which fetched the first page twice).
        res = requests.post(
            const_review_url(),
            data=const_review_payload(ext_id, "100", "100"),
            timeout=10)
        logtxt = logmsg(verbose, logtxt, "{}".format(str(res.status_code)))
        store_request_text(tar, date, 'reviews100-199.text', res)
    except Exception as e:
        logtxt = logmsg(verbose, logtxt, " / Exception: {}\n".format(str(e)))
        write_text(tar, date, 'reviews.html.exception', str(e))
        return RequestResult(res, e), logtxt
    logtxt = logmsg(verbose, logtxt, "\n")
    return RequestResult(res), logtxt


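# Fetch the first two pages of support-forum posts; the flow mirrors
# update_reviews, using offsets 0 and 100 into the support threads.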
def update_support(tar, date, verbose, ext_id):
    logtxt = logmsg(verbose, "", " * support page: ")
    res = None
    try:
        google_dos_protection()
        res = requests.post(
            const_support_url(),
            data=const_support_payload(ext_id, "0", "100"),
            timeout=10)
        logtxt = logmsg(verbose, logtxt, "{}/".format(str(res.status_code)))
        store_request_text(tar, date, 'support000-099.text', res)
        google_dos_protection()
        res = requests.post(
            const_support_url(),
            data=const_support_payload(ext_id, "100", "100"),
            timeout=10)
        logtxt = logmsg(verbose, logtxt, "{}".format(str(res.status_code)))
        store_request_text(tar, date, 'support100-199.text', res)
    except Exception as e:
        logtxt = logmsg(verbose, logtxt, " / Exception: {}\n".format(str(e)))
        write_text(tar, date, 'support.html.exception', str(e))
        return RequestResult(res, e), logtxt
    logtxt = logmsg(verbose, logtxt, "\n")
    return RequestResult(res), logtxt


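# Update a single extension: unpack its existing tar (if any) into a working
# directory, fetch the current overview/CRX/reviews/support snapshots into a
# new dated subdirectory, then repack everything into a fresh tar, keeping
# the previous tar as <extid>.bak.tar.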
def update_extension(archivedir, verbose, forums, ext_id):
    logtxt = logmsg(verbose, "", " Updating {}".format(ext_id))
    is_new = False

    if forums:
        logtxt = logmsg(verbose, logtxt, " (including forums)")
    logtxt = logmsg(verbose, logtxt, "\n")
    date = datetime.datetime.now(datetime.timezone.utc).isoformat()

    tardir = os.path.join(archivedir, get_local_archive_dir(ext_id), ext_id)
    tar = tardir + ".tar"
    if not os.path.exists(tar):
        is_new = True
    else:
        shutil.rmtree(path=tardir, ignore_errors=True)
        ar = tarfile.open(tar)
        ar.extractall(path=os.path.join(archivedir,
                                        get_local_archive_dir(ext_id)))
        ar.close()

    os.makedirs(tardir, exist_ok=True)
    res_overview, msg_overview = update_overview(tar, date, verbose, ext_id)
    res_crx, msg_crx = update_crx(archivedir, verbose, ext_id, date)
    res_reviews = None
    msg_reviews = ""
    res_support = None
    msg_support = ""
    if forums:
        res_reviews, msg_reviews = update_reviews(tar, date, verbose, ext_id)
        res_support, msg_support = update_support(tar, date, verbose, ext_id)
    log(verbose, logtxt + msg_overview + msg_crx + msg_reviews + msg_support)
    if os.path.exists(tar):
        shutil.move(tar, tardir + ".bak.tar")
    ar = tarfile.open(tar, mode='w')
    ar.add(tardir, arcname=ext_id)
    ar.close()
    shutil.rmtree(path=tardir)
    return UpdateResult(ext_id, is_new, res_overview, res_crx, res_reviews,
                        res_support)


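# Extensions whose forums are crawled go through the rate-limited review and
# support endpoints, so they are updated sequentially; all remaining
# extensions only hit the overview and download endpoints and are updated in
# a pool of 16 worker processes.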
def update_extensions(archivedir, verbose, forums_ext_ids, ext_ids):
    ext_with_forums = []
    ext_without_forums = []
    ext_ids = list(set(ext_ids) - set(forums_ext_ids))
    forums_ext_ids = list(set(forums_ext_ids))
    log(verbose, "Updating {} extensions ({} including forums)\n".format(
        len(ext_ids), len(forums_ext_ids)))
    # First, update extensions with forums sequentially (and with delays) to
    # avoid running into Google's DDoS detection.
    log(verbose,
        " Updating {} extensions including forums (sequentially)\n".format(
            len(forums_ext_ids)))

    ext_with_forums = list(
        map(
            partial(update_extension, archivedir, verbose, True),
            forums_ext_ids))

    # Second, update extensions without forums in parallel to increase speed.
    parallel_ids = list(set(ext_ids) - set(forums_ext_ids))
    log(verbose,
        " Updating {} extensions excluding forums (parallel)\n".format(
            len(parallel_ids)))
    with Pool(16) as p:
        ext_without_forums = list(
            p.map(
                partial(update_extension, archivedir, verbose, False),
                parallel_ids))

    return ext_with_forums + ext_without_forums


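# Extension IDs are 32 lowercase alphanumeric characters; existing extensions
# are discovered by globbing the archive for matching <id>.tar files.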
def get_existing_ids(archivedir, verbose):
    byte = '[0-9a-z][0-9a-z][0-9a-z][0-9a-z][0-9a-z][0-9a-z][0-9a-z][0-9a-z]'
    word = byte + byte + byte + byte
    return list(
        map(lambda d: re.sub(r"\.tar$", "", re.sub(r"^.*/", "", d)),
            glob.glob(os.path.join(archivedir, "*", word + ".tar"))))


def get_forum_ext_ids(confdir, verbose):
    with open(os.path.join(confdir, "forums.conf")) as f:
        ids = f.readlines()
    ids = [x.strip() for x in ids]
    return ids