231 lines
8.6 KiB
Python
Executable File
231 lines
8.6 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
#
|
|
# Copyright (C) 2016,2017 The University of Sheffield, UK
|
|
#
|
|
# This program is free software: you can redistribute it and/or modify
|
|
# it under the terms of the GNU General Public License as published by
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
# (at your option) any later version.
|
|
#
|
|
# This program is distributed in the hope that it will be useful,
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
# GNU General Public License for more details.
|
|
#
|
|
# You should have received a copy of the GNU General Public License
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
#
|
|
|
|
import os
|
|
import sys
|
|
import glob
|
|
import re
|
|
import requests
|
|
from time import sleep
|
|
from random import randint
|
|
import datetime
|
|
from ExtensionCrawler.discover import *
|
|
from ExtensionCrawler.archive import *
|
|
from ExtensionCrawler.util import *
|
|
from ExtensionCrawler.discover import *
|
|
import dateutil
|
|
import dateutil.parser
|
|
import time
|
|
import getopt
|
|
|
|
# Script should run with python 3.4 or 3.5
# (deliberately rejects anything outside [3.4, 3.6) — behavior on later
# versions is untested; NOTE(review): consider raising SystemExit instead,
# since assert is stripped under python -O)
assert sys.version_info >= (3, 4) and sys.version_info < (3, 6)
|
|
|
|
|
|
def write_log(dir, fname, text):
    """Write *text* to the file *fname* inside directory *dir*.

    The directory is created first if it does not yet exist; an
    existing file of the same name is overwritten.
    """
    os.makedirs(dir, exist_ok=True)
    target = os.path.join(dir, fname)
    with open(target, 'w') as logfile:
        logfile.write(text)
|
|
|
|
|
|
def log_failures_to_file(dir, today, res):
    """Write one log file per result category into *dir*.

    For each category (not authorized, updated, exceptions, ...) a file
    named ``<today>-<category>.log`` is written containing the sorted
    extension ids of the matching results, one per line (each id is
    preceded by a newline, matching the historical format).

    The original implementation built each file body with ``reduce``,
    which is not a builtin in Python 3 and was not visibly imported
    (it only worked if a star import happened to provide it); it also
    repeated the same stanza nine times.  This version uses a single
    helper and plain string joining instead.
    """

    def _joined_ids(predicate):
        # Reproduce the reduce(lambda x, y: x + "\n" + y, ids, "")
        # output exactly: "" for no ids, otherwise every id prefixed
        # by "\n".
        return "".join("\n" + ext_id
                       for ext_id in sorted(x.id for x in res
                                            if predicate(x)))

    write_log(dir, today + "-not-authorized.log",
              _joined_ids(lambda x: x.not_authorized()))
    write_log(dir, today + "-updated.log",
              _joined_ids(lambda x: x.is_ok() and not x.not_modified()))
    write_log(dir, today + "-raised-exception.log",
              _joined_ids(lambda x: x.has_exception()))
    write_log(dir, today + "-raised-ddos.log",
              _joined_ids(lambda x: x.raised_google_ddos()))
    write_log(dir, today + "-not-in-store.log",
              _joined_ids(lambda x: x.not_in_store()))
    write_log(dir, today + "-new-in-store.log",
              _joined_ids(lambda x: x.is_new()))
    write_log(dir, today + "-file-corruption.log",
              _joined_ids(lambda x: x.corrupt_tar()))
    write_log(dir, today + "-sql-exception.log",
              _joined_ids(lambda x: x.sql_exception()))
    # Note: this log intentionally lists ids whose SQL update did NOT
    # succeed (predicate is negated), hence the "-sql-not-updated" name.
    write_log(dir, today + "-sql-not-updated.log",
              _joined_ids(lambda x: not x.sql_success()))
|
|
|
|
|
|
def log_summary(verbose, res, stderr=False, runtime=0):
|
|
def p(s):
|
|
if stderr:
|
|
sys.stderr.write(s)
|
|
else:
|
|
log(verbose, s)
|
|
|
|
total = len(res)
|
|
success = len(list(filter(lambda x: x.is_ok(), res)))
|
|
not_authorized = len(list(filter(lambda x: x.not_authorized(), res)))
|
|
has_exception = len(list(filter(lambda x: x.has_exception(), res)))
|
|
raised_ddos = len(list(filter(lambda x: x.raised_google_ddos(), res)))
|
|
not_in_store = len(list(filter(lambda x: x.not_in_store(), res)))
|
|
not_modified = len(list(filter(lambda x: x.not_modified(), res)))
|
|
corrupt_tar_archives = list(filter(lambda x: x.corrupt_tar(), res))
|
|
sql_exception = len(list(filter(lambda x: x.sql_exception(), res)))
|
|
sql_success = len(list(filter(lambda x: x.sql_success(), res)))
|
|
|
|
new = len(list(filter(lambda x: x.is_new(), res)))
|
|
updated = len(
|
|
list(filter(lambda x: x.is_ok() and not x.not_modified(), res)))
|
|
|
|
p("\n")
|
|
p("Summary:\n")
|
|
p(" Updated {} out of {} extensions successfully\n".format(
|
|
str(success), str(total)))
|
|
p(" Updated extensions: {:8d}\n".format(updated))
|
|
p(" Updated SQL databases: {:8d}\n".format(sql_success))
|
|
p(" New extensions: {:8d}\n".format(new))
|
|
p(" Not authorized: {:8d}\n".format(not_authorized))
|
|
p(" Raised Google DDOS: {:8d}\n".format(raised_ddos))
|
|
p(" Not modified archives: {:8d}\n".format(not_modified))
|
|
p(" Extensions not in store: {:8d}\n".format(not_in_store))
|
|
p(" Unknown exception: {:8d}\n".format(has_exception))
|
|
p(" Corrupt tar archives: {:8d}\n".format(len(corrupt_tar_archives)))
|
|
p(" SQL exception: {:8d}\n".format(sql_exception))
|
|
p(" Total runtime: {}\n".format(
|
|
str(datetime.timedelta(seconds=int(runtime)))))
|
|
|
|
if not (corrupt_tar_archives == []):
|
|
p("\n\n")
|
|
p("List of extensions with corrupted files/archives:\n")
|
|
list(
|
|
map(lambda x: p(" " + x.id + ": " + str(x.exception) + "\n"),
|
|
corrupt_tar_archives))
|
|
p("\n")
|
|
|
|
|
|
def help():
    """Print command-line usage information to stdout.

    Fixes the "disover" typo and documents the -p/--parallel option,
    which main() accepts but the original help text omitted.
    """
    print("crawler [OPTION]")
    print("    -h        print this help text")
    print("    -s        silent (no log messages)")
    print("    -d        discover new extensions")
    print("    -a=<DIR>  archive directory")
    print("    -p=<N>    max num. of concurrent downloads")
|
|
|
|
|
|
def main(argv):
    """Command-line entry point.

    Parses options, creates the archive directory layout, updates all
    known/discovered extensions (retrying unknown failures once) and
    writes summary plus per-category failure logs.

    Args:
        argv: command-line arguments without the program name
            (i.e. sys.argv[1:]).
    """
    today = datetime.datetime.now(datetime.timezone.utc).isoformat()
    # Defaults; overridable via command-line options below.
    basedir = "archive"
    parallel = 24
    verbose = True
    discover = False
    try:
        opts, args = getopt.getopt(argv, "hsda:p:", ["archive=", 'parallel='])
    except getopt.GetoptError:
        help()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            help()
            sys.exit()
        elif opt in ("-a", "--archive"):
            basedir = arg
        elif opt in ("-p", "--parallel"):
            parallel = int(arg)
        elif opt == '-s':
            verbose = False
        elif opt == '-d':
            discover = True

    archive_dir = os.path.join(basedir, "data")
    os.makedirs(archive_dir, exist_ok=True)
    conf_dir = os.path.join(basedir, "conf")
    # BUGFIX: create conf_dir *before* touching forums.conf.  The
    # original code opened the file first, which raised
    # FileNotFoundError on a fresh base directory.
    os.makedirs(conf_dir, exist_ok=True)
    # Ensure forums.conf exists (append mode leaves existing content).
    open(os.path.join(conf_dir, "forums.conf"), 'a').close()
    log_dir = os.path.join(basedir, "log")
    os.makedirs(log_dir, exist_ok=True)

    start_time = time.time()

    log(verbose, "Configuration:\n")
    log(verbose, " Base dir: {}\n".format(basedir))
    log(verbose, " Archive directory: {}\n".format(archive_dir))
    log(verbose, " Configuration directory: {}\n".format(conf_dir))
    log(verbose, " Discover new extensions: {}\n".format(discover))
    log(verbose, " Max num. of concurrent downloads: {}\n".format(parallel))
    log(verbose, "\n")

    # Known ids = ids already archived plus ids mentioned in forums.
    forum_ext_ids = get_forum_ext_ids(conf_dir, verbose)
    existing_ids = get_existing_ids(archive_dir, verbose)
    known_ids = list(set(existing_ids) | set(forum_ext_ids))
    discovered_ids = []
    if discover:
        discovered_ids = get_new_ids(verbose, known_ids)
    ext_ids = list(set(discovered_ids) | set(known_ids))

    res = update_extensions(archive_dir, verbose, parallel, forum_ext_ids,
                            ext_ids)

    # We re-try (once) the extensions with unknown exceptions, as
    # they are often temporary
    has_exception = list(filter(lambda x: x.has_exception(), res))
    if has_exception:
        log(verbose,
            " {} extensions with unknown exceptions, start another try ...\n".
            format(str(len(has_exception))))
        has_exception_ids = list(map(lambda x: x.id, has_exception))
        oldres = list(set(res) - set(has_exception))
        forum_ext_ids_except = list(
            set(forum_ext_ids).intersection(set(has_exception_ids)))
        ext_ids_except = sorted(
            list(set(has_exception_ids) - set(forum_ext_ids_except)))
        res_update = update_extensions(archive_dir, verbose, parallel,
                                       forum_ext_ids_except, ext_ids_except)
        res = oldres + res_update

    end_time = time.time()
    # Summary goes both to the regular log and to stderr.
    log_summary(verbose, res, False, end_time - start_time)
    log_summary(verbose, res, True, end_time - start_time)
    log_failures_to_file(log_dir, today, res)
|
|
|
|
|
|
# Script entry point: forward the command-line arguments (without the
# program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
|