#!/usr/bin/env python3.6
#
# Copyright (C) 2016-2017 The University of Sheffield, UK
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""
A crawler for extensions from the Chrome Web Store.
"""
import sys
import os
import datetime
import time
import getopt
import logging

from ExtensionCrawler.discover import get_new_ids
from ExtensionCrawler.archive import get_forum_ext_ids, get_existing_ids, update_extensions
from ExtensionCrawler.config import *
from ExtensionCrawler.util import log_info, log_exception, setup_logger


def write_log(dirname, fname, text):
    """Write text into the file with name fname in directory dirname."""
    os.makedirs(dirname, exist_ok=True)
    with open(os.path.join(dirname, fname), 'w') as logfile:
        logfile.write(text)


def log_failures_to_file(dirname, today, res):
    """Log failures during download/update in the log directory dirname."""
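    # One log file per failure category, named <today>-<category>.log, where
    # today is the ISO-8601 UTC timestamp of the start of the run.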
    not_authorized = "\n".join(sorted([x.ext_id for x in res if x.not_authorized()]))
    write_log(dirname, today + "-not-authorized.log", not_authorized)

    updated = "\n".join(sorted([x.ext_id for x in res if x.is_ok() and not x.not_modified()]))
    write_log(dirname, today + "-updated.log", updated)

    has_exception = "\n".join(sorted([x.ext_id for x in res if x.has_exception()]))
    write_log(dirname, today + "-raised-exception.log", has_exception)

    raised_ddos = "\n".join(sorted([x.ext_id for x in res if x.raised_google_ddos()]))
    write_log(dirname, today + "-raised-ddos.log", raised_ddos)

    not_in_store = "\n".join(sorted([x.ext_id for x in res if x.not_in_store()]))
    write_log(dirname, today + "-not-in-store.log", not_in_store)

    new = "\n".join(sorted([x.ext_id for x in res if x.is_new()]))
    write_log(dirname, today + "-new-in-store.log", new)

    file_corruption = "\n".join(sorted([x.ext_id for x in res if x.corrupt_tar()]))
    write_log(dirname, today + "-file-corruption.log", file_corruption)

    sql_exception = "\n".join(sorted([x.ext_id for x in res if x.sql_exception()]))
    write_log(dirname, today + "-sql-exception.log", sql_exception)

    worker_exception = "\n".join(sorted([x.ext_id for x in res if x.worker_exception is not None]))
    write_log(dirname, today + "-worker-exception.log", worker_exception)

    sql_fail = "\n".join(sorted([x.ext_id for x in res if not x.sql_success()]))
    write_log(dirname, today + "-sql-not-updated.log", sql_fail)


def log_summary(res, runtime=0):
    """Log brief result summary."""
    corrupt_tar_archives = list(filter(lambda x: x.corrupt_tar(), res))

    log_info("Summary:")
    log_info("  Updated {} out of {} extensions successfully".format(
        str(len(list(filter(lambda x: x.is_ok(), res)))), str(len(res))))
    log_info("  Updated extensions:       {:8d}".format(
        len(list(filter(lambda x: x.is_ok() and not x.not_modified(), res)))))
    log_info("  Updated SQL databases:    {:8d}".format(
        len(list(filter(lambda x: x.sql_success(), res)))))
    log_info("  New extensions:           {:8d}".format(
        len(list(filter(lambda x: x.is_new(), res)))))
    log_info("  Not authorized:           {:8d}".format(
        len(list(filter(lambda x: x.not_authorized(), res)))))
    log_info("  Raised Google DDOS:       {:8d}".format(
        len(list(filter(lambda x: x.raised_google_ddos(), res)))))
    log_info("  Not modified archives:    {:8d}".format(
        len(list(filter(lambda x: x.not_modified(), res)))))
    log_info("  Extensions not in store:  {:8d}".format(
        len(list(filter(lambda x: x.not_in_store(), res)))))
    log_info("  Unknown exception:        {:8d}".format(
        len(list(filter(lambda x: x.has_exception(), res)))))
    log_info("  Corrupt tar archives:     {:8d}".format(len(corrupt_tar_archives)))
    log_info("  SQL exception:            {:8d}".format(
        len(list(filter(lambda x: x.sql_exception(), res)))))
    log_info("  Worker exception:         {:8d}".format(
        len(list(filter(lambda x: x.worker_exception is not None, res)))))
    log_info("  Total runtime:            {}".format(
        str(datetime.timedelta(seconds=int(runtime)))))

    if corrupt_tar_archives:
        log_info("")
        log_info("List of extensions with corrupted files/archives:")
        for x in corrupt_tar_archives:
            log_info("{}: {}".format(x.ext_id, x.exception), 1)
        log_info("")


def helpmsg():
    """Print help message."""
    print("crawler [OPTION]")
    print("    -h                  print this help text")
    print("    -s                  silent (no log messages)")
    print("    -d                  discover new extensions")
    print("    -p <N>              number of concurrent downloads")
    print("    -a <DIR>            archive directory (base dir for data/, conf/, and log/)")
    print("    -t <N>              timeout for an individual extension download")
    print("    --max-discover <N>  discover at most N new extensions")
    print("    --pystuck           start pystuck server for all processes")


def print_config(basedir, archive_dir, conf_dir, discover, parallel,
                 ext_timeout, start_pystuck):
    """Print current configuration."""
    log_info("Configuration:")
    log_info("  Base dir:                          {}".format(basedir))
    log_info("  Archive directory:                 {}".format(archive_dir))
    log_info("  Configuration directory:           {}".format(conf_dir))
    log_info("  Discover new extensions:           {}".format(discover))
    log_info("  Max num. of concurrent downloads:  {}".format(parallel))
    log_info("  Download timeout:                  {}".format(ext_timeout))
    log_info("  Start PyStuck:                     {}".format(start_pystuck))


def parse_args(argv):
    """Parse command line arguments."""
    basedir = const_basedir()
    parallel = const_parallel_downloads()
    verbose = const_verbose()
    discover = const_discover()
    ext_timeout = const_ext_timeout()
    max_discover = None
    start_pystuck = False
    try:
        opts, _ = getopt.getopt(
            argv, "hsda:p:t:",
            ["timeout=", "archive=", "parallel=", "max-discover=", "pystuck"])
    except getopt.GetoptError:
        helpmsg()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            helpmsg()
            sys.exit()
        elif opt in ("-a", "--archive"):
            basedir = arg
        elif opt in ("-p", "--parallel"):
            parallel = int(arg)
        elif opt in ("-t", "--timeout"):
            ext_timeout = int(arg)
        elif opt == '-s':
            verbose = False
        elif opt == '-d':
            discover = True
        elif opt == '--max-discover':
            discover = True
            max_discover = int(arg)
        elif opt == '--pystuck':
            start_pystuck = True
    return basedir, parallel, verbose, discover, max_discover, ext_timeout, start_pystuck


def main(argv):
    """Main function of the extension crawler."""
    today = datetime.datetime.now(datetime.timezone.utc).isoformat()
    basedir, parallel, verbose, discover, max_discover, ext_timeout, start_pystuck = parse_args(argv)

    setup_logger(verbose)

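    # pystuck is a third-party debugging tool: run_server() starts a small
    # server inside the process (here on port 10000) through which the stacks
    # of running threads can be inspected, e.g. when the crawler appears to
    # hang.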
    if start_pystuck:
        import pystuck
        pystuck.run_server(port=10000)

    # Suppress the "Starting HTTPS connection ..." log messages; older
    # versions of requests log them at INFO, newer ones at DEBUG.
    logging.getLogger("requests").setLevel(logging.WARNING)

    archive_dir = os.path.join(basedir, "data")
    os.makedirs(archive_dir, exist_ok=True)

    conf_dir = os.path.join(basedir, "conf")
    os.makedirs(conf_dir, exist_ok=True)
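    # Ensure that conf/forums.conf exists (created empty on the first run);
    # get_forum_ext_ids() below reads the ids of forum-tracked extensions from
    # this directory. The exact file format is defined in
    # ExtensionCrawler.archive.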
    open(os.path.join(conf_dir, "forums.conf"), 'a').close()

    log_dir = os.path.join(basedir, "log")
    os.makedirs(log_dir, exist_ok=True)

    start_time = time.time()

    print_config(basedir, archive_dir, conf_dir, discover, parallel,
                 ext_timeout, start_pystuck)

    forum_ext_ids = get_forum_ext_ids(conf_dir)
    known_ids = list(set(get_existing_ids(archive_dir)) | set(forum_ext_ids))
    discovered_ids = []
    if discover:
        log_info("Discovering new ids {}...".format(
            "(at most {}) ".format(max_discover) if max_discover is not None else ""))
        try:
            discovered_ids = list(get_new_ids(known_ids, max_discover))
        except Exception:
            log_exception("Exception when discovering new ids")
        log_info("Discovered {} new extensions".format(len(discovered_ids)), 1)

    ext_ids = list(set(discovered_ids) | set(known_ids))
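    # Release the id lists that are no longer needed so that they can be
    # garbage-collected before the long-running update.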
    discovered_ids = None
    known_ids = None

    res = update_extensions(archive_dir, parallel, forum_ext_ids, ext_ids,
                            ext_timeout, verbose, start_pystuck)

    # Retry (once) the extensions that failed with unknown exceptions, as
    # such failures are often temporary.
    has_exception = list(filter(lambda x: x.has_exception(), res))
    if has_exception:
        log_info("  {} extensions with unknown exceptions, starting another try ...".format(
            str(len(has_exception))))
        has_exception_ids = [x.ext_id for x in has_exception]
        forum_ext_ids_except = list(
            set(forum_ext_ids).intersection(set(has_exception_ids)))
        ext_ids_except = sorted(
            list(set(has_exception_ids) - set(forum_ext_ids_except)))
        res_update = update_extensions(archive_dir, parallel,
                                       forum_ext_ids_except, ext_ids_except,
                                       ext_timeout, verbose, start_pystuck)
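        # Drop the failed results from the first pass and append the retry
        # results.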
        res = list(set(res) - set(has_exception)) + res_update

    end_time = time.time()

    log_summary(res, int(end_time - start_time))
    log_failures_to_file(log_dir, today, res)


if __name__ == "__main__":
    main(sys.argv[1:])