#!/usr/bin/env python3.6
#
# Copyright (C) 2016,2017 The University of Sheffield, UK
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
A crawler for extensions from the Chrome Web Store.
"""
import os
import sys
import datetime
import time
import getopt
import logging
import itertools

from functools import reduce
from ExtensionCrawler.discover import get_new_ids
from ExtensionCrawler.archive import get_forum_ext_ids, get_existing_ids, update_extensions
from ExtensionCrawler.config import *
from ExtensionCrawler.util import log_info, log_exception, setup_logger


def write_log(dirname, fname, text):
"""Write text into the file with name fname in directory dirname."""
os.makedirs(dirname, exist_ok=True)
with open(os.path.join(dirname, fname), 'w') as logfile:
        logfile.write(text)


def log_failures_to_file(dirname, today, res):
"""Log failures during download/update in the log directory dirname."""
not_authorized = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.not_authorized(),
res))), "")
write_log(dirname, today + "-not-authorized.log", not_authorized)
updated = reduce(
lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.is_ok() and not x.not_modified(),
res))), "")
write_log(dirname, today + "-updated.log", updated)
has_exception = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.has_exception(), res))),
"")
write_log(dirname, today + "-raised-exception.log", has_exception)
raised_ddos = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.raised_google_ddos(),
res))), "")
write_log(dirname, today + "-raised-ddos.log", raised_ddos)
not_in_store = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.not_in_store(), res))),
"")
write_log(dirname, today + "-not-in-store.log", not_in_store)
new = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id, filter(lambda x: x.is_new(), res))),
"")
write_log(dirname, today + "-new-in-store.log", new)
file_corruption = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.corrupt_tar(), res))),
"")
write_log(dirname, today + "-file-corruption.log", file_corruption)
sql_exception = reduce(lambda x, y: x + "\n" + y,
sorted(
map(lambda x: x.id,
filter(lambda x: x.sql_exception(), res))),
"")
write_log(dirname, today + "-sql-exception.log", sql_exception)
    sql_not_updated = reduce(lambda x, y: x + "\n" + y,
                             sorted(
                                 map(lambda x: x.id,
                                     filter(lambda x: not x.sql_success(), res))),
                             "")
    write_log(dirname, today + "-sql-not-updated.log", sql_not_updated)


def log_summary(res, runtime=0):
"""Log brief result summary."""
corrupt_tar_archives = list(filter(lambda x: x.corrupt_tar(), res))
log_info("Summary:")
log_info(" Updated {} out of {} extensions successfully".format(
str(len(list(filter(lambda x: x.is_ok(), res)))), str(len(res))))
log_info(" Updated extensions: {:8d}".format(
len(list(filter(lambda x: x.is_ok() and not x.not_modified(), res)))))
log_info(" Updated SQL databases: {:8d}".format(
len(list(filter(lambda x: x.sql_success(), res)))))
log_info(" New extensions: {:8d}".format(
len(list(filter(lambda x: x.is_new(), res)))))
log_info(" Not authorized: {:8d}".format(
len(list(filter(lambda x: x.not_authorized(), res)))))
log_info(" Raised Google DDOS: {:8d}".format(
len(list(filter(lambda x: x.raised_google_ddos(), res)))))
log_info(" Not modified archives: {:8d}".format(
len(list(filter(lambda x: x.not_modified(), res)))))
log_info(" Extensions not in store: {:8d}".format(
len(list(filter(lambda x: x.not_in_store(), res)))))
log_info(" Unknown exception: {:8d}".format(
len(list(filter(lambda x: x.has_exception(), res)))))
log_info(" Corrupt tar archives: {:8d}".format(
len(corrupt_tar_archives)))
log_info(" SQL exception: {:8d}".format(
len(list(filter(lambda x: x.sql_exception(), res)))))
log_info(" Total runtime: {}".format(
str(datetime.timedelta(seconds=int(runtime)))))
if corrupt_tar_archives != []:
log_info("")
log_info("List of extensions with corrupted files/archives:")
        for ext in corrupt_tar_archives:
            log_info(" " + ext.id + ": " + str(ext.exception))
        log_info("")


def helpmsg():
"""Print help message."""
print("crawler [OPTION]")
print(" -h print this help text")
print(" -s silent (no log messages)")
print(" -d discover new extensions")
2018-04-08 09:10:30 +00:00
print(" -p <N> number of concurrent downloads")
print(" -P use ProcessPool (default: Pool) for concurrency")
2018-04-06 06:27:57 +00:00
print(
" -F do not download extensions with forums (skip sequential download)"
)
print(
2018-04-08 09:10:30 +00:00
" -N do not download extensions without forums (skip concurrent download)"
2018-04-06 06:27:57 +00:00
)
2018-04-08 09:10:30 +00:00
print(" -a <DIR> archive directory")
2018-04-06 06:27:57 +00:00
print(
2018-04-08 09:10:30 +00:00
" -t <N> timeout for an individual extension download")
print(" --max-discover <N> discover at most N new extensions")
2017-01-28 14:01:53 +00:00
2018-04-06 06:27:57 +00:00
def print_config(basedir, archive_dir, conf_dir, discover, parallel,
download_ext_ids_without_forums, download_ext_ids_with_forums,
ext_timeout, use_process_pool):
"""Print current configuration."""
log_info("Configuration:")
log_info(" Base dir: {}".format(basedir))
log_info(" Archive directory: {}".format(archive_dir))
log_info(" Configuration directory: {}".format(conf_dir))
log_info(" Discover new extensions: {}".format(discover))
log_info(" Download ext. without forums: {}".format(
download_ext_ids_without_forums))
log_info(" Download ext. with forums: {}".format(
download_ext_ids_with_forums))
log_info(" Max num. of concurrent downloads: {}".format(parallel))
log_info(" Use ProcessPool: {}".format(use_process_pool))
log_info(" Download timeout: {}".format(ext_timeout))


def parse_args(argv):
"""Parse command line arguments. """
basedir = const_basedir()
parallel = const_parallel_downloads()
verbose = const_verbose()
discover = const_discover()
use_process_pool = const_use_process_pool()
download_ext_ids_with_forums = const_download_ext_ids_with_forums()
download_ext_ids_without_forums = const_download_ext_ids_without_forums()
ext_timeout = const_ext_timeout()
max_discover = None
try:
opts, _ = getopt.getopt(
argv, "hsdFNPa:p:t:",
["timeout=", "archive=", 'parallel=', 'max-discover='])
except getopt.GetoptError:
helpmsg()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
helpmsg()
sys.exit()
elif opt in ("-a", "--archive"):
basedir = arg
elif opt in ("-p", "--parallel"):
parallel = int(arg)
elif opt in ("-t", "--timeout"):
ext_timeout = int(arg)
elif opt == '-s':
verbose = False
elif opt == '-P':
use_process_pool = True
elif opt == '-d':
discover = True
elif opt == '-F':
download_ext_ids_with_forums = False
elif opt == '-N':
download_ext_ids_without_forums = False
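        # --max-discover implies discovery mode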
elif opt == '--max-discover':
discover = True
max_discover = int(arg)
    return basedir, parallel, verbose, discover, max_discover, download_ext_ids_with_forums, download_ext_ids_without_forums, ext_timeout, use_process_pool


def main(argv):
"""Main function of the extension crawler."""
today = datetime.datetime.now(datetime.timezone.utc).isoformat()
basedir, parallel, verbose, discover, max_discover, download_ext_ids_with_forums, download_ext_ids_without_forums, ext_timeout, use_process_pool = parse_args(
argv)
setup_logger(verbose)
    # Suppress the "Starting HTTPS connection ..." log messages; older
    # versions of requests log these at INFO, newer ones at DEBUG.
    logging.getLogger("requests").setLevel(logging.WARNING)

archive_dir = os.path.join(basedir, "data")
os.makedirs(archive_dir, exist_ok=True)
conf_dir = os.path.join(basedir, "conf")
os.makedirs(conf_dir, exist_ok=True)
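    # Make sure conf/forums.conf exists; opening it in append mode creates an
    # empty file if necessary.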
open(os.path.join(conf_dir, "forums.conf"), 'a').close()
log_dir = os.path.join(basedir, "log")
os.makedirs(log_dir, exist_ok=True)
start_time = time.time()
print_config(basedir, archive_dir, conf_dir, discover, parallel,
download_ext_ids_with_forums, download_ext_ids_without_forums,
ext_timeout, use_process_pool)
forum_ext_ids = get_forum_ext_ids(conf_dir)
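    # Known ids = everything already in the archive plus the forum-tracked ids.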
known_ids = list(set(get_existing_ids(archive_dir)) | set(forum_ext_ids))
discovered_ids = []
if discover:
log_info("Discovering new ids {}...".format("(at most {}) ".format(
max_discover) if max_discover is not None else ""))
try:
discovered_ids = list(
itertools.islice(get_new_ids(known_ids), max_discover))
except Exception:
log_exception("Exception when discovering new ids")
log_info("Discovered {} new extensions".format(len(discovered_ids)), 1)
ext_ids = list(set(discovered_ids) | set(known_ids))
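    # The intermediate id lists are no longer needed; drop the references.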
discovered_ids = None
known_ids = None
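    # Apply -F/-N: clearing an id list skips the corresponding download phase.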
if download_ext_ids_with_forums:
if not download_ext_ids_without_forums:
ext_ids = []
else:
if download_ext_ids_without_forums:
forum_ext_ids = []
else:
# download neither type of extensions
ext_ids = []
forum_ext_ids = []
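    # Forum extensions are downloaded sequentially, all remaining ids
    # concurrently (up to `parallel` downloads at a time).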
res = update_extensions(archive_dir, parallel, forum_ext_ids, ext_ids,
ext_timeout, use_process_pool, verbose)
    # Retry (once) the extensions that failed with an unknown exception, as
    # such failures are often temporary.
has_exception = list(filter(lambda x: x.has_exception(), res))
if has_exception != []:
        log_info(
            " {} extensions with unknown exceptions, starting another try ...".
            format(str(len(has_exception))))
has_exception_ids = list(map(lambda x: x.id, has_exception))
forum_ext_ids_except = list(
set(forum_ext_ids).intersection(set(has_exception_ids)))
ext_ids_except = sorted(
list(set(has_exception_ids) - set(forum_ext_ids_except)))
res_update = update_extensions(archive_dir, parallel,
forum_ext_ids_except, ext_ids_except, ext_timeout, use_process_pool, verbose)
res = list(set(res) - set(has_exception)) + res_update
end_time = time.time()
log_summary(res, end_time - start_time)
    log_failures_to_file(log_dir, today, res)


if __name__ == "__main__":
main(sys.argv[1:])