#!/usr/bin/env python3
#
# Copyright (C) 2016,2017 The University of Sheffield, UK
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
A crawler for extensions from the Chrome Web Store.
"""

import os
import sys
import datetime
import time
import getopt
import sqlite3
from functools import reduce

from ExtensionCrawler.discover import get_new_ids
from ExtensionCrawler.archive import get_forum_ext_ids, get_existing_ids, update_extensions
from ExtensionCrawler.util import log
import ExtensionCrawler.config

# Script should run with Python 3.4 or 3.5
assert sys.version_info >= (3, 4) and sys.version_info < (3, 6)


def write_log(dirname, fname, text):
    """Write text into the file with name fname in directory dirname."""
    os.makedirs(dirname, exist_ok=True)
    with open(os.path.join(dirname, fname), 'w') as logfile:
        logfile.write(text)


def log_failures_to_file(dirname, today, res):
    """Log failures during download/update in the log directory dirname."""
    not_authorized = reduce(
        lambda x, y: x + "\n" + y,
        sorted(map(lambda x: x.id, filter(lambda x: x.not_authorized(), res))),
        "")
    write_log(dirname, today + "-not-authorized.log", not_authorized)

    updated = reduce(
        lambda x, y: x + "\n" + y,
        sorted(
            map(lambda x: x.id,
                filter(lambda x: x.is_ok() and not x.not_modified(), res))),
        "")
    write_log(dirname, today + "-updated.log", updated)

    has_exception = reduce(
        lambda x, y: x + "\n" + y,
        sorted(map(lambda x: x.id, filter(lambda x: x.has_exception(), res))),
        "")
    write_log(dirname, today + "-raised-exception.log", has_exception)

    raised_ddos = reduce(
        lambda x, y: x + "\n" + y,
        sorted(
            map(lambda x: x.id,
                filter(lambda x: x.raised_google_ddos(), res))), "")
    write_log(dirname, today + "-raised-ddos.log", raised_ddos)

    not_in_store = reduce(
        lambda x, y: x + "\n" + y,
        sorted(map(lambda x: x.id, filter(lambda x: x.not_in_store(), res))),
        "")
    write_log(dirname, today + "-not-in-store.log", not_in_store)

    new = reduce(
        lambda x, y: x + "\n" + y,
        sorted(map(lambda x: x.id, filter(lambda x: x.is_new(), res))), "")
    write_log(dirname, today + "-new-in-store.log", new)

    file_corruption = reduce(
        lambda x, y: x + "\n" + y,
        sorted(map(lambda x: x.id, filter(lambda x: x.corrupt_tar(), res))),
        "")
    write_log(dirname, today + "-file-corruption.log", file_corruption)

    sql_exception = reduce(
        lambda x, y: x + "\n" + y,
        sorted(map(lambda x: x.id, filter(lambda x: x.sql_exception(), res))),
        "")
    write_log(dirname, today + "-sql-exception.log", sql_exception)

    # Note the negation: this collects the ids whose SQL update did *not*
    # succeed.
    sql_not_updated = reduce(
        lambda x, y: x + "\n" + y,
        sorted(
            map(lambda x: x.id, filter(lambda x: not x.sql_success(), res))),
        "")
    write_log(dirname, today + "-sql-not-updated.log", sql_not_updated)


def log_summary(verbose, res, stderr=False, runtime=0):
    """Log a brief result summary to the log stream or, if stderr is True,
    to stderr."""

    def printlog(msg):
        """Print log message."""
        if stderr:
            sys.stderr.write(msg)
        else:
            log(verbose, msg)

    corrupt_tar_archives = list(filter(lambda x: x.corrupt_tar(), res))

    printlog("\n")
    printlog("Summary:\n")
    printlog("  Updated {} out of {} extensions successfully\n".format(
        str(len(list(filter(lambda x: x.is_ok(), res)))), str(len(res))))
    printlog("  Updated extensions:      {:8d}\n".format(
        len(list(filter(lambda x: x.is_ok() and not x.not_modified(), res)))))
    printlog("  Updated SQL databases:   {:8d}\n".format(
        len(list(filter(lambda x: x.sql_success(), res)))))
    printlog("  New extensions:          {:8d}\n".format(
        len(list(filter(lambda x: x.is_new(), res)))))
    printlog("  Not authorized:          {:8d}\n".format(
        len(list(filter(lambda x: x.not_authorized(), res)))))
    printlog("  Raised Google DDOS:      {:8d}\n".format(
        len(list(filter(lambda x: x.raised_google_ddos(), res)))))
    printlog("  Not modified archives:   {:8d}\n".format(
        len(list(filter(lambda x: x.not_modified(), res)))))
    printlog("  Extensions not in store: {:8d}\n".format(
        len(list(filter(lambda x: x.not_in_store(), res)))))
    printlog("  Unknown exception:       {:8d}\n".format(
        len(list(filter(lambda x: x.has_exception(), res)))))
    printlog("  Corrupt tar archives:    {:8d}\n".format(
        len(corrupt_tar_archives)))
    printlog("  SQL exception:           {:8d}\n".format(
        len(list(filter(lambda x: x.sql_exception(), res)))))
    printlog("  Total runtime:           {}\n".format(
        str(datetime.timedelta(seconds=int(runtime)))))

    if corrupt_tar_archives != []:
        printlog("\n\n")
        printlog("List of extensions with corrupted files/archives:\n")
        list(
            map(lambda x: printlog("  " + x.id + ": " + str(x.exception) + "\n"),
                corrupt_tar_archives))
        printlog("\n")


def helpmsg():
    """Print help message."""
    print("crawler [OPTION]")
    print("    -h        print this help text")
    print("    -s        silent (no log messages)")
    print("    -d        discover new extensions")
    print("    -a <DIR>  archive directory")
    print("    -p <N>    max num. of concurrent downloads")


def print_config(verbose, basedir, archive_dir, conf_dir, discover, parallel):
    """Print current configuration."""
    log(verbose, "Configuration:\n")
    log(verbose, "  Base dir:                         {}\n".format(basedir))
    log(verbose, "  Archive directory:                {}\n".format(archive_dir))
    log(verbose, "  Configuration directory:          {}\n".format(conf_dir))
    log(verbose, "  Discover new extensions:          {}\n".format(discover))
    log(verbose, "  Max num. of concurrent downloads: {}\n".format(parallel))
    log(verbose, "  SQLite 3 version:                 {}\n".format(
        sqlite3.sqlite_version))
    log(verbose, "\n")


def parse_args(argv):
    """Parse command line arguments."""
    basedir = ExtensionCrawler.config.const_basedir()
    parallel = ExtensionCrawler.config.const_parallel_downloads()
    verbose = ExtensionCrawler.config.const_verbose()
    discover = ExtensionCrawler.config.const_discover()
    try:
        opts, _ = getopt.getopt(argv, "hsda:p:", ["archive=", "parallel="])
    except getopt.GetoptError:
        helpmsg()
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            helpmsg()
            sys.exit()
        elif opt in ("-a", "--archive"):
            basedir = arg
        elif opt in ("-p", "--parallel"):
            parallel = int(arg)
        elif opt == '-s':
            verbose = False
        elif opt == '-d':
            discover = True
    return basedir, parallel, verbose, discover
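
# For example (hypothetical values),
#   parse_args(["-d", "-s", "-a", "/srv/archive", "-p", "8"])
# returns ("/srv/archive", 8, False, True),
# i.e. (basedir, parallel, verbose, discover).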


def main(argv):
    """Main function of the extension crawler."""
    today = datetime.datetime.now(datetime.timezone.utc).isoformat()
    basedir, parallel, verbose, discover = parse_args(argv)

    archive_dir = os.path.join(basedir, "data")
    os.makedirs(archive_dir, exist_ok=True)
    conf_dir = os.path.join(basedir, "conf")
    os.makedirs(conf_dir, exist_ok=True)
    open(os.path.join(conf_dir, "forums.conf"), 'a').close()
    log_dir = os.path.join(basedir, "log")
    os.makedirs(log_dir, exist_ok=True)

    start_time = time.time()

    print_config(verbose, basedir, archive_dir, conf_dir, discover, parallel)

    forum_ext_ids = get_forum_ext_ids(conf_dir, verbose)
    known_ids = list(
        set(get_existing_ids(archive_dir, verbose)) | set(forum_ext_ids))
    discovered_ids = []
    if discover:
        discovered_ids = get_new_ids(verbose, known_ids)
    ext_ids = list(set(discovered_ids) | set(known_ids))

    # Drop the references so the (potentially large) id lists can be
    # garbage-collected.
    discovered_ids = None
    known_ids = None

    res = update_extensions(archive_dir, verbose, parallel, forum_ext_ids,
                            ext_ids)

    # We re-try (once) the extensions with unknown exceptions, as
    # they are often temporary.
    has_exception = list(filter(lambda x: x.has_exception(), res))
    if has_exception != []:
        log(verbose,
            "  {} extensions with unknown exceptions, start another try ...\n".
            format(str(len(has_exception))))
        has_exception_ids = list(map(lambda x: x.id, has_exception))
        forum_ext_ids_except = list(
            set(forum_ext_ids).intersection(set(has_exception_ids)))
        ext_ids_except = sorted(
            list(set(has_exception_ids) - set(forum_ext_ids_except)))
        res_update = update_extensions(archive_dir, verbose, parallel,
                                       forum_ext_ids_except, ext_ids_except)
        # Replace the first-try failures with their retry results.
        res = list(set(res) - set(has_exception)) + res_update

    end_time = time.time()
    log_summary(verbose, res, False, end_time - start_time)
    log_summary(verbose, res, True, end_time - start_time)
    log_failures_to_file(log_dir, today, res)


if __name__ == "__main__":
    main(sys.argv[1:])