autocorres: update release tool for Isabelle2018

Note that we have removed the LIB_FILES manifest and no longer intend
to maintain it manually. Instead, we just extract the entire Lib and
CLib sessions from the L4.verified repository. This means that the
next AutoCorres release will have some unneeded theories and a couple
of files with GPL licenses.
This commit is contained in:
Japheth Lim 2018-08-28 14:09:16 +10:00
parent 703c43fa2b
commit 1af23137f0
7 changed files with 154 additions and 200 deletions

View File

@@ -9,15 +9,16 @@
# @TAG(NICTA_BSD)
#
import os
import errno
import subprocess
import argparse
import shutil
import tempfile
import glob
import time
import errno
import fnmatch
import glob
import os
import re
import shutil
import subprocess
import tempfile
import time
# Create a temporary directory
class TempDir():
@@ -72,7 +73,7 @@ def read_manifest(filename, base):
base_dir = os.path.split(pattern)[0]
g = glob.glob(os.path.join(base, pattern))
if len(g) == 0:
print ("Warning: Pattern '%s' matches 0 files." % pattern)
print("Warning: Pattern '%s' matches 0 files." % pattern)
results += [(base_dir, x) for x in g]
return results
@@ -119,14 +120,19 @@ parser.add_argument('--no-cleanup', action='store_true',
help='Don''t delete temporary directories.', default=False)
parser.add_argument('-r', '--repository', metavar='REPO',
type=str, help='Path to the L4.verified repository base.', default=None)
parser.add_argument('--archs', metavar='ARCH,...',
type=str, default='ARM,ARM_HYP,X64',
help='L4V_ARCHs to include (comma-separated)')
args = parser.parse_args()
args.archs = args.archs.split(',')
# Setup output filename if the user used the default.
if args.output == None:
if args.output is None:
args.output = "autocorres-%s.tar.gz" % args.version
# If no repository was specified, assume it is in the cwd.
if args.repository == None:
if args.repository is None:
try:
args.repository = subprocess.check_output([
"git", "rev-parse", "--show-toplevel"]).strip()
@@ -151,6 +157,13 @@ if not os.path.exists(args.isabelle_tar) or not args.isabelle_tar.endswith(".tar
parser.error("Expected a path to the official Isabelle release.")
args.isabelle_tar = os.path.abspath(args.isabelle_tar)
# Tools for theory dependencies.
thydeps_tool = os.path.join(args.repository, 'misc', 'scripts', 'thydeps')
repo_isabelle_dir = os.path.join(args.repository, 'isabelle')
# User's preferred shell, if any.
user_shell = os.environ.get('SHELL', '/bin/sh')
# Create temp dir.
with TempDir(cleanup=(not args.no_cleanup)) as base_dir:
# Generate base directory.
@@ -159,42 +172,31 @@ with TempDir(cleanup=(not args.no_cleanup)) as base_dir:
os.mkdir(target_dir)
# Copy autocorres files.
print "Copying files..."
print("Copying files...")
ac_files = copy_manifest(target_dir,
os.path.join(release_files_dir, "AUTOCORRES_FILES"),
os.path.join(args.repository, "tools", "autocorres"), "autocorres")
# Copy lib files.
lib_files = copy_manifest(target_dir,
os.path.join(release_files_dir, "LIB_FILES"),
os.path.join(args.repository, "lib"), "lib")
# Copy theories from dependent sessions in the lib directory.
lib_deps = ''
for arch in args.archs:
lib_deps += subprocess.check_output(
[thydeps_tool, '-I', repo_isabelle_dir, '-d', '.', '-d', 'tools/autocorres/tests',
'-b', 'lib', '-r',
'AutoCorresTest'],
cwd=args.repository, env=dict(os.environ, L4V_ARCH=arch))
lib_deps = sorted(set(lib_deps.splitlines()))
# Double-check our theory dependencies.
thydeps_tool = os.path.join(args.repository, 'misc/scripts/thydeps')
if os.path.exists(thydeps_tool):
print "Checking theory dependencies..."
included_thys = [os.path.join(dir, file)
for dir, file in lib_files if file.endswith('.thy')]
included_thys += [os.path.join(dir, file)
for dir, file in ac_files if file.endswith('.thy')]
thy_deps = ''
for arch in ["ARM","X64"]:
thy_deps += subprocess.check_output([thydeps_tool, '-T', 'text', '-o', '-'] + included_thys,
cwd=args.repository, env=dict(os.environ, L4V_ARCH=arch))
thy_deps = sorted(set(thy_deps.splitlines()))
needed_files = [os.path.join(args.repository, f)
for f in thy_deps if f.startswith('lib') or f.startswith('tools/autocorres')]
manifest_files = [os.path.join(dir, file) for dir, file in ac_files + lib_files]
missing_files = [f for f in needed_files if f not in manifest_files]
if missing_files:
print "Warning: missing dependencies from release manifest:"
for f in missing_files:
print(" - %s" % f)
else:
print "Warning: cannot check theory dependencies: missing tool misc/scripts/thydeps"
for f in lib_deps:
f_src = os.path.join(args.repository, 'lib', f)
f_dest = os.path.join(target_dir, 'lib', os.path.relpath(f, args.repository))
mkdir_p(os.path.dirname(f_dest))
shutil.copyfile(f_src, f_dest)
# Copy various other files.
shutil.copyfile(
os.path.join(args.repository, 'lib', 'Word_Lib', 'ROOT'),
os.path.join(target_dir, 'lib', 'Word_Lib', 'ROOT'))
shutil.copyfile(
os.path.join(release_files_dir, "ROOT.release"),
os.path.join(target_dir, "autocorres", "ROOT"))
@@ -213,6 +215,40 @@ with TempDir(cleanup=(not args.no_cleanup)) as base_dir:
shutil.copyfile(
os.path.join(args.repository, "LICENSE_BSD2.txt"),
os.path.join(target_dir, "LICENSE_BSD2.txt"))
shutil.copyfile(
os.path.join(args.repository, "LICENSE_GPLv2.txt"),
os.path.join(target_dir, "LICENSE_GPLv2.txt"))
# Extract dependent sessions in lib. FIXME: rather kludgy
print('Extracting sessions from lib/ROOT...')
# Set up ROOT for the tests dir, for the thydeps tool
subprocess.check_call(
['make', 'tests/ROOT'],
cwd=os.path.join(args.repository, 'tools', 'autocorres'))
lib_sessions = ['Lib', 'CLib']
lib_ROOT = os.path.join(args.repository, 'lib', 'ROOT')
with open(lib_ROOT, 'r') as lib_root:
data = lib_root.read()
# Split out session specs. Assume ROOT file has standard indentation.
chunks = data.split('\nsession ')
# This will have the license header, etc.
header = chunks[0]
# Remaining sections. Try to remove comments
sessions = ['session ' + re.sub(r'\(\*.*?\*\)', '', x, flags=re.DOTALL)
for x in chunks[1:]]
new_root = header
wanted_specs = {}
for wanted in lib_sessions:
spec = [spec for spec in sessions if spec.startswith('session %s ' % wanted)]
if len(spec) != 1:
print('error: %s session not found in %r' % (wanted, lib_ROOT))
new_root += '\n' + spec[0]
with open(os.path.join(target_dir, 'lib', 'ROOT'), 'w') as root_f:
root_f.write(new_root)
# For the examples, generate ".thy" files where appropriate, and also
# generate an "All.thy" which contains all the examples.
@@ -220,12 +256,22 @@ with TempDir(cleanup=(not args.no_cleanup)) as base_dir:
thy_file = os.path.splitext(c_file)[0] + ".thy"
base_name = os.path.splitext(os.path.basename(c_file))[0]
with open(thy_file, "w") as f:
f.write('theory %s\n' % base_name)
f.write('imports "../../AutoCorres"\n')
f.write('begin\n\n')
f.write('install_C_file "%s"\n\n' % os.path.basename(c_file))
f.write('autocorres "%s"\n\n' % os.path.basename(c_file))
f.write('end\n')
f.write('''
theory %s
imports
"AutoCorres.AutoCorres"
begin
install_C_file "%s"
autocorres "%s"
end
'''.strip() % (base_name,
os.path.basename(c_file),
os.path.basename(c_file)
))
for f in glob.glob(os.path.join(target_dir, "autocorres", "tests", "parse-tests", "*.c")):
gen_thy_file(f)
subprocess.check_call([
@@ -242,70 +288,70 @@ with TempDir(cleanup=(not args.no_cleanup)) as base_dir:
with open(filename) as f:
data = f.read()
new_data = data.replace(old_string, new_string)
if new_data != data:
print(' replaced ../../lib with ../lib in %r' % filename)
with open(filename, "w") as f:
f.write(new_data)
for f in rglob(os.path.join(target_dir, "autocorres"), "*.thy"):
inplace_replace_string(f, "../../lib/", "../lib/")
for f in rglob(os.path.join(target_dir, "lib"), "*.thy"):
inplace_replace_string(f, "../tools/c-parser/", "../c-parser/")
# The l4v tests already check license headers, so these are no longer needed
## Check licenses
#print "Checking licenses..."
#subprocess.check_call([
# os.path.join(args.repository, "misc", "license-tool", "check_license.py"),
# "--exclude", os.path.join(release_files_dir, "licenses-ignore"),
# os.path.join(target_dir)])
## Expand licenses.
#print "Expanding licenses..."
#subprocess.check_call([
# os.path.join(args.repository, "misc", "license-tool", "expand_license.py"),
# os.path.join(release_files_dir, "licenses"),
# os.path.join(target_dir)])
# Extract the C parser
print "Extracting C parser..."
print("Extracting C parser...")
# We want to mix the C parser directory structure around a little.
with TempDir() as c_parser_working_dir:
subprocess.check_call(["tar", "-xz", "-C", c_parser_working_dir, "--strip-components=1", "-f", args.cparser_tar])
# The C parser uses mllex and mlyacc to generate its grammar. We build
# the grammar files so that our release won't have a dependency on mlton.
print "Generating C parser grammar files..."
print("Generating C parser grammar files...")
subprocess.check_call(['make', 'c-parser-deps'], cwd=os.path.join(c_parser_working_dir, "src", "c-parser"))
shutil.move(os.path.join(c_parser_working_dir, "src", "c-parser"), os.path.join(target_dir, "c-parser"))
shutil.move(os.path.join(c_parser_working_dir, "README"), os.path.join(target_dir, "c-parser", "README"))
shutil.move(os.path.join(c_parser_working_dir, "doc", "ctranslation.pdf"), os.path.join(target_dir, "c-parser", "doc", "ctranslation.pdf"))
# Patch the release.
def do_patch(patch_file):
    """Apply one release patch file to the extracted release tree.

    Runs the external ``patch`` tool with cwd=target_dir, so paths inside
    the patch are resolved relative to the release tree ("-p 1" strips the
    leading path component).  "--no-backup-if-mismatch" avoids littering
    the release with .orig files; "-r -" redirects reject output instead
    of writing a reject file (presumably to stdout — TODO confirm against
    the patch(1) manual).  check_call raises CalledProcessError if the
    patch fails to apply, aborting the release build.
    """
    subprocess.check_call(["patch", "-p", "1", "-r", "-", "--no-backup-if-mismatch", "--quiet",
                           "-i", patch_file], cwd=target_dir)
for i in sorted(glob.glob(os.path.join(release_files_dir, "*.patch"))):
do_patch(i)
# Double-check our theory dependencies.
if os.path.exists(thydeps_tool):
print("Checking theory dependencies...")
thy_deps = ''
for arch in args.archs:
thy_deps += subprocess.check_output(
[thydeps_tool, '-I', repo_isabelle_dir, '-b', '.', '-r',
'AutoCorres', 'AutoCorresTest'],
cwd=target_dir, env=dict(os.environ, L4V_ARCH=arch))
thy_deps = sorted(set(thy_deps.splitlines()))
needed_files = [os.path.join(args.repository, f)
for f in thy_deps
if f.startswith('tools/autocorres')]
manifest_files = [os.path.join(dir, file) for dir, file in ac_files]
missing_files = [f for f in needed_files if f not in manifest_files]
if missing_files:
print("Warning: missing dependencies from release manifest:")
for f in missing_files:
print(" - %s" % f)
else:
print("Warning: cannot check theory dependencies: missing tool %r" % thydeps_tool)
# Check for bad strings.
print "Searching for bad strings..."
for s in ["davidg", "dgreenaway", "jlim", "jalim", "autorefine", "@LICENSE"]:
print("Searching for bad strings...")
for s in ["davidg", "dgreenaway", "jlim", "jalim", "autorefine"]:
ret = subprocess.call(["grep", "-i", "-r", s, base_dir])
if not ret:
raise Exception("Found a bad string")
# Set modified date of everything.
print "Setting file modification/access dates..."
print("Setting file modification/access dates...")
target_time = int(time.mktime(time.localtime()))
for (root, dirs, files) in os.walk(base_dir, followlinks=False):
for i in dirs + files:
os.utime(os.path.join(root, i), (target_time, target_time))
# Extract the Isabelle release
print "Extracting Isabelle..."
isabelle_dir = os.path.join(base_dir, "isabelle")
mkdir_p(isabelle_dir)
subprocess.check_call(["tar", "-xz", "-C", isabelle_dir, "--strip-components=1", "-f", args.isabelle_tar])
isabelle_bin = os.path.join(isabelle_dir, "bin", "isabelle")
assert os.path.exists(isabelle_bin)
print("Extracting Isabelle...")
base_isabelle_dir = os.path.join(base_dir, "isabelle")
mkdir_p(base_isabelle_dir)
subprocess.check_call(["tar", "-xz", "-C", base_isabelle_dir, "--strip-components=1", "-f", args.isabelle_tar])
base_isabelle_bin = os.path.join(base_isabelle_dir, "bin", "isabelle")
assert os.path.exists(base_isabelle_bin)
# Build the documentation.
def build_docs(tree, isabelle_bin):
@@ -316,42 +362,43 @@ with TempDir(cleanup=(not args.no_cleanup)) as base_dir:
# Build the docs.
try:
subprocess.check_call(
[isabelle_bin, "build", "-c", "-d", ".", "-d", "./autocorres/doc/quickstart", "AutoCorresQuickstart"],
[isabelle_bin, "build", "-c", "-d", ".", "-d", "./autocorres/doc/quickstart", "-v", "AutoCorresQuickstart"],
cwd=os.path.join(doc_build_dir, "doc"), env=dict(os.environ, L4V_ARCH="ARM"))
except subprocess.CalledProcessError:
print "Building documentation failed."
print("Building documentation failed.")
if args.browse:
subprocess.call("zsh", cwd=target_dir)
subprocess.call(user_shell, cwd=target_dir)
# Copy the generated PDF into our output.
shutil.copyfile(
os.path.join(doc_build_dir, "doc", "autocorres", "doc", "quickstart", "output", "document.pdf"),
os.path.join(tree, "quickstart.pdf"))
print "Building documentation..."
build_docs(target_dir, isabelle_bin)
print("Building documentation...")
build_docs(target_dir, base_isabelle_bin)
# Compress everything up.
if args.output != None:
print "Creating tarball..."
print("Creating tarball...")
subprocess.check_call(["tar", "-cz", "--numeric-owner",
"--owner", "root", "--group", "root",
"--owner", "nobody", "--group", "nogroup",
"-C", base_dir, "-f", args.output, target_dir_name])
# Run a test if requested.
if args.test:
print "Testing release..."
print("Testing release...")
try:
subprocess.check_call([isabelle_bin, "version"], cwd=target_dir)
for arch in ["ARM","X64"]:
subprocess.check_call([isabelle_bin, "build", "-d", ".", "AutoCorres", "AutoCorresTest"],
subprocess.check_call([base_isabelle_bin, "version"], cwd=target_dir)
for arch in args.archs:
subprocess.check_call([base_isabelle_bin, "build", "-d", ".", "-v",
"AutoCorres", "AutoCorresTest"],
cwd=target_dir, env=dict(os.environ, L4V_ARCH=arch))
except subprocess.CalledProcessError:
print "Test failed"
print("Test failed")
if args.browse:
subprocess.call("zsh", cwd=target_dir)
subprocess.call(user_shell, cwd=target_dir)
# Open a shell in the directory if requested.
if args.browse:
print "Opening shell..."
subprocess.call("zsh", cwd=target_dir)
print("Opening shell...")
subprocess.call(user_shell, cwd=target_dir)

View File

@@ -1,3 +1,8 @@
AutoCorres 1.5 (DATE TBD)
--------------
* Isabelle2018 edition of both AutoCorres and the C parser.
AutoCorres 1.4 (2 March 2018)
--------------

View File

@@ -1,91 +0,0 @@
Word_Lib/HOL_Lemmas.thy:
Generic theorems about HOL.
Word_Lib/Aligned.thy:
Theorems about alignment of words.
Distinct_Prop.thy:
Definitions and theorems about "distinct_prop", which ensures a
property P holds across all pairs of elements in a list.
Word_Lib/Enumeration.thy:
Word_Lib/Word_Enum.thy:
Type class for enumerations, and an instantiation for Words.
Lib.thy:
Miscellaneous HOL theorems.
Word_Lib/More_Divides.thy:
Miscellaneous theorems relating to division.
ARM/WordSetup.thy:
X64/WordSetup.thy:
Word_Lib/Word_Setup_32.thy:
Word_Lib/Word_Setup_64.thy:
Word_Lib/Hex_Words.thy:
Word_Lib/Norm_Words.thy:
Word_Lib/WordBitwise_Signed.thy:
Word_Lib/Word_Syntax.thy:
Word_Lib/Word_Lib.thy:
Word_Lib/Signed_Words.thy:
Word_Lib/Word_Lemmas.thy:
Word_Lib/Word_Lemmas_32.thy:
Word_Lib/Word_Lemmas_64.thy:
Word_Lib/Word_Type_Syntax.thy:
Lemmas and definitions relating to Words.
Monad_WP/OptionMonad.thy:
Monad_WP/OptionMonadND.thy:
Monad_WP/OptionMonadWP.thy:
Definition of Option Monad and its relation to the non-deterministic option
monad.
NonDetMonadLemmaBucket.thy:
Monad_WP/NonDetMonad.thy:
Monad_WP/NonDetMonadLemmas.thy:
Monad_WP/NonDetMonadVCG.thy:
Monad_WP/WhileLoopRules.thy:
Monad_WP/WhileLoopRulesCompleteness.thy:
MonadEq.thy:
Non-deterministic state monad definitions, lemmas and tools.
Monad_WP/wp/WP-method.ML:
Monad_WP/wp/WP_Pre.thy:
Monad_WP/wp/WP.thy:
Monad_WP/wp/WPC.thy:
"Weakest-precondition" method for operating on Hoare-triples.
StringOrd.thy:
Ostensibly required by the C parser.
NICTATools.thy:
Apply_Debug.thy:
Apply_Trace.thy:
Apply_Trace_Cmd.thy:
Find_Names.thy:
AutoLevity_Base.thy:
AutoLevity_Hooks.thy:
Insulin.thy:
ShowTypes.thy:
Trace_Attribs.thy:
trace_attribs.ML:
more_xml.ML:
Solves_Tac.thy:
subgoal_focus/Subgoal_Methods.thy:
Rule_By_Method.thy:
Eisbach_Methods.thy:
ml-helpers/TermPatternAntiquote.thy:
Monad_WP/Strengthen.thy:
Eval_Bool.thy:
Extract_Conjunct.thy:
Try_Methods.thy:
Value_Abbreviation.thy:
Compatibility libraries for NICTA developments.
set.ML:
A simple set abstract data type in SML.
LemmaBucket_C.thy:
TypHeapLib.thy:
Proofs relating to Tuch's heap lift framework.

View File

@@ -9,12 +9,16 @@
*)
session AutoCorres = CParser +
sessions
"HOL-Eisbach"
Lib
CLib
theories
"AutoCorres"
session AutoCorresTest = AutoCorres +
sessions
"HOL-Number_Theory"
AutoCorres
theories
"tests/AutoCorresTest"

View File

@@ -1,2 +1,4 @@
lib
lib/Word_Lib
c-parser
autocorres

View File

@@ -1,6 +0,0 @@
*:
NICTA -> NICTA_BSD_release
NICTA_CORE -> NICTA_BSD_release
KARLSRUHE_BSD -> STRIP
OTHER_BSD -> STRIP

View File

@@ -1,7 +0,0 @@
*/README
*/MAINTAINERS
*/CONTRIBUTORS
*/ChangeLog
*/ROOTS
*/IsaMakefile
*/tests/AutoCorresTest.thy