Compare commits

...

18 Commits

Author SHA1 Message Date (CI status)
Achim D. Brucker 840c2d63ed Initial commit. 2023-05-14 17:37:28 +02:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 7149ba4cf1 Tuned. 2023-02-13 07:23:23 +00:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 6d6327a000 Exclude diff files. 2023-02-12 07:01:23 +00:00
Achim D. Brucker 712a372372 Use fully qualified name for containers. 2023-02-11 22:01:23 +00:00 (ci/woodpecker/push/build: failed)
Achim D. Brucker 343e4a4012 Added build setup checks. 2023-02-11 22:01:23 +00:00
Achim D. Brucker eca181309d Update to Isabelle 2022. 2022-10-29 22:01:23 +01:00
Achim D. Brucker d536dcff8a Update to Isabelle 2022. 2022-10-29 21:08:03 +01:00 (ci/woodpecker/push/build: failed)
Achim D. Brucker 50219d34cc Removed Nano_JSON, which is now an official AFP entry: https://www.isa-afp.org/entries/Nano_JSON.html 2022-10-29 20:36:43 +01:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 6eb7d1cde3 Renaming to comply with the usual naming convention that theory names start with an uppercase letter. 2022-04-02 13:12:59 +01:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 29fbeaed36 Added email notification for failed builds. 2022-04-01 08:16:37 +01:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 3ae86d2624 Renamed upstream repository. 2022-03-28 07:24:02 +01:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 10c0ebcd38 Initial commit: example of a simple external oracle. 2022-03-21 06:50:23 +00:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 00a830f09c Point directly to session specific HTML files. 2022-03-20 10:42:37 +00:00 (ci/woodpecker/push/build: success)
Achim D. Brucker b71825b3d4 Fixed artifact deployment. 2022-03-20 10:38:58 +00:00 (ci/woodpecker/push/build: success)
Achim D. Brucker 85619d4be3 Reorganized CI setup. 2022-03-20 10:36:46 +00:00 (ci/woodpecker/push/build: failed)
Achim D. Brucker 70d50de6e2 Switched from Jenkins to Woodpecker CI. 2022-03-20 08:09:26 +00:00 (ci/woodpecker/push/build: success)
Achim D. Brucker cdf1aaff5f Switched from Jenkins to Woodpecker CI. 2022-03-20 08:01:38 +00:00
Achim D. Brucker 87794eef79 Updated Isabelle version. 2021-12-23 20:39:09 +00:00
16 changed files with 447 additions and 806 deletions

.ci/Jenkinsfile (vendored, deleted), 10 lines removed

@ -1,10 +0,0 @@
pipeline {
agent any
stages {
stage('Build') {
steps {
sh 'docker run -v $PWD/:/WORKING logicalhacking:isabelle2019 isabelle build -D /WORKING'
}
}
}
}

.woodpecker/README.md (new file), 14 lines added

@ -0,0 +1,14 @@
# Continuous Build and Release Setup
[![status-badge](https://ci.logicalhacking.com/api/badges/adbrucker/isabelle-hacks/status.svg)](https://ci.logicalhacking.com/adbrucker/isabelle-hacks)
This directory contains the CI configuration for [Woodpecker CI](https://woodpecker-ci.org/).
It may also contain additional tools and scripts that are useful for preparing a release.
## Generated Artifacts
### Latest Build
* [document.pdf](https://artifacts.logicalhacking.com/ci/adbrucker/isabelle-hacks/main/latest/document.pdf)
* [browser_info](https://artifacts.logicalhacking.com/ci/adbrucker/isabelle-hacks/main/latest/browser_info/Unsorted/isabelle-hacks/)
* [aux files](https://artifacts.logicalhacking.com/ci/adbrucker/isabelle-hacks/main/latest/)

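The artifact links above can also be fetched non-interactively; a one-line sketch using curl (assuming curl is available) for the PDF listed above:

curl -O https://artifacts.logicalhacking.com/ci/adbrucker/isabelle-hacks/main/latest/document.pdf
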
.woodpecker/build.yml (new file), 38 lines added

@ -0,0 +1,38 @@
pipeline:
build:
image: docker.io/logicalhacking/isabelle2022
commands:
- ./.woodpecker/check_dangling_theories
- ./.woodpecker/check_external_file_refs
- ./.woodpecker/check_quick_and_dirty
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/
- mkdir -p $ARTIFACT_DIR
- export `isabelle getenv ISABELLE_HOME_USER`
- isabelle build -D . -o browser_info
- isabelle scala .woodpecker/profiling.scala --sort e isabelle-hacks
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
- cp output/document.pdf $ARTIFACT_DIR
- cd $ARTIFACT_DIR
- cd ..
- ln -s * latest
deploy:
image: docker.io/drillster/drone-rsync
settings:
hosts: [ "ci.logicalhacking.com"]
port: 22
source: .artifacts/$CI_REPO_OWNER/*
target: $CI_REPO_OWNER
include: [ "**.*"]
key:
from_secret: artifacts_ssh
user: artifacts
notify:
image: docker.io/drillster/drone-email
settings:
host: smtp.0x5f.org
username: woodpecker
password:
from_secret: email
from: ci@logicalhacking.com
when:
status: [ changed, failure ]

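For reference, the build step of this pipeline can be approximated locally, following the pattern of the removed Jenkinsfile above; the volume mount under /WORKING is an assumption, while the image name and build options are taken from the pipeline:

docker run -v "$PWD":/WORKING docker.io/logicalhacking/isabelle2022 \
  isabelle build -D /WORKING -o browser_info
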
.woodpecker/check_dangling_theories (new executable file), 33 lines added

@ -0,0 +1,33 @@
#!/bin/bash
set -e
failuremsg="Error"
failurecode=1
while [ $# -gt 0 ]
do
case "$1" in
--warning|-w)
failuremsg="Warning"
failurecode=0;;
esac
shift
done
echo "Checking for theories that are not part of an Isabelle session:"
echo "==============================================================="
PWD=`pwd`
TMPDIR=`mktemp -d`
isabelle build -D . -l -n | grep $PWD | sed -e "s| *${PWD}/||" | sort -u | grep thy$ > ${TMPDIR}/sessions-thy-files.txt
find * -type f | sort -u | grep thy$ > ${TMPDIR}/actual-thy-files.txt
thylist=`comm -13 ${TMPDIR}/sessions-thy-files.txt ${TMPDIR}/actual-thy-files.txt`
if [ -z "$thylist" ] ; then
echo " * Success: No dangling theories found."
exit 0
else
echo -e "$thylist"
echo "$failuremsg: Dangling theories found (see list above)!"
exit $failurecode
fi

.woodpecker/check_external_file_refs (new executable file), 45 lines added

@ -0,0 +1,45 @@
#!/bin/bash
failuremsg="Error"
failurecode=1
while [ $# -gt 0 ]
do
case "$1" in
--warning|-w)
failuremsg="Warning"
failurecode=0;;
esac
shift
done
DIRREGEXP="\\.\\./"
echo "Checking for references pointing outside of session directory:"
echo "=============================================================="
REGEXP=$DIRREGEXP
DIR=$DIRMATCH
failed=0
for i in $(seq 1 10); do
FILES=`find * -mindepth $((i-1)) -maxdepth $i -type f -not -name "*.diff" | xargs`
if [ -n "$FILES" ]; then
grep -s ${REGEXP} ${FILES}
exit=$?
if [ "$exit" -eq 0 ] ; then
failed=1
fi
fi
REGEXP="${DIRREGEXP}${REGEXP}"
done
if [ "$failed" -ne 0 ] ; then
echo "$failuremsg: Forbidden reference to files outside of their session directory!"
exit $failurecode
fi
echo " * Success: No relative references to files outside of their session directory found."
exit 0

.woodpecker/check_quick_and_dirty (new executable file), 30 lines added

@ -0,0 +1,30 @@
#!/bin/bash
set -e
failuremsg="Error"
failurecode=1
while [ $# -gt 0 ]
do
case "$1" in
--warning|-w)
failuremsg="Warning"
failurecode=0;;
esac
shift
done
echo "Checking for sessions with quick_and_dirty mode enabled:"
echo "========================================================"
rootlist=`find -name 'ROOT' -exec grep -l 'quick_and_dirty *= *true' {} \;`
if [ -z "$rootlist" ] ; then
echo " * Success: No sessions with quick_and_dirty mode enabled found."
exit 0
else
echo -e "$rootlist"
echo "$failuremsg: Sessions with quick_and_dirty mode enabled found (see list above)!"
exit $failurecode
fi

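The three check scripts above are the ones invoked at the start of the build step in .woodpecker/build.yml; a possible local invocation from the repository root, using the --warning flag they all parse to downgrade failures to exit code 0:

./.woodpecker/check_dangling_theories --warning
./.woodpecker/check_external_file_refs --warning
./.woodpecker/check_quick_and_dirty --warning
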
.woodpecker/profiling.scala (new executable file), 88 lines added

@ -0,0 +1,88 @@
#!/bin/sh
exec isabelle scala "$0" "$@"
!#
import isabelle._
import scala.io.Source
import scala.math.Ordering.Implicits._
def print_times (sessions:List[String], sort:String):Unit = {
val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER")
val store = Sessions.store(Options.init())
for (session <- sessions) {
val db_path = user_output_dir+store.database(session)
if (db_path.file.exists()) {
val db = SQLite.open_database(db_path)
val command_timings = store.read_command_timings(db, session)
val theory_timings = store.read_theory_timings(db, session)
if (!(theory_timings.isEmpty)) {
println()
println(f" +---------------------------------------------------------+-------------+------------+------------+")
println(f" | Theory | elapsed [s] | cpu [s] | gc [s] |")
println(f" +---------------------------------------------------------+-------------+------------+------------+")
val timings = sort match {
case "n" => theory_timings.sortBy(p => p.filter(e => e._1 == "name").head)
case "e" => theory_timings.sortBy(p => p.filter(e => e._1 == "elapsed").head._2.toDouble).reverse
case "c" => theory_timings.sortBy(p => p.filter(e => e._1 == "cpu").head._2.toDouble).reverse
case "g" => theory_timings.sortBy(p => p.filter(e => e._1 == "gc").head._2.toDouble).reverse
case _ => theory_timings
}
var telapsed = 0.0
var tcpu = 0.0
var tgc = 0.0
timings.foreach { p => {
val (_, theory) = p.filter (e => e._1 == "name").head
val (_, elapsed) = p.filter (e => e._1 == "elapsed").head
val (_, cpu) = p.filter (e => e._1 == "cpu").head
val (_, gc) = p.filter (e => e._1 == "gc").head
telapsed = telapsed + elapsed.toDouble
tcpu = tcpu + cpu.toDouble
tgc = tgc + gc.toDouble
println(f" | $theory%-55s | $elapsed%11s | $cpu%10s | $gc%10s |")}
}
println(f" +---------------------------------------------------------+-------------+------------+------------+")
println(f" | Total | $telapsed%11.3f | $tcpu%10.3f | $tgc%10.3f |")
println(f" +---------------------------------------------------------+-------------+------------+------------+")
println()
}else{
System.err.println("Warning: No theory timings found!")
}
}
}
}
def main(args: Array[String]):Unit = {
val usage = """
Usage: isabelle scala extract_timing_as_csv.scala [--help] SESSION_NAME
--sort n|c|e|g sort by [n]ame, [c]pu, [e]lapsed, [g]c
--help prints this help message
"""
val arglist = args.toList
type OptionMap = Map[Symbol, Any]
Map(scala.Symbol("sort") -> "u")
def nextOption(map : OptionMap, list: List[String]) : OptionMap = {
def isSwitch(s : String) = (s(0) == '-')
list match {
case Nil => map
case "--help" :: tail => println(usage)
System.exit(0)
map
case "--sort" :: o :: tail => o match {
case "n" => nextOption(map ++ Map(scala.Symbol("sort") -> "n"), tail)
case "c" => nextOption(map ++ Map(scala.Symbol("sort") -> "c"), tail)
case "e" => nextOption(map ++ Map(scala.Symbol("sort") -> "e"), tail)
case "g" => nextOption(map ++ Map(scala.Symbol("sort") -> "g"), tail)
}
case string :: Nil => nextOption(map ++ Map(scala.Symbol("session") -> string), list.tail)
case option :: tail => println("Unknown option "+option)
System.exit(1)
map
}
}
val options = nextOption(Map(),arglist)
val sessions = List(options(scala.Symbol("session")).asInstanceOf[String])
print_times (sessions, options(scala.Symbol("sort")).asInstanceOf[String])
}

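The profiling script is invoked from the build step in .woodpecker/build.yml; the corresponding stand-alone call (session isabelle-hacks, sorted by elapsed time) is:

isabelle scala .woodpecker/profiling.scala --sort e isabelle-hacks

Note that the script only reports timings for sessions whose build database already exists, so it has to run after isabelle build.
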
Code_Reflection.thy

@ -28,7 +28,7 @@
* Dependencies: None
***********************************************************************************)
chapter\<open>Code Reflection for \<close>
chapter\<open>Code Reflection for Isabelle\<close>
theory
"Code_Reflection"
imports
@ -44,7 +44,7 @@ ML\<open>
fun is_sml_file f = String.isSuffix ".ML" (Path.implode (#path f))
val files = (map (Generated_Files.check_files_in (Context.proof_of ctxt)) args)
val ml_files = filter is_sml_file (map #1 (maps Generated_Files.get_files_in files))
val ml_content = map (fn f => Syntax.read_input (#content f)) ml_files
val ml_content = map (fn f => Syntax.read_input (Bytes.content (#content f))) ml_files
fun eval ml_content = fold (fn sml => (ML_Context.exec
(fn () => ML_Context.eval_source ML_Compiler.flags sml)))
ml_content

Fxp.thy (renamed from fxp.thy)

@ -44,7 +44,7 @@ to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
***********************************************************************************)
chapter\<open>The Functional XML Parser for Isabelle\<close>
theory fxp
theory Fxp
imports Main
begin

Ml_Yacc_Lib.thy (renamed from ml_yacc_lib.thy)

@ -45,7 +45,7 @@ to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
chapter\<open>ML Yacc Library for Isabelle\<close>
theory
"ml_yacc_lib"
"Ml_Yacc_Lib"
imports
Main
begin

Nano_JSON.thy (deleted), 763 lines removed

@ -1,763 +0,0 @@
(***********************************************************************************
* Copyright (c) 2019 Achim D. Brucker
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* SPDX-License-Identifier: BSD-2-Clause
* Repository: https://git.logicalhacking.com/adbrucker/isabelle-hacks/
* Dependencies: None (assert.thy is used for testing the theory but it is
* not required for providing the functionality of this hack)
***********************************************************************************)
(***********************************************************************************
# Changelog
This comment documents all notable changes to this file in a format inspired by
[Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres
to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
- Improved representation of IEEE reals
- Fixed serializer for definitions using equality from Pure
## [1.0.0] - 2019-01-21
- Initial release
***********************************************************************************)
chapter\<open>An Import/Export of JSON-like Formats for Isabelle/HOL\<close>
theory
"Nano_JSON"
imports
Complex_Main (* required for type real *)
keywords
"import_JSON" :: thy_decl
and "definition_JSON" :: thy_decl
and "serialize_JSON" :: thy_decl
begin
text\<open>
This theory implements an import/export format for Isabelle/HOL that is inspired by
JSON (JavaScript Object Notation). While the format defined in this theory is inspired
by the JSON standard (@{url "https://www.json.org"}), it is not fully compliant. Most
notably,
\<^item> only basic support for Unicode characters
\<^item> numbers are mapped to @{type "real"}, which is not a faithful representation of IEEE floating
point numbers, moreover, we extended the abstract syntax to allow for representing integers as
@{type "int"}.
Still, our JSON-like import/export should work with most real-world JSON files, thus simplifying
the data exchange between Isabelle/HOL and tools that can read/write JSON.
Overall, this theory should enable you to work with JSON encoded data in Isabelle/HOL without
the need of implementing parsers or serialization in Isabelle/ML. You should be able to implement
mapping from the Nano JSON HOL data types to your own data types on the level of Isabelle/HOL (i.e.,
as executable HOL functions). Nevertheless, the provided ML routine that converts between the
ML representation and the HOL representation of Nano JSON can also serve as a starting point
for converting the ML representation to your own, domain-specific, HOL encoding.
\<close>
section\<open>Defining a JSON-like Data Structure\<close>
text\<open>
In this section, we define a HOL data type for representing JSON-like data structures.
\<close>
datatype number = INTEGER int | REAL real
datatype json = OBJECT "(string * json) list"
| ARRAY "json list"
| NUMBER "number"
| STRING "string"
| BOOL "bool"
| NULL
text\<open>
Using the data type @{typ "json"}, we can now represent JSON encoded data easily in HOL:
\<close>
subsection\<open>Example\<close>
definition example01::json where
"example01 =
OBJECT [(''menu'', OBJECT [(''id'', STRING ''file''), (''value'', STRING ''File''),
(''popup'', OBJECT [(''menuitem'', ARRAY
[OBJECT [(''value'', STRING ''New''), (''onclick'', STRING ''CreateNewDoc()'')],
OBJECT [(''value'', STRING ''Open''), (''onclick'', STRING ''OpenDoc()'')],
OBJECT [(''value'', STRING ''Close''), (''onclick'', STRING ''CloseDoc()'')]
])]
)])]"
text\<open>
The translation of the data type @{typ "json"} to ML is straightforward. In addition, we also
provide methods for converting JSON instances between the representation as Isabelle terms and
the representation as ML data structure.
\<close>
subsection\<open>ML Implementation\<close>
ML\<open>
signature NANO_JSON_TYPE = sig
datatype number = INTEGER of int | REAL of IEEEReal.decimal_approx
datatype json = OBJECT of (string * json) list
| ARRAY of json list
| NUMBER of number
| STRING of string
| BOOL of bool
| NULL
val term_of_json: json -> term
val json_of_term: term -> json
end
structure Nano_Json_Type : NANO_JSON_TYPE = struct
datatype number = INTEGER of int | REAL of IEEEReal.decimal_approx
datatype json = OBJECT of (string * json) list
| ARRAY of json list
| NUMBER of number
| STRING of string
| BOOL of bool
| NULL
fun ieee_real_to_rat_approx rat = let
val _ = warning ("Conversion of (real) numbers is not JSON compliant.")
(* val rat = Real.toDecimal r *)
fun pow (_, 0) = 1
| pow (x, n) = if n mod 2 = 0 then pow (x*x, n div 2)
else x * pow (x*x, n div 2);
fun rat_of_dec rat = let
val sign = #sign rat
val digits = #digits rat
val exp = #exp rat
fun numerator_of _ [] = 0
| numerator_of c (x::xs) = x*pow(10,c) + (numerator_of (c+1) xs)
val numerator_abs = numerator_of 0 (rev digits)
val denominator = pow(10, (List.length digits - exp))
in
(if sign then ~ numerator_abs else numerator_abs, denominator)
end
in
case #class rat of
IEEEReal.ZERO => (0,0)
| IEEEReal.SUBNORMAL => rat_of_dec rat
| IEEEReal.NORMAL => rat_of_dec rat
| IEEEReal.INF => error "Real is INF, not yet supported."
| IEEEReal.NAN => error "Real is NaN, not yet supported."
end
fun mk_divide t1 t2 = @{const Rings.divide_class.divide (real)} $ t1 $ t2
fun mk_real_num i = HOLogic.mk_number @{typ "Real.real"} i
fun mk_real (p,q) = if q = 1 then mk_real_num p else mk_divide (mk_real_num p) (mk_real_num q)
fun dest_real (@{const Rings.divide_class.divide (real)} $a$b) =
Real.toDecimal(Real.fromInt(HOLogic.dest_number a |> snd)
/ Real.fromInt(HOLogic.dest_number b |> snd))
| dest_real t = Real.toDecimal (Real.fromInt (HOLogic.dest_number t |> snd))
fun term_of_json (OBJECT l) = @{const "OBJECT"}
$(HOLogic.mk_list ((HOLogic.mk_prodT (HOLogic.stringT,@{typ "json"})))
(map (fn (s,j) => HOLogic.mk_tuple[HOLogic.mk_string s, term_of_json j]) l))
| term_of_json (ARRAY l) = @{const "ARRAY"}
$(HOLogic.mk_list ( @{typ "json"}) (map term_of_json l))
| term_of_json (NUMBER (INTEGER i)) = @{const "NUMBER"}
$(@{const "INTEGER"}$(HOLogic.mk_number @{typ "int"} i))
| term_of_json (NUMBER (REAL r)) = @{const "NUMBER"}
$(@{const "REAL"}$(mk_real (ieee_real_to_rat_approx r)))
| term_of_json (STRING s) = @{const "STRING"}$(HOLogic.mk_string s)
| term_of_json (BOOL v) = @{const "BOOL"}$(if v then @{const "True"} else @{const "False"})
| term_of_json (NULL) = @{const "NULL"}
fun json_of_term t = let
fun dest_key_value [string, json] = (HOLogic.dest_string string, json_of json)
| dest_key_value _ = error "dest_key_value: not a key-value pair."
and json_of (@{const "OBJECT"} $ l) = OBJECT (map (dest_key_value o HOLogic.strip_tuple) (HOLogic.dest_list l))
| json_of (@{const "ARRAY"} $ l) = ARRAY (map json_of (HOLogic.dest_list l))
| json_of (@{const "NUMBER"} $ @{const "INTEGER"} $ i) = (NUMBER (INTEGER (HOLogic.dest_numeral i)))
| json_of (@{const "NUMBER"} $ @{const "REAL"} $ r) = (NUMBER (REAL (dest_real r)))
| json_of (@{const "STRING"} $ s) = STRING (HOLogic.dest_string s)
| json_of (@{const "BOOL"} $ @{const "True"}) = BOOL true
| json_of (@{const "BOOL"} $ @{const "False"}) = BOOL true
| json_of @{const "NULL"} = NULL
| json_of _ = error "Term not supported in json_of_term."
in
if type_of t = @{typ "json"} then json_of t
else error "Term not of type json."
end
end
\<close>
section\<open>Parsing Nano JSON\<close>
text\<open>
In this section, we define the infrastructure for parsing JSON-like data structures as
well as for importing them into Isabelle/HOL. This implementation was inspired by the
``Simple Standard ML JSON parser'' from Chris Cannam.
\<close>
subsection\<open>ML Implementation\<close>
subsubsection\<open>Lexer\<close>
ML\<open>
signature NANO_JSON_LEXER = sig
structure T : sig
datatype token = NUMBER of char list
| STRING of string
| BOOL of bool
| NULL
| CURLY_L
| CURLY_R
| SQUARE_L
| SQUARE_R
| COLON
| COMMA
val string_of_T : token -> string
end
val tokenize_string: string -> T.token list
end
structure Nano_Json_Lexer : NANO_JSON_LEXER = struct
structure T = struct
datatype token = NUMBER of char list
| STRING of string
| BOOL of bool
| NULL
| CURLY_L
| CURLY_R
| SQUARE_L
| SQUARE_R
| COLON
| COMMA
fun string_of_T t =
case t of NUMBER digits => String.implode digits
| STRING s => s
| BOOL b => Bool.toString b
| NULL => "null"
| CURLY_L => "{"
| CURLY_R => "}"
| SQUARE_L => "["
| SQUARE_R => "]"
| COLON => ":"
| COMMA => ","
end
fun lexer_error pos text = error (text ^ " at character position " ^
Int.toString (pos - 1))
fun token_error pos = lexer_error pos ("Unexpected token")
fun bmp_to_utf8 cp = map (Char.chr o Word.toInt)
(if cp < 0wx80
then [cp]
else if cp < 0wx800
then [Word.orb(0wxc0, Word.>>(cp,0w6)),
Word.orb(0wx80, Word.andb (cp, 0wx3f))]
else if cp < 0wx10000
then [Word.orb(0wxe0,Word.>>(cp, 0w12)),
Word.orb(0wx80, Word.andb(Word.>>(cp,0w6), 0wx3f)),
Word.orb(0wx80,Word.andb(cp, 0wx3f))]
else error ("Invalid BMP point in bmp_to_utf8 " ^ (Word.toString cp)))
fun lexNull pos acc (#"u" :: #"l" :: #"l" :: xs) =
lex (pos + 3) (T.NULL :: acc) xs
| lexNull pos _ _ = token_error pos
and lexTrue pos acc (#"r" :: #"u" :: #"e" :: xs) =
lex (pos + 3) (T.BOOL true :: acc) xs
| lexTrue pos _ _ = token_error pos
and lexFalse pos acc (#"a" :: #"l" :: #"s" :: #"e" :: xs) =
lex (pos + 4) (T.BOOL false :: acc) xs
| lexFalse pos _ _ = token_error pos
and lexChar tok pos acc xs =
lex pos (tok :: acc) xs
and lexString pos acc cc =
let datatype escaped = ESCAPED | NORMAL
fun lexString' pos _ ESCAPED [] =
lexer_error pos "End of input during escape sequence"
| lexString' pos _ NORMAL [] =
lexer_error pos "End of input during string"
| lexString' pos text ESCAPED (x :: xs) =
let fun esc c = lexString' (pos + 1) (c :: text) NORMAL xs
in case x of
#"\"" => esc x
| #"\\" => esc x
| #"/" => esc x
| #"b" => esc #"\b"
| #"f" => esc #"\f"
| #"n" => esc #"\n"
| #"r" => esc #"\r"
| #"t" => esc #"\t"
| _ => lexer_error pos ("Invalid escape \\" ^
Char.toString x)
end
| lexString' pos text NORMAL (#"\\" :: #"u" ::a::b::c::d:: xs) =
if List.all Char.isHexDigit [a,b,c,d]
then case Word.fromString ("0wx" ^ (String.implode [a,b,c,d])) of
SOME w => (let val utf = rev (bmp_to_utf8 w) in
lexString' (pos + 6) (utf @ text)
NORMAL xs
end
handle Fail err => lexer_error pos err)
| NONE => lexer_error pos "Invalid Unicode BMP escape sequence"
else lexer_error pos "Invalid Unicode BMP escape sequence"
| lexString' pos text NORMAL (x :: xs) =
if Char.ord x < 0x20
then lexer_error pos "Invalid unescaped control character"
else
case x of
#"\"" => (rev text, xs, pos + 1)
| #"\\" => lexString' (pos + 1) text ESCAPED xs
| _ => lexString' (pos + 1) (x :: text) NORMAL xs
val (text, rest, newpos) = lexString' pos [] NORMAL cc
in
lex newpos (T.STRING (String.implode text) :: acc) rest
end
and lexNumber firstChar pos acc cc =
let val valid = String.explode ".+-e"
fun lexNumber' pos digits [] = (rev digits, [], pos)
| lexNumber' pos digits (x :: xs) =
if x = #"E" then lexNumber' (pos + 1) (#"e" :: digits) xs
else if Char.isDigit x orelse List.exists (fn c => x = c) valid
then lexNumber' (pos + 1) (x :: digits) xs
else (rev digits, x :: xs, pos)
val (digits, rest, newpos) =
lexNumber' (pos - 1) [] (firstChar :: cc)
in
case digits of
[] => token_error pos
| _ => lex newpos (T.NUMBER digits :: acc) rest
end
and lex _ acc [] = rev acc
| lex pos acc (x::xs) =
(case x of
#" " => lex
| #"\t" => lex
| #"\n" => lex
| #"\r" => lex
| #"{" => lexChar T.CURLY_L
| #"}" => lexChar T.CURLY_R
| #"[" => lexChar T.SQUARE_L
| #"]" => lexChar T.SQUARE_R
| #":" => lexChar T.COLON
| #"," => lexChar T.COMMA
| #"\"" => lexString
| #"t" => lexTrue
| #"f" => lexFalse
| #"n" => lexNull
| x => lexNumber x) (pos + 1) acc xs
fun tokenize_string str = lex 1 [] (String.explode str)
end
\<close>
subsubsection\<open>Parser\<close>
ML\<open>
signature NANO_JSON_PARSER = sig
val json_of_string : string -> Nano_Json_Type.json
val term_of_json_string : string -> term
end
structure Nano_Json_Parser : NANO_JSON_PARSER = struct
open Nano_Json_Type
open Nano_Json_Lexer
fun show [] = "end of input"
| show (tok :: _) = T.string_of_T tok
val parse_error = error
fun parseNumber digits =
let open Char
fun okExpDigits [] = false
| okExpDigits (c :: []) = isDigit c
| okExpDigits (c :: cs) = isDigit c andalso okExpDigits cs
fun okExponent [] = false
| okExponent (#"+" :: cs) = okExpDigits cs
| okExponent (#"-" :: cs) = okExpDigits cs
| okExponent cc = okExpDigits cc
fun okFracTrailing [] = true
| okFracTrailing (c :: cs) =
(isDigit c andalso okFracTrailing cs) orelse
(c = #"e" andalso okExponent cs)
fun okFraction [] = false
| okFraction (c :: cs) =
isDigit c andalso okFracTrailing cs
fun okPosTrailing [] = true
| okPosTrailing (#"." :: cs) = okFraction cs
| okPosTrailing (#"e" :: cs) = okExponent cs
| okPosTrailing (c :: cs) =
isDigit c andalso okPosTrailing cs
fun okPositive [] = false
| okPositive (#"0" :: []) = true
| okPositive (#"0" :: #"." :: cs) = okFraction cs
| okPositive (#"0" :: #"e" :: cs) = okExponent cs
| okPositive (#"0" :: _) = false
| okPositive (c :: cs) = isDigit c andalso okPosTrailing cs
fun okNumber (#"-" :: cs) = okPositive cs
| okNumber cc = okPositive cc
in
if okNumber digits then let
val number = String.implode digits
in
if List.all (Char.isDigit) (String.explode number)
then (case Int.fromString (String.implode digits) of
NONE => parse_error "Number out of range"
| SOME r => INTEGER r)
else (case IEEEReal.fromString (String.implode digits) of
NONE => parse_error "Number out of range"
| SOME r => REAL r)
end
else parse_error ("Invalid number \"" ^ (String.implode digits) ^ "\"")
end
fun parseObject (T.CURLY_R :: xs) = (OBJECT [], xs)
| parseObject tokens =
let fun parsePair (T.STRING key :: T.COLON :: xs) = let
val (j, xs) = parseTokens xs
in
((key, j), xs)
end
| parsePair other =
parse_error("Object key/value pair expected around \"" ^
show other ^ "\"")
fun parseObject' _ [] = parse_error "End of input during object"
| parseObject' acc tokens =
case parsePair tokens of
(pair, T.COMMA :: xs) =>
parseObject' (pair :: acc) xs
| (pair, T.CURLY_R :: xs) =>
(OBJECT (rev (pair :: acc)), xs)
| (_, _) =>parse_error "Expected , or } after object element"
in
parseObject' [] tokens
end
and parseArray (T.SQUARE_R :: xs) = (ARRAY [], xs)
| parseArray tokens =
let fun parseArray' _ [] = error "End of input during array"
| parseArray' acc tokens =
case parseTokens tokens of
(j, T.COMMA :: xs) => parseArray' (j :: acc) xs
| (j, T.SQUARE_R :: xs) => (ARRAY (rev (j :: acc)), xs)
| (_, _) => error "Expected , or ] after array element"
in
parseArray' [] tokens
end
and parseTokens [] = parse_error "Value expected"
| parseTokens (tok :: xs) =
(case tok of
T.NUMBER d => (NUMBER ((parseNumber d)), xs)
| T.STRING s => (STRING s, xs)
| T.BOOL b => (BOOL b, xs)
| T.NULL => (NULL, xs)
| T.CURLY_L => parseObject xs
| T.SQUARE_L => parseArray xs
| _ => parse_error ("Unexpected token " ^ T.string_of_T tok ^
" before " ^ show xs))
fun json_of_string str = case parseTokens (Nano_Json_Lexer.tokenize_string str) of
(value, []) => value
| (_, _) => parse_error "Extra data after input"
val term_of_json_string = term_of_json o json_of_string
end
\<close>
subsection\<open>Isar Setup\<close>
subsubsection\<open>The JSON Cartouche\<close>
syntax "_cartouche_nano_json" :: "cartouche_position \<Rightarrow> 'a" ("JSON _")
parse_translation\<open>
let
fun translation args =
let
fun err () = raise TERM ("Common._cartouche_nano_json", args)
fun input s pos = Symbol_Pos.implode (Symbol_Pos.cartouche_content (Symbol_Pos.explode (s, pos)))
in
case args of
[(c as Const (@{syntax_const "_constrain"}, _)) $ Free (s, _) $ p] =>
(case Term_Position.decode_position p of
SOME (pos, _) => c $ Nano_Json_Parser.term_of_json_string (input s pos) $ p
| NONE => err ())
| _ => err ()
end
in
[(@{syntax_const "_cartouche_nano_json"}, K translation)]
end
\<close>
subsubsection\<open>Isar Top-Level Commands\<close>
ML\<open>
structure Nano_Json_Parser_Isar = struct
fun make_const_def (constname, trm) lthy = let
val arg = ((Binding.name constname, NoSyn), ((Binding.name (constname^"_def"),[]), trm))
val ((_, (_ , thm)), lthy') = Local_Theory.define arg lthy
in
(thm, lthy')
end
fun def_json name json = snd o (make_const_def (name, Nano_Json_Parser.term_of_json_string json ))
fun def_json_file name filename lthy = let
val filename = Path.explode filename
val thy = Proof_Context.theory_of lthy
val master_dir = Resources.master_directory thy
val abs_filename = if (Path.is_absolute filename)
then filename
else Path.append master_dir filename
val json = File.read abs_filename
in
def_json name json lthy
end
val jsonFileP = Parse.name -- Parse.name
val jsonP = Parse.name -- Parse.cartouche
end
\<close>
ML\<open>
val _ = Outer_Syntax.local_theory @{command_keyword "definition_JSON"} "Define JSON."
(Nano_Json_Parser_Isar.jsonP >> (fn (name, json) => Nano_Json_Parser_Isar.def_json name json));
val _ = Outer_Syntax.local_theory @{command_keyword "import_JSON"} "Define JSON from file."
(Nano_Json_Parser_Isar.jsonFileP >> (fn (name, filename) => Nano_Json_Parser_Isar.def_json_file name filename));
\<close>
subsection\<open>Examples\<close>
text\<open>
Now we can use the JSON Cartouche for defining JSON-like data ``on-the-fly'', e.g.:
\<close>
lemma \<open>y == JSON\<open>{"name": [true,false,"test"]}\<close>\<close>
oops
text\<open>
Note that you need to escape quotes within the JSON Cartouche, if you are using
quotes as lemma delimiters, e.g.,:
\<close>
lemma "y == JSON\<open>{\"name\": [true,false,\"test\"]}\<close>"
oops
text\<open>
Thus, we recommend using the cartouche delimiters when using the JSON Cartouche with
non-trivial data structures:
\<close>
lemma \<open> example01 == JSON \<open>{"menu": {
"id": "file",
"value": "File",
"popup": {
"menuitem": [
{"value": "New", "onclick": "CreateNewDoc()"},
{"value": "Open", "onclick": "OpenDoc()"},
{"value": "Close", "onclick": "CloseDoc()"}
]
}
}}\<close>\<close>
by(simp add: example01_def)
text\<open>
Using the top level Isar commands defined in the last section, we can now easily define
JSON-like data:
\<close>
definition_JSON example02 \<open>
{"menu": {
"id": "file",
"value": "File",
"popup": {
"menuitem": [
{"value": "New", "onclick": "CreateNewDoc()"},
{"value": "Open", "onclick": "OpenDoc()"},
{"value": "Close", "onclick": "CloseDoc()"}
]
}
}}
\<close>
thm example02_def
lemma "example01 = example02"
by(simp add: example01_def example02_def)
text\<open>
Moreover, we can import JSON from external files:
\<close>
import_JSON example03 "example.json"
thm example03_def
lemma "example01 = example03"
by(simp add: example01_def example03_def)
section\<open>Serializing Nano JSON\<close>
text\<open>
In this section, we define the necessary infrastructure to serialize (export) data from HOL using
a JSON-like data structure that other JSON tools should be able to import.
\<close>
subsection\<open>ML Implementation\<close>
ML\<open>
signature NANO_JSON_SERIALIZER = sig
val serialize_json: Nano_Json_Type.json -> string
val serialize_json_pretty: Nano_Json_Type.json -> string
val serialize_term: term -> string
val serialize_term_pretty: term -> string
end
structure Nano_Json_Serializer : NANO_JSON_SERIALIZER = struct
open Nano_Json_Type
fun escapeJsonString s =
let fun bs c = "\\"^(Char.toString c)
fun escape #"\"" = bs #"\""
| escape #"\\" = bs #"\\"
| escape #"\b" = bs #"b"
| escape #"\f" = bs #"f"
| escape #"\n" = bs #"n"
| escape #"\r" = bs #"r"
| escape #"\t" = bs #"t"
| escape c =
let val ord = Char.ord c
in
if ord < 0x20
then let val hex = Word.toString (Word.fromInt ord)
val prfx = if ord < 0x10 then "\\u000" else "\\u00"
in
prfx^hex
end
else
Char.toString c
end
in
String.concat (map escape (String.explode s))
end
fun serialize pretty json = let
val nl = if pretty = NONE then "" else "\n"
fun indent' 0 = ""
| indent' n = " "^(indent' (n-1))
fun indent n = (case pretty of NONE => ""
| SOME n' => indent' (n+n'))
fun serialize' _ (OBJECT []) = "{}"
| serialize' _ (ARRAY []) = "[]"
| serialize' n (OBJECT pp) = "{"^nl^(indent (n+1)) ^ String.concatWith
(","^nl^(indent (n+1)))
(map (fn (key, value) =>
serialize' (n+1) (STRING key) ^ ":" ^
serialize' (n+1) value) pp) ^
nl^(indent n)^"}"
| serialize' n (ARRAY arr) = "["^nl^(indent (n+1)) ^ String.concatWith
(","^nl^(indent (n+1)))
(map (serialize' (n+1) ) arr) ^
nl^(indent n)^"]"
| serialize' _ (NUMBER (REAL n)) = String.implode (map (fn #"~" => #"-" | c => c)
(String.explode (IEEEReal.toString n)))
| serialize' _ (NUMBER (INTEGER n)) = String.implode (map (fn #"~" => #"-" | c => c)
(String.explode (Int.toString n)))
| serialize' _ (STRING s) = "\"" ^ escapeJsonString s ^ "\""
| serialize' _ (BOOL b) = Bool.toString b
| serialize' _ NULL = "null"
in
(serialize' 0 json)^nl
end
val serialize_json = serialize NONE
val serialize_json_pretty = serialize (SOME 0)
val serialize_term = (serialize NONE) o json_of_term
val serialize_term_pretty = (serialize (SOME 0)) o json_of_term
end
\<close>
subsection\<open>Isar Setup\<close>
ML\<open>
structure Nano_Json_Serialize_Isar = struct
fun export_json ctxt json_const filename =
let
val thy = Proof_Context.theory_of ctxt
val master_dir = Resources.master_directory thy
val term = Thm.concl_of (Global_Theory.get_thm thy (json_const^"_def"))
val json_term = case term of
Const (@{const_name "Pure.eq"}, _) $ _ $ json_term => json_term
| _ $ (_ $ json_term) => json_term
| _ => error ("Term structure not supported: "
^(Sledgehammer_Util.hackish_string_of_term ctxt term))
val json_string = Nano_Json_Serializer.serialize_term_pretty json_term
in
case filename of
SOME filename => let
val filename = Path.explode filename
val abs_filename = if (Path.is_absolute filename)
then filename
else Path.append master_dir filename
in
File.write (abs_filename) json_string
handle (IO.Io{name=name,...}) => warning ("Could not write \""^name^"\".")
end
| NONE => tracing json_string
end
end
\<close>
ML\<open>
Outer_Syntax.command ("serialize_JSON", Position.none) "export JSON data to an external file"
(Parse.name -- Scan.option Parse.name >> (fn (const_name,filename) =>
(Toplevel.keep (fn state => Nano_Json_Serialize_Isar.export_json (Toplevel.context_of state) const_name filename))));
\<close>
subsection\<open>Examples\<close>
text\<open>
We can now serialize JSON and print the result in the output window of Isabelle/HOL:
\<close>
serialize_JSON example01
text\<open>
Alternatively, we can export the serialized JSON into a file:
\<close>
serialize_JSON example01 example01.json
section\<open>Putting Everything Together\<close>
text\<open>
For convenience, we provide an ML structure that provides access to both the parser and the
serializer:
\<close>
ML\<open>
structure Nano_Json = struct
open Nano_Json_Type
open Nano_Json_Parser
open Nano_Json_Serializer
end
\<close>
end

README.md

@ -5,7 +5,7 @@ functionality to [Isabelle](https://isabelle.in.tum.de) or showcase
specific functionality. The individual hacks usually consist of
a single theory file and all documentation is contained in that
theory file. The master branch should work with the latest official
release of Isabelle (Isabelle 2021, at time of writing), hacks for
release of Isabelle (Isabelle 2022, at time of writing), hacks for
older versions might be available on a dedicated branch.
## List of Isabelle Hacks
@ -14,6 +14,13 @@ older versions might be available on a dedicated branch.
that provides a simple way for specifying assertions that Isabelle
checks while processing a theory.
* [Code_Reflection.thy](Code_Reflection.thy) provides a new top-level
command for reflecting generated SML code into Isabelle's ML
environment.
* [Fxp.thy](Fxp.thy) provides Isabelle support for The Functional XML
Parser (fxp).
* [Hiding_Type_Variables.thy](Hiding_Type_Variables.thy) provides
a setup for defining default type variables of type
constructors. The default type variables can be hidden in output,
@ -22,18 +29,28 @@ older versions might be available on a dedicated branch.
which (sometimes) helps to focus on the important parts of complex
type declarations.
* [Nano_JSON.thy](Nano_JSON.thy) provides support for a JSON-like
data exchange for Isabelle/HOL.
* [Code_Reflection.thy](Code_Reflection.thy) provides a new top-level
command for reflecting generated SML code into Isabelle's ML
environment.
* [ml_yacc_lib.thy](ml_yacc_lib.thy) provides Isabelle support for parsers
* [Ml_Yacc_Lib.thy](Ml_Yacc_Lib.thy) provides Isabelle support for parsers
generated by ml-yacc (part of sml/NJ).
* [fxp.thy](fxp.thy) provides Isabelle support for The Functional XML
Parser (fxp).
* [Simple_Oracle.thy](Simple_Oracle.thy) provides an example on integrating
an external tool as simple oracle or counter example generator, similar
to the built-in quickcheck.
### No Longer Maintained Hacks
* Nano_JSON.thy (support for a JSON-like data exchange for Isabelle/HOL)
has been developed into an AFP entry "[Nano_JSON](https://www.isa-afp.org/entries/Nano_JSON.html)",
which contains documentation and examples for using JSON encoded data with
Isabelle/HOL and Isabelle/ML.
## Authors
Main author: [Achim D. Brucker](http://www.brucker.ch/)
## License
If not otherwise stated, all hacks are licensed under a 2-clause
BSD-style license.
## Authors
@ -46,8 +63,8 @@ BSD-style license.
SPDX-License-Identifier: BSD-2-Clause
## Master Repository
## Upstream Repository
The master git repository for this project is hosted by the [Software
Assurance & Security Research Team](https://logicalhacking.com) at
The upstream git repository, i.e., the single source of truth, for this project is hosted
by the [Software Assurance & Security Research Team](https://logicalhacking.com) at
<https://git.logicalhacking.com/adbrucker/isabelle-hacks>.

ROOT, 8 lines changed

@ -2,10 +2,10 @@ session "isabelle-hacks" = "HOL" +
options [timeout = 600, document = pdf, document_variants="document:outline=/proof,/ML",document_output=output]
theories
Assert
Hiding_Type_Variables
Nano_JSON
Code_Reflection
fxp
ml_yacc_lib
Fxp
Hiding_Type_Variables
Ml_Yacc_Lib
Simple_Oracle
document_files
root.tex

Simple_Oracle.thy (new file), 147 lines added

@ -0,0 +1,147 @@
(***********************************************************************************
* Copyright (c) 2022 Achim D. Brucker
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* SPDX-License-Identifier: BSD-2-Clause
***********************************************************************************)
section\<open>Implementation\<close>
theory
Simple_Oracle
imports
Main
keywords
"check" :: diag
begin
subsection\<open>Implementation\<close>
text\<open>The following ML-block defines a configuration attribute named
oracle\_path that allows for configuring the name and path of the
oracle binary (inspect the theory oracle\_example for an example of how to use
this configuration attribute).\<close>
ML\<open>
val oracle_path = let
val (oracle_path_config, oracle_path_setup) =
Attrib.config_string (Binding.name "oracle_path") (K "oracle")
in
Context.>>(Context.map_theory oracle_path_setup);
oracle_path_config
end
\<close>
text\<open>The following ML-Block defines a structure oracle that provides two methods:
\<^item> `exec' is a basic method that calls the oracle on the provided input file and returns
the output of the oracle process as a string
\<^item> `oracle\_cmd' is a method that obtains the formula of subgoal i of the current
proof state and converts it to a string that is written into a temporary file. The
previously described method exec is invoked on this file name. The output is finally
written to the output window of Isabelle.
\<close>
ML\<open>
structure oracle = struct
open OS.FileSys OS.Process
fun exec {oracle_path, error_detail} filename = let
val tmpname = tmpName()
val err_tmpname = tmpName()
fun plural 1 = "" | plural _ = "s"
val oracle = case oracle_path of
SOME s => s
| NONE => raise error ("oracle_path not specified")
val cmdline = oracle ^ " \"" ^ filename ^ "\" > " ^ tmpname ^ " 2> " ^ err_tmpname
val sysres = system cmdline
val (errmsg, rest) = File.read_lines (Path.explode err_tmpname) |> chop error_detail
val msg = cat_lines (File.read_lines (Path.explode tmpname))
val _ = OS.FileSys.remove err_tmpname
val _ = OS.FileSys.remove tmpname
val _ = if isSuccess (sysres) then ()
else let
val _ = warning ("oracle failed on " ^ filename ^ "\nCommand: " ^ cmdline ^
"\n\nOutput:\n" ^
cat_lines (errmsg @ (if null rest then [] else
["(... " ^ string_of_int (length rest) ^
" more line" ^ plural (length rest) ^ ")"])))
in raise error ("oracle failed on " ^ filename) end
in
msg
end
fun oracle_cmd args i st = let
val thy = Toplevel.theory_of st
val proof_state = (Toplevel.proof_of st)
val ctx = Proof.context_of proof_state
val goal_term = Logic.get_goal (Thm.prop_of (#goal (Proof.goal proof_state))) i
val goal_str = Sledgehammer_Util.hackish_string_of_term ctx goal_term
val tmpname = tmpName()
val _ = File.write (Path.explode tmpname) goal_str
val oracle = Config.get_global thy oracle_path
val res = exec {error_detail=5, oracle_path = SOME oracle}
(Path.implode (Path.explode tmpname))
in
writeln(res)
end
end
\<close>
text\<open>This ML-block provides the high-level Isar command check.\<close>
ML\<open>
val parse_arg =
Parse.name --
(Scan.optional (\<^keyword>\<open>=\<close> |--
(((Parse.name || Parse.float_number) >> single) ||
(\<^keyword>\<open>[\<close> |-- Parse.list1 Parse.name --| \<^keyword>\<open>]\<close>))) ["true"]);
val parse_args =
\<^keyword>\<open>[\<close> |-- Parse.list1 parse_arg --| \<^keyword>\<open>]\<close> || Scan.succeed [];
val _ =
Outer_Syntax.command \<^command_keyword>\<open>check\<close>
"search for counter example using an external oracle"
(parse_args -- Scan.optional Parse.nat 1 >>
(fn (args, i) => Toplevel.keep_proof (oracle.oracle_cmd args i)));
\<close>
subsection\<open>Example\<close>
text\<open>This is a simple example of providing a new top-level Isar command that
analyses the current goal using an external system command. The example
setup provides a fake implementation of such a program as a shell script,
named ``bin/oracle''. We use the declare command to configure the
oracle\_path attribute to point to this shell script: \<close>
declare [[oracle_path="Simple_Oracle/bin/oracle"]]
lemma "True = False \<or> False"
text\<open>The following command reads, by default, the first subgoal (it also takes
the subgoal number as an optional argument) and invokes the external tool on
this formula. The output is shown in the output window of Isabelle/jEdit. Note that
the proof state is not modified. The command fails if the external process returns
an error (i.e., a non-zero exit code).
\<close>
check
oops
end

Simple_Oracle/bin/oracle (new executable file), 13 lines added

@ -0,0 +1,13 @@
#!/bin/bash
echo "Welcome to Simple Oracle 0.0"
echo "Ignoring counter examples since 1842"
echo ""
echo "Analyzing '$(<$1)' ..."
sleep 1
result=$(($RANDOM % 2))
if [[ "$result" -eq 1 ]]; then
echo "Counter example found ..."
else
echo "Inconclusive ..."
fi
exit

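Since exec in Simple_Oracle.thy simply passes a file name holding the goal to the configured oracle, the fake oracle above can also be exercised manually; the goal file path below is hypothetical:

echo 'True = False \<or> False' > /tmp/goal.txt
./Simple_Oracle/bin/oracle /tmp/goal.txt
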
example.json (deleted), 11 lines removed

@ -1,11 +0,0 @@
{"menu": {
"id": "file",
"value": "File",
"popup": {
"menuitem": [
{"value": "New", "onclick": "CreateNewDoc()"},
{"value": "Open", "onclick": "OpenDoc()"},
{"value": "Close", "onclick": "CloseDoc()"}
]
}
}}