From 54718131e5542002dca67e99f65a14176cbef520 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 19 Jul 2019 23:30:40 +0100 Subject: [PATCH 01/34] Port to Isabelle 2019. --- document-generator/Tools/DOF_mkroot | 46 +++++++++++------------------ 1 file changed, 18 insertions(+), 28 deletions(-) diff --git a/document-generator/Tools/DOF_mkroot b/document-generator/Tools/DOF_mkroot index fbdb026..d2d6db0 100755 --- a/document-generator/Tools/DOF_mkroot +++ b/document-generator/Tools/DOF_mkroot @@ -39,7 +39,6 @@ function usage() echo echo " Options are:" echo " -h print this help text and exit" - echo " -d enable document preparation" echo " -n NAME alternative session name (default: DIR base name)" echo " -o ONTOLOGY (default: $DEFAULT_ONTOLOGY)" echo " Available ontologies:" @@ -72,7 +71,6 @@ function fail() # options -DOC="" NAME="" DEFAULT_TEMPLATE="scrreprt" DEFAULT_ONTOLOGY="core" @@ -87,9 +85,6 @@ do usage exit 1 ;; - d) - DOC="true" - ;; n) NAME="$OPTARG" ;; @@ -132,28 +127,23 @@ if [ -z "$NAME" ]; then NAME="$DIR" fi -if [ "$DOC" = true ]; then - $ISABELLE_TOOL mkroot -d -n "$NAME" "$DIR" - echo " \"preamble.tex\"" >> "$DIR"/ROOT - echo " \"build\"" >> "$DIR"/ROOT - sed -i -e "s/root.tex/isadof.cfg/" "$DIR"/ROOT - sed -i -e "s/HOL/Isabelle_DOF/" "$DIR"/ROOT - rm -f "$DIR"/document/root.tex +$ISABELLE_TOOL mkroot -n "$NAME" "$DIR" +echo " \"preamble.tex\"" >> "$DIR"/ROOT +echo " \"build\"" >> "$DIR"/ROOT +sed -i -e "s/root.tex/isadof.cfg/" "$DIR"/ROOT +sed -i -e "s/HOL/Isabelle_DOF/" "$DIR"/ROOT +rm -f "$DIR"/document/root.tex - # Creating isadof.cfg - echo "Template: $TEMPLATE" > "$DIR"/document/isadof.cfg - cp "$ISABELLE_HOME_USER/DOF/document-template/build" "$DIR"/document/ - for o in $ONTOLOGY; do - echo "Ontology: $o" >> "$DIR"/document/isadof.cfg; - done +# Creating isadof.cfg +echo "Template: $TEMPLATE" > "$DIR"/document/isadof.cfg +cp "$ISABELLE_HOME_USER/DOF/document-template/build" "$DIR"/document/ +for o in $ONTOLOGY; do + echo "Ontology: $o" >> "$DIR"/document/isadof.cfg; +done - # Creating praemble.tex - TITLE=$(echo "$NAME" | tr _ - | tr -d '\\') - AUTHOR=$(echo "By $USER" | tr _ - | tr -d '\\') - echo "%% This is a placeholder for user-specific configuration and packages." >> "$DIR"/document/preamble.tex - echo "\\title{$TITLE}{}{}{}{}{}{}" >> "$DIR"/document/preamble.tex - echo "\\author{$AUTHOR}{}{}{}{}{}" >> "$DIR"/document/preamble.tex - -else - $ISABELLE_TOOL mkroot -n "$NAME" -fi +# Creating praemble.tex +TITLE=$(echo "$NAME" | tr _ - | tr -d '\\') +AUTHOR=$(echo "By $USER" | tr _ - | tr -d '\\') +echo "%% This is a placeholder for user-specific configuration and packages." >> "$DIR"/document/preamble.tex +echo "\\title{$TITLE}{}{}{}{}{}{}" >> "$DIR"/document/preamble.tex +echo "\\author{$AUTHOR}{}{}{}{}{}" >> "$DIR"/document/preamble.tex From b547d0cae8932cefd5f16a5320038397f0ab86ba Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 19 Jul 2019 23:32:26 +0100 Subject: [PATCH 02/34] Print messages to stderr to ensure visibility in logs. --- document-generator/document-template/build | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/document-generator/document-template/build b/document-generator/document-template/build index c4a05ab..da171c0 100755 --- a/document-generator/document-template/build +++ b/document-generator/document-template/build @@ -28,17 +28,17 @@ set -e if [ ! 
-f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then - echo "" - echo "Error: Isabelle/DOF not installed" - echo "=====" - echo "This is a Isabelle/DOF project. The document preparation requires" - echo "the Isabelle/DOF framework. Please obtain the framework by cloning" - echo "the Isabelle/DOF git repository, i.e.: " - echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF" - echo "You can install the framework as follows:" - echo " cd Isabelle_DOF/document-generator" - echo " ./install" - echo "" + >&2 echo "" + >&2 echo "Error: Isabelle/DOF not installed" + >&2 echo "=====" + >&2 echo "This is a Isabelle/DOF project. The document preparation requires" + >&2 echo "the Isabelle/DOF framework. Please obtain the framework by cloning" + >&2 echo "the Isabelle/DOF git repository, i.e.: " + >&2 echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF" + >&2 echo "You can install the framework as follows:" + >&2 echo " cd Isabelle_DOF/document-generator" + >&2 echo " ./install" + >&2 echo "" exit 1 fi From 932b19f93deacac6e1c9c0ea6c1e9650783f9500 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 19 Jul 2019 23:44:49 +0100 Subject: [PATCH 03/34] Cleanup. --- patches/thy_output.orig18.ML | 543 ----------------------------------- 1 file changed, 543 deletions(-) delete mode 100644 patches/thy_output.orig18.ML diff --git a/patches/thy_output.orig18.ML b/patches/thy_output.orig18.ML deleted file mode 100644 index 7cd2fff..0000000 --- a/patches/thy_output.orig18.ML +++ /dev/null @@ -1,543 +0,0 @@ -(* Title: Pure/Thy/thy_output.ML - Author: Makarius - -Theory document output. -*) - -signature THY_OUTPUT = -sig - val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list - val check_comments: Proof.context -> Symbol_Pos.T list -> unit - val output_token: Proof.context -> Token.T -> Latex.text list - val output_source: Proof.context -> string -> Latex.text list - type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state} - val present_thy: Options.T -> theory -> segment list -> Latex.text list - val pretty_term: Proof.context -> term -> Pretty.T - val pretty_thm: Proof.context -> thm -> Pretty.T - val lines: Latex.text list -> Latex.text list - val items: Latex.text list -> Latex.text list - val isabelle: Proof.context -> Latex.text list -> Latex.text - val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text - val typewriter: Proof.context -> string -> Latex.text - val verbatim: Proof.context -> string -> Latex.text - val source: Proof.context -> Token.src -> Latex.text - val pretty: Proof.context -> Pretty.T -> Latex.text - val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text - val pretty_items: Proof.context -> Pretty.T list -> Latex.text - val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text - val antiquotation_pretty: - binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory - val antiquotation_pretty_source: - binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory - val antiquotation_raw: - binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory - val antiquotation_verbatim: - binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory -end; - -structure Thy_Output: THY_OUTPUT = -struct - -(* output document source *) - -val output_symbols = single o Latex.symbols_output; - -fun output_comment ctxt (kind, 
syms) = - (case kind of - Comment.Comment => - Input.cartouche_content syms - |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false) - {markdown = false} - |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}" - | Comment.Cancel => - Symbol_Pos.cartouche_content syms - |> output_symbols - |> Latex.enclose_body "%\n\\isamarkupcancel{" "}" - | Comment.Latex => - [Latex.symbols (Symbol_Pos.cartouche_content syms)]) -and output_comment_document ctxt (comment, syms) = - (case comment of - SOME kind => output_comment ctxt (kind, syms) - | NONE => [Latex.symbols syms]) -and output_document_text ctxt syms = - Comment.read_body syms |> maps (output_comment_document ctxt) -and output_document ctxt {markdown} source = - let - val pos = Input.pos_of source; - val syms = Input.source_explode source; - - val output_antiquotes = - maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt); - - fun output_line line = - (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @ - output_antiquotes (Markdown.line_content line); - - fun output_block (Markdown.Par lines) = - Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines)) - | output_block (Markdown.List {kind, body, ...}) = - Latex.environment_block (Markdown.print_kind kind) (output_blocks body) - and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks); - in - if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then [] - else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms - then - let - val ants = Antiquote.parse_comments pos syms; - val reports = Antiquote.antiq_reports ants; - val blocks = Markdown.read_antiquotes ants; - val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks); - in output_blocks blocks end - else - let - val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms); - val reports = Antiquote.antiq_reports ants; - val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants); - in output_antiquotes ants end - end; - - -(* output tokens with formal comments *) - -local - -val output_symbols_antiq = - (fn Antiquote.Text syms => output_symbols syms - | Antiquote.Control {name = (name, _), body, ...} => - Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) :: - output_symbols body - | Antiquote.Antiq {body, ...} => - Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body)); - -fun output_comment_symbols ctxt {antiq} (comment, syms) = - (case (comment, antiq) of - (NONE, false) => output_symbols syms - | (NONE, true) => - Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms - |> maps output_symbols_antiq - | (SOME comment, _) => output_comment ctxt (comment, syms)); - -fun output_body ctxt antiq bg en syms = - Comment.read_body syms - |> maps (output_comment_symbols ctxt {antiq = antiq}) - |> Latex.enclose_body bg en; - -in - -fun output_token ctxt tok = - let - fun output antiq bg en = - output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok)); - in - (case Token.kind_of tok of - Token.Comment NONE => [] - | Token.Command => output false "\\isacommand{" "}" - | Token.Keyword => - if Symbol.is_ascii_identifier (Token.content_of tok) - then output false "\\isakeyword{" "}" - else output false "" "" - | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" - | Token.Alt_String => output false "{\\isacharbackquoteopen}" 
"{\\isacharbackquoteclose}" - | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" - | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}" - | _ => output false "" "") - end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok)); - -fun output_source ctxt s = - output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none)); - -fun check_comments ctxt = - Comment.read_body #> List.app (fn (comment, syms) => - let - val pos = #1 (Symbol_Pos.range syms); - val _ = - comment |> Option.app (fn kind => - Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind))); - val _ = output_comment_symbols ctxt {antiq = false} (comment, syms); - in if comment = SOME Comment.Comment then check_comments ctxt syms else () end); - -end; - - - -(** present theory source **) - -(*NB: arranging white space around command spans is a black art*) - -val is_white = Token.is_space orf Token.is_informal_comment; -val is_black = not o is_white; - -val is_white_comment = Token.is_informal_comment; -val is_black_comment = Token.is_formal_comment; - - -(* presentation tokens *) - -datatype token = - Ignore_Token - | Basic_Token of Token.T - | Markup_Token of string * Input.source - | Markup_Env_Token of string * Input.source - | Raw_Token of Input.source; - -fun basic_token pred (Basic_Token tok) = pred tok - | basic_token _ _ = false; - -val white_token = basic_token is_white; -val white_comment_token = basic_token is_white_comment; -val blank_token = basic_token Token.is_blank; -val newline_token = basic_token Token.is_newline; - -fun present_token ctxt tok = - (case tok of - Ignore_Token => [] - | Basic_Token tok => output_token ctxt tok - | Markup_Token (cmd, source) => - Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n" - (output_document ctxt {markdown = false} source) - | Markup_Env_Token (cmd, source) => - [Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)] - | Raw_Token source => - Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]); - - -(* command spans *) - -type command = string * Position.T * string list; (*name, position, tags*) -type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*) - -datatype span = Span of command * (source * source * source * source) * bool; - -fun make_span cmd src = - let - fun chop_newline (tok :: toks) = - if newline_token (fst tok) then ([tok], toks, true) - else ([], tok :: toks, false) - | chop_newline [] = ([], [], false); - val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = - src - |> chop_prefix (white_token o fst) - ||>> chop_suffix (white_token o fst) - ||>> chop_prefix (white_comment_token o fst) - ||> chop_newline; - in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; - - -(* present spans *) - -local - -fun err_bad_nesting pos = - error ("Bad nesting of commands in presentation" ^ pos); - -fun edge which f (x: string option, y) = - if x = y then I - else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt))); - -val begin_tag = edge #2 Latex.begin_tag; -val end_tag = edge #1 Latex.end_tag; -fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e; -fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e; - -fun read_tag s = - (case space_explode "%" s of - ["", b] => (SOME b, NONE) - 
| [a, b] => (NONE, SOME (a, b)) - | _ => error ("Bad document_tags specification: " ^ quote s)); - -in - -fun make_command_tag options keywords = - let - val document_tags = - map read_tag (space_explode "," (Options.string options \<^system_option>\document_tags\)); - val document_tags_default = map_filter #1 document_tags; - val document_tags_command = map_filter #2 document_tags; - in - fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' => - let - val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); - - val keyword_tags = - if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"] - else Keyword.command_tags keywords cmd_name; - val command_tags = - the_list (AList.lookup (op =) document_tags_command cmd_name) @ - keyword_tags @ document_tags_default; - - val active_tag' = - if is_some tag' then tag' - else - (case command_tags of - default_tag :: _ => SOME default_tag - | [] => - if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state - then active_tag - else NONE); - in {tag' = tag', active_tag' = active_tag'} end - end; - -fun present_span thy command_tag span state state' - (tag_stack, active_tag, newline, latex, present_cont) = - let - val ctxt' = - Toplevel.presentation_context state' - handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN; - val present = fold (fn (tok, (flag, 0)) => - fold cons (present_token ctxt' tok) - #> cons (Latex.string flag) - | _ => I); - - val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span; - - val (tag, tags) = tag_stack; - val {tag', active_tag'} = - command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag} - state state'; - val edge = (active_tag, active_tag'); - - val nesting = Toplevel.level state' - Toplevel.level state; - - val newline' = - if is_none active_tag' then span_newline else newline; - - val tag_stack' = - if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack - else if nesting >= 0 then (tag', replicate nesting tag @ tags) - else - (case drop (~ nesting - 1) tags of - tg :: tgs => (tg, tgs) - | [] => err_bad_nesting (Position.here cmd_pos)); - - val latex' = - latex - |> end_tag edge - |> close_delim (fst present_cont) edge - |> snd present_cont - |> open_delim (present (#1 srcs)) edge - |> begin_tag edge - |> present (#2 srcs); - val present_cont' = - if newline then (present (#3 srcs), present (#4 srcs)) - else (I, present (#3 srcs) #> present (#4 srcs)); - in (tag_stack', active_tag', newline', latex', present_cont') end; - -fun present_trailer ((_, tags), active_tag, _, latex, present_cont) = - if not (null tags) then err_bad_nesting " at end of theory" - else - latex - |> end_tag (active_tag, NONE) - |> close_delim (fst present_cont) (active_tag, NONE) - |> snd present_cont; - -end; - - -(* present_thy *) - -local - -val markup_true = "\\isamarkuptrue%\n"; -val markup_false = "\\isamarkupfalse%\n"; - -val space_proper = - Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black; - -val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore); -val improper = Scan.many is_improper; -val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper)); -val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank)); - -val opt_newline = Scan.option (Scan.one Token.is_newline); - -val ignore = - Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore - >> pair (d + 1)) || - Scan.depend (fn d => Scan.one 
Token.is_end_ignore --| - (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) - >> pair (d - 1)); - -val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end); - -val locale = - Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |-- - Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")"))); - -in - -type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}; - -fun present_thy options thy (segments: segment list) = - let - val keywords = Thy_Header.get_keywords thy; - - - (* tokens *) - - val ignored = Scan.state --| ignore - >> (fn d => (NONE, (Ignore_Token, ("", d)))); - - fun markup pred mk flag = Scan.peek (fn d => - improper |-- - Parse.position (Scan.one (fn tok => - Token.is_command tok andalso pred keywords (Token.content_of tok))) -- - Scan.repeat tag -- - Parse.!!!! ((improper -- locale -- improper) |-- Parse.document_source --| improper_end) - >> (fn (((tok, pos'), tags), source) => - let val name = Token.content_of tok - in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end)); - - val command = Scan.peek (fn d => - Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] -- - Scan.one Token.is_command -- Scan.repeat tag - >> (fn ((cmd_mod, cmd), tags) => - map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @ - [(SOME (Token.content_of cmd, Token.pos_of cmd, tags), - (Basic_Token cmd, (markup_false, d)))])); - - val cmt = Scan.peek (fn d => - Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); - - val other = Scan.peek (fn d => - Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); - - val tokens = - (ignored || - markup Keyword.is_document_heading Markup_Token markup_true || - markup Keyword.is_document_body Markup_Env_Token markup_true || - markup Keyword.is_document_raw (Raw_Token o #2) "") >> single || - command || - (cmt || other) >> single; - - - (* spans *) - - val is_eof = fn (_, (Basic_Token x, _)) => Token.is_eof x | _ => false; - val stopper = Scan.stopper (K (NONE, (Basic_Token Token.eof, ("", 0)))) is_eof; - - val cmd = Scan.one (is_some o fst); - val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; - - val white_comments = Scan.many (white_comment_token o fst o snd); - val blank = Scan.one (blank_token o fst o snd); - val newline = Scan.one (newline_token o fst o snd); - val before_cmd = - Scan.option (newline -- white_comments) -- - Scan.option (newline -- white_comments) -- - Scan.option (blank -- white_comments) -- cmd; - - val span = - Scan.repeat non_cmd -- cmd -- - Scan.repeat (Scan.unless before_cmd non_cmd) -- - Scan.option (newline >> (single o snd)) - >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => - make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); - - val spans = segments - |> maps (Command_Span.content o #span) - |> drop_suffix Token.is_space - |> Source.of_list - |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat)) - |> Source.source stopper (Scan.error (Scan.bulk span)) - |> Source.exhaust; - - val command_results = - segments |> map_filter (fn {command, state, ...} => - if Toplevel.is_ignored command then NONE else SOME (command, state)); - - - (* present commands *) - - val command_tag = make_command_tag options keywords; - - fun present_command tr span st st' = - Toplevel.setmp_thread_position tr (present_span thy command_tag span st st'); - - fun present _ [] = I - | present st ((span, (tr, 
st')) :: rest) = present_command tr span st st' #> present st' rest; - in - if length command_results = length spans then - ((NONE, []), NONE, true, [], (I, I)) - |> present Toplevel.toplevel (spans ~~ command_results) - |> present_trailer - |> rev - else error "Messed-up outer syntax for presentation" - end; - -end; - - - -(** standard output operations **) - -(* pretty printing *) - -fun pretty_term ctxt t = - Syntax.pretty_term (Variable.auto_fixes t ctxt) t; - -fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; - - -(* default output *) - -val lines = separate (Latex.string "\\isanewline%\n"); -val items = separate (Latex.string "\\isasep\\isanewline%\n"); - -fun isabelle ctxt body = - if Config.get ctxt Document_Antiquotation.thy_output_display - then Latex.environment_block "isabelle" body - else Latex.block (Latex.enclose_body "\\isa{" "}" body); - -fun isabelle_typewriter ctxt body = - if Config.get ctxt Document_Antiquotation.thy_output_display - then Latex.environment_block "isabellett" body - else Latex.block (Latex.enclose_body "\\isatt{" "}" body); - -fun typewriter ctxt s = - isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)]; - -fun verbatim ctxt = - if Config.get ctxt Document_Antiquotation.thy_output_display - then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt - else split_lines #> map (typewriter ctxt) #> lines #> Latex.block; - -fun source ctxt = - Token.args_of_src - #> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt) - #> space_implode " " - #> output_source ctxt - #> isabelle ctxt; - -fun pretty ctxt = - Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt; - -fun pretty_source ctxt src prt = - if Config.get ctxt Document_Antiquotation.thy_output_source - then source ctxt src else pretty ctxt prt; - -fun pretty_items ctxt = - map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt; - -fun pretty_items_source ctxt src prts = - if Config.get ctxt Document_Antiquotation.thy_output_source - then source ctxt src else pretty_items ctxt prts; - - -(* antiquotation variants *) - -fun antiquotation_pretty name scan f = - Document_Antiquotation.setup name scan - (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x)); - -fun antiquotation_pretty_source name scan f = - Document_Antiquotation.setup name scan - (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x)); - -fun antiquotation_raw name scan f = - Document_Antiquotation.setup name scan - (fn {context = ctxt, argument = x, ...} => f ctxt x); - -fun antiquotation_verbatim name scan f = - antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt); - -end; From e450a45c7196fa357b25da57df5491bb5b432c32 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Fri, 19 Jul 2019 23:55:15 +0100 Subject: [PATCH 04/34] Code cleanup. --- patches/thy_output.ML | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/patches/thy_output.ML b/patches/thy_output.ML index fac1024..b0a8212 100644 --- a/patches/thy_output.ML +++ b/patches/thy_output.ML @@ -353,19 +353,16 @@ val ignore = (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) >> pair (d - 1)); - val locale = Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |-- Parse.!!! 
(Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")"))); - (* val meta_args_parser_hook = Synchronized.var "meta args parser hook" ((fn thy => fn s => ("",s)): theory -> string parser); *) -val meta_args_parser_hook = Unsynchronized.ref - ((fn _ => fn s => ("",s)): theory -> string parser); +val meta_args_parser_hook = Unsynchronized.ref ((fn _ => fn s => ("",s)): theory -> string parser); in @@ -381,7 +378,6 @@ fun present_thy options thy (segments: segment list) = val ignored = Scan.state --| ignore >> (fn d => (NONE, (Ignore_Token, ("", d)))); - fun markup pred mk flag = Scan.peek (fn d => Document_Source.improper |-- Parse.position (Scan.one (fn tok => Token.is_command tok andalso @@ -476,9 +472,12 @@ fun present_thy options thy (segments: segment list) = else error "Messed-up outer syntax for presentation" end; -fun set_meta_args_parser f = let val _ = writeln "Meta-args parser set to new value" - in ( meta_args_parser_hook := f) end - + fun set_meta_args_parser f = + let + val _ = writeln "Meta-args parser set to new value" + in + (meta_args_parser_hook := f) + end end; From 804a3566cc430635f2ef7bb0513c27eb314437c4 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 00:11:03 +0100 Subject: [PATCH 05/34] Updated command line parameters of DOF_mkroot. --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 381a05a..fa6e8d4 100644 --- a/README.md +++ b/README.md @@ -102,7 +102,6 @@ Usage: isabelle DOF_mkroot [OPTIONS] [DIR] Options are: -h print this help text and exit - -d enable document preparation -n NAME alternative session name (default: DIR base name) -o ONTOLOGY (default: core) Available ontologies: From 189baf9f27b8fad91a9d0155d1a966782e436441 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 00:23:01 +0100 Subject: [PATCH 06/34] Updated command line parameters of DOF_mkroot. --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index fa6e8d4..52f4ee8 100644 --- a/README.md +++ b/README.md @@ -86,7 +86,7 @@ editor. The DOF-plugin provides an alternative to Isabelle's ``mkroot`` command. Isabelle projects that use DOF need to be created using ```console -foo@bar:~$ isabelle DOF_mkroot -d +foo@bar:~$ isabelle DOF_mkroot ``` The ``DOF_mkroot`` command takes the same parameter as the standard ``mkroot`` command of Isabelle. Thereafter, the normal Isabelle @@ -118,7 +118,7 @@ Usage: isabelle DOF_mkroot [OPTIONS] [DIR] ``` For example, ```console -foo@bar:~$ isabelle DOF_mkroot -d -o scholarly_paper -t lncs +foo@bar:~$ isabelle DOF_mkroot -o scholarly_paper -t lncs ``` creates a setup using the scholarly_paper ontology and Springer's LNCS LaTeX class as document class. Note that the generated setup From c69c61407e7b74a312e014d7ee67081be78cfca9 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 10:55:22 +0100 Subject: [PATCH 07/34] Cleanup. 
--- ontologies/CC_ISO15408.thy | 17 ----------------- 1 file changed, 17 deletions(-) delete mode 100644 ontologies/CC_ISO15408.thy diff --git a/ontologies/CC_ISO15408.thy b/ontologies/CC_ISO15408.thy deleted file mode 100644 index 8598fa2..0000000 --- a/ontologies/CC_ISO15408.thy +++ /dev/null @@ -1,17 +0,0 @@ -chapter \An Outline of a Common Criteria Ontology\ - -text{* NOTE: An Ontology-Model of a certification standard such as Comon Criteria or -Common Criteria (ISO15408) identifies: -- notions (conceptual \emph{categories}) having \emph{instances} - (similar to classes and objects), -- their subtype relation (eg., a "security target" is a "requirement definition"), -- their syntactical structure - (for the moment: defined by regular expressions describing the - order of category instances in the overall document as a regular language) - *} - -theory CC_ISO15408 - imports "../Isa_DOF" -begin - -end \ No newline at end of file From 5ef2c756ccb46839800f51bab6eb0385a1fc9731 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 11:09:43 +0100 Subject: [PATCH 08/34] Cleanup. --- test/conceptual/Concept_ExampleInvariant.thy | 109 ------------------- 1 file changed, 109 deletions(-) delete mode 100644 test/conceptual/Concept_ExampleInvariant.thy diff --git a/test/conceptual/Concept_ExampleInvariant.thy b/test/conceptual/Concept_ExampleInvariant.thy deleted file mode 100644 index 1650d01..0000000 --- a/test/conceptual/Concept_ExampleInvariant.thy +++ /dev/null @@ -1,109 +0,0 @@ -chapter\Setting and modifying attributes of doc-items\ - -theory Concept_ExampleInvariant - imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) -begin - -section\Example: Standard Class Invariant\ - -text\Status:\ -print_doc_classes -print_doc_items - - -text\Watch out: The current programming interface to document class invariants is pretty low-level: -\<^item> No inheritance principle -\<^item> No high-level notation in HOL -\<^item> Typing on ML level is assumed to be correct. -The implementor of an ontology must know what he does ... -\ - -text\Setting a sample invariant, which simply produces some side-effect:\ -setup\DOF_core.update_class_invariant "Conceptual.A" (fn oid => - fn {is_monitor = b} => - fn ctxt => - (writeln ("sample echo : "^oid); true))\ - -subsection*[b::A, x = "5"] \ Lorem ipsum dolor sit amet, ... \ - - -text\Setting a sample invariant, referring to attribute value "x":\ -ML\fun check_A_invariant oid {is_monitor:bool} ctxt = - let val term = AttributeAccess.calc_attr_access ctxt "x" oid @{here} - val (@{typ "int"},x_value) = HOLogic.dest_number term - in if x_value > 5 then error("class A invariant violation") else true end -\ - -setup\DOF_core.update_class_invariant "Conceptual.A" check_A_invariant\ - -(* test : should fail : *) -subsection*[c::A, x = "6"] \ Lorem ipsum dolor sit amet, ... \ - - -subsection*[d::A, x = "5"] \ Lorem ipsum dolor sit amet, ... \ - -(* test : update should fail : *) -update_instance*[d::A, x += "1"] - - -section\Example: Monitor Class Invariant\ - -text\Of particular interest are class invariants attached to monitor classes: since the -latter manage a trace-attribute, a class invariant on them can assure a global form of consistency. -It is possible to express: -\<^item> that attributes of a document element must satisfy particular conditions depending on the - prior document elements --- as long they have been observed in a monitor. 
-\<^item> non-regular properties on a trace not expressible in a regular expression - (like balanced ness of opening and closing text elements) -\<^item> etc. -\ - -text\A simple global trace-invariant is expressed in the following: it requires -that instances of class C occur more often as those of class D; note that this is meant -to take sub-classing into account: -\ - -ML\fun check_M_invariant oid {is_monitor} ctxt = - let val term = AttributeAccess.calc_attr_access ctxt "trace" oid @{here} - fun conv (Const(@{const_name "Pair"},_) $ Const(s,_) $ S) = (s, HOLogic.dest_string S) - val string_pair_list = map conv (HOLogic.dest_list term) - val cid_list = map fst string_pair_list - val ctxt' = Proof_Context.init_global(Context.theory_of ctxt) - fun is_C x = DOF_core.is_subclass ctxt' x "Conceptual.C" - fun is_D x = DOF_core.is_subclass ctxt' x "Conceptual.D" - val n = length (filter is_C cid_list) - val m = length (filter is_D cid_list) - in if m > n then error("class M invariant violation") else true end -\ - -setup\DOF_core.update_class_invariant "Conceptual.M" check_M_invariant\ - -open_monitor*[struct::M] - - -section*[a::A, x = "3"] \ Lorem ipsum dolor sit amet, ... \ - -text*[c1::C, x = "''beta''"] \ ... suspendisse non arcu malesuada mollis, nibh morbi, ... \ - -text*[d1::E, a1 = "X3"] \ ... phasellus amet id massa nunc, pede suscipit repellendus, ... \ - -text*[c2::C, x = "''delta''"] \ ... in ut tortor eleifend augue pretium consectetuer... \ - -section*[f::E] \ Lectus accumsan velit ultrices, ... }\ - -(* test : close_monitor should fail : *) -section*[f2::E] \ Lectus accumsan velit ultrices, ... }\ - -ML\val term = AttributeAccess.calc_attr_access (Context.Proof @{context}) "trace" "struct" @{here}; - fun conv (Const(@{const_name "Pair"},_) $ Const(s,_) $ S) = (s, HOLogic.dest_string S) - val string_pair_list = map conv (HOLogic.dest_list term) - \ -(* trace example *) -text\Setting a sample invariant, referring to attribute value "x":\ - - -close_monitor*[struct] - - -end - \ No newline at end of file From a2d8ee9e6ba05cf8449bdd2f419ccfcfc76af8a8 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 11:23:41 +0100 Subject: [PATCH 09/34] Merged the two theories for testing attribute related features. --- examples/conceptual/Attributes.thy | 8 +- test/conceptual/Attributes.thy | 119 ----------------------------- 2 files changed, 1 insertion(+), 126 deletions(-) delete mode 100644 test/conceptual/Attributes.thy diff --git a/examples/conceptual/Attributes.thy b/examples/conceptual/Attributes.thy index 84f381c..78ab865 100644 --- a/examples/conceptual/Attributes.thy +++ b/examples/conceptual/Attributes.thy @@ -12,16 +12,10 @@ print_doc_items ML\ val {docobj_tab={tab = docitem_tab, ...},docclass_tab, ISA_transformer_tab, monitor_tab,...} = DOF_core.get_data @{context}; - Symtab.dest docitem_tab; Symtab.dest docclass_tab; \ -ML\ -fun fac x = if x = 0 then 1 else x * (fac(x -1)); -fac 3; -open Thm; -\ text\A text item containing standard theorem antiquotations and complex meta-information.\ text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\ Lorem ipsum ... 
@{thm refl} \ @@ -136,6 +130,6 @@ text\@{trace_attribute figs1}\ text\Final Status:\ print_doc_items print_doc_classes - +check_doc_global end (*>*) diff --git a/test/conceptual/Attributes.thy b/test/conceptual/Attributes.thy deleted file mode 100644 index 451455f..0000000 --- a/test/conceptual/Attributes.thy +++ /dev/null @@ -1,119 +0,0 @@ -theory Attributes - imports "../../ontologies/Conceptual" -begin - -section\Elementary Creation of DocItems and Access of their Attibutes\ - -text\Current status:\ -print_doc_classes -print_doc_items - -(* corresponds to low-level accesses : *) -ML\ -val {docobj_tab={tab = docitem_tab, ...},docclass_tab, ISA_transformer_tab, monitor_tab,...} - = DOF_core.get_data @{context}; -Symtab.dest docitem_tab; -"=============================================="; -Symtab.dest docclass_tab; -\ - -text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\ sdfsdfs sdfsdf sdfsdf @{thm refl} \ - -typ "C" -typ "D" -ML\val Type("Conceptual.B.B_ext",[Type("Conceptual.C.C_ext",t)]) = @{typ "C"}; - val @{typ "D"} = ODL_Command_Parser.cid_2_cidType "Conceptual.D" @{theory}; - val @{typ "E"}= ODL_Command_Parser.cid_2_cidType "Conceptual.E" @{theory}; - \ - -text*[dfgdfg2::C, z = "None"]\ sdfsdfs sdfsdf sdfsdf @{thm refl} \ - -text*[omega::E, x = "''def''"]\ sdfsdfs sdfsdf sdfsdf @{thm refl} \ - -text\ @{docitem_ref \dfgdfg\} \ - - -term "A.x (undefined\A.x := 3\)" -term "B.x ((undefined::C)\B.y := [''sdf'']\)" -term "C.z ((undefined::C)\B.y := [''sdf''], z:= Some undefined\)" - -ML\ -val SOME {def_occurrence = "Conceptual.A", long_name = "Conceptual.A.x", typ = t, def_pos} - = DOF_core.get_attribute_info "Conceptual.A" "x" @{theory}; -DOF_core.get_attribute_info "Conceptual.B" "x" @{theory}; -DOF_core.get_attribute_info "Conceptual.B" "y" @{theory}; -DOF_core.get_attribute_info "Conceptual.C" "x" @{theory}; -val SOME {def_occurrence = "Conceptual.C", long_name = "Conceptual.B.y", typ = t', def_pos} - = DOF_core.get_attribute_info "Conceptual.C" "y" @{theory}; - (* this is the situation where an attribute is defined in C, but due to inheritance - from B, where it is firstly declared which results in a different long_name. *) -DOF_core.get_attribute_info "Conceptual.C" "z" @{theory}; -\ - - -ML\ -DOF_core.get_value_local "sdf" @{context}; -DOF_core.get_value_local "sdfg" @{context}; -DOF_core.get_value_local "xxxy" @{context}; -DOF_core.get_value_local "dfgdfg" @{context}; -DOF_core.get_value_local "omega" @{context}; -\ - -text\A not too trivial test: default y -> []. - At creation : x -> "f", y -> "sdf". - The latter wins at access time. 
- Then @{term "t"}: creation of a multi inheritance object omega, - triple updates, the last one wins.\ -ML\val s = map HOLogic.dest_string (HOLogic.dest_list @{docitem_attribute y::dfgdfg}); - val t = HOLogic.dest_string (@{docitem_attribute x::omega}); \ - - - - -section\Mutation of Attibutes in DocItems\ - -ML\ val Const ("Groups.zero_class.zero", @{typ "int"}) = @{docitem_attribute a2::omega} \ - -update_instance*[omega::E, a2+="1"] - -ML\ val Const ("Groups.one_class.one", @{typ "int"})= @{docitem_attribute a2::omega} \ - -update_instance*[omega::E, a2+="6"] - -ML\ @{docitem_attribute a2::omega} \ -ML\ HOLogic.dest_number @{docitem_attribute a2::omega} \ - -update_instance*[omega::E, x+="''inition''"] - -ML\ val s = HOLogic.dest_string ( @{docitem_attribute x::omega}) \ - -update_instance*[omega::E, y+="[''defini'',''tion'']"] - -update_instance*[omega::E, y+="[''en'']"] - -ML\ val s = map HOLogic.dest_string (HOLogic.dest_list @{docitem_attribute y::omega}); \ - -section\Simulation of a Monitor\ - -open_monitor*[figs1::figure_group, - (* anchor="''fig-demo''", Achim ...*) - caption="''Sample ''"] - -figure*[fig_A::figure, spawn_columns=False,relative_width="90", - src="''figures/A.png''"] - \ The A train \ldots \ - -figure*[fig_B::figure, spawn_columns=False,relative_width="90", - src="''figures/B.png''"] - \ The B train \ldots \ - -close_monitor*[figs1] - -text\Resulting trace in figs1: \ -ML\@{trace_attribute figs1}\ - -print_doc_items - -check_doc_global - -end \ No newline at end of file From 38bd13d7d0bee2b045fc95224abcdc81bbacef7a Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 11:27:44 +0100 Subject: [PATCH 10/34] Cleanup. --- examples/simple/Concept_Example.thy | 55 ----------------------------- 1 file changed, 55 deletions(-) delete mode 100644 examples/simple/Concept_Example.thy diff --git a/examples/simple/Concept_Example.thy b/examples/simple/Concept_Example.thy deleted file mode 100644 index fadb3bb..0000000 --- a/examples/simple/Concept_Example.thy +++ /dev/null @@ -1,55 +0,0 @@ -chapter\Setting and modifying attributes of doc-items\ - -theory Concept_Example - imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) -begin - -text\@{theory \Draft.Conceptual\} provides a monitor @{typ M} enforcing a particular document - structure. Here, we say: From now on, this structural rules are respected wrt. all - \<^theory_text>\doc_class\es @{typ M} is enabled for.\ -open_monitor*[struct::M] - -section*[a::A, x = "3"] \ Lorem ipsum dolor sit amet, ... \ - -text*[c1::C, x = "''beta''"] \ ... suspendisse non arcu malesuada mollis, nibh morbi, ... \ - -text*[d::D, a1 = "X3"] \ ... phasellus amet id massa nunc, pede suscipit repellendus, - ... @{C c1} @{thm "refl"}\ - - -update_instance*[d::D, a1 := X2] - -text\ ... in ut tortor ... @{docitem \a\} ... @{A \a\}\ - -text*[c2::C, x = "''delta''"] \ ... in ut tortor eleifend augue pretium consectetuer. \ - -text*[f::F] \ Lectus accumsan velit ultrices, ... }\ - -theorem some_proof : "P" sorry - -text\This is an example where we add a theorem into a kind of "result-list" of the doc-item f.\ -update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"] - -text\ ..., mauris amet, id elit aliquam aptent id, ... @{docitem \a\} \ - -text\Here we add and maintain a link that is actually modeled as m-to-n relation ... 
- The type annotations with @{typ A} and @{typ C} are optional and may help to get - additional information at the HOL level, the arguments of the inner-syntax antiquotation - are strings that can be denoted in two different syntactic variants; the former is - more robust that the traditional latter.\ -update_instance*[f::F,b:="{(@{docitem \a\}::A,@{docitem \c1\}::C), - (@{docitem ''a''}, @{docitem ''c2''})}"] - -close_monitor*[struct] - -text\And the trace of the monitor is:\ -ML\@{trace_attribute struct}\ - -print_doc_classes -print_doc_items - -check_doc_global - - -end - \ No newline at end of file From b324d599a166b5402ca628c6b256c434a5d29631 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 11:35:30 +0100 Subject: [PATCH 11/34] Cleanup. --- test/cenelec/Example.thy | 115 --------------------------------------- 1 file changed, 115 deletions(-) delete mode 100644 test/cenelec/Example.thy diff --git a/test/cenelec/Example.thy b/test/cenelec/Example.thy deleted file mode 100644 index cd5c874..0000000 --- a/test/cenelec/Example.thy +++ /dev/null @@ -1,115 +0,0 @@ -theory Example - imports "../../ontologies/CENELEC_50128" -begin - -section{* Some show-off's of general antiquotations. *} - -(* some show-off of standard anti-quotations: *) -print_attributes -print_antiquotations - -text\ @{thm refl} of name @{thm [source] refl} - @{thm[mode=Rule] conjI} - @{file "../../Isa_DOF.thy"} - @{value "3+4::int"} - @{const hd} - @{theory List}} - @{term "3"} - @{type bool} - @{term [show_types] "f x = a + x"} \ - -section{* Example *} - -text*[tralala] {* Brexit means Brexit *} - -text*[ass1::assumption] {* Brexit means Brexit *} - -text*[hyp1::hypothesis] {* P inequal NP *} - - -text*[ass122::srac] {* The overall sampling frequence of the odometer -subsystem is therefore 14 khz, which includes sampling, computing and -result communication times... *} - -text*[t10::test_result] {* This is a meta-test. This could be an ML-command -that governs the external test-execution via, eg., a makefile or specific calls -to a test-environment or test-engine *} - -text \ @{ec \ass122\}}\ - -text \ As established by @{docref (unchecked) \t10\}, - @{docref (define) \t10\} \ -text \ the @{docref \t10\} - as well as the @{docref \ass122\}\ -text \ represent a justification of the safety related applicability - condition @{srac \ass122\} aka exported constraint @{ec \ass122\}. - \ - -text{* And some ontologically inconsistent reference: @{hypothesis \ass1\} as well as *} --- wrong - -text{* ... some more ontologically inconsistent reference: @{assumption \hyp1\} as well as *} --- wrong - - - -text{* And a further ontologically totally inconsistent reference: - @{test_result \ass122\} as well as ... *} --- wrong - -text{* the ontologically inconsistent reference: @{ec \t10\} *} --- wrong - - - -section{* Some Tests for Ontology Framework and its CENELEC Instance *} - -declare_reference* [lalala::requirement, alpha="main", beta=42] - -declare_reference* [lalala::quod] (* shouldn't work: multiple declaration *) - -declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1] - -paragraph*[sdf]{* just a paragraph *} -paragraph* [sdfk] \ just a paragraph - lexical variant \ - -subsection*[sdf]{* shouldn't work, multiple ref. *} --- wrong - -section*[sedf::requirement, alpja= refl]{* Shouldn't work - misspelled attribute. *} -text\\label{sedf}\ (* Hack to make the LaTeX-ing running. Should disappear. 
*) --- wrong - -section*[seedf::test_case, dfg=34,fgdfg=zf]{* and another example with attribute setting, -but wrong doc_class constraint. *} --- wrong - -section{* Text Antiquotation Infrastructure ... *} - -text{* @{docref \lalala\} -- produces warning. *} -text{* @{docref (unchecked) \lalala\} -- produces no warning. *} - -text{* @{docref \ass122\} -- global reference to a text-item in another file. *} - -text{* @{ec \ass122\} -- global reference to a exported constraint in another file. - Note that the link is actually a srac, which, according to - the ontology, happens to be an "ec". *} - -text{* @{test_specification \ass122\} -- wrong: "reference ontologically inconsistent". *} --- wrong - - -text{* Here is a reference to @{docref \sedf\} *} -(* shouldn't work: label exists, but definition was finally rejected to to errors. *) - -check_doc_global (* shoudn't work : Unresolved forward references: lalala,blablabla *) --- wrong - -section \Miscellaneous\ - -section(in order){* sdfsdf*} (* undocumented trouvaille when analysing the code *) - - -end - - \ No newline at end of file From 55b980d98448a884100941b9b8beb913df544c13 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 11:44:09 +0100 Subject: [PATCH 12/34] Cleanup. --- test/simple/Example.thy | 161 ---------------------------------------- 1 file changed, 161 deletions(-) delete mode 100644 test/simple/Example.thy diff --git a/test/simple/Example.thy b/test/simple/Example.thy deleted file mode 100644 index 718869a..0000000 --- a/test/simple/Example.thy +++ /dev/null @@ -1,161 +0,0 @@ -theory Example - imports "../../ontologies/CENELEC_50128" - keywords "Term" :: diag -begin - -section\ Some show-off's of general antiquotations : for demos. \ - - -(* some show-off of standard anti-quotations: *) -print_attributes -print_antiquotations - -text\ @{thm refl} of name @{thm [source] refl} - @{thm[mode=Rule] conjI} - @{file "../../Isa_DOF.thy"} - @{value "3+4::int"} - @{const hd} - @{theory List}} - @{term "3"} - @{type bool} - @{term [show_types] "f x = a + x"} \ - - -section\ Example \ - -text*[ass1::assumption] \ Brexit means Brexit \ - -text*[hyp1::hypothesis] \ P means not P \ - - -text*[ass122::srac] \ The overall sampling frequence of the odometer -subsystem is therefore 14 khz, which includes sampling, computing and -result communication times... \ - -text*[t10::test_result] \ This is a meta-test. This could be an ML-command -that governs the external test-execution via, eg., a makefile or specific calls -to a test-environment or test-engine \ - - -text \ As established by @{docref (unchecked) \t10\}, - @{docref (define) \t10\} - the @{docref \t10\} - the @{docref \ass122\} - \ -text \ safety related applicability condition @{srac \ass122\}. - exported constraint @{ec \ass122\}. 
- \ - -text\ - And some ontologically inconsistent reference: - @{hypothesis \ass1\} as well as - -\ --- "very wrong" - -text\ - And some ontologically inconsistent reference: - @{assumption \hyp1\} as well as - -\ --- "very wrong" - - - -text\ - And some ontologically inconsistent reference: - @{test_result \ass122\} as well as - -\ --- wrong - -text\ - And some other ontologically inconsistent reference: - @{ec \t10\} as well as -\ --- wrong - - - -section\ Some Tests for Ontology Framework and its CENELEC Instance \ - -declare_reference* [lalala::requirement, alpha="main", beta=42] - -declare_reference* [lalala::quod] (* multiple declaration*) --- wrong - -declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1] - -paragraph*[sdf]\ just a paragraph \ -paragraph* [sdfk] \ just a paragraph - lexical variant \ - -subsection*[sdf]\ shouldn't work, multiple ref. \ --- wrong - -section*[sedf::requirement, long_name = "None::string option"] - \ works again. One can hover over the class constraint and jump to its definition. \ - -section*[seedf::test_case, dfg=34,fgdfg=zf]\ and another example with undefined attributes. \ --- wrong - -section\ Text Antiquotation Infrastructure ... \ - -text\ @{docref \lalala\} -- produces warning. \ -text\ @{docref (unchecked) \lalala\} -- produces no warning. \ - -text\ @{docref \ass122\} -- global reference to a text-item in another file. \ - -text\ @{ec \ass122\} -- global reference to a exported constraint in another file. - Note that the link is actually a srac, which, according to - the ontology, happens to be an "ec". \ - -text\ @{test_specification \ass122\} -- wrong: "reference ontologically inconsistent". \ - - - -text\ Here is a reference to @{docref \sedf\} \ -(* works currently only in connection with the above label-hack. - Try to hover over the sedf - link and activate it !!! *) - - - - - - - - - - - - - -section\ A Small Example for Isar-support of a Command Definition --- for demos. \ - -ML\ - -local - val opt_modes = Scan.optional (@{keyword "("} - |-- Parse.!!! (Scan.repeat1 Parse.name - --| @{keyword ")"})) []; -in - val _ = - Outer_Syntax.command @{command_keyword Term} "read and print term" - (opt_modes -- Parse.term >> Isar_Cmd.print_term); -end -\ - -lemma "True" by simp - -Term "a + b = b + a" - -term "a + b = b + a" - - - - -section(in order)\ sdfsdf \ (* undocumented trouvaille when analysing the code *) - - -end - - \ No newline at end of file From 00651b385eb6c2d75c39b5ba6ad994f57cfccd39 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 12:01:33 +0100 Subject: [PATCH 13/34] Cleanup. --- examples/math_exam/Noodles.thy | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 examples/math_exam/Noodles.thy diff --git a/examples/math_exam/Noodles.thy b/examples/math_exam/Noodles.thy deleted file mode 100644 index 533dadb..0000000 --- a/examples/math_exam/Noodles.thy +++ /dev/null @@ -1,8 +0,0 @@ -theory Noodles - imports "../../ontologies/small_math" - "../../ontologies/technical_report" -begin - -title*[t::title]\On Noodles\ - -end From 4f911f3a23c923599b12b9dc5f58b0c0690c0f68 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 12:02:05 +0100 Subject: [PATCH 14/34] Cleanup. 
--- examples/math_exam/FormSheet.tex | 22 ---------------------- 1 file changed, 22 deletions(-) delete mode 100644 examples/math_exam/FormSheet.tex diff --git a/examples/math_exam/FormSheet.tex b/examples/math_exam/FormSheet.tex deleted file mode 100644 index 501d1b6..0000000 --- a/examples/math_exam/FormSheet.tex +++ /dev/null @@ -1,22 +0,0 @@ -\documentclass{article} - -\usepackage{hyperref} - -\begin{document} - -\begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}] -\begin{tabular}{l} - \TextField{From } \\\\ - \TextField{have 1} \\\\ - \TextField{have 2} \\\\ - \TextField{have 3} \\\\ - \TextField{finally show} \\\\ - \CheckBox[width=1em]{Has the polynomial as many solutions as its degree ? } \\\\ - \Submit{Submit}\\ -\end{tabular} -\end{Form} - - - -\end{document} - From 3d01c5faf86229ddcb2113288a7f6ee72a6341c9 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 12:37:21 +0100 Subject: [PATCH 15/34] Cleanup. --- examples/cenelec/Example.thy | 105 ----------------------------------- 1 file changed, 105 deletions(-) delete mode 100644 examples/cenelec/Example.thy diff --git a/examples/cenelec/Example.thy b/examples/cenelec/Example.thy deleted file mode 100644 index 07a8d3f..0000000 --- a/examples/cenelec/Example.thy +++ /dev/null @@ -1,105 +0,0 @@ -chapter\ Example : Forward and Standard (use-after-define) Referencing\ - -theory Example - imports - Isabelle_DOF.CENELEC_50128 - Isabelle_DOF.scholarly_paper -begin - -section\ Some examples of Isabelle's standard antiquotations. \ -(* some show-off of standard anti-quotations: *) -text \THIS IS A TEXT\ -term "[]" - -text\ @{thm refl} of name @{thm [source] refl} - @{thm[mode=Rule] conjI} - @{file "../../Isa_DOF.thy"} - @{value "3+4::int"} - @{const hd} - @{theory HOL.List}} - @{term "3"} - @{type bool} - @{term [show_types] "f x = a + x"} \ - - - - -section\ Core Examples for stating text-elements as doc-items.\ - -text\An "anonymous" text-item, automatically coerced into the top-class "text".\ -text*[tralala] \ Brexit means Brexit \ - -text\Examples for declaration of typed doc-items "assumption" and "hypothesis", - concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \ -text*[ass1::assumption] \ The subsystem Y is safe. \ -text*[hyp1::hypothesis] \ P not equal NP \ - -text\A real example fragment from a larger project, declaring a text-element as a - "safety-related application condition", a concept defined in the - @{theory "Draft.CENELEC_50128"} - ontology:\ - -text*[new_ass::hypothesis]\Under the assumption @{assumption \ass1\} we establish the following: ... \ - -text*[ass122::SRAC] \ The overall sampling frequence of the odometer -subsystem is therefore 14 khz, which includes sampling, computing and -result communication times... \ - -text*[t10::test_result] \ This is a meta-test. This could be an ML-command -that governs the external test-execution via, eg., a makefile or specific calls -to a test-environment or test-engine \ - - -section\ References to doc-items.\ -text\Finally some examples of references to doc-items, i.e. text-elements with declared - meta-information and status. 
\ -text \ As established by @{docref (unchecked) \t10\}, - @{docref (define) \t10\} \ -text \ the @{docref \t10\} - as well as the @{docref \ass122\}\ -text \ represent a justification of the safety related applicability - condition @{SRAC \ass122\} aka exported constraint @{EC \ass122\}.\ - - - - -section\ Some Tests for Ontology Framework and its CENELEC Instance \ - -declare_reference* [lalala::requirement, alpha="main", beta=42] -declare_reference* [blablabla::cid, alpha="beta sdf", beta=gamma, delta=dfg_fgh\<^sub>1] - -paragraph* [sdfk::introduction] \ just a paragraph - lexical variant \ - - - - -section*[h::example]\ Some global inspection commands for the status of docitem and doc-class tables ... \ - - - -section*[i::example]\ Text Antiquotation Infrastructure ... \ - -text\ @{docitem \lalala\} -- produces warning. \ -text\ @{docitem (unchecked) \lalala\} -- produces no warning. \ - -text\ @{docitem \ass122\} -- global reference to a text-item in another file. \ - -text\ @{EC \ass122\} -- global reference to a exported constraint in another file. - Note that the link is actually a srac, which, according to - the ontology, happens to be an "ec". \ - - -section*[h2::example]\ Snippets ... \ - -text*[req1::requirement, is_concerned="UNIV"] -\The operating system shall provide secure -memory separation. \ - - -text\The recurring issue of the certification - is @{requirement \req1\} ...\ - - -end - - From 189110dc0fd3ffd7ffc2e68f99fc14afd769dd62 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 12:43:58 +0100 Subject: [PATCH 16/34] Cleanup. --- examples/math_exam/On_Noodles.thy | 32 ------------------------------- 1 file changed, 32 deletions(-) delete mode 100644 examples/math_exam/On_Noodles.thy diff --git a/examples/math_exam/On_Noodles.thy b/examples/math_exam/On_Noodles.thy deleted file mode 100644 index bd02359..0000000 --- a/examples/math_exam/On_Noodles.thy +++ /dev/null @@ -1,32 +0,0 @@ -theory "On_Noodles" - imports "../../ontologies/small_math" - "../../ontologies/technical_report" -begin - -open_monitor*[this::article] - -title*[t1::title]\On Noodles\ - -text*[simon::author]\Simon Foster\ -text*[a::abstract, keywordlist = "[''topology'']"] -\We present the first fundamental results on the goundbreaking theory of noodles...\ -section*[intro::introduction]\Introduction\ - -text\ Authorities say, that Noodles are unleavened dough which is stretched, - extruded, or rolled flat and cut into one or a variety of shapes which usually -include long, thin strips, or waves, helices, tubes, strings, or shells, or -folded over, or cut into other shapes. Noodles are usually cooked in boiling water, -sometimes with cooking oil or salt added. \ - -section*[def_sec::technical]\Basic definitions\ - -text*[d1::"definition"]\My first definition\ -definition noodle ::"bool" where "noodle = (THE x. True)" - -(* -update_instance*[def1, formal_results:="[@{thm ''noodle_def''}]"] -*) - -close_monitor*[this::article] - -end From a85bcacd5b37b280931f3eb245b08b37c720e5e8 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 14:54:31 +0100 Subject: [PATCH 17/34] Moved unit tests into tests directory and added test session. 
--- ROOT | 8 ++++++++ .../conceptual => tests}/AssnsLemmaThmEtc.thy | 17 ++++++++++++----- {examples/conceptual => tests}/Attributes.thy | 14 +++++++++----- .../conceptual => tests}/Concept_Example.thy | 12 +++++++----- .../Concept_ExampleInvariant.thy | 8 +++++--- .../InnerSyntaxAntiquotations.thy | 16 ++++++++++------ {examples/conceptual => tests}/figures/A.png | Bin {examples/conceptual => tests}/figures/AnB.odp | Bin {examples/conceptual => tests}/figures/B.png | Bin 9 files changed, 51 insertions(+), 24 deletions(-) rename {examples/conceptual => tests}/AssnsLemmaThmEtc.thy (93%) rename {examples/conceptual => tests}/Attributes.thy (96%) rename {examples/conceptual => tests}/Concept_Example.thy (87%) rename {examples/conceptual => tests}/Concept_ExampleInvariant.thy (97%) rename {examples/conceptual => tests}/InnerSyntaxAntiquotations.thy (88%) rename {examples/conceptual => tests}/figures/A.png (100%) rename {examples/conceptual => tests}/figures/AnB.odp (100%) rename {examples/conceptual => tests}/figures/B.png (100%) diff --git a/ROOT b/ROOT index 46c0f3b..da826df 100644 --- a/ROOT +++ b/ROOT @@ -10,3 +10,11 @@ session "Isabelle_DOF" = "Functional-Automata" + "ontologies/technical_report" "ontologies/mathex_onto" +session "Isabelle_DOF-tests" = "Isabelle_DOF" + + options [document = false] + theories + "tests/AssnsLemmaThmEtc" + "tests/Concept_ExampleInvariant" + "tests/Concept_Example" + "tests/InnerSyntaxAntiquotations" + "tests/Attributes" diff --git a/examples/conceptual/AssnsLemmaThmEtc.thy b/tests/AssnsLemmaThmEtc.thy similarity index 93% rename from examples/conceptual/AssnsLemmaThmEtc.thy rename to tests/AssnsLemmaThmEtc.thy index 1f98909..4cc0e85 100644 --- a/examples/conceptual/AssnsLemmaThmEtc.thy +++ b/tests/AssnsLemmaThmEtc.thy @@ -1,5 +1,8 @@ -theory AssnsLemmaThmEtc - imports "../../ontologies/Conceptual" "../../ontologies/math_paper" +theory + AssnsLemmaThmEtc +imports + "../ontologies/Conceptual" + "../ontologies/math_paper" begin section\Elementary Creation of Doc-items and Access of their Attibutes\ @@ -17,14 +20,18 @@ text*[aa::F, property = "[@{term ''True''}]"] \Our definition of the HOL-Logic has the following properties:\ assert*[aa::F] "True" - +(* does not work in batch mode ... (* sample for accessing a property filled with previous assert's of "aa" *) ML\ ISA_core.property_list_dest @{context} @{docitem_attribute property :: aa};\ + + assert*[aa::F] " X \ Y \ True" (*example with uni-code *) ML\ ISA_core.property_list_dest @{context} @{docitem_attribute property :: aa}; app writeln (tl (rev it));\ +*) + assert*[aa::F] "\x::bool. X \ Y \ True" (*problem with uni-code *) ML\ @@ -33,12 +40,12 @@ Syntax.read_term_global @{theory} "[@{termrepr ''True @ True''}] @{term "[@{term '' True @ True ''}]"}; (* with isa-check *) @{term "[@{termrepr '' True @ True ''}]"}; (* without isa check *) \ - +(* ML\val [_,_,Const _ $ s,_] = (HOLogic.dest_list @{docitem_attribute property :: aa}); val t = HOLogic.dest_string s; holstring_to_bstring @{context} t \ - +*) lemma "All (\x. 
X \ Y \ True)" oops diff --git a/examples/conceptual/Attributes.thy b/tests/Attributes.thy similarity index 96% rename from examples/conceptual/Attributes.thy rename to tests/Attributes.thy index 78ab865..de62802 100644 --- a/examples/conceptual/Attributes.thy +++ b/tests/Attributes.thy @@ -1,5 +1,7 @@ -theory Attributes - imports "../../ontologies/Conceptual" +theory + Attributes +imports + "../ontologies/Conceptual" begin section\Elementary Creation of Doc-items and Access of their Attibutes\ @@ -18,7 +20,10 @@ Symtab.dest docclass_tab; text\A text item containing standard theorem antiquotations and complex meta-information.\ -text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\ Lorem ipsum ... @{thm refl} \ +(* crashes in batch mode ... +text*[dfgdfg::B, Conceptual.B.x ="''f''", y = "[''sdf'']"]\ Lorem ipsum ... @{thm refl} \ +*) +text*[dfgdfg::B]\ Lorem ipsum ... @{thm refl} \ text\document class declarations lead also HOL-type declarations (relevant for ontological links).\ typ "C" @@ -106,7 +111,6 @@ text\ @{docitem_attribute omega::y} \ section\Simulation of a Monitor\ open_monitor*[figs1::figure_group, - (* anchor="''fig-demo''", ? ? ? apparently eliminated by Achim *) caption="''Sample ''"] figure*[fig_A::figure, spawn_columns=False, @@ -130,6 +134,6 @@ text\@{trace_attribute figs1}\ text\Final Status:\ print_doc_items print_doc_classes -check_doc_global + end (*>*) diff --git a/examples/conceptual/Concept_Example.thy b/tests/Concept_Example.thy similarity index 87% rename from examples/conceptual/Concept_Example.thy rename to tests/Concept_Example.thy index a8b8c6a..563bcce 100644 --- a/examples/conceptual/Concept_Example.thy +++ b/tests/Concept_Example.thy @@ -1,10 +1,12 @@ chapter\Setting and modifying attributes of doc-items\ -theory Concept_Example - imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) +theory + Concept_Example +imports + "../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) begin -text\@{theory Draft.Conceptual} provides a monitor @{typ M} enforcing a particular document structure. +text\@{theory "Isabelle_DOF-tests.Conceptual"} provides a monitor @{typ M} enforcing a particular document structure. Here, we say: From now on, this structural rules are respected wrt. all doc\_classes M is enabled for.\ open_monitor*[struct::M] @@ -29,7 +31,7 @@ fancy unicode such as: @{term "\\\<^sub>i''\"} \ text*[f::F] \ Lectus accumsan velit ultrices, ... 
}\ -theorem some_proof : "P" sorry +theorem some_proof : "True" by simp text\This is an example where we add a theorem into a kind of "result-list" of the doc-item f.\ update_instance*[f::F,r:="[@{thm ''Concept_Example.some_proof''}]"] @@ -52,4 +54,4 @@ print_doc_items end - \ No newline at end of file + diff --git a/examples/conceptual/Concept_ExampleInvariant.thy b/tests/Concept_ExampleInvariant.thy similarity index 97% rename from examples/conceptual/Concept_ExampleInvariant.thy rename to tests/Concept_ExampleInvariant.thy index ad9dafd..8987834 100644 --- a/examples/conceptual/Concept_ExampleInvariant.thy +++ b/tests/Concept_ExampleInvariant.thy @@ -1,7 +1,9 @@ chapter\Setting and modifying attributes of doc-items\ -theory Concept_ExampleInvariant - imports "../../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) +theory + Concept_ExampleInvariant +imports + "../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) begin section\Example: Standard Class Invariant\ @@ -108,4 +110,4 @@ close_monitor*[struct] end - \ No newline at end of file + diff --git a/examples/conceptual/InnerSyntaxAntiquotations.thy b/tests/InnerSyntaxAntiquotations.thy similarity index 88% rename from examples/conceptual/InnerSyntaxAntiquotations.thy rename to tests/InnerSyntaxAntiquotations.thy index 7e5e288..7ee6c38 100644 --- a/examples/conceptual/InnerSyntaxAntiquotations.thy +++ b/tests/InnerSyntaxAntiquotations.thy @@ -1,7 +1,9 @@ chapter\Inner Syntax Antiquotations (ISA)'s\ -theory InnerSyntaxAntiquotations - imports "../../ontologies/Conceptual" +theory + InnerSyntaxAntiquotations +imports + "../ontologies/Conceptual" begin text\Since the syntax chosen for values of doc-class attributes is HOL-syntax --- requiring @@ -14,7 +16,7 @@ They are the key-mechanism to denote \<^item> Ontological Links, i.e. attributes refering to document classes defined by the ontology \<^item> Ontological F-Links, i.e. attributes referring to formal entities inside Isabelle (such as thm's) -This file contains a number of examples resulting from the @{theory "Draft.Conceptual"} - ontology; +This file contains a number of examples resulting from the @{theory "Isabelle_DOF-tests.Conceptual"} - ontology; the emphasis of this presentation is to present the expressivity of ODL on a paradigmatical example. 
\ @@ -26,11 +28,13 @@ val {docobj_tab={tab = x, ...},docclass_tab, ISA_transformer_tab,...} = DOF_core \ text\Some sample lemma:\ -lemma murks : "Example" sorry +lemma murks : "Example=Example" by simp text\Example for a meta-attribute of ODL-type @{typ "file"} with an appropriate ISA for the - file @{file "./Attributes.thy"}\ -text*[xcv::F, u="@{file ''./examples/conceptual/Attributes.thy''}"]\Lorem ipsum ...\ + file @{file "InnerSyntaxAntiquotations.thy"}\ +(* not working: +text*[xcv::F, u="@{file ''InnerSyntaxAntiquotations.thy''}"]\Lorem ipsum ...\ +*) text*[xcv1::A, x=5]\Lorem ipsum ...\ text*[xcv3::A, x=7]\Lorem ipsum ...\ diff --git a/examples/conceptual/figures/A.png b/tests/figures/A.png similarity index 100% rename from examples/conceptual/figures/A.png rename to tests/figures/A.png diff --git a/examples/conceptual/figures/AnB.odp b/tests/figures/AnB.odp similarity index 100% rename from examples/conceptual/figures/AnB.odp rename to tests/figures/AnB.odp diff --git a/examples/conceptual/figures/B.png b/tests/figures/B.png similarity index 100% rename from examples/conceptual/figures/B.png rename to tests/figures/B.png From 7f8ea1c115ea5d29fbf087139cad59bb144a8d5f Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 20 Jul 2019 15:15:04 +0100 Subject: [PATCH 18/34] Removed outdated BAC2017 example. --- examples/math_exam/BAC2017/BAC2017.thy | 117 ------------------ examples/math_exam/BAC2017/ROOT | 11 -- examples/math_exam/BAC2017/document/build | 46 ------- .../math_exam/BAC2017/document/isadof.cfg | 2 - .../math_exam/BAC2017/document/preamble.tex | 47 ------- 5 files changed, 223 deletions(-) delete mode 100644 examples/math_exam/BAC2017/BAC2017.thy delete mode 100644 examples/math_exam/BAC2017/ROOT delete mode 100755 examples/math_exam/BAC2017/document/build delete mode 100644 examples/math_exam/BAC2017/document/isadof.cfg delete mode 100644 examples/math_exam/BAC2017/document/preamble.tex diff --git a/examples/math_exam/BAC2017/BAC2017.thy b/examples/math_exam/BAC2017/BAC2017.thy deleted file mode 100644 index 5177fb7..0000000 --- a/examples/math_exam/BAC2017/BAC2017.thy +++ /dev/null @@ -1,117 +0,0 @@ -theory BAC2017 - imports "Isabelle_DOF.mathex_onto" - Deriv - Transcendental -begin - -open_monitor*[exam::MathExam] - -(* currently rethinking on "deep ontologies" necessary ... Achim -text*[idir::Author,affiliation="''LRI, CentraleSupelec''", -email="''idir.aitsadoune@centralesupelec.fr''"] -{*Idir AIT SADOUNE*} - -text*[keller::Author,affiliation="''LRI, Univ. Paris-Sud''", - email="''Chantal.Keller@lri.fr''"] -{*Chantal KELLER*} - -text{* This example is an excerpt from the french baccaleareat 2017. - The textual explanations were kept in french. - *} -*) - -text*[header::Header,examSubject="[analysis,geometry]", date="''21-06-2017''", - timeAllowed="240::int"]{* BACCALAUREAT GENERAL MATHEMATIQUES *} -text{* -\begin{itemize} -\item Les calculatrices électroniques de poche sont autorisées, conformément à la réglementation - en vigueur. -\item Le sujet est composé de 4 exercices indépendants. -\item Le candidat doit traiter tous les exercices. -\item Le candidat est invité à faire figurer sur la copie toute trace de recherche, même incomplète - ou non fructueuse, qu’il aura développée. -\item Il est rappelé que la qualité de la rédaction, la clarté et la précision des raisonnements - entreront pour une part importante dans l’appréciation des copies. 
-\end{itemize} -*} - - -text*[exo1 :: Exercise, - concerns= "{setter,student,checker,externalExaminer}"] -{* On considère la fonction h définie sur l’intervalle [0..+\] par : - @{term "h(x) = x * exponent (-x)"} -*} - -definition h :: "real \ real" - where "h x \ x * exp (- x)" - - -text*[q1::Task, concerns= "{setter,student}", -level="oneStar", mark="1::int", type="formal"] -{* Déterminer la limite de la fonction @{term h} en +\. *} - -text*[a1::Answer_Formal_Step] {* Fill in term and justification*} - -lemma q1 : "(h \ (0::real)) at_top" sorry - -text*[v1::Validation, proofs="[@{thm ''HOL.refl''}::thm]"] {* See lemma @{thm q1}. *} - - -text*[q2::Task, concerns= "{setter,checker,student}", - level="oneStar", mark="1::int", type="formal"] -{* Étudier les variations de la fonction @{term h} sur l'intervalle [0..+\] et - dresser son tableau de variation *} - -text*[a2::Answer_Formal_Step] -{* Fill in term and justification*} - -definition h' :: "real \ real" - where "h' x \ (1 - x) * exp (- x)" - -lemma q2_a : "DERIV h x :> h' x" -proof - - have * : "DERIV (exp \ uminus) x :> - (exp (-x))" - sorry (* by (simp add: has_derivative_compose) *) - have ** : "DERIV id x :> 1" - by (metis DERIV_ident eq_id_iff) - have *** : "DERIV h x :> x * (- (exp (- x))) + 1 * (exp (- x))" - sorry (* by (simp add: * ** has_derivative_mult comp_def) *) - show ?thesis - sorry (* by (metis "***" left_diff_distrib mult_minus_right uminus_add_conv_diff) *) -qed - -lemma q2_b : "0 \ x \ x \ y \ y \ 1 \ h x \ h y" - sorry - -lemma q2_c : "1 \ x \ x \ y \ h x \ h y" - sorry - -text*[v2::Validation, proofs="[@{thm ''BAC2017.q2_b''}, @{thm ''BAC2017.q2_c''}]"] - {* See lemmas @{thm q2_b} and @{thm q2_c}. *} - - -text*[q3a::Task, concerns= "{setter,checker,student}", -level="oneStar", mark="1::int", type="formal"] -{* Vérifier que pour tout nombre réel x appartenant à l'intervalle [0..+\], on a : - @{term "h x = (exp (- x)) - (h' x)"}. *} - -text*[a3a::Answer_Formal_Step] -{* Fill in term and justification*} - -lemma q3a : "h x = (exp (- x)) - (h' x)" - by (simp add : h_def h'_def left_diff_distrib) - -subsubsection*[v3a::Validation, proofs="[@{thm ''BAC2017.q3a''}::thm]"] - {* See lemma @{thm q3a}. *} - - -subsection*[sol1 :: Solution, - content="[exo1::Exercise]", - valids = "[v1::Validation,v2,v3a]"] -{* See validations. *} - - - -close_monitor*[exam] - -end diff --git a/examples/math_exam/BAC2017/ROOT b/examples/math_exam/BAC2017/ROOT deleted file mode 100644 index cf67197..0000000 --- a/examples/math_exam/BAC2017/ROOT +++ /dev/null @@ -1,11 +0,0 @@ -session "BAC2017" = "Isabelle_DOF" + - options [document = pdf, document_output = "output",quick_and_dirty=true] - theories [document = false] - "Deriv" - "Transcendental" - theories - BAC2017 - document_files - "isadof.cfg" - "preamble.tex" - "build" diff --git a/examples/math_exam/BAC2017/document/build b/examples/math_exam/BAC2017/document/build deleted file mode 100755 index c4a05ab..0000000 --- a/examples/math_exam/BAC2017/document/build +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved. -# 2018 The University of Paris-Sud. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# SPDX-License-Identifier: BSD-2-Clause - -set -e -if [ ! -f $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh ]; then - echo "" - echo "Error: Isabelle/DOF not installed" - echo "=====" - echo "This is a Isabelle/DOF project. The document preparation requires" - echo "the Isabelle/DOF framework. Please obtain the framework by cloning" - echo "the Isabelle/DOF git repository, i.e.: " - echo " git clone https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF" - echo "You can install the framework as follows:" - echo " cd Isabelle_DOF/document-generator" - echo " ./install" - echo "" - exit 1 -fi - -cp $ISABELLE_HOME_USER/DOF/document-template/build_lib.sh . -source build_lib.sh diff --git a/examples/math_exam/BAC2017/document/isadof.cfg b/examples/math_exam/BAC2017/document/isadof.cfg deleted file mode 100644 index f13f22b..0000000 --- a/examples/math_exam/BAC2017/document/isadof.cfg +++ /dev/null @@ -1,2 +0,0 @@ -Template: scrartcl -Ontology: mathex diff --git a/examples/math_exam/BAC2017/document/preamble.tex b/examples/math_exam/BAC2017/document/preamble.tex deleted file mode 100644 index 5e5251d..0000000 --- a/examples/math_exam/BAC2017/document/preamble.tex +++ /dev/null @@ -1,47 +0,0 @@ -%% Copyright (C) 2018 The University of Sheffield -%% 2018 The University of Paris-Sud -%% -%% License: -%% This program can be redistributed and/or modified under the terms -%% of the LaTeX Project Public License Distributed from CTAN -%% archives in directory macros/latex/base/lppl.txt; either -%% version 1 of the License, or any later version. -%% OR -%% The 2-clause BSD-style license. -%% -%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause - -%% This is a placeholder for user-specific configuration and packages. - -\title{} -\author{<AUTHOR>} - -\newkeycommand\isaDofTextExercise[label=,type=,Exercise.content=,content=,concerns=,][1]{% - \begin{Exercise} - #1 - \end{Exercise} -} - -\newkeycommand\isaDofTextSolution[Task.concerns=,concerns=,content=,valids=,][1]{% - #1 -} - -\newkeycommand\isaDofSectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{% - #1 -} - -\newkeycommand\isaDofSubsectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{% - #1 -} - -\newkeycommand\isaDofSubsubsectionSolution[Task.concerns=,concerns=,content=,valids=,][1]{% - #1 -} - -\newkeycommand\isaDofTextExercise[Task.concerns=,concerns=,content=,][1]{% - #1 -} - -\newkeycommand\isaDofTextValidation[tests=,proofs=,][1]{% - #1 -} From e6cea1156c7a1ada36cfdad7af6dcc7005e8569d Mon Sep 17 00:00:00 2001 From: "Achim D. 
Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 15:30:13 +0100 Subject: [PATCH 19/34] Activated MathExam. --- examples/math_exam/MathExam/MathExam.thy | 13 ++----------- examples/math_exam/ROOTS | 1 + 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/examples/math_exam/MathExam/MathExam.thy b/examples/math_exam/MathExam/MathExam.thy index 0c2f851..a29ed2e 100644 --- a/examples/math_exam/MathExam/MathExam.thy +++ b/examples/math_exam/MathExam/MathExam.thy @@ -1,7 +1,7 @@ (*<*) theory MathExam imports "Isabelle_DOF.mathex_onto" - Real + HOL.Real begin (*>*) open_monitor*[exam::MathExam] @@ -22,18 +22,9 @@ text*[idir::Author, affiliation="''CentraleSupelec''", {*Idir AIT SADOUNE*} -(* should be in DOF-core - -* causes crash on the LaTeX side: - ( FP-DIV ) -*** ! Undefined control sequence. -*** <argument> ...ative_width}}{100} \includegraphics -*** [width=\scale \textwidth ]... -*** l.44 {A Polynome.} -*) figure*[figure::figure, spawn_columns=False, relative_width="80", - src="''figures/Polynomialdeg5.png''"] + src="''figures/Polynomialdeg5''"] \<open>A Polynome.\<close> diff --git a/examples/math_exam/ROOTS b/examples/math_exam/ROOTS index e69de29..a251c56 100644 --- a/examples/math_exam/ROOTS +++ b/examples/math_exam/ROOTS @@ -0,0 +1 @@ +MathExam From 183c64eb0f640c360a3aee97f8e97b8b1cebca98 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 15:34:16 +0100 Subject: [PATCH 20/34] Moved build script into separate directory. --- document-generator/{document-template => scripts}/build | 0 document-generator/{document-template => scripts}/build_lib.sh | 0 install | 1 + 3 files changed, 1 insertion(+) rename document-generator/{document-template => scripts}/build (100%) rename document-generator/{document-template => scripts}/build_lib.sh (100%) diff --git a/document-generator/document-template/build b/document-generator/scripts/build similarity index 100% rename from document-generator/document-template/build rename to document-generator/scripts/build diff --git a/document-generator/document-template/build_lib.sh b/document-generator/scripts/build_lib.sh similarity index 100% rename from document-generator/document-template/build_lib.sh rename to document-generator/scripts/build_lib.sh diff --git a/install b/install index 2fa10f6..59f613b 100755 --- a/install +++ b/install @@ -179,6 +179,7 @@ install_and_register(){ DIR="$ISABELLE_HOME_USER/DOF/document-template" echo " - Installing document templates in $DIR" mkdir -p "$DIR" + cp $GEN_DIR/scripts/* "$DIR" cp $GEN_DIR/document-template/* "$DIR" DIR="$ISABELLE_HOME_USER/DOF/latex" From 8f22b53b18f3f64fee9109c468062ac77b15bc34 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 15:40:17 +0100 Subject: [PATCH 21/34] Renamed document-template directory to document-templates. 
--- .../root-eptcs-UNSUPPORTED.tex | 0 .../root-lipics-v2019-UNSUPPORTED.tex | 0 .../{document-template => document-templates}/root-lncs.tex | 0 .../{document-template => document-templates}/root-scrartcl.tex | 0 .../{document-template => document-templates}/root-scrreprt.tex | 0 install | 2 +- 6 files changed, 1 insertion(+), 1 deletion(-) rename document-generator/{document-template => document-templates}/root-eptcs-UNSUPPORTED.tex (100%) rename document-generator/{document-template => document-templates}/root-lipics-v2019-UNSUPPORTED.tex (100%) rename document-generator/{document-template => document-templates}/root-lncs.tex (100%) rename document-generator/{document-template => document-templates}/root-scrartcl.tex (100%) rename document-generator/{document-template => document-templates}/root-scrreprt.tex (100%) diff --git a/document-generator/document-template/root-eptcs-UNSUPPORTED.tex b/document-generator/document-templates/root-eptcs-UNSUPPORTED.tex similarity index 100% rename from document-generator/document-template/root-eptcs-UNSUPPORTED.tex rename to document-generator/document-templates/root-eptcs-UNSUPPORTED.tex diff --git a/document-generator/document-template/root-lipics-v2019-UNSUPPORTED.tex b/document-generator/document-templates/root-lipics-v2019-UNSUPPORTED.tex similarity index 100% rename from document-generator/document-template/root-lipics-v2019-UNSUPPORTED.tex rename to document-generator/document-templates/root-lipics-v2019-UNSUPPORTED.tex diff --git a/document-generator/document-template/root-lncs.tex b/document-generator/document-templates/root-lncs.tex similarity index 100% rename from document-generator/document-template/root-lncs.tex rename to document-generator/document-templates/root-lncs.tex diff --git a/document-generator/document-template/root-scrartcl.tex b/document-generator/document-templates/root-scrartcl.tex similarity index 100% rename from document-generator/document-template/root-scrartcl.tex rename to document-generator/document-templates/root-scrartcl.tex diff --git a/document-generator/document-template/root-scrreprt.tex b/document-generator/document-templates/root-scrreprt.tex similarity index 100% rename from document-generator/document-template/root-scrreprt.tex rename to document-generator/document-templates/root-scrreprt.tex diff --git a/install b/install index 59f613b..1189658 100755 --- a/install +++ b/install @@ -180,7 +180,7 @@ install_and_register(){ echo " - Installing document templates in $DIR" mkdir -p "$DIR" cp $GEN_DIR/scripts/* "$DIR" - cp $GEN_DIR/document-template/* "$DIR" + cp $GEN_DIR/document-templates/* "$DIR" DIR="$ISABELLE_HOME_USER/DOF/latex" echo " - Installing LaTeX styles in $DIR" From f8013d90a2aeb6cbba319aa6b20a864a3f83420c Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 16:08:47 +0100 Subject: [PATCH 22/34] Updated Isar notation. 
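The change is purely syntactic: the legacy delimiters text{* ... *} and
ML{* ... *} are replaced by the cartouche forms text\<open> ... \<close> and
ML\<open> ... \<close>. A rough sketch of the mechanical part of such a
conversion (FILE.thy is a placeholder; a blanket replacement like this is only
a starting point and the result needs manual review):

    # replace {* ... *} delimiters by cartouches, then review the result
    perl -pi -e 's/\{\*/\\<open>/g; s/\*\}/\\<close>/g' FILE.thy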
--- AssertLong.thy | 16 ++++----- Isa_DOF.thy | 2 +- RegExp.thy | 10 +++--- RegExpInterface.thy | 10 +++--- examples/math_exam/MathExam/MathExam.thy | 34 +++++++++---------- .../IsaDof_Manual/04_IsaDofImpl.thy | 4 +-- .../IsaDof_Manual/05_DesignImpl.thy | 10 +++--- ontologies/CENELEC_50128.thy | 2 +- ontologies/mathex_onto.thy | 8 ++--- ontologies/scholarly_paper.thy | 4 +-- ontologies/small_math.thy | 2 +- ontologies/technical_report.thy | 2 +- 12 files changed, 52 insertions(+), 52 deletions(-) diff --git a/AssertLong.thy b/AssertLong.thy index a029ec6..dceefba 100644 --- a/AssertLong.thy +++ b/AssertLong.thy @@ -7,7 +7,7 @@ begin -ML{* +ML\<open> fun value_maybe_select some_name = case some_name @@ -30,11 +30,11 @@ fun value_cmd2 some_name modes raw_t state = Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) (); in Pretty.writeln p end; -*} -ML{* value_cmd2*} +\<close> +ML\<open>value_cmd2\<close> definition ASSERT :: "bool \<Rightarrow> bool" where "ASSERT p == (p=True)" -ML{* val x = @{code "ASSERT"} *} -ML{* +ML\<open>val x = @{code "ASSERT"}\<close> +ML\<open> val opt_modes = Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.name --| @{keyword ")"})) []; @@ -49,9 +49,9 @@ val _ = (* Toplevel.keep (Value_Command.value_cmd some_name modes (enclose "ASSERT(" ")" t)) *) Toplevel.keep (value_cmd2 some_name modes t) end)); -*} +\<close> assert "True" assert "True \<and> True " -ML{* !TT ; - @{term "True"} *} \ No newline at end of file +ML\<open>!TT ; + @{term "True"}\<close> \ No newline at end of file diff --git a/Isa_DOF.thy b/Isa_DOF.thy index ae925ac..f503469 100644 --- a/Isa_DOF.thy +++ b/Isa_DOF.thy @@ -731,7 +731,7 @@ typedecl "thm" typedecl "file" typedecl "thy" -\<comment> \<open> and others in the future : file, http, thy, ... \<close> +\<comment> \<open>and others in the future : file, http, thy, ...\<close> consts ISA_typ :: "string \<Rightarrow> typ" ("@{typ _}") consts ISA_term :: "string \<Rightarrow> term" ("@{term _}") diff --git a/RegExp.thy b/RegExp.thy index 5d663f2..781c323 100644 --- a/RegExp.thy +++ b/RegExp.thy @@ -25,22 +25,22 @@ definition opt :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrakk>(_)\<rbrakk>") where "\<lbrakk>A\<rbrakk> \<equiv> A || One" value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))" -text{* or better equivalently: *} +text\<open>or better equivalently:\<close> value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*" -section{* Definition of a semantic function: the ``language'' of the regular expression *} +section\<open>Definition of a semantic function: the ``language'' of the regular expression\<close> text\<open> This is just a reminder - already defined in @{theory Regular_Exp} as @{term lang}.\<close> -text{* In the following, we give a semantics for our regular expressions, which so far have +text\<open>In the following, we give a semantics for our regular expressions, which so far have just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'', i.e. we give a direct meaning for regular expressions in some universe of ``denotations''. -This universe of denotations is in our concrete case: *} +This universe of denotations is in our concrete case:\<close> definition enabled :: "('a,'\<sigma> set)da \<Rightarrow> '\<sigma> set \<Rightarrow> 'a list \<Rightarrow> 'a list" where "enabled A \<sigma> = filter (\<lambda>x. 
next A x \<sigma> \<noteq> {}) " -text{* Now the denotational semantics for regular expression can be defined on a post-card: *} +text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close> fun L :: "'a rexp => 'a lang" where L_Emp : "L Zero = {}" diff --git a/RegExpInterface.thy b/RegExpInterface.thy index 24f65a4..c3f795d 100644 --- a/RegExpInterface.thy +++ b/RegExpInterface.thy @@ -37,20 +37,20 @@ definition opt :: "'a rexp \<Rightarrow> 'a rexp" ("\<lbrakk>(_)\<rbrakk>") where "\<lbrakk>A\<rbrakk> \<equiv> A || One" value "Star (Conc(Alt (Atom(CHR ''a'')) (Atom(CHR ''b''))) (Atom(CHR ''c'')))" -text{* or better equivalently: *} +text\<open>or better equivalently:\<close> value "\<lbrace>(\<lfloor>CHR ''a''\<rfloor> || \<lfloor>CHR ''b''\<rfloor>) ~~ \<lfloor>CHR ''c''\<rfloor>\<rbrace>\<^sup>*" -section{* Some Standard and Derived Semantics *} +section\<open>Some Standard and Derived Semantics\<close> text\<open> This is just a reminder - already defined in @{theory "Regular-Sets.Regular_Exp"} as @{term lang}.\<close> -text{* In the following, we give a semantics for our regular expressions, which so far have +text\<open>In the following, we give a semantics for our regular expressions, which so far have just been a term language (i.e. abstract syntax). The semantics is a ``denotational semantics'', i.e. we give a direct meaning for regular expressions in some universe of ``denotations''. -This universe of denotations is in our concrete case: *} +This universe of denotations is in our concrete case:\<close> -text{* Now the denotational semantics for regular expression can be defined on a post-card: *} +text\<open>Now the denotational semantics for regular expression can be defined on a post-card:\<close> fun L :: "'a rexp => 'a lang" where L_Emp : "L Zero = {}" diff --git a/examples/math_exam/MathExam/MathExam.thy b/examples/math_exam/MathExam/MathExam.thy index a29ed2e..6ec6eef 100644 --- a/examples/math_exam/MathExam/MathExam.thy +++ b/examples/math_exam/MathExam/MathExam.thy @@ -8,18 +8,18 @@ open_monitor*[exam::MathExam] section*[header::Header,examSubject= "[algebra]", - date="''02-05-2018''", timeAllowed="90::int"] {* Exam number 1 *} -text{* + date="''02-05-2018''", timeAllowed="90::int"] \<open>Exam number 1\<close> +text\<open> \begin{itemize} \item Use black ink or black ball-point pen. \item Draw diagrams in pencil. \item Answer all questions in the spaces provided. \end{itemize} -*} +\<close> text*[idir::Author, affiliation="''CentraleSupelec''", email="''idir.aitsadoune@centralesupelec.fr''"] - {*Idir AIT SADOUNE*} + \<open>Idir AIT SADOUNE\<close> figure*[figure::figure, spawn_columns=False, @@ -29,7 +29,7 @@ figure*[figure::figure, spawn_columns=False, subsubsection*[exo1 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 1\<close> -text{* +text\<open> Here are the first four lines of a number pattern. \begin{itemize} \item Line 1 : @{term "1*6 + 2*4 = 2*7"} @@ -37,15 +37,15 @@ Here are the first four lines of a number pattern. \item Line 3 : @{term "3*8 + 2*6 = 4*9"} \item Line 4 : @{term "4*9 + 2*7 = 5*10"} \end{itemize} -*} +\<close> declare [[show_sorts=false]] subsubsection*[exo2 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 2\<close> -text{* Find the roots of the polynome: +text\<open>Find the roots of the polynome: @{term "(x^3) - 6 * x^2 + 5 * x + 12"}. -Note the intermediate steps in the following fields and submit the solution. 
*} -text{* +Note the intermediate steps in the following fields and submit the solution.\<close> +text\<open> \begin{Form}[action={http://your-web-server.com/path/receiveform.cgi}] \begin{tabular}{l} From @{term "(x^3) - 6 * x^2 + 5 * x + 12"} \\\\ @@ -57,7 +57,7 @@ text{* \Submit{Submit}\\ \end{tabular} \end{Form} -*} +\<close> (* a bit brutal, as long as lemma* does not yet work *) (*<*) @@ -79,20 +79,20 @@ proof - qed (*>*) -text*[a1::Answer_Formal_Step]{* First Step: Fill in term and justification *} -text*[a2::Answer_Formal_Step]{* Next Step: Fill in term and justification *} -text*[a3::Answer_Formal_Step]{* Next Step: Fill in term and justification *} -text*[a4::Answer_Formal_Step]{* Next Step: Fill in term and justification *} +text*[a1::Answer_Formal_Step]\<open>First Step: Fill in term and justification\<close> +text*[a2::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close> +text*[a3::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close> +text*[a4::Answer_Formal_Step]\<open>Next Step: Fill in term and justification\<close> text*[q1::Task, local_grade="oneStar", mark="1::int", type="formal"] -{* Complete Line 10 : @{term "10*x + 2*y = 11*16"} *} +\<open>Complete Line 10 : @{term "10*x + 2*y = 11*16"}\<close> subsubsection*[exo3 :: Exercise, content="[q1::Task,q2::Task]"]\<open>Exercise 3\<close> text*[q2::Task, local_grade="threeStars", mark="3::int", type="formal"] -{* Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers +\<open>Prove that @{term "n*(n+5) + 2*(n+3) "} is always the product of two numbers with a difference of 5. -*} +\<close> (* this does not work on the level of the LaTeX output for known restrictions of the Toplevel. *) close_monitor*[exam :: MathExam] diff --git a/examples/technical_report/IsaDof_Manual/04_IsaDofImpl.thy b/examples/technical_report/IsaDof_Manual/04_IsaDofImpl.thy index 1aacde8..4176d9e 100644 --- a/examples/technical_report/IsaDof_Manual/04_IsaDofImpl.thy +++ b/examples/technical_report/IsaDof_Manual/04_IsaDofImpl.thy @@ -26,7 +26,7 @@ is based on several design-decisions: \<close> -subsection*["sec:plugins"::technical]{*Writing \isadof as User-Defined Plugin in Isabelle/Isar*} +subsection*["sec:plugins"::technical]\<open>Writing \isadof as User-Defined Plugin in Isabelle/Isar\<close> text\<open> Writing an own plugin in Isabelle starts with defining the local data and registering it in the framework. As mentioned before, contexts @@ -142,7 +142,7 @@ front-end supporting PIDE, a popup window with the text: ``declare document reference'' will appear. \<close> -subsection*["sec:prog_anti"::technical]{*Programming Text Antiquotations*} +subsection*["sec:prog_anti"::technical]\<open>Programming Text Antiquotations\<close> text\<open> As mentioned in the introduction, Isabelle/Isar is configured with a number of standard commands to annotate formal definitions and proofs diff --git a/examples/technical_report/IsaDof_Manual/05_DesignImpl.thy b/examples/technical_report/IsaDof_Manual/05_DesignImpl.thy index c183f4c..e679ac4 100644 --- a/examples/technical_report/IsaDof_Manual/05_DesignImpl.thy +++ b/examples/technical_report/IsaDof_Manual/05_DesignImpl.thy @@ -5,7 +5,7 @@ begin (*>*) chapter*[impl2::technical,main_author="Some(@{docitem ''bu''}::author)"] - {* \isadof: Design and Implementation*} + \<open>\isadof: Design and Implementation\<close> text\<open> In this section, we present the design and implementation of \isadof. 
\subsection{Document Ontology Modeling with \isadof} @@ -110,7 +110,7 @@ expression consisting of the class identifier \inlineisar+A+, \inlineisar+B+, etc. Its use is discussed in \autoref{sec:monitor-class}. \<close> -subsection*[editing::example]{*Editing a Document with Ontology-Conform Meta-Data*} +subsection*[editing::example]\<open>Editing a Document with Ontology-Conform Meta-Data\<close> text\<open> As already mentioned, Isabelle/Isar comes with a number of standard \emph{text commands} such as \inlineisar+section{* ... *}+ or @@ -193,7 +193,7 @@ referencing it, although the actual text-element will occur later in the document.\<close> -subsection*[ontolinks::technical]{*Ontology-Conform Logical Links: \isadof Antiquotations*} +subsection*[ontolinks::technical]\<open>Ontology-Conform Logical Links: \isadof Antiquotations\<close> text\<open> Up to this point, the world of the formal and the informal document parts are strictly separated. The main objective of \isadof are ways @@ -305,7 +305,7 @@ enforce that terms or theorems have a particular form or correspond to ``claims'' (contributions) listed in the introduction of the paper. \<close> -subsection*["sec:monitor-class"::technical]{*Monitor Document Classes*} +subsection*["sec:monitor-class"::technical]\<open>Monitor Document Classes\<close> text\<open> \autoref{lst:example} shows our conceptual running example in all details. While inheritance on document classes allows for structuring @@ -353,7 +353,7 @@ text\<open> \end{isar} \<close> -section{*Document Generation*} +section\<open>Document Generation\<close> text\<open> Up to know, we discussed the definition of ontologies and their representation in an interactive development environment, \ie, diff --git a/ontologies/CENELEC_50128.thy b/ontologies/CENELEC_50128.thy index a732fb1..80cc32c 100644 --- a/ontologies/CENELEC_50128.thy +++ b/ontologies/CENELEC_50128.thy @@ -388,7 +388,7 @@ doc_class TC = requirement + section\<open>Software Assurance related Entities and Concepts\<close> -text{* subcategories are : *} +text\<open>subcategories are :\<close> text\<open>Table A.13: \<close> diff --git a/ontologies/mathex_onto.thy b/ontologies/mathex_onto.thy index 7ec139c..5e71341 100644 --- a/ontologies/mathex_onto.thy +++ b/ontologies/mathex_onto.thy @@ -3,7 +3,7 @@ theory mathex_onto begin (*<<*) -text{* In our scenario, content has four different types of addressees: +text\<open>In our scenario, content has four different types of addressees: \<^item> the @{emph \<open>setter\<close>}, i.e. the author of the exam, \<^item> the @{emph \<open>student\<close>}, i.e. the addressee of the exam, \<^item> the @{emph \<open>checker\<close>}, i.e. a person that checks the exam for @@ -13,7 +13,7 @@ text{* In our scenario, content has four different types of addressees: Note that the latter quality assurance mechanism is used in many universities, where for organizational reasons the execution of an exam takes place in facilities where the author of the exam is not expected to be physically present. -*} +\<close> datatype ContentClass = @@ -76,12 +76,12 @@ doc_class Exercise = Exam_item + concerns :: "ContentClass set" <= "{setter,student,checker,externalExaminer}" -text{* In many institutions, it makes sense to have a rigorous process of validation +text\<open>In many institutions, it makes sense to have a rigorous process of validation for exam subjects : is the initial question correct ? Is a proof in the sense of the question possible ? 
We model the possibility that the @{term setter} validates a question by a sample proof validated by Isabelle. In our scenario this sample proofs are completely @{emph \<open>intern\<close>}, i.e. not exposed to the students but just additional -material for the internal review process of the exam.*} +material for the internal review process of the exam.\<close> doc_class Validation = tests :: "term list" <="[]" diff --git a/ontologies/scholarly_paper.thy b/ontologies/scholarly_paper.thy index 58e4062..a46dae1 100644 --- a/ontologies/scholarly_paper.thy +++ b/ontologies/scholarly_paper.thy @@ -1,4 +1,4 @@ -section{* An example ontology for a scholarly paper*} +section\<open>An example ontology for a scholarly paper\<close> theory scholarly_paper imports "../Isa_COL" @@ -45,7 +45,7 @@ doc_class technical = text_section + definition_list :: "string list" <= "[]" formal_results :: "thm list" -text{* A very rough formatting style could be modeled as follows:*} +text\<open>A very rough formatting style could be modeled as follows:\<close> doc_class example = text_section + diff --git a/ontologies/small_math.thy b/ontologies/small_math.thy index 0859676..a0aefe5 100644 --- a/ontologies/small_math.thy +++ b/ontologies/small_math.thy @@ -1,4 +1,4 @@ -section{* An example ontology for a math paper*} +section\<open>An example ontology for a math paper\<close> theory small_math imports "../Isa_COL" diff --git a/ontologies/technical_report.thy b/ontologies/technical_report.thy index 79a54b8..48b9929 100644 --- a/ontologies/technical_report.thy +++ b/ontologies/technical_report.thy @@ -1,4 +1,4 @@ -section{* An example ontology for a scholarly paper*} +section\<open>An example ontology for a scholarly paper\<close> theory technical_report imports "scholarly_paper" From 8953f376298410d95d3cd34c14e3da1ac5466a2e Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 21:12:40 +0100 Subject: [PATCH 23/34] Large directory restructuring. This commit restructures the file hierarchy: 1) implementation is moved into src/ directory to clean up the main directory and to make it easier for users to find the README.md. 2) ontologies (both, the Isabelle-part and the LaTeX-part) are now structured into directories. 
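The resulting top-level layout, as a rough sketch (only the parts touched by
this commit; the rename list below gives the full mapping):

    src/
      DOF/                  # Isa_DOF, Isa_COL, RegExp*, Assert*, latex/ (core styles)
      ontologies/           # one subdirectory per ontology (plus its DOF-*.sty where
                            # one exists) and a collecting ontologies.thy
      document-templates/   # root-*.tex templates
      scripts/              # build, build_lib.sh
      Tools/                # DOF_mkroot
      patches/              # Isabelle patches (thy_output.ML, thy_info.ML)
      tests/                # unit-test theories and figures
      ROOT
    examples/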
--- .ci/Jenkinsfile | 6 ++-- README.md | 2 +- ROOTS | 1 + document-generator/README.md | 26 ------------------ examples/math_exam/MathExam/MathExam.thy | 7 ++--- .../2018_cicm/IsaDofApplications.thy | 2 +- .../IsaDof_Manual/03_GuidedTour.thy | 2 +- install | 6 ++-- Assert.thy => src/DOF/Assert.thy | 0 AssertLong.thy => src/DOF/AssertLong.thy | 0 Isa_COL.thy => src/DOF/Isa_COL.thy | 0 Isa_DOF.thy => src/DOF/Isa_DOF.thy | 2 +- RegExp.thy => src/DOF/RegExp.thy | 0 .../DOF/RegExpChecker.sml | 0 .../DOF/RegExpInterface.thy | 0 .../DOF}/latex/DOF-COL.sty | 0 .../DOF}/latex/DOF-core.sty | 0 ROOT => src/ROOT | 8 ++---- {document-generator => src}/Tools/DOF_mkroot | 0 .../root-eptcs-UNSUPPORTED.tex | 0 .../root-lipics-v2019-UNSUPPORTED.tex | 0 .../document-templates/root-lncs.tex | 0 .../document-templates/root-scrartcl.tex | 0 .../document-templates/root-scrreprt.tex | 0 .../CENELEC_50128}/CENELEC_50128.thy | 4 +-- .../CENELEC_50128}/DOF-CENELEC_50128.sty | 0 .../ontologies/Conceptual}/Conceptual.thy | 2 +- .../ontologies/math_paper}/math_paper.thy | 2 +- .../ontologies/mathex}/DOF-mathex.sty | 0 .../ontologies/mathex/mathex.thy | 6 ++-- src/ontologies/ontologies.thy | 12 ++++++++ .../scholarly_paper}/DOF-scholarly_paper.sty | 0 .../scholarly_paper}/scholarly_paper.thy | 2 +- .../ontologies/small_math}/small_math.thy | 2 +- .../DOF-technical_report.sty | 0 .../technical_report}/technical_report.thy | 2 +- {patches => src/patches}/thy_info.ML | 0 {patches => src/patches}/thy_info.orig.ML | 0 {patches => src/patches}/thy_output.ML | 0 {patches => src/patches}/thy_output.orig19.ML | 0 {document-generator => src}/scripts/build | 0 .../scripts/build_lib.sh | 0 {tests => src/tests}/AssnsLemmaThmEtc.thy | 4 +-- {tests => src/tests}/Attributes.thy | 2 +- {tests => src/tests}/Concept_Example.thy | 2 +- .../tests}/Concept_ExampleInvariant.thy | 2 +- .../tests}/InnerSyntaxAntiquotations.thy | 2 +- {tests => src/tests}/figures/A.png | Bin {tests => src/tests}/figures/AnB.odp | Bin {tests => src/tests}/figures/B.png | Bin 50 files changed, 44 insertions(+), 62 deletions(-) delete mode 100644 document-generator/README.md rename Assert.thy => src/DOF/Assert.thy (100%) rename AssertLong.thy => src/DOF/AssertLong.thy (100%) rename Isa_COL.thy => src/DOF/Isa_COL.thy (100%) rename Isa_DOF.thy => src/DOF/Isa_DOF.thy (99%) rename RegExp.thy => src/DOF/RegExp.thy (100%) rename RegExpChecker.sml => src/DOF/RegExpChecker.sml (100%) rename RegExpInterface.thy => src/DOF/RegExpInterface.thy (100%) rename {document-generator => src/DOF}/latex/DOF-COL.sty (100%) rename {document-generator => src/DOF}/latex/DOF-core.sty (100%) rename ROOT => src/ROOT (70%) rename {document-generator => src}/Tools/DOF_mkroot (100%) rename {document-generator => src}/document-templates/root-eptcs-UNSUPPORTED.tex (100%) rename {document-generator => src}/document-templates/root-lipics-v2019-UNSUPPORTED.tex (100%) rename {document-generator => src}/document-templates/root-lncs.tex (100%) rename {document-generator => src}/document-templates/root-scrartcl.tex (100%) rename {document-generator => src}/document-templates/root-scrreprt.tex (100%) rename {ontologies => src/ontologies/CENELEC_50128}/CENELEC_50128.thy (99%) rename {document-generator/latex => src/ontologies/CENELEC_50128}/DOF-CENELEC_50128.sty (100%) rename {ontologies => src/ontologies/Conceptual}/Conceptual.thy (98%) rename {ontologies => src/ontologies/math_paper}/math_paper.thy (99%) rename {document-generator/latex => src/ontologies/mathex}/DOF-mathex.sty (100%) rename 
ontologies/mathex_onto.thy => src/ontologies/mathex/mathex.thy (98%) create mode 100644 src/ontologies/ontologies.thy rename {document-generator/latex => src/ontologies/scholarly_paper}/DOF-scholarly_paper.sty (100%) rename {ontologies => src/ontologies/scholarly_paper}/scholarly_paper.thy (99%) rename {ontologies => src/ontologies/small_math}/small_math.thy (99%) rename {document-generator/latex => src/ontologies/technical_report}/DOF-technical_report.sty (100%) rename {ontologies => src/ontologies/technical_report}/technical_report.thy (95%) rename {patches => src/patches}/thy_info.ML (100%) rename {patches => src/patches}/thy_info.orig.ML (100%) rename {patches => src/patches}/thy_output.ML (100%) rename {patches => src/patches}/thy_output.orig19.ML (100%) rename {document-generator => src}/scripts/build (100%) rename {document-generator => src}/scripts/build_lib.sh (100%) rename {tests => src/tests}/AssnsLemmaThmEtc.thy (95%) rename {tests => src/tests}/Attributes.thy (99%) rename {tests => src/tests}/Concept_Example.thy (96%) rename {tests => src/tests}/Concept_ExampleInvariant.thy (98%) rename {tests => src/tests}/InnerSyntaxAntiquotations.thy (98%) rename {tests => src/tests}/figures/A.png (100%) rename {tests => src/tests}/figures/AnB.odp (100%) rename {tests => src/tests}/figures/B.png (100%) diff --git a/.ci/Jenkinsfile b/.ci/Jenkinsfile index 17a067e..c96dbcf 100644 --- a/.ci/Jenkinsfile +++ b/.ci/Jenkinsfile @@ -4,15 +4,15 @@ pipeline { stages { stage('Build Docker') { steps { - sh 'cp patches/thy_output.ML .ci/isabelle4isadof/' + sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/' sh 'docker build -t logicalhacking:isabelle4dof .ci/isabelle4isadof' sh 'rm -f .ci/isabelle4isadof/thy_output.ML' } } stage('Check Docker') { - when { changeset "patches/*" } + when { changeset "src/patches/*" } steps { - sh 'cp patches/thy_output.ML .ci/isabelle4isadof/' + sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/' sh 'docker build --no-cache -t logicalhacking:isabelle4dof .ci/isabelle4isadof' sh 'rm -f .ci/isabelle4isadof/thy_output.ML' } diff --git a/README.md b/README.md index 52f4ee8..c16e2f1 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ The installer will * apply a patch to Isabelle that is necessary to use Isabelle/DOF. If this patch installations fails, you need to manually replace the file ``Isabelle2019/src/Pure/Thy/thy_output.ML`` in the Isabelle - distribution with the file ``patches/thy_output.ML`` from the + distribution with the file ``src/patches/thy_output.ML`` from the Isabelle/DOF distribution: ```console cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/ diff --git a/ROOTS b/ROOTS index 1e107f5..9458ec8 100644 --- a/ROOTS +++ b/ROOTS @@ -1 +1,2 @@ +src examples diff --git a/document-generator/README.md b/document-generator/README.md deleted file mode 100644 index cbe0912..0000000 --- a/document-generator/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# Isabelle_DOF: Document Preparation Setup - -This directory contains the LaTeX setup for Isabelle's -document generation system. - - -## Tips and Tricks - -During debugging of LaTeX errors, it can be very helpful to use -more than 79 characters for error messages (otherwise, long errors -are truncated)" -``` bash -max_print_line=200 error_line=200 half_error_line=100 pdflatex root.tex -``` - -## Team - -Main contacts: -* [Achim D. Brucker](http://www.brucker.ch/) -* [Burkhart Wolff](https://www.lri.fr/~wolff/) - -## License - -This project is licensed under a 2-clause BSD license. 
- -SPDX-License-Identifier: BSD-2-Clause diff --git a/examples/math_exam/MathExam/MathExam.thy b/examples/math_exam/MathExam/MathExam.thy index 6ec6eef..9772699 100644 --- a/examples/math_exam/MathExam/MathExam.thy +++ b/examples/math_exam/MathExam/MathExam.thy @@ -1,11 +1,10 @@ (*<*) theory MathExam - imports "Isabelle_DOF.mathex_onto" + imports "Isabelle_DOF.mathex" HOL.Real begin (*>*) -open_monitor*[exam::MathExam] - +(* open_monitor*[exam::MathExam] *) section*[header::Header,examSubject= "[algebra]", date="''02-05-2018''", timeAllowed="90::int"] \<open>Exam number 1\<close> @@ -94,6 +93,6 @@ text*[q2::Task, local_grade="threeStars", mark="3::int", type="formal"] with a difference of 5. \<close> (* this does not work on the level of the LaTeX output for known restrictions of the Toplevel. *) -close_monitor*[exam :: MathExam] +(* close_monitor*[exam :: MathExam] *) end diff --git a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy index 05f53fb..e03b56a 100644 --- a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy +++ b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy @@ -389,7 +389,7 @@ text\<open> The document class \inlineisar+figure+ --- supported by the \isadof such as @{docitem_ref \<open>fig_figures\<close>}. \<close> -subsection*[mathex_onto::example]\<open> The Math-Exam Scenario \<close> +subsection*[mathex::example]\<open> The Math-Exam Scenario \<close> text\<open> The Math-Exam Scenario is an application with mixed formal and semi-formal content. It addresses applications where the author of the exam is not present during the exam and the preparation requires a very rigorous process, as the french diff --git a/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy b/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy index f3934dd..e5587aa 100644 --- a/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy +++ b/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy @@ -251,7 +251,7 @@ doc_class srac = ec + \end{isar} \<close> -section*[mathex_onto::example]\<open> The Math-Exam Scenario \<close> +section*[mathex::example]\<open> The Math-Exam Scenario \<close> text\<open> The Math-Exam Scenario is an application with mixed formal and semi-formal content. 
It addresses applications where the author of the exam is not present during the exam and the preparation requires a very rigorous process, as the french diff --git a/install b/install index 1189658..63aa5a9 100755 --- a/install +++ b/install @@ -125,7 +125,7 @@ check_afp_entries() { check_isa_dof_patch() { echo "* Check availability of Isabelle/DOF patch:" - src="patches/thy_output.ML" + src="src/patches/thy_output.ML" dst="$ISABELLE_HOME/src/Pure/Thy/thy_output.ML" if command -v cmp > /dev/null 2>&1 && cmp -s "$src" "$dst" ; then @@ -185,7 +185,7 @@ install_and_register(){ DIR="$ISABELLE_HOME_USER/DOF/latex" echo " - Installing LaTeX styles in $DIR" mkdir -p "$DIR" - cp $GEN_DIR/latex/*.sty "$DIR" + cp $GEN_DIR/*/*/*.sty "$DIR" DIR="$ISABELLE_HOME_USER/etc" echo " - Registering Isabelle/DOF" @@ -221,7 +221,7 @@ done ACTUAL_ISABELLE_VERSION=`$ISABELLE version` -GEN_DIR=document-generator +GEN_DIR=src PROG=`echo $0 | sed 's|.*/||'`; VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS` for i in $VARS; do diff --git a/Assert.thy b/src/DOF/Assert.thy similarity index 100% rename from Assert.thy rename to src/DOF/Assert.thy diff --git a/AssertLong.thy b/src/DOF/AssertLong.thy similarity index 100% rename from AssertLong.thy rename to src/DOF/AssertLong.thy diff --git a/Isa_COL.thy b/src/DOF/Isa_COL.thy similarity index 100% rename from Isa_COL.thy rename to src/DOF/Isa_COL.thy diff --git a/Isa_DOF.thy b/src/DOF/Isa_DOF.thy similarity index 99% rename from Isa_DOF.thy rename to src/DOF/Isa_DOF.thy index f503469..c88f90d 100644 --- a/Isa_DOF.thy +++ b/src/DOF/Isa_DOF.thy @@ -1525,7 +1525,7 @@ end (* the following 2 lines set parser and converter for LaTeX generation of meta-attributes. Currently of *all* commands, no distinction between text* and text command. This code depends on a MODIFIED Isabelle2017 version resulting from applying the files - under Isabell_DOF/patches. + under src/patches. *) (* REMARK PORT 2018 : transmission of meta-args to LaTeX crude and untested. Can be found in present_token. 
*) diff --git a/RegExp.thy b/src/DOF/RegExp.thy similarity index 100% rename from RegExp.thy rename to src/DOF/RegExp.thy diff --git a/RegExpChecker.sml b/src/DOF/RegExpChecker.sml similarity index 100% rename from RegExpChecker.sml rename to src/DOF/RegExpChecker.sml diff --git a/RegExpInterface.thy b/src/DOF/RegExpInterface.thy similarity index 100% rename from RegExpInterface.thy rename to src/DOF/RegExpInterface.thy diff --git a/document-generator/latex/DOF-COL.sty b/src/DOF/latex/DOF-COL.sty similarity index 100% rename from document-generator/latex/DOF-COL.sty rename to src/DOF/latex/DOF-COL.sty diff --git a/document-generator/latex/DOF-core.sty b/src/DOF/latex/DOF-core.sty similarity index 100% rename from document-generator/latex/DOF-core.sty rename to src/DOF/latex/DOF-core.sty diff --git a/ROOT b/src/ROOT similarity index 70% rename from ROOT rename to src/ROOT index da826df..fbd6e31 100644 --- a/ROOT +++ b/src/ROOT @@ -3,12 +3,8 @@ session "Isabelle_DOF" = "Functional-Automata" + sessions "Regular-Sets" theories - Isa_DOF - "ontologies/Conceptual" - "ontologies/CENELEC_50128" - "ontologies/scholarly_paper" - "ontologies/technical_report" - "ontologies/mathex_onto" + "DOF/Isa_DOF" + "ontologies/ontologies" session "Isabelle_DOF-tests" = "Isabelle_DOF" + options [document = false] diff --git a/document-generator/Tools/DOF_mkroot b/src/Tools/DOF_mkroot similarity index 100% rename from document-generator/Tools/DOF_mkroot rename to src/Tools/DOF_mkroot diff --git a/document-generator/document-templates/root-eptcs-UNSUPPORTED.tex b/src/document-templates/root-eptcs-UNSUPPORTED.tex similarity index 100% rename from document-generator/document-templates/root-eptcs-UNSUPPORTED.tex rename to src/document-templates/root-eptcs-UNSUPPORTED.tex diff --git a/document-generator/document-templates/root-lipics-v2019-UNSUPPORTED.tex b/src/document-templates/root-lipics-v2019-UNSUPPORTED.tex similarity index 100% rename from document-generator/document-templates/root-lipics-v2019-UNSUPPORTED.tex rename to src/document-templates/root-lipics-v2019-UNSUPPORTED.tex diff --git a/document-generator/document-templates/root-lncs.tex b/src/document-templates/root-lncs.tex similarity index 100% rename from document-generator/document-templates/root-lncs.tex rename to src/document-templates/root-lncs.tex diff --git a/document-generator/document-templates/root-scrartcl.tex b/src/document-templates/root-scrartcl.tex similarity index 100% rename from document-generator/document-templates/root-scrartcl.tex rename to src/document-templates/root-scrartcl.tex diff --git a/document-generator/document-templates/root-scrreprt.tex b/src/document-templates/root-scrreprt.tex similarity index 100% rename from document-generator/document-templates/root-scrreprt.tex rename to src/document-templates/root-scrreprt.tex diff --git a/ontologies/CENELEC_50128.thy b/src/ontologies/CENELEC_50128/CENELEC_50128.thy similarity index 99% rename from ontologies/CENELEC_50128.thy rename to src/ontologies/CENELEC_50128/CENELEC_50128.thy index 80cc32c..46cbfe8 100644 --- a/ontologies/CENELEC_50128.thy +++ b/src/ontologies/CENELEC_50128/CENELEC_50128.thy @@ -12,7 +12,7 @@ identifies: (*<<*) theory CENELEC_50128 - imports "../Isa_COL" + imports "../../DOF/Isa_COL" begin (*>>*) @@ -726,4 +726,4 @@ Proof_Context.init_global; \<close> end - \ No newline at end of file + diff --git a/document-generator/latex/DOF-CENELEC_50128.sty b/src/ontologies/CENELEC_50128/DOF-CENELEC_50128.sty similarity index 100% rename from 
document-generator/latex/DOF-CENELEC_50128.sty rename to src/ontologies/CENELEC_50128/DOF-CENELEC_50128.sty diff --git a/ontologies/Conceptual.thy b/src/ontologies/Conceptual/Conceptual.thy similarity index 98% rename from ontologies/Conceptual.thy rename to src/ontologies/Conceptual/Conceptual.thy index 4b7d21c..4fcbc64 100644 --- a/ontologies/Conceptual.thy +++ b/src/ontologies/Conceptual/Conceptual.thy @@ -1,5 +1,5 @@ theory Conceptual - imports "../Isa_DOF" "../Isa_COL" + imports "../../DOF/Isa_DOF" "../../DOF/Isa_COL" begin doc_class A = diff --git a/ontologies/math_paper.thy b/src/ontologies/math_paper/math_paper.thy similarity index 99% rename from ontologies/math_paper.thy rename to src/ontologies/math_paper/math_paper.thy index 649ba5b..22a2bce 100644 --- a/ontologies/math_paper.thy +++ b/src/ontologies/math_paper/math_paper.thy @@ -12,7 +12,7 @@ proving environment after all ! So this ontology provides: theory math_paper - imports "../Isa_DOF" + imports "../../DOF/Isa_DOF" begin diff --git a/document-generator/latex/DOF-mathex.sty b/src/ontologies/mathex/DOF-mathex.sty similarity index 100% rename from document-generator/latex/DOF-mathex.sty rename to src/ontologies/mathex/DOF-mathex.sty diff --git a/ontologies/mathex_onto.thy b/src/ontologies/mathex/mathex.thy similarity index 98% rename from ontologies/mathex_onto.thy rename to src/ontologies/mathex/mathex.thy index 5e71341..fc938dd 100644 --- a/ontologies/mathex_onto.thy +++ b/src/ontologies/mathex/mathex.thy @@ -1,5 +1,5 @@ -theory mathex_onto - imports "../Isa_COL" +theory mathex + imports "../../DOF/Isa_COL" begin (*<<*) @@ -98,4 +98,4 @@ doc_class MathExam = accepts "\<lbrace>Author\<rbrace>\<^sup>+ ~~ Header ~~ \<lbrace>Exercise ~~ Solution\<rbrace>\<^sup>+ " (*>>*) -end \ No newline at end of file +end diff --git a/src/ontologies/ontologies.thy b/src/ontologies/ontologies.thy new file mode 100644 index 0000000..aede5e8 --- /dev/null +++ b/src/ontologies/ontologies.thy @@ -0,0 +1,12 @@ +theory + ontologies +imports + "CENELEC_50128/CENELEC_50128" + "Conceptual/Conceptual" + "mathex/mathex" + "math_paper/math_paper" + "scholarly_paper/scholarly_paper" + "small_math/small_math" + "technical_report/technical_report" +begin +end diff --git a/document-generator/latex/DOF-scholarly_paper.sty b/src/ontologies/scholarly_paper/DOF-scholarly_paper.sty similarity index 100% rename from document-generator/latex/DOF-scholarly_paper.sty rename to src/ontologies/scholarly_paper/DOF-scholarly_paper.sty diff --git a/ontologies/scholarly_paper.thy b/src/ontologies/scholarly_paper/scholarly_paper.thy similarity index 99% rename from ontologies/scholarly_paper.thy rename to src/ontologies/scholarly_paper/scholarly_paper.thy index a46dae1..7c46d17 100644 --- a/ontologies/scholarly_paper.thy +++ b/src/ontologies/scholarly_paper/scholarly_paper.thy @@ -1,7 +1,7 @@ section\<open>An example ontology for a scholarly paper\<close> theory scholarly_paper - imports "../Isa_COL" + imports "../../DOF/Isa_COL" begin doc_class title = diff --git a/ontologies/small_math.thy b/src/ontologies/small_math/small_math.thy similarity index 99% rename from ontologies/small_math.thy rename to src/ontologies/small_math/small_math.thy index a0aefe5..b2759d8 100644 --- a/ontologies/small_math.thy +++ b/src/ontologies/small_math/small_math.thy @@ -1,7 +1,7 @@ section\<open>An example ontology for a math paper\<close> theory small_math - imports "../Isa_COL" + imports "../../DOF/Isa_COL" begin doc_class title = diff --git 
a/document-generator/latex/DOF-technical_report.sty b/src/ontologies/technical_report/DOF-technical_report.sty similarity index 100% rename from document-generator/latex/DOF-technical_report.sty rename to src/ontologies/technical_report/DOF-technical_report.sty diff --git a/ontologies/technical_report.thy b/src/ontologies/technical_report/technical_report.thy similarity index 95% rename from ontologies/technical_report.thy rename to src/ontologies/technical_report/technical_report.thy index 48b9929..5204553 100644 --- a/ontologies/technical_report.thy +++ b/src/ontologies/technical_report/technical_report.thy @@ -1,7 +1,7 @@ section\<open>An example ontology for a scholarly paper\<close> theory technical_report - imports "scholarly_paper" + imports "../scholarly_paper/scholarly_paper" begin (* for reports paper: invariant: level \<ge> -1 *) diff --git a/patches/thy_info.ML b/src/patches/thy_info.ML similarity index 100% rename from patches/thy_info.ML rename to src/patches/thy_info.ML diff --git a/patches/thy_info.orig.ML b/src/patches/thy_info.orig.ML similarity index 100% rename from patches/thy_info.orig.ML rename to src/patches/thy_info.orig.ML diff --git a/patches/thy_output.ML b/src/patches/thy_output.ML similarity index 100% rename from patches/thy_output.ML rename to src/patches/thy_output.ML diff --git a/patches/thy_output.orig19.ML b/src/patches/thy_output.orig19.ML similarity index 100% rename from patches/thy_output.orig19.ML rename to src/patches/thy_output.orig19.ML diff --git a/document-generator/scripts/build b/src/scripts/build similarity index 100% rename from document-generator/scripts/build rename to src/scripts/build diff --git a/document-generator/scripts/build_lib.sh b/src/scripts/build_lib.sh similarity index 100% rename from document-generator/scripts/build_lib.sh rename to src/scripts/build_lib.sh diff --git a/tests/AssnsLemmaThmEtc.thy b/src/tests/AssnsLemmaThmEtc.thy similarity index 95% rename from tests/AssnsLemmaThmEtc.thy rename to src/tests/AssnsLemmaThmEtc.thy index 4cc0e85..ef640b1 100644 --- a/tests/AssnsLemmaThmEtc.thy +++ b/src/tests/AssnsLemmaThmEtc.thy @@ -1,8 +1,8 @@ theory AssnsLemmaThmEtc imports - "../ontologies/Conceptual" - "../ontologies/math_paper" + "../ontologies/Conceptual/Conceptual" + "../ontologies/math_paper/math_paper" begin section\<open>Elementary Creation of Doc-items and Access of their Attibutes\<close> diff --git a/tests/Attributes.thy b/src/tests/Attributes.thy similarity index 99% rename from tests/Attributes.thy rename to src/tests/Attributes.thy index de62802..b32fefe 100644 --- a/tests/Attributes.thy +++ b/src/tests/Attributes.thy @@ -1,7 +1,7 @@ theory Attributes imports - "../ontologies/Conceptual" + "../ontologies/Conceptual/Conceptual" begin section\<open>Elementary Creation of Doc-items and Access of their Attibutes\<close> diff --git a/tests/Concept_Example.thy b/src/tests/Concept_Example.thy similarity index 96% rename from tests/Concept_Example.thy rename to src/tests/Concept_Example.thy index 563bcce..332270a 100644 --- a/tests/Concept_Example.thy +++ b/src/tests/Concept_Example.thy @@ -3,7 +3,7 @@ chapter\<open>Setting and modifying attributes of doc-items\<close> theory Concept_Example imports - "../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) + "../ontologies/Conceptual/Conceptual" (* we use the generic "Conceptual" ontology *) begin text\<open>@{theory "Isabelle_DOF-tests.Conceptual"} provides a monitor @{typ M} enforcing a particular document structure. 
diff --git a/tests/Concept_ExampleInvariant.thy b/src/tests/Concept_ExampleInvariant.thy similarity index 98% rename from tests/Concept_ExampleInvariant.thy rename to src/tests/Concept_ExampleInvariant.thy index 8987834..1046a7e 100644 --- a/tests/Concept_ExampleInvariant.thy +++ b/src/tests/Concept_ExampleInvariant.thy @@ -3,7 +3,7 @@ chapter\<open>Setting and modifying attributes of doc-items\<close> theory Concept_ExampleInvariant imports - "../ontologies/Conceptual" (* we use the generic "Conceptual" ontology *) + "../ontologies/Conceptual/Conceptual" (* we use the generic "Conceptual" ontology *) begin section\<open>Example: Standard Class Invariant\<close> diff --git a/tests/InnerSyntaxAntiquotations.thy b/src/tests/InnerSyntaxAntiquotations.thy similarity index 98% rename from tests/InnerSyntaxAntiquotations.thy rename to src/tests/InnerSyntaxAntiquotations.thy index 7ee6c38..0cd5fef 100644 --- a/tests/InnerSyntaxAntiquotations.thy +++ b/src/tests/InnerSyntaxAntiquotations.thy @@ -3,7 +3,7 @@ chapter\<open>Inner Syntax Antiquotations (ISA)'s\<close> theory InnerSyntaxAntiquotations imports - "../ontologies/Conceptual" + "../ontologies/Conceptual/Conceptual" begin text\<open>Since the syntax chosen for values of doc-class attributes is HOL-syntax --- requiring diff --git a/tests/figures/A.png b/src/tests/figures/A.png similarity index 100% rename from tests/figures/A.png rename to src/tests/figures/A.png diff --git a/tests/figures/AnB.odp b/src/tests/figures/AnB.odp similarity index 100% rename from tests/figures/AnB.odp rename to src/tests/figures/AnB.odp diff --git a/tests/figures/B.png b/src/tests/figures/B.png similarity index 100% rename from tests/figures/B.png rename to src/tests/figures/B.png From 6fd22a071f03aebea1819d1465da0426a0edb2f2 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 21:51:55 +0100 Subject: [PATCH 24/34] Resolved naming inconsistency (mathex_onto vs. math_exam). 
--- examples/math_exam/MathExam/MathExam.thy | 2 +- examples/math_exam/MathExam/document/isadof.cfg | 2 +- examples/scholarly_paper/2018_cicm/IsaDofApplications.thy | 2 +- examples/technical_report/IsaDof_Manual/03_GuidedTour.thy | 2 +- .../{mathex/DOF-mathex.sty => math_exam/DOF-math_exam.sty} | 2 +- src/ontologies/{mathex/mathex.thy => math_exam/math_exam.thy} | 2 +- src/ontologies/ontologies.thy | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) rename src/ontologies/{mathex/DOF-mathex.sty => math_exam/DOF-math_exam.sty} (98%) rename src/ontologies/{mathex/mathex.thy => math_exam/math_exam.thy} (99%) diff --git a/examples/math_exam/MathExam/MathExam.thy b/examples/math_exam/MathExam/MathExam.thy index 9772699..0699adf 100644 --- a/examples/math_exam/MathExam/MathExam.thy +++ b/examples/math_exam/MathExam/MathExam.thy @@ -1,6 +1,6 @@ (*<*) theory MathExam - imports "Isabelle_DOF.mathex" + imports "Isabelle_DOF.math_exam" HOL.Real begin (*>*) diff --git a/examples/math_exam/MathExam/document/isadof.cfg b/examples/math_exam/MathExam/document/isadof.cfg index f13f22b..3516c38 100644 --- a/examples/math_exam/MathExam/document/isadof.cfg +++ b/examples/math_exam/MathExam/document/isadof.cfg @@ -1,2 +1,2 @@ Template: scrartcl -Ontology: mathex +Ontology: math_exam diff --git a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy index e03b56a..a9d7f65 100644 --- a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy +++ b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy @@ -389,7 +389,7 @@ text\<open> The document class \inlineisar+figure+ --- supported by the \isadof such as @{docitem_ref \<open>fig_figures\<close>}. \<close> -subsection*[mathex::example]\<open> The Math-Exam Scenario \<close> +subsection*[math_exam::example]\<open> The Math-Exam Scenario \<close> text\<open> The Math-Exam Scenario is an application with mixed formal and semi-formal content. It addresses applications where the author of the exam is not present during the exam and the preparation requires a very rigorous process, as the french diff --git a/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy b/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy index e5587aa..8c98821 100644 --- a/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy +++ b/examples/technical_report/IsaDof_Manual/03_GuidedTour.thy @@ -251,7 +251,7 @@ doc_class srac = ec + \end{isar} \<close> -section*[mathex::example]\<open> The Math-Exam Scenario \<close> +section*[math_exam::example]\<open> The Math-Exam Scenario \<close> text\<open> The Math-Exam Scenario is an application with mixed formal and semi-formal content. It addresses applications where the author of the exam is not present during the exam and the preparation requires a very rigorous process, as the french diff --git a/src/ontologies/mathex/DOF-mathex.sty b/src/ontologies/math_exam/DOF-math_exam.sty similarity index 98% rename from src/ontologies/mathex/DOF-mathex.sty rename to src/ontologies/math_exam/DOF-math_exam.sty index 48b0b85..1212629 100644 --- a/src/ontologies/mathex/DOF-mathex.sty +++ b/src/ontologies/math_exam/DOF-math_exam.sty @@ -12,7 +12,7 @@ %% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause \NeedsTeXFormat{LaTeX2e}\relax -\ProvidesPackage{DOF-mathex} +\ProvidesPackage{DOF-math_exam} [0000/00/00 Unreleased v0.0.0+% Document-Type Support Framework for math classes.] 
diff --git a/src/ontologies/mathex/mathex.thy b/src/ontologies/math_exam/math_exam.thy similarity index 99% rename from src/ontologies/mathex/mathex.thy rename to src/ontologies/math_exam/math_exam.thy index fc938dd..be32c2a 100644 --- a/src/ontologies/mathex/mathex.thy +++ b/src/ontologies/math_exam/math_exam.thy @@ -1,4 +1,4 @@ -theory mathex +theory math_exam imports "../../DOF/Isa_COL" begin diff --git a/src/ontologies/ontologies.thy b/src/ontologies/ontologies.thy index aede5e8..7cc6ee4 100644 --- a/src/ontologies/ontologies.thy +++ b/src/ontologies/ontologies.thy @@ -3,7 +3,7 @@ theory imports "CENELEC_50128/CENELEC_50128" "Conceptual/Conceptual" - "mathex/mathex" + "math_exam/math_exam" "math_paper/math_paper" "scholarly_paper/scholarly_paper" "small_math/small_math" From 8148b7fc381880c8a49eb4dbd9a6882f90787195 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 22:00:40 +0100 Subject: [PATCH 25/34] Improved help message. --- install | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install b/install index 63aa5a9..321b999 100755 --- a/install +++ b/install @@ -47,8 +47,8 @@ print_help() echo " --isabelle, -i isabelle isabelle command used for installation" echo " (default: $ISABELLE)" echo " --skip-patch-and-afp, -s skip installation of Isabelle/DOF patch for" - echo " Isabelle and required AFP entries. USE AT" - echo " YOUR OWN RISK (default: $SKIP)" + echo " Isabelle and required AFP entries. " + echo " USE AT YOUR OWN RISK (default: $SKIP)" } From 869f1615cdaa29b47a84f7f4555feb8f1e8e9c6c Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 22:30:31 +0100 Subject: [PATCH 26/34] Remove old output directories. --- .ci/Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/.ci/Jenkinsfile b/.ci/Jenkinsfile index c96dbcf..a0162e3 100644 --- a/.ci/Jenkinsfile +++ b/.ci/Jenkinsfile @@ -19,6 +19,7 @@ pipeline { } stage('Build Isabelle/DOF') { steps { + sh 'find -type d -name "output" -exec rm -rf {} \\; || true' sh 'docker run -v $PWD:/DOF logicalhacking:isabelle4dof sh -c "cd /DOF && ./install && isabelle build -D ."' } } From 92f8fa5c258da1f3cd71704055a2c314b4a4e24d Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sat, 20 Jul 2019 22:39:06 +0100 Subject: [PATCH 27/34] Fixed naming inconsistency (cenelec vs. CENELEC_50128). 
--- examples/{cenelec => CENELEC_50128}/ROOTS | 0 examples/{cenelec => CENELEC_50128}/mini_odo/ROOT | 0 examples/{cenelec => CENELEC_50128}/mini_odo/document/build | 0 .../{cenelec => CENELEC_50128}/mini_odo/document/isadof.cfg | 0 .../{cenelec => CENELEC_50128}/mini_odo/document/preamble.tex | 0 examples/{cenelec => CENELEC_50128}/mini_odo/mini_odo.thy | 0 examples/ROOTS | 2 +- 7 files changed, 1 insertion(+), 1 deletion(-) rename examples/{cenelec => CENELEC_50128}/ROOTS (100%) rename examples/{cenelec => CENELEC_50128}/mini_odo/ROOT (100%) rename examples/{cenelec => CENELEC_50128}/mini_odo/document/build (100%) rename examples/{cenelec => CENELEC_50128}/mini_odo/document/isadof.cfg (100%) rename examples/{cenelec => CENELEC_50128}/mini_odo/document/preamble.tex (100%) rename examples/{cenelec => CENELEC_50128}/mini_odo/mini_odo.thy (100%) diff --git a/examples/cenelec/ROOTS b/examples/CENELEC_50128/ROOTS similarity index 100% rename from examples/cenelec/ROOTS rename to examples/CENELEC_50128/ROOTS diff --git a/examples/cenelec/mini_odo/ROOT b/examples/CENELEC_50128/mini_odo/ROOT similarity index 100% rename from examples/cenelec/mini_odo/ROOT rename to examples/CENELEC_50128/mini_odo/ROOT diff --git a/examples/cenelec/mini_odo/document/build b/examples/CENELEC_50128/mini_odo/document/build similarity index 100% rename from examples/cenelec/mini_odo/document/build rename to examples/CENELEC_50128/mini_odo/document/build diff --git a/examples/cenelec/mini_odo/document/isadof.cfg b/examples/CENELEC_50128/mini_odo/document/isadof.cfg similarity index 100% rename from examples/cenelec/mini_odo/document/isadof.cfg rename to examples/CENELEC_50128/mini_odo/document/isadof.cfg diff --git a/examples/cenelec/mini_odo/document/preamble.tex b/examples/CENELEC_50128/mini_odo/document/preamble.tex similarity index 100% rename from examples/cenelec/mini_odo/document/preamble.tex rename to examples/CENELEC_50128/mini_odo/document/preamble.tex diff --git a/examples/cenelec/mini_odo/mini_odo.thy b/examples/CENELEC_50128/mini_odo/mini_odo.thy similarity index 100% rename from examples/cenelec/mini_odo/mini_odo.thy rename to examples/CENELEC_50128/mini_odo/mini_odo.thy diff --git a/examples/ROOTS b/examples/ROOTS index 22b288d..1adbab0 100644 --- a/examples/ROOTS +++ b/examples/ROOTS @@ -1,4 +1,4 @@ scholarly_paper technical_report math_exam -cenelec +CENELEC_50128 From ee574cdf99085d3c21abe77f9be861087cf84a02 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 09:43:36 +0100 Subject: [PATCH 28/34] Removed non-supported sty-file generation. 
--- src/ontologies/scholarly_paper/scholarly_paper.thy | 4 ---- src/ontologies/small_math/small_math.thy | 5 ----- 2 files changed, 9 deletions(-) diff --git a/src/ontologies/scholarly_paper/scholarly_paper.thy b/src/ontologies/scholarly_paper/scholarly_paper.thy index 7c46d17..e714c72 100644 --- a/src/ontologies/scholarly_paper/scholarly_paper.thy +++ b/src/ontologies/scholarly_paper/scholarly_paper.thy @@ -185,9 +185,5 @@ fun check_group a = map (check_group_elem (check_level_hd (hd a))) (tl a) ; *) \<close> - -gen_sty_template - - end diff --git a/src/ontologies/small_math/small_math.thy b/src/ontologies/small_math/small_math.thy index b2759d8..e2079bc 100644 --- a/src/ontologies/small_math/small_math.thy +++ b/src/ontologies/small_math/small_math.thy @@ -156,10 +156,5 @@ setup\<open> let val cidS = ["small_math.introduction","small_math.technical", " true) in DOF_core.update_class_invariant "small_math.article" body end\<close> - - -gen_sty_template - - end From 0c158450b631656ef2ed901dc97547334f9b7858 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 09:57:51 +0100 Subject: [PATCH 29/34] Updated copyright information. --- src/scripts/build | 5 +++-- src/scripts/build_lib.sh | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/scripts/build b/src/scripts/build index da171c0..7c5ed28 100755 --- a/src/scripts/build +++ b/src/scripts/build @@ -1,6 +1,7 @@ #!/usr/bin/env bash -# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved. -# 2018 The University of Paris-Sud. All rights reserved. +# Copyright (c) 2019 University of Exeter +# 2018-2019 University of Paris-Sud +# 2018-2019 The University of Sheffield # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions diff --git a/src/scripts/build_lib.sh b/src/scripts/build_lib.sh index 9f54e23..828935b 100755 --- a/src/scripts/build_lib.sh +++ b/src/scripts/build_lib.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash -# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved. -# 2018 The University of Paris-Sud. All rights reserved. +# Copyright (c) 2019 University of Exeter +# 2018-2019 University of Paris-Sud +# 2018-2019 The University of Sheffield # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions From df3e3bd3c350fe48b10c550e2a5f387592df37d2 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 09:58:41 +0100 Subject: [PATCH 30/34] Updated copyright information. --- src/Tools/DOF_mkroot | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Tools/DOF_mkroot b/src/Tools/DOF_mkroot index d2d6db0..d92f220 100755 --- a/src/Tools/DOF_mkroot +++ b/src/Tools/DOF_mkroot @@ -1,6 +1,7 @@ #/usr/bin/env bash -# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved. -# 2018 The University of Paris-Sud. All rights reserved. +# Copyright (c) 2019 University of Exeter +# 2018-2019 University of Paris-Sud +# 2018-2019 The University of Sheffield # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions From 5e59cf737bbc6fdaad8565370f75b72f7b6a8fe3 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 10:13:51 +0100 Subject: [PATCH 31/34] Updated descriptions of templates. 
--- .../root-eptcs-UNSUPPORTED.tex | 12 ++++++++---- .../root-lipics-v2019-UNSUPPORTED.tex | 16 +++++++++++----- src/document-templates/root-lncs.tex | 16 +++++++++++----- src/document-templates/root-scrartcl.tex | 13 +++++++------ src/document-templates/root-scrreprt.tex | 13 +++++++------ 5 files changed, 44 insertions(+), 26 deletions(-) diff --git a/src/document-templates/root-eptcs-UNSUPPORTED.tex b/src/document-templates/root-eptcs-UNSUPPORTED.tex index bcc5047..bd8ef96 100644 --- a/src/document-templates/root-eptcs-UNSUPPORTED.tex +++ b/src/document-templates/root-eptcs-UNSUPPORTED.tex @@ -1,6 +1,6 @@ -%% Copyright (C) 2018 The University of Sheffield -%% 2018 The University of Paris-Sud -%% 2019 The University of Exeter +%% Copyright (c) 2019 University of Exeter +%% 2018-2019 University of Paris-Sud +%% 2018-2019 The University of Sheffield %% %% License: %% This program can be redistributed and/or modified under the terms @@ -14,7 +14,11 @@ %% Warning: Do Not Edit! %% ===================== -%% This is the root file for the Isabelle/DOF using +%% This is the root file for the Isabelle/DOF using the eptcs class. +%% Note that eptcs cannot be distributed as part of Isabelle/DOF; you need +%% to download eptcs.cls from http://eptcs.web.cse.unsw.edu.au/style.shtml +%% and add it manually to the praemble.tex and the ROOT file. +%% %% All customization and/or additional packages should be added to the file %% preamble.tex. diff --git a/src/document-templates/root-lipics-v2019-UNSUPPORTED.tex b/src/document-templates/root-lipics-v2019-UNSUPPORTED.tex index c347f52..a0bfc86 100644 --- a/src/document-templates/root-lipics-v2019-UNSUPPORTED.tex +++ b/src/document-templates/root-lipics-v2019-UNSUPPORTED.tex @@ -1,5 +1,6 @@ -%% Copyright (C) 2018 The University of Sheffield -%% 2018 The University of Paris-Sud +%% Copyright (c) 2019 University of Exeter +%% 2018-2019 University of Paris-Sud +%% 2018-2019 The University of Sheffield %% %% License: %% This program can be redistributed and/or modified under the terms @@ -13,9 +14,14 @@ %% Warning: Do Not Edit! %% ===================== -%% This is the root file for the Isabelle/DOF using Springer's llncs.cls. -%% All customization and/or additional packages should be added to the file -%% preamble.tex. +%% This is the root file for the Isabelle/DOF using the lipics class. +%% Note that lipics cannot be distributed as part of Isabelle/DOF; you need +%% to download lipics.cls from +%% https://www.dagstuhl.de/en/publications/lipics/instructions-for-authors/ +%% and add it manually to the praemble.tex and the ROOT file. +%% +%% All customization and/or additional packages should be added to the file +%% preamble.tex. \documentclass[a4paper,USenglish,cleveref, autoref]{lipics-v2019} \bibliographystyle{plainurl}% the mandatory bibstyle diff --git a/src/document-templates/root-lncs.tex b/src/document-templates/root-lncs.tex index 4497cf5..f2aec9e 100644 --- a/src/document-templates/root-lncs.tex +++ b/src/document-templates/root-lncs.tex @@ -1,5 +1,6 @@ -%% Copyright (C) 2018 The University of Sheffield -%% 2018 The University of Paris-Sud +%% Copyright (c) 2019 University of Exeter +%% 2018-2019 University of Paris-Sud +%% 2018-2019 The University of Sheffield %% %% License: %% This program can be redistributed and/or modified under the terms @@ -13,9 +14,14 @@ %% Warning: Do Not Edit! %% ===================== -%% This is the root file for the Isabelle/DOF using Springer's llncs.cls. 
-%% All customization and/or additional packages should be added to the file -%% preamble.tex. +%% This is the root file for the Isabelle/DOF using the lncs class. +%% Note that lncs cannot be distributed as part of Isabelle/DOF; you need +%% to download lncs.cls from +%% https://www.springer.com/gp/computer-science/lncs/conference-proceedings-guidelines +%% and add it manually to the praemble.tex and the ROOT file. +%% +%% All customization and/or additional packages should be added to the file +%% preamble.tex. \RequirePackage{ifvtex} \documentclass{llncs} diff --git a/src/document-templates/root-scrartcl.tex b/src/document-templates/root-scrartcl.tex index d9a2ba9..5d3b793 100644 --- a/src/document-templates/root-scrartcl.tex +++ b/src/document-templates/root-scrartcl.tex @@ -1,5 +1,6 @@ -%% Copyright (C) 2018 The University of Sheffield -%% 2018 The University of Paris-Sud +%% Copyright (c) 2019 University of Exeter +%% 2018-2019 University of Paris-Sud +%% 2018-2019 The University of Sheffield %% %% License: %% This program can be redistributed and/or modified under the terms @@ -13,10 +14,10 @@ %% Warning: Do Not Edit! %% ===================== -%% This is the root file for the Isabelle/DOF. -%% All customization and/or additional packages should be added to the file -%% preamble.tex. - +%% This is the root file for the Isabelle/DOF using the scrartcl class. +%% +%% All customization and/or additional packages should be added to the file +%% preamble.tex. \RequirePackage{ifvtex} \documentclass[fontsize=11pt,DIV=12,paper=a4]{scrartcl} diff --git a/src/document-templates/root-scrreprt.tex b/src/document-templates/root-scrreprt.tex index 9332286..2c0b098 100644 --- a/src/document-templates/root-scrreprt.tex +++ b/src/document-templates/root-scrreprt.tex @@ -1,5 +1,6 @@ -%% Copyright (C) 2018 The University of Sheffield -%% 2018 The University of Paris-Sud +%% Copyright (c) 2019 University of Exeter +%% 2018-2019 University of Paris-Sud +%% 2018-2019 The University of Sheffield %% %% License: %% This program can be redistributed and/or modified under the terms @@ -13,10 +14,10 @@ %% Warning: Do Not Edit! %% ===================== -%% This is the root file for the Isabelle/DOF. -%% All customization and/or additional packages should be added to the file -%% preamble.tex. - +%% This is the root file for the Isabelle/DOF using the scrreprt class. +%% +%% All customization and/or additional packages should be added to the file +%% preamble.tex. \RequirePackage{ifvtex} \documentclass[fontsize=10pt,DIV=12,paper=a4,open=right,twoside,abstract=true]{scrreprt} From d1f55e7f30f30422a839aa52880bc52ba90b0638 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 10:21:37 +0100 Subject: [PATCH 32/34] Do not include checking instructions in PDF. 
--- examples/scholarly_paper/2018_cicm/IsaDofApplications.thy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy index a9d7f65..6241c62 100644 --- a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy +++ b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy @@ -4,9 +4,9 @@ theory IsaDofApplications begin open_monitor*[this::article] +declare[[strict_monitor_checking=false]] (*>*) -declare[[strict_monitor_checking=false]] title*[tit::title]\<open>Using the Isabelle Ontology Framework\<close> subtitle*[stit::subtitle]\<open>Linking the Formal with the Informal\<close> text*[adb:: author, From eef387198adc22c8f2bd5892558ee321ef56e3ab Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 10:22:06 +0100 Subject: [PATCH 33/34] Updated URL of Isabelle/DOF repository. --- examples/scholarly_paper/2018_cicm/IsaDofApplications.thy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy index 6241c62..e64e4c4 100644 --- a/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy +++ b/examples/scholarly_paper/2018_cicm/IsaDofApplications.thy @@ -729,7 +729,7 @@ text\<open> \isadof in its present form has a number of technical short-comings paragraph\<open> Availability. \<close> text\<open> The implementation of the framework, the discussed ontology definitions, and examples are available at - \url{https://git.logicalhacking.com/HOL-OCL/Isabelle_DOF/}.\<close> + \url{https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/}.\<close> paragraph\<open> Acknowledgement. \<close> text\<open> This work was partly supported by the framework of IRT SystemX, Paris-Saclay, France, and therefore granted with public funds within the scope of the Program ``Investissements d’Avenir''.\<close> From 19c8963abdda3abecde409d98d1caf58eb932d4c Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" <adbrucker@0x5f.org> Date: Sun, 21 Jul 2019 10:41:06 +0100 Subject: [PATCH 34/34] Updated lstisadof.sty. 
--- .../IsaDof_Manual/document/lstisadof.sty | 65 ++++++++++++++----- 1 file changed, 49 insertions(+), 16 deletions(-) diff --git a/examples/technical_report/IsaDof_Manual/document/lstisadof.sty b/examples/technical_report/IsaDof_Manual/document/lstisadof.sty index 49072ce..bdee962 100644 --- a/examples/technical_report/IsaDof_Manual/document/lstisadof.sty +++ b/examples/technical_report/IsaDof_Manual/document/lstisadof.sty @@ -60,33 +60,67 @@ \def\endlstdelim{\texttt{\textbf{\color{black!60}#2}}\egroup}% \ttfamily\textbf{\color{black!60}#1}\bgroup\rmfamily\color{#3}\aftergroup\endlstdelim% } -\newcommand{\subscr}[1]{\ensuremath{_{\mbox{#1}}}} -\newcommand{\supscr}[1]{\ensuremath{^{\mbox{#1}}}} +\newcommand{\subscr}[1]{\ensuremath{_{\text{#1}}}} +\newcommand{\supscr}[1]{\ensuremath{^{\text{#1}}}} \lstdefinestyle{ISAR}{% language=% ,basicstyle=\ttfamily% ,showspaces=false% ,showlines=false% ,columns=flexible% + ,keepspaces + ,mathescape=false, ,morecomment=[s]{(*}{*)}% % ,moredelim=*[s][\rmfamily]{\{*}{*\}}% ,moredelim = **[is][\beginlstdelim{\{*}{*\}}{black}]{\{*}{*\}} ,showstringspaces=false% - ,moredelim=*[is][\supscr]{\\<^bsup>}{\\<^esup>}% + ,moredelim=*[is][\supscr]{<bsup>}{<esup>}% + ,moredelim=*[is][\subscr]{<bsub>}{<esub>}% ,literate={% {...}{\,\ldots\,}3% - {\\<Open>}{\ensuremath{\isacartoucheopen}}1% - {\\at}{@}1% - {\\<Close>}{\ensuremath{\isacartoucheclose}}1% + {<Open>}{\ensuremath{\isacartoucheopen}}1% + {<open>}{\ensuremath{\isacartoucheopen}}1% + {<@>}{@}1% + {"}{}0% + {~}{\ }1% + {::}{:\!:}1% + {<Close>}{\ensuremath{\isacartoucheclose}}1% + {<close>}{\ensuremath{\isacartoucheclose}}1% {\\<Gamma>}{\ensuremath{\Gamma}}1% {\\<times>}{\ensuremath{\times}}1% - {\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1% - {\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1% - {\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1% - {\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1% + {\\<equiv>}{\ensuremath{\equiv}}1% {\\<Rightarrow>}{\ensuremath{\Rightarrow}}1% + {\\<rightarrow>}{\ensuremath{\rightarrow}}1% + {\\<longrightarrow>}{\ensuremath{\rightarrow}}1% + {\\<and>}{\ensuremath{\land}}1% + {\\<or>}{\ensuremath{\lor}}1% + {\\<lfloor>}{\ensuremath{\lfloor}}1% + {\\<rfloor>}{\ensuremath{\rfloor}}1% + %{\\<lparr>}{\ensuremath{\lparr}}1% + %{\\<rparr>}{\ensuremath{\rparr}}1% + {\\<le>}{\ensuremath{\le}}1% + {\\<delta>}{\ensuremath{\delta}}1% + {\\<lambda>}{\ensuremath{\lambda}}1% + {\\<bar>}{\ensuremath{\vert}}1% + {\<sigma>}{\ensuremath{\sigma}}1% + {\\<lparr>}{\ensuremath{\isasymlparr}}1% + {\\<rparr>}{\ensuremath{\isasymrparr}}1% + {\\<leftrightarrow>}{\ensuremath{\leftrightarrow}}1% {\{*}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1% {*\}}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1% + {\\<open>}{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}1% + {\\<close>}{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}1% + {\\<forall>}{\ensuremath{\forall}}1% + {\\<exists>}{\ensuremath{\exists}}1% + {\\<in>}{\ensuremath{\in}}1% + {\\<delta>}{\ensuremath{\delta}}1% + {\\<real>}{\ensuremath{\mathbb{R}}}1% + {\\<noteq>}{\ensuremath{\neq}}1% + {\\<Forall>}{\ensuremath{\bigwedge\,}}1% + {\\<lbrakk>}{\ensuremath{\mathopen{\lbrack\mkern-3mu\lbrack}}}1% + {\\<lbrace>}{\ensuremath{\mathopen{\lbrace\mkern-4.5mu\mid}}}1% + {\\<rbrakk>}{\ensuremath{\mathclose{\rbrack\mkern-3mu\rbrack}}}1% + {\\<rbrace>}{\ensuremath{\mathclose{\mid\mkern-4.5mu\rbrace}}}1% }% % % Defining "tags" (text-antiquotations) based on 1-keywords ,tag=**[s]{@\{}{\}}% @@ -97,18 +131,18 
@@ % Defining 2-keywords ,keywordstyle=[2]{\color{Blue!60}\bfseries}% ,alsoletter={*,-} - ,morekeywords=[2]{theory, begin, end, ML,section,subsection,paragraph,chapter,text}% + ,morekeywords=[2]{case, then, show, theory, begin, end, ML,section,subsection,paragraph,chapter,text}% %,moredelim=[s][\textit]{<}{>} % Defining 3-keywords ,keywordstyle=[3]{\color{OliveGreen!60}\bfseries}% ,morekeywords=[3]{doc_class,declare_reference,update_instance*, - open_monitor*, close_monitor*, figure*, title*, subtitle*,declare_reference*,section*,text*}% + open_monitor*, close_monitor*, declare_reference*,section*,text*,title*,abstract*}% % Defining 4-keywords ,keywordstyle=[4]{\color{black!60}\bfseries}% - ,morekeywords=[4]{where, imports}% + ,morekeywords=[4]{where, imports, keywords}% % Defining 5-keywords ,keywordstyle=[5]{\color{BrickRed!70}\bfseries}% - ,morekeywords=[5]{datatype, typedecl, consts, theorem}% + ,morekeywords=[5]{datatype, by, fun, Definition*, definition, type_synonym, typedecl, consts, assumes, and, shows, proof, next, qed, lemma, theorem}% % Defining 6-keywords ,keywordstyle=[6]{\itshape}% ,morekeywords=[6]{meta-args, ref, expr, class_id}% @@ -117,8 +151,7 @@ %% \lstnewenvironment{isar}[1][]{\lstset{style=ISAR, backgroundcolor=\color{black!2}, - frame=lines, - mathescape=true, + frame=lines,mathescape, basicstyle=\footnotesize\ttfamily,#1}}{} %%% \def\inlineisar{\lstinline[style=ISAR,breaklines=true,mathescape,breakatwhitespace=true]}
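For orientation, a minimal usage sketch of the listing setup patched above: it is not part of the patch series, and the theory fragment shown is hypothetical, but it only uses the \inlineisar macro and the isar environment defined in lstisadof.sty (with \inlineisar taking an arbitrary delimiter such as +...+, as in the IsaDofApplications sources earlier in the series).

% Hypothetical LaTeX snippet illustrating the patched lstisadof.sty definitions;
% \inlineisar typesets inline Isar/Isabelle-DOF syntax, while the isar
% environment produces a framed, keyword-highlighted block using style=ISAR.
A document class is introduced with \inlineisar+doc_class+ and instantiated
with \inlineisar+text*+, for instance:
\begin{isar}
doc_class abstract =
   keywordlist :: "string list" <= "[]"

text*[abs::abstract, keywordlist = "[''ontology'']"]<open>...<close>
\end{isar}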