From cb72f2f16e5257ca60ce9a018f21f851616a1364 Mon Sep 17 00:00:00 2001
From: bu
Date: Fri, 17 May 2019 12:02:45 +0200
Subject: [PATCH 01/23] Various patches to understand the crash of the LaTeX generator.

---
 Isa_DOF.thy | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/Isa_DOF.thy b/Isa_DOF.thy
index 2c1e115..a18fd31 100644
--- a/Isa_DOF.thy
+++ b/Isa_DOF.thy
@@ -13,7 +13,8 @@ text\ Offering \<^item> LaTeX support. \
 
 text\ In this section, we develop, on the basis of a management of references, Isar markups
-that provide direct support in the PIDE framework. \
+that provide direct support in the PIDE framework.
+ \
 
 theory Isa_DOF   (* Isabelle Document Ontology Framework *)
@@ -44,6 +45,8 @@ theory Isa_DOF   (* Isabelle Document Ontology Framework *)
 
 begin
 
+text\ @{file "$ISABELLE_HOME/src/Pure/ROOT.ML"}
+\
 
 section\Primitive Markup Generators\
 ML\
@@ -1527,9 +1530,9 @@ end
    Currently for *all* commands; no distinction between the text* and text commands.
    This code depends on a MODIFIED Isabelle2017 version resulting from applying
    the files under Isabelle_DOF/patches.
-  *)
+  *)
 val _ = Thy_Output.set_meta_args_parser
-    (fn thy => (Scan.optional (ODL_Command_Parser.attributes >> meta_args_2_string thy) ""))
+    (fn thy => (Scan.optional (ODL_Command_Parser.attributes >> meta_args_2_string thy) ""))
 
 end

From 39196965210b5d5953283c91b49c081de1d9466b Mon Sep 17 00:00:00 2001
From: bu
Date: Mon, 27 May 2019 11:34:47 +0200
Subject: [PATCH 02/23] eliminated bullocks.

---
 patches/thy_output.ML | 708 +++++++++++++++++------------------
 1 file changed, 289 insertions(+), 419 deletions(-)

diff --git a/patches/thy_output.ML b/patches/thy_output.ML
index a503c19..acf55de 100644
--- a/patches/thy_output.ML
+++ b/patches/thy_output.ML
@@ -1,252 +1,181 @@
 (*  Title:      Pure/Thy/thy_output.ML
-    Author:     Markus Wenzel, TU Muenchen
+    Author:     Makarius
 
-Theory document output with antiquotations.
+Theory document output.
*) signature THY_OUTPUT = sig - val display: bool Config.T - val quotes: bool Config.T - val margin: int Config.T - val indent: int Config.T - val source: bool Config.T - val break: bool Config.T - val modes: string Config.T - val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context - val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory - val check_command: Proof.context -> xstring * Position.T -> string - val check_option: Proof.context -> xstring * Position.T -> string - val print_antiquotations: bool -> Proof.context -> unit - val antiquotation: binding -> 'a context_parser -> - ({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) -> - theory -> theory - val boolean: string -> bool - val integer: string -> int - val eval_antiquote: Toplevel.state -> Antiquote.text_antiquote -> string - val output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string - val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T + val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list + val check_comments: Proof.context -> Symbol_Pos.T list -> unit + val output_token: Proof.context -> Token.T -> Latex.text list + val output_source: Proof.context -> string -> Latex.text list + type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state} + val present_thy: Options.T -> theory -> segment list -> Latex.text list val set_meta_args_parser : (theory -> string parser) -> unit - val pretty_text: Proof.context -> string -> Pretty.T val pretty_term: Proof.context -> term -> Pretty.T val pretty_thm: Proof.context -> thm -> Pretty.T - val str_of_source: Token.src -> string - val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context -> - Token.src -> 'a list -> Pretty.T list - val string_of_margin: Proof.context -> Pretty.T -> string - val output: Proof.context -> Pretty.T list -> string - val verbatim_text: Proof.context -> string -> string - val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source -> - Toplevel.transition -> Toplevel.transition + val lines: Latex.text list -> Latex.text list + val items: Latex.text list -> Latex.text list + val isabelle: Proof.context -> Latex.text list -> Latex.text + val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text + val typewriter: Proof.context -> string -> Latex.text + val verbatim: Proof.context -> string -> Latex.text + val source: Proof.context -> Token.src -> Latex.text + val pretty: Proof.context -> Pretty.T -> Latex.text + val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text + val pretty_items: Proof.context -> Pretty.T list -> Latex.text + val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text + val antiquotation_pretty: + binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory + val antiquotation_pretty_source: + binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory + val antiquotation_raw: + binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory + val antiquotation_verbatim: + binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory end; structure Thy_Output: THY_OUTPUT = struct -(** options **) +(* output document source *) -val display = Attrib.setup_option_bool ("thy_output_display", \<^here>); -val break = 
Attrib.setup_option_bool ("thy_output_break", \<^here>); -val quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>); -val margin = Attrib.setup_option_int ("thy_output_margin", \<^here>); -val indent = Attrib.setup_option_int ("thy_output_indent", \<^here>); -val source = Attrib.setup_option_bool ("thy_output_source", \<^here>); -val modes = Attrib.setup_option_string ("thy_output_modes", \<^here>); +val output_symbols = single o Latex.symbols_output; - -structure Wrappers = Proof_Data -( - type T = ((unit -> string) -> unit -> string) list; - fun init _ = []; -); - -fun add_wrapper wrapper = Wrappers.map (cons wrapper); - -val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f); - - - -(** maintain global antiquotations **) - -structure Antiquotations = Theory_Data -( - type T = - (Token.src -> Toplevel.state -> Proof.context -> string) Name_Space.table * - (string -> Proof.context -> Proof.context) Name_Space.table; - val empty : T = - (Name_Space.empty_table Markup.document_antiquotationN, - Name_Space.empty_table Markup.document_antiquotation_optionN); - val extend = I; - fun merge ((commands1, options1), (commands2, options2)) : T = - (Name_Space.merge_tables (commands1, commands2), - Name_Space.merge_tables (options1, options2)); -); - -val get_antiquotations = Antiquotations.get o Proof_Context.theory_of; - -fun add_command name cmd thy = thy - |> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd)); - -fun add_option name opt thy = thy - |> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd)); - -fun check_command ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt)); - -fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)); - -fun command src state ctxt = - let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src - in f src' state ctxt end; - -fun option ((xname, pos), s) ctxt = +fun output_comment ctxt (kind, syms) = + (case kind of + Comment.Comment => + Input.cartouche_content syms + |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false) + {markdown = false} + |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}" + | Comment.Cancel => + Symbol_Pos.cartouche_content syms + |> output_symbols + |> Latex.enclose_body "%\n\\isamarkupcancel{" "}" + | Comment.Latex => + [Latex.symbols (Symbol_Pos.cartouche_content syms)]) +and output_comment_document ctxt (comment, syms) = + (case comment of + SOME kind => output_comment ctxt (kind, syms) + | NONE => [Latex.symbols syms]) +and output_document_text ctxt syms = + Comment.read_body syms |> maps (output_comment_document ctxt) +and output_document ctxt {markdown} source = let - val (_, opt) = - Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos); - in opt s ctxt end; - -fun print_antiquotations verbose ctxt = - let - val (commands, options) = get_antiquotations ctxt; - val command_names = map #1 (Name_Space.markup_table verbose ctxt commands); - val option_names = map #1 (Name_Space.markup_table verbose ctxt options); - in - [Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names), - Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)] - end |> Pretty.writeln_chunks; - -fun antiquotation name scan body = - add_command name - (fn src => fn state => fn ctxt => - let val (x, ctxt') = Token.syntax scan src ctxt - in body {source = src, state = 
state, context = ctxt'} x end); - - - -(** syntax of antiquotations **) - -(* option values *) - -fun boolean "" = true - | boolean "true" = true - | boolean "false" = false - | boolean s = error ("Bad boolean value: " ^ quote s); - -fun integer s = - let - fun int ss = - (case Library.read_int ss of (i, []) => i - | _ => error ("Bad integer value: " ^ quote s)); - in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end; - - -(* outer syntax *) - -local - -val property = - Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) ""; - -val properties = - Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) []; - -in - -val antiq = - Parse.!!! - (Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof) - >> (fn ((name, props), args) => (props, name :: args)); - -end; - - -(* eval antiquote *) - -local - -fun eval_antiq state (opts, src) = - let - val preview_ctxt = fold option opts (Toplevel.presentation_context_of state); - val print_ctxt = Context_Position.set_visible false preview_ctxt; - - fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) (); - val _ = cmd preview_ctxt; - val print_modes = space_explode "," (Config.get print_ctxt modes) @ [Latex.latexN]; - in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end; - -in - -fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss - | eval_antiquote state (Antiquote.Control {name, body, ...}) = - eval_antiq state - ([], Token.make_src name (if null body then [] else [Token.read_cartouche body])) - | eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) = - let - val keywords = - (case try Toplevel.presentation_context_of state of - SOME ctxt => Thy_Header.get_keywords' ctxt - | NONE => - error ("Unknown context -- cannot expand document antiquotations" ^ - Position.here pos)); - in eval_antiq state (Token.read_antiq keywords antiq (body, pos)) end; - -end; - - -(* output text *) - -fun output_text state {markdown} source = - let - val is_reported = - (case try Toplevel.context_of state of - SOME ctxt => Context_Position.is_visible ctxt - | NONE => true); - val pos = Input.pos_of source; val syms = Input.source_explode source; - val _ = - if is_reported then - Position.report pos (Markup.language_document (Input.is_delimited source)) - else (); - - val output_antiquotes = map (eval_antiquote state) #> implode; + val output_antiquotes = + maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt); fun output_line line = - (if Markdown.line_is_item line then "\\item " else "") ^ + (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @ output_antiquotes (Markdown.line_content line); - fun output_blocks blocks = space_implode "\n\n" (map output_block blocks) - and output_block (Markdown.Par lines) = cat_lines (map output_line lines) + fun output_block (Markdown.Par lines) = + Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines)) | output_block (Markdown.List {kind, body, ...}) = - Latex.environment (Markdown.print_kind kind) (output_blocks body); + Latex.environment_block (Markdown.print_kind kind) (output_blocks body) + and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks); in - if Toplevel.is_skipped_proof state then "" + if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then [] else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms then let - val ants = 
Antiquote.parse pos syms; + val ants = Antiquote.parse_comments pos syms; val reports = Antiquote.antiq_reports ants; val blocks = Markdown.read_antiquotes ants; - val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else (); + val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks); in output_blocks blocks end else let - val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms); + val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms); val reports = Antiquote.antiq_reports ants; - val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else (); + val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants); in output_antiquotes ants end end; +(* output tokens with formal comments *) + +local + +val output_symbols_antiq = + (fn Antiquote.Text syms => output_symbols syms + | Antiquote.Control {name = (name, _), body, ...} => + Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) :: + output_symbols body + | Antiquote.Antiq {body, ...} => + Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body)); + +fun output_comment_symbols ctxt {antiq} (comment, syms) = + (case (comment, antiq) of + (NONE, false) => output_symbols syms + | (NONE, true) => + Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms + |> maps output_symbols_antiq + | (SOME comment, _) => output_comment ctxt (comment, syms)); + +fun output_body ctxt antiq bg en syms = + Comment.read_body syms + |> maps (output_comment_symbols ctxt {antiq = antiq}) + |> Latex.enclose_body bg en; + +in + +fun output_token ctxt tok = + let + fun output antiq bg en = + output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok)); + in + (case Token.kind_of tok of + Token.Comment NONE => [] + | Token.Command => output false "\\isacommand{" "}" + | Token.Keyword => + if Symbol.is_ascii_identifier (Token.content_of tok) + then output false "\\isakeyword{" "}" + else output false "" "" + | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" + | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" + | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" + | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}" + | _ => output false "" "") + end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok)); + +fun output_source ctxt s = + output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none)); + +fun check_comments ctxt = + Comment.read_body #> List.app (fn (comment, syms) => + let + val pos = #1 (Symbol_Pos.range syms); + val _ = + comment |> Option.app (fn kind => + Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind))); + val _ = output_comment_symbols ctxt {antiq = false} (comment, syms); + in if comment = SOME Comment.Comment then check_comments ctxt syms else () end); + +end; + + (** present theory source **) (*NB: arranging white space around command spans is a black art*) +val is_white = Token.is_space orf Token.is_informal_comment; +val is_black = not o is_white; + +val is_white_comment = Token.is_informal_comment; +val is_black_comment = Token.is_formal_comment; + + (* presentation tokens *) datatype token = - No_Token + Ignore_Token | Basic_Token of Token.T | Markup_Token of string * string * Input.source | Markup_Env_Token of string * string * Input.source @@ -255,25 +184,24 
@@ datatype token = fun basic_token pred (Basic_Token tok) = pred tok | basic_token _ _ = false; -val improper_token = basic_token Token.is_improper; -val comment_token = basic_token Token.is_comment; +val white_token = basic_token is_white; +val white_comment_token = basic_token is_white_comment; val blank_token = basic_token Token.is_blank; val newline_token = basic_token Token.is_newline; - -(* output token *) - -fun output_token state tok = +fun present_token ctxt tok = (case tok of - No_Token => "" - | Basic_Token tok => Latex.output_token tok + Ignore_Token => [] + | Basic_Token tok => output_token ctxt tok | Markup_Token (cmd, meta_args, source) => - "%\n\\isamarkup" ^ cmd ^ meta_args ^ "{" ^ output_text state {markdown = false} source ^ "%\n}\n" + Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ meta_args ^ "{") "%\n}\n" + (output_document ctxt {markdown = false} source) | Markup_Env_Token (cmd, meta_args, source) => - Latex.environment ("isamarkup" ^ cmd) - (meta_args ^ output_text state {markdown = true} source) + [Latex.environment_block + ("isamarkup" ^ cmd) + (Latex.string meta_args :: output_document ctxt {markdown = true} source)] | Raw_Token source => - "%\n" ^ output_text state {markdown = true} source ^ "\n"); + Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]); (* command spans *) @@ -285,16 +213,16 @@ datatype span = Span of command * (source * source * source * source) * bool; fun make_span cmd src = let - fun take_newline (tok :: toks) = + fun chop_newline (tok :: toks) = if newline_token (fst tok) then ([tok], toks, true) else ([], tok :: toks, false) - | take_newline [] = ([], [], false); + | chop_newline [] = ([], [], false); val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = src - |> take_prefix (improper_token o fst) - ||>> take_suffix (improper_token o fst) - ||>> take_prefix (comment_token o fst) - ||> take_newline; + |> chop_prefix (white_token o fst) + ||>> chop_suffix (white_token o fst) + ||>> chop_prefix (white_comment_token o fst) + ||> chop_newline; in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; @@ -307,42 +235,72 @@ fun err_bad_nesting pos = fun edge which f (x: string option, y) = if x = y then I - else (case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt)); + else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt))); val begin_tag = edge #2 Latex.begin_tag; val end_tag = edge #1 Latex.end_tag; fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e; fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e; +fun read_tag s = + (case space_explode "%" s of + ["", b] => (SOME b, NONE) + | [a, b] => (NONE, SOME (a, b)) + | _ => error ("Bad document_tags specification: " ^ quote s)); + in -fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) = +fun make_command_tag options keywords = let + val document_tags = + map read_tag (space_explode "," (Options.string options \<^system_option>\document_tags\)); + val document_tags_default = map_filter #1 document_tags; + val document_tags_command = map_filter #2 document_tags; + in + fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' => + let + val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); + + val keyword_tags = + if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"] + else Keyword.command_tags 
keywords cmd_name; + val command_tags = + the_list (AList.lookup (op =) document_tags_command cmd_name) @ + keyword_tags @ document_tags_default; + + val active_tag' = + if is_some tag' then tag' + else + (case command_tags of + default_tag :: _ => SOME default_tag + | [] => + if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state + then active_tag + else NONE); + in {tag' = tag', active_tag' = active_tag'} end + end; + +fun present_span thy command_tag span state state' + (tag_stack, active_tag, newline, latex, present_cont) = + let + val ctxt' = + Toplevel.presentation_context state' + handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN; val present = fold (fn (tok, (flag, 0)) => - Buffer.add (output_token state' tok) - #> Buffer.add flag + fold cons (present_token ctxt' tok) + #> cons (Latex.string flag) | _ => I); val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span; val (tag, tags) = tag_stack; - val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); + val {tag', active_tag'} = + command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag} + state state'; + val edge = (active_tag, active_tag'); val nesting = Toplevel.level state' - Toplevel.level state; - val active_tag' = - if is_some tag' then tag' - else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE - else - (case Keyword.command_tags keywords cmd_name of - default_tag :: _ => SOME default_tag - | [] => - if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state - then active_tag - else NONE); - - val edge = (active_tag, active_tag'); - val newline' = if is_none active_tag' then span_newline else newline; @@ -354,8 +312,8 @@ fun present_span keywords span state state' (tag_stack, active_tag, newline, buf tg :: tgs => (tg, tgs) | [] => err_bad_nesting (Position.here cmd_pos)); - val buffer' = - buffer + val latex' = + latex |> end_tag edge |> close_delim (fst present_cont) edge |> snd present_cont @@ -365,12 +323,12 @@ fun present_span keywords span state state' (tag_stack, active_tag, newline, buf val present_cont' = if newline then (present (#3 srcs), present (#4 srcs)) else (I, present (#3 srcs) #> present (#4 srcs)); - in (tag_stack', active_tag', newline', buffer', present_cont') end; + in (tag_stack', active_tag', newline', latex', present_cont') end; -fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) = +fun present_trailer ((_, tags), active_tag, _, latex, present_cont) = if not (null tags) then err_bad_nesting " at end of theory" else - buffer + latex |> end_tag (active_tag, NONE) |> close_delim (fst present_cont) (active_tag, NONE) |> snd present_cont; @@ -386,9 +344,9 @@ val markup_true = "\\isamarkuptrue%\n"; val markup_false = "\\isamarkupfalse%\n"; val space_proper = - Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper; + Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black; -val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore); +val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore); val improper = Scan.many is_improper; val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper)); val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank)); @@ -408,32 +366,35 @@ val locale = Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |-- Parse.!!! 
(improper |-- Parse.name --| (improper -- Parse.$$$ ")"))); -val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theory -> string parser) +val meta_args_parser_hook = Unsynchronized.ref ((fn thy => fn s => ("",s)): theory -> string parser) in -fun present_thy thy command_results toks = +type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}; + +fun present_thy options thy (segments: segment list) = let val keywords = Thy_Header.get_keywords thy; + (* tokens *) val ignored = Scan.state --| ignore - >> (fn d => (NONE, (No_Token, ("", d)))); + >> (fn d => (NONE, (Ignore_Token, ("", d)))); fun markup pred mk flag = Scan.peek (fn d => improper |-- - Parse.position (Scan.one (fn tok => Token.is_command tok andalso - pred keywords (Token.content_of tok))) -- + Parse.position (Scan.one (fn tok => + Token.is_command tok andalso pred keywords (Token.content_of tok))) -- Scan.repeat tag -- (improper |-- (Parse.!!!! - ( (!meta_args_parser_hook thy) + ((!meta_args_parser_hook thy) -- ( (improper -- locale -- improper) |-- (Parse.document_source)) --| improper_end))) - >> (fn (((tok, pos'), tags), (meta_args,source)) => + >> (fn (((tok, pos'), tags), (meta_args, source)) => let val name = Token.content_of tok in (SOME (name, pos', tags), (mk (name, meta_args, source), (flag, d))) end)); @@ -446,9 +407,7 @@ fun present_thy thy command_results toks = (Basic_Token cmd, (markup_false, d)))])); val cmt = Scan.peek (fn d => - (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |-- - Parse.!!!! (improper |-- Parse.document_source) >> - (fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d))))); + Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); val other = Scan.peek (fn d => Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); @@ -470,13 +429,13 @@ fun present_thy thy command_results toks = val cmd = Scan.one (is_some o fst); val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; - val comments = Scan.many (comment_token o fst o snd); + val white_comments = Scan.many (white_comment_token o fst o snd); val blank = Scan.one (blank_token o fst o snd); val newline = Scan.one (newline_token o fst o snd); val before_cmd = - Scan.option (newline -- comments) -- - Scan.option (newline -- comments) -- - Scan.option (blank -- comments) -- cmd; + Scan.option (newline -- white_comments) -- + Scan.option (newline -- white_comments) -- + Scan.option (blank -- white_comments) -- cmd; val span = Scan.repeat non_cmd -- cmd -- @@ -485,202 +444,113 @@ fun present_thy thy command_results toks = >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); - val spans = toks - |> take_suffix Token.is_space |> #1 - |> Source.of_list + val spans = segments + |> maps (Command_Span.content o #span) + |> drop_suffix Token.is_space + |> Source.of_list |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat)) |> Source.source stopper (Scan.error (Scan.bulk span)) |> Source.exhaust; + val command_results = + segments |> map_filter (fn {command, state, ...} => + if Toplevel.is_ignored command then NONE else SOME (command, state)); + + (* present commands *) + val command_tag = make_command_tag options keywords; + fun present_command tr span st st' = - Toplevel.setmp_thread_position tr (present_span keywords span st st'); + Toplevel.setmp_thread_position tr (present_span thy command_tag span st st'); fun present _ [] = I - | present st (((tr, st'), span) 
:: rest) = present_command tr span st st' #> present st' rest; + | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest; in if length command_results = length spans then - ((NONE, []), NONE, true, Buffer.empty, (I, I)) - |> present Toplevel.toplevel (command_results ~~ spans) + ((NONE, []), NONE, true, [], (I, I)) + |> present Toplevel.toplevel (spans ~~ command_results) |> present_trailer + |> rev else error "Messed-up outer syntax for presentation" end; -fun set_meta_args_parser f = (meta_args_parser_hook:= f) +fun set_meta_args_parser f = meta_args_parser_hook := f end; -(** setup default output **) +(** standard output operations **) -(* options *) +(* pretty printing *) -val _ = Theory.setup - (add_option @{binding show_types} (Config.put show_types o boolean) #> - add_option @{binding show_sorts} (Config.put show_sorts o boolean) #> - add_option @{binding show_structs} (Config.put show_structs o boolean) #> - add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #> - add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #> - add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #> - add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #> - add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #> - add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #> - add_option @{binding display} (Config.put display o boolean) #> - add_option @{binding break} (Config.put break o boolean) #> - add_option @{binding quotes} (Config.put quotes o boolean) #> - add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #> - add_option @{binding margin} (Config.put margin o integer) #> - add_option @{binding indent} (Config.put indent o integer) #> - add_option @{binding source} (Config.put source o boolean) #> - add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer)); - - -(* basic pretty printing *) - -fun perhaps_trim ctxt = - not (Config.get ctxt display) ? 
Symbol.trim_blanks; - -fun pretty_text ctxt = - Pretty.chunks o map Pretty.str o map (perhaps_trim ctxt) o split_lines; - -fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t; +fun pretty_term ctxt t = + Syntax.pretty_term (Variable.auto_fixes t ctxt) t; fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; -fun pretty_term_style ctxt (style, t) = - pretty_term ctxt (style t); - -fun pretty_thm_style ctxt (style, th) = - pretty_term ctxt (style (Thm.full_prop_of th)); - -fun pretty_term_typ ctxt (style, t) = - let val t' = style t - in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end; - -fun pretty_term_typeof ctxt (style, t) = - Syntax.pretty_typ ctxt (Term.fastype_of (style t)); - -fun pretty_const ctxt c = - let - val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) - handle TYPE (msg, _, _) => error msg; - val ([t'], _) = Variable.import_terms true [t] ctxt; - in pretty_term ctxt t' end; - -fun pretty_abbrev ctxt s = - let - val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s; - fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t); - val (head, args) = Term.strip_comb t; - val (c, T) = Term.dest_Const head handle TERM _ => err (); - val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c - handle TYPE _ => err (); - val t' = Term.betapplys (Envir.expand_atom T (U, u), args); - val eq = Logic.mk_equals (t, t'); - val ctxt' = Variable.auto_fixes eq ctxt; - in Proof_Context.pretty_term_abbrev ctxt' eq end; - -fun pretty_locale ctxt (name, pos) = - let - val thy = Proof_Context.theory_of ctxt - in (Pretty.str o Locale.extern thy o Locale.check thy) (name, pos) end; - -fun pretty_class ctxt = - Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt; - -fun pretty_type ctxt s = - let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s - in Pretty.str (Proof_Context.extern_type ctxt name) end; - -fun pretty_prf full ctxt = Proof_Syntax.pretty_clean_proof_of ctxt full; - -fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name); - (* default output *) -val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src; +val lines = separate (Latex.string "\\isanewline%\n"); +val items = separate (Latex.string "\\isasep\\isanewline%\n"); -fun maybe_pretty_source pretty ctxt src xs = - map (pretty ctxt) xs (*always pretty in order to exhibit errors!*) - |> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I); +fun isabelle ctxt body = + if Config.get ctxt Document_Antiquotation.thy_output_display + then Latex.environment_block "isabelle" body + else Latex.block (Latex.enclose_body "\\isa{" "}" body); -fun string_of_margin ctxt = Pretty.string_of_margin (Config.get ctxt margin); +fun isabelle_typewriter ctxt body = + if Config.get ctxt Document_Antiquotation.thy_output_display + then Latex.environment_block "isabellett" body + else Latex.block (Latex.enclose_body "\\isatt{" "}" body); -fun output ctxt prts = - prts - |> Config.get ctxt quotes ? 
map Pretty.quote - |> (if Config.get ctxt display then - map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output) - #> space_implode "\\isasep\\isanewline%\n" - #> Latex.environment "isabelle" - else - map - ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of) - #> Output.output) - #> space_implode "\\isasep\\isanewline%\n" - #> enclose "\\isa{" "}"); +fun typewriter ctxt s = + isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)]; + +fun verbatim ctxt = + if Config.get ctxt Document_Antiquotation.thy_output_display + then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt + else split_lines #> map (typewriter ctxt) #> lines #> Latex.block; + +fun source ctxt = + Token.args_of_src + #> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt) + #> space_implode " " + #> output_source ctxt + #> isabelle ctxt; + +fun pretty ctxt = + Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt; + +fun pretty_source ctxt src prt = + if Config.get ctxt Document_Antiquotation.thy_output_source + then source ctxt src else pretty ctxt prt; + +fun pretty_items ctxt = + map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt; + +fun pretty_items_source ctxt src prts = + if Config.get ctxt Document_Antiquotation.thy_output_source + then source ctxt src else pretty_items ctxt prts; -(* verbatim text *) +(* antiquotation variants *) -fun verbatim_text ctxt = - if Config.get ctxt display then - split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #> - Latex.output_ascii #> Latex.environment "isabellett" - else - split_lines #> - map (Latex.output_ascii #> enclose "\\isatt{" "}") #> - space_implode "\\isasep\\isanewline%\n"; +fun antiquotation_pretty name scan f = + Document_Antiquotation.setup name scan + (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x)); +fun antiquotation_pretty_source name scan f = + Document_Antiquotation.setup name scan + (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x)); -(* antiquotations for basic entities *) +fun antiquotation_raw name scan f = + Document_Antiquotation.setup name scan + (fn {context = ctxt, argument = x, ...} => f ctxt x); -local - -fun basic_entities name scan pretty = - antiquotation name scan (fn {source, context = ctxt, ...} => - output ctxt o maybe_pretty_source pretty ctxt source); - -fun basic_entities_style name scan pretty = - antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) => - output ctxt - (maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs)); - -fun basic_entity name scan = basic_entities name (scan >> single); - -in - -val _ = Theory.setup - (basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #> - basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #> - basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #> - basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #> - basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #> - basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #> - basic_entity @{binding abbrev} (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #> - basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #> - basic_entity @{binding locale} (Scan.lift (Parse.position 
Args.name)) pretty_locale #>
-   basic_entity @{binding class} (Scan.lift Args.embedded_inner_syntax) pretty_class #>
-   basic_entity @{binding type} (Scan.lift Args.embedded) pretty_type #>
-   basic_entities @{binding prf} Attrib.thms (pretty_prf false) #>
-   basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #>
-   basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory);
-
-end;
-
-
-(** document command **)
-
-
-fun document_command markdown (loc, txt) =
-  Toplevel.keep (fn state =>
-    (case loc of
-      NONE => ignore (output_text state markdown txt)
-    | SOME (_, pos) =>
-        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
-  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
 
 end;

From 52ecf3f70cac2dbab99bb0a923a9242bdc5da03a Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Tue, 11 Jun 2019 11:02:23 +0100
Subject: [PATCH 03/23] Migration to Isabelle 2018.

---
 install | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/install b/install
index 694a534..8d0ab05 100755
--- a/install
+++ b/install
@@ -30,14 +30,14 @@ shopt -s nocasematch
 
 # Global configuration
-ISABELLE_VERSION="Isabelle2017: October 2017"
-ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2017/"
-AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2017/afp-2018-08-14.tar.gz"
+ISABELLE_VERSION="Isabelle2018: August 2018"
+ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
+AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz"
 
 ISABELLE=`which isabelle`
 GEN_DIR=document-generator
 PROG=`echo $0 | sed 's|.*/||'`;
-ISABELLE_VERSION=`$ISABELLE version`
+ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
 SKIP="false"
 VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
 for i in $VARS; do
@@ -69,14 +69,14 @@ exit_error() {
 check_isabelle_version() {
     echo "* Checking Isabelle version:"
-    if [ "$ISABELLE_VERSION" != "$ISABELLE_VERSION" ]; then
+    if [ "$ISABELLE_VERSION" != "$ACTUAL_ISABELLE_VERSION" ]; then
         echo "  WARNING:"
-        echo "    The version of Isabelle (i.e., $ISABELLE_VERSION) you are using"
+        echo "    The version of Isabelle (i.e., $ACTUAL_ISABELLE_VERSION) you are using"
         echo "    IS NOT SUPPORTED"
         echo "    by the current version of Isabelle/DOF. Please install a supported"
         echo "    version of Isabelle and rerun the install script, providing"
         echo "    the \"isabelle\" command as an argument."
-        echo "    Isabelle 2017 can be obtained from:"
+        echo "    Isabelle ($ISABELLE_VERSION) can be obtained from:"
         echo "       $ISABELLE_URL"
         echo
         read -p "    Still continue (y/N)? " -n 1 -r
@@ -109,7 +109,7 @@ check_afp_entries() {
             echo "    Trying to install AFP (this might take a few *minutes*) ...."
             extract=""
             for e in $missing; do
-                extract="$extract afp-2018-08-14/thys/$e"
+                extract="$extract afp-2019-06-04/thys/$e"
             done
             mkdir -p .afp
             if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then

From 033114e29d7a129837ccde6db5363d19129cba0b Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Tue, 11 Jun 2019 11:02:51 +0100
Subject: [PATCH 04/23] Cleanup.

---
 afp-Functional-Automata-current.tar.gz | Bin 12605 -> 0 bytes
 afp-Regular-Sets-current.tar.gz        | Bin 22156 -> 0 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 afp-Functional-Automata-current.tar.gz
 delete mode 100644 afp-Regular-Sets-current.tar.gz

diff --git a/afp-Functional-Automata-current.tar.gz b/afp-Functional-Automata-current.tar.gz
deleted file mode 100644
index 9e900518e7cefaeb3786bca7c47f3638745f374d..0000000000000000000000000000000000000000
GIT binary patch
[base85-encoded binary data for afp-Functional-Automata-current.tar.gz (12605 bytes) omitted]
diff --git a/afp-Regular-Sets-current.tar.gz b/afp-Regular-Sets-current.tar.gz
deleted file mode 100644
index aa80cd80e3bcf93887a85298662d470563cf1df1..0000000000000000000000000000000000000000
GIT binary patch
[base85-encoded binary data for afp-Regular-Sets-current.tar.gz (22156 bytes) omitted]
zFi57Ec{igw`)lh5Yj#gcDD=65VZCd?idl;;8KW##iaudF?~LUfr#u^N`LN*q}4oetgvEI!rwj}{%W%DZ{RxjOjsrcpZxE|?wKe{+HjPX^t#$L zm^hY-I@wB&Q7X;!$;A{0KkLs<(C0+mi8XQfBMFO$C$?%C7iXz{rXh#%#Bvqc3R7ge zk;WBsc0#Xk9jT8cXo~DBhW}3Tz3j-y1?c#0t+vdNT*CsLchk;;KaGz!a4;`x>}=qw1WB zgykqi?_k`0(FQ<)yaCkqx^x{P7ae05;)RErVK_NBK@*#@>#6-FE5|wW z+)#&DHDgpYOl;$APnskR5D0J14V))7AG?P6 zUJOm(|MtEknM|YlT*tdGsGFd$w5%^VE!O6vp2cweWwd?ea%gN>xRO}9U zFC$*v{YKk-KiFsoIcug};cXB%)(MVBQH85|M&1DYR;+?pUk+uwe#r zx7Jo~s?}t3oee5~!&J%b6@t>%k%eX8=lDj@U(&P%G$)7gZQxoE0BH#&n{YLteOg=Z z=LQuERi&+??UgodJk}LiNZU)8NCy!UsqCLMC-;4n=e3j(`sNB5on`oNlh$O^$(^0=T2Gut#V#E@y=FM?+HIPQXM$s zck^M|qY#rEMeoTpYG4hr2@sEHQC6{uko6UhWsB5|M`fcYrBtz3%v$ELaQze3IxM3+ zE&3|+fvGn+;>$`2l;hl14bH{5VTV(l}dQs;Q3hUtj#P|obWkoDaT*xWVZzYNE2bbWZ0gJ| zDap)>$*$pTN2O9Z3~^{vou1QVX=d3Ea8tGb({->1tJ{<7F`?$P)%NW}zOHQpn@p`c zgB+3)d(*|zZn~PZn|@ho_a~ERfshK&b><_=BUqR@pH^F;A@x#c2T`+rt^}I3HA*h| z{1*Xh!f{xPEDrO$rf0)2`m=N>&bQlVPv4y%w(%e|JAc7>fL5bb3?FybBn^PB(DIO^ zc)OzxOdUTS#T}tWKf)ocqmnXfHk$M{6(5&1W$t3!!s^Boza3nyvXGn#c@}fEw)IUQ z#_2+1isEW8IjA--=7ebSq2-B8>&Y|4u9P(fFSJmIBbzH1F!UJe`P*K(QlU>d3q*#) zQEwSu+|es}ec+m&IEP1DP>m(%&`Si@ImwSQ*ldlfb(qgS`dGqhBNmXIpQQ6j9UQ42 ziOZRNv4XRSRB)&U#wNR(O{VyGII6MS8re$LKuc*fx5G&}8IAC|){q%8?g)76UwZ)=X|D&#F1h)R@yp~kI??Ut_H+BW{oH`(t From 4494193d4f12448d59017824dc2346c54cd6c70e Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sun, 16 Jun 2019 21:01:54 +0100 Subject: [PATCH 05/23] Removed old patch for Isabelle 2017. --- patches/thy_output.2017.ML | 686 ------------------------------------- 1 file changed, 686 deletions(-) delete mode 100644 patches/thy_output.2017.ML diff --git a/patches/thy_output.2017.ML b/patches/thy_output.2017.ML deleted file mode 100644 index a503c19..0000000 --- a/patches/thy_output.2017.ML +++ /dev/null @@ -1,686 +0,0 @@ -(* Title: Pure/Thy/thy_output.ML - Author: Markus Wenzel, TU Muenchen - -Theory document output with antiquotations. 
-*) - -signature THY_OUTPUT = -sig - val display: bool Config.T - val quotes: bool Config.T - val margin: int Config.T - val indent: int Config.T - val source: bool Config.T - val break: bool Config.T - val modes: string Config.T - val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context - val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory - val check_command: Proof.context -> xstring * Position.T -> string - val check_option: Proof.context -> xstring * Position.T -> string - val print_antiquotations: bool -> Proof.context -> unit - val antiquotation: binding -> 'a context_parser -> - ({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) -> - theory -> theory - val boolean: string -> bool - val integer: string -> int - val eval_antiquote: Toplevel.state -> Antiquote.text_antiquote -> string - val output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string - val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T - val set_meta_args_parser : (theory -> string parser) -> unit - val pretty_text: Proof.context -> string -> Pretty.T - val pretty_term: Proof.context -> term -> Pretty.T - val pretty_thm: Proof.context -> thm -> Pretty.T - val str_of_source: Token.src -> string - val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context -> - Token.src -> 'a list -> Pretty.T list - val string_of_margin: Proof.context -> Pretty.T -> string - val output: Proof.context -> Pretty.T list -> string - val verbatim_text: Proof.context -> string -> string - val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source -> - Toplevel.transition -> Toplevel.transition -end; - -structure Thy_Output: THY_OUTPUT = -struct - -(** options **) - -val display = Attrib.setup_option_bool ("thy_output_display", \<^here>); -val break = Attrib.setup_option_bool ("thy_output_break", \<^here>); -val quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>); -val margin = Attrib.setup_option_int ("thy_output_margin", \<^here>); -val indent = Attrib.setup_option_int ("thy_output_indent", \<^here>); -val source = Attrib.setup_option_bool ("thy_output_source", \<^here>); -val modes = Attrib.setup_option_string ("thy_output_modes", \<^here>); - - -structure Wrappers = Proof_Data -( - type T = ((unit -> string) -> unit -> string) list; - fun init _ = []; -); - -fun add_wrapper wrapper = Wrappers.map (cons wrapper); - -val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f); - - - -(** maintain global antiquotations **) - -structure Antiquotations = Theory_Data -( - type T = - (Token.src -> Toplevel.state -> Proof.context -> string) Name_Space.table * - (string -> Proof.context -> Proof.context) Name_Space.table; - val empty : T = - (Name_Space.empty_table Markup.document_antiquotationN, - Name_Space.empty_table Markup.document_antiquotation_optionN); - val extend = I; - fun merge ((commands1, options1), (commands2, options2)) : T = - (Name_Space.merge_tables (commands1, commands2), - Name_Space.merge_tables (options1, options2)); -); - -val get_antiquotations = Antiquotations.get o Proof_Context.theory_of; - -fun add_command name cmd thy = thy - |> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd)); - -fun add_option name opt thy = thy - |> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd)); - -fun check_command ctxt = #1 o 
Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt)); - -fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)); - -fun command src state ctxt = - let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src - in f src' state ctxt end; - -fun option ((xname, pos), s) ctxt = - let - val (_, opt) = - Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos); - in opt s ctxt end; - -fun print_antiquotations verbose ctxt = - let - val (commands, options) = get_antiquotations ctxt; - val command_names = map #1 (Name_Space.markup_table verbose ctxt commands); - val option_names = map #1 (Name_Space.markup_table verbose ctxt options); - in - [Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names), - Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)] - end |> Pretty.writeln_chunks; - -fun antiquotation name scan body = - add_command name - (fn src => fn state => fn ctxt => - let val (x, ctxt') = Token.syntax scan src ctxt - in body {source = src, state = state, context = ctxt'} x end); - - - -(** syntax of antiquotations **) - -(* option values *) - -fun boolean "" = true - | boolean "true" = true - | boolean "false" = false - | boolean s = error ("Bad boolean value: " ^ quote s); - -fun integer s = - let - fun int ss = - (case Library.read_int ss of (i, []) => i - | _ => error ("Bad integer value: " ^ quote s)); - in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end; - - -(* outer syntax *) - -local - -val property = - Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) ""; - -val properties = - Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) []; - -in - -val antiq = - Parse.!!! 
- (Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof) - >> (fn ((name, props), args) => (props, name :: args)); - -end; - - -(* eval antiquote *) - -local - -fun eval_antiq state (opts, src) = - let - val preview_ctxt = fold option opts (Toplevel.presentation_context_of state); - val print_ctxt = Context_Position.set_visible false preview_ctxt; - - fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) (); - val _ = cmd preview_ctxt; - val print_modes = space_explode "," (Config.get print_ctxt modes) @ [Latex.latexN]; - in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end; - -in - -fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss - | eval_antiquote state (Antiquote.Control {name, body, ...}) = - eval_antiq state - ([], Token.make_src name (if null body then [] else [Token.read_cartouche body])) - | eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) = - let - val keywords = - (case try Toplevel.presentation_context_of state of - SOME ctxt => Thy_Header.get_keywords' ctxt - | NONE => - error ("Unknown context -- cannot expand document antiquotations" ^ - Position.here pos)); - in eval_antiq state (Token.read_antiq keywords antiq (body, pos)) end; - -end; - - -(* output text *) - -fun output_text state {markdown} source = - let - val is_reported = - (case try Toplevel.context_of state of - SOME ctxt => Context_Position.is_visible ctxt - | NONE => true); - - val pos = Input.pos_of source; - val syms = Input.source_explode source; - - val _ = - if is_reported then - Position.report pos (Markup.language_document (Input.is_delimited source)) - else (); - - val output_antiquotes = map (eval_antiquote state) #> implode; - - fun output_line line = - (if Markdown.line_is_item line then "\\item " else "") ^ - output_antiquotes (Markdown.line_content line); - - fun output_blocks blocks = space_implode "\n\n" (map output_block blocks) - and output_block (Markdown.Par lines) = cat_lines (map output_line lines) - | output_block (Markdown.List {kind, body, ...}) = - Latex.environment (Markdown.print_kind kind) (output_blocks body); - in - if Toplevel.is_skipped_proof state then "" - else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms - then - let - val ants = Antiquote.parse pos syms; - val reports = Antiquote.antiq_reports ants; - val blocks = Markdown.read_antiquotes ants; - val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else (); - in output_blocks blocks end - else - let - val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms); - val reports = Antiquote.antiq_reports ants; - val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else (); - in output_antiquotes ants end - end; - - - -(** present theory source **) - -(*NB: arranging white space around command spans is a black art*) - -(* presentation tokens *) - -datatype token = - No_Token - | Basic_Token of Token.T - | Markup_Token of string * string * Input.source - | Markup_Env_Token of string * string * Input.source - | Raw_Token of Input.source; - -fun basic_token pred (Basic_Token tok) = pred tok - | basic_token _ _ = false; - -val improper_token = basic_token Token.is_improper; -val comment_token = basic_token Token.is_comment; -val blank_token = basic_token Token.is_blank; -val newline_token = basic_token Token.is_newline; - - -(* output token *) - -fun output_token state tok = - (case tok of - No_Token => "" - | Basic_Token tok => Latex.output_token tok - | 
Markup_Token (cmd, meta_args, source) => - "%\n\\isamarkup" ^ cmd ^ meta_args ^ "{" ^ output_text state {markdown = false} source ^ "%\n}\n" - | Markup_Env_Token (cmd, meta_args, source) => - Latex.environment ("isamarkup" ^ cmd) - (meta_args ^ output_text state {markdown = true} source) - | Raw_Token source => - "%\n" ^ output_text state {markdown = true} source ^ "\n"); - - -(* command spans *) - -type command = string * Position.T * string list; (*name, position, tags*) -type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*) - -datatype span = Span of command * (source * source * source * source) * bool; - -fun make_span cmd src = - let - fun take_newline (tok :: toks) = - if newline_token (fst tok) then ([tok], toks, true) - else ([], tok :: toks, false) - | take_newline [] = ([], [], false); - val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = - src - |> take_prefix (improper_token o fst) - ||>> take_suffix (improper_token o fst) - ||>> take_prefix (comment_token o fst) - ||> take_newline; - in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; - - -(* present spans *) - -local - -fun err_bad_nesting pos = - error ("Bad nesting of commands in presentation" ^ pos); - -fun edge which f (x: string option, y) = - if x = y then I - else (case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt)); - -val begin_tag = edge #2 Latex.begin_tag; -val end_tag = edge #1 Latex.end_tag; -fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e; -fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e; - -in - -fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) = - let - val present = fold (fn (tok, (flag, 0)) => - Buffer.add (output_token state' tok) - #> Buffer.add flag - | _ => I); - - val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span; - - val (tag, tags) = tag_stack; - val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); - - val nesting = Toplevel.level state' - Toplevel.level state; - - val active_tag' = - if is_some tag' then tag' - else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE - else - (case Keyword.command_tags keywords cmd_name of - default_tag :: _ => SOME default_tag - | [] => - if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state - then active_tag - else NONE); - - val edge = (active_tag, active_tag'); - - val newline' = - if is_none active_tag' then span_newline else newline; - - val tag_stack' = - if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack - else if nesting >= 0 then (tag', replicate nesting tag @ tags) - else - (case drop (~ nesting - 1) tags of - tg :: tgs => (tg, tgs) - | [] => err_bad_nesting (Position.here cmd_pos)); - - val buffer' = - buffer - |> end_tag edge - |> close_delim (fst present_cont) edge - |> snd present_cont - |> open_delim (present (#1 srcs)) edge - |> begin_tag edge - |> present (#2 srcs); - val present_cont' = - if newline then (present (#3 srcs), present (#4 srcs)) - else (I, present (#3 srcs) #> present (#4 srcs)); - in (tag_stack', active_tag', newline', buffer', present_cont') end; - -fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) = - if not (null tags) then err_bad_nesting " at end of theory" - else - buffer - |> end_tag (active_tag, NONE) - |> close_delim (fst present_cont) (active_tag, NONE) - |> snd 
present_cont; - -end; - - -(* present_thy *) - -local - -val markup_true = "\\isamarkuptrue%\n"; -val markup_false = "\\isamarkupfalse%\n"; - -val space_proper = - Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper; - -val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore); -val improper = Scan.many is_improper; -val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper)); -val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank)); - -val opt_newline = Scan.option (Scan.one Token.is_newline); - -val ignore = - Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore - >> pair (d + 1)) || - Scan.depend (fn d => Scan.one Token.is_end_ignore --| - (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) - >> pair (d - 1)); - -val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end); - -val locale = - Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |-- - Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")"))); - -val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theory -> string parser) - - -in - -fun present_thy thy command_results toks = - let - val keywords = Thy_Header.get_keywords thy; - - (* tokens *) - - val ignored = Scan.state --| ignore - >> (fn d => (NONE, (No_Token, ("", d)))); - - fun markup pred mk flag = Scan.peek (fn d => - improper |-- - Parse.position (Scan.one (fn tok => Token.is_command tok andalso - pred keywords (Token.content_of tok))) -- - Scan.repeat tag -- - (improper |-- - (Parse.!!!! - ( (!meta_args_parser_hook thy) - -- ( (improper -- locale -- improper) - |-- (Parse.document_source)) - --| improper_end))) - >> (fn (((tok, pos'), tags), (meta_args,source)) => - let val name = Token.content_of tok - in (SOME (name, pos', tags), (mk (name, meta_args, source), (flag, d))) end)); - - val command = Scan.peek (fn d => - Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] -- - Scan.one Token.is_command -- Scan.repeat tag - >> (fn ((cmd_mod, cmd), tags) => - map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @ - [(SOME (Token.content_of cmd, Token.pos_of cmd, tags), - (Basic_Token cmd, (markup_false, d)))])); - - val cmt = Scan.peek (fn d => - (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |-- - Parse.!!!! 
(improper |-- Parse.document_source) >> - (fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d))))); - - val other = Scan.peek (fn d => - Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); - - val tokens = - (ignored || - markup Keyword.is_document_heading Markup_Token markup_true || - markup Keyword.is_document_body Markup_Env_Token markup_true || - markup Keyword.is_document_raw (Raw_Token o #3) "") >> single || - command || - (cmt || other) >> single; - - - (* spans *) - - val is_eof = fn (_, (Basic_Token x, _)) => Token.is_eof x | _ => false; - val stopper = Scan.stopper (K (NONE, (Basic_Token Token.eof, ("", 0)))) is_eof; - - val cmd = Scan.one (is_some o fst); - val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; - - val comments = Scan.many (comment_token o fst o snd); - val blank = Scan.one (blank_token o fst o snd); - val newline = Scan.one (newline_token o fst o snd); - val before_cmd = - Scan.option (newline -- comments) -- - Scan.option (newline -- comments) -- - Scan.option (blank -- comments) -- cmd; - - val span = - Scan.repeat non_cmd -- cmd -- - Scan.repeat (Scan.unless before_cmd non_cmd) -- - Scan.option (newline >> (single o snd)) - >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => - make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); - - val spans = toks - |> take_suffix Token.is_space |> #1 - |> Source.of_list - |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat)) - |> Source.source stopper (Scan.error (Scan.bulk span)) - |> Source.exhaust; - - (* present commands *) - - fun present_command tr span st st' = - Toplevel.setmp_thread_position tr (present_span keywords span st st'); - - fun present _ [] = I - | present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest; - in - if length command_results = length spans then - ((NONE, []), NONE, true, Buffer.empty, (I, I)) - |> present Toplevel.toplevel (command_results ~~ spans) - |> present_trailer - else error "Messed-up outer syntax for presentation" - end; - -fun set_meta_args_parser f = (meta_args_parser_hook:= f) - -end; - - - -(** setup default output **) - -(* options *) - -val _ = Theory.setup - (add_option @{binding show_types} (Config.put show_types o boolean) #> - add_option @{binding show_sorts} (Config.put show_sorts o boolean) #> - add_option @{binding show_structs} (Config.put show_structs o boolean) #> - add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #> - add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #> - add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #> - add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #> - add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #> - add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #> - add_option @{binding display} (Config.put display o boolean) #> - add_option @{binding break} (Config.put break o boolean) #> - add_option @{binding quotes} (Config.put quotes o boolean) #> - add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #> - add_option @{binding margin} (Config.put margin o integer) #> - add_option @{binding indent} (Config.put indent o integer) #> - add_option @{binding source} (Config.put source o boolean) #> - add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer)); - - -(* basic pretty printing *) - -fun perhaps_trim ctxt = 
- not (Config.get ctxt display) ? Symbol.trim_blanks; - -fun pretty_text ctxt = - Pretty.chunks o map Pretty.str o map (perhaps_trim ctxt) o split_lines; - -fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t; - -fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; - -fun pretty_term_style ctxt (style, t) = - pretty_term ctxt (style t); - -fun pretty_thm_style ctxt (style, th) = - pretty_term ctxt (style (Thm.full_prop_of th)); - -fun pretty_term_typ ctxt (style, t) = - let val t' = style t - in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end; - -fun pretty_term_typeof ctxt (style, t) = - Syntax.pretty_typ ctxt (Term.fastype_of (style t)); - -fun pretty_const ctxt c = - let - val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) - handle TYPE (msg, _, _) => error msg; - val ([t'], _) = Variable.import_terms true [t] ctxt; - in pretty_term ctxt t' end; - -fun pretty_abbrev ctxt s = - let - val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s; - fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t); - val (head, args) = Term.strip_comb t; - val (c, T) = Term.dest_Const head handle TERM _ => err (); - val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c - handle TYPE _ => err (); - val t' = Term.betapplys (Envir.expand_atom T (U, u), args); - val eq = Logic.mk_equals (t, t'); - val ctxt' = Variable.auto_fixes eq ctxt; - in Proof_Context.pretty_term_abbrev ctxt' eq end; - -fun pretty_locale ctxt (name, pos) = - let - val thy = Proof_Context.theory_of ctxt - in (Pretty.str o Locale.extern thy o Locale.check thy) (name, pos) end; - -fun pretty_class ctxt = - Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt; - -fun pretty_type ctxt s = - let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s - in Pretty.str (Proof_Context.extern_type ctxt name) end; - -fun pretty_prf full ctxt = Proof_Syntax.pretty_clean_proof_of ctxt full; - -fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name); - - -(* default output *) - -val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src; - -fun maybe_pretty_source pretty ctxt src xs = - map (pretty ctxt) xs (*always pretty in order to exhibit errors!*) - |> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I); - -fun string_of_margin ctxt = Pretty.string_of_margin (Config.get ctxt margin); - -fun output ctxt prts = - prts - |> Config.get ctxt quotes ? 
map Pretty.quote - |> (if Config.get ctxt display then - map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output) - #> space_implode "\\isasep\\isanewline%\n" - #> Latex.environment "isabelle" - else - map - ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of) - #> Output.output) - #> space_implode "\\isasep\\isanewline%\n" - #> enclose "\\isa{" "}"); - - -(* verbatim text *) - -fun verbatim_text ctxt = - if Config.get ctxt display then - split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #> - Latex.output_ascii #> Latex.environment "isabellett" - else - split_lines #> - map (Latex.output_ascii #> enclose "\\isatt{" "}") #> - space_implode "\\isasep\\isanewline%\n"; - - -(* antiquotations for basic entities *) - -local - -fun basic_entities name scan pretty = - antiquotation name scan (fn {source, context = ctxt, ...} => - output ctxt o maybe_pretty_source pretty ctxt source); - -fun basic_entities_style name scan pretty = - antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) => - output ctxt - (maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs)); - -fun basic_entity name scan = basic_entities name (scan >> single); - -in - -val _ = Theory.setup - (basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #> - basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #> - basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #> - basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #> - basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #> - basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #> - basic_entity @{binding abbrev} (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #> - basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #> - basic_entity @{binding locale} (Scan.lift (Parse.position Args.name)) pretty_locale #> - basic_entity @{binding class} (Scan.lift Args.embedded_inner_syntax) pretty_class #> - basic_entity @{binding type} (Scan.lift Args.embedded) pretty_type #> - basic_entities @{binding prf} Attrib.thms (pretty_prf false) #> - basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #> - basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory); - -end; - - -(** document command **) - - -fun document_command markdown (loc, txt) = - Toplevel.keep (fn state => - (case loc of - NONE => ignore (output_text state markdown txt) - | SOME (_, pos) => - error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o - Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt)); - -end; From bbfe6b1a2cc90c8d7bf9381ce9a492ab587bde01 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sun, 16 Jun 2019 21:02:36 +0100 Subject: [PATCH 06/23] Migration from 2017 to 2018. --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 8aaf03f..b9e6f6e 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,9 @@ well as formal development. ## Prerequisites -Isabelle/DOF requires [Isabelle 2017](http://isabelle.in.tum.de/website-Isabelle2017/). -Please download the Isabelle 2017 distribution for your operating -system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2017/). 
+Isabelle/DOF requires [Isabelle 2018](http://isabelle.in.tum.de/website-Isabelle2018/).
+Please download the Isabelle 2018 distribution for your operating
+system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2018/).
 
 ## Installation
 
@@ -22,7 +22,7 @@ If a specific Isabelle version should be used (i.e., not the default one),
 the full path to the ``isabelle`` command needs to be passed
 using the ``-i`` command line argument of the ``install`` script:
 ```console
-foo@bar:~$ ./install -i /usr/local/Isabelle2017/bin/isabelle
+foo@bar:~$ ./install -i /usr/local/Isabelle2018/bin/isabelle
 ```
 
 For further command line options of the installer, please use the
@@ -36,22 +36,22 @@ foo@bar:~$ ./install -h
 
 The installer will
 * apply a patch to Isabelle that is necessary to use Isabelle/DOF. If this
   patch installation fails, you need to manually replace
-  the file ``Isabelle2017/src/Pure/Thy/thy_output.ML`` in the Isabelle
+  the file ``Isabelle2018/src/Pure/Thy/thy_output.ML`` in the Isabelle
   distribution with the file ``patches/thy_output.ML`` from the
   Isabelle/DOF distribution:
   ```console
   cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/
   ```
 * install required entries from the [AFP](https://www.isa-afp.org). If this
-  installation fails, you need to manually install the AFP for Isabelle 2017 as follows:
-  Download the [AFP for Isabelle 2017](https://sourceforge.net/projects/afp/files/afp-Isabelle2017/afp-2018-08-14.tar.gz")
+  installation fails, you need to manually install the AFP for Isabelle 2018 as follows:
+  Download the [AFP for Isabelle 2018](https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz)
   and follow the
   [instructions for installing the AFP as Isabelle component](https://www.isa-afp.org/using.html).
   If you have extracted the AFP archive into the directory
   `/home/myself/afp`, you should run the following command to register the
   AFP sessions in Isabelle's `ROOTS` file:
   ```console
-  echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2017/ROOTS
+  echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2018/ROOTS
   ```
 * install the Isabelle/DOF-plugin into the Isabelle user directory (the
   exact location depends on the Isabelle version).

From f971a8e018f247c92b1d5362ac6456f3ba1001d8 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Sun, 16 Jun 2019 21:08:36 +0100
Subject: [PATCH 07/23] Cleanup.
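
The removed Functional-Automata directory was a copy of the AFP entry of the
same name (its ROOT file below is marked `(AFP)`). With the AFP registered in
Isabelle's `ROOTS` as described in the README change of the previous patch,
downstream theories can import the entry session-qualified instead of relying
on a local copy. A minimal sketch, assuming the AFP is installed as described
there; the theory name `Scratch` is a placeholder:

```
theory Scratch
  (* session-qualified import of the AFP entry "Functional-Automata";
     assumes the AFP thys directory is listed in
     ~/.isabelle/Isabelle2018/ROOTS *)
  imports "Functional-Automata.Functional_Automata"
begin

end
```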
--- Functional-Automata/AutoMaxChop.thy | 52 -- Functional-Automata/AutoProj.thy | 29 - Functional-Automata/AutoRegExp.thy | 17 - Functional-Automata/Automata.thy | 55 -- Functional-Automata/DA.thy | 38 -- Functional-Automata/Execute.thy | 20 - Functional-Automata/Functional_Automata.thy | 5 - Functional-Automata/MaxChop.thy | 120 ---- Functional-Automata/MaxPrefix.thy | 92 --- Functional-Automata/NA.thy | 50 -- Functional-Automata/NAe.thy | 72 --- Functional-Automata/ROOT | 11 - Functional-Automata/RegExp2NA.thy | 442 -------------- Functional-Automata/RegExp2NAe.thy | 641 -------------------- Functional-Automata/RegSet_of_nat_DA.thy | 233 ------- Functional-Automata/document/root.bib | 6 - Functional-Automata/document/root.tex | 54 -- 17 files changed, 1937 deletions(-) delete mode 100644 Functional-Automata/AutoMaxChop.thy delete mode 100644 Functional-Automata/AutoProj.thy delete mode 100644 Functional-Automata/AutoRegExp.thy delete mode 100644 Functional-Automata/Automata.thy delete mode 100644 Functional-Automata/DA.thy delete mode 100644 Functional-Automata/Execute.thy delete mode 100644 Functional-Automata/Functional_Automata.thy delete mode 100644 Functional-Automata/MaxChop.thy delete mode 100644 Functional-Automata/MaxPrefix.thy delete mode 100644 Functional-Automata/NA.thy delete mode 100644 Functional-Automata/NAe.thy delete mode 100644 Functional-Automata/ROOT delete mode 100644 Functional-Automata/RegExp2NA.thy delete mode 100644 Functional-Automata/RegExp2NAe.thy delete mode 100644 Functional-Automata/RegSet_of_nat_DA.thy delete mode 100644 Functional-Automata/document/root.bib delete mode 100644 Functional-Automata/document/root.tex diff --git a/Functional-Automata/AutoMaxChop.thy b/Functional-Automata/AutoMaxChop.thy deleted file mode 100644 index fc20fcf..0000000 --- a/Functional-Automata/AutoMaxChop.thy +++ /dev/null @@ -1,52 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Automata based scanner" - -theory AutoMaxChop -imports DA MaxChop -begin - -primrec auto_split :: "('a,'s)da \ 's \ 'a list * 'a list \ 'a list \ 'a splitter" where -"auto_split A q res ps [] = (if fin A q then (ps,[]) else res)" | -"auto_split A q res ps (x#xs) = - auto_split A (next A x q) (if fin A q then (ps,x#xs) else res) (ps@[x]) xs" - -definition - auto_chop :: "('a,'s)da \ 'a chopper" where -"auto_chop A = chop (\xs. auto_split A (start A) ([],xs) [] xs)" - - -lemma delta_snoc: "delta A (xs@[y]) q = next A y (delta A xs q)" -by simp - -lemma auto_split_lemma: - "\q ps res. auto_split A (delta A ps q) res ps xs = - maxsplit (\ys. fin A (delta A ys q)) res ps xs" -apply (induct xs) - apply simp -apply (simp add: delta_snoc[symmetric] del: delta_append) -done - -lemma auto_split_is_maxsplit: - "auto_split A (start A) res [] xs = maxsplit (accepts A) res [] xs" -apply (unfold accepts_def) -apply (subst delta_Nil[where ?s = "start A", symmetric]) -apply (subst auto_split_lemma) -apply simp -done - -lemma is_maxsplitter_auto_split: - "is_maxsplitter (accepts A) (\xs. 
auto_split A (start A) ([],xs) [] xs)" -by (simp add: auto_split_is_maxsplit is_maxsplitter_maxsplit) - - -lemma is_maxchopper_auto_chop: - "is_maxchopper (accepts A) (auto_chop A)" -apply (unfold auto_chop_def) -apply (rule is_maxchopper_chop) -apply (rule is_maxsplitter_auto_split) -done - -end diff --git a/Functional-Automata/AutoProj.thy b/Functional-Automata/AutoProj.thy deleted file mode 100644 index c09ed39..0000000 --- a/Functional-Automata/AutoProj.thy +++ /dev/null @@ -1,29 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM - -Is there an optimal order of arguments for `next'? -Currently we can have laws like `delta A (a#w) = delta A w o delta A a' -Otherwise we could have `acceps A == fin A o delta A (start A)' -and use foldl instead of foldl2. -*) - -section "Projection functions for automata" - -theory AutoProj -imports Main -begin - -definition start :: "'a * 'b * 'c \ 'a" where "start A = fst A" -definition "next" :: "'a * 'b * 'c \ 'b" where "next A = fst(snd(A))" -definition fin :: "'a * 'b * 'c \ 'c" where "fin A = snd(snd(A))" - -lemma [simp]: "start(q,d,f) = q" -by(simp add:start_def) - -lemma [simp]: "next(q,d,f) = d" -by(simp add:next_def) - -lemma [simp]: "fin(q,d,f) = f" -by(simp add:fin_def) - -end diff --git a/Functional-Automata/AutoRegExp.thy b/Functional-Automata/AutoRegExp.thy deleted file mode 100644 index fe6612c..0000000 --- a/Functional-Automata/AutoRegExp.thy +++ /dev/null @@ -1,17 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Combining automata and regular expressions" - -theory AutoRegExp -imports Automata RegExp2NA RegExp2NAe -begin - -theorem "DA.accepts (na2da(rexp2na r)) w = (w : lang r)" -by (simp add: NA_DA_equiv[THEN sym] accepts_rexp2na) - -theorem "DA.accepts (nae2da(rexp2nae r)) w = (w : lang r)" -by (simp add: NAe_DA_equiv accepts_rexp2nae) - -end diff --git a/Functional-Automata/Automata.thy b/Functional-Automata/Automata.thy deleted file mode 100644 index 8d1cfe1..0000000 --- a/Functional-Automata/Automata.thy +++ /dev/null @@ -1,55 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Conversions between automata" - -theory Automata -imports DA NAe -begin - -definition - na2da :: "('a,'s)na \ ('a,'s set)da" where -"na2da A = ({start A}, \a Q. Union(next A a ` Q), \Q. \q\Q. fin A q)" - -definition - nae2da :: "('a,'s)nae \ ('a,'s set)da" where -"nae2da A = ({start A}, - \a Q. Union(next A (Some a) ` ((eps A)\<^sup>* `` Q)), - \Q. \p \ (eps A)\<^sup>* `` Q. fin A p)" - -(*** Equivalence of NA and DA ***) - -lemma DA_delta_is_lift_NA_delta: - "\Q. DA.delta (na2da A) w Q = Union(NA.delta A w ` Q)" -by (induct w)(auto simp:na2da_def) - -lemma NA_DA_equiv: - "NA.accepts A w = DA.accepts (na2da A) w" -apply (simp add: DA.accepts_def NA.accepts_def DA_delta_is_lift_NA_delta) -apply (simp add: na2da_def) -done - -(*** Direct equivalence of NAe and DA ***) - -lemma espclosure_DA_delta_is_steps: - "\Q. (eps A)\<^sup>* `` (DA.delta (nae2da A) w Q) = steps A w `` Q" -apply (induct w) - apply(simp) -apply (simp add: step_def nae2da_def) -apply (blast) -done - -lemma NAe_DA_equiv: - "DA.accepts (nae2da A) w = NAe.accepts A w" -proof - - have "\Q. fin (nae2da A) Q = (\q \ (eps A)\<^sup>* `` Q. 
fin A q)" - by(simp add:nae2da_def) - thus ?thesis - apply(simp add:espclosure_DA_delta_is_steps NAe.accepts_def DA.accepts_def) - apply(simp add:nae2da_def) - apply blast - done -qed - -end diff --git a/Functional-Automata/DA.thy b/Functional-Automata/DA.thy deleted file mode 100644 index 2cc8eb7..0000000 --- a/Functional-Automata/DA.thy +++ /dev/null @@ -1,38 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Deterministic automata" - -theory DA -imports AutoProj -begin - -type_synonym ('a,'s)da = "'s * ('a \ 's \ 's) * ('s \ bool)" - -definition - foldl2 :: "('a \ 'b \ 'b) \ 'a list \ 'b \ 'b" where -"foldl2 f xs a = foldl (\a b. f b a) a xs" - -definition - delta :: "('a,'s)da \ 'a list \ 's \ 's" where -"delta A = foldl2 (next A)" - -definition - accepts :: "('a,'s)da \ 'a list \ bool" where -"accepts A = (\w. fin A (delta A w (start A)))" - -lemma [simp]: "foldl2 f [] a = a \ foldl2 f (x#xs) a = foldl2 f xs (f x a)" -by(simp add:foldl2_def) - -lemma delta_Nil[simp]: "delta A [] s = s" -by(simp add:delta_def) - -lemma delta_Cons[simp]: "delta A (a#w) s = delta A w (next A a s)" -by(simp add:delta_def) - -lemma delta_append[simp]: - "\q ys. delta A (xs@ys) q = delta A ys (delta A xs q)" -by(induct xs) simp_all - -end diff --git a/Functional-Automata/Execute.thy b/Functional-Automata/Execute.thy deleted file mode 100644 index ef3c047..0000000 --- a/Functional-Automata/Execute.thy +++ /dev/null @@ -1,20 +0,0 @@ -(* Author: Lukas Bulwahn, TUM 2011 *) - -section \Executing Automata and membership of Regular Expressions\ - -theory Execute -imports AutoRegExp -begin - -subsection \Example\ - -definition example_expression -where - "example_expression = (let r0 = Atom (0::nat); r1 = Atom (1::nat) - in Times (Star (Plus (Times r1 r1) r0)) (Star (Plus (Times r0 r0) r1)))" - -value "NA.accepts (rexp2na example_expression) [0,1,1,0,0,1]" - -value "DA.accepts (na2da (rexp2na example_expression)) [0,1,1,0,0,1]" - -end diff --git a/Functional-Automata/Functional_Automata.thy b/Functional-Automata/Functional_Automata.thy deleted file mode 100644 index 8dd9c9f..0000000 --- a/Functional-Automata/Functional_Automata.thy +++ /dev/null @@ -1,5 +0,0 @@ -theory Functional_Automata -imports AutoRegExp AutoMaxChop RegSet_of_nat_DA Execute -begin - -end diff --git a/Functional-Automata/MaxChop.thy b/Functional-Automata/MaxChop.thy deleted file mode 100644 index 06b3786..0000000 --- a/Functional-Automata/MaxChop.thy +++ /dev/null @@ -1,120 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Generic scanner" - -theory MaxChop -imports MaxPrefix -begin - -type_synonym 'a chopper = "'a list \ 'a list list * 'a list" - -definition - is_maxchopper :: "('a list \ bool) \ 'a chopper \ bool" where -"is_maxchopper P chopper = - (\xs zs yss. - (chopper(xs) = (yss,zs)) = - (xs = concat yss @ zs \ (\ys \ set yss. ys \ []) \ - (case yss of - [] \ is_maxpref P [] xs - | us#uss \ is_maxpref P us xs \ chopper(concat(uss)@zs) = (uss,zs))))" - -definition - reducing :: "'a splitter \ bool" where -"reducing splitf = - (\xs ys zs. 
splitf xs = (ys,zs) \ ys \ [] \ length zs < length xs)" - -function chop :: "'a splitter \ 'a list \ 'a list list \ 'a list" where - [simp del]: "chop splitf xs = (if reducing splitf - then let pp = splitf xs - in if fst pp = [] then ([], xs) - else let qq = chop splitf (snd pp) - in (fst pp # fst qq, snd qq) - else undefined)" -by pat_completeness auto - -termination apply (relation "measure (length \ snd)") -apply (auto simp: reducing_def) -apply (case_tac "splitf xs") -apply auto -done - -lemma chop_rule: "reducing splitf \ - chop splitf xs = (let (pre, post) = splitf xs - in if pre = [] then ([], xs) - else let (xss, zs) = chop splitf post - in (pre # xss,zs))" -apply (simp add: chop.simps) -apply (simp add: Let_def split: prod.split) -done - -lemma reducing_maxsplit: "reducing(\qs. maxsplit P ([],qs) [] qs)" -by (simp add: reducing_def maxsplit_eq) - -lemma is_maxsplitter_reducing: - "is_maxsplitter P splitf \ reducing splitf" -by(simp add:is_maxsplitter_def reducing_def) - -lemma chop_concat[rule_format]: "is_maxsplitter P splitf \ - (\yss zs. chop splitf xs = (yss,zs) \ xs = concat yss @ zs)" -apply (induct xs rule:length_induct) -apply (simp (no_asm_simp) split del: if_split - add: chop_rule[OF is_maxsplitter_reducing]) -apply (simp add: Let_def is_maxsplitter_def split: prod.split) -done - -lemma chop_nonempty: "is_maxsplitter P splitf \ - \yss zs. chop splitf xs = (yss,zs) \ (\ys \ set yss. ys \ [])" -apply (induct xs rule:length_induct) -apply (simp (no_asm_simp) add: chop_rule is_maxsplitter_reducing) -apply (simp add: Let_def is_maxsplitter_def split: prod.split) -apply (intro allI impI) -apply (rule ballI) -apply (erule exE) -apply (erule allE) -apply auto -done - -lemma is_maxchopper_chop: - assumes prem: "is_maxsplitter P splitf" shows "is_maxchopper P (chop splitf)" -apply(unfold is_maxchopper_def) -apply clarify -apply (rule iffI) - apply (rule conjI) - apply (erule chop_concat[OF prem]) - apply (rule conjI) - apply (erule prem[THEN chop_nonempty[THEN spec, THEN spec, THEN mp]]) - apply (erule rev_mp) - apply (subst prem[THEN is_maxsplitter_reducing[THEN chop_rule]]) - apply (simp add: Let_def prem[simplified is_maxsplitter_def] - split: prod.split) - apply clarify - apply (rule conjI) - apply (clarify) - apply (clarify) - apply simp - apply (frule chop_concat[OF prem]) - apply (clarify) -apply (subst prem[THEN is_maxsplitter_reducing, THEN chop_rule]) -apply (simp add: Let_def prem[simplified is_maxsplitter_def] - split: prod.split) -apply (clarify) -apply (rename_tac xs1 ys1 xss1 ys) -apply (simp split: list.split_asm) - apply (simp add: is_maxpref_def) - apply (blast intro: prefix_append[THEN iffD2]) -apply (rule conjI) - apply (clarify) - apply (simp (no_asm_use) add: is_maxpref_def) - apply (blast intro: prefix_append[THEN iffD2]) -apply (clarify) -apply (rename_tac us uss) -apply (subgoal_tac "xs1=us") - apply simp -apply simp -apply (simp (no_asm_use) add: is_maxpref_def) -apply (blast intro: prefix_append[THEN iffD2] prefix_order.antisym) -done - -end diff --git a/Functional-Automata/MaxPrefix.thy b/Functional-Automata/MaxPrefix.thy deleted file mode 100644 index 7441219..0000000 --- a/Functional-Automata/MaxPrefix.thy +++ /dev/null @@ -1,92 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Maximal prefix" - -theory MaxPrefix -imports "HOL-Library.Sublist" -begin - -definition - is_maxpref :: "('a list \ bool) \ 'a list \ 'a list \ bool" where -"is_maxpref P xs ys = - (prefix xs ys \ (xs=[] \ P xs) \ (\zs. 
prefix zs ys \ P zs \ prefix zs xs))" - -type_synonym 'a splitter = "'a list \ 'a list * 'a list" - -definition - is_maxsplitter :: "('a list \ bool) \ 'a splitter \ bool" where -"is_maxsplitter P f = - (\xs ps qs. f xs = (ps,qs) = (xs=ps@qs \ is_maxpref P ps xs))" - -fun maxsplit :: "('a list \ bool) \ 'a list * 'a list \ 'a list \ 'a splitter" where -"maxsplit P res ps [] = (if P ps then (ps,[]) else res)" | -"maxsplit P res ps (q#qs) = maxsplit P (if P ps then (ps,q#qs) else res) - (ps@[q]) qs" - -declare if_split[split del] - -lemma maxsplit_lemma: "(maxsplit P res ps qs = (xs,ys)) = - (if \us. prefix us qs \ P(ps@us) then xs@ys=ps@qs \ is_maxpref P xs (ps@qs) - else (xs,ys)=res)" -proof (induction P res ps qs rule: maxsplit.induct) - case 1 - thus ?case by (auto simp: is_maxpref_def split: if_splits) -next - case (2 P res ps q qs) - show ?case - proof (cases "\us. prefix us qs \ P ((ps @ [q]) @ us)") - case True - note ex1 = this - then guess us by (elim exE conjE) note us = this - hence ex2: "\us. prefix us (q # qs) \ P (ps @ us)" - by (intro exI[of _ "q#us"]) auto - with ex1 and 2 show ?thesis by simp - next - case False - note ex1 = this - show ?thesis - proof (cases "\us. prefix us (q#qs) \ P (ps @ us)") - case False - from 2 show ?thesis - by (simp only: ex1 False) (insert ex1 False, auto simp: prefix_Cons) - next - case True - note ex2 = this - show ?thesis - proof (cases "P ps") - case True - with 2 have "(maxsplit P (ps, q # qs) (ps @ [q]) qs = (xs, ys)) \ (xs = ps \ ys = q # qs)" - by (simp only: ex1 ex2) simp_all - also have "\ \ (xs @ ys = ps @ q # qs \ is_maxpref P xs (ps @ q # qs))" - using ex1 True - by (auto simp: is_maxpref_def prefix_append prefix_Cons append_eq_append_conv2) - finally show ?thesis using True by (simp only: ex1 ex2) simp_all - next - case False - with 2 have "(maxsplit P res (ps @ [q]) qs = (xs, ys)) \ ((xs, ys) = res)" - by (simp only: ex1 ex2) simp - also have "\ \ (xs @ ys = ps @ q # qs \ is_maxpref P xs (ps @ q # qs))" - using ex1 ex2 False - by (auto simp: append_eq_append_conv2 is_maxpref_def prefix_Cons) - finally show ?thesis - using False by (simp only: ex1 ex2) simp - qed - qed - qed -qed - -declare if_split[split] - -lemma is_maxpref_Nil[simp]: - "\(\us. prefix us xs \ P us) \ is_maxpref P ps xs = (ps = [])" - by (auto simp: is_maxpref_def) - -lemma is_maxsplitter_maxsplit: - "is_maxsplitter P (\xs. maxsplit P ([],xs) [] xs)" - by (auto simp: maxsplit_lemma is_maxsplitter_def) - -lemmas maxsplit_eq = is_maxsplitter_maxsplit[simplified is_maxsplitter_def] - -end diff --git a/Functional-Automata/NA.thy b/Functional-Automata/NA.thy deleted file mode 100644 index 83994a4..0000000 --- a/Functional-Automata/NA.thy +++ /dev/null @@ -1,50 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Nondeterministic automata" - -theory NA -imports AutoProj -begin - -type_synonym ('a,'s) na = "'s * ('a \ 's \ 's set) * ('s \ bool)" - -primrec delta :: "('a,'s)na \ 'a list \ 's \ 's set" where -"delta A [] p = {p}" | -"delta A (a#w) p = Union(delta A w ` next A a p)" - -definition - accepts :: "('a,'s)na \ 'a list \ bool" where -"accepts A w = (\q \ delta A w (start A). fin A q)" - -definition - step :: "('a,'s)na \ 'a \ ('s * 's)set" where -"step A a = {(p,q) . 
q : next A a p}" - -primrec steps :: "('a,'s)na \ 'a list \ ('s * 's)set" where -"steps A [] = Id" | -"steps A (a#w) = step A a O steps A w" - -lemma steps_append[simp]: - "steps A (v@w) = steps A v O steps A w" -by(induct v, simp_all add:O_assoc) - -lemma in_steps_append[iff]: - "(p,r) : steps A (v@w) = ((p,r) : (steps A v O steps A w))" -apply(rule steps_append[THEN equalityE]) -apply blast -done - -lemma delta_conv_steps: "\p. delta A w p = {q. (p,q) : steps A w}" -by(induct w)(auto simp:step_def) - -lemma accepts_conv_steps: - "accepts A w = (\q. (start A,q) \ steps A w \ fin A q)" -by(simp add: delta_conv_steps accepts_def) - -abbreviation - Cons_syn :: "'a \ 'a list set \ 'a list set" (infixr "##" 65) where - "x ## S \ Cons x ` S" - -end diff --git a/Functional-Automata/NAe.thy b/Functional-Automata/NAe.thy deleted file mode 100644 index 0d41806..0000000 --- a/Functional-Automata/NAe.thy +++ /dev/null @@ -1,72 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "Nondeterministic automata with epsilon transitions" - -theory NAe -imports NA -begin - -type_synonym ('a,'s)nae = "('a option,'s)na" - -abbreviation - eps :: "('a,'s)nae \ ('s * 's)set" where - "eps A \ step A None" - -primrec steps :: "('a,'s)nae \ 'a list \ ('s * 's)set" where -"steps A [] = (eps A)\<^sup>*" | -"steps A (a#w) = (eps A)\<^sup>* O step A (Some a) O steps A w" - -definition - accepts :: "('a,'s)nae \ 'a list \ bool" where -"accepts A w = (\q. (start A,q) \ steps A w \ fin A q)" - -(* not really used: -consts delta :: "('a,'s)nae \ 'a list \ 's \ 's set" -primrec -"delta A [] s = (eps A)\<^sup>* `` {s}" -"delta A (a#w) s = lift(delta A w) (lift(next A (Some a)) ((eps A)\<^sup>* `` {s}))" -*) - -lemma steps_epsclosure[simp]: "(eps A)\<^sup>* O steps A w = steps A w" -by (cases w) (simp_all add: O_assoc[symmetric]) - -lemma in_steps_epsclosure: - "[| (p,q) : (eps A)\<^sup>*; (q,r) : steps A w |] ==> (p,r) : steps A w" -apply(rule steps_epsclosure[THEN equalityE]) -apply blast -done - -lemma epsclosure_steps: "steps A w O (eps A)\<^sup>* = steps A w" -apply(induct w) - apply simp -apply(simp add:O_assoc) -done - -lemma in_epsclosure_steps: - "[| (p,q) : steps A w; (q,r) : (eps A)\<^sup>* |] ==> (p,r) : steps A w" -apply(rule epsclosure_steps[THEN equalityE]) -apply blast -done - -lemma steps_append[simp]: "steps A (v@w) = steps A v O steps A w" -by(induct v)(simp_all add:O_assoc[symmetric]) - -lemma in_steps_append[iff]: - "(p,r) : steps A (v@w) = ((p,r) : (steps A v O steps A w))" -apply(rule steps_append[THEN equalityE]) -apply blast -done - -(* Equivalence of steps and delta -* Use "(\x \ f ` A. P x) = (\a\A. P(f x))" ?? * -Goal "\p. 
(p,q) : steps A w = (q : delta A w p)"; -by (induct_tac "w" 1); - by (Simp_tac 1); -by (asm_simp_tac (simpset() addsimps [comp_def,step_def]) 1); -by (Blast_tac 1); -qed_spec_mp "steps_equiv_delta"; -*) - -end diff --git a/Functional-Automata/ROOT b/Functional-Automata/ROOT deleted file mode 100644 index 8e222de..0000000 --- a/Functional-Automata/ROOT +++ /dev/null @@ -1,11 +0,0 @@ -chapter AFP - -session "Functional-Automata" (AFP) = "HOL-Library" + - options [timeout = 600] - sessions - "Regular-Sets" - theories - Functional_Automata - document_files - "root.bib" - "root.tex" diff --git a/Functional-Automata/RegExp2NA.thy b/Functional-Automata/RegExp2NA.thy deleted file mode 100644 index a3374f6..0000000 --- a/Functional-Automata/RegExp2NA.thy +++ /dev/null @@ -1,442 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "From regular expressions directly to nondeterministic automata" - -theory RegExp2NA -imports "Regular-Sets.Regular_Exp" NA -begin - -type_synonym 'a bitsNA = "('a,bool list)na" - -definition -"atom" :: "'a \ 'a bitsNA" where -"atom a = ([True], - \b s. if s=[True] \ b=a then {[False]} else {}, - \s. s=[False])" - -definition - or :: "'a bitsNA \ 'a bitsNA \ 'a bitsNA" where -"or = (\(ql,dl,fl)(qr,dr,fr). - ([], - \a s. case s of - [] \ (True ## dl a ql) \ (False ## dr a qr) - | left#s \ if left then True ## dl a s - else False ## dr a s, - \s. case s of [] \ (fl ql | fr qr) - | left#s \ if left then fl s else fr s))" - -definition - conc :: "'a bitsNA \ 'a bitsNA \ 'a bitsNA" where -"conc = (\(ql,dl,fl)(qr,dr,fr). - (True#ql, - \a s. case s of - [] \ {} - | left#s \ if left then (True ## dl a s) \ - (if fl s then False ## dr a qr else {}) - else False ## dr a s, - \s. case s of [] \ False | left#s \ left \ fl s \ fr qr | \left \ fr s))" - -definition - epsilon :: "'a bitsNA" where -"epsilon = ([],\a s. {}, \s. s=[])" - -definition - plus :: "'a bitsNA \ 'a bitsNA" where -"plus = (\(q,d,f). (q, \a s. d a s \ (if f s then d a q else {}), f))" - -definition - star :: "'a bitsNA \ 'a bitsNA" where -"star A = or epsilon (plus A)" - -primrec rexp2na :: "'a rexp \ 'a bitsNA" where -"rexp2na Zero = ([], \a s. {}, \s. False)" | -"rexp2na One = epsilon" | -"rexp2na(Atom a) = atom a" | -"rexp2na(Plus r s) = or (rexp2na r) (rexp2na s)" | -"rexp2na(Times r s) = conc (rexp2na r) (rexp2na s)" | -"rexp2na(Star r) = star (rexp2na r)" - -declare split_paired_all[simp] - -(******************************************************) -(* atom *) -(******************************************************) - -lemma fin_atom: "(fin (atom a) q) = (q = [False])" -by(simp add:atom_def) - -lemma start_atom: "start (atom a) = [True]" -by(simp add:atom_def) - -lemma in_step_atom_Some[simp]: - "(p,q) : step (atom a) b = (p=[True] \ q=[False] \ b=a)" -by (simp add: atom_def step_def) - -lemma False_False_in_steps_atom: - "([False],[False]) : steps (atom a) w = (w = [])" -apply (induct "w") - apply simp -apply (simp add: relcomp_unfold) -done - -lemma start_fin_in_steps_atom: - "(start (atom a), [False]) : steps (atom a) w = (w = [a])" -apply (induct "w") - apply (simp add: start_atom) -apply (simp add: False_False_in_steps_atom relcomp_unfold start_atom) -done - -lemma accepts_atom: - "accepts (atom a) w = (w = [a])" -by (simp add: accepts_conv_steps start_fin_in_steps_atom fin_atom) - -(******************************************************) -(* or *) -(******************************************************) - -(***** lift True/False over fin *****) - -lemma fin_or_True[iff]: - "\L R. 
fin (or L R) (True#p) = fin L p" -by(simp add:or_def) - -lemma fin_or_False[iff]: - "\L R. fin (or L R) (False#p) = fin R p" -by(simp add:or_def) - -(***** lift True/False over step *****) - -lemma True_in_step_or[iff]: -"\L R. (True#p,q) : step (or L R) a = (\r. q = True#r \ (p,r) \ step L a)" -apply (simp add:or_def step_def) -apply blast -done - -lemma False_in_step_or[iff]: -"\L R. (False#p,q) : step (or L R) a = (\r. q = False#r \ (p,r) \ step R a)" -apply (simp add:or_def step_def) -apply blast -done - - -(***** lift True/False over steps *****) - -lemma lift_True_over_steps_or[iff]: - "\p. (True#p,q)\steps (or L R) w = (\r. q = True # r \ (p,r) \ steps L w)" -apply (induct "w") - apply force -apply force -done - -lemma lift_False_over_steps_or[iff]: - "\p. (False#p,q)\steps (or L R) w = (\r. q = False#r \ (p,r)\steps R w)" -apply (induct "w") - apply force -apply force -done - -(** From the start **) - -lemma start_step_or[iff]: - "\L R. (start(or L R),q) : step(or L R) a = - (\p. (q = True#p \ (start L,p) : step L a) | - (q = False#p \ (start R,p) : step R a))" -apply (simp add:or_def step_def) -apply blast -done - -lemma steps_or: - "(start(or L R), q) : steps (or L R) w = - ( (w = [] \ q = start(or L R)) | - (w \ [] \ (\p. q = True # p \ (start L,p) : steps L w | - q = False # p \ (start R,p) : steps R w)))" -apply (case_tac "w") - apply (simp) - apply blast -apply (simp) -apply blast -done - -lemma fin_start_or[iff]: - "\L R. fin (or L R) (start(or L R)) = (fin L (start L) | fin R (start R))" -by (simp add:or_def) - -lemma accepts_or[iff]: - "accepts (or L R) w = (accepts L w | accepts R w)" -apply (simp add: accepts_conv_steps steps_or) -(* get rid of case_tac: *) -apply (case_tac "w = []") - apply auto -done - -(******************************************************) -(* conc *) -(******************************************************) - -(** True/False in fin **) - -lemma fin_conc_True[iff]: - "\L R. fin (conc L R) (True#p) = (fin L p \ fin R (start R))" -by(simp add:conc_def) - -lemma fin_conc_False[iff]: - "\L R. fin (conc L R) (False#p) = fin R p" -by(simp add:conc_def) - - -(** True/False in step **) - -lemma True_step_conc[iff]: - "\L R. (True#p,q) : step (conc L R) a = - ((\r. q=True#r \ (p,r): step L a) | - (fin L p \ (\r. q=False#r \ (start R,r) : step R a)))" -apply (simp add:conc_def step_def) -apply blast -done - -lemma False_step_conc[iff]: - "\L R. (False#p,q) : step (conc L R) a = - (\r. q = False#r \ (p,r) : step R a)" -apply (simp add:conc_def step_def) -apply blast -done - -(** False in steps **) - -lemma False_steps_conc[iff]: - "\p. (False#p,q): steps (conc L R) w = (\r. q=False#r \ (p,r): steps R w)" -apply (induct "w") - apply fastforce -apply force -done - -(** True in steps **) - -lemma True_True_steps_concI: - "\L R p. (p,q) : steps L w \ (True#p,True#q) : steps (conc L R) w" -apply (induct "w") - apply simp -apply simp -apply fast -done - -lemma True_False_step_conc[iff]: - "\L R. (True#p,False#q) : step (conc L R) a = - (fin L p \ (start R,q) : step R a)" -by simp - -lemma True_steps_concD[rule_format]: - "\p. (True#p,q) : steps (conc L R) w \ - ((\r. (p,r) : steps L w \ q = True#r) \ - (\u a v. w = u@a#v \ - (\r. (p,r) : steps L u \ fin L r \ - (\s. (start R,s) : step R a \ - (\t. 
(s,t) : steps R v \ q = False#t)))))" -apply (induct "w") - apply simp -apply simp -apply (clarify del:disjCI) -apply (erule disjE) - apply (clarify del:disjCI) - apply (erule allE, erule impE, assumption) - apply (erule disjE) - apply blast - apply (rule disjI2) - apply (clarify) - apply simp - apply (rule_tac x = "a#u" in exI) - apply simp - apply blast -apply (rule disjI2) -apply (clarify) -apply simp -apply (rule_tac x = "[]" in exI) -apply simp -apply blast -done - -lemma True_steps_conc: - "(True#p,q) : steps (conc L R) w = - ((\r. (p,r) : steps L w \ q = True#r) \ - (\u a v. w = u@a#v \ - (\r. (p,r) : steps L u \ fin L r \ - (\s. (start R,s) : step R a \ - (\t. (s,t) : steps R v \ q = False#t)))))" -by(force dest!: True_steps_concD intro!: True_True_steps_concI) - -(** starting from the start **) - -lemma start_conc: - "\L R. start(conc L R) = True#start L" -by (simp add:conc_def) - -lemma final_conc: - "\L R. fin(conc L R) p = ((fin R (start R) \ (\s. p = True#s \ fin L s)) \ - (\s. p = False#s \ fin R s))" -apply (simp add:conc_def split: list.split) -apply blast -done - -lemma accepts_conc: - "accepts (conc L R) w = (\u v. w = u@v \ accepts L u \ accepts R v)" -apply (simp add: accepts_conv_steps True_steps_conc final_conc start_conc) -apply (rule iffI) - apply (clarify) - apply (erule disjE) - apply (clarify) - apply (erule disjE) - apply (rule_tac x = "w" in exI) - apply simp - apply blast - apply blast - apply (erule disjE) - apply blast - apply (clarify) - apply (rule_tac x = "u" in exI) - apply simp - apply blast -apply (clarify) -apply (case_tac "v") - apply simp - apply blast -apply simp -apply blast -done - -(******************************************************) -(* epsilon *) -(******************************************************) - -lemma step_epsilon[simp]: "step epsilon a = {}" -by(simp add:epsilon_def step_def) - -lemma steps_epsilon: "((p,q) : steps epsilon w) = (w=[] \ p=q)" -by (induct "w") auto - -lemma accepts_epsilon[iff]: "accepts epsilon w = (w = [])" -apply (simp add: steps_epsilon accepts_conv_steps) -apply (simp add: epsilon_def) -done - -(******************************************************) -(* plus *) -(******************************************************) - -lemma start_plus[simp]: "\A. start (plus A) = start A" -by(simp add:plus_def) - -lemma fin_plus[iff]: "\A. fin (plus A) = fin A" -by(simp add:plus_def) - -lemma step_plusI: - "\A. (p,q) : step A a \ (p,q) : step (plus A) a" -by(simp add:plus_def step_def) - -lemma steps_plusI: "\p. (p,q) : steps A w \ (p,q) \ steps (plus A) w" -apply (induct "w") - apply simp -apply simp -apply (blast intro: step_plusI) -done - -lemma step_plus_conv[iff]: - "\A. (p,r): step (plus A) a = - ( (p,r): step A a | fin A p \ (start A,r) : step A a )" -by(simp add:plus_def step_def) - -lemma fin_steps_plusI: - "[| (start A,q) : steps A u; u \ []; fin A p |] - ==> (p,q) : steps (plus A) u" -apply (case_tac "u") - apply blast -apply simp -apply (blast intro: steps_plusI) -done - -(* reverse list induction! Complicates matters for conc? *) -lemma start_steps_plusD[rule_format]: - "\r. (start A,r) \ steps (plus A) w \ - (\us v. w = concat us @ v \ - (\u\set us. 
accepts A u) \ - (start A,r) \ steps A v)" -apply (induct w rule: rev_induct) - apply simp - apply (rule_tac x = "[]" in exI) - apply simp -apply simp -apply (clarify) -apply (erule allE, erule impE, assumption) -apply (clarify) -apply (erule disjE) - apply (rule_tac x = "us" in exI) - apply (simp) - apply blast -apply (rule_tac x = "us@[v]" in exI) -apply (simp add: accepts_conv_steps) -apply blast -done - -lemma steps_star_cycle[rule_format]: - "us \ [] \ (\u \ set us. accepts A u) \ accepts (plus A) (concat us)" -apply (simp add: accepts_conv_steps) -apply (induct us rule: rev_induct) - apply simp -apply (rename_tac u us) -apply simp -apply (clarify) -apply (case_tac "us = []") - apply (simp) - apply (blast intro: steps_plusI fin_steps_plusI) -apply (clarify) -apply (case_tac "u = []") - apply (simp) - apply (blast intro: steps_plusI fin_steps_plusI) -apply (blast intro: steps_plusI fin_steps_plusI) -done - -lemma accepts_plus[iff]: - "accepts (plus A) w = - (\us. us \ [] \ w = concat us \ (\u \ set us. accepts A u))" -apply (rule iffI) - apply (simp add: accepts_conv_steps) - apply (clarify) - apply (drule start_steps_plusD) - apply (clarify) - apply (rule_tac x = "us@[v]" in exI) - apply (simp add: accepts_conv_steps) - apply blast -apply (blast intro: steps_star_cycle) -done - -(******************************************************) -(* star *) -(******************************************************) - -lemma accepts_star: - "accepts (star A) w = (\us. (\u \ set us. accepts A u) \ w = concat us)" -apply(unfold star_def) -apply (rule iffI) - apply (clarify) - apply (erule disjE) - apply (rule_tac x = "[]" in exI) - apply simp - apply blast -apply force -done - -(***** Correctness of r2n *****) - -lemma accepts_rexp2na: - "\w. accepts (rexp2na r) w = (w : lang r)" -apply (induct "r") - apply (simp add: accepts_conv_steps) - apply simp - apply (simp add: accepts_atom) - apply (simp) - apply (simp add: accepts_conc Regular_Set.conc_def) -apply (simp add: accepts_star in_star_iff_concat subset_iff Ball_def) -done - -end diff --git a/Functional-Automata/RegExp2NAe.thy b/Functional-Automata/RegExp2NAe.thy deleted file mode 100644 index a8831e3..0000000 --- a/Functional-Automata/RegExp2NAe.thy +++ /dev/null @@ -1,641 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM -*) - -section "From regular expressions to nondeterministic automata with epsilon" - -theory RegExp2NAe -imports "Regular-Sets.Regular_Exp" NAe -begin - -type_synonym 'a bitsNAe = "('a,bool list)nae" - -definition - epsilon :: "'a bitsNAe" where -"epsilon = ([],\a s. {}, \s. s=[])" - -definition -"atom" :: "'a \ 'a bitsNAe" where -"atom a = ([True], - \b s. if s=[True] \ b=Some a then {[False]} else {}, - \s. s=[False])" - -definition - or :: "'a bitsNAe \ 'a bitsNAe \ 'a bitsNAe" where -"or = (\(ql,dl,fl)(qr,dr,fr). - ([], - \a s. case s of - [] \ if a=None then {True#ql,False#qr} else {} - | left#s \ if left then True ## dl a s - else False ## dr a s, - \s. case s of [] \ False | left#s \ if left then fl s else fr s))" - -definition - conc :: "'a bitsNAe \ 'a bitsNAe \ 'a bitsNAe" where -"conc = (\(ql,dl,fl)(qr,dr,fr). - (True#ql, - \a s. case s of - [] \ {} - | left#s \ if left then (True ## dl a s) \ - (if fl s \ a=None then {False#qr} else {}) - else False ## dr a s, - \s. case s of [] \ False | left#s \ \left \ fr s))" - -definition - star :: "'a bitsNAe \ 'a bitsNAe" where -"star = (\(q,d,f). - ([], - \a s. 
case s of - [] \ if a=None then {True#q} else {} - | left#s \ if left then (True ## d a s) \ - (if f s \ a=None then {True#q} else {}) - else {}, - \s. case s of [] \ True | left#s \ left \ f s))" - -primrec rexp2nae :: "'a rexp \ 'a bitsNAe" where -"rexp2nae Zero = ([], \a s. {}, \s. False)" | -"rexp2nae One = epsilon" | -"rexp2nae(Atom a) = atom a" | -"rexp2nae(Plus r s) = or (rexp2nae r) (rexp2nae s)" | -"rexp2nae(Times r s) = conc (rexp2nae r) (rexp2nae s)" | -"rexp2nae(Star r) = star (rexp2nae r)" - -declare split_paired_all[simp] - -(******************************************************) -(* epsilon *) -(******************************************************) - -lemma step_epsilon[simp]: "step epsilon a = {}" -by(simp add:epsilon_def step_def) - -lemma steps_epsilon: "((p,q) : steps epsilon w) = (w=[] \ p=q)" -by (induct "w") auto - -lemma accepts_epsilon[simp]: "accepts epsilon w = (w = [])" -apply (simp add: steps_epsilon accepts_def) -apply (simp add: epsilon_def) -done - -(******************************************************) -(* atom *) -(******************************************************) - -lemma fin_atom: "(fin (atom a) q) = (q = [False])" -by(simp add:atom_def) - -lemma start_atom: "start (atom a) = [True]" -by(simp add:atom_def) - -(* Use {x. False} = {}? *) - -lemma eps_atom[simp]: - "eps(atom a) = {}" -by (simp add:atom_def step_def) - -lemma in_step_atom_Some[simp]: - "(p,q) : step (atom a) (Some b) = (p=[True] \ q=[False] \ b=a)" -by (simp add:atom_def step_def) - -lemma False_False_in_steps_atom: - "([False],[False]) : steps (atom a) w = (w = [])" -apply (induct "w") - apply (simp) -apply (simp add: relcomp_unfold) -done - -lemma start_fin_in_steps_atom: - "(start (atom a), [False]) : steps (atom a) w = (w = [a])" -apply (induct "w") - apply (simp add: start_atom rtrancl_empty) -apply (simp add: False_False_in_steps_atom relcomp_unfold start_atom) -done - -lemma accepts_atom: "accepts (atom a) w = (w = [a])" -by (simp add: accepts_def start_fin_in_steps_atom fin_atom) - - -(******************************************************) -(* or *) -(******************************************************) - -(***** lift True/False over fin *****) - -lemma fin_or_True[iff]: - "\L R. fin (or L R) (True#p) = fin L p" -by(simp add:or_def) - -lemma fin_or_False[iff]: - "\L R. fin (or L R) (False#p) = fin R p" -by(simp add:or_def) - -(***** lift True/False over step *****) - -lemma True_in_step_or[iff]: -"\L R. (True#p,q) : step (or L R) a = (\r. q = True#r \ (p,r) : step L a)" -apply (simp add:or_def step_def) -apply blast -done - -lemma False_in_step_or[iff]: -"\L R. (False#p,q) : step (or L R) a = (\r. q = False#r \ (p,r) : step R a)" -apply (simp add:or_def step_def) -apply blast -done - - -(***** lift True/False over epsclosure *****) - -lemma lemma1a: - "(tp,tq) : (eps(or L R))\<^sup>* \ - (\p. tp = True#p \ \q. (p,q) : (eps L)\<^sup>* \ tq = True#q)" -apply (induct rule:rtrancl_induct) - apply (blast) -apply (clarify) -apply (simp) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma lemma1b: - "(tp,tq) : (eps(or L R))\<^sup>* \ - (\p. tp = False#p \ \q. 
(p,q) : (eps R)\<^sup>* \ tq = False#q)" -apply (induct rule:rtrancl_induct) - apply (blast) -apply (clarify) -apply (simp) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma lemma2a: - "(p,q) : (eps L)\<^sup>* \ (True#p, True#q) : (eps(or L R))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma lemma2b: - "(p,q) : (eps R)\<^sup>* \ (False#p, False#q) : (eps(or L R))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma True_epsclosure_or[iff]: - "(True#p,q) : (eps(or L R))\<^sup>* = (\r. q = True#r \ (p,r) : (eps L)\<^sup>*)" -by (blast dest: lemma1a lemma2a) - -lemma False_epsclosure_or[iff]: - "(False#p,q) : (eps(or L R))\<^sup>* = (\r. q = False#r \ (p,r) : (eps R)\<^sup>*)" -by (blast dest: lemma1b lemma2b) - -(***** lift True/False over steps *****) - -lemma lift_True_over_steps_or[iff]: - "\p. (True#p,q):steps (or L R) w = (\r. q = True # r \ (p,r):steps L w)" -apply (induct "w") - apply auto -apply force -done - -lemma lift_False_over_steps_or[iff]: - "\p. (False#p,q):steps (or L R) w = (\r. q = False#r \ (p,r):steps R w)" -apply (induct "w") - apply auto -apply (force) -done - -(***** Epsilon closure of start state *****) - -lemma unfold_rtrancl2: - "R\<^sup>* = Id \ (R O R\<^sup>*)" -apply (rule set_eqI) -apply (simp) -apply (rule iffI) - apply (erule rtrancl_induct) - apply (blast) - apply (blast intro: rtrancl_into_rtrancl) -apply (blast intro: converse_rtrancl_into_rtrancl) -done - -lemma in_unfold_rtrancl2: - "(p,q) : R\<^sup>* = (q = p | (\r. (p,r) : R \ (r,q) : R\<^sup>*))" -apply (rule unfold_rtrancl2[THEN equalityE]) -apply (blast) -done - -lemmas [iff] = in_unfold_rtrancl2[where ?p = "start(or L R)"] for L R - -lemma start_eps_or[iff]: - "\L R. (start(or L R),q) : eps(or L R) = - (q = True#start L | q = False#start R)" -by (simp add:or_def step_def) - -lemma not_start_step_or_Some[iff]: - "\L R. (start(or L R),q) \ step (or L R) (Some a)" -by (simp add:or_def step_def) - -lemma steps_or: - "(start(or L R), q) : steps (or L R) w = - ( (w = [] \ q = start(or L R)) | - (\p. q = True # p \ (start L,p) : steps L w | - q = False # p \ (start R,p) : steps R w) )" -apply (case_tac "w") - apply (simp) - apply (blast) -apply (simp) -apply (blast) -done - -lemma start_or_not_final[iff]: - "\L R. \ fin (or L R) (start(or L R))" -by (simp add:or_def) - -lemma accepts_or: - "accepts (or L R) w = (accepts L w | accepts R w)" -apply (simp add:accepts_def steps_or) - apply auto -done - - -(******************************************************) -(* conc *) -(******************************************************) - -(** True/False in fin **) - -lemma in_conc_True[iff]: - "\L R. fin (conc L R) (True#p) = False" -by (simp add:conc_def) - -lemma fin_conc_False[iff]: - "\L R. fin (conc L R) (False#p) = fin R p" -by (simp add:conc_def) - -(** True/False in step **) - -lemma True_step_conc[iff]: - "\L R. (True#p,q) : step (conc L R) a = - ((\r. q=True#r \ (p,r): step L a) | - (fin L p \ a=None \ q=False#start R))" -by (simp add:conc_def step_def) (blast) - -lemma False_step_conc[iff]: - "\L R. (False#p,q) : step (conc L R) a = - (\r. q = False#r \ (p,r) : step R a)" -by (simp add:conc_def step_def) (blast) - -(** False in epsclosure **) - -lemma lemma1b': - "(tp,tq) : (eps(conc L R))\<^sup>* \ - (\p. tp = False#p \ \q. 
(p,q) : (eps R)\<^sup>* \ tq = False#q)" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma lemma2b': - "(p,q) : (eps R)\<^sup>* \ (False#p, False#q) : (eps(conc L R))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma False_epsclosure_conc[iff]: - "((False # p, q) : (eps (conc L R))\<^sup>*) = - (\r. q = False # r \ (p, r) : (eps R)\<^sup>*)" -apply (rule iffI) - apply (blast dest: lemma1b') -apply (blast dest: lemma2b') -done - -(** False in steps **) - -lemma False_steps_conc[iff]: - "\p. (False#p,q): steps (conc L R) w = (\r. q=False#r \ (p,r): steps R w)" -apply (induct "w") - apply (simp) -apply (simp) -apply (fast) (*MUCH faster than blast*) -done - -(** True in epsclosure **) - -lemma True_True_eps_concI: - "(p,q): (eps L)\<^sup>* \ (True#p,True#q) : (eps(conc L R))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma True_True_steps_concI: - "\p. (p,q) : steps L w \ (True#p,True#q) : steps (conc L R) w" -apply (induct "w") - apply (simp add: True_True_eps_concI) -apply (simp) -apply (blast intro: True_True_eps_concI) -done - -lemma lemma1a': - "(tp,tq) : (eps(conc L R))\<^sup>* \ - (\p. tp = True#p \ - (\q. tq = True#q \ (p,q) : (eps L)\<^sup>*) | - (\q r. tq = False#q \ (p,r):(eps L)\<^sup>* \ fin L r \ (start R,q) : (eps R)\<^sup>*))" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma lemma2a': - "(p, q) : (eps L)\<^sup>* \ (True#p, True#q) : (eps(conc L R))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma lem: - "\L R. (p,q) : step R None \ (False#p, False#q) : step (conc L R) None" -by(simp add: conc_def step_def) - -lemma lemma2b'': - "(p,q) : (eps R)\<^sup>* \ (False#p, False#q) : (eps(conc L R))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (drule lem) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma True_False_eps_concI: - "\L R. fin L p \ (True#p, False#start R) : eps(conc L R)" -by(simp add: conc_def step_def) - -lemma True_epsclosure_conc[iff]: - "((True#p,q) \ (eps(conc L R))\<^sup>*) = - ((\r. (p,r) \ (eps L)\<^sup>* \ q = True#r) \ - (\r. (p,r) \ (eps L)\<^sup>* \ fin L r \ - (\s. (start R, s) \ (eps R)\<^sup>* \ q = False#s)))" -apply (rule iffI) - apply (blast dest: lemma1a') -apply (erule disjE) - apply (blast intro: lemma2a') -apply (clarify) -apply (rule rtrancl_trans) -apply (erule lemma2a') -apply (rule converse_rtrancl_into_rtrancl) -apply (erule True_False_eps_concI) -apply (erule lemma2b'') -done - -(** True in steps **) - -lemma True_steps_concD[rule_format]: - "\p. (True#p,q) : steps (conc L R) w \ - ((\r. (p,r) : steps L w \ q = True#r) \ - (\u v. w = u@v \ (\r. (p,r) \ steps L u \ fin L r \ - (\s. 
(start R,s) \ steps R v \ q = False#s))))" -apply (induct "w") - apply (simp) -apply (simp) -apply (clarify del: disjCI) - apply (erule disjE) - apply (clarify del: disjCI) - apply (erule disjE) - apply (clarify del: disjCI) - apply (erule allE, erule impE, assumption) - apply (erule disjE) - apply (blast) - apply (rule disjI2) - apply (clarify) - apply (simp) - apply (rule_tac x = "a#u" in exI) - apply (simp) - apply (blast) - apply (blast) -apply (rule disjI2) -apply (clarify) -apply (simp) -apply (rule_tac x = "[]" in exI) -apply (simp) -apply (blast) -done - -lemma True_steps_conc: - "(True#p,q) \ steps (conc L R) w = - ((\r. (p,r) \ steps L w \ q = True#r) | - (\u v. w = u@v \ (\r. (p,r) : steps L u \ fin L r \ - (\s. (start R,s) : steps R v \ q = False#s))))" -by (blast dest: True_steps_concD - intro: True_True_steps_concI in_steps_epsclosure) - -(** starting from the start **) - -lemma start_conc: - "\L R. start(conc L R) = True#start L" -by (simp add: conc_def) - -lemma final_conc: - "\L R. fin(conc L R) p = (\s. p = False#s \ fin R s)" -by (simp add:conc_def split: list.split) - -lemma accepts_conc: - "accepts (conc L R) w = (\u v. w = u@v \ accepts L u \ accepts R v)" -apply (simp add: accepts_def True_steps_conc final_conc start_conc) -apply (blast) -done - -(******************************************************) -(* star *) -(******************************************************) - -lemma True_in_eps_star[iff]: - "\A. (True#p,q) \ eps(star A) = - ( (\r. q = True#r \ (p,r) \ eps A) \ (fin A p \ q = True#start A) )" -by (simp add:star_def step_def) (blast) - -lemma True_True_step_starI: - "\A. (p,q) : step A a \ (True#p, True#q) : step (star A) a" -by (simp add:star_def step_def) - -lemma True_True_eps_starI: - "(p,r) : (eps A)\<^sup>* \ (True#p, True#r) : (eps(star A))\<^sup>*" -apply (induct rule: rtrancl_induct) - apply (blast) -apply (blast intro: True_True_step_starI rtrancl_into_rtrancl) -done - -lemma True_start_eps_starI: - "\A. fin A p \ (True#p,True#start A) : eps(star A)" -by (simp add:star_def step_def) - -lemma lem': - "(tp,s) : (eps(star A))\<^sup>* \ (\p. tp = True#p \ - (\r. ((p,r) \ (eps A)\<^sup>* \ - (\q. (p,q) \ (eps A)\<^sup>* \ fin A q \ (start A,r) : (eps A)\<^sup>*)) \ - s = True#r))" -apply (induct rule: rtrancl_induct) - apply (simp) -apply (clarify) -apply (simp) -apply (blast intro: rtrancl_into_rtrancl) -done - -lemma True_eps_star[iff]: - "((True#p,s) \ (eps(star A))\<^sup>*) = - (\r. ((p,r) \ (eps A)\<^sup>* \ - (\q. (p,q) : (eps A)\<^sup>* \ fin A q \ (start A,r) : (eps A)\<^sup>*)) \ - s = True#r)" -apply (rule iffI) - apply (drule lem') - apply (blast) -(* Why can't blast do the rest? *) -apply (clarify) -apply (erule disjE) -apply (erule True_True_eps_starI) -apply (clarify) -apply (rule rtrancl_trans) -apply (erule True_True_eps_starI) -apply (rule rtrancl_trans) -apply (rule r_into_rtrancl) -apply (erule True_start_eps_starI) -apply (erule True_True_eps_starI) -done - -(** True in step Some **) - -lemma True_step_star[iff]: - "\A. (True#p,r) \ step (star A) (Some a) = - (\q. (p,q) \ step A (Some a) \ r=True#q)" -by (simp add:star_def step_def) (blast) - - -(** True in steps **) - -(* reverse list induction! Complicates matters for conc? *) -lemma True_start_steps_starD[rule_format]: - "\rr. (True#start A,rr) \ steps (star A) w \ - (\us v. w = concat us @ v \ - (\u\set us. accepts A u) \ - (\r. 
(start A,r) \ steps A v \ rr = True#r))" -apply (induct w rule: rev_induct) - apply (simp) - apply (clarify) - apply (rule_tac x = "[]" in exI) - apply (erule disjE) - apply (simp) - apply (clarify) - apply (simp) -apply (simp add: O_assoc[symmetric] epsclosure_steps) -apply (clarify) -apply (erule allE, erule impE, assumption) -apply (clarify) -apply (erule disjE) - apply (rule_tac x = "us" in exI) - apply (rule_tac x = "v@[x]" in exI) - apply (simp add: O_assoc[symmetric] epsclosure_steps) - apply (blast) -apply (clarify) -apply (rule_tac x = "us@[v@[x]]" in exI) -apply (rule_tac x = "[]" in exI) -apply (simp add: accepts_def) -apply (blast) -done - -lemma True_True_steps_starI: - "\p. (p,q) : steps A w \ (True#p,True#q) : steps (star A) w" -apply (induct "w") - apply (simp) -apply (simp) -apply (blast intro: True_True_eps_starI True_True_step_starI) -done - -lemma steps_star_cycle: - "(\u \ set us. accepts A u) \ - (True#start A,True#start A) \ steps (star A) (concat us)" -apply (induct "us") - apply (simp add:accepts_def) -apply (simp add:accepts_def) -by(blast intro: True_True_steps_starI True_start_eps_starI in_epsclosure_steps) - -(* Better stated directly with start(star A)? Loop in star A back to start(star A)?*) -lemma True_start_steps_star: - "(True#start A,rr) : steps (star A) w = - (\us v. w = concat us @ v \ - (\u\set us. accepts A u) \ - (\r. (start A,r) \ steps A v \ rr = True#r))" -apply (rule iffI) - apply (erule True_start_steps_starD) -apply (clarify) -apply (blast intro: steps_star_cycle True_True_steps_starI) -done - -(** the start state **) - -lemma start_step_star[iff]: - "\A. (start(star A),r) : step (star A) a = (a=None \ r = True#start A)" -by (simp add:star_def step_def) - -lemmas epsclosure_start_step_star = - in_unfold_rtrancl2[where ?p = "start (star A)"] for A - -lemma start_steps_star: - "(start(star A),r) : steps (star A) w = - ((w=[] \ r= start(star A)) | (True#start A,r) : steps (star A) w)" -apply (rule iffI) - apply (case_tac "w") - apply (simp add: epsclosure_start_step_star) - apply (simp) - apply (clarify) - apply (simp add: epsclosure_start_step_star) - apply (blast) -apply (erule disjE) - apply (simp) -apply (blast intro: in_steps_epsclosure) -done - -lemma fin_star_True[iff]: "\A. fin (star A) (True#p) = fin A p" -by (simp add:star_def) - -lemma fin_star_start[iff]: "\A. fin (star A) (start(star A))" -by (simp add:star_def) - -(* too complex! Simpler if loop back to start(star A)? *) -lemma accepts_star: - "accepts (star A) w = - (\us. (\u \ set(us). accepts A u) \ (w = concat us))" -apply(unfold accepts_def) -apply (simp add: start_steps_star True_start_steps_star) -apply (rule iffI) - apply (clarify) - apply (erule disjE) - apply (clarify) - apply (simp) - apply (rule_tac x = "[]" in exI) - apply (simp) - apply (clarify) - apply (rule_tac x = "us@[v]" in exI) - apply (simp add: accepts_def) - apply (blast) -apply (clarify) -apply (rule_tac xs = "us" in rev_exhaust) - apply (simp) - apply (blast) -apply (clarify) -apply (simp add: accepts_def) -apply (blast) -done - - -(***** Correctness of r2n *****) - -lemma accepts_rexp2nae: - "\w. 
accepts (rexp2nae r) w = (w : lang r)" -apply (induct "r") - apply (simp add: accepts_def) - apply simp - apply (simp add: accepts_atom) - apply (simp add: accepts_or) - apply (simp add: accepts_conc Regular_Set.conc_def) -apply (simp add: accepts_star in_star_iff_concat subset_iff Ball_def) -done - -end diff --git a/Functional-Automata/RegSet_of_nat_DA.thy b/Functional-Automata/RegSet_of_nat_DA.thy deleted file mode 100644 index 450f801..0000000 --- a/Functional-Automata/RegSet_of_nat_DA.thy +++ /dev/null @@ -1,233 +0,0 @@ -(* Author: Tobias Nipkow - Copyright 1998 TUM - -To generate a regular expression, the alphabet must be finite. -regexp needs to be supplied with an 'a list for a unique order - -add_Atom d i j r a = (if d a i = j then Union r (Atom a) else r) -atoms d i j as = foldl (add_Atom d i j) Empty as - -regexp as d i j 0 = (if i=j then Union (Star Empty) (atoms d i j as) - else atoms d i j as -*) - -section "From deterministic automata to regular sets" - -theory RegSet_of_nat_DA -imports "Regular-Sets.Regular_Set" DA -begin - -type_synonym 'a nat_next = "'a \ nat \ nat" - -abbreviation - deltas :: "'a nat_next \ 'a list \ nat \ nat" where - "deltas \ foldl2" - -primrec trace :: "'a nat_next \ nat \ 'a list \ nat list" where -"trace d i [] = []" | -"trace d i (x#xs) = d x i # trace d (d x i) xs" - -(* conversion a la Warshall *) - -primrec regset :: "'a nat_next \ nat \ nat \ nat \ 'a list set" where -"regset d i j 0 = (if i=j then insert [] {[a] | a. d a i = j} - else {[a] | a. d a i = j})" | -"regset d i j (Suc k) = - regset d i j k \ - (regset d i k k) @@ (star(regset d k k k)) @@ (regset d k j k)" - -definition - regset_of_DA :: "('a,nat)da \ nat \ 'a list set" where -"regset_of_DA A k = (\j\{j. j fin A j}. regset (next A) (start A) j k)" - -definition - bounded :: "'a nat_next \ nat \ bool" where -"bounded d k = (\n. n < k \ (\x. d x n < k))" - -declare - in_set_butlast_appendI[simp,intro] less_SucI[simp] image_eqI[simp] - -(* Lists *) - -lemma butlast_empty[iff]: - "(butlast xs = []) = (case xs of [] \ True | y#ys \ ys=[])" -by (cases xs) simp_all - -lemma in_set_butlast_concatI: - "x:set(butlast xs) \ xs:set xss \ x:set(butlast(concat xss))" -apply (induct "xss") - apply simp -apply (simp add: butlast_append del: ball_simps) -apply (rule conjI) - apply (clarify) - apply (erule disjE) - apply (blast) - apply (subgoal_tac "xs=[]") - apply simp - apply (blast) -apply (blast dest: in_set_butlastD) -done - -(* Regular sets *) - -(* The main lemma: - how to decompose a trace into a prefix, a list of loops and a suffix. -*) -lemma decompose[rule_format]: - "\i. k \ set(trace d i xs) \ (\pref mids suf. - xs = pref @ concat mids @ suf \ - deltas d pref i = k \ (\n\set(butlast(trace d i pref)). n \ k) \ - (\mid\set mids. (deltas d mid k = k) \ - (\n\set(butlast(trace d k mid)). n \ k)) \ - (\n\set(butlast(trace d k suf)). 
n \ k))" -apply (induct "xs") - apply (simp) -apply (rename_tac a as) -apply (intro strip) -apply (case_tac "d a i = k") - apply (rule_tac x = "[a]" in exI) - apply simp - apply (case_tac "k : set(trace d (d a i) as)") - apply (erule allE) - apply (erule impE) - apply (assumption) - apply (erule exE)+ - apply (rule_tac x = "pref#mids" in exI) - apply (rule_tac x = "suf" in exI) - apply simp - apply (rule_tac x = "[]" in exI) - apply (rule_tac x = "as" in exI) - apply simp - apply (blast dest: in_set_butlastD) -apply simp -apply (erule allE) -apply (erule impE) - apply (assumption) -apply (erule exE)+ -apply (rule_tac x = "a#pref" in exI) -apply (rule_tac x = "mids" in exI) -apply (rule_tac x = "suf" in exI) -apply simp -done - -lemma length_trace[simp]: "\i. length(trace d i xs) = length xs" -by (induct "xs") simp_all - -lemma deltas_append[simp]: - "\i. deltas d (xs@ys) i = deltas d ys (deltas d xs i)" -by (induct "xs") simp_all - -lemma trace_append[simp]: - "\i. trace d i (xs@ys) = trace d i xs @ trace d (deltas d xs i) ys" -by (induct "xs") simp_all - -lemma trace_concat[simp]: - "(\xs \ set xss. deltas d xs i = i) \ - trace d i (concat xss) = concat (map (trace d i) xss)" -by (induct "xss") simp_all - -lemma trace_is_Nil[simp]: "\i. (trace d i xs = []) = (xs = [])" -by (case_tac "xs") simp_all - -lemma trace_is_Cons_conv[simp]: - "(trace d i xs = n#ns) = - (case xs of [] \ False | y#ys \ n = d y i \ ns = trace d n ys)" -apply (case_tac "xs") -apply simp_all -apply (blast) -done - -lemma set_trace_conv: - "\i. set(trace d i xs) = - (if xs=[] then {} else insert(deltas d xs i)(set(butlast(trace d i xs))))" -apply (induct "xs") - apply (simp) -apply (simp add: insert_commute) -done - -lemma deltas_concat[simp]: - "(\mid\set mids. deltas d mid k = k) \ deltas d (concat mids) k = k" -by (induct mids) simp_all - -lemma lem: "[| n < Suc k; n \ k |] ==> n < k" -by arith - -lemma regset_spec: - "\i j xs. xs \ regset d i j k = - ((\n\set(butlast(trace d i xs)). n < k) \ deltas d xs i = j)" -apply (induct k) - apply(simp split: list.split) - apply(fastforce) -apply (simp add: conc_def) -apply (rule iffI) - apply (erule disjE) - apply simp - apply (erule exE conjE)+ - apply simp - apply (subgoal_tac - "(\m\set(butlast(trace d k xsb)). m < Suc k) \ deltas d xsb k = k") - apply (simp add: set_trace_conv butlast_append ball_Un) - apply (erule star_induct) - apply (simp) - apply (simp add: set_trace_conv butlast_append ball_Un) -apply (case_tac "k : set(butlast(trace d i xs))") - prefer 2 apply (rule disjI1) - apply (blast intro:lem) -apply (rule disjI2) -apply (drule in_set_butlastD[THEN decompose]) -apply (clarify) -apply (rule_tac x = "pref" in exI) -apply simp -apply (rule conjI) - apply (rule ballI) - apply (rule lem) - prefer 2 apply simp - apply (drule bspec) prefer 2 apply assumption - apply simp -apply (rule_tac x = "concat mids" in exI) -apply (simp) -apply (rule conjI) - apply (rule concat_in_star) - apply (clarsimp simp: subset_iff) - apply (rule lem) - prefer 2 apply simp - apply (drule bspec) prefer 2 apply assumption - apply (simp add: image_eqI in_set_butlast_concatI) -apply (rule ballI) -apply (rule lem) - apply auto -done - -lemma trace_below: - "bounded d k \ \i. i < k \ (\n\set(trace d i xs). n < k)" -apply (unfold bounded_def) -apply (induct "xs") - apply simp -apply (simp (no_asm)) -apply (blast) -done - -lemma regset_below: - "[| bounded d k; i < k; j < k |] ==> - regset d i j k = {xs. 
deltas d xs i = j}" -apply (rule set_eqI) -apply (simp add: regset_spec) -apply (blast dest: trace_below in_set_butlastD) -done - -lemma deltas_below: - "\i. bounded d k \ i < k \ deltas d w i < k" -apply (unfold bounded_def) -apply (induct "w") - apply simp_all -done - -lemma regset_DA_equiv: - "[| bounded (next A) k; start A < k; j < k |] ==> - w : regset_of_DA A k = accepts A w" -apply(unfold regset_of_DA_def) -apply (simp cong: conj_cong - add: regset_below deltas_below accepts_def delta_def) -done - -end diff --git a/Functional-Automata/document/root.bib b/Functional-Automata/document/root.bib deleted file mode 100644 index 3a1992e..0000000 --- a/Functional-Automata/document/root.bib +++ /dev/null @@ -1,6 +0,0 @@ -@inproceedings{Nipkow-TPHOLs98,author={Tobias Nipkow}, -title={Verified Lexical Analysis}, -booktitle={Theorem Proving in Higher Order Logics}, -editor={J. Grundy and M. Newey}, -publisher=Springer,series=LNCS,volume={1479},pages={1--15},year=1998, -note={\url{http://www4.informatik.tu-muenchen.de/~nipkow/pubs/tphols98.html}}} diff --git a/Functional-Automata/document/root.tex b/Functional-Automata/document/root.tex deleted file mode 100644 index b5421fb..0000000 --- a/Functional-Automata/document/root.tex +++ /dev/null @@ -1,54 +0,0 @@ - - -\documentclass[11pt,a4paper]{article} -\usepackage{isabelle,isabellesym} - - -% this should be the last package used -\usepackage{pdfsetup} - -\begin{document} - -\title{Functional Automata} -\author{Tobias Nipkow} -\maketitle - -\begin{abstract} -This theory defines deterministic and nondeterministic automata in a -functional representation: the transition function/relation and the finality -predicate are just functions. Hence the state space may be infinite. It is -shown how to convert regular expressions into such automata. A scanner -(generator) is implemented with the help of functional automata: the scanner -chops the input up into longest recognized substrings. Finally we also show -how to convert a certain subclass of functional automata (essentially the -finite deterministic ones) into regular sets. -\end{abstract} - -\section{Overview} - -The theories are structured as follows: -\begin{itemize} -\item Automata: - \texttt{AutoProj}, \texttt{NA}, \texttt{NAe}, \texttt{DA}, \texttt{Automata} -\item Conversion of regular expressions into automata:\\ - \texttt{RegExp2NA}, \texttt{RegExp2NAe}, \texttt{AutoRegExp}. -\item Scanning: \texttt{MaxPrefix}, \texttt{MaxChop}, \texttt{AutoMaxChop}. -\end{itemize} -For a full description see \cite{Nipkow-TPHOLs98}. - -In contrast to that paper, the latest version of the theories provides a -fully executable scanner generator. The non-executable bits (transitive -closure) have been eliminated by going from regular expressions directly to -nondeterministic automata, thus bypassing epsilon-moves. - -Not described in the paper is the conversion of certain functional automata -(essentially the finite deterministic ones) into regular sets contained in -\texttt{RegSet\_of\_nat\_DA}. - -% include generated text of all theories -\input{session} - -\bibliographystyle{abbrv} -\bibliography{root} - -\end{document} From f0cd78e1beddfd613a043a5504622518abd22462 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Mon, 17 Jun 2019 10:01:58 +0100 Subject: [PATCH 08/23] Fixed LaTeX. 
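
The change below escapes the underscore in "doc_classes": the surrounding text
block is emitted verbatim into the generated LaTeX, where a bare "_" is only
valid in math mode and aborts the run with "Missing $ inserted". A minimal,
self-contained LaTeX sketch of the difference (for illustration only, not part
of the patched sources):

    \documentclass{article}
    \begin{document}
    % fails in text mode with "Missing $ inserted":
    % a relation between doc_classes
    % compiles, since the underscore is escaped for text mode:
    a relation between doc\_classes
    \end{document}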
---
 ontologies/scholarly_paper.thy | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ontologies/scholarly_paper.thy b/ontologies/scholarly_paper.thy
index 6d74526..58e4062 100644
--- a/ontologies/scholarly_paper.thy
+++ b/ontologies/scholarly_paper.thy
@@ -65,7 +65,7 @@ doc_class annex = "text_section" +
 
 text\<open> Besides subtyping, there is another relation between
-doc_classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
+doc\_classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
 which is expressed by occurrence in the where clause.
 While sub-classing refers to data-inheritance of attributes,
 a monitor captures structural constraints -- the order --

From a21a8844a43f402642aa6d68e9ed568cf720938e Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker" 
Date: Mon, 17 Jun 2019 10:02:23 +0100
Subject: [PATCH 09/23] Updated copyright information.

---
 install | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/install b/install
index 8d0ab05..e8fb2c1 100755
--- a/install
+++ b/install
@@ -1,6 +1,7 @@
 #!/usr/bin/env bash
-# Copyright (c) 2018-2019 The University of Sheffield. All rights reserved.
-#                    2018 The University of Paris-Sud. All rights reserved.
+# Copyright (c) 2018-2019 The University of Sheffield.
+#               2019-2019 The University of Exeter.
+#               2018-2019 The University of Paris-Sud.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions

From 0b9a788a630a818e975b68938ff1703da62e5a52 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker" 
Date: Mon, 17 Jun 2019 10:02:37 +0100
Subject: [PATCH 10/23] Updated copyright information.

---
 LICENSE | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/LICENSE b/LICENSE
index e4bc137..87e2e02 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,5 +1,6 @@
-Copyright (C) 2018 The University of Sheffield
-                   2018 The University of Paris-Sud
+Copyright (C) 2018-2019 The University of Sheffield
+              2019-2019 The University of Exeter
+              2018-2019 The University of Paris-Sud
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without

From f53d43afd212e1db89f83a1fef36a4f7e122e1b7 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker" 
Date: Mon, 17 Jun 2019 10:06:38 +0100
Subject: [PATCH 11/23] Added support for isa_dof.ref[]{} and isa_dof.label[]{}.

---
 document-generator/latex/DOF-core.sty | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/document-generator/latex/DOF-core.sty b/document-generator/latex/DOF-core.sty
index fadf56e..f3449e2 100644
--- a/document-generator/latex/DOF-core.sty
+++ b/document-generator/latex/DOF-core.sty
@@ -1,5 +1,6 @@
 %% Copyright (C) 2018 The University of Sheffield
 %%               2018 The University of Paris-Sud
+%%               2019 The University of Exeter
 %%
 %% License:
 %%   This program can be redistributed and/or modified under the terms
@@ -126,3 +127,9 @@
 % end: chapter/section default implementations
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% begin: label and ref
+\newisadof{label}[label=,type=][1]{\label{#1}}
+\newisadof{ref}[label=,type=][1]{\autoref{#1}}
+% end: label and ref
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

From 8a8fac042eb601d2a42cb7de3d19e589612423b6 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker" 
Date: Mon, 17 Jun 2019 10:07:21 +0100
Subject: [PATCH 12/23] Removed local debugging hacks that break proper build.
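
The example below now imports its ontologies via session-qualified theory
names and references them with matching @{theory ...} antiquotations. The
removed relative-path imports (and the corresponding "Draft.*" theory names)
resolve only when the ontology sources are loaded ad hoc from the working
copy, which breaks a regular batch build of a clean checkout. A minimal
sketch of the two header styles (only the CENELEC import is shown; the
session name Isabelle_DOF is the one used throughout this patch series):

    (* debugging style: resolves only against the local working copy *)
    theory mini_odo
      imports "../../../ontologies/CENELEC_50128"
    begin
    end

    (* portable style: resolves via the Isabelle_DOF session *)
    theory mini_odo
      imports "Isabelle_DOF.CENELEC_50128"
    begin
    end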
---
 examples/cenelec/mini_odo/mini_odo.thy | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/examples/cenelec/mini_odo/mini_odo.thy b/examples/cenelec/mini_odo/mini_odo.thy
index 9b6d57a..ac2c333 100644
--- a/examples/cenelec/mini_odo/mini_odo.thy
+++ b/examples/cenelec/mini_odo/mini_odo.thy
@@ -1,11 +1,7 @@
 theory mini_odo
-(*
 imports
   "Isabelle_DOF.CENELEC_50128"
   "Isabelle_DOF.scholarly_paper"
- *)
-  imports
-    "../../../ontologies/CENELEC_50128"
-    "../../../ontologies/scholarly_paper"
 begin
 
@@ -32,13 +28,13 @@ text\<open>An "anonymous" text-item, automatically coerced into the top-class "t
 text*[tralala] \<open> Brexit means Brexit \<close>
 
 text\<open>Examples for declaration of typed doc-items "assumption" and "hypothesis",
-       concepts defined in the underlying ontology @{theory "Draft.CENELEC_50128"}. \<close>
+       concepts defined in the underlying ontology @{theory "Isabelle_DOF.CENELEC_50128"}. \<close>
 text*[ass1::assumption] \<open> The subsystem Y is safe. \<close>
 text*[hyp1::hypothesis] \<open> P not equal NP \<close>
 
 text\<open>A real example fragment from a larger project, declaring a text-element as a
-     "safety-related application condition", a concept defined in the 
-     @{theory "Draft.CENELEC_50128"} ontology:\<close>
+     "safety-related application condition", a concept defined in the
+     @{theory "Isabelle_DOF.CENELEC_50128"} ontology:\<close>
 
 text*[new_ass::hypothesis]\<open>Under the assumption @{assumption \<open>ass1\<close>} we establish the following:
 ... \<close>

From ed65ce54ed64969116120fea909cc23a587ba395 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker" 
Date: Mon, 17 Jun 2019 10:10:29 +0100
Subject: [PATCH 13/23] Ported LaTeX document generation to Isabelle 2018.

---
 Isa_DOF.thy                          | 20 +++++++-------------
 document-generator/latex/DOF-COL.sty |  3 ++-
 2 files changed, 9 insertions(+), 14 deletions(-)

diff --git a/Isa_DOF.thy b/Isa_DOF.thy
index f01a6b3..cb2871f 100644
--- a/Isa_DOF.thy
+++ b/Isa_DOF.thy
@@ -1457,10 +1457,8 @@ val _ =
 
 end (* struct *)
 
-
 \<close>
 
-
 ML\<open>
 structure ODL_LTX_Converter = 
 struct
@@ -1474,20 +1472,19 @@ fun meta_args_2_string thy ((((lab, _), cid_opt), attr_list) : ODL_Command_Parse
            | SOME(cid,_) => DOF_core.name2doc_class_name thy cid
     val cid_txt  = "type = " ^ (enclose "{" "}" cid_long);
 
-
-    fun (* ltx_of_term _ _ (((Const ("List.list.Cons", t1) $ (Const ("String.Char", t2 ) $ t))) $ t')
-      = (HOLogic.dest_string (((Const ("List.list.Cons", t1) $ (Const ("String.Char", t2 ) $ t))) $ t'))
+    fun ltx_of_term _ _ (Const ("List.list.Cons", @{typ "char \<Rightarrow> char list \<Rightarrow> char list"}) $ t1 $ t2)
+          = HOLogic.dest_string (Const ("List.list.Cons", @{typ "char \<Rightarrow> char list \<Rightarrow> char list"}) $ t1 $ t2)
       | ltx_of_term _ _ (Const ("List.list.Nil", _)) = ""
       | ltx_of_term _ _ (@{term "numeral :: _ \<Rightarrow> _"} $ t) = Int.toString(HOLogic.dest_numeral t)
       | ltx_of_term ctx encl ((Const ("List.list.Cons", _) $ t1) $ t2) =
           let val inner = (case t2 of
-                Const ("List.list.Nil", _) =>  (ltx_of_term ctx true t1)
+                             Const ("List.list.Nil", _) =>  (ltx_of_term ctx true t1)
                            | _ => ((ltx_of_term ctx false t1)^", " ^(ltx_of_term ctx false t2))
                           )
           in if encl then enclose "{" "}" inner else inner end
       | ltx_of_term _ _ (Const ("Option.option.None", _)) = ""
       | ltx_of_term ctxt _ (Const ("Option.option.Some", _)$t) = ltx_of_term ctxt true t
-      | *)ltx_of_term ctxt _ t = ""^(Sledgehammer_Util.hackish_string_of_term ctxt t)
+      | ltx_of_term ctxt _ t = ""^(Sledgehammer_Util.hackish_string_of_term ctxt t)
 
     fun ltx_of_term_dbg ctx encl term = let 
       val 
@@ -1688,13 +1685,10 @@ fun pretty_docitem_antiquotation_generic cid_decl ctxt ({unchecked = x, define =
     val _ = check_and_mark ctxt cid_decl 
                    ({strict_checking = not x})
                    (Input.pos_of src) (Input.source_content src)
-  in (*(if y 
then Latex.enclose_block "\\label{" "}" - else Latex.enclose_block "\\autoref{" "}") - [Latex.string (Input.source_content src)]*) - (if y then Latex.enclose_block ("\\labelX[type="^cid_decl^"]{") "}" - else Latex.enclose_block ("\\autorefX[type="^cid_decl^"]{") "}") + in + (if y then Latex.enclose_block ("\\csname isadof.label[type={"^cid_decl^"}]{") "}\\endcsname" + else Latex.enclose_block ("\\csname isadof.ref[type={"^cid_decl^"}]{") "}\\endcsname") [Latex.string (Input.source_content src)] - end diff --git a/document-generator/latex/DOF-COL.sty b/document-generator/latex/DOF-COL.sty index 088dbc8..2a02a04 100644 --- a/document-generator/latex/DOF-COL.sty +++ b/document-generator/latex/DOF-COL.sty @@ -1,5 +1,6 @@ %% Copyright (C) 2018 The University of Sheffield %% 2018 The University of Paris-Sud +%% 2019 The University of Exeter %% %% License: %% This program can be redistributed and/or modified under the terms @@ -50,7 +51,7 @@ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % begin: side_by_side_figure* -\NewEnviron{isamarkupside_by_side_figure*}[1][]{\isaDof[env={side_by_side_figure},#1]{\BODY}} +\NewEnviron{isamarkupsideUNDERSCOREbyUNDERSCOREsideUNDERSCOREfigure*}[1][]{\isaDof[env={side_by_side_figure},#1]{\BODY}} \newisadof{side_by_side_figure.Isa_COL.side_by_side_figure}% [label=,type=% ,Isa_COL.figure.relative_width=% From df7fd4724bcd8040120f93d761ec1f4962b72a55 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Mon, 17 Jun 2019 10:14:30 +0100 Subject: [PATCH 14/23] Upgraded Jenkins build to Isabelle 2018. --- .ci/isabelle4isadof/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ci/isabelle4isadof/Dockerfile b/.ci/isabelle4isadof/Dockerfile index 53aed5e..76039a8 100644 --- a/.ci/isabelle4isadof/Dockerfile +++ b/.ci/isabelle4isadof/Dockerfile @@ -24,7 +24,7 @@ # # SPDX-License-Identifier: BSD-2-Clause -FROM logicalhacking:isabelle2017 +FROM logicalhacking:isabelle2018 WORKDIR /home/isabelle COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy From 991fd5d0e99177495c0c6d9f5848bfacbcca65ff Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Mon, 17 Jun 2019 10:32:51 +0100 Subject: [PATCH 15/23] Added URL of master git repository. --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 8aaf03f..cc894e6 100644 --- a/README.md +++ b/README.md @@ -156,3 +156,9 @@ SPDX-License-Identifier: BSD-2-Clause with the Informal]({https://www.brucker.ch/bibliography/download/2018/brucker.ea-isabelle-ontologies-2018.pdf). In Conference on Intelligent Computer Mathematics (CICM). Lecture Notes in Computer Science (11006), Springer-Verlag, 2018. + +## Master Repository + +The master git repository for this project is hosted +. + From 29d2aa2eae4cd2a7fcc5898c9e38c4ad712da245 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Mon, 17 Jun 2019 13:08:32 +0100 Subject: [PATCH 16/23] Reverted 39196965210b5d5953283c91b49c081de1d9466b. --- patches/thy_output.ML | 722 +++++++++++++++++++++++++----------------- 1 file changed, 426 insertions(+), 296 deletions(-) diff --git a/patches/thy_output.ML b/patches/thy_output.ML index acf55de..a503c19 100644 --- a/patches/thy_output.ML +++ b/patches/thy_output.ML @@ -1,181 +1,252 @@ (* Title: Pure/Thy/thy_output.ML - Author: Makarius + Author: Markus Wenzel, TU Muenchen -Theory document output. +Theory document output with antiquotations. 
*) signature THY_OUTPUT = sig - val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list - val check_comments: Proof.context -> Symbol_Pos.T list -> unit - val output_token: Proof.context -> Token.T -> Latex.text list - val output_source: Proof.context -> string -> Latex.text list - type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state} - val present_thy: Options.T -> theory -> segment list -> Latex.text list + val display: bool Config.T + val quotes: bool Config.T + val margin: int Config.T + val indent: int Config.T + val source: bool Config.T + val break: bool Config.T + val modes: string Config.T + val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context + val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory + val check_command: Proof.context -> xstring * Position.T -> string + val check_option: Proof.context -> xstring * Position.T -> string + val print_antiquotations: bool -> Proof.context -> unit + val antiquotation: binding -> 'a context_parser -> + ({source: Token.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) -> + theory -> theory + val boolean: string -> bool + val integer: string -> int + val eval_antiquote: Toplevel.state -> Antiquote.text_antiquote -> string + val output_text: Toplevel.state -> {markdown: bool} -> Input.source -> string + val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T val set_meta_args_parser : (theory -> string parser) -> unit + val pretty_text: Proof.context -> string -> Pretty.T val pretty_term: Proof.context -> term -> Pretty.T val pretty_thm: Proof.context -> thm -> Pretty.T - val lines: Latex.text list -> Latex.text list - val items: Latex.text list -> Latex.text list - val isabelle: Proof.context -> Latex.text list -> Latex.text - val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text - val typewriter: Proof.context -> string -> Latex.text - val verbatim: Proof.context -> string -> Latex.text - val source: Proof.context -> Token.src -> Latex.text - val pretty: Proof.context -> Pretty.T -> Latex.text - val pretty_source: Proof.context -> Token.src -> Pretty.T -> Latex.text - val pretty_items: Proof.context -> Pretty.T list -> Latex.text - val pretty_items_source: Proof.context -> Token.src -> Pretty.T list -> Latex.text - val antiquotation_pretty: - binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory - val antiquotation_pretty_source: - binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory - val antiquotation_raw: - binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory - val antiquotation_verbatim: - binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory + val str_of_source: Token.src -> string + val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context -> + Token.src -> 'a list -> Pretty.T list + val string_of_margin: Proof.context -> Pretty.T -> string + val output: Proof.context -> Pretty.T list -> string + val verbatim_text: Proof.context -> string -> string + val document_command: {markdown: bool} -> (xstring * Position.T) option * Input.source -> + Toplevel.transition -> Toplevel.transition end; structure Thy_Output: THY_OUTPUT = struct -(* output document source *) +(** options **) -val output_symbols = single o Latex.symbols_output; +val display = Attrib.setup_option_bool 
("thy_output_display", \<^here>); +val break = Attrib.setup_option_bool ("thy_output_break", \<^here>); +val quotes = Attrib.setup_option_bool ("thy_output_quotes", \<^here>); +val margin = Attrib.setup_option_int ("thy_output_margin", \<^here>); +val indent = Attrib.setup_option_int ("thy_output_indent", \<^here>); +val source = Attrib.setup_option_bool ("thy_output_source", \<^here>); +val modes = Attrib.setup_option_string ("thy_output_modes", \<^here>); -fun output_comment ctxt (kind, syms) = - (case kind of - Comment.Comment => - Input.cartouche_content syms - |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false) - {markdown = false} - |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}" - | Comment.Cancel => - Symbol_Pos.cartouche_content syms - |> output_symbols - |> Latex.enclose_body "%\n\\isamarkupcancel{" "}" - | Comment.Latex => - [Latex.symbols (Symbol_Pos.cartouche_content syms)]) -and output_comment_document ctxt (comment, syms) = - (case comment of - SOME kind => output_comment ctxt (kind, syms) - | NONE => [Latex.symbols syms]) -and output_document_text ctxt syms = - Comment.read_body syms |> maps (output_comment_document ctxt) -and output_document ctxt {markdown} source = + +structure Wrappers = Proof_Data +( + type T = ((unit -> string) -> unit -> string) list; + fun init _ = []; +); + +fun add_wrapper wrapper = Wrappers.map (cons wrapper); + +val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f); + + + +(** maintain global antiquotations **) + +structure Antiquotations = Theory_Data +( + type T = + (Token.src -> Toplevel.state -> Proof.context -> string) Name_Space.table * + (string -> Proof.context -> Proof.context) Name_Space.table; + val empty : T = + (Name_Space.empty_table Markup.document_antiquotationN, + Name_Space.empty_table Markup.document_antiquotation_optionN); + val extend = I; + fun merge ((commands1, options1), (commands2, options2)) : T = + (Name_Space.merge_tables (commands1, commands2), + Name_Space.merge_tables (options1, options2)); +); + +val get_antiquotations = Antiquotations.get o Proof_Context.theory_of; + +fun add_command name cmd thy = thy + |> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd)); + +fun add_option name opt thy = thy + |> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd)); + +fun check_command ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt)); + +fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)); + +fun command src state ctxt = + let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src + in f src' state ctxt end; + +fun option ((xname, pos), s) ctxt = let - val pos = Input.pos_of source; - val syms = Input.source_explode source; + val (_, opt) = + Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos); + in opt s ctxt end; - val output_antiquotes = - maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt); - - fun output_line line = - (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @ - output_antiquotes (Markdown.line_content line); - - fun output_block (Markdown.Par lines) = - Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines)) - | output_block (Markdown.List {kind, body, ...}) = - Latex.environment_block (Markdown.print_kind kind) (output_blocks body) - and output_blocks blocks = separate (Latex.string "\n\n") (map 
output_block blocks); +fun print_antiquotations verbose ctxt = + let + val (commands, options) = get_antiquotations ctxt; + val command_names = map #1 (Name_Space.markup_table verbose ctxt commands); + val option_names = map #1 (Name_Space.markup_table verbose ctxt options); in - if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then [] - else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms - then - let - val ants = Antiquote.parse_comments pos syms; - val reports = Antiquote.antiq_reports ants; - val blocks = Markdown.read_antiquotes ants; - val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks); - in output_blocks blocks end - else - let - val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms); - val reports = Antiquote.antiq_reports ants; - val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants); - in output_antiquotes ants end - end; + [Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names), + Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)] + end |> Pretty.writeln_chunks; + +fun antiquotation name scan body = + add_command name + (fn src => fn state => fn ctxt => + let val (x, ctxt') = Token.syntax scan src ctxt + in body {source = src, state = state, context = ctxt'} x end); -(* output tokens with formal comments *) + +(** syntax of antiquotations **) + +(* option values *) + +fun boolean "" = true + | boolean "true" = true + | boolean "false" = false + | boolean s = error ("Bad boolean value: " ^ quote s); + +fun integer s = + let + fun int ss = + (case Library.read_int ss of (i, []) => i + | _ => error ("Bad integer value: " ^ quote s)); + in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end; + + +(* outer syntax *) local -val output_symbols_antiq = - (fn Antiquote.Text syms => output_symbols syms - | Antiquote.Control {name = (name, _), body, ...} => - Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) :: - output_symbols body - | Antiquote.Antiq {body, ...} => - Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body)); +val property = + Parse.position Parse.name -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.name) ""; -fun output_comment_symbols ctxt {antiq} (comment, syms) = - (case (comment, antiq) of - (NONE, false) => output_symbols syms - | (NONE, true) => - Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms - |> maps output_symbols_antiq - | (SOME comment, _) => output_comment ctxt (comment, syms)); - -fun output_body ctxt antiq bg en syms = - Comment.read_body syms - |> maps (output_comment_symbols ctxt {antiq = antiq}) - |> Latex.enclose_body bg en; +val properties = + Scan.optional (Parse.$$$ "[" |-- Parse.!!! 
(Parse.enum "," property --| Parse.$$$ "]")) []; in -fun output_token ctxt tok = - let - fun output antiq bg en = - output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok)); - in - (case Token.kind_of tok of - Token.Comment NONE => [] - | Token.Command => output false "\\isacommand{" "}" - | Token.Keyword => - if Symbol.is_ascii_identifier (Token.content_of tok) - then output false "\\isakeyword{" "}" - else output false "" "" - | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" - | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" - | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" - | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}" - | _ => output false "" "") - end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok)); - -fun output_source ctxt s = - output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none)); - -fun check_comments ctxt = - Comment.read_body #> List.app (fn (comment, syms) => - let - val pos = #1 (Symbol_Pos.range syms); - val _ = - comment |> Option.app (fn kind => - Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind))); - val _ = output_comment_symbols ctxt {antiq = false} (comment, syms); - in if comment = SOME Comment.Comment then check_comments ctxt syms else () end); +val antiq = + Parse.!!! + (Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof) + >> (fn ((name, props), args) => (props, name :: args)); end; +(* eval antiquote *) + +local + +fun eval_antiq state (opts, src) = + let + val preview_ctxt = fold option opts (Toplevel.presentation_context_of state); + val print_ctxt = Context_Position.set_visible false preview_ctxt; + + fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) (); + val _ = cmd preview_ctxt; + val print_modes = space_explode "," (Config.get print_ctxt modes) @ [Latex.latexN]; + in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end; + +in + +fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss + | eval_antiquote state (Antiquote.Control {name, body, ...}) = + eval_antiq state + ([], Token.make_src name (if null body then [] else [Token.read_cartouche body])) + | eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) = + let + val keywords = + (case try Toplevel.presentation_context_of state of + SOME ctxt => Thy_Header.get_keywords' ctxt + | NONE => + error ("Unknown context -- cannot expand document antiquotations" ^ + Position.here pos)); + in eval_antiq state (Token.read_antiq keywords antiq (body, pos)) end; + +end; + + +(* output text *) + +fun output_text state {markdown} source = + let + val is_reported = + (case try Toplevel.context_of state of + SOME ctxt => Context_Position.is_visible ctxt + | NONE => true); + + val pos = Input.pos_of source; + val syms = Input.source_explode source; + + val _ = + if is_reported then + Position.report pos (Markup.language_document (Input.is_delimited source)) + else (); + + val output_antiquotes = map (eval_antiquote state) #> implode; + + fun output_line line = + (if Markdown.line_is_item line then "\\item " else "") ^ + output_antiquotes (Markdown.line_content line); + + fun output_blocks blocks = space_implode "\n\n" (map output_block blocks) + and output_block (Markdown.Par lines) = cat_lines (map output_line lines) + | output_block (Markdown.List {kind, body, ...}) = + Latex.environment 
(Markdown.print_kind kind) (output_blocks body); + in + if Toplevel.is_skipped_proof state then "" + else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms + then + let + val ants = Antiquote.parse pos syms; + val reports = Antiquote.antiq_reports ants; + val blocks = Markdown.read_antiquotes ants; + val _ = if is_reported then Position.reports (reports @ Markdown.reports blocks) else (); + in output_blocks blocks end + else + let + val ants = Antiquote.parse pos (Symbol_Pos.trim_blanks syms); + val reports = Antiquote.antiq_reports ants; + val _ = if is_reported then Position.reports (reports @ Markdown.text_reports ants) else (); + in output_antiquotes ants end + end; + + (** present theory source **) (*NB: arranging white space around command spans is a black art*) -val is_white = Token.is_space orf Token.is_informal_comment; -val is_black = not o is_white; - -val is_white_comment = Token.is_informal_comment; -val is_black_comment = Token.is_formal_comment; - - (* presentation tokens *) datatype token = - Ignore_Token + No_Token | Basic_Token of Token.T | Markup_Token of string * string * Input.source | Markup_Env_Token of string * string * Input.source @@ -184,24 +255,25 @@ datatype token = fun basic_token pred (Basic_Token tok) = pred tok | basic_token _ _ = false; -val white_token = basic_token is_white; -val white_comment_token = basic_token is_white_comment; +val improper_token = basic_token Token.is_improper; +val comment_token = basic_token Token.is_comment; val blank_token = basic_token Token.is_blank; val newline_token = basic_token Token.is_newline; -fun present_token ctxt tok = + +(* output token *) + +fun output_token state tok = (case tok of - Ignore_Token => [] - | Basic_Token tok => output_token ctxt tok + No_Token => "" + | Basic_Token tok => Latex.output_token tok | Markup_Token (cmd, meta_args, source) => - Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ meta_args ^ "{") "%\n}\n" - (output_document ctxt {markdown = false} source) + "%\n\\isamarkup" ^ cmd ^ meta_args ^ "{" ^ output_text state {markdown = false} source ^ "%\n}\n" | Markup_Env_Token (cmd, meta_args, source) => - [Latex.environment_block - ("isamarkup" ^ cmd) - (Latex.string meta_args :: output_document ctxt {markdown = true} source)] + Latex.environment ("isamarkup" ^ cmd) + (meta_args ^ output_text state {markdown = true} source) | Raw_Token source => - Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]); + "%\n" ^ output_text state {markdown = true} source ^ "\n"); (* command spans *) @@ -213,16 +285,16 @@ datatype span = Span of command * (source * source * source * source) * bool; fun make_span cmd src = let - fun chop_newline (tok :: toks) = + fun take_newline (tok :: toks) = if newline_token (fst tok) then ([tok], toks, true) else ([], tok :: toks, false) - | chop_newline [] = ([], [], false); + | take_newline [] = ([], [], false); val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = src - |> chop_prefix (white_token o fst) - ||>> chop_suffix (white_token o fst) - ||>> chop_prefix (white_comment_token o fst) - ||> chop_newline; + |> take_prefix (improper_token o fst) + ||>> take_suffix (improper_token o fst) + ||>> take_prefix (comment_token o fst) + ||> take_newline; in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; @@ -235,72 +307,42 @@ fun err_bad_nesting pos = fun edge which f (x: string option, y) = if x = y then I - else (case which (x, y) of NONE => I | SOME 
txt => cons (Latex.string (f txt))); + else (case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt)); val begin_tag = edge #2 Latex.begin_tag; val end_tag = edge #1 Latex.end_tag; fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e; fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e; -fun read_tag s = - (case space_explode "%" s of - ["", b] => (SOME b, NONE) - | [a, b] => (NONE, SOME (a, b)) - | _ => error ("Bad document_tags specification: " ^ quote s)); - in -fun make_command_tag options keywords = +fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) = let - val document_tags = - map read_tag (space_explode "," (Options.string options \<^system_option>\document_tags\)); - val document_tags_default = map_filter #1 document_tags; - val document_tags_command = map_filter #2 document_tags; - in - fn {cmd_name, cmd_tags, tag, active_tag} => fn state => fn state' => - let - val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); - - val keyword_tags = - if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"] - else Keyword.command_tags keywords cmd_name; - val command_tags = - the_list (AList.lookup (op =) document_tags_command cmd_name) @ - keyword_tags @ document_tags_default; - - val active_tag' = - if is_some tag' then tag' - else - (case command_tags of - default_tag :: _ => SOME default_tag - | [] => - if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state - then active_tag - else NONE); - in {tag' = tag', active_tag' = active_tag'} end - end; - -fun present_span thy command_tag span state state' - (tag_stack, active_tag, newline, latex, present_cont) = - let - val ctxt' = - Toplevel.presentation_context state' - handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN; val present = fold (fn (tok, (flag, 0)) => - fold cons (present_token ctxt' tok) - #> cons (Latex.string flag) + Buffer.add (output_token state' tok) + #> Buffer.add flag | _ => I); val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span; val (tag, tags) = tag_stack; - val {tag', active_tag'} = - command_tag {cmd_name = cmd_name, cmd_tags = cmd_tags, tag = tag, active_tag = active_tag} - state state'; - val edge = (active_tag, active_tag'); + val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); val nesting = Toplevel.level state' - Toplevel.level state; + val active_tag' = + if is_some tag' then tag' + else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE + else + (case Keyword.command_tags keywords cmd_name of + default_tag :: _ => SOME default_tag + | [] => + if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state + then active_tag + else NONE); + + val edge = (active_tag, active_tag'); + val newline' = if is_none active_tag' then span_newline else newline; @@ -312,8 +354,8 @@ fun present_span thy command_tag span state state' tg :: tgs => (tg, tgs) | [] => err_bad_nesting (Position.here cmd_pos)); - val latex' = - latex + val buffer' = + buffer |> end_tag edge |> close_delim (fst present_cont) edge |> snd present_cont @@ -323,12 +365,12 @@ fun present_span thy command_tag span state state' val present_cont' = if newline then (present (#3 srcs), present (#4 srcs)) else (I, present (#3 srcs) #> present (#4 srcs)); - in (tag_stack', active_tag', newline', latex', present_cont') end; + in (tag_stack', active_tag', newline', buffer', present_cont') end; -fun 
present_trailer ((_, tags), active_tag, _, latex, present_cont) = +fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) = if not (null tags) then err_bad_nesting " at end of theory" else - latex + buffer |> end_tag (active_tag, NONE) |> close_delim (fst present_cont) (active_tag, NONE) |> snd present_cont; @@ -344,9 +386,9 @@ val markup_true = "\\isamarkuptrue%\n"; val markup_false = "\\isamarkupfalse%\n"; val space_proper = - Scan.one Token.is_blank -- Scan.many is_white_comment -- Scan.one is_black; + Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper; -val is_improper = not o (is_black orf Token.is_begin_ignore orf Token.is_end_ignore); +val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore); val improper = Scan.many is_improper; val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper)); val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank)); @@ -366,35 +408,32 @@ val locale = Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |-- Parse.!!! (improper |-- Parse.name --| (improper -- Parse.$$$ ")"))); -val meta_args_parser_hook = Unsynchronized.ref ((fn thy => fn s => ("",s)): theory -> string parser) +val meta_args_parser_hook = Unsynchronized.ref((fn thy => fn s => ("",s)): theory -> string parser) in -type segment = {span: Command_Span.span, command: Toplevel.transition, state: Toplevel.state}; - -fun present_thy options thy (segments: segment list) = +fun present_thy thy command_results toks = let val keywords = Thy_Header.get_keywords thy; - (* tokens *) val ignored = Scan.state --| ignore - >> (fn d => (NONE, (Ignore_Token, ("", d)))); + >> (fn d => (NONE, (No_Token, ("", d)))); fun markup pred mk flag = Scan.peek (fn d => improper |-- - Parse.position (Scan.one (fn tok => - Token.is_command tok andalso pred keywords (Token.content_of tok))) -- + Parse.position (Scan.one (fn tok => Token.is_command tok andalso + pred keywords (Token.content_of tok))) -- Scan.repeat tag -- (improper |-- (Parse.!!!! - ((!meta_args_parser_hook thy) + ( (!meta_args_parser_hook thy) -- ( (improper -- locale -- improper) |-- (Parse.document_source)) --| improper_end))) - >> (fn (((tok, pos'), tags), (meta_args, source)) => + >> (fn (((tok, pos'), tags), (meta_args,source)) => let val name = Token.content_of tok in (SOME (name, pos', tags), (mk (name, meta_args, source), (flag, d))) end)); @@ -407,7 +446,9 @@ fun present_thy options thy (segments: segment list) = (Basic_Token cmd, (markup_false, d)))])); val cmt = Scan.peek (fn d => - Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); + (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |-- + Parse.!!!! 
(improper |-- Parse.document_source) >> + (fn source => (NONE, (Markup_Token ("cmt", "", source), ("", d))))); val other = Scan.peek (fn d => Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d))))); @@ -429,13 +470,13 @@ fun present_thy options thy (segments: segment list) = val cmd = Scan.one (is_some o fst); val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; - val white_comments = Scan.many (white_comment_token o fst o snd); + val comments = Scan.many (comment_token o fst o snd); val blank = Scan.one (blank_token o fst o snd); val newline = Scan.one (newline_token o fst o snd); val before_cmd = - Scan.option (newline -- white_comments) -- - Scan.option (newline -- white_comments) -- - Scan.option (blank -- white_comments) -- cmd; + Scan.option (newline -- comments) -- + Scan.option (newline -- comments) -- + Scan.option (blank -- comments) -- cmd; val span = Scan.repeat non_cmd -- cmd -- @@ -444,113 +485,202 @@ fun present_thy options thy (segments: segment list) = >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); - val spans = segments - |> maps (Command_Span.content o #span) - |> drop_suffix Token.is_space - |> Source.of_list + val spans = toks + |> take_suffix Token.is_space |> #1 + |> Source.of_list |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat)) |> Source.source stopper (Scan.error (Scan.bulk span)) |> Source.exhaust; - val command_results = - segments |> map_filter (fn {command, state, ...} => - if Toplevel.is_ignored command then NONE else SOME (command, state)); - - (* present commands *) - val command_tag = make_command_tag options keywords; - fun present_command tr span st st' = - Toplevel.setmp_thread_position tr (present_span thy command_tag span st st'); + Toplevel.setmp_thread_position tr (present_span keywords span st st'); fun present _ [] = I - | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest; + | present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest; in if length command_results = length spans then - ((NONE, []), NONE, true, [], (I, I)) - |> present Toplevel.toplevel (spans ~~ command_results) + ((NONE, []), NONE, true, Buffer.empty, (I, I)) + |> present Toplevel.toplevel (command_results ~~ spans) |> present_trailer - |> rev else error "Messed-up outer syntax for presentation" end; -fun set_meta_args_parser f = meta_args_parser_hook := f +fun set_meta_args_parser f = (meta_args_parser_hook:= f) end; -(** standard output operations **) +(** setup default output **) -(* pretty printing *) +(* options *) -fun pretty_term ctxt t = - Syntax.pretty_term (Variable.auto_fixes t ctxt) t; +val _ = Theory.setup + (add_option @{binding show_types} (Config.put show_types o boolean) #> + add_option @{binding show_sorts} (Config.put show_sorts o boolean) #> + add_option @{binding show_structs} (Config.put show_structs o boolean) #> + add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #> + add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #> + add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #> + add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #> + add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #> + add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #> + add_option @{binding display} (Config.put display o 
boolean) #> + add_option @{binding break} (Config.put break o boolean) #> + add_option @{binding quotes} (Config.put quotes o boolean) #> + add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #> + add_option @{binding margin} (Config.put margin o integer) #> + add_option @{binding indent} (Config.put indent o integer) #> + add_option @{binding source} (Config.put source o boolean) #> + add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer)); + + +(* basic pretty printing *) + +fun perhaps_trim ctxt = + not (Config.get ctxt display) ? Symbol.trim_blanks; + +fun pretty_text ctxt = + Pretty.chunks o map Pretty.str o map (perhaps_trim ctxt) o split_lines; + +fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t; fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; +fun pretty_term_style ctxt (style, t) = + pretty_term ctxt (style t); + +fun pretty_thm_style ctxt (style, th) = + pretty_term ctxt (style (Thm.full_prop_of th)); + +fun pretty_term_typ ctxt (style, t) = + let val t' = style t + in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end; + +fun pretty_term_typeof ctxt (style, t) = + Syntax.pretty_typ ctxt (Term.fastype_of (style t)); + +fun pretty_const ctxt c = + let + val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) + handle TYPE (msg, _, _) => error msg; + val ([t'], _) = Variable.import_terms true [t] ctxt; + in pretty_term ctxt t' end; + +fun pretty_abbrev ctxt s = + let + val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s; + fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t); + val (head, args) = Term.strip_comb t; + val (c, T) = Term.dest_Const head handle TERM _ => err (); + val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c + handle TYPE _ => err (); + val t' = Term.betapplys (Envir.expand_atom T (U, u), args); + val eq = Logic.mk_equals (t, t'); + val ctxt' = Variable.auto_fixes eq ctxt; + in Proof_Context.pretty_term_abbrev ctxt' eq end; + +fun pretty_locale ctxt (name, pos) = + let + val thy = Proof_Context.theory_of ctxt + in (Pretty.str o Locale.extern thy o Locale.check thy) (name, pos) end; + +fun pretty_class ctxt = + Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt; + +fun pretty_type ctxt s = + let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s + in Pretty.str (Proof_Context.extern_type ctxt name) end; + +fun pretty_prf full ctxt = Proof_Syntax.pretty_clean_proof_of ctxt full; + +fun pretty_theory ctxt (name, pos) = (Theory.check ctxt (name, pos); Pretty.str name); + (* default output *) -val lines = separate (Latex.string "\\isanewline%\n"); -val items = separate (Latex.string "\\isasep\\isanewline%\n"); +val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src; -fun isabelle ctxt body = - if Config.get ctxt Document_Antiquotation.thy_output_display - then Latex.environment_block "isabelle" body - else Latex.block (Latex.enclose_body "\\isa{" "}" body); +fun maybe_pretty_source pretty ctxt src xs = + map (pretty ctxt) xs (*always pretty in order to exhibit errors!*) + |> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I); -fun isabelle_typewriter ctxt body = - if Config.get ctxt Document_Antiquotation.thy_output_display - then Latex.environment_block "isabellett" body - else Latex.block (Latex.enclose_body "\\isatt{" "}" body); +fun string_of_margin ctxt = 
Pretty.string_of_margin (Config.get ctxt margin); -fun typewriter ctxt s = - isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)]; - -fun verbatim ctxt = - if Config.get ctxt Document_Antiquotation.thy_output_display - then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt - else split_lines #> map (typewriter ctxt) #> lines #> Latex.block; - -fun source ctxt = - Token.args_of_src - #> map (Token.unparse #> Document_Antiquotation.prepare_lines ctxt) - #> space_implode " " - #> output_source ctxt - #> isabelle ctxt; - -fun pretty ctxt = - Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt; - -fun pretty_source ctxt src prt = - if Config.get ctxt Document_Antiquotation.thy_output_source - then source ctxt src else pretty ctxt prt; - -fun pretty_items ctxt = - map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt; - -fun pretty_items_source ctxt src prts = - if Config.get ctxt Document_Antiquotation.thy_output_source - then source ctxt src else pretty_items ctxt prts; +fun output ctxt prts = + prts + |> Config.get ctxt quotes ? map Pretty.quote + |> (if Config.get ctxt display then + map (Pretty.indent (Config.get ctxt indent) #> string_of_margin ctxt #> Output.output) + #> space_implode "\\isasep\\isanewline%\n" + #> Latex.environment "isabelle" + else + map + ((if Config.get ctxt break then string_of_margin ctxt else Pretty.unformatted_string_of) + #> Output.output) + #> space_implode "\\isasep\\isanewline%\n" + #> enclose "\\isa{" "}"); -(* antiquotation variants *) +(* verbatim text *) -fun antiquotation_pretty name scan f = - Document_Antiquotation.setup name scan - (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x)); +fun verbatim_text ctxt = + if Config.get ctxt display then + split_lines #> map (prefix (Symbol.spaces (Config.get ctxt indent))) #> cat_lines #> + Latex.output_ascii #> Latex.environment "isabellett" + else + split_lines #> + map (Latex.output_ascii #> enclose "\\isatt{" "}") #> + space_implode "\\isasep\\isanewline%\n"; -fun antiquotation_pretty_source name scan f = - Document_Antiquotation.setup name scan - (fn {context = ctxt, source = src, argument = x} => pretty_source ctxt src (f ctxt x)); -fun antiquotation_raw name scan f = - Document_Antiquotation.setup name scan - (fn {context = ctxt, argument = x, ...} => f ctxt x); +(* antiquotations for basic entities *) -fun antiquotation_verbatim name scan f = - antiquotation_raw name scan (fn ctxt => verbatim ctxt o f ctxt); +local + +fun basic_entities name scan pretty = + antiquotation name scan (fn {source, context = ctxt, ...} => + output ctxt o maybe_pretty_source pretty ctxt source); + +fun basic_entities_style name scan pretty = + antiquotation name scan (fn {source, context = ctxt, ...} => fn (style, xs) => + output ctxt + (maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) ctxt source xs)); + +fun basic_entity name scan = basic_entities name (scan >> single); + +in + +val _ = Theory.setup + (basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #> + basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #> + basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #> + basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #> + basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #> + basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #> + 
basic_entity @{binding abbrev} (Scan.lift Args.embedded_inner_syntax) pretty_abbrev #>
+  basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #>
+  basic_entity @{binding locale} (Scan.lift (Parse.position Args.name)) pretty_locale #>
+  basic_entity @{binding class} (Scan.lift Args.embedded_inner_syntax) pretty_class #>
+  basic_entity @{binding type} (Scan.lift Args.embedded) pretty_type #>
+  basic_entities @{binding prf} Attrib.thms (pretty_prf false) #>
+  basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #>
+  basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory);
+
+end;
+
+
+(** document command **)
+
+
+fun document_command markdown (loc, txt) =
+  Toplevel.keep (fn state =>
+    (case loc of
+      NONE => ignore (output_text state markdown txt)
+    | SOME (_, pos) =>
+        error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
+  Toplevel.present_local_theory loc (fn state => ignore (output_text state markdown txt));
 
 end;
 

From 1e85afcfb9aeae5681413d2d09da3241f692c90f Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Mon, 17 Jun 2019 16:17:42 +0100
Subject: [PATCH 17/23] Fix wrong path for the actual Isabelle version.

---
 install | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/install b/install
index e8fb2c1..621a79a 100755
--- a/install
+++ b/install
@@ -38,7 +38,6 @@ AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06
 ISABELLE=`which isabelle`
 GEN_DIR=document-generator
 PROG=`echo $0 | sed 's|.*/||'`;
-ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
 SKIP="false"
 VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
 for i in $VARS; do
@@ -225,6 +224,7 @@ do
     shift
 done
 
+ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
 echo "Isabelle/DOF Installer"
 echo "======================"
 

From 567fedf6c8063905c8d1b584954f97d1c08cb4a8 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Mon, 17 Jun 2019 16:34:49 +0100
Subject: [PATCH 18/23] Fix wrong path for the actual Isabelle version and update the dependent variables.

---
 install | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/install b/install
index 621a79a..8bdd9e1 100755
--- a/install
+++ b/install
@@ -35,15 +35,6 @@ ISABELLE_VERSION="Isabelle2018: August 2018"
 ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
 AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz"
 
-ISABELLE=`which isabelle`
-GEN_DIR=document-generator
-PROG=`echo $0 | sed 's|.*/||'`;
-SKIP="false"
-VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
-for i in $VARS; do
-    export "$i"
-done
-
 print_help() {
     echo "Usage: $PROG [OPTION] "
@@ -224,7 +215,16 @@ do
     shift
 done
 
+
+ISABELLE=`which isabelle`
 ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
+GEN_DIR=document-generator
+PROG=`echo $0 | sed 's|.*/||'`;
+SKIP="false"
+VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
+for i in $VARS; do
+    export "$i"
+done
 
 echo "Isabelle/DOF Installer"
 echo "======================"

From 30c5876ade8d0a444cc78fb6a292d4f735167255 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Mon, 17 Jun 2019 16:35:40 +0100
Subject: [PATCH 19/23] Fix wrong path for the actual Isabelle version and update the dependent variables.
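
The default for ISABELLE must be set before the option loop, so that the
-i option can still override it, while everything derived from $ISABELLE
is only computed after the loop. A minimal sketch of the resulting
evaluation order (simplified; the script's real option handling does
more than shown here):

    #!/bin/sh
    # Default first; the -i option may replace it.
    ISABELLE=`which isabelle`

    while [ $# -gt 0 ]; do
        case "$1" in
            -i) ISABELLE="$2"; shift;;
        esac
        shift
    done

    # Derived values use the final $ISABELLE only:
    ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
    VARS=`$ISABELLE getenv ISABELLE_HOME_USER ISABELLE_HOME ISABELLE_TOOLS`
    for i in $VARS; do
        export "$i"
    done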
---
 install | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/install b/install
index 8bdd9e1..1a2dd14 100755
--- a/install
+++ b/install
@@ -197,6 +197,7 @@ install_and_register(){
 }
 
+ISABELLE=`which isabelle`
 
 while [ $# -gt 0 ]
 do
@@ -216,7 +217,6 @@ do
 done
 
-ISABELLE=`which isabelle`
 ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
 GEN_DIR=document-generator
 PROG=`echo $0 | sed 's|.*/||'`;

From ce78f54984a251e4c7d78e56d7d039a0c9d2f239 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Mon, 17 Jun 2019 19:46:44 +0100
Subject: [PATCH 20/23] Pushed the AFP version into a configuration variable.

---
 install | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/install b/install
index 1a2dd14..25daafd 100755
--- a/install
+++ b/install
@@ -33,7 +33,9 @@ shopt -s nocasematch
 
 # Global configuration
 ISABELLE_VERSION="Isabelle2018: August 2018"
 ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
-AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz"
+
+AFP_DATE="afp-2019-06-04"
+AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/"$AFP_DATE".tar.gz"
 
 print_help() {
     echo "Usage: $PROG [OPTION] "
@@ -100,14 +102,14 @@ check_afp_entries() {
 	    echo "       Trying to install AFP (this might take a few *minutes*) ...."
 	    extract=""
 	    for e in $missing; do
-		extract="$extract afp-2019-06-04/thys/$e"
+		extract="$extract $AFP_DATE/thys/$e"
 	    done
 	    mkdir -p .afp
 	    if curl -s -L $AFP_URL | tar zxf - -C .afp $extract; then
 		for e in $missing; do
 		    echo "       Registering $e in $ISABELLE_HOME_USER/ROOTS"
 		    touch $ISABELLE_HOME_USER/ROOTS
-		    grep -q $PWD/.afp/afp-2018-08-14/thys/$e $ISABELLE_HOME_USER/ROOTS || echo "$PWD/.afp/afp-2018-08-14/thys/$e" >> $ISABELLE_HOME_USER/ROOTS
+		    grep -q $PWD/.afp/$AFP_DATE/thys/$e $ISABELLE_HOME_USER/ROOTS || echo "$PWD/.afp/$AFP_DATE/thys/$e" >> $ISABELLE_HOME_USER/ROOTS
 		done
 		echo "       AFP installation successful."
 	    else

From 519095e8f59228d2898ae270073bfad79bb8ae62 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Mon, 17 Jun 2019 20:53:13 +0100
Subject: [PATCH 21/23] Initial commit.

---
 CHANGELOG.md | 9 +++++++++
 1 file changed, 9 insertions(+)
 create mode 100644 CHANGELOG.md

diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..c60b04b
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,9 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
+
+## [Unreleased]
+

From 39afa24591400361a0bd9d2e69fef618ad92bd23 Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Tue, 18 Jun 2019 06:33:41 +0100
Subject: [PATCH 22/23] Migration to Isabelle 2019.

---
 .ci/isabelle4isadof/Dockerfile |  2 +-
 README.md                      | 16 ++++++++--------
 install                        |  8 ++++----
 3 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/.ci/isabelle4isadof/Dockerfile b/.ci/isabelle4isadof/Dockerfile
index 76039a8..3ae2262 100644
--- a/.ci/isabelle4isadof/Dockerfile
+++ b/.ci/isabelle4isadof/Dockerfile
@@ -24,7 +24,7 @@
 #
 # SPDX-License-Identifier: BSD-2-Clause
 
-FROM logicalhacking:isabelle2018
+FROM logicalhacking:isabelle2019
 WORKDIR /home/isabelle
 COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy
 
diff --git a/README.md b/README.md
index f600dcb..dfd69f7 100644
--- a/README.md
+++ b/README.md
@@ -6,9 +6,9 @@ well as formal development.
 
 ## Prerequisites
 
-Isabelle/DOF requires [Isabelle 2018](http://isabelle.in.tum.de/website-Isabelle2018/).
-Please download the Isabelle 2018 distribution for your operating
-system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2018/).
+Isabelle/DOF requires [Isabelle 2019](http://isabelle.in.tum.de/website-Isabelle2019/).
+Please download the Isabelle 2019 distribution for your operating
+system from the [Isabelle website](http://isabelle.in.tum.de/website-Isabelle2019/).
 
 ## Installation
 
@@ -22,7 +22,7 @@ If a specific Isabelle version should be used (i.e., not the default one), the
 full path to the ``isabelle`` command needs to be passed as
 using the ``-i`` command line argument of the ``install`` script:
 ```console
-foo@bar:~$ ./install -i /usr/local/Isabelle2018/bin/isabelle
+foo@bar:~$ ./install -i /usr/local/Isabelle2019/bin/isabelle
 ```
 
 For further command line options of the installer, please use the
@@ -36,22 +36,22 @@ foo@bar:~$ ./install -h
 The installer will
 * apply a patch to Isabelle that is necessary to use Isabelle/DOF.
   If this patch installations fails, you need to manually replace
-  the file ``Isabelle2018/src/Pure/Thy/thy_output.ML`` in the Isabelle
+  the file ``Isabelle2019/src/Pure/Thy/thy_output.ML`` in the Isabelle
   distribution with the file ``patches/thy_output.ML`` from the
   Isabelle/DOF distribution:
   ```console
   cp patches/thy_output.ML `isabelle getenv -b ISABELLE_HOME`/src/Pure/Thy/
   ```
 * install required entries from the [AFP](https://www.isa-afp.org). If this
-  installations fails, you need to manually install the AFP for Isabelle 2018 as follows:
-  Download the [AFP for Isabelle 2018](https://sourceforge.net/projects/afp/files/afp-Isabelle2018/afp-2019-06-04.tar.gz)
+  installations fails, you need to manually install the AFP for Isabelle 2019 as follows:
+  Download the [AFP for Isabelle 2019](https://www.isa-afp.org/release/afp-2019-06-17.tar.gz)
   and follow the [instructions for installing the AFP as Isabelle
   component](https://www.isa-afp.org/using.html). If you have extracted
   the AFP archive into the directory to `/home/myself/afp`, you should
   run the following command to make the AFP session `ROOTS` available to
   Isabelle:
   ```console
-  echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2018/ROOTS
+  echo "/home/myself/afp/thys" >> ~/.isabelle/Isabelle2019/ROOTS
   ```
 * install the Isabelle/DOF-plugin into the Isabelle user directory
   (the exact location depends on the Isabelle version).
diff --git a/install b/install
index 25daafd..c32be68 100755
--- a/install
+++ b/install
@@ -31,11 +31,11 @@
 shopt -s nocasematch
 
 # Global configuration
-ISABELLE_VERSION="Isabelle2018: August 2018"
-ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2018/"
+ISABELLE_VERSION="Isabelle2019: June 2019"
+ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2019/"
 
-AFP_DATE="afp-2019-06-04"
-AFP_URL="https://sourceforge.net/projects/afp/files/afp-Isabelle2018/"$AFP_DATE".tar.gz"
+AFP_DATE="afp-2019-06-17"
+AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
 
 print_help() {

From 1a9ed995f58a82783d44eaa57f7ab091ddcb5f5d Mon Sep 17 00:00:00 2001
From: "Achim D. Brucker"
Date: Tue, 18 Jun 2019 09:12:13 +0100
Subject: [PATCH 23/23] Initial update of the APIs used to Isabelle 2019.
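
Isabelle2019 renamed or moved a few of the entry points that the patched
document generator relies on; the hunks below adjust the call sites
one-to-one:

    Comment.markups kind   ->  Comment.kind_markups kind
    Parse.tag_name         ->  Document_Source.tag_name
    Toplevel.toplevel      ->  Toplevel.init_toplevel ()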
--- patches/thy_output.ML | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/patches/thy_output.ML b/patches/thy_output.ML index c794e90..898e35d 100644 --- a/patches/thy_output.ML +++ b/patches/thy_output.ML @@ -153,7 +153,7 @@ fun check_comments ctxt = val pos = #1 (Symbol_Pos.range syms); val _ = comment |> Option.app (fn kind => - Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.markups kind))); + Context_Position.reports ctxt (map (pair pos) (Markup.cartouche :: Comment.kind_markups kind))); val _ = output_comment_symbols ctxt {antiq = false} (comment, syms); in if comment = SOME Comment.Comment then check_comments ctxt syms else () end); @@ -360,7 +360,7 @@ val ignore = (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) >> pair (d - 1)); -val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end); +val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Document_Source.tag_name --| blank_end); val locale = Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |-- @@ -477,7 +477,7 @@ fun present_thy options thy (segments: segment list) = in if length command_results = length spans then ((NONE, []), NONE, true, [], (I, I)) - |> present Toplevel.toplevel (spans ~~ command_results) + |> present (Toplevel.init_toplevel ()) (spans ~~ command_results) |> present_trailer |> rev else error "Messed-up outer syntax for presentation"