From f437e1337cbc87c77128d1cb7a92820fcaf5132d Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 28 Apr 2018 17:41:34 +0100 Subject: [PATCH 1/7] Disable markdown. --- Isa_DOF.thy | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Isa_DOF.thy b/Isa_DOF.thy index da2569f..547f76f 100644 --- a/Isa_DOF.thy +++ b/Isa_DOF.thy @@ -499,17 +499,17 @@ val _ = val _ = Outer_Syntax.command ("paragraph*", @{here}) "paragraph heading" (attributes -- Parse.opt_target -- Parse.document_source --| semi - >> enriched_document_command {markdown = true}); + >> enriched_document_command {markdown = false}); val _ = Outer_Syntax.command ("subparagraph*", @{here}) "subparagraph heading" (attributes -- Parse.opt_target -- Parse.document_source --| semi - >> enriched_document_command {markdown = true}); + >> enriched_document_command {markdown = false}); val _ = Outer_Syntax.command ("text*", @{here}) "formal comment (primary style)" (attributes -- Parse.opt_target -- Parse.document_source - >> enriched_document_command {markdown = true}); + >> enriched_document_command {markdown = false}); val _ = @@ -750,4 +750,4 @@ text {* Lq *} -end \ No newline at end of file +end From 3587183a3ffa710fef862e8915cc54d48fc57f11 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 28 Apr 2018 17:42:16 +0100 Subject: [PATCH 2/7] Renaming: keyword_list => keywordlist (to avoid LaTex special characters). --- examples/simple/Article.thy | 4 ++-- ontologies/scholarly_paper.thy | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/simple/Article.thy b/examples/simple/Article.thy index 662ccf3..5407850 100644 --- a/examples/simple/Article.thy +++ b/examples/simple/Article.thy @@ -18,7 +18,7 @@ text*[auth4::author, affiliation="''Universit\\'e Paris-Sud''"]\Burkhart W term "affiliation_update (\ _ . '''') S" -text*[abs::abstract, keyword_list="[]"] {* Isabelle/Isar is a system +text*[abs::abstract, keywordlist="[]"] {* Isabelle/Isar is a system framework with many similarities to Eclipse; it is mostly known as part of Isabelle/HOL, an interactive theorem proving and code generation environment. Recently, an Document Ontology Framework has been developed as a plugin in @@ -60,7 +60,7 @@ simplify; *} term "scholarly_paper.author.affiliation_update" -term "scholarly_paper.abstract.keyword_list_update" +term "scholarly_paper.abstract.keywordlist_update" term "scholarly_paper.introduction.comment2_update" ML{* val a $ b $ c = @{term "X\affiliation:='' ''\"}; fold; *} diff --git a/ontologies/scholarly_paper.thy b/ontologies/scholarly_paper.thy index 7125d8a..d34593b 100644 --- a/ontologies/scholarly_paper.thy +++ b/ontologies/scholarly_paper.thy @@ -18,7 +18,7 @@ doc_class author = affiliation :: "string" doc_class abstract = - keyword_list :: "string list" <= "[]" + keywordlist :: "string list" <= "[]" doc_class text_section = From 0f9910917ec378694eabc5767461d0cac0bb55fd Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 28 Apr 2018 17:44:43 +0100 Subject: [PATCH 3/7] Initial conversion. 
--- .../dof/converter/DofConverter.scala | 302 +++++++++++++----- 1 file changed, 227 insertions(+), 75 deletions(-) diff --git a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala index c4163e3..6676f8b 100644 --- a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala +++ b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala @@ -1,5 +1,5 @@ /** - * Copyright (c) 2018 The University of Sheffield. All rights reserved. +eq * Copyright (c) 2018 The University of Sheffield. All rights reserved. * 2018 The University of Paris-Sud. All rights reserved. * * Redistribution and use in source and binary forms, with or without @@ -29,89 +29,241 @@ package com.logicalhacking.dof.converter -import java.io.{BufferedWriter, File, FileWriter} +import java.io.{ BufferedWriter, File, FileWriter } import IoUtils._ import scala.util.matching.Regex - object DofConverter { - val version = "0.0.0" - def convertLaTeX(string:String):Either[LaTeXLexerError,String] = { - LaTeXLexer(string) match { - case Left(err) => Left(err) - case Right(tokens) => Right(LaTeXLexer.toString(tokens)) - } + val version = "0.0.0" + + def deMarkUpArgList(tokens: List[LaTeXToken]): List[LaTeXToken] = { + tokens match { + case CURLYOPEN :: COMMAND("""\isacharprime""") :: CURLYCLOSE :: CURLYOPEN :: COMMAND("""\isacharprime""") :: CURLYCLOSE :: tail + => RAWTEXT(""""""") :: deMarkUpArgList(tail) + case CURLYOPEN :: COMMAND("""\isachardoublequoteopen""") :: CURLYCLOSE :: tail => RAWTEXT("""{""") :: deMarkUpArgList(tail) + case CURLYOPEN :: COMMAND("""\isachardoublequoteclose""") :: CURLYCLOSE :: tail => RAWTEXT("""}""") :: deMarkUpArgList(tail) + case t :: tail => t :: deMarkUpArgList(tail) + case Nil => Nil + } + } + + def deMarkUp(tokens: List[LaTeXToken]): List[LaTeXToken] = { + tokens match { + case CURLYOPEN :: COMMAND("""\isacharcolon""") :: CURLYCLOSE :: tail => RAWTEXT(""":""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharunderscore""") :: CURLYCLOSE :: tail => RAWTEXT("""_""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isadigit""") :: CURLYOPEN::n::CURLYCLOSE::CURLYCLOSE :: tail => n :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharcomma""") :: CURLYCLOSE :: tail => RAWTEXT(""",""") :: deMarkUp(tail) + case COMMAND("""\isanewline""") :: tail => deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isachardot""") :: CURLYCLOSE :: tail => RAWTEXT(""".""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharsemicolon""") :: CURLYCLOSE :: tail => RAWTEXT(""";""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharbackslash""") :: CURLYCLOSE :: tail => RAWTEXT("""\""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharslash""") :: CURLYCLOSE :: tail => RAWTEXT("""/""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharbraceleft""") :: CURLYCLOSE :: tail => RAWTEXT("""{""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharbraceright""") :: CURLYCLOSE :: tail => RAWTEXT("""}""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharparenleft""") :: CURLYCLOSE :: tail => RAWTEXT("""(""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharparenright""") :: CURLYCLOSE :: tail => RAWTEXT(""")""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharequal""") :: CURLYCLOSE :: tail => RAWTEXT("""=""") :: deMarkUp(tail) + case CURLYOPEN :: 
COMMAND("""\isacharminus""") :: CURLYCLOSE :: tail => RAWTEXT("""-""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharprime""") :: CURLYCLOSE :: tail => RAWTEXT("""'""") :: deMarkUp(tail) + case VSPACE :: tail => RAWTEXT(""" """) :: deMarkUp(tail) + case t :: tail => t :: deMarkUp(tail) + case Nil => Nil + } + } + + def convertIsaDofCommand(cmd: String, tokens: List[LaTeXToken]): List[LaTeXToken] = { + + def convertType(head: List[LaTeXToken], tail: List[LaTeXToken]): List[LaTeXToken] = { + + def split(head:List[LaTeXToken], tokens: List[LaTeXToken]):Tuple2[List[LaTeXToken], List[LaTeXToken]] = { + tokens match { + case CURLYOPEN::COMMAND("""\isacharcomma""")::CURLYCLOSE::tail => (head,tokens) + case CURLYCLOSE::COMMAND("""\isacharcomma""")::CURLYOPEN::tail => (head++(CURLYCLOSE::COMMAND("""\isacharcomma""")::CURLYOPEN::List()),tail) + case CURLYCLOSE::COMMAND("""\isacharbrackleft""")::CURLYOPEN::tail => (head++(CURLYCLOSE::COMMAND("""\isacharbrackleft""")::CURLYOPEN::List()),tail) + case BRACKETOPEN::tail => (head,BRACKETOPEN::tail) + case CURLYOPEN::COMMAND("""\isacharbrackright""")::CURLYCLOSE::tail => (head,tokens) + case t::tail => split(head++List(t), tail) + case t => (head,t) + } + } + tail match { + case CURLYOPEN::COMMAND("""\isacharcolon""")::CURLYCLOSE :: CURLYOPEN::COMMAND("""\isacharcolon""")::CURLYCLOSE :: tail => { + print ("SPLITTING: \n") + print ("head: "+head+"\n") + print ("tail: "+tail+"\n") + + + val (label, shead)= split(List(), head.reverse) + val (typ, stail) = split(List(), tail) + + print ("\nlabel = "+(label.reverse)+"\n") + print ("\nshead = "+(shead.reverse)+"\n") + print ("\nstail = "+stail+"\n") + print ("\ntyp = "+typ+"\n") + + (shead.reverse)++List(RAWTEXT("""label={"""))++(label.reverse)++List(RAWTEXT("""}, type={"""))++typ++List(RAWTEXT("""}"""))++stail + } + case t::tail => convertType(head++List(t), tail) + case t => t + } } - def convertFile(f: File):Option[(String,LaTeXLexerError)] = { - val texFileName = f.getAbsolutePath() - println("DOF Converter: converting " + texFileName - + " (Not yet fully implemented!)") - f.renameTo(new File(texFileName+".orig")) - - using(io.Source.fromFile(texFileName+".orig")) { - inputFile => - using(new BufferedWriter(new FileWriter(new File(texFileName), true))) { - outputFile => - outputFile.write("% This file was modified by the DOF LaTeX converter\n") - val input = inputFile.getLines.reduceLeft(_+"\n"+_) - - convertLaTeX(input) match { - case Left(err) => Some((texFileName, err)) - case Right(output) => { - outputFile.write(output) - None - } - } + + def delSpace(tokens: List[LaTeXToken]): List[LaTeXToken] = { + tokens match { + case VSPACE :: tail => delSpace(tail) + case COMMAND("""\isanewline""")::tail => delSpace(tail) + case COMMAND("""\newline""")::tail => delSpace(tail) + case RAWTEXT(""" """)::tail => delSpace(tail) + case RAWTEXT("\n")::tail => delSpace(tail) + case RAWTEXT("\t")::tail => delSpace(tail) + case VBACKSLASH::tail => delSpace(tail) + case tokens => tokens + } + } + + def backSpace(tokens: List[LaTeXToken]): List[LaTeXToken] = (delSpace(tokens.reverse)).reverse + + val sep=RAWTEXT("%\n") + + def parseIsaDofCmd(args: List[LaTeXToken], tokens: List[LaTeXToken]): Tuple2[List[LaTeXToken], List[LaTeXToken]] = { + (args, tokens) match { + case (args, COMMAND("""\isamarkupfalse""") :: tail) => parseIsaDofCmd(args, tail) + case (args, CURLYOPEN :: COMMAND("""\isachardoublequoteopen""") :: CURLYCLOSE :: CURLYOPEN :: COMMAND("""\isacharbrackleft""") :: CURLYCLOSE :: tail) + => 
parseIsaDofCmd(backSpace(args) ++ List(CURLYOPEN), tail) + case (args, CURLYOPEN :: COMMAND("""\isacharbrackright""") :: CURLYCLOSE :: CURLYOPEN :: COMMAND("""\isachardoublequoteclose""") :: CURLYCLOSE :: tail) + => parseIsaDofCmd(backSpace(args) ++ List(CURLYCLOSE), delSpace(tail)) + case (args, CURLYOPEN :: COMMAND("""\isacharbrackleft""") :: CURLYCLOSE :: tail) => parseIsaDofCmd(backSpace(args) ++List(sep) ++ List(BRACKETOPEN), tail) + case (args, CURLYOPEN :: COMMAND("""\isacharbrackright""") :: CURLYCLOSE :: tail) => parseIsaDofCmd(deMarkUpArgList(convertType(List(), args))++List(BRACKETCLOSE,sep), tail) + case (args, CURLYOPEN :: COMMAND("""\isacharverbatimopen""") :: CURLYCLOSE ::tail) => parseIsaDofCmd(args ++ List(CURLYOPEN), delSpace(tail)) + case (args, CURLYOPEN :: COMMAND("""\isacharverbatimclose""") :: CURLYCLOSE :: tail) => (deMarkUp(backSpace(args) ++ List(CURLYCLOSE)), sep::delSpace(tail)) + case (args, CURLYOPEN :: COMMAND("""\isacartoucheopen""") :: CURLYCLOSE ::tail) => parseIsaDofCmd(args ++ List(CURLYOPEN), delSpace(tail)) + case (args, CURLYOPEN :: COMMAND("""\isacartoucheclose""") :: CURLYCLOSE :: tail) => (deMarkUp(backSpace(args) ++ List(CURLYCLOSE)), sep::delSpace(tail)) + case (args, t :: tail) => parseIsaDofCmd(args ++ List(t), tail) + case (args, Nil) => (deMarkUp(args), Nil) + } + } + + + cmd match { + case """chapter""" => { + val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofChapter""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + } + case """section""" => { + val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofSection""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + } + case """subsection""" => { + val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + COMMAND("""\isaDofSubSection""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + } + case """subsubsection""" => { + val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofCSubSubSection""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + } + case """paragraph""" => { + val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofParagraph""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + } + case """text""" => { + val (dofText, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofText""") :: dofText ++ convertLaTeXTokenStream(tail) + } + case s => sep::COMMAND("""\isaDofUnknown{""" + s + """}""") ::sep:: convertLaTeXTokenStream(tokens) + } + } + + def convertLaTeXTokenStream(tokens: List[LaTeXToken]): List[LaTeXToken] = { + tokens match { + case Nil => Nil + case COMMAND("""\isacommand""") :: CURLYOPEN :: RAWTEXT(cmd) :: CURLYOPEN + :: COMMAND("""\isacharasterisk""") :: CURLYCLOSE :: CURLYCLOSE :: ts => convertIsaDofCommand(cmd, ts) + case t :: ts => t :: convertLaTeXTokenStream(ts) + } + } + + def convertLaTeX(string: String): Either[LaTeXLexerError, String] = { + LaTeXLexer(string) match { + case Left(err) => Left(err) + case Right(tokens) => Right(LaTeXLexer.toString(convertLaTeXTokenStream(tokens))) + } + } + + def convertFile(f: File): Option[(String, LaTeXLexerError)] = { + val texFileName = f.getAbsolutePath() + println("DOF Converter: converting " + texFileName + + " (Not yet fully implemented!)") + f.renameTo(new File(texFileName + ".orig")) + + using(io.Source.fromFile(texFileName + ".orig")) { + inputFile => + using(new BufferedWriter(new FileWriter(new File(texFileName), true))) { + outputFile => + outputFile.write("% This file was modified by the DOF LaTeX converter, 
version " + version + "\n") + val input = inputFile.getLines.reduceLeft(_ + "\n" + _) + + convertLaTeX(input) match { + case Left(err) => Some((texFileName, err)) + case Right(output) => { + outputFile.write(output) + None + } } } } - - def processArgs(args: List[String]):Option[List[String]] = { - def printVersion() = { - println("DOF LaTeX converter version "+version) - } - def printUsage() = { - println("Usage:") - println(" scala dof_latex_converter.jar [OPTIONS] [directory ...]") - println("") - println("Options:") - println(" --version, -v print version and exit") - println(" --help, -h print usage inforamtion and exit") - } - args match { - case Nil => Some(List[String]()) - case "-v"::Nil => printVersion(); None - case "--version"::Nil => printVersion(); None - case "-h"::tail => printUsage(); None - case "--help"::tail => printUsage(); None - case file::tail => processArgs(tail) match { - case Some(files) => Some(file::files) - case None => None - } - case _ => printUsage();None - } - } - - def main(args: Array[String]): Unit = { - val directories = processArgs(args.toList) match { - case None => System.exit(1); List[String]() - case Some(Nil) => List[String](".") - case Some(l) => l - } + } - val texFiles = directories.map(dir => recursiveListFiles(new File(dir), new Regex("\\.tex$")) - .filterNot(_.length() == 0)).flatten - - println(texFiles) - val errors = texFiles.map(file => convertFile(file)).flatten - if(!errors.isEmpty) { - println() - println("DOF LaTeX converter error(s):") - println("=============================") - errors.map{case (file:String, err:LaTeXLexerError) => println(file + ": " + err)} - System.exit(1) - } - System.exit(0) + def processArgs(args: List[String]): Option[List[String]] = { + def printVersion() = { + println("DOF LaTeX converter version " + version) } + def printUsage() = { + println("Usage:") + println(" scala dof_latex_converter.jar [OPTIONS] [directory ...]") + println("") + println("Options:") + println(" --version, -v print version and exit") + println(" --help, -h print usage inforamtion and exit") + } + args match { + case Nil => Some(List[String]()) + case "-v" :: Nil => + printVersion(); None + case "--version" :: Nil => + printVersion(); None + case "-h" :: tail => + printUsage(); None + case "--help" :: tail => + printUsage(); None + case file :: tail => processArgs(tail) match { + case Some(files) => Some(file :: files) + case None => None + } + case _ => printUsage(); None + } + } + + def main(args: Array[String]): Unit = { + val directories = processArgs(args.toList) match { + case None => + System.exit(1); List[String]() + case Some(Nil) => List[String](".") + case Some(l) => l + } + + val texFiles = directories.map(dir => recursiveListFiles(new File(dir), new Regex("\\.tex$")) + .filterNot(_.length() == 0)).flatten + + println(texFiles) + val errors = texFiles.map(file => convertFile(file)).flatten + if (!errors.isEmpty) { + println() + println("DOF LaTeX converter error(s):") + println("=============================") + errors.map { case (file: String, err: LaTeXLexerError) => println(file + ": " + err) } + System.exit(1) + } + System.exit(0) + } } From f5ef91fb7200a2694254f88f76bd73041c4ff8a4 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 28 Apr 2018 21:45:02 +0100 Subject: [PATCH 4/7] Changed type of ORCID (non-option type, just string). 
--- examples/simple/Article.thy | 2 +- ontologies/scholarly_paper.thy | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/simple/Article.thy b/examples/simple/Article.thy index 10b6733..62df422 100644 --- a/examples/simple/Article.thy +++ b/examples/simple/Article.thy @@ -79,7 +79,7 @@ term "scholarly_paper.author.affiliation_update" term "scholarly_paper.abstract.keyword_list_update" term "scholarly_paper.introduction.comment_update" -term "\author.tag_attribute=undefined,email=''dfg'',orcid=None,affiliation=undefined\" +term "\author.tag_attribute=undefined,email=''dfg'',orcid='''',affiliation=undefined\" definition HORX where "HORX = affiliation(\author.tag_attribute=undefined,email=''dfg'',orcid=None,affiliation=undefined\\affiliation:=''e''\) " diff --git a/ontologies/scholarly_paper.thy b/ontologies/scholarly_paper.thy index 08fd9c0..f652eee 100644 --- a/ontologies/scholarly_paper.thy +++ b/ontologies/scholarly_paper.thy @@ -14,7 +14,7 @@ doc_class subtitle = doc_class author = email :: "string" - orcid :: "string option" <= "None" + orcid :: "string" affiliation :: "string" doc_class abstract = From 51d4d71e9034c130acb4db5844b2b99cc6a378a2 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 28 Apr 2018 21:49:50 +0100 Subject: [PATCH 5/7] Generate type specific LaTeX commands. --- .../dof/converter/DofConverter.scala | 61 +++++++++---------- 1 file changed, 29 insertions(+), 32 deletions(-) diff --git a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala index 6676f8b..c83e29c 100644 --- a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala +++ b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala @@ -73,7 +73,7 @@ object DofConverter { def convertIsaDofCommand(cmd: String, tokens: List[LaTeXToken]): List[LaTeXToken] = { - def convertType(head: List[LaTeXToken], tail: List[LaTeXToken]): List[LaTeXToken] = { + def convertType(head: List[LaTeXToken], tail: List[LaTeXToken]): Tuple2[String,List[LaTeXToken]] = { def split(head:List[LaTeXToken], tokens: List[LaTeXToken]):Tuple2[List[LaTeXToken], List[LaTeXToken]] = { tokens match { @@ -88,23 +88,16 @@ object DofConverter { } tail match { case CURLYOPEN::COMMAND("""\isacharcolon""")::CURLYCLOSE :: CURLYOPEN::COMMAND("""\isacharcolon""")::CURLYCLOSE :: tail => { - print ("SPLITTING: \n") - print ("head: "+head+"\n") - print ("tail: "+tail+"\n") - - val (label, shead)= split(List(), head.reverse) - val (typ, stail) = split(List(), tail) - - print ("\nlabel = "+(label.reverse)+"\n") - print ("\nshead = "+(shead.reverse)+"\n") - print ("\nstail = "+stail+"\n") - print ("\ntyp = "+typ+"\n") - - (shead.reverse)++List(RAWTEXT("""label={"""))++(label.reverse)++List(RAWTEXT("""}, type={"""))++typ++List(RAWTEXT("""}"""))++stail + val (typ, stail) = split(List(), tail) + val typstring = typ match { + case RAWTEXT(s)::Nil => s.capitalize + case _ => "" + } + (typstring,(shead.reverse)++List(RAWTEXT("""label={"""))++(label.reverse)++List(RAWTEXT("""}, type={"""))++typ++List(RAWTEXT("""}"""))++stail) } case t::tail => convertType(head++List(t), tail) - case t => t + case t => ("",t) } } @@ -126,7 +119,7 @@ object DofConverter { val sep=RAWTEXT("%\n") - def parseIsaDofCmd(args: List[LaTeXToken], tokens: List[LaTeXToken]): Tuple2[List[LaTeXToken], List[LaTeXToken]] = { + def parseIsaDofCmd(args: 
List[LaTeXToken], tokens: List[LaTeXToken]): Tuple3[String,List[LaTeXToken], List[LaTeXToken]] = { (args, tokens) match { case (args, COMMAND("""\isamarkupfalse""") :: tail) => parseIsaDofCmd(args, tail) case (args, CURLYOPEN :: COMMAND("""\isachardoublequoteopen""") :: CURLYCLOSE :: CURLYOPEN :: COMMAND("""\isacharbrackleft""") :: CURLYCLOSE :: tail) @@ -134,41 +127,45 @@ object DofConverter { case (args, CURLYOPEN :: COMMAND("""\isacharbrackright""") :: CURLYCLOSE :: CURLYOPEN :: COMMAND("""\isachardoublequoteclose""") :: CURLYCLOSE :: tail) => parseIsaDofCmd(backSpace(args) ++ List(CURLYCLOSE), delSpace(tail)) case (args, CURLYOPEN :: COMMAND("""\isacharbrackleft""") :: CURLYCLOSE :: tail) => parseIsaDofCmd(backSpace(args) ++List(sep) ++ List(BRACKETOPEN), tail) - case (args, CURLYOPEN :: COMMAND("""\isacharbrackright""") :: CURLYCLOSE :: tail) => parseIsaDofCmd(deMarkUpArgList(convertType(List(), args))++List(BRACKETCLOSE,sep), tail) + case (args, CURLYOPEN :: COMMAND("""\isacharbrackright""") :: CURLYCLOSE :: tail) => { + val (typ,arglist) = convertType(List(), args) + val (_, t1, t2) = parseIsaDofCmd(deMarkUpArgList(arglist)++List(BRACKETCLOSE,sep), tail) + (typ,t1,t2) + } case (args, CURLYOPEN :: COMMAND("""\isacharverbatimopen""") :: CURLYCLOSE ::tail) => parseIsaDofCmd(args ++ List(CURLYOPEN), delSpace(tail)) - case (args, CURLYOPEN :: COMMAND("""\isacharverbatimclose""") :: CURLYCLOSE :: tail) => (deMarkUp(backSpace(args) ++ List(CURLYCLOSE)), sep::delSpace(tail)) + case (args, CURLYOPEN :: COMMAND("""\isacharverbatimclose""") :: CURLYCLOSE :: tail) => ("",deMarkUp(backSpace(args) ++ List(CURLYCLOSE)), sep::delSpace(tail)) case (args, CURLYOPEN :: COMMAND("""\isacartoucheopen""") :: CURLYCLOSE ::tail) => parseIsaDofCmd(args ++ List(CURLYOPEN), delSpace(tail)) - case (args, CURLYOPEN :: COMMAND("""\isacartoucheclose""") :: CURLYCLOSE :: tail) => (deMarkUp(backSpace(args) ++ List(CURLYCLOSE)), sep::delSpace(tail)) + case (args, CURLYOPEN :: COMMAND("""\isacartoucheclose""") :: CURLYCLOSE :: tail) => ("",deMarkUp(backSpace(args) ++ List(CURLYCLOSE)), sep::delSpace(tail)) case (args, t :: tail) => parseIsaDofCmd(args ++ List(t), tail) - case (args, Nil) => (deMarkUp(args), Nil) + case (args, Nil) => ("",deMarkUp(args), Nil) } } cmd match { case """chapter""" => { - val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) - sep::COMMAND("""\isaDofChapter""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + val (typ,sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofChapter"""+typ) :: sectionArgs ++ convertLaTeXTokenStream(tail) } case """section""" => { - val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) - sep::COMMAND("""\isaDofSection""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + val (typ,sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofSection"""+typ) :: sectionArgs ++ convertLaTeXTokenStream(tail) } case """subsection""" => { - val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) - COMMAND("""\isaDofSubSection""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + val (typ,sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + COMMAND("""\isaDofSubSection"""+typ) :: sectionArgs ++ convertLaTeXTokenStream(tail) } case """subsubsection""" => { - val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) - sep::COMMAND("""\isaDofCSubSubSection""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + val (typ,sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofCSubSubSection"""+typ) :: sectionArgs ++ 
convertLaTeXTokenStream(tail) } case """paragraph""" => { - val (sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) - sep::COMMAND("""\isaDofParagraph""") :: sectionArgs ++ convertLaTeXTokenStream(tail) + val (typ,sectionArgs, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofParagraph"""+typ) :: sectionArgs ++ convertLaTeXTokenStream(tail) } case """text""" => { - val (dofText, tail) = parseIsaDofCmd(Nil, tokens) - sep::COMMAND("""\isaDofText""") :: dofText ++ convertLaTeXTokenStream(tail) + val (typ,dofText, tail) = parseIsaDofCmd(Nil, tokens) + sep::COMMAND("""\isaDofText"""+typ) :: dofText ++ convertLaTeXTokenStream(tail) } case s => sep::COMMAND("""\isaDofUnknown{""" + s + """}""") ::sep:: convertLaTeXTokenStream(tokens) } From e6b5d8bf5cb0d0b5e2e8095eec4b80294afdfd10 Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sat, 28 Apr 2018 22:52:29 +0100 Subject: [PATCH 6/7] Re-map isacharplus. --- .../scala/com/logicalhacking/dof/converter/DofConverter.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala index c83e29c..5ebef60 100644 --- a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala +++ b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala @@ -46,7 +46,7 @@ object DofConverter { case Nil => Nil } } - + def deMarkUp(tokens: List[LaTeXToken]): List[LaTeXToken] = { tokens match { case CURLYOPEN :: COMMAND("""\isacharcolon""") :: CURLYCLOSE :: tail => RAWTEXT(""":""") :: deMarkUp(tail) @@ -64,6 +64,7 @@ object DofConverter { case CURLYOPEN :: COMMAND("""\isacharparenright""") :: CURLYCLOSE :: tail => RAWTEXT(""")""") :: deMarkUp(tail) case CURLYOPEN :: COMMAND("""\isacharequal""") :: CURLYCLOSE :: tail => RAWTEXT("""=""") :: deMarkUp(tail) case CURLYOPEN :: COMMAND("""\isacharminus""") :: CURLYCLOSE :: tail => RAWTEXT("""-""") :: deMarkUp(tail) + case CURLYOPEN :: COMMAND("""\isacharplus""") :: CURLYCLOSE :: tail => RAWTEXT("""+""") :: deMarkUp(tail) case CURLYOPEN :: COMMAND("""\isacharprime""") :: CURLYCLOSE :: tail => RAWTEXT("""'""") :: deMarkUp(tail) case VSPACE :: tail => RAWTEXT(""" """) :: deMarkUp(tail) case t :: tail => t :: deMarkUp(tail) From cc84c364a4b348aad4dc6cab3db4ef8cb28e1cad Mon Sep 17 00:00:00 2001 From: "Achim D. Brucker" Date: Sun, 29 Apr 2018 00:11:33 +0100 Subject: [PATCH 7/7] Remove quotes. 
--- .../scala/com/logicalhacking/dof/converter/DofConverter.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala index 5ebef60..7003ce2 100644 --- a/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala +++ b/document-generator/converter/src/main/scala/com/logicalhacking/dof/converter/DofConverter.scala @@ -39,7 +39,7 @@ object DofConverter { def deMarkUpArgList(tokens: List[LaTeXToken]): List[LaTeXToken] = { tokens match { case CURLYOPEN :: COMMAND("""\isacharprime""") :: CURLYCLOSE :: CURLYOPEN :: COMMAND("""\isacharprime""") :: CURLYCLOSE :: tail - => RAWTEXT(""""""") :: deMarkUpArgList(tail) + => RAWTEXT("") :: deMarkUpArgList(tail) case CURLYOPEN :: COMMAND("""\isachardoublequoteopen""") :: CURLYCLOSE :: tail => RAWTEXT("""{""") :: deMarkUpArgList(tail) case CURLYOPEN :: COMMAND("""\isachardoublequoteclose""") :: CURLYCLOSE :: tail => RAWTEXT("""}""") :: deMarkUpArgList(tail) case t :: tail => t :: deMarkUpArgList(tail)
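A note on the rewriting idiom used throughout patches 3 to 7: DofConverter walks the lexer's token stream and pattern matches on its head, collapsing Isabelle-generated macro groups such as {\isacharcolon} back into plain text before LaTeXLexer.toString re-serialises the result. The following stand-alone Scala sketch illustrates that idiom under simplifying assumptions: the token type is reduced to the few constructors needed here (the real LaTeXToken ADT and lexer live in the converter sources), and only two of the many \isachar... cases are handled.

object DeMarkUpSketch {
  // Minimal stand-in for the converter's LaTeXToken ADT (illustration only).
  sealed trait LaTeXToken
  case object CURLYOPEN extends LaTeXToken
  case object CURLYCLOSE extends LaTeXToken
  final case class COMMAND(name: String) extends LaTeXToken
  final case class RAWTEXT(text: String) extends LaTeXToken

  // Collapse "{\isacharcolon}" and "{\isacharunderscore}" groups into raw text,
  // leaving every other token untouched: the same head-pattern recursion as
  // deMarkUp in the patches above, restricted to two cases.
  def deMarkUp(tokens: List[LaTeXToken]): List[LaTeXToken] = tokens match {
    case CURLYOPEN :: COMMAND("""\isacharcolon""") :: CURLYCLOSE :: tail =>
      RAWTEXT(":") :: deMarkUp(tail)
    case CURLYOPEN :: COMMAND("""\isacharunderscore""") :: CURLYCLOSE :: tail =>
      RAWTEXT("_") :: deMarkUp(tail)
    case t :: tail => t :: deMarkUp(tail)
    case Nil => Nil
  }

  def main(args: Array[String]): Unit = {
    val input = List(CURLYOPEN, COMMAND("""\isacharcolon"""), CURLYCLOSE, RAWTEXT("x"))
    println(deMarkUp(input)) // prints List(RAWTEXT(:), RAWTEXT(x))
  }
}

The real converter chains many more such cases (deMarkUpArgList, deMarkUp, parseIsaDofCmd) over the full token stream; the type-specific commands introduced in patch 5 are then emitted by appending the capitalised ontology type to the generated \isaDof... macro name.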