Compare commits
871 Commits
v1.1.x/Isa
...
main
Author | SHA1 | Date |
---|---|---|
Achim D. Brucker | 9feeb63665 | |
Achim D. Brucker | 55e42142fa | |
Achim D. Brucker | 26774fc053 | |
Burkhart Wolff | 7d6048bf64 | |
Burkhart Wolff | 231892cd23 | |
Nicolas Méric | c945da75fa | |
Nicolas Méric | b554f20a5c | |
Burkhart Wolff | 734c1953bd | |
Burkhart Wolff | a735e9a1f2 | |
Burkhart Wolff | 7c2a6099f8 | |
Burkhart Wolff | 6dfefc6b4e | |
Burkhart Wolff | 3235410af3 | |
Burkhart Wolff | 4745c58803 | |
Burkhart Wolff | 28d1fa926e | |
Burkhart Wolff | b651116af3 | |
Nicolas Méric | 93ef94cddb | |
Nicolas Méric | 20e90f688f | |
Burkhart Wolff | 02b6d0b048 | |
Burkhart Wolff | f5a94ca962 | |
Burkhart Wolff | 6b2879d1d6 | |
Nicolas Méric | 51d93e38f8 | |
Nicolas Méric | 7791538b54 | |
Burkhart Wolff | dee3b47d06 | |
Burkhart Wolff | 0bf21336f1 | |
Burkhart Wolff | 2b12e53cf4 | |
Burkhart Wolff | c2eea7696b | |
Burkhart Wolff | 20f163eba9 | |
Burkhart Wolff | 2d2cb6c8ce | |
Achim D. Brucker | f61e107515 | |
Burkhart Wolff | d3aefa63b1 | |
Achim D. Brucker | f0c379a5d2 | |
Achim D. Brucker | 5d5eef1a46 | |
Nicolas Méric | 291b2e258f | |
Nicolas Méric | 10b98b660f | |
Nicolas Méric | a1677384b3 | |
Nicolas Méric | 46b094939a | |
Nicolas Méric | 42da18cf3a | |
Nicolas Méric | 1740898171 | |
Achim D. Brucker | aa0a2c5f6a | |
Achim D. Brucker | a79a3f539d | |
Achim D. Brucker | ab05663738 | |
Nicolas Méric | 41dd3e4949 | |
Achim D. Brucker | f44b5458f2 | |
Achim D. Brucker | c4875da7cb | |
Achim D. Brucker | a286e4b1bc | |
Achim D. Brucker | 341523b223 | |
Nicolas Méric | 207029e70e | |
Achim D. Brucker | 645a3edcec | |
Achim D. Brucker | 5a760b9e2c | |
Achim D. Brucker | 572ec2d0bb | |
Nicolas Méric | d59dabaf7c | |
Nicolas Méric | e78a114879 | |
Burkhart Wolff | 443d63f3b5 | |
Nicolas Méric | 331fcd07f0 | |
Nicolas Méric | 5dc20889a8 | |
Burkhart Wolff | e2b3184a77 | |
Nicolas Méric | 7b54bf5ca5 | |
Nicolas Méric | baa36b10c1 | |
Nicolas Méric | c57ce6292b | |
Achim D. Brucker | b698572146 | |
Achim D. Brucker | e12abadc94 | |
Achim D. Brucker | 792fd60055 | |
Nicolas Méric | ec7297f1d3 | |
Achim D. Brucker | e4ee3ff240 | |
Achim D. Brucker | 4393042f2c | |
Achim D. Brucker | fef7b9d60b | |
Achim D. Brucker | ab7d695a77 | |
Achim D. Brucker | c063287947 | |
Achim D. Brucker | 342984df3b | |
Achim D. Brucker | 5a8e79fb7e | |
Achim D. Brucker | d7f9f10ef1 | |
Achim D. Brucker | 0a3259fbca | |
Nicolas Méric | ca7cdec9b4 | |
Nicolas Méric | 43aad517b9 | |
Nicolas Méric | 8d6c8929e2 | |
Nicolas Méric | b447a480fb | |
Nicolas Méric | a78397693e | |
Nicolas Méric | 9812bc0517 | |
Nicolas Méric | b364880bfc | |
Burkhart Wolff | d835665b6b | |
Burkhart Wolff | 1a8cdb8014 | |
Nicolas Méric | 5a7cbf2da5 | |
Nicolas Méric | 7f7780f8fd | |
Nicolas Méric | 889805cccc | |
Nicolas Méric | 5a07aa2453 | |
Nicolas Méric | cef4086029 | |
Nicolas Méric | 9df276ac6f | |
Nicolas Méric | b4f1b8c321 | |
Nicolas Méric | 59b082d09d | |
Achim D. Brucker | 1869a96b2d | |
Achim D. Brucker | e95c6386af | |
Achim D. Brucker | 23a85cc8c2 | |
Achim D. Brucker | ddcfb5f708 | |
Achim D. Brucker | 02d13cdcad | |
Achim D. Brucker | d353ff07cc | |
Achim D. Brucker | 38035785da | |
Achim D. Brucker | 7e7c197ac3 | |
Nicolas Méric | 4f8e588138 | |
Nicolas Méric | 2c0b51779e | |
Nicolas Méric | 350ff6fe76 | |
Achim D. Brucker | c803474950 | |
Achim D. Brucker | e17f09e624 | |
Achim D. Brucker | 8051d4233e | |
Nicolas Méric | b4b63ce989 | |
Achim D. Brucker | 2dc16b263f | |
Achim D. Brucker | 5754bb4adc | |
Achim D. Brucker | c7debc577b | |
Achim D. Brucker | 9c94593f45 | |
Nicolas Méric | 4d89250606 | |
Achim D. Brucker | 3f06320034 | |
Achim D. Brucker | 49faed4faf | |
Achim D. Brucker | 1a22441f3e | |
Achim D. Brucker | df1b2c9904 | |
Achim D. Brucker | 9064cd3f62 | |
Nicolas Méric | f5b8d4348b | |
Achim D. Brucker | d225a3253c | |
Achim D. Brucker | 2ee0bc5074 | |
Achim D. Brucker | 9683ea7efa | |
Burkhart Wolff | bce097b1d6 | |
Nicolas Méric | 65d6fb946d | |
Achim D. Brucker | 060f2aca89 | |
Nicolas Méric | ba7c0711a8 | |
Achim D. Brucker | 4adbe4ce81 | |
Achim D. Brucker | 7e698a9e69 | |
Achim D. Brucker | 2569db05c3 | |
Nicolas Méric | cd311d8a3a | |
Achim D. Brucker | fb69f05ac0 | |
Achim D. Brucker | 1986d0bcbd | |
Achim D. Brucker | bbac65e233 | |
Achim D. Brucker | 9cd34d7815 | |
Achim D. Brucker | 641bea4a58 | |
Burkhart Wolff | d0cd28a45c | |
Burkhart Wolff | db4290428f | |
Burkhart Wolff | 43da6d3197 | |
Achim D. Brucker | a93046beac | |
Nicolas Méric | b8282b771e | |
Burkhart Wolff | 1cfc4ac88a | |
Burkhart Wolff | e9044e8d5a | |
Achim D. Brucker | 6bab138af6 | |
Achim D. Brucker | fcc25f7450 | |
Burkhart Wolff | e97cca1a2c | |
Burkhart Wolff | 33fd1453a0 | |
Burkhart Wolff | 543c647bcc | |
Burkhart Wolff | f7141f0df8 | |
Burkhart Wolff | 514ebee17c | |
Burkhart Wolff | bdc8477f38 | |
Nicolas Méric | 7e01b7de97 | |
Burkhart Wolff | 8bdd40fc20 | |
Idir Ait-Sadoune | 9cc03c0816 | |
Idir Ait-Sadoune | e9cfcdbcbc | |
Burkhart Wolff | 36740bf72b | |
Burkhart Wolff | b8da1a304a | |
Burkhart Wolff | 5b519fcbe6 | |
Burkhart Wolff | 50da7670cf | |
Achim D. Brucker | 09d1b27f10 | |
Achim D. Brucker | 34e23b314f | |
Burkhart Wolff | 0aa9f1ff25 | |
Achim D. Brucker | 3f8fc4f16f | |
Achim D. Brucker | b62b391410 | |
Achim D. Brucker | 41a4f38478 | |
Burkhart Wolff | ca8671ee1c | |
Burkhart Wolff | 9e210b487a | |
Burkhart Wolff | 6317294721 | |
Burkhart Wolff | 762680a20c | |
Burkhart Wolff | 850244844b | |
Burkhart Wolff | 322d70ef69 | |
Burkhart Wolff | b04ff7e31a | |
Burkhart Wolff | 7ba220e417 | |
Burkhart Wolff | 713a24615f | |
Burkhart Wolff | 7ffdcbc569 | |
Achim D. Brucker | 43ce393e4a | |
Burkhart Wolff | 4326492b39 | |
Burkhart Wolff | 1e7f6a7c18 | |
Achim D. Brucker | a087e94ebe | |
Achim D. Brucker | 78cb606268 | |
Achim D. Brucker | c40a5a74c1 | |
Achim D. Brucker | fc214fc391 | |
Burkhart Wolff | f613811154 | |
Burkhart Wolff | 4c66716999 | |
Achim D. Brucker | 639abb6cf5 | |
Achim D. Brucker | 2c00f4b8db | |
Burkhart Wolff | d9e2f251d2 | |
Burkhart Wolff | cec21c9935 | |
Achim D. Brucker | 640a867f28 | |
Achim D. Brucker | 0c654e2634 | |
Achim D. Brucker | 01bcc48c79 | |
Achim D. Brucker | c3aaaf9ebb | |
Achim D. Brucker | 47e8fc805f | |
Achim D. Brucker | 02bf9620f6 | |
Nicolas Méric | 18be1ba5f5 | |
Nicolas Méric | 93c722a41b | |
Nicolas Méric | 0f48f356df | |
Achim D. Brucker | 870a4eec57 | |
Achim D. Brucker | 4df233e9f4 | |
Burkhart Wolff | 5d7b50ca7f | |
Burkhart Wolff | 1ebfaccb50 | |
Burkhart Wolff | 7ce3fdf768 | |
Burkhart Wolff | db130bd6ce | |
Achim D. Brucker | 496a850700 | |
Achim D. Brucker | 101f96a261 | |
Achim D. Brucker | 49aa29ee68 | |
Burkhart Wolff | 2919f5d2a5 | |
Burkhart Wolff | 6cafcce536 | |
Burkhart Wolff | ebce149d6a | |
Burkhart Wolff | 6984b9ae03 | |
Burkhart Wolff | 74e2341971 | |
Burkhart Wolff | 16caefc7be | |
Achim D. Brucker | 0d74645d2e | |
Burkhart Wolff | f906d45d48 | |
Burkhart Wolff | 761a336a7a | |
Nicolas Méric | b3f396fb08 | |
Burkhart Wolff | 77aeb3b7ca | |
Burkhart Wolff | 81208f73a8 | |
Burkhart Wolff | f093bfc961 | |
Burkhart Wolff | 2c7df482e8 | |
Burkhart Wolff | c9de5f2293 | |
Nicolas Méric | c6dc848438 | |
Burkhart Wolff | 1acf863845 | |
Burkhart Wolff | a6aca1407e | |
Burkhart Wolff | 4c953fb954 | |
Nicolas Méric | 77e8844687 | |
Nicolas Méric | 939715aba9 | |
Burkhart Wolff | d809211481 | |
Achim D. Brucker | 480272ad86 | |
Achim D. Brucker | d277fa2aed | |
Achim D. Brucker | 9318ea55a0 | |
Achim D. Brucker | 3408b90f89 | |
Burkhart Wolff | dd0a9981a3 | |
Achim D. Brucker | e549bcb23c | |
Achim D. Brucker | 04c8c8d150 | |
Achim D. Brucker | a5885b3eb5 | |
Achim D. Brucker | 4cdb6d725b | |
Achim D. Brucker | 486ae2db97 | |
Burkhart Wolff | fb8da62182 | |
Burkhart Wolff | 6c588c3fe4 | |
Burkhart Wolff | 3ab6f665eb | |
Burkhart Wolff | 0c8bc2cab3 | |
Burkhart Wolff | 20ac16196a | |
Burkhart Wolff | d62cd04e26 | |
Burkhart Wolff | 96d20c127f | |
Burkhart Wolff | 394189e9e0 | |
Burkhart Wolff | 1f79e37d9b | |
Burkhart Wolff | b43de570a4 | |
Burkhart Wolff | debddc45d2 | |
Burkhart Wolff | 3de5548642 | |
Burkhart Wolff | 4157954506 | |
Burkhart Wolff | 25473b177b | |
Nicolas Méric | 36cd3817cf | |
Burkhart Wolff | cb2b0dc230 | |
Burkhart Wolff | c82a3a7e70 | |
Burkhart Wolff | 8c6abf2613 | |
Achim D. Brucker | 07444efd21 | |
Achim D. Brucker | c203327191 | |
Nicolas Méric | a90202953b | |
Achim D. Brucker | 698e6ab169 | |
Achim D. Brucker | 320614004e | |
Burkhart Wolff | 91ff9c67af | |
Burkhart Wolff | 1838baecb9 | |
Nicolas Méric | ef29a9759f | |
Nicolas Méric | 5336e0518f | |
Burkhart Wolff | accc4f40b4 | |
Burkhart Wolff | bbb4b1749c | |
Burkhart Wolff | 4ba0c705b4 | |
Burkhart Wolff | 5d89bcc86a | |
Burkhart Wolff | 07527dbe11 | |
Burkhart Wolff | c0dc60d49e | |
Burkhart Wolff | 81a50c6a9e | |
Burkhart Wolff | 5628eaa2dc | |
Nicolas Méric | 230247de1a | |
Burkhart Wolff | 0834f938a9 | |
Burkhart Wolff | 63c2acfece | |
Burkhart Wolff | 3a4db69184 | |
Burkhart Wolff | 3fc4688f69 | |
Burkhart Wolff | 7dbd016b5d | |
Burkhart Wolff | 3b446c874d | |
Burkhart Wolff | 4de23de5ee | |
Nicolas Méric | 4bd31be71d | |
Nicolas Méric | 826fc489b7 | |
Nicolas Méric | ddcbf76353 | |
Nicolas Méric | 5ad6c0d328 | |
Nicolas Méric | 34d5a194ee | |
Nicolas Méric | 8b09b0c135 | |
Achim D. Brucker | 5292154687 | |
Achim D. Brucker | caf966e3df | |
Achim D. Brucker | 6a1343fd06 | |
Achim D. Brucker | a7db5cc344 | |
Nicolas Méric | de94ef196f | |
Nicolas Méric | c791be2912 | |
Achim D. Brucker | 44528e887d | |
Achim D. Brucker | b3097eaa79 | |
Achim D. Brucker | ecb1e88b78 | |
Achim D. Brucker | 75b39bc168 | |
Nicolas Méric | dde865520a | |
Nicolas Méric | 37afd975b3 | |
Burkhart Wolff | d2a1808fa8 | |
Burkhart Wolff | 94543a86e4 | |
Burkhart Wolff | af096e56fc | |
Burkhart Wolff | 68c1046918 | |
Achim D. Brucker | 1229db1432 | |
Nicolas Méric | 3670d30ddf | |
Burkhart Wolff | 542c38a89c | |
Nicolas Méric | b96302f676 | |
Burkhart Wolff | f60aebccb3 | |
Burkhart Wolff | 224a320165 | |
Nicolas Méric | 92e7ee017a | |
Burkhart Wolff | 8e4ac3f118 | |
Burkhart Wolff | 9fae991ea0 | |
Burkhart Wolff | 6e5fa2d91b | |
Nicolas Méric | b1a0d5d739 | |
Nicolas Méric | 10b90c823f | |
Nicolas Méric | ef8ffda414 | |
Achim D. Brucker | 69485fd497 | |
Achim D. Brucker | f29d888068 | |
Achim D. Brucker | cc805cadbe | |
Achim D. Brucker | 5bf0b00fbc | |
Achim D. Brucker | cc3e6566ca | |
Achim D. Brucker | c297b5cddd | |
Achim D. Brucker | 47c6ce78be | |
Burkhart Wolff | 48c6457f63 | |
Burkhart Wolff | ef3eee03c9 | |
Burkhart Wolff | 853158c916 | |
Burkhart Wolff | 280feb8653 | |
Nicolas Méric | 709187d415 | |
Nicolas Méric | 289d47ee56 | |
Achim D. Brucker | 9c324fde70 | |
Achim D. Brucker | 22abad9026 | |
Nicolas Méric | 40e7285f0a | |
Achim D. Brucker | 3b33166f55 | |
Burkhart Wolff | 0f3beb846e | |
Nicolas Méric | 8e6cb3b991 | |
Achim D. Brucker | baf1d1b629 | |
Achim D. Brucker | de4c7a5168 | |
Achim D. Brucker | 6fe23c16be | |
Achim D. Brucker | 113b3e79bf | |
Achim D. Brucker | daea6333f1 | |
Achim D. Brucker | 53867fb24f | |
Burkhart Wolff | 0f5e7f582b | |
Burkhart Wolff | 0b256adee9 | |
Burkhart Wolff | cbd197e4d8 | |
Burkhart Wolff | 5411aa4d6b | |
Burkhart Wolff | 1895d3b52c | |
Burkhart Wolff | 5bee1fee8f | |
Burkhart Wolff | a64fca4774 | |
Burkhart Wolff | bf4c3d618e | |
Achim D. Brucker | 684a775b07 | |
Achim D. Brucker | 9fe7b26a35 | |
Nicolas Méric | 511c6369dd | |
Achim D. Brucker | 2cb9156488 | |
Achim D. Brucker | ef87b1d81c | |
Nicolas Méric | 5b7a50ba5c | |
Achim D. Brucker | 69808755da | |
Achim D. Brucker | da6bc4277d | |
Achim D. Brucker | 229f7c49de | |
Achim D. Brucker | 3aa1b45837 | |
Achim D. Brucker | 990c6f7708 | |
Achim D. Brucker | 14dd368cd0 | |
Achim D. Brucker | 684e1144bd | |
Achim D. Brucker | 3a39028f1c | |
Achim D. Brucker | ae514aea18 | |
Achim D. Brucker | 9f5473505e | |
Achim D. Brucker | 0c732ec59f | |
Achim D. Brucker | f27150eb88 | |
Achim D. Brucker | bde86a1118 | |
Achim D. Brucker | be2eaab09b | |
Achim D. Brucker | 058324ab5d | |
Achim D. Brucker | 10b4eaf660 | |
Achim D. Brucker | c59858930d | |
Achim D. Brucker | 7ad7c664a3 | |
Achim D. Brucker | dd963a7e09 | |
Achim D. Brucker | 5f88def3be | |
Achim D. Brucker | dfcd00ca73 | |
Achim D. Brucker | e26b4e662e | |
Achim D. Brucker | 02332e8608 | |
Achim D. Brucker | 86152c374b | |
Achim D. Brucker | 233079ef5f | |
Achim D. Brucker | 8389d9ddbe | |
Achim D. Brucker | 85e6cd0372 | |
Achim D. Brucker | 9090772a8a | |
Achim D. Brucker | 070bd363ca | |
Achim D. Brucker | 8e65263093 | |
Achim D. Brucker | acb82477b5 | |
Achim D. Brucker | b90992121e | |
Nicolas Méric | 6a6259bf29 | |
Achim D. Brucker | fb049946c5 | |
Achim D. Brucker | 829915ae2c | |
Achim D. Brucker | 85f115196b | |
Achim D. Brucker | 873f5c79ab | |
Achim D. Brucker | 55f377da39 | |
Achim D. Brucker | 501ea118c2 | |
Achim D. Brucker | a055180b72 | |
Achim D. Brucker | d1c195db26 | |
Achim D. Brucker | 2481603ce1 | |
Achim D. Brucker | b9eeb9e9b8 | |
Achim D. Brucker | fa27d2425e | |
Achim D. Brucker | 8b9c65f6ef | |
Achim D. Brucker | f66b6187f8 | |
Achim D. Brucker | cf386892fc | |
Achim D. Brucker | b0879e98fd | |
Achim D. Brucker | f8399e0fb2 | |
Achim D. Brucker | 0c064b1c8a | |
Achim D. Brucker | 1e0eeea6f9 | |
Achim D. Brucker | 080d867587 | |
Achim D. Brucker | 3e41871b17 | |
Achim D. Brucker | be9ef5a122 | |
Achim D. Brucker | f0fac41148 | |
Achim D. Brucker | 47fa3590aa | |
Achim D. Brucker | fba9ca78e9 | |
Achim D. Brucker | 9287891483 | |
Achim D. Brucker | 30eb47d80c | |
Achim D. Brucker | 00eff9f819 | |
Achim D. Brucker | 73e3cb1098 | |
Achim D. Brucker | 64f4957679 | |
Achim D. Brucker | e4a8ad4227 | |
Achim D. Brucker | 60b1c4f4d4 | |
Achim D. Brucker | de1870fbee | |
Achim D. Brucker | f7b4cf67f7 | |
Achim D. Brucker | 97bf5aa1e3 | |
Achim D. Brucker | d766ac22df | |
Achim D. Brucker | ba90433700 | |
Achim D. Brucker | 762225d20d | |
Achim D. Brucker | aaeb793a51 | |
Achim D. Brucker | 38628c37dc | |
Achim D. Brucker | 43ccaf43f7 | |
Nicolas Méric | 848ce311e2 | |
Nicolas Méric | 6115f0de4a | |
Nicolas Méric | bdfea3ddb1 | |
Nicolas Méric | 9de18b148a | |
Nicolas Méric | 1459b8cfc3 | |
Nicolas Méric | 234ff18ec0 | |
Nicolas Méric | 55690bba33 | |
Nicolas Méric | 93509ab17d | |
Nicolas Méric | 1e09598d81 | |
Nicolas Méric | e01ec9fc21 | |
Nicolas Méric | 7c16d02979 | |
Nicolas Méric | 4a77347e40 | |
Nicolas Méric | 2398fc579a | |
Nicolas Méric | 821eefb230 | |
Nicolas Méric | 9b51844fad | |
Nicolas Méric | c440f9628f | |
Nicolas Méric | 5b3086bbe5 | |
Nicolas Méric | 7c0d2cee55 | |
Nicolas Méric | 7c6150affa | |
Nicolas Méric | ad4ad52b4e | |
Nicolas Méric | ba8227e6ab | |
Nicolas Méric | 20b0af740d | |
Nicolas Méric | 1379f8a671 | |
Achim D. Brucker | 8fdaafa295 | |
Nicolas Méric | 8513f7d267 | |
Nicolas Méric | 2b1a9d009e | |
Nicolas Méric | cd758d2c44 | |
Nicolas Méric | 8496963fec | |
Nicolas Méric | 72d8000f7b | |
Nicolas Méric | 17ec11b297 | |
Nicolas Méric | a96e17abf3 | |
Nicolas Méric | 74b60e47d5 | |
Nicolas Méric | a42dd4ea6c | |
Nicolas Méric | b162a24749 | |
Nicolas Méric | a9432c7b52 | |
Nicolas Méric | 9f28d4949e | |
Nicolas Méric | 885c23a138 | |
Nicolas Méric | a589d4cd47 | |
Burkhart Wolff | e1f143d151 | |
Burkhart Wolff | fd60cf2312 | |
Nicolas Méric | 73dfcd6c1e | |
Nicolas Méric | c0afe1105e | |
Burkhart Wolff | e414b97afb | |
Nicolas Méric | 0b2d28b547 | |
Nicolas Méric | 37d7ed7d17 | |
Nicolas Méric | 312734afbd | |
Burkhart Wolff | 8cee80d78e | |
Makarius Wenzel | ec0d525426 | |
Makarius Wenzel | 791990039b | |
Makarius Wenzel | 78d61390fe | |
Makarius Wenzel | ffcf1f3240 | |
Makarius Wenzel | 5471d873a9 | |
Makarius Wenzel | df37250a00 | |
Makarius Wenzel | 185daeb577 | |
Makarius Wenzel | 8037fd15f2 | |
Makarius Wenzel | afcd78610b | |
Makarius Wenzel | b8a9ef5118 | |
Makarius Wenzel | a4e75c8b12 | |
Makarius Wenzel | d20e9ccd22 | |
Makarius Wenzel | f2ee5d3780 | |
Makarius Wenzel | 44cae2e631 | |
Makarius Wenzel | 7b2bf35353 | |
Makarius Wenzel | e8c7fa6018 | |
Makarius Wenzel | b12e61511d | |
Makarius Wenzel | 3cac42e6cb | |
Makarius Wenzel | aee8ba1df1 | |
Makarius Wenzel | d93e1383d4 | |
Makarius Wenzel | 3d5d1e7476 | |
Makarius Wenzel | 4264e7cd15 | |
Makarius Wenzel | 96f4077c53 | |
Makarius Wenzel | d7fb39d7eb | |
Makarius Wenzel | b95826962f | |
Makarius Wenzel | 912d4bb49e | |
Makarius Wenzel | a6c1a2baa4 | |
Makarius Wenzel | bb5963c6e2 | |
Makarius Wenzel | cc3e2a51a4 | |
Makarius Wenzel | 9e4e5b49eb | |
Makarius Wenzel | b65ecbdbef | |
Makarius Wenzel | 3be2225dcf | |
Makarius Wenzel | f44f0af01c | |
Makarius Wenzel | 9a11baf840 | |
Makarius Wenzel | 48c167aa23 | |
Makarius Wenzel | 700a9bbfee | |
Makarius Wenzel | 73299941ad | |
Makarius Wenzel | 5a8c438c41 | |
Makarius Wenzel | 7772c73aaa | |
Makarius Wenzel | ca18453043 | |
Makarius Wenzel | 1a122b1a87 | |
Makarius Wenzel | 47d95c467e | |
Makarius Wenzel | bf3085d4c0 | |
Makarius Wenzel | 068e6e0411 | |
Makarius Wenzel | 09e9980691 | |
Makarius Wenzel | 94ce3fdec2 | |
Makarius Wenzel | 44819bff02 | |
Makarius Wenzel | a6ab1e101e | |
Makarius Wenzel | c29ec9641a | |
Nicolas Méric | 06833aa190 | |
Nicolas Méric | 4f0c7e1e95 | |
Nicolas Méric | 0040949cf8 | |
Nicolas Méric | e68c332912 | |
Burkhart Wolff | b2c4f40161 | |
Burkhart Wolff | 309952e0ce | |
Burkhart Wolff | 830e1b440a | |
Burkhart Wolff | 2149db9efc | |
Burkhart Wolff | 1547ace64b | |
Burkhart Wolff | 39acd61dfd | |
Burkhart Wolff | 29770b17ee | |
Achim D. Brucker | b4f4048cff | |
Achim D. Brucker | eac94f2a01 | |
Achim D. Brucker | ab1877ce8e | |
Achim D. Brucker | fc575a5be5 | |
Achim D. Brucker | 4e47c38860 | |
Achim D. Brucker | 943af164f4 | |
Achim D. Brucker | 873151b4f3 | |
Achim D. Brucker | 82645c2e8e | |
Achim D. Brucker | f09a2df943 | |
Achim D. Brucker | cfdbd18bfa | |
Achim D. Brucker | 0b807ea4bc | |
Makarius Wenzel | 516f5d2f79 | |
Makarius Wenzel | 5ac41a72ac | |
Makarius Wenzel | 15feeb7d92 | |
Makarius Wenzel | 0c8a0e1d63 | |
Burkhart Wolff | 0aec98b95a | |
Burkhart Wolff | 43871ced48 | |
Burkhart Wolff | 0fa1048d6d | |
Burkhart Wolff | 33490f8f15 | |
Burkhart Wolff | 01632b5251 | |
Burkhart Wolff | 8a54831295 | |
Burkhart Wolff | 427226f593 | |
Achim D. Brucker | f14c0bebbb | |
Achim D. Brucker | 7f500dc257 | |
Burkhart Wolff | c05bb0bf4d | |
Burkhart Wolff | 66f78001eb | |
Burkhart Wolff | 5a06d3618b | |
Burkhart Wolff | e63ef4e189 | |
Burkhart Wolff | bba7d9d5c5 | |
Burkhart Wolff | 07a9c10001 | |
Burkhart Wolff | 5779c729a4 | |
Burkhart Wolff | 03f2836f5d | |
Burkhart Wolff | d2703b0dbd | |
Achim D. Brucker | 9f2e2b53a4 | |
Achim D. Brucker | 4caee16cb6 | |
Achim D. Brucker | 6ee7058d51 | |
Burkhart Wolff | 583636404f | |
Burkhart Wolff | 8a9684590a | |
Burkhart Wolff | 81c4ae2c13 | |
Achim D. Brucker | 2c1b56d277 | |
Achim D. Brucker | f40d33b9ed | |
Achim D. Brucker | 6a94728747 | |
Achim D. Brucker | 99facb109c | |
Achim D. Brucker | f6d97db0d3 | |
Achim D. Brucker | 4a6fa93644 | |
Achim D. Brucker | 6ca0b0fd21 | |
Achim D. Brucker | 65ae177fbc | |
Achim D. Brucker | b93ff8f65c | |
Achim D. Brucker | adf87dfde4 | |
Achim D. Brucker | df5d037942 | |
Achim D. Brucker | f2f48f2340 | |
Achim D. Brucker | 6839f63129 | |
Achim D. Brucker | 3febf83b3c | |
Achim D. Brucker | fb8dbfac49 | |
Achim D. Brucker | 45e4a11a74 | |
Burkhart Wolff | c8a3c58f7f | |
Achim D. Brucker | 1939ffeea4 | |
Achim D. Brucker | 74093dfaae | |
Achim D. Brucker | d2b6cb81aa | |
Achim D. Brucker | b24ede4400 | |
Achim D. Brucker | 205aa5a6b1 | |
Achim D. Brucker | c8f3bfc65d | |
Achim D. Brucker | 44f9317b35 | |
Achim D. Brucker | 6c2a0d6876 | |
Achim D. Brucker | 909dda1ea2 | |
Achim D. Brucker | 367d8f28ad | |
Achim D. Brucker | d3f41dca9e | |
Achim D. Brucker | ae3d35e363 | |
Achim D. Brucker | 41a6c22822 | |
Achim D. Brucker | 4ac7c84403 | |
Achim D. Brucker | 38f6516ad9 | |
Achim D. Brucker | 03b721f014 | |
Achim D. Brucker | c5752ba4a2 | |
Achim D. Brucker | 5721398340 | |
Achim D. Brucker | 6c0d325673 | |
Achim D. Brucker | b40069bedd | |
Achim D. Brucker | 70b2647e7c | |
Achim D. Brucker | c1efddf252 | |
Achim D. Brucker | 9ded308371 | |
Achim D. Brucker | f63d922096 | |
Achim D. Brucker | 11b309da02 | |
Achim D. Brucker | 1444f8f48b | |
Achim D. Brucker | e6ca682114 | |
Achim D. Brucker | 15fb6fdc2d | |
Achim D. Brucker | 9d5c71d4e1 | |
Burkhart Wolff | 013296f25e | |
Achim D. Brucker | d10b277c60 | |
Achim D. Brucker | 7c50ffb3af | |
Achim D. Brucker | 3a9826901a | |
Achim D. Brucker | a54373ad2f | |
Achim D. Brucker | aa7d0aec09 | |
Achim D. Brucker | 31778374ed | |
Achim D. Brucker | 0d55da68de | |
Achim D. Brucker | a973707a73 | |
Achim D. Brucker | b83f7a8abb | |
Achim D. Brucker | e138855623 | |
Achim D. Brucker | 5582644068 | |
Achim D. Brucker | 5278608b89 | |
Achim D. Brucker | 59658cea6f | |
Achim D. Brucker | ef674b5ae2 | |
Achim D. Brucker | ac8c939179 | |
Burkhart Wolff | c16ec333f1 | |
Burkhart Wolff | d1e4fd173b | |
Burkhart Wolff | 43c857af2c | |
Burkhart Wolff | 0cc010cecc | |
Burkhart Wolff | ba7bd6dc03 | |
Burkhart Wolff | 43b0a3049f | |
Nicolas Méric | 03fd491d5d | |
Nicolas Méric | 9673359688 | |
Nicolas Méric | 5d1b271336 | |
Nicolas Méric | 83c790d66a | |
Nicolas Méric | 9981c31966 | |
Nicolas Méric | 319b39905f | |
Nicolas Méric | c00c6ed31d | |
Nicolas Méric | ae3300ac2c | |
Achim D. Brucker | 61f167c29c | |
Achim D. Brucker | 2833deff90 | |
Achim D. Brucker | a8424979eb | |
Achim D. Brucker | f6f6f32b50 | |
Achim D. Brucker | 15e71fe189 | |
Achim D. Brucker | 45c23b4330 | |
Achim D. Brucker | 995feb6685 | |
Nicolas Méric | d8fde4b4f4 | |
Achim D. Brucker | 41e6c9ed02 | |
Achim D. Brucker | cbad96aba5 | |
Achim D. Brucker | 82c9a07c1a | |
Achim D. Brucker | ae8b91ac4e | |
Achim D. Brucker | 0f3f5d4b56 | |
Achim D. Brucker | fee83a2a29 | |
Achim D. Brucker | a0993b6eea | |
Achim D. Brucker | 64b4eca5ea | |
Achim D. Brucker | 2e4fb5d174 | |
Achim D. Brucker | 317c5a7759 | |
Achim D. Brucker | 12f1b230e6 | |
Achim D. Brucker | 530783c23b | |
Nicolas Méric | 1457c1cb85 | |
Nicolas Méric | e3caad804b | |
Nicolas Méric | 17df6a271b | |
Nicolas Méric | a331b80095 | |
Nicolas Méric | 74420a932f | |
Nicolas Méric | 8e1702d2da | |
Achim D. Brucker | 609f09e919 | |
Achim D. Brucker | 0f5e5bf6f6 | |
Achim D. Brucker | 5c886d49b4 | |
Nicolas Méric | b1f73e9235 | |
Nicolas Méric | 9603311a9a | |
Burkhart Wolff | 2351e00be6 | |
Burkhart Wolff | 3e99e9e013 | |
Burkhart Wolff | d2e1d77b01 | |
Burkhart Wolff | 96726fc507 | |
Burkhart Wolff | a68ecb4f11 | |
Achim D. Brucker | 1ea897e660 | |
Achim D. Brucker | 1b25a08da8 | |
Burkhart Wolff | 6a7b5c6afb | |
Burkhart Wolff | 9403afd86f | |
Burkhart Wolff | 894166a630 | |
Burkhart Wolff | 34df9f6fcd | |
Nicolas Méric | c5a3239d2b | |
Nicolas Méric | e4e4a708a5 | |
Nicolas Méric | 9cd5323063 | |
Nicolas Méric | 444d6d077c | |
Nicolas Méric | ec33e70bbf | |
Achim D. Brucker | f655d2a784 | |
Achim D. Brucker | d80d5b0538 | |
Achim D. Brucker | e5874396c4 | |
Achim D. Brucker | 60b7216daa | |
Achim D. Brucker | 4a7605b43e | |
Achim D. Brucker | 8a2828f3bf | |
Achim D. Brucker | 9522597733 | |
Achim D. Brucker | 9f773ca129 | |
Achim D. Brucker | 7b8ae0a93d | |
Achim D. Brucker | 700855411e | |
Achim D. Brucker | 5348a609be | |
Achim D. Brucker | 46c46af880 | |
Achim D. Brucker | 7b4450450d | |
Achim D. Brucker | 1d48fb810f | |
Achim D. Brucker | c2fbd57f12 | |
Achim D. Brucker | 1f1a504bf0 | |
Achim D. Brucker | 05e85edd91 | |
Achim D. Brucker | 57b9720d99 | |
Achim D. Brucker | 846237b515 | |
Achim D. Brucker | 74368af56c | |
Achim D. Brucker | 21ab0ff6b9 | |
Achim D. Brucker | b7948659ad | |
Achim D. Brucker | 95cda1aaea | |
Achim D. Brucker | 0f6ec7dcd1 | |
Achim D. Brucker | 250755e7f1 | |
Achim D. Brucker | 68e8d0be4a | |
Achim D. Brucker | aff78b0625 | |
Achim D. Brucker | 9f5d20a586 | |
Achim D. Brucker | 3c49a9aaba | |
Achim D. Brucker | f4286404fb | |
Achim D. Brucker | a1d83e33ef | |
Achim D. Brucker | 5ae72e1103 | |
Achim D. Brucker | de67a05160 | |
Achim D. Brucker | 97bfdcff58 | |
Achim D. Brucker | 1a41e92188 | |
Achim D. Brucker | 5381182ab2 | |
Achim D. Brucker | d3270f4afa | |
Achim D. Brucker | ac2fab895b | |
Achim D. Brucker | 20a81d3428 | |
Achim D. Brucker | 20b77577cb | |
Achim D. Brucker | 16bd3b3a94 | |
Achim D. Brucker | f3f24c0d2e | |
Achim D. Brucker | 76582f75fd | |
Nicolas Méric | a4f39bb700 | |
Nicolas Méric | 13835fbed9 | |
Nicolas Méric | cc3f9ab402 | |
Achim D. Brucker | 442835442f | |
Achim D. Brucker | c69b11a312 | |
Achim D. Brucker | 0c9dcfb6e1 | |
Nicolas Méric | 5d0136a168 | |
Nicolas Méric | 3e9adb026b | |
Achim D. Brucker | f51ee34681 | |
Achim D. Brucker | ef89a95307 | |
Achim D. Brucker | 62726920a7 | |
Achim D. Brucker | 7bb4ab58e9 | |
Achim D. Brucker | 010202e34a | |
Achim D. Brucker | 62eefcee5d | |
Achim D. Brucker | abe7713f1e | |
Achim D. Brucker | 2314b2191f | |
Burkhart Wolff | 6bb62fb08a | |
Burkhart Wolff | fb91700a43 | |
Burkhart Wolff | d86173834f | |
Burkhart Wolff | 49f4c5b95b | |
Achim D. Brucker | 658e7a68a1 | |
Achim D. Brucker | bdc7aab6cf | |
Achim D. Brucker | 50e42ca5c0 | |
Achim D. Brucker | d7cf6f1fc7 | |
Achim D. Brucker | a89878079e | |
Achim D. Brucker | 90416c2310 | |
Achim D. Brucker | 36c0e415e3 | |
Burkhart Wolff | 2ca84fd40f | |
Burkhart Wolff | 306d117231 | |
Nicolas Méric | 2886f7df99 | |
Achim D. Brucker | 703b9a055d | |
Achim D. Brucker | a950142749 | |
Achim D. Brucker | 6c74a2e0f5 | |
Nicolas Méric | b7d7015423 | |
Nicolas Méric | e4195a68a2 | |
Achim D. Brucker | 54c9bc2d74 | |
Achim D. Brucker | f6e9e39a58 | |
Achim D. Brucker | a66e90cf25 | |
Burkhart Wolff | 63c0b1e442 | |
Nicolas Méric | 3585b6a2f1 | |
Nicolas Méric | 8bc2e60d2f | |
Nicolas Méric | 3895ba550c | |
Nicolas Méric | eb9edd66d5 | |
Nicolas Méric | a332109dca | |
Burkhart Wolff | 5af219469d | |
Achim D. Brucker | 17d7562d4f | |
Achim D. Brucker | 8efc1300b4 | |
Achim D. Brucker | 4c0d3ccee3 | |
Achim D. Brucker | 53eb93367c | |
Achim D. Brucker | 005d18657c | |
Achim D. Brucker | 6cf004637c | |
Achim D. Brucker | 462673d31e | |
Achim D. Brucker | 43522215b9 | |
Nicolas Méric | 8f7e898f4b | |
Burkhart Wolff | e650892b48 | |
Burkhart Wolff | 35b47223b9 | |
Achim D. Brucker | 46325cc64b | |
Nicolas Méric | d546a714b7 | |
Nicolas Méric | 76612ae6f3 | |
Burkhart Wolff | 96112ff893 | |
Burkhart Wolff | 5631010371 | |
Burkhart Wolff | 68e9f64156 | |
Burkhart Wolff | 647f8e86cc | |
Burkhart Wolff | b5939bc9db | |
Burkhart Wolff | 6889e08f33 | |
Burkhart Wolff | ef7d8caefb | |
Makarius Wenzel | 4352691e95 | |
Makarius Wenzel | 2547b2324e | |
Makarius Wenzel | 99264edc02 | |
Makarius Wenzel | 70617f59fe | |
Makarius Wenzel | fadd982285 | |
Makarius Wenzel | 4e4995bde5 | |
Makarius Wenzel | 2e4d37e3ca | |
Makarius Wenzel | ff32bac3fc | |
Burkhart Wolff | 96d6bb8e00 | |
Makarius Wenzel | bcf7849083 | |
Burkhart Wolff | 77150aefe2 | |
Makarius Wenzel | 86b555b56e | |
Makarius Wenzel | ec49f45966 | |
Burkhart Wolff | 12d33fa457 | |
Burkhart Wolff | 616ff85721 | |
Burkhart Wolff | b0a2214c40 | |
Burkhart Wolff | cbd32874cf | |
Burkhart Wolff | 6c99612dcd | |
Burkhart Wolff | 3f09aca090 | |
Achim D. Brucker | 9632c0810b | |
Achim D. Brucker | a2673b0825 | |
Achim D. Brucker | 546b4fbcfe | |
Nicolas Méric | 541d2711bd | |
Nicolas Méric | 18c0557d01 | |
Achim D. Brucker | 84588fccb3 | |
Nicolas Méric | d2a6106be5 | |
Achim D. Brucker | 1d497db5cf | |
Achim D. Brucker | 42783d6bbe | |
Nicolas Méric | 08c101c544 | |
Nicolas Méric | 6ac1445147 | |
Nicolas Méric | 664aede4c0 | |
Burkhart Wolff | c14cb31639 | |
Burkhart Wolff | 9b08e92588 | |
Burkhart Wolff | 5f47588270 | |
Burkhart Wolff | eb292a695b | |
Burkhart Wolff | 4420084d52 | |
Burkhart Wolff | 3f8880c0f0 | |
Achim D. Brucker | eef8170e40 | |
Achim D. Brucker | 3ac69001ab | |
Burkhart Wolff | f9027ef331 | |
Achim D. Brucker | 6c433ed766 | |
Achim D. Brucker | cfbc3311cd | |
Achim D. Brucker | 295233cdcf | |
Achim D. Brucker | 9569113f9b | |
Burkhart Wolff | 9f9bc25618 | |
Burkhart Wolff | 5aad659a85 | |
Nicolas Méric | 2c01a7118b | |
Nicolas Méric | f11e5b762b | |
Burkhart Wolff | f8801a1121 | |
Burkhart Wolff | d7b625ae04 | |
Burkhart Wolff | 3b21df199b | |
Achim D. Brucker | 0b6ef076b0 | |
Achim D. Brucker | 51375ea983 | |
Achim D. Brucker | 78987a5ae0 | |
Achim D. Brucker | 920779b150 | |
Achim D. Brucker | e20e73be90 | |
Achim D. Brucker | b96397800d | |
Achim D. Brucker | 8d8d418f0e | |
Achim D. Brucker | 712cea7ac9 | |
Achim D. Brucker | e048bbe508 | |
Achim D. Brucker | 201d12a01a | |
Achim D. Brucker | aadbce7844 | |
Burkhart Wolff | cfad21e296 | |
Burkhart Wolff | ad18d3c179 | |
Achim D. Brucker | 46875b0560 | |
Achim D. Brucker | 389fd6d033 | |
Achim D. Brucker | f975672901 | |
Achim D. Brucker | 4a1f15be02 | |
Achim D. Brucker | 06dddeacf5 | |
Burkhart Wolff | 9bba4301c1 |
|
@ -1,27 +0,0 @@
|
|||
pipeline {
|
||||
agent any
|
||||
|
||||
stages {
|
||||
stage('Build Docker') {
|
||||
steps {
|
||||
sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/'
|
||||
sh 'docker build -t logicalhacking:isabelle4dof .ci/isabelle4isadof'
|
||||
sh 'rm -f .ci/isabelle4isadof/thy_output.ML'
|
||||
}
|
||||
}
|
||||
stage('Check Docker') {
|
||||
when { changeset "src/patches/*" }
|
||||
steps {
|
||||
sh 'cp src/patches/thy_output.ML .ci/isabelle4isadof/'
|
||||
sh 'docker build --no-cache -t logicalhacking:isabelle4dof .ci/isabelle4isadof'
|
||||
sh 'rm -f .ci/isabelle4isadof/thy_output.ML'
|
||||
}
|
||||
}
|
||||
stage('Build Isabelle/DOF') {
|
||||
steps {
|
||||
sh 'find -type d -name "output" -exec rm -rf {} \\; || true'
|
||||
sh 'docker run -v $PWD:/DOF logicalhacking:isabelle4dof sh -c "cd /DOF && ./install && isabelle build -D ."'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
# Copyright (c) 2019 Achim D. Brucker
|
||||
#
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright notice, this
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# SPDX-License-Identifier: BSD-2-Clause
|
||||
|
||||
FROM logicalhacking/lh-docker-isabelle:isabelle2020
|
||||
|
||||
WORKDIR /home/isabelle
|
||||
COPY thy_output.ML /home/isabelle/Isabelle/src/Pure/Thy
|
||||
RUN Isabelle/bin/isabelle build -b Functional-Automata
|
||||
|
18
.config
|
@ -1,18 +0,0 @@
|
|||
# Isabelle/DOF Version Information
|
||||
DOF_VERSION="Unreleased" # "Unreleased" for development, semantic version for releases
|
||||
DOF_LATEST_VERSION="1.0.0"
|
||||
DOF_LATEST_ISABELLE="Isabelle2019"
|
||||
DOF_LATEST_DOI="10.5281/zenodo.3370483"
|
||||
DOF_GENERIC_DOI="10.5281/zenodo.3370482"
|
||||
#
|
||||
# Isabelle and AFP Configuration
|
||||
ISABELLE_VERSION="Isabelle2020: April 2020"
|
||||
ISABELLE_URL="https://isabelle.in.tum.de/website-Isabelle2020/"
|
||||
AFP_DATE="afp-2020-04-20"
|
||||
AFP_URL="https://www.isa-afp.org/release/"$AFP_DATE".tar.gz"
|
||||
#
|
||||
# Isabelle/DOF Repository Configuration
|
||||
DOF_URL="https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF"
|
||||
DOF_ARTIFACT_DIR="releases/Isabelle_DOF/Isabelle_DOF"
|
||||
DOF_ARTIFACT_HOST="artifacts.logicalhacking.com"
|
||||
#
|
|
@ -1,3 +1,2 @@
|
|||
install -crlf
|
||||
document-generator/Tools/DOF_mkroot -crlf
|
||||
document-generator/document-template/build -crlf
|
||||
core.autocrlf false
|
||||
core.eol lf
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
output
|
||||
.afp
|
||||
*~
|
||||
*#
|
||||
Isabelle_DOF-Unit-Tests/latex_test/
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Continuous Build and Release Setup
|
||||
|
||||
[![status-badge](https://ci.logicalhacking.com/api/badges/Isabelle_DOF/Isabelle_DOF/status.svg)](https://ci.logicalhacking.com/Isabelle_DOF/Isabelle_DOF)
|
||||
|
||||
This directory contains the CI configuration for the [Woodpecker CI](https://woodpecker-ci.org/).
|
||||
It may also contain additional tools and script that are useful for preparing a release.
|
||||
|
||||
## Generated Artifacts
|
||||
|
||||
### Latest Build
|
||||
|
||||
* lualatex
|
||||
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/lualatex/browser_info/Unsorted/)
|
||||
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/lualatex/)
|
||||
* pdflatex
|
||||
* [browser_info](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/browser_info/Unsorted/)
|
||||
* [aux files](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/pdflatex/)
|
||||
* [Isabelle_DOF-Unreleased_Isabelle2022.tar.xz](https://artifacts.logicalhacking.com/ci/Isabelle_DOF/Isabelle_DOF/main/latest/Isabelle_DOF-Unreleased_Isabelle2022.tar.xz)
|
|
@ -0,0 +1,61 @@
|
|||
pipeline:
|
||||
build:
|
||||
image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle2023:latest
|
||||
pull: true
|
||||
commands:
|
||||
- hg log --limit 2 /root/isabelle
|
||||
- ./.woodpecker/check_dangling_theories
|
||||
- ./.woodpecker/check_external_file_refs
|
||||
- ./.woodpecker/check_quick_and_dirty
|
||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||
- mkdir -p $ARTIFACT_DIR
|
||||
- export `isabelle getenv ISABELLE_HOME_USER`
|
||||
- mkdir -p $ISABELLE_HOME_USER/etc
|
||||
- echo "ISABELLE_PDFLATEX=\"$LATEX --file-line-error\"" >> $ISABELLE_HOME_USER/etc/settings
|
||||
- isabelle build -x HOL-Proofs -x Isabelle_DOF-Proofs -D . -o browser_info
|
||||
- if [ "$LATEX" = "lualatex" ]; then isabelle build -o 'timeout_scale=2' -D . -o browser_info; else echo "Skipping Isabelle_DOF-Proofs for pdflatex build."; fi
|
||||
- find . -name 'root.tex' -prune -o -name 'output' -type f | xargs latexmk -$LATEX -cd -quiet -Werror
|
||||
- isabelle components -u .
|
||||
- isabelle dof_mkroot -q DOF_test
|
||||
- isabelle build -D DOF_test
|
||||
- cp -r $ISABELLE_HOME_USER/browser_info $ARTIFACT_DIR
|
||||
- cd $ARTIFACT_DIR
|
||||
- cd ../..
|
||||
- ln -s * latest
|
||||
archive:
|
||||
image: git.logicalhacking.com/lh-docker/lh-docker-isabelle/isabelle2023:latest
|
||||
commands:
|
||||
- export ARTIFACT_DIR=$CI_WORKSPACE/.artifacts/$CI_REPO/$CI_BRANCH/$CI_BUILD_NUMBER/$LATEX
|
||||
- mkdir -p $ARTIFACT_DIR
|
||||
- export ISABELLE_VERSION=`isabelle version`
|
||||
- ./.woodpecker/mk_release -d
|
||||
- cp Isabelle_DOF-Unreleased_$ISABELLE_VERSION.tar.xz $ARTIFACT_DIR/../
|
||||
when:
|
||||
matrix:
|
||||
LATEX: lualatex
|
||||
deploy:
|
||||
image: docker.io/drillster/drone-rsync
|
||||
settings:
|
||||
hosts: [ "ci.logicalhacking.com"]
|
||||
port: 22
|
||||
source: .artifacts/$CI_REPO_OWNER/*
|
||||
target: $CI_REPO_OWNER
|
||||
include: [ "**.*"]
|
||||
key:
|
||||
from_secret: artifacts_ssh
|
||||
user: artifacts
|
||||
notify:
|
||||
image: docker.io/drillster/drone-email
|
||||
settings:
|
||||
host: smtp.0x5f.org
|
||||
username: woodpecker
|
||||
password:
|
||||
from_secret: email
|
||||
from: ci@logicalhacking.com
|
||||
when:
|
||||
status: [ failure ]
|
||||
|
||||
matrix:
|
||||
LATEX:
|
||||
- lualatex
|
||||
- pdflatex
|
|
@ -0,0 +1,33 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
failuremsg="Error"
|
||||
failurecode=1
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
--warning|-w)
|
||||
failuremsg="Warning"
|
||||
failurecode=0;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
echo "Checking for theories that are not part of an Isabelle session:"
|
||||
echo "==============================================================="
|
||||
|
||||
PWD=`pwd`
|
||||
TMPDIR=`mktemp -d`
|
||||
isabelle build -D . -l -n | grep $PWD | sed -e "s| *${PWD}/||" | sort -u | grep thy$ > ${TMPDIR}/sessions-thy-files.txt
|
||||
find * -type f | sort -u | grep thy$ > ${TMPDIR}/actual-thy-files.txt
|
||||
thylist=`comm -13 ${TMPDIR}/sessions-thy-files.txt ${TMPDIR}/actual-thy-files.txt`
|
||||
if [ -z "$thylist" ] ; then
|
||||
echo " * Success: No dangling theories found."
|
||||
exit 0
|
||||
else
|
||||
echo -e "$thylist"
|
||||
echo "$failuremsg: Dangling theories found (see list above)!"
|
||||
exit $failurecode
|
||||
fi
|
|
@ -0,0 +1,45 @@
|
|||
#!/bin/sh
|
||||
|
||||
|
||||
|
||||
failuremsg="Error"
|
||||
failurecode=1
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
--warning|-w)
|
||||
failuremsg="Warning"
|
||||
failurecode=0;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
DIRREGEXP="\\.\\./"
|
||||
|
||||
echo "Checking for references pointing outside of session directory:"
|
||||
echo "=============================================================="
|
||||
|
||||
REGEXP=$DIRREGEXP
|
||||
DIR=$DIRMATCH
|
||||
failed=0
|
||||
for i in $(seq 1 10); do
|
||||
FILES=`find * -mindepth $((i-1)) -maxdepth $i -type f | xargs`
|
||||
if [ -n "$FILES" ]; then
|
||||
grep -s ${REGEXP} ${FILES}
|
||||
exit=$?
|
||||
if [ "$exit" -eq 0 ] ; then
|
||||
failed=1
|
||||
fi
|
||||
fi
|
||||
REGEXP="${DIRREGEXP}${REGEXP}"
|
||||
done
|
||||
|
||||
|
||||
if [ "$failed" -ne 0 ] ; then
|
||||
echo "$failuremsg: Forbidden reference to files outside of their session directory!"
|
||||
exit $failurecode
|
||||
fi
|
||||
|
||||
echo " * Success: No relative references to files outside of their session directory found."
|
||||
exit 0
|
|
@ -0,0 +1,30 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
failuremsg="Error"
|
||||
failurecode=1
|
||||
|
||||
while [ $# -gt 0 ]
|
||||
do
|
||||
case "$1" in
|
||||
--warning|-w)
|
||||
failuremsg="Warning"
|
||||
failurecode=0;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
echo "Checking for sessions with quick_and_dirty mode enabled:"
|
||||
echo "========================================================"
|
||||
|
||||
rootlist=`find -name 'ROOT' -exec grep -l 'quick_and_dirty *= *true' {} \;`
|
||||
|
||||
if [ -z "$rootlist" ] ; then
|
||||
echo " * Success: No sessions with quick_and_dirty mode enabled found."
|
||||
exit 0
|
||||
else
|
||||
echo -e "$rootlist"
|
||||
echo "$failuremsg: Sessions with quick_and_dirty mode enabled found (see list above)!"
|
||||
exit $failurecode
|
||||
fi
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
# Copyright (c) 2019The University of Exeter.
|
||||
# 2019 The University of Paris-Saclay.
|
||||
# Copyright (c) 2019-2022 University of Exeter.
|
||||
# 2019 University of Paris-Saclay.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions
|
||||
|
@ -39,26 +39,18 @@ print_help()
|
|||
echo "Run ..."
|
||||
echo ""
|
||||
echo " --help, -h display this help message"
|
||||
echo " --sign -s sign release archive"
|
||||
echo " --sign, -s sign release archive"
|
||||
echo " (default: $SIGN)"
|
||||
echo " --isabelle, -i isabelle isabelle command used for installation"
|
||||
echo " (default: $ISABELLE)"
|
||||
echo " --tag -t tag use tag for release archive"
|
||||
echo " --tag tag, -t tag use tag for release archive"
|
||||
echo " (default: use master branch)"
|
||||
echo " --p --publish publish generated artefact"
|
||||
echo " (use master: $PUBLISH)"
|
||||
echo " --publish, -p publish generated artefact"
|
||||
echo " (default: $PUBLISH)"
|
||||
echo " --quick-and-dirty, -d only build required artifacts, no complete test"
|
||||
echo " (default: $DIRTY)"
|
||||
}
|
||||
|
||||
read_config() {
|
||||
if [ ! -f .config ]; then
|
||||
echo "Error: .config not found (not started in the main directory?)!"
|
||||
exit 1
|
||||
else
|
||||
source .config
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
check_isabelle_version() {
|
||||
ACTUAL_ISABELLE_VERSION=`$ISABELLE version`
|
||||
echo "* Checking Isabelle version:"
|
||||
|
@ -78,39 +70,54 @@ clone_repo()
|
|||
echo " * Switching to tag $TAG"
|
||||
(cd $ISADOF_WORK_DIR && git checkout $TAG)
|
||||
else
|
||||
echo " * Not tag specified, using master branch"
|
||||
echo " * No tag specified, using master branch"
|
||||
fi
|
||||
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/.config
|
||||
(cd $ISADOF_WORK_DIR && git show -s --format="COMMIT=%H%nDATE=%cd" --date=short | sed -e 's|-|/|g') >> $ISADOF_WORK_DIR/etc/settings
|
||||
|
||||
}
|
||||
|
||||
build_and_install_manuals()
|
||||
{
|
||||
echo "* Building manual"
|
||||
ROOTS=$ISABELLE_HOME_USER/ROOTS
|
||||
if [ -f $ROOTS ]; then
|
||||
mv $ROOTS $ROOTS.backup
|
||||
|
||||
if [ "$DIRTY" = "true" ]; then
|
||||
if [ -z ${ARTIFACT_DIR+x} ]; then
|
||||
echo " * Quick and Dirty Mode (local build)"
|
||||
$ISABELLE build -d . Isabelle_DOF Isabelle_DOF-Example-I
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
cp Isabelle_DOF-Example-I/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF/output/
|
||||
cp Isabelle_DOF/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF/output/;
|
||||
else
|
||||
echo " * Quick and Dirty Mode (running on CI)"
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
cp $ARTIFACT_DIR/browser_info/AFP/Isabelle_DOF-Example-I/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/
|
||||
|
||||
mkdir -p $ISADOF_WORK_DIR/Isabelle_DOF/output/
|
||||
cp $ARTIFACT_DIR/browser_info/AFP/Isabelle_DOF/document.pdf \
|
||||
$ISADOF_WORK_DIR/Isabelle_DOF/output/;
|
||||
fi
|
||||
else
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE env ./install-afp)
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||
fi
|
||||
|
||||
(cd $ISADOF_WORK_DIR && ./install)
|
||||
(cd $ISADOF_WORK_DIR && $ISABELLE build -c -D . )
|
||||
|
||||
mkdir -p $ISADOF_WORK_DIR/doc
|
||||
echo "Isabelle/DOF Manuals!" > $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
cp $ISADOF_WORK_DIR/examples/technical_report/Isabelle_DOF-Manual/output/document.pdf \
|
||||
cp $ISADOF_WORK_DIR/Isabelle_DOF/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/doc/Isabelle_DOF-Manual.pdf
|
||||
echo " Isabelle_DOF-Manual User and Implementation Manual for Isabelle/DOF" >> $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
cp $ISADOF_WORK_DIR/examples/scholarly_paper/2018-cicm-isabelle_dof-applications/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/doc/2018-cicm-isabelle_dof-applications.pdf
|
||||
echo " 2018-cicm-isabelle_dof-applications Example academic paper" >> $ISADOF_WORK_DIR/doc/Contents
|
||||
cp $ISADOF_WORK_DIR/Isabelle_DOF-Example-I/output/document.pdf \
|
||||
$ISADOF_WORK_DIR/doc/Isabelle_DOF-Example-I.pdf
|
||||
echo " Isabelle_DOF-Example-I Example academic paper" >> $ISADOF_WORK_DIR/doc/Contents
|
||||
|
||||
find $ISADOF_WORK_DIR -type d -name "output" -exec rm -rf {} \; &> /dev/null || true
|
||||
rm -rf $ISADOF_WORK_DIR/.git* $ISADOF_WORK_DIR/.ci $ISADOF_WORK_DIR/.afp
|
||||
if [ -f $ROOTS.backup ]; then
|
||||
mv $ROOTS.backup $ROOTS
|
||||
fi
|
||||
rm -rf $ISADOF_WORK_DIR/.git* $ISADOF_WORK_DIR/.woodpecker $ISADOF_WORK_DIR/.afp
|
||||
|
||||
}
|
||||
|
||||
create_archive()
|
||||
|
@ -120,7 +127,6 @@ create_archive()
|
|||
(mv $ISADOF_WORK_DIR $ISADOF_DIR)
|
||||
(cd $BUILD_DIR && tar cf $ISADOF_TAR.tar $ISADOF_TAR && xz $ISADOF_DIR.tar)
|
||||
mv $BUILD_DIR/$ISADOF_TAR.tar.xz .
|
||||
rm -rf $BUILD_DIR
|
||||
}
|
||||
|
||||
sign_archive()
|
||||
|
@ -137,12 +143,11 @@ publish_archive()
|
|||
ssh 0x5f.org chmod go+u-w -R www/$DOF_ARTIFACT_HOST/htdocs/$DOF_ARTIFACT_DIR
|
||||
}
|
||||
|
||||
|
||||
read_config
|
||||
ISABELLE=`which isabelle`
|
||||
USE_TAG="false"
|
||||
SIGN="false"
|
||||
PUBLISH="false"
|
||||
DIRTY="false"
|
||||
BUILD_DIR=`mktemp -d`
|
||||
ISADOF_WORK_DIR="$BUILD_DIR/Isabelle_DOF"
|
||||
while [ $# -gt 0 ]
|
||||
|
@ -159,6 +164,8 @@ do
|
|||
SIGN="true";;
|
||||
--publish|-p)
|
||||
PUBLISH="true";;
|
||||
--quick-and-dirty|-d)
|
||||
DIRTY="true";;
|
||||
--help|-h)
|
||||
print_help
|
||||
exit 0;;
|
||||
|
@ -171,19 +178,36 @@ done
|
|||
|
||||
clone_repo
|
||||
|
||||
source $ISADOF_WORK_DIR/.config
|
||||
ISADOF_MAIN_DIR=`pwd`
|
||||
|
||||
if [ "$DIRTY" = "true" ]; then
|
||||
echo "Running in Quick and Dirty mode!"
|
||||
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||
else
|
||||
$ISABELLE components -x $ISADOF_MAIN_DIR
|
||||
$ISABELLE components -u $ISADOF_WORK_DIR
|
||||
fi
|
||||
|
||||
VARS=`$ISABELLE getenv ISABELLE_TOOL`
|
||||
for i in $VARS; do
|
||||
export "$i"
|
||||
done
|
||||
|
||||
ISABELLE_VERSION="Isabelle$($ISABELLE_TOOL dof_param -b isabelle_version)"
|
||||
DOF_VERSION="$($ISABELLE_TOOL dof_param -b dof_version)"
|
||||
|
||||
ISABELLE_SHORT_VERSION=`echo $ISABELLE_VERSION | sed -e 's/:.*$//'`
|
||||
ISADOF_TAR="Isabelle_DOF-"$DOF_VERSION"_"$ISABELLE_SHORT_VERSION
|
||||
ISADOF_DIR="$BUILD_DIR/$ISADOF_TAR"
|
||||
|
||||
check_isabelle_version
|
||||
VARS=`$ISABELLE getenv ISABELLE_HOME_USER`
|
||||
for i in $VARS; do
|
||||
export "$i"
|
||||
done
|
||||
|
||||
build_and_install_manuals
|
||||
|
||||
if [ "$DIRTY" != "true" ]; then
|
||||
$ISABELLE components -x $ISADOF_WORK_DIR
|
||||
$ISABELLE components -u $ISADOF_MAIN_DIR
|
||||
fi
|
||||
|
||||
create_archive
|
||||
|
||||
if [ "$SIGN" = "true" ]; then
|
||||
|
@ -194,4 +218,5 @@ if [ "$PUBLISH" = "true" ]; then
|
|||
publish_archive
|
||||
fi
|
||||
|
||||
exit 0
|
||||
rm -rf $BUILD_DIR
|
||||
|
|
@ -11,7 +11,24 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
|
||||
### Changed
|
||||
|
||||
## 1.1.0 - 2021-03-20
|
||||
- Updated Isabelle version to Isabelle 2023
|
||||
|
||||
## [1.3.0] - 2022-07-08
|
||||
|
||||
### Changed
|
||||
|
||||
- The project-specific configuration is not part of the `ROOT` file, the formerly
|
||||
used `isadof.cfg` is obsolete and no longer supported.
|
||||
- Removed explicit use of `document/build` script. Requires removing the `build` script
|
||||
entry from ROOT files.
|
||||
- Isabelle/DOF is now a proper Isabelle component that should be installed using the
|
||||
`isabelle components` command. The installation script is now only a convenient way
|
||||
of installing the required AFP entries.
|
||||
- `mkroot_DOF` has been renamed to `dof_mkroot` (and reimplemented in Scala).
|
||||
|
||||
## [1.2.0] - 2022-03-26
|
||||
|
||||
## [1.1.0] - 2021-03-20
|
||||
|
||||
### Added
|
||||
|
||||
|
@ -28,4 +45,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
|
||||
- First public release
|
||||
|
||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.0.0/Isabelle2019...HEAD
|
||||
[Unreleased]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.3.0/Isabelle2021...HEAD
|
||||
[1.3.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.2.0/Isabelle2021...v1.3.0/Isabelle2021-1
|
||||
[1.2.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.1.0/Isabelle2021...v1.2.0/Isabelle2021
|
||||
[1.1.0]: https://git.logicalhacking.com/Isabelle_DOF/Isabelle_DOF/compare/v1.0.0/Isabelle2019...v1.1.0/Isabelle2021
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
To cite Isabelle/DOF in publications, please use
|
||||
|
||||
|
||||
Achim D. Brucker, Idir Ait-Sadoune, Paolo Crisafulli, and Burkhart
|
||||
Wolff. Using The Isabelle Ontology Framework: Linking the Formal
|
||||
|
|
472
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/IsaDofApplications.thy → Isabelle_DOF-Example-I/IsaDofApplications.thy
Executable file → Normal file
|
@ -16,6 +16,9 @@ theory IsaDofApplications
|
|||
imports "Isabelle_DOF.scholarly_paper"
|
||||
begin
|
||||
|
||||
use_template "lncs"
|
||||
use_ontology "Isabelle_DOF.scholarly_paper"
|
||||
|
||||
open_monitor*[this::article]
|
||||
declare[[strict_monitor_checking=false]]
|
||||
|
||||
|
@ -27,6 +30,61 @@ define_shortcut* isadof \<rightleftharpoons> \<open>\isadof\<close>
|
|||
|
||||
(* slanted text in contrast to italics *)
|
||||
define_macro* slanted_text \<rightleftharpoons> \<open>\textsl{\<close> _ \<open>}\<close>
|
||||
define_macro* unchecked_label \<rightleftharpoons> \<open>\autoref{\<close> _ \<open>}\<close>
|
||||
|
||||
ML\<open>
|
||||
|
||||
fun boxed_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
DOF_lib.gen_text_antiquotation name DOF_lib.report_text
|
||||
(fn ctxt => DOF_lib.string_2_text_antiquotation ctxt
|
||||
#> DOF_lib.enclose_env false ctxt "isarbox")
|
||||
|
||||
val neant = K(Latex.text("",\<^here>))
|
||||
|
||||
fun boxed_theory_text_antiquotation name (* redefined in these more abstract terms *) =
|
||||
DOF_lib.gen_text_antiquotation name DOF_lib.report_theory_text
|
||||
(fn ctxt => DOF_lib.string_2_theory_text_antiquotation ctxt
|
||||
#> DOF_lib.enclose_env false ctxt "isarbox"
|
||||
(* #> neant *)) (*debugging *)
|
||||
|
||||
fun boxed_sml_text_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "sml")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_pdf_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "out")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_latex_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "ltx")
|
||||
(* the simplest conversion possible *)
|
||||
|
||||
fun boxed_bash_antiquotation name =
|
||||
DOF_lib.gen_text_antiquotation name (K(K()))
|
||||
(fn ctxt => Input.source_content
|
||||
#> Latex.text
|
||||
#> DOF_lib.enclose_env true ctxt "bash")
|
||||
(* the simplest conversion possible *)
|
||||
\<close>
|
||||
|
||||
setup\<open>boxed_text_antiquotation \<^binding>\<open>boxed_text\<close> #>
|
||||
boxed_text_antiquotation \<^binding>\<open>boxed_cartouche\<close> #>
|
||||
boxed_theory_text_antiquotation \<^binding>\<open>boxed_theory_text\<close> #>
|
||||
|
||||
boxed_sml_text_antiquotation \<^binding>\<open>boxed_sml\<close> #>
|
||||
boxed_pdf_antiquotation \<^binding>\<open>boxed_pdf\<close> #>
|
||||
boxed_latex_antiquotation \<^binding>\<open>boxed_latex\<close>#>
|
||||
boxed_bash_antiquotation \<^binding>\<open>boxed_bash\<close>
|
||||
\<close>
|
||||
|
||||
(*>*)
|
||||
|
||||
|
@ -71,7 +129,7 @@ abstract*[abs::abstract, keywordlist="[''Ontology'',''Ontological Modeling'',''I
|
|||
\<close>
|
||||
|
||||
section*[intro::introduction]\<open> Introduction \<close>
|
||||
text*[introtext::introduction]\<open>
|
||||
text*[introtext::introduction, level = "Some 1"]\<open>
|
||||
The linking of the \<^emph>\<open>formal\<close> to the \<^emph>\<open>informal\<close> is perhaps the
|
||||
most pervasive challenge in the digitization of knowledge and its
|
||||
propagation. This challenge incites numerous research efforts
|
||||
|
@ -99,20 +157,18 @@ document evolution. Based on Isabelle infrastructures, ontologies may refer to
|
|||
types, terms, proven theorems, code, or established assertions.
|
||||
Based on a novel adaption of the Isabelle IDE, a document is checked to be
|
||||
\<^emph>\<open>conform\<close> to a particular ontology---\<^isadof> is designed to give fast user-feedback
|
||||
\<^emph>\<open>during the capture of content\<close>. This is particularly valuable in case of document
|
||||
\<^emph>\<open>during the capture of content\<close>. This is particularly valuable for document
|
||||
changes, where the \<^emph>\<open>coherence\<close> between the formal and the informal parts of the
|
||||
content can be mechanically checked.
|
||||
|
||||
To avoid any misunderstanding: \<^isadof> is \<^emph>\<open>not a theory in HOL\<close>
|
||||
on ontologies and operations to track and trace links in texts,
|
||||
it is an \<^emph>\<open>environment to write structured text\<close> which \<^emph>\<open>may contain\<close>
|
||||
\<^isabelle> definitions and proofs like mathematical articles, tech-reports and
|
||||
scientific papers---as the present one, which is written in \<^isadof>
|
||||
itself. \<^isadof> is a plugin into the Isabelle/Isar
|
||||
framework in the style of~@{cite "wenzel.ea:building:2007"}.
|
||||
To avoid any misunderstanding: \<^isadof> is \<^emph>\<open>not a theory in HOL\<close> on ontologies and operations
|
||||
to track and trace links in texts, it is an \<^emph>\<open>environment to write structured text\<close> which
|
||||
\<^emph>\<open>may contain\<close> \<^isabelle> definitions and proofs like mathematical articles, tech-reports and
|
||||
scientific papers---as the present one, which is written in \<^isadof> itself. \<^isadof> is a plugin
|
||||
into the Isabelle/Isar framework in the style of~@{cite "wenzel.ea:building:2007"}.
|
||||
\<close>
|
||||
|
||||
(* declaring the forward references used in the subsequent section *)
|
||||
(* declaring the forward references used in the subsequent sections *)
|
||||
(*<*)
|
||||
declare_reference*[bgrnd::text_section]
|
||||
declare_reference*[isadof::text_section]
|
||||
|
@ -120,29 +176,25 @@ declare_reference*[ontomod::text_section]
|
|||
declare_reference*[ontopide::text_section]
|
||||
declare_reference*[conclusion::text_section]
|
||||
(*>*)
|
||||
text*[plan::introduction]\<open> The plan of the paper is follows: we start by introducing the underlying
|
||||
Isabelle system (@{text_section (unchecked) \<open>bgrnd\<close>}) followed by presenting the
|
||||
essentials of \<^isadof> and its ontology language (@{text_section (unchecked) \<open>isadof\<close>}).
|
||||
text*[plan::introduction, level="Some 1"]\<open> The plan of the paper is as follows: we start by
|
||||
introducing the underlying Isabelle system (@{text_section (unchecked) \<open>bgrnd\<close>}) followed by
|
||||
presenting the essentials of \<^isadof> and its ontology language (@{text_section (unchecked) \<open>isadof\<close>}).
|
||||
It follows @{text_section (unchecked) \<open>ontomod\<close>}, where we present three application
|
||||
scenarios from the point of view of the ontology modeling. In @{text_section (unchecked) \<open>ontopide\<close>}
|
||||
we discuss the user-interaction generated from the ontological definitions. Finally, we draw
|
||||
conclusions and discuss related work in @{text_section (unchecked) \<open>conclusion\<close>}. \<close>
|
||||
|
||||
section*[bgrnd::text_section,main_author="Some(@{docitem ''bu''}::author)"]
|
||||
section*[bgrnd::text_section,main_author="Some(@{author ''bu''}::author)"]
|
||||
\<open> Background: The Isabelle System \<close>
|
||||
text*[background::introduction]\<open>
|
||||
While Isabelle is widely perceived as an interactive theorem prover
|
||||
for HOL (Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we
|
||||
would like to emphasize the view that Isabelle is far more than that:
|
||||
it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This refers to the
|
||||
``\<^slanted_text>\<open>generic system framework of Isabelle/Isar underlying recent
|
||||
versions of Isabelle. Among other things, Isar provides an
|
||||
infrastructure for Isabelle plug-ins, comprising extensible state
|
||||
components and extensible syntax that can be bound to ML
|
||||
programs. Thus, the Isabelle/Isar architecture may be understood as
|
||||
an extension and refinement of the traditional `LCF approach', with
|
||||
explicit infrastructure for building derivative
|
||||
\<^emph>\<open>systems\<close>.\<close>''~@{cite "wenzel.ea:building:2007"}
|
||||
text*[background::introduction, level="Some 1"]\<open>
|
||||
While Isabelle is widely perceived as an interactive theorem prover for HOL
|
||||
(Higher-order Logic)~@{cite "nipkow.ea:isabelle:2002"}, we would like to emphasize the view that
|
||||
Isabelle is far more than that: it is the \<^emph>\<open>Eclipse of Formal Methods Tools\<close>. This refers to the
|
||||
``\<^slanted_text>\<open>generic system framework of Isabelle/Isar underlying recent versions of Isabelle.
|
||||
Among other things, Isar provides an infrastructure for Isabelle plug-ins, comprising extensible
|
||||
state components and extensible syntax that can be bound to ML programs. Thus, the Isabelle/Isar
|
||||
architecture may be understood as an extension and refinement of the traditional `LCF approach',
|
||||
with explicit infrastructure for building derivative \<^emph>\<open>systems\<close>.\<close>''~@{cite "wenzel.ea:building:2007"}
|
||||
|
||||
The current system framework offers moreover the following features:
|
||||
|
||||
|
@ -154,12 +206,12 @@ The current system framework offers moreover the following features:
|
|||
the most prominent and deeply integrated system component.
|
||||
\<close>
|
||||
|
||||
figure*[architecture::figure,relative_width="100",src="''figures/isabelle-architecture''"]\<open>
|
||||
figure*[architecture::figure,relative_width="100",file_src="''figures/isabelle-architecture.pdf''"]\<open>
|
||||
The system architecture of Isabelle (left-hand side) and the
|
||||
asynchronous communication between the Isabelle system and
|
||||
the IDE (right-hand side). \<close>
|
||||
|
||||
text*[blug::introduction]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
|
||||
text*[blug::introduction, level="Some 1"]\<open> The Isabelle system architecture shown in @{figure \<open>architecture\<close>}
|
||||
comes with many layers, with Standard ML (SML) at the bottom layer as implementation
|
||||
language. The architecture actually foresees a \<^emph>\<open>Nano-Kernel\<close> (our terminology) which
|
||||
resides in the SML structure \<^ML_structure>\<open>Context\<close>. This structure provides a kind of container called
|
||||
|
@ -169,41 +221,39 @@ automated proof procedures as well as specific support for higher specification
|
|||
were built. \<close>
|
||||
|
||||
text\<open> We would like to detail the documentation generation of the architecture,
|
||||
which is based on literate specification commands such as \inlineisar+section+ \<^dots>,
|
||||
\inlineisar+subsection+ \<^dots>, \inlineisar+text+ \<^dots>, etc.
|
||||
which is based on literate specification commands such as \<^theory_text>\<open>section\<close> \<^dots>,
|
||||
\<^theory_text>\<open>subsection\<close> \<^dots>, \<^theory_text>\<open>text\<close> \<^dots>, etc.
|
||||
Thus, a user can add a simple text:
|
||||
\begin{isar}
|
||||
text\<Open>This is a description.\<Close>
|
||||
\end{isar}
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open> This is a description.\<close>\<close>}
|
||||
These text-commands can be arbitrarily mixed with other commands stating definitions, proofs, code, etc.,
|
||||
and will result in the corresponding output in generated \<^LaTeX> or HTML documents.
|
||||
Now, \<^emph>\<open>inside\<close> the textual content, it is possible to embed a \<^emph>\<open>text-antiquotation\<close>:
|
||||
\begin{isar}
|
||||
text\<Open>According to the reflexivity axiom \at{thm refl}, we obtain in \<Gamma>
|
||||
for \at{term "fac 5"} the result \at{value "fac 5"}.\<Close>
|
||||
\end{isar}
|
||||
@{boxed_theory_text [display]\<open>
|
||||
text\<open> According to the \<^emph>\<open>reflexivity\<close> axiom @{thm refl},
|
||||
we obtain in \<Gamma> for @{term "fac 5"} the result @{value "fac 5"}.\<close>\<close>}
|
||||
|
||||
which is represented in the generated output by:
|
||||
\begin{out}
|
||||
According to the reflexivity axiom $x = x$, we obtain in $\Gamma$ for $\operatorname{fac} 5$ the result $120$.
|
||||
\end{out}
|
||||
where \inlineisar+refl+ is actually the reference to the axiom of reflexivity in HOL.
|
||||
For the antiquotation \inlineisar+\at{value "fac 5"}+ we assume the usual definition for
|
||||
\inlineisar+fac+ in HOL.
|
||||
@{boxed_pdf [display]\<open>According to the reflexivity axiom $x = x$, we obtain in $\Gamma$ for $\operatorname{fac} 5$ the result $120$.\<close>}
|
||||
|
||||
where \<^theory_text>\<open>refl\<close> is actually the reference to the axiom of reflexivity in HOL.
|
||||
For the antiquotation \<^theory_text>\<open>@{value "''fac 5''"}\<close> we assume the usual definition for
|
||||
\<^theory_text>\<open>fac\<close> in HOL.
|
||||
\<close>
|
||||
|
||||
text*[anti]\<open> Thus, antiquotations can refer to formal content, can be type-checked before being
|
||||
displayed and can be used for calculations before actually being typeset. When editing,
|
||||
Isabelle's PIDE offers auto-completion and error-messages while typing the above
|
||||
\<^emph>\<open>semi-formal\<close> content. \<close>
|
||||
text*[anti::introduction, level = "Some 1"]\<open> Thus, antiquotations can refer to formal content,
|
||||
can be type-checked before being displayed and can be used for calculations before actually being
|
||||
typeset. When editing, Isabelle's PIDE offers auto-completion and error-messages while typing the
|
||||
above \<^emph>\<open>semi-formal\<close> content.\<close>
|
||||
|
||||
section*[isadof::technical,main_author="Some(@{docitem ''adb''}::author)"]\<open> \<^isadof> \<close>
|
||||
section*[isadof::technical,main_author="Some(@{author ''adb''}::author)"]\<open> \<^isadof> \<close>
|
||||
|
||||
text\<open> An \<^isadof> document consists of three components:
|
||||
\<^item> the \<^emph>\<open>ontology definition\<close> which is an Isabelle theory file with definitions
|
||||
for document-classes and all auxiliary datatypes.
|
||||
\<^item> the \<^emph>\<open>core\<close> of the document itself which is an Isabelle theory
|
||||
importing the ontology definition. \<^isadof> provides an own family of text-element
|
||||
commands such as \inlineisar+title*+, \inlineisar+section*+, \inlineisar+text*+, etc.,
|
||||
commands such as \<^theory_text>\<open>title*\<close>, \<^theory_text>\<open>section*\<close>, \<^theory_text>\<open>text*\<close>, etc.,
|
||||
which can be annotated with meta-information defined in the underlying ontology definition.
|
||||
\<^item> the \<^emph>\<open>layout definition\<close> for the given ontology exploiting this meta-information.
|
||||
\<close>
|
||||
|
@ -212,7 +262,7 @@ three parts. Note that the document core \<^emph>\<open>may\<close>, but \<^emph
|
|||
use Isabelle definitions or proofs for checking the formal content---the
|
||||
present paper is actually an example of a document not containing any proof.
|
||||
|
||||
The document generation process of \<^isadof> is currently restricted to \LaTeX, which means
|
||||
The document generation process of \<^isadof> is currently restricted to \<^LaTeX>, which means
|
||||
that the layout is defined by a set of \<^LaTeX> style files. Several layout
|
||||
definitions for one ontology are possible and pave the way that different \<^emph>\<open>views\<close> for
|
||||
the same central document were generated, addressing the needs of different purposes `
|
||||
|
@ -226,65 +276,47 @@ style-files (\<^verbatim>\<open>.sty\<close>-files). In the document core author
|
|||
their source, but this limits the possibility of using different representation technologies,
|
||||
\<^eg>, HTML, and increases the risk of arcane error-messages in generated \<^LaTeX>.
|
||||
|
||||
The \<^isadof> ontology specification language consists basically on a notation for
|
||||
document classes, where the attributes were typed with HOL-types and can be instantiated
|
||||
by terms HOL-terms, \<^ie>, the actual parsers and type-checkers of the Isabelle system were reused.
|
||||
This has the particular advantage that \<^isadof> commands can be arbitrarily mixed with
|
||||
Isabelle/HOL commands providing the machinery for type declarations and term specifications such
|
||||
as enumerations. In particular, document class definitions provide:
|
||||
The \<^isadof> ontology specification language consists basically on a notation for document classes,
|
||||
where the attributes were typed with HOL-types and can be instantiated by terms HOL-terms, \<^ie>,
|
||||
the actual parsers and type-checkers of the Isabelle system were reused. This has the particular
|
||||
advantage that \<^isadof> commands can be arbitrarily mixed with Isabelle/HOL commands providing the
|
||||
machinery for type declarations and term specifications such as enumerations. In particular,
|
||||
document class definitions provide:
|
||||
\<^item> a HOL-type for each document class as well as inheritance,
|
||||
\<^item> support for attributes with HOL-types and optional default values,
|
||||
\<^item> support for overriding of attribute defaults but not overloading, and
|
||||
\<^item> text-elements annotated with document classes; they are mutable
|
||||
instances of document classes.
|
||||
\<close>
|
||||
instances of document classes.\<close>
|
||||
|
||||
text\<open>
|
||||
Attributes referring to other ontological concepts are called \<^emph>\<open>links\<close>.
|
||||
The HOL-types inside the document specification language support built-in types for Isabelle/HOL
|
||||
\inlineisar+typ+'s, \inlineisar+term+'s, and \inlineisar+thm+'s reflecting internal Isabelle's
|
||||
internal types for these entities; when denoted in HOL-terms to instantiate an attribute, for
|
||||
example, there is a specific syntax (called \<^emph>\<open>inner syntax antiquotations\<close>) that is checked by
|
||||
\<^isadof> for consistency.
|
||||
Attributes referring to other ontological concepts are called \<^emph>\<open>links\<close>. The HOL-types inside the
|
||||
document specification language support built-in types for Isabelle/HOL \<^theory_text>\<open>typ\<close>'s, \<^theory_text>\<open>term\<close>'s, and
|
||||
\<^theory_text>\<open>thm\<close>'s reflecting internal Isabelle's internal types for these entities; when denoted in
|
||||
HOL-terms to instantiate an attribute, for example, there is a specific syntax
|
||||
(called \<^emph>\<open>inner syntax antiquotations\<close>) that is checked by \<^isadof> for consistency.
|
||||
|
||||
Document classes can have a \inlineisar+where+ clause containing a regular
|
||||
expression over class names. Classes with such a \inlineisar+where+ were called \<^emph>\<open>monitor classes\<close>.
|
||||
While document classes and their inheritance relation structure meta-data of text-elements
|
||||
in an object-oriented manner, monitor classes enforce structural organization
|
||||
of documents via the language specified by the regular expression
|
||||
enforcing a sequence of text-elements that must belong to the corresponding classes.
|
||||
|
||||
To start using \<^isadof>, one creates an Isabelle project (with the name
|
||||
\inlinebash{IsaDofApplications}):
|
||||
\begin{bash}
|
||||
isabelle mkroot_DOF -o scholarly_paper -t lncs -d IsaDofApplications
|
||||
\end{bash}
|
||||
where the \inlinebash{-o scholarly_paper} specifies the ontology for writing scientific articles and
|
||||
\inlinebash{-t lncs} specifies the use of Springer's \LaTeX-configuration for the Lecture Notes in
|
||||
Computer Science series. The project can be formally checked, including the generation of the
|
||||
article in PDF using the following command:
|
||||
\begin{bash}
|
||||
isabelle build -d . IsaDofApplications
|
||||
\end{bash}
|
||||
\<close>
|
||||
Document classes can have a \<^theory_text>\<open>where\<close> clause containing a regular expression over class names.
|
||||
Classes with such a \<^theory_text>\<open>where\<close> were called \<^emph>\<open>monitor classes\<close>. While document classes and their
|
||||
inheritance relation structure meta-data of text-elements in an object-oriented manner, monitor
|
||||
classes enforce structural organization of documents via the language specified by the regular
|
||||
expression enforcing a sequence of text-elements that belong to the corresponding classes. \<^vs>\<open>-0.4cm\<close>\<close>
|
||||
|
||||
section*[ontomod::text_section]\<open> Modeling Ontologies in \<^isadof> \<close>
|
||||
text\<open> In this section, we will use the \<^isadof> document ontology language
|
||||
for three different application scenarios: for scholarly papers, for mathematical
|
||||
exam sheets as well as standardization documents where the concepts of the
|
||||
standard are captured in the ontology. For space reasons, we will concentrate in all three
|
||||
cases on aspects of the modeling due to space limitations.\<close>
|
||||
text\<open> In this section, we will use the \<^isadof> document ontology language for three different
|
||||
application scenarios: for scholarly papers, for mathematical exam sheets as well as standardization
|
||||
documents where the concepts of the standard are captured in the ontology. For space reasons, we
|
||||
will concentrate in all three cases on aspects of the modeling due to space limitations.\<close>
|
||||
|
||||
subsection*[scholar_onto::example]\<open> The Scholar Paper Scenario: Eating One's Own Dog Food. \<close>
|
||||
text\<open> The following ontology is a simple ontology modeling scientific papers. In this
|
||||
\<^isadof> application scenario, we deliberately refrain from integrating references to
|
||||
(Isabelle) formal content in order demonstrate that \<^isadof> is not a framework from
|
||||
Isabelle users to Isabelle users only.
|
||||
Of course, such references can be added easily and represent a particular strength
|
||||
of \<^isadof>.
|
||||
Isabelle users to Isabelle users only. Of course, such references can be added easily and
|
||||
represent a particular strength of \<^isadof>.\<close>
|
||||
|
||||
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
text*["paper_onto_core"::float,
|
||||
main_caption="\<open>The core of the ontology definition for writing scholarly papers.\<close>"]
|
||||
\<open>@{boxed_theory_text [display]\<open>
|
||||
doc_class title =
|
||||
short_title :: "string option" <= None
|
||||
|
||||
|
@ -299,64 +331,62 @@ doc_class abstract =
|
|||
|
||||
doc_class text_section =
|
||||
main_author :: "author option" <= None
|
||||
todo_list :: "string list" <= "[]"
|
||||
\end{isar}
|
||||
\caption{The core of the ontology definition for writing scholarly papers.}
|
||||
\label{fig:paper-onto-core}
|
||||
\end{figure}
|
||||
The first part of the ontology \inlineisar+scholarly_paper+ (see \autoref{fig:paper-onto-core})
|
||||
todo_list :: "string list" <= "[]"
|
||||
\<close>}\<close>
|
||||
|
||||
text\<open> The first part of the ontology \<^theory_text>\<open>scholarly_paper\<close>
|
||||
(see @{float "paper_onto_core"})
|
||||
contains the document class definitions
|
||||
with the usual text-elements of a scientific paper. The attributes \inlineisar+short_title+,
|
||||
\inlineisar+abbrev+ etc are introduced with their types as well as their default values.
|
||||
Our model prescribes an optional \inlineisar+main_author+ and a todo-list attached to an arbitrary
|
||||
with the usual text-elements of a scientific paper. The attributes \<^theory_text>\<open>short_title\<close>,
|
||||
\<^theory_text>\<open>abbrev\<close> etc are introduced with their types as well as their default values.
|
||||
Our model prescribes an optional \<^theory_text>\<open>main_author\<close> and a todo-list attached to an arbitrary
|
||||
text section; since instances of this class are mutable (meta)-objects of text-elements, they
|
||||
can be modified arbitrarily through subsequent text and of course globally during text evolution.
|
||||
Since \inlineisar+author+ is a HOL-type internally generated by \<^isadof> framework and can therefore
|
||||
appear in the \inlineisar+main_author+ attribute of the \inlineisar+text_section+ class;
|
||||
Since \<^theory_text>\<open>author\<close> is a HOL-type internally generated by \<^isadof> framework and can therefore
|
||||
appear in the \<^theory_text>\<open>main_author\<close> attribute of the \<^theory_text>\<open>text_section\<close> class;
|
||||
semantic links between concepts can be modeled this way.
|
||||
|
||||
The translation of its content to, \<^eg>, Springer's \<^LaTeX> setup for the Lecture Notes in Computer
|
||||
Science Series, as required by many scientific conferences, is mostly straight-forward. \<close>
|
||||
Science Series, as required by many scientific conferences, is mostly straight-forward.
|
||||
\<^vs>\<open>-0.8cm\<close>\<close>
|
||||
|
||||
figure*[fig1::figure,spawn_columns=False,relative_width="95",src="''figures/Dogfood-Intro''"]
|
||||
figure*[fig1::figure,relative_width="95",file_src="''figures/Dogfood-Intro.png''"]
|
||||
\<open> Ouroboros I: This paper from inside \<^dots> \<close>
|
||||
|
||||
text\<open> @{figure \<open>fig1\<close>} shows the corresponding view in the Isabelle/PIDE of thqqe present paper.
|
||||
(*<*)declare_reference*[paper_onto_sections::float](*>*)
|
||||
text\<open>\<^vs>\<open>-0.8cm\<close> @{figure \<open>fig1\<close>} shows the corresponding view in the Isabelle/PIDE of the present paper.
|
||||
Note that the text uses \<^isadof>'s own text-commands containing the meta-information provided by
|
||||
the underlying ontology.
|
||||
We proceed by a definition of \inlineisar+introduction+'s, which we define as the extension of
|
||||
\inlineisar+text_section+ which is intended to capture common infrastructure:
|
||||
\begin{isar}
|
||||
We proceed by a definition of \<^theory_text>\<open>introduction\<close>'s, which we define as the extension of
|
||||
\<^theory_text>\<open>text_section\<close> which is intended to capture common infrastructure:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class introduction = text_section +
|
||||
comment :: string
|
||||
\end{isar}
|
||||
As a consequence of the definition as extension, the \inlineisar+introduction+ class
|
||||
inherits the attributes \inlineisar+main_author+ and \inlineisar+todo_list+ together with
|
||||
\<close>}
|
||||
As a consequence of the definition as extension, the \<^theory_text>\<open>introduction\<close> class
|
||||
inherits the attributes \<^theory_text>\<open>main_author\<close> and \<^theory_text>\<open>todo_list\<close> together with
|
||||
the corresponding default values.
|
||||
|
||||
As a variant of the introduction, we could add here an attribute that contains the formal
|
||||
claims of the article --- either here, or, for example, in the keyword list of the abstract.
|
||||
As type, one could use either the built-in type \inlineisar+term+ (for syntactically correct,
|
||||
but not necessarily proven entity) or \inlineisar+thm+ (for formally proven entities). It suffices
|
||||
As type, one could use either the built-in type \<^theory_text>\<open>term\<close> (for syntactically correct,
|
||||
but not necessarily proven entity) or \<^theory_text>\<open>thm\<close> (for formally proven entities). It suffices
|
||||
to add the line:
|
||||
\begin{isar}
|
||||
@{boxed_theory_text [display]\<open>
|
||||
claims :: "thm list"
|
||||
\end{isar}
|
||||
and to extent the \LaTeX-style accordingly to handle the additional field.
|
||||
Note that \inlineisar+term+ and \inlineisar+thm+ are types reflecting the core-types of the
|
||||
\<close>}
|
||||
and to extent the \<^LaTeX>-style accordingly to handle the additional field.
|
||||
Note that \<^theory_text>\<open>term\<close> and \<^theory_text>\<open>thm\<close> are types reflecting the core-types of the
|
||||
Isabelle kernel. In a corresponding conclusion section, one could model analogously an
|
||||
achievement section; by programming a specific compliance check in SML, the implementation
|
||||
of automated forms of validation check for specific categories of papers is envisageable.
|
||||
Since this requires deeper knowledge in Isabelle programming, however, we consider this out
|
||||
of the scope of this paper.
|
||||
|
||||
|
||||
We proceed more or less conventionally by the subsequent sections (\autoref{fig:paper-onto-sections})
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
doc_class technical = text_section +
|
||||
definition_list :: "string list" <= "[]"
|
||||
|
||||
We proceed more or less conventionally by the subsequent sections (@{float (unchecked)\<open>paper_onto_sections\<close>})\<close>
|
||||
text*["paper_onto_sections"::float,
|
||||
main_caption = "''Various types of sections of a scholarly papers.''"]\<open>
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class example = text_section +
|
||||
comment :: string
|
||||
|
||||
|
@ -368,14 +398,13 @@ doc_class related_work = conclusion +
|
|||
|
||||
doc_class bibliography =
|
||||
style :: "string option" <= "''LNCS''"
|
||||
\end{isar}
|
||||
\caption{Various types of sections of a scholarly papers.}
|
||||
\label{fig:paper-onto-sections}
|
||||
\end{figure}
|
||||
and finish with a monitor class definition that enforces a textual ordering
|
||||
in the document core by a regular expression (\autoref{fig:paper-onto-monitor}).
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
\<close>}\<close>
|
||||
(*<*)declare_reference*[paper_onto_monitor::float](*>*)
|
||||
text\<open>... and finish with a monitor class definition that enforces a textual ordering
|
||||
in the document core by a regular expression (@{float (unchecked) "paper_onto_monitor"}).\<close>
|
||||
text*["paper_onto_monitor"::float,
|
||||
main_caption = "''A monitor for the scholarly paper ontology.''"]\<open>
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class article =
|
||||
trace :: "(title + subtitle + author+ abstract +
|
||||
introduction + technical + example +
|
||||
|
@ -383,23 +412,20 @@ doc_class article =
|
|||
where "(title ~~ \<lbrakk>subtitle\<rbrakk> ~~ \<lbrace>author\<rbrace>$^+$+ ~~ abstract ~~
|
||||
introduction ~~ \<lbrace>technical || example\<rbrace>$^+$ ~~ conclusion ~~
|
||||
bibliography)"
|
||||
\end{isar}
|
||||
\caption{A monitor for the scholarly paper ontology.}
|
||||
\label{fig:paper-onto-monitor}
|
||||
\end{figure}
|
||||
\<close>}
|
||||
\<close>
|
||||
text\<open> We might wish to add a component into our ontology that models figures to be included into
|
||||
the document. This boils down to the exercise of modeling structured data in the style of a
|
||||
functional programming language in HOL and to reuse the implicit HOL-type inside a suitable document
|
||||
class \inlineisar+figure+:
|
||||
\begin{isar}
|
||||
class \<^theory_text>\<open>figure\<close>:
|
||||
@{boxed_theory_text [display]\<open>
|
||||
datatype placement = h | t | b | ht | hb
|
||||
doc_class figure = text_section +
|
||||
relative_width :: "int" (* percent of textwidth *)
|
||||
src :: "string"
|
||||
placement :: placement
|
||||
spawn_columns :: bool <= True
|
||||
\end{isar}
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
text\<open> Alternatively, by including the HOL-libraries for rationals, it is possible to
|
||||
|
@ -407,11 +433,11 @@ use fractions or even mathematical reals. This must be counterbalanced by syntac
|
|||
and semantic convenience. Choosing the mathematical reals, \<^eg>, would have the drawback that
|
||||
attribute evaluation could be substantially more complicated.\<close>
|
||||
|
||||
figure*[fig_figures::figure,spawn_columns=False,relative_width="85",src="''figures/Dogfood-figures''"]
|
||||
figure*[fig_figures::figure,relative_width="85",file_src="''figures/Dogfood-figures.png''"]
|
||||
\<open> Ouroboros II: figures \<^dots> \<close>
|
||||
|
||||
text\<open> The document class \inlineisar+figure+ --- supported by the \<^isadof> text command
|
||||
\inlineisar+figure*+ --- makes it possible to express the pictures and diagrams in this paper
|
||||
text\<open> The document class \<^theory_text>\<open>figure\<close> --- supported by the \<^isadof> text command
|
||||
\<^theory_text>\<open>figure*\<close> --- makes it possible to express the pictures and diagrams in this paper
|
||||
such as @{figure \<open>fig_figures\<close>}.
|
||||
\<close>
|
||||
|
||||
|
@ -434,10 +460,10 @@ We assume that the content has four different types of addressees, which have a
|
|||
text\<open> The latter quality assurance mechanism is used in many universities,
|
||||
where for organizational reasons the execution of an exam takes place in facilities
|
||||
where the author of the exam is not expected to be physically present.
|
||||
Furthermore, we assume a simple grade system (thus, some calculation is required).
|
||||
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
Furthermore, we assume a simple grade system (thus, some calculation is required). \<close>
|
||||
text*["onto_exam"::float,
|
||||
main_caption = "''The core of the ontology modeling math exams.''"]\<open>
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class Author = ...
|
||||
datatype Subject = algebra | geometry | statistical
|
||||
datatype Grade = A1 | A2 | A3
|
||||
|
@ -459,18 +485,18 @@ doc_class Exam_item =
|
|||
concerns :: "ContentClass set"
|
||||
|
||||
type_synonym SubQuestion = string
|
||||
\end{isar}
|
||||
\caption{The core of the ontology modeling math exams.}
|
||||
\label{fig:onto-exam}
|
||||
\end{figure}
|
||||
The heart of this ontology (see \autoref{fig:onto-exam}) is an alternation of questions and answers,
|
||||
\<close>}\<close>
|
||||
|
||||
(*<*)declare_reference*[onto_questions::float](*>*)
|
||||
text\<open>The heart of this ontology (see @{float "onto_exam"}) is an alternation of questions and answers,
|
||||
where the answers can consist of simple yes-no answers (QCM style check-boxes) or lists of formulas.
|
||||
Since we do not
|
||||
assume familiarity of the students with Isabelle (\inlineisar+term+ would assume that this is a
|
||||
assume familiarity of the students with Isabelle (\<^theory_text>\<open>term\<close> would assume that this is a
|
||||
parse-able and type-checkable entity), we basically model a derivation as a sequence of strings
|
||||
(see \autoref{fig:onto-questions}).
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
(see @{float (unchecked)"onto_questions"}).\<close>
|
||||
text*["onto_questions"::float,
|
||||
main_caption = "''An exam can contain different types of questions.''"]\<open>
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class Answer_Formal_Step = Exam_item +
|
||||
justification :: string
|
||||
"term" :: "string"
|
||||
|
@ -494,19 +520,18 @@ doc_class Exercise = Exam_item +
|
|||
content :: "(Task) list"
|
||||
concerns :: "ContentClass set" <= "UNIV"
|
||||
mark :: int
|
||||
\end{isar}
|
||||
\caption{An exam can contain different types of questions.}
|
||||
\label{fig:onto-questions}
|
||||
\end{figure}
|
||||
|
||||
\<close>}\<close>
|
||||
(*<*)declare_reference*[onto_exam_monitor::float](*>*)
|
||||
text\<open>
|
||||
In many institutions, it makes sense to have a rigorous process of validation
|
||||
for exam subjects: is the initial question correct? Is a proof in the sense of the
|
||||
question possible? We model the possibility that the @{term examiner} validates a
|
||||
question by a sample proof validated by Isabelle (see \autoref{fig:onto-exam-monitor}).
|
||||
question by a sample proof validated by Isabelle (see @{float (unchecked) "onto_exam_monitor"}).
|
||||
In our scenario this sample proofs are completely \<^emph>\<open>intern\<close>, \<^ie>, not exposed to the
|
||||
students but just additional material for the internal review process of the exam.
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
students but just additional material for the internal review process of the exam.\<close>
|
||||
text*["onto_exam_monitor"::float,
|
||||
main_caption = "''Validating exams.''"]\<open>
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class Validation =
|
||||
tests :: "term list" <="[]"
|
||||
proofs :: "thm list" <="[]"
|
||||
|
@ -520,14 +545,9 @@ doc_class MathExam=
|
|||
content :: "(Header + Author + Exercise) list"
|
||||
global_grade :: Grade
|
||||
where "\<lbrace>Author\<rbrace>$^+$ ~~ Header ~~ \<lbrace>Exercise ~~ Solution\<rbrace>$^+$ "
|
||||
\end{isar}
|
||||
\caption{Validating exams.}
|
||||
\label{fig:onto-exam-monitor}
|
||||
\end{figure}
|
||||
\<close>
|
||||
|
||||
\<close>}\<close>
|
||||
|
||||
declare_reference*["fig_qcm"::figure]
|
||||
(*<*)declare_reference*["fig_qcm"::figure](*>*)
|
||||
|
||||
text\<open> Using the \<^LaTeX> package hyperref, it is possible to conceive an interactive
|
||||
exam-sheets with multiple-choice and/or free-response elements
|
||||
|
@ -535,14 +555,14 @@ exam-sheets with multiple-choice and/or free-response elements
|
|||
help of the latter, it is possible that students write in a browser a formal mathematical
|
||||
derivation---as part of an algebra exercise, for example---which is submitted to the examiners
|
||||
electronically. \<close>
|
||||
figure*[fig_qcm::figure,spawn_columns=False,
|
||||
relative_width="90",src="''figures/InteractiveMathSheet''"]
|
||||
\<open> A Generated QCM Fragment \<^dots> \<close>
|
||||
figure*[fig_qcm::figure,
|
||||
relative_width="90",file_src="''figures/InteractiveMathSheet.png''"]
|
||||
\<open>A Generated QCM Fragment \<^dots> \<close>
|
||||
|
||||
subsection*[cenelec_onto::example]\<open> The Certification Scenario following CENELEC \<close>
|
||||
text\<open> Documents to be provided in formal certifications (such as CENELEC
|
||||
50126/50128, the DO-178B/C, or Common Criteria) can much profit from the control of ontological consistency:
|
||||
a lot of an evaluators work consists in tracing down the links from requirements over
|
||||
50126/50128, the DO-178B/C, or Common Criteria) can much profit from the control of ontological
|
||||
consistency: a lot of an evaluators work consists in tracing down the links from requirements over
|
||||
assumptions down to elements of evidence, be it in the models, the code, or the tests.
|
||||
In a certification process, traceability becomes a major concern; and providing
|
||||
mechanisms to ensure complete traceability already at the development of the
|
||||
|
@ -554,15 +574,17 @@ of developments targeting certifications. Continuously checking the links betwee
|
|||
and the semi-formal parts of such documents is particularly valuable during the (usually
|
||||
collaborative) development effort.
|
||||
|
||||
As in many other cases, formal certification documents come with an own terminology and
|
||||
pragmatics of what has to be demonstrated and where, and how the trace-ability of requirements through
|
||||
As in many other cases, formal certification documents come with an own terminology and pragmatics
|
||||
of what has to be demonstrated and where, and how the trace-ability of requirements through
|
||||
design-models over code to system environment assumptions has to be assured.
|
||||
\<close>
|
||||
(*<*)declare_reference*["conceptual"::float](*>*)
|
||||
text\<open> In the sequel, we present a simplified version of an ontological model used in a
|
||||
case-study~ @{cite "bezzecchi.ea:making:2018"}. We start with an introduction of the concept of requirement
|
||||
(see \autoref{fig:conceptual}).
|
||||
\begin{figure}
|
||||
\begin{isar}
|
||||
(see @{float (unchecked) "conceptual"}). \<close>
|
||||
text*["conceptual"::float,
|
||||
main_caption = "''Modeling requirements.''"]\<open>
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class requirement = long_name :: "string option"
|
||||
|
||||
doc_class requirement_analysis = no :: "nat"
|
||||
|
@ -575,11 +597,9 @@ datatype ass_kind = informal | semiformal | formal
|
|||
|
||||
doc_class assumption = requirement +
|
||||
assumption_kind :: ass_kind <= informal
|
||||
\end{isar}
|
||||
\caption{Modeling requirements.}
|
||||
\label{fig:conceptual}
|
||||
\end{figure}
|
||||
Such ontologies can be enriched by larger explanations and examples, which may help
|
||||
\<close>}\<close>
|
||||
|
||||
text\<open>Such ontologies can be enriched by larger explanations and examples, which may help
|
||||
the team of engineers substantially when developing the central document for a certification,
|
||||
like an explication what is precisely the difference between an \<^emph>\<open>hypothesis\<close> and an
|
||||
\<^emph>\<open>assumption\<close> in the context of the evaluation standard. Since the PIDE makes for each
|
||||
|
@ -601,71 +621,70 @@ is the category \<^emph>\<open>safety related application condition\<close> (or
|
|||
for short) which is used for \<^emph>\<open>ec\<close>'s that establish safety properties
|
||||
of the evaluation target. Their track-ability throughout the certification
|
||||
is therefore particularly critical. This is naturally modeled as follows:
|
||||
\begin{isar}
|
||||
@{boxed_theory_text [display]\<open>
|
||||
doc_class ec = assumption +
|
||||
assumption_kind :: ass_kind <= (*default *) formal
|
||||
|
||||
doc_class srac = ec +
|
||||
assumption_kind :: ass_kind <= (*default *) formal
|
||||
\end{isar}
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
section*[ontopide::technical]\<open> Ontology-based IDE support \<close>
|
||||
text\<open> We present a selection of interaction scenarios @{example \<open>scholar_onto\<close>}
|
||||
and @{example \<open>cenelec_onto\<close>} with Isabelle/PIDE instrumented by \<^isadof>. \<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["text_elements"::float]
|
||||
declare_reference*["hyperlinks"::float]
|
||||
(*>*)
|
||||
|
||||
subsection*[scholar_pide::example]\<open> A Scholarly Paper \<close>
|
||||
text\<open> In \autoref{fig-Dogfood-II-bgnd1} and \autoref{fig-bgnd-text_section} we show how
|
||||
text\<open> In @{float (unchecked) "text_elements"}~(a)
|
||||
and @{float (unchecked) "text_elements"}~(b)we show how
|
||||
hovering over links permits to explore its meta-information.
|
||||
Clicking on a document class identifier permits to hyperlink into the corresponding
|
||||
class definition (\autoref{fig:Dogfood-IV-jumpInDocCLass}); hovering over an attribute-definition
|
||||
(which is qualified in order to disambiguate; \autoref{fig:Dogfood-V-attribute}).
|
||||
class definition (@{float (unchecked) "hyperlinks"}~(a)); hovering over an attribute-definition
|
||||
(which is qualified in order to disambiguate; @{float (unchecked) "hyperlinks"}~(b)).
|
||||
\<close>
|
||||
|
||||
side_by_side_figure*["text-elements"::side_by_side_figure,anchor="''fig-Dogfood-II-bgnd1''",
|
||||
caption="''Exploring a Reference of a Text-Element.''",relative_width="48",
|
||||
src="''figures/Dogfood-II-bgnd1''",anchor2="''fig-bgnd-text_section''",
|
||||
caption2="''Exploring the class of a text element.''",relative_width2="47",
|
||||
src2="''figures/Dogfood-III-bgnd-text_section''"]\<open> Exploring text elements. \<close>
|
||||
|
||||
side_by_side_figure*["hyperlinks"::side_by_side_figure,anchor="''fig:Dogfood-IV-jumpInDocCLass''",
|
||||
caption="''Hyperlink to Class-Definition.''",relative_width="48",
|
||||
src="''figures/Dogfood-IV-jumpInDocCLass''",anchor2="''fig:Dogfood-V-attribute''",
|
||||
caption2="''Exploring an attribute.''",relative_width2="47",
|
||||
src2="''figures/Dogfood-III-bgnd-text_section''"]\<open> Hyperlinks.\<close>
|
||||
text*["text_elements"::float,
|
||||
main_caption="\<open>Exploring text elements.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=53, height=5, caption="Exploring a reference of a text element.") "figures/Dogfood-II-bgnd1.png"
|
||||
}\<^hfill>@{fig_content (width=47, height=5, caption="Exploring the class of a text element.") "figures/Dogfood-III-bgnd-text_section.png"}
|
||||
\<close>
|
||||
|
||||
text*["hyperlinks"::float,
|
||||
main_caption="\<open>Hyperlinks.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=48, caption="Hyperlink to Class-Definition.") "figures/Dogfood-IV-jumpInDocCLass.png"
|
||||
}\<^hfill>@{fig_content (width=47, caption="Exploring an attribute.") "figures/Dogfood-V-attribute.png"}
|
||||
\<close>
|
||||
|
||||
declare_reference*["figDogfoodVIlinkappl"::figure]
|
||||
text\<open> An ontological reference application in \autoref{figDogfoodVIlinkappl}: the ontology-dependant
|
||||
antiquotation \inlineisar|@ {example ...}| refers to the corresponding text-elements. Hovering allows
|
||||
for inspection, clicking for jumping to the definition. If the link does not exist or has a
|
||||
non-compatible type, the text is not validated. \<close>
|
||||
|
||||
figure*[figDogfoodVIlinkappl::figure,relative_width="80",src="''figures/Dogfood-V-attribute''"]
|
||||
\<open> Exploring an attribute (hyperlinked to the class). \<close>
|
||||
subsection*[cenelec_pide::example]\<open> CENELEC \<close>
|
||||
declare_reference*[figfig3::figure]
|
||||
text\<open> The corresponding view in @{docitem (unchecked) \<open>figfig3\<close>} shows core part of a document,
|
||||
(*<*)declare_reference*[figfig3::figure](*>*)
|
||||
text\<open> The corresponding view in @{figure (unchecked) \<open>figfig3\<close>} shows core part of a document,
|
||||
coherent to the @{example \<open>cenelec_onto\<close>}. The first sample shows standard Isabelle antiquotations
|
||||
@{cite "wenzel:isabelle-isar:2017"} into formal entities of a theory. This way, the informal parts
|
||||
of a document get ``formal content'' and become more robust under change.\<close>
|
||||
|
||||
figure*[figfig3::figure,relative_width="80",src="''figures/antiquotations-PIDE''"]
|
||||
figure*[figfig3::figure,relative_width="80",file_src="''figures/antiquotations-PIDE.png''"]
|
||||
\<open> Standard antiquotations referring to theory elements.\<close>
|
||||
|
||||
declare_reference*[figfig5::figure]
|
||||
(*<*)declare_reference*[figfig5::figure] (*>*)
|
||||
text\<open> The subsequent sample in @{figure (unchecked) \<open>figfig5\<close>} shows the definition of an
|
||||
\<^emph>\<open>safety-related application condition\<close>, a side-condition of a theorem which
|
||||
has the consequence that a certain calculation must be executed sufficiently fast on an embedded
|
||||
device. This condition can not be established inside the formal theory but has to be
|
||||
checked by system integration tests.\<close>
|
||||
|
||||
figure*[figfig5::figure, relative_width="80", src="''figures/srac-definition''"]
|
||||
figure*[figfig5::figure, relative_width="80", file_src="''figures/srac-definition.png''"]
|
||||
\<open> Defining a SRAC reference \<^dots> \<close>
|
||||
figure*[figfig7::figure, relative_width="80", src="''figures/srac-as-es-application''"]
|
||||
figure*[figfig7::figure, relative_width="80", file_src="''figures/srac-as-es-application.png''"]
|
||||
\<open> Using a SRAC as EC document reference. \<close>
|
||||
|
||||
text\<open> Now we reference in @{figure (unchecked) \<open>figfig7\<close>} this safety-related condition;
|
||||
text\<open> Now we reference in @{figure \<open>figfig7\<close>} this safety-related condition;
|
||||
however, this happens in a context where general \<^emph>\<open>exported constraints\<close> are listed.
|
||||
\<^isadof>'s checks establish that this is legal in the given ontology.
|
||||
|
||||
|
@ -677,7 +696,7 @@ informal parts. \<close>
|
|||
section*[onto_future::technical]\<open> Monitor Classes \<close>
|
||||
text\<open> Besides sub-typing, there is another relation between
|
||||
document classes: a class can be a \<^emph>\<open>monitor\<close> to other ones,
|
||||
which is expressed by the occurrence of a \inlineisar+where+ clause
|
||||
which is expressed by the occurrence of a @{theory_text \<open>where\<close>} clause
|
||||
in the document class definition containing a regular
|
||||
expression (see @{example \<open>scholar_onto\<close>}).
|
||||
While class-extension refers to data-inheritance of attributes,
|
||||
|
@ -686,8 +705,8 @@ in which instances of monitored classes may occur. \<close>
|
|||
|
||||
text\<open>
|
||||
The control of monitors is done by the commands:
|
||||
\<^item> \inlineisar+open_monitor* + <doc-class>
|
||||
\<^item> \inlineisar+close_monitor* + <doc-class>
|
||||
\<^item> \<^theory_text>\<open>open_monitor*\<close> \<^emph>\<open><doc-class>\<close>
|
||||
\<^item> \<^theory_text>\<open>close_monitor*\<close> \<^emph>\<open><doc-class>\<close>
|
||||
\<close>
|
||||
text\<open>
|
||||
where the automaton of the monitor class is expected to be in a final state. In the final state,
|
||||
|
@ -735,8 +754,7 @@ work in this area we are aware of is rOntorium~@{cite "rontorium"}, a plugin
|
|||
for \<^Protege> that integrates R~@{cite "adler:r:2010"} into an
|
||||
ontology environment. Here, the main motivation behind this
|
||||
integration is to allow for statistically analyze ontological
|
||||
documents. Thus, this is complementary to our work.
|
||||
\<close>
|
||||
documents. Thus, this is complementary to our work.\<close>
|
||||
|
||||
text\<open> \<^isadof> in its present form has a number of technical short-comings as well
|
||||
as potentials not yet explored. On the long list of the short-comings is the
|
11
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/ROOT → Isabelle_DOF-Example-I/ROOT
Executable file → Normal file
|
@ -1,13 +1,14 @@
|
|||
session "2018-cicm-isabelle_dof-applications" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", quick_and_dirty = true]
|
||||
chapter AFP
|
||||
|
||||
session "Isabelle_DOF-Example-I" (AFP) = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
theories
|
||||
IsaDofApplications
|
||||
document_files
|
||||
"isadof.cfg"
|
||||
"root.bib"
|
||||
"authorarchive.sty"
|
||||
"preamble.tex"
|
||||
"build"
|
||||
"lstisadof.sty"
|
||||
"lstisadof-manual.sty"
|
||||
"figures/isabelle-architecture.pdf"
|
||||
"figures/Dogfood-Intro.png"
|
||||
"figures/InteractiveMathSheet.png"
|
|
@ -0,0 +1,345 @@
|
|||
%% Copyright (C) 2008-2023 Achim D. Brucker, https://www.brucker.ch
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1.3c of the License, or (at your option) any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
\NeedsTeXFormat{LaTeX2e}\relax
|
||||
\ProvidesPackage{authorarchive}
|
||||
[2023/02/10 v1.3.0
|
||||
Self-archiving information for scientific publications.]
|
||||
%
|
||||
\PassOptionsToPackage{hyphens}{url}
|
||||
%
|
||||
\RequirePackage{ifthen}
|
||||
\RequirePackage[inline]{enumitem}
|
||||
\RequirePackage{orcidlink}
|
||||
\RequirePackage{eso-pic}
|
||||
\RequirePackage{intopdf}
|
||||
\RequirePackage{kvoptions}
|
||||
\RequirePackage{hyperref}
|
||||
\RequirePackage{calc}
|
||||
\RequirePackage{qrcode}
|
||||
\RequirePackage{etoolbox}
|
||||
\newrobustcmd\BibTeX{Bib\TeX}
|
||||
%
|
||||
%Better url breaking
|
||||
\g@addto@macro{\UrlBreaks}{\UrlOrds}
|
||||
%
|
||||
% Option declarations
|
||||
% -------------------
|
||||
\SetupKeyvalOptions{
|
||||
family=AA,
|
||||
prefix=AA@
|
||||
}
|
||||
%
|
||||
\DeclareStringOption[.]{bibtexdir}
|
||||
\DeclareStringOption[https://duckduckgo.com/?q=]{baseurl}
|
||||
\DeclareStringOption[.pdf]{suffix}
|
||||
\DeclareStringOption[UNKNOWN PUBLISHER]{publisher}[]
|
||||
\DeclareStringOption[UNKNOWN YEAR]{year}[]
|
||||
\DeclareStringOption[]{key}[]
|
||||
\DeclareStringOption[]{doi}[]
|
||||
\DeclareStringOption[]{doiText}[]
|
||||
\DeclareStringOption[]{publisherurl}[]
|
||||
\DeclareStringOption[UNKNOWN START PAGE]{startpage}[]
|
||||
\DeclareStringOption[UNKNOWN PUBLICATION]{publication}[]
|
||||
|
||||
\DeclareBoolOption{ACM}
|
||||
\DeclareBoolOption{acmart}
|
||||
\DeclareBoolOption{ENTCS}
|
||||
\DeclareBoolOption{IEEE}
|
||||
\DeclareBoolOption{LNCS}
|
||||
\DeclareBoolOption{LNI}
|
||||
\DeclareBoolOption{nocopyright}
|
||||
\DeclareBoolOption{nourl}
|
||||
\DeclareBoolOption{nobib}
|
||||
\DeclareBoolOption{orcidicon}
|
||||
%\ProcessOptions\relax
|
||||
|
||||
|
||||
% Default option rule
|
||||
\DeclareDefaultOption{%
|
||||
\ifx\CurrentOptionValue\relax
|
||||
\PackageWarningNoLine{\@currname}{%
|
||||
Unknown option `\CurrentOption'\MessageBreak
|
||||
is passed to package `authorarchive'%
|
||||
}%
|
||||
% Pass the option to package color.
|
||||
% Again it is better to expand \CurrentOption.
|
||||
\expandafter\PassOptionsToPackage\expandafter{\CurrentOption}{color}%
|
||||
\else
|
||||
% Package color does not take options with values.
|
||||
% We provide the standard LaTeX error.
|
||||
\@unknownoptionerror
|
||||
\fi
|
||||
}
|
||||
\ProcessKeyvalOptions*
|
||||
|
||||
\newcommand{\AA@defIncludeFiles}{
|
||||
\def\AA@bibBibTeX{\AA@bibtexdir/\AA@key.bib}
|
||||
\def\AA@bibBibTeXLong{\AA@bibtexdir/\AA@key.bibtex}
|
||||
\def\AA@bibWord{\AA@bibtexdir/\AA@key.word.xml}
|
||||
\def\AA@bibEndnote{\AA@bibtexdir/\AA@key.enw}
|
||||
\def\AA@bibRIS{\AA@bibtexdir/\AA@key.ris}
|
||||
}
|
||||
\AA@defIncludeFiles
|
||||
|
||||
\newboolean{AA@bibExists}
|
||||
\setboolean{AA@bibExists}{false}
|
||||
\newcommand{\AA@defIncludeSwitches}{
|
||||
\IfFileExists{\AA@bibBibTeX}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibBibTeXLong}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibWord}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibEndnote}{\setboolean{AA@bibExists}{true}}{}
|
||||
\IfFileExists{\AA@bibRIS}{\setboolean{AA@bibExists}{true}}{}
|
||||
}
|
||||
\AA@defIncludeSwitches
|
||||
|
||||
|
||||
% Provide command for dynamic configuration setup
|
||||
% \def\authorsetup{\kvsetkeys{AA}}
|
||||
\newcommand{\authorsetup}[1]{%
|
||||
\kvsetkeys{AA}{#1}
|
||||
\AA@defIncludeFiles
|
||||
\AA@defIncludeSwitches
|
||||
}
|
||||
|
||||
% Load local configuration
|
||||
\InputIfFileExists{authorarchive.config}{}{}
|
||||
|
||||
% define proxy command for setting PDF attributes
|
||||
\ExplSyntaxOn
|
||||
\@ifundefined{pdfmanagement_add:nnn}{%
|
||||
\newcommand{\AA@pdfpagesattribute}[2]{\pdfpagesattr{/#1 #2}}%
|
||||
}{%
|
||||
\newcommand{\AA@pdfpagesattribute}[2]{\pdfmanagement_add:nnn{Pages}{#1}{#2}}%
|
||||
}%
|
||||
\ExplSyntaxOff
|
||||
|
||||
\newlength\AA@x
|
||||
\newlength\AA@y
|
||||
\newlength\AA@width
|
||||
|
||||
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
|
||||
|
||||
\newcommand{\authorcrfont}{\footnotesize}
|
||||
\newcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{.2cm-\paperheight}){#1}}}
|
||||
\newcommand{\authorwidth}[1]{\setlength{\AA@width}{#1}}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
|
||||
\def\AA@pageinfo{}
|
||||
\ifthenelse{\equal{\AA@startpage}{UNKNOWN START PAGE}}{%
|
||||
}{%
|
||||
\setcounter{page}{\AA@startpage}%
|
||||
\def\AA@pageinfo{pp. \thepage--\pageref{\aa@lastpage}, }
|
||||
}
|
||||
|
||||
|
||||
|
||||
%%%% sig-alternate.cls
|
||||
\ifAA@ACM%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=ACM}
|
||||
}{}
|
||||
\global\boilerplate={}
|
||||
\global\copyrightetc={}
|
||||
\renewcommand{\conferenceinfo}[2]{}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\setlength\AA@x{1in+\hoffset+\oddsidemargin}
|
||||
\setlength\AA@y{-\textheight+\topmargin+\headheight-\footskip} % -\voffset-\topmargin-\headheight-\footskip}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{\AA@y}){#1}}
|
||||
\setlength{\AA@width}{\columnwidth}
|
||||
\fi
|
||||
%
|
||||
%%%% acmart.cls
|
||||
\ifAA@acmart%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=ACM}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\AtPageUpperLeft{\put(\LenToUnit{\AA@x},\LenToUnit{0.2cm-\paperheight}){#1}}}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\fi
|
||||
%
|
||||
%%%% LNCS
|
||||
\ifAA@LNCS%
|
||||
\ifAA@orcidicon%
|
||||
\renewcommand{\orcidID}[1]{\orcidlink{#1}}
|
||||
\else\relax\fi%
|
||||
%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=Springer-Verlag}
|
||||
}{}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\@ifclasswith{llncs}{a4paper}{%
|
||||
\AA@pdfpagesattribute{CropBox}{[92 114 523 780]}%
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},40){#1}}%
|
||||
}{%
|
||||
\AA@pdfpagesattribute{CropBox}{[92 65 523 731]}%
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},23){#1}}%
|
||||
}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\setcounter{tocdepth}{2}
|
||||
\fi
|
||||
%
|
||||
%%%% LNI
|
||||
\ifAA@LNI%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=GI}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},35){#1}}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\AA@pdfpagesattribute{CropBox}{[70 65 526.378 748.15]}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\setcounter{tocdepth}{2}
|
||||
\fi
|
||||
%
|
||||
%%%% ENTCS
|
||||
\ifAA@ENTCS%
|
||||
\addtolength{\voffset}{1cm}
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=Elsevier Science B.~V.}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},\LenToUnit{-.5cm-\the\ht\AA@authoratBox}){#1}}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\fi
|
||||
%
|
||||
%%%% IEEE
|
||||
\ifAA@IEEE%
|
||||
\ifthenelse{\equal{\AA@publisher}{UNKNOWN PUBLISHER}}{%
|
||||
\setkeys{AA}{publisher=IEEE}
|
||||
}{}
|
||||
\renewcommand{\authorat}[1]{\put(\LenToUnit{\AA@x},6){#1}}
|
||||
\renewcommand{\authorcrfont}{\scriptsize}
|
||||
\setlength{\AA@width}{\textwidth}
|
||||
\setcounter{tocdepth}{2}
|
||||
\fi
|
||||
%
|
||||
|
||||
\hypersetup{%
|
||||
draft = false,
|
||||
bookmarksopen = true,
|
||||
bookmarksnumbered= true,
|
||||
}
|
||||
|
||||
\@ifpackageloaded{totpages}{%
|
||||
\def\aa@lastpage{TotPages}
|
||||
}{%
|
||||
\RequirePackage{lastpage}
|
||||
\def\aa@lastpage{LastPage}
|
||||
}
|
||||
\newsavebox{\AA@authoratBox}
|
||||
|
||||
\AddToShipoutPicture*{%
|
||||
\setlength{\unitlength}{1mm}%
|
||||
\savebox{\AA@authoratBox}{%
|
||||
\parbox{1.4cm}{%
|
||||
\bgroup%
|
||||
\normallineskiplimit=0pt%
|
||||
\ifAA@nourl%
|
||||
\ifx\AA@doi\@empty\relax%
|
||||
\else%
|
||||
\qrcode[hyperlink,height=1.17cm,padding]{https://doi.org/\AA@doi}%
|
||||
\fi%
|
||||
\else%
|
||||
\qrcode[hyperlink,height=1.17cm,padding]{\AA@baseurl/\AA@key\AA@suffix}%
|
||||
\fi%
|
||||
\egroup%
|
||||
}%
|
||||
\ifAA@nourl\ifx\AA@doi\@empty\addtolength{\AA@width}{1.4cm}\fi\fi
|
||||
\parbox{\AA@width-1.4cm}{\authorcrfont%
|
||||
\ifAA@LNCS%
|
||||
\AA@publication, \AA@pageinfo \AA@year. %
|
||||
\ifAA@nocopyright\else
|
||||
\textcopyright~\AA@year~\AA@publisher.
|
||||
\fi
|
||||
This is the author's
|
||||
version of the work. It is posted
|
||||
\ifAA@nourl\relax\else%
|
||||
at \url{\AA@baseurl/\AA@key\AA@suffix} %
|
||||
\fi
|
||||
\ifAA@nocopyright\relax\else
|
||||
by permission of \AA@publisher{}
|
||||
\fi
|
||||
for your personal use.
|
||||
\ifx\AA@doi\@empty%
|
||||
\relax
|
||||
\else
|
||||
The final publication is available at Springer via
|
||||
\ifx\AA@doiText\@empty%
|
||||
\url{https://doi.org/\AA@doi}.
|
||||
\else
|
||||
\href{https://doi.org/\AA@doi}{\AA@doiText}.
|
||||
\fi
|
||||
\fi
|
||||
\else
|
||||
\ifAA@nocopyright\relax\else
|
||||
\textcopyright~\AA@year~\AA@publisher. %
|
||||
\fi%
|
||||
This is the author's
|
||||
version of the work. It is posted
|
||||
\ifAA@nourl\relax\else%
|
||||
at \url{\AA@baseurl/\AA@key\AA@suffix} %
|
||||
\fi
|
||||
\ifAA@nocopyright\relax\else
|
||||
by permission of \AA@publisher{} %
|
||||
\fi
|
||||
for your personal use. Not for redistribution. The definitive
|
||||
version was published in \emph{\AA@publication}, \AA@pageinfo \AA@year%
|
||||
\ifx\AA@doi\@empty%
|
||||
\ifx\AA@publisherurl\@empty%
|
||||
.%
|
||||
\else
|
||||
\url{\AA@publisherurl}.%
|
||||
\fi
|
||||
\else
|
||||
\ifx\AA@doiText\@empty%
|
||||
, doi: \href{https://doi.org/\AA@doi}{\AA@doi}.%
|
||||
\else
|
||||
, doi: \href{https://doi.org/\AA@doi}{\AA@doiText}.%
|
||||
\fi
|
||||
\fi
|
||||
\fi
|
||||
\ifAA@nobib\relax\else%
|
||||
\ifthenelse{\boolean{AA@bibExists}}{%
|
||||
\hfill
|
||||
\begin{itemize*}[label={}, itemjoin={,}]
|
||||
\IfFileExists{\AA@bibBibTeX}{%
|
||||
\item \expanded{\attachandlink[\AA@key.bib]{\AA@bibBibTeX}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}}%
|
||||
}{%
|
||||
\IfFileExists{\AA@bibBibTeXLong}{%
|
||||
\item \expanded{\attachandlink[\AA@key.bib]{\AA@bibBibTeXLong}[application/x-bibtex]{BibTeX entry of this paper}{\BibTeX}}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibBibTeX{} (and no \AA@bibBibTeXLong) found. Not embedded reference in BibTeX format.}%
|
||||
}%
|
||||
}%
|
||||
\IfFileExists{\AA@bibWord}{%
|
||||
\item \expanded{\attachandlink[\AA@key.word.xml]{\AA@bibWord}[application/xml]{XML entry of this paper (e.g., for Word 2007 and later)}{Word}}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibWord{} found. Not embedded reference for Word 2007 and later.}%
|
||||
}%
|
||||
\IfFileExists{\AA@bibEndnote}{%
|
||||
\item \expanded{\attachandlink[\AA@key.enw]{\AA@bibEndnote}[application/x-endnote-refer]{Endnote entry of this paper}{EndNote}}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibEndnote{} found. Not embedded reference in Endnote format.}%
|
||||
}%
|
||||
\IfFileExists{\AA@bibRIS}{%
|
||||
\item \expanded{\attachandlink[\AA@key.ris]{\AA@bibRIS}[application/x-research-info-systems]{RIS entry of this paper}{RIS}}%
|
||||
}{%
|
||||
\typeout{No file \AA@bibRIS{} found. Not embedded reference in RIS format.}%
|
||||
}%
|
||||
\end{itemize*}\\
|
||||
}{%
|
||||
\PackageError{authorarchive}{No bibliographic files found. Specify option 'nobib' if this is intended.}
|
||||
}
|
||||
\fi
|
||||
}
|
||||
}
|
||||
\authorat{\raisebox{\the\ht\AA@authoratBox}{\usebox{\AA@authoratBox}}}
|
||||
}
|
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 14 KiB |
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
Before Width: | Height: | Size: 85 KiB After Width: | Height: | Size: 85 KiB |
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
Before Width: | Height: | Size: 75 KiB After Width: | Height: | Size: 75 KiB |
Before Width: | Height: | Size: 96 KiB After Width: | Height: | Size: 96 KiB |
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 57 KiB |
Before Width: | Height: | Size: 67 KiB After Width: | Height: | Size: 67 KiB |
Before Width: | Height: | Size: 50 KiB After Width: | Height: | Size: 50 KiB |
49
examples/technical_report/Isabelle_DOF-Manual/document/lstisadof-manual.sty → Isabelle_DOF-Example-I/document/lstisadof-manual.sty
Executable file → Normal file
|
@ -90,9 +90,7 @@
|
|||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{isar!60!black}
|
||||
,sharp corners
|
||||
,before skip balanced=0.5\baselineskip plus 2pt
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
%,before skip balanced=0.5\baselineskip plus 2pt % works only with Tex Live 2020 and later
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=isar!60!black,xshift=0pt,anchor=north
|
||||
|
@ -136,11 +134,12 @@
|
|||
\lstloadlanguages{ML}
|
||||
\providecolor{sml}{named}{red}
|
||||
\lstdefinestyle{sml}{
|
||||
basicstyle=\ttfamily,%
|
||||
commentstyle=\itshape,%
|
||||
keywordstyle=\bfseries\color{CornflowerBlue},%
|
||||
ndkeywordstyle=\color{green},%
|
||||
language=ML
|
||||
,escapechar=ë%
|
||||
,basicstyle=\ttfamily%
|
||||
,commentstyle=\itshape%
|
||||
,keywordstyle=\bfseries\color{CornflowerBlue}%
|
||||
,ndkeywordstyle=\color{green}%
|
||||
,language=ML
|
||||
% ,literate={%
|
||||
% {<@>}{@}1%
|
||||
% }
|
||||
|
@ -150,7 +149,7 @@
|
|||
,tagstyle=\color{CornflowerBlue}%
|
||||
,markfirstintag=true%
|
||||
}%
|
||||
\def\inlinesml{\lstinline[style=sml,breaklines=true,mathescape,breakatwhitespace=true]}
|
||||
\def\inlinesml{\lstinline[style=sml,breaklines=true,breakatwhitespace=true]}
|
||||
\newtcblisting{sml}[1][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
|
@ -170,7 +169,6 @@
|
|||
style=sml
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,mathescape
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
|
@ -296,3 +294,34 @@
|
|||
}%
|
||||
%% </bash>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% <config>
|
||||
\providecolor{config}{named}{gray}
|
||||
\newtcblisting{config}[2][]{%
|
||||
listing only%
|
||||
,boxrule=0pt
|
||||
,boxsep=0pt
|
||||
,colback=white!90!config
|
||||
,enhanced jigsaw
|
||||
,borderline west={2pt}{0pt}{config!60!black}
|
||||
,sharp corners
|
||||
% ,before skip=10pt
|
||||
% ,after skip=10pt
|
||||
,enlarge top by=0mm
|
||||
,enhanced
|
||||
,overlay={\node[draw,fill=config!60!black,xshift=0pt,anchor=north
|
||||
east,font=\bfseries\footnotesize\color{white}]
|
||||
at (frame.north east) {#2};}
|
||||
,listing options={
|
||||
breakatwhitespace=true
|
||||
,columns=flexible%
|
||||
,basicstyle=\small\ttfamily
|
||||
,mathescape
|
||||
,#1
|
||||
}
|
||||
}%
|
||||
%% </config>
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
%% Copyright (C) 2018 The University of Sheffield
|
||||
%% 2018 The University of Paris-Saclay
|
||||
%%
|
||||
%% License:
|
||||
%% This program can be redistributed and/or modified under the terms
|
||||
%% of the LaTeX Project Public License Distributed from CTAN
|
||||
%% archives in directory macros/latex/base/lppl.txt; either
|
||||
%% version 1 of the License, or any later version.
|
||||
%% OR
|
||||
%% The 2-clause BSD-style license.
|
||||
%%
|
||||
%% SPDX-License-Identifier: LPPL-1.0+ OR BSD-2-Clause
|
||||
|
||||
%% This is a placeholder for user-specific configuration and packages.
|
||||
|
||||
|
||||
\IfFileExists{beramono.sty}{\usepackage[scaled=0.88]{beramono}}{}%
|
||||
\IfFileExists{upquote.sty}{\usepackage{upquote}}{}%
|
||||
\usepackage{textcomp}
|
||||
\usepackage{xcolor}
|
||||
\usepackage{paralist}
|
||||
\usepackage{listings}
|
||||
\usepackage{lstisadof-manual}
|
||||
|
||||
\providecommand{\isactrlemph}[1]{\emph{#1}}
|
||||
\usepackage[LNCS,
|
||||
orcidicon,
|
||||
key=brucker.ea-isabelle-ontologies-2018,
|
||||
year=2018,
|
||||
publication={F. Rabe et al. (Eds.): CICM 2018, LNAI 11006},
|
||||
nobib,
|
||||
startpage={1},
|
||||
doi={10.1007/978-3-319-96812-4_3},
|
||||
doiText={10.1007/978-3-319-96812-4\_3},
|
||||
]{authorarchive}
|
||||
\authorrunning{A. D. Brucker et al.}
|
||||
\pagestyle{headings}
|
||||
|
||||
|
||||
\title{<TITLE>}
|
||||
\author{<AUTHOR>}
|
||||
|
||||
%%% Local Variables:
|
||||
%%% mode: latex
|
||||
%%% TeX-master: "root.tex"
|
||||
%%% End:
|
4
examples/scholarly_paper/2018-cicm-isabelle_dof-applications/document/root.bib → Isabelle_DOF-Example-I/document/root.bib
Executable file → Normal file
|
@ -108,7 +108,7 @@
|
|||
volume = 2283,
|
||||
doi = {10.1007/3-540-45949-9},
|
||||
abstract = {This book is a self-contained introduction to interactive
|
||||
proof in higher-order logic (\acs{hol}), using the proof
|
||||
proof in higher-order logic HOL, using the proof
|
||||
assistant Isabelle2002. It is a tutorial for potential
|
||||
users rather than a monograph for researchers. The book has
|
||||
three parts.
|
||||
|
@ -121,7 +121,7 @@
|
|||
such advanced topics as nested and mutual recursion. 2.
|
||||
Logic and Sets presents a collection of lower-level tactics
|
||||
that you can use to apply rules selectively. It also
|
||||
describes Isabelle/\acs{hol}'s treatment of sets, functions
|
||||
describes Isabelle/HOL's treatment of sets, functions
|
||||
and relations and explains how to define sets inductively.
|
||||
One of the examples concerns the theory of model checking,
|
||||
and another is drawn from a classic textbook on formal
|
|
@ -0,0 +1,9 @@
|
|||
chapter AFP
|
||||
|
||||
session "Isabelle_DOF-Example-II" (AFP) = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
theories
|
||||
"paper"
|
||||
document_files
|
||||
"root.bib"
|
||||
"preamble.tex"
|
|
@ -1,6 +1,8 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
||||
|
||||
\usepackage{stmaryrd}
|
||||
\usepackage{pifont}% http://ctan.org/pkg/pifont
|
||||
|
||||
|
||||
\title{<TITLE>}
|
||||
\author{<AUTHOR>}
|
|
@ -6870,7 +6870,7 @@ isbn="978-3-540-48509-4"
|
|||
title = {{Isabelle's} Logic: {HOL}},
|
||||
author = {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
|
||||
year = 2009,
|
||||
misc = {\url{http://isabelle.in.tum.de/library/HOL/}}
|
||||
misc = {\url{https://isabelle.in.tum.de/library/HOL/}}
|
||||
}
|
||||
|
||||
@InProceedings{ garson.ea:security:2008,
|
||||
|
@ -11000,7 +11000,7 @@ isbn="978-1-4471-3182-3"
|
|||
journal = {Archive of Formal Proofs},
|
||||
month = apr,
|
||||
year = 2019,
|
||||
note = {\url{http://isa-afp.org/entries/HOL-CSP.html}},
|
||||
note = {\url{https://isa-afp.org/entries/HOL-CSP.html}},
|
||||
ISSN = {2150-914x},
|
||||
}
|
||||
|
|
@ -3,6 +3,8 @@ theory "paper"
|
|||
imports "Isabelle_DOF.scholarly_paper"
|
||||
begin
|
||||
|
||||
use_template "scrartcl"
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
open_monitor*[this::article]
|
||||
|
||||
|
@ -10,10 +12,13 @@ declare[[ strict_monitor_checking = false]]
|
|||
declare[[ Definition_default_class = "definition"]]
|
||||
declare[[ Lemma_default_class = "lemma"]]
|
||||
declare[[ Theorem_default_class = "theorem"]]
|
||||
declare[[ Corollary_default_class = "corollary"]]
|
||||
|
||||
define_shortcut* csp \<rightleftharpoons> \<open>CSP\<close>
|
||||
holcsp \<rightleftharpoons> \<open>HOL-CSP\<close>
|
||||
isabelle \<rightleftharpoons> \<open>Isabelle/HOL\<close>
|
||||
hfill \<rightleftharpoons> \<open>\hfill\<close>
|
||||
br \<rightleftharpoons> \<open>\break\<close>
|
||||
|
||||
(*>*)
|
||||
|
||||
|
@ -25,7 +30,7 @@ author*[lina,email="\<open>lina.ye@lri.fr\<close>",affiliation="\<open>LRI, Inri
|
|||
|
||||
abstract*[abs, keywordlist="[\<open>Shallow Embedding\<close>,\<open>Process-Algebra\<close>,
|
||||
\<open>Concurrency\<close>,\<open>Computational Models\<close>]"]
|
||||
\<open> The theory of Communicating Sequential Processes going back to Hoare and Roscoe is still today
|
||||
\<open> The theory of Communicating Sequential Processes going back to Hoare and Roscoe is still today
|
||||
one of the reference theories for concurrent specification and computing. In 1997, a first
|
||||
formalization in \<^isabelle> of the denotational semantics of the Failure/Divergence Model of
|
||||
\<^csp> was undertaken; in particular, this model can cope with infinite alphabets, in contrast
|
||||
|
@ -49,33 +54,32 @@ abstract*[abs, keywordlist="[\<open>Shallow Embedding\<close>,\<open>Process-Alg
|
|||
If you consider citing this paper, please refer to @{cite "HOL-CSP-iFM2020"}.
|
||||
\<close>
|
||||
text\<open>\<close>
|
||||
section*[introheader::introduction,main_author="Some(@{docitem ''bu''}::author)"]\<open> Introduction \<close>
|
||||
text*[introtext::introduction]\<open>
|
||||
Communicating Sequential Processes (\<^csp>) is a language
|
||||
to specify and verify patterns of interaction of concurrent systems.
|
||||
Together with CCS and LOTOS, it belongs to the family of \<^emph>\<open>process algebras\<close>.
|
||||
\<^csp>'s rich theory comprises denotational, operational and algebraic semantic facets
|
||||
and has influenced programming languages such as Limbo, Crystal, Clojure and
|
||||
most notably Golang @{cite "donovan2015go"}. \<^csp> has been applied in
|
||||
industry as a tool for specifying and verifying the concurrent aspects of hardware
|
||||
systems, such as the T9000 transansputer @{cite "Barret95"}.
|
||||
section*[introheader::introduction,main_author="Some(@{author ''bu''}::author)"]\<open> Introduction \<close>
|
||||
text*[introtext::introduction, level="Some 1"]\<open>
|
||||
Communicating Sequential Processes (\<^csp>) is a language to specify and verify patterns of
|
||||
interaction of concurrent systems. Together with CCS and LOTOS, it belongs to the family of
|
||||
\<^emph>\<open>process algebras\<close>. \<^csp>'s rich theory comprises denotational, operational and algebraic semantic
|
||||
facets and has influenced programming languages such as Limbo, Crystal, Clojure and most notably
|
||||
Golang @{cite "donovan2015go"}. \<^csp> has been applied in industry as a tool for specifying and
|
||||
verifying the concurrent aspects of hardware systems, such as the T9000 transansputer
|
||||
@{cite "Barret95"}.
|
||||
|
||||
The theory of \<^csp> was first described in 1978 in a book by Tony Hoare @{cite "Hoare:1985:CSP:3921"},
|
||||
but has since evolved substantially @{cite "BrookesHR84" and "brookes-roscoe85" and "roscoe:csp:1998"}.
|
||||
\<^csp> describes the most common communication and synchronization mechanisms
|
||||
with one single language primitive: synchronous communication written \<open>_\<lbrakk>_\<rbrakk>_\<close>. \<^csp> semantics is
|
||||
described by a fully abstract model of behaviour designed to be \<^emph>\<open>compositional\<close>: the denotational
|
||||
semantics of a process \<open>P\<close> encompasses all possible behaviours of this process in the context of all
|
||||
possible environments \<open>P \<lbrakk>S\<rbrakk> Env\<close> (where \<open>S\<close> is the set of \<open>atomic events\<close> both \<open>P\<close> and \<open>Env\<close> must
|
||||
synchronize). This design objective has the consequence that two kinds of choice have to
|
||||
be distinguished:
|
||||
\<^enum> the \<^emph>\<open>external choice\<close>, written \<open>_\<box>_\<close>, which forces a process "to follow" whatever
|
||||
the environment offers, and
|
||||
\<^enum> the \<^emph>\<open>internal choice\<close>, written \<open>_\<sqinter>_\<close>, which imposes on the environment of a process
|
||||
"to follow" the non-deterministic choices made.
|
||||
\<^csp> describes the most common communication and synchronization mechanisms with one single language
|
||||
primitive: synchronous communication written \<open>_\<lbrakk>_\<rbrakk>_\<close>. \<^csp> semantics is described by a fully abstract
|
||||
model of behaviour designed to be \<^emph>\<open>compositional\<close>: the denotational semantics of a process \<open>P\<close>
|
||||
encompasses all possible behaviours of this process in the context of all possible environments
|
||||
\<open>P \<lbrakk>S\<rbrakk> Env\<close> (where \<open>S\<close> is the set of \<open>atomic events\<close> both \<open>P\<close> and \<open>Env\<close> must synchronize). This
|
||||
design objective has the consequence that two kinds of choice have to be distinguished: \<^vs>\<open>0.1cm\<close>
|
||||
|
||||
\<^enum> the \<^emph>\<open>external choice\<close>, written \<open>_\<box>_\<close>, which forces a process "to follow" whatever
|
||||
the environment offers, and \<^vs>\<open>-0.4cm\<close>
|
||||
\<^enum> the \<^emph>\<open>internal choice\<close>, written \<open>_\<sqinter>_\<close>, which imposes on the environment of a process
|
||||
"to follow" the non-deterministic choices made.\<^vs>\<open>0.3cm\<close>
|
||||
\<close>
|
||||
text\<open>
|
||||
|
||||
text\<open> \<^vs>\<open>-0.6cm\<close>
|
||||
Generalizations of these two operators \<open>\<box>x\<in>A. P(x)\<close> and \<open>\<Sqinter>x\<in>A. P(x)\<close> allow for modeling the concepts
|
||||
of \<^emph>\<open>input\<close> and \<^emph>\<open>output\<close>: Based on the prefix operator \<open>a\<rightarrow>P\<close> (event \<open>a\<close> happens, then the process
|
||||
proceeds with \<open>P\<close>), receiving input is modeled by \<open>\<box>x\<in>A. x\<rightarrow>P(x)\<close> while sending output is represented
|
||||
|
@ -121,25 +125,11 @@ attempt to formalize denotational \<^csp> semantics covering a part of Bill Rosc
|
|||
\<^url>\<open>https://gitlri.lri.fr/burkhart.wolff/hol-csp2.0\<close>. In this paper, all Isabelle proofs are
|
||||
omitted.\<close>}.
|
||||
\<close>
|
||||
(*
|
||||
% Moreover, decomposition rules of the form:
|
||||
% \begin{center}
|
||||
% \begin{minipage}[c]{10cm}
|
||||
% @{cartouche [display] \<open>C \<Longrightarrow> A \<sqsubseteq>\<^sub>F\<^sub>D A' \<Longrightarrow> B \<sqsubseteq>\<^sub>F\<^sub>D B' \<Longrightarrow> A \<lbrakk>S\<rbrakk> B \<sqsubseteq>\<^sub>F\<^sub>D A' \<lbrakk>S\<rbrakk> B'\<close>}
|
||||
% \end{minipage}
|
||||
% \end{center}
|
||||
% are of particular interest since they allow to avoid the costly automata-product construction
|
||||
% of model-checkers and to separate infinite sub-systems from finite (model-checkable) ones; however,
|
||||
% their side-conditions \<open>C\<close> are particularly tricky to work out. Decomposition rules may pave the
|
||||
% way for future tool combinations for model-checkers such as FDR4~@{cite "fdr4"} or
|
||||
% PAT~@{cite "SunLDP09"} based on proof certifications.*)
|
||||
|
||||
section*["pre"::tc,main_author="Some(@{docitem \<open>bu\<close>}::author)"]
|
||||
section*["pre"::technical,main_author="Some(@{author \<open>bu\<close>}::author)"]
|
||||
\<open>Preliminaries\<close>
|
||||
|
||||
text\<open>\<close>
|
||||
|
||||
subsection*[cspsemantics::tc, main_author="Some(@{docitem ''bu''})"]\<open>Denotational \<^csp> Semantics\<close>
|
||||
subsection*[cspsemantics::technical, main_author="Some(@{author ''bu''})"]\<open>Denotational \<^csp> Semantics\<close>
|
||||
|
||||
text\<open> The denotational semantics (following @{cite "roscoe:csp:1998"}) comes in three layers:
|
||||
the \<^emph>\<open>trace model\<close>, the \<^emph>\<open>(stable) failures model\<close> and the \<^emph>\<open>failure/divergence model\<close>.
|
||||
|
@ -152,10 +142,10 @@ processes \<open>Skip\<close> (successful termination) and \<open>Stop\<close> (
|
|||
\<open>\<T>(Skip) = \<T>(Stop) = {[]}\<close>.
|
||||
Note that the trace sets, representing all \<^emph>\<open>partial\<close> history, is in general prefix closed.\<close>
|
||||
|
||||
text*[ex1::math_example, status=semiformal] \<open>
|
||||
Let two processes be defined as follows:
|
||||
text*[ex1::math_example, status=semiformal, level="Some 1"] \<open>
|
||||
Let two processes be defined as follows:\<^vs>\<open>0.2cm\<close>
|
||||
|
||||
\<^enum> \<open>P\<^sub>d\<^sub>e\<^sub>t = (a \<rightarrow> Stop) \<box> (b \<rightarrow> Stop)\<close>
|
||||
\<^enum> \<open>P\<^sub>d\<^sub>e\<^sub>t = (a \<rightarrow> Stop) \<box> (b \<rightarrow> Stop)\<close>
|
||||
\<^enum> \<open>P\<^sub>n\<^sub>d\<^sub>e\<^sub>t = (a \<rightarrow> Stop) \<sqinter> (b \<rightarrow> Stop)\<close>
|
||||
\<close>
|
||||
|
||||
|
@ -181,7 +171,6 @@ The following process \<open>P\<^sub>i\<^sub>n\<^sub>f\<close> is an infinite pr
|
|||
many times. However, using the \<^csp> hiding operator \<open>_\_\<close>, this activity is concealed:
|
||||
|
||||
\<^enum> \<open>P\<^sub>i\<^sub>n\<^sub>f = (\<mu> X. a \<rightarrow> X) \ {a}\<close>
|
||||
|
||||
\<close>
|
||||
|
||||
text\<open>where \<open>P\<^sub>i\<^sub>n\<^sub>f\<close> will be equivalent to \<open>\<bottom>\<close> in the process cpo ordering.
|
||||
|
@ -200,7 +189,7 @@ of @{cite "IsobeRoggenbach2010"} is restricted to a variant of the failures mode
|
|||
|
||||
\<close>
|
||||
|
||||
subsection*["isabelleHol"::tc, main_author="Some(@{docitem ''bu''})"]\<open>Isabelle/HOL\<close>
|
||||
subsection*["isabelleHol"::technical, main_author="Some(@{author ''bu''})"]\<open>Isabelle/HOL\<close>
|
||||
text\<open> Nowadays, Isabelle/HOL is one of the major interactive theory development environments
|
||||
@{cite "nipkow.ea:isabelle:2002"}. HOL stands for Higher-Order Logic, a logic based on simply-typed
|
||||
\<open>\<lambda>\<close>-calculus extended by parametric polymorphism and Haskell-like type-classes.
|
||||
|
@ -212,7 +201,6 @@ in the plethora of work done and has been a key factor for the success of the Ar
|
|||
For the work presented here, one relevant construction is :
|
||||
|
||||
\<^item> \<^theory_text>\<open>typedef (\<alpha>\<^sub>1,...,\<alpha>\<^sub>n)t = E\<close>
|
||||
|
||||
|
||||
It creates a fresh type that is isomorphic to a set \<open>E\<close> involving \<open>\<alpha>\<^sub>1,...,\<alpha>\<^sub>n\<close> types.
|
||||
Isabelle/HOL performs a number of syntactic checks for these constructions that guarantee the logical
|
||||
|
@ -223,25 +211,23 @@ distribution comes with rich libraries comprising Sets, Numbers, Lists, etc. whi
|
|||
For this work, a particular library called \<^theory_text>\<open>HOLCF\<close> is intensively used. It provides classical
|
||||
domain theory for a particular type-class \<open>\<alpha>::pcpo\<close>, \<^ie> the class of types \<open>\<alpha>\<close> for which
|
||||
|
||||
\<^enum> a least element \<open>\<bottom>\<close> is defined, and
|
||||
\<^enum> a least element \<open>\<bottom>\<close> is defined, and
|
||||
\<^enum> a complete partial order \<open>_\<sqsubseteq>_\<close> is defined.
|
||||
|
||||
For these types, \<^theory_text>\<open>HOLCF\<close> provides a fixed-point operator \<open>\<mu>X. f X\<close> as well as the
|
||||
fixed-point induction and other (automated) proof infrastructure. Isabelle's type-inference can
|
||||
automatically infer, for example, that if \<open>\<alpha>::pcpo\<close>, then \<open>(\<beta> \<Rightarrow> \<alpha>)::pcpo\<close>. \<close>
|
||||
|
||||
section*["csphol"::tc,main_author="Some(@{docitem ''bu''}::author)", level="Some 2"]
|
||||
section*["csphol"::technical,main_author="Some(@{author ''bu''}::author)", level="Some 2"]
|
||||
\<open>Formalising Denotational \<^csp> Semantics in HOL \<close>
|
||||
|
||||
text\<open>\<close>
|
||||
|
||||
subsection*["processinv"::tc, main_author="Some(@{docitem ''bu''})"]
|
||||
subsection*["processinv"::technical, main_author="Some(@{author ''bu''})"]
|
||||
\<open>Process Invariant and Process Type\<close>
|
||||
text\<open> First, we need a slight revision of the concept
|
||||
of \<^emph>\<open>trace\<close>: if \<open>\<Sigma>\<close> is the type of the atomic events (represented by a type variable), then
|
||||
we need to extend this type by a special event \<open>\<surd>\<close> (called "tick") signaling termination.
|
||||
Thus, traces have the type \<open>(\<Sigma>+\<surd>)\<^sup>*\<close>, written \<open>\<Sigma>\<^sup>\<surd>\<^sup>*\<close>; since \<open>\<surd>\<close> may only occur at the end of a trace,
|
||||
we need to define a predicate \<open>front\<^sub>-tickFree t\<close> that requires from traces that \<open>\<surd>\<close> can only occur
|
||||
we need to extend this type by a special event \<open>\<checkmark>\<close> (called "tick") signaling termination.
|
||||
Thus, traces have the type \<open>(\<Sigma>\<uplus>\<checkmark>)\<^sup>*\<close>, written \<open>\<Sigma>\<^sup>\<checkmark>\<^sup>*\<close>; since \<open>\<checkmark>\<close> may only occur at the end of a trace,
|
||||
we need to define a predicate \<open>front\<^sub>-tickFree t\<close> that requires from traces that \<open>\<checkmark>\<close> can only occur
|
||||
at the end.
|
||||
|
||||
Second, in the traditional literature, the semantic domain is implicitly described by 9 "axioms"
|
||||
|
@ -256,38 +242,37 @@ Informally, these are:
|
|||
\<^item> the tick accepted after a trace \<open>s\<close> implies that all other events are refused;
|
||||
\<^item> a divergence trace with any suffix is itself a divergence one
|
||||
\<^item> once a process has diverged, it can engage in or refuse any sequence of events.
|
||||
\<^item> a trace ending with \<open>\<surd>\<close> belonging to divergence set implies that its
|
||||
maximum prefix without \<open>\<surd>\<close> is also a divergent trace.
|
||||
\<^item> a trace ending with \<open>\<checkmark>\<close> belonging to divergence set implies that its
|
||||
maximum prefix without \<open>\<checkmark>\<close> is also a divergent trace.
|
||||
|
||||
More formally, a process \<open>P\<close> of the type \<open>\<Sigma> process\<close> should have the following properties:
|
||||
|
||||
|
||||
@{cartouche [display] \<open>([],{}) \<in> \<F> P \<and>
|
||||
@{cartouche [display, indent=10] \<open>([],{}) \<in> \<F> P \<and>
|
||||
(\<forall> s X. (s,X) \<in> \<F> P \<longrightarrow> front_tickFree s) \<and>
|
||||
(\<forall> s t . (s@t,{}) \<in> \<F> P \<longrightarrow> (s,{}) \<in> \<F> P) \<and>
|
||||
(\<forall> s X Y. (s,Y) \<in> \<F> P \<and> X\<subseteq>Y \<longrightarrow> (s,X) \<in> \<F> P) \<and>
|
||||
(\<forall> s X Y. (s,X) \<in> \<F> P \<and> (\<forall>c \<in> Y. ((s@[c],{}) \<notin> \<F> P)) \<longrightarrow> (s,X \<union> Y) \<in> \<F> P) \<and>
|
||||
(\<forall> s X. (s@[\<surd>],{}) \<in> \<F> P \<longrightarrow> (s,X-{\<surd>}) \<in> \<F> P) \<and>
|
||||
(\<forall> s X. (s@[\<checkmark>],{}) \<in> \<F> P \<longrightarrow> (s,X-{\<checkmark>}) \<in> \<F> P) \<and>
|
||||
(\<forall> s t. s \<in> \<D> P \<and> tickFree s \<and> front_tickFree t \<longrightarrow> s@t \<in> \<D> P) \<and>
|
||||
(\<forall> s X. s \<in> \<D> P \<longrightarrow> (s,X) \<in> \<F> P) \<and>
|
||||
(\<forall> s. s@[\<surd>] \<in> \<D> P \<longrightarrow> s \<in> \<D> P)\<close>}
|
||||
(\<forall> s. s@[\<checkmark>] \<in> \<D> P \<longrightarrow> s \<in> \<D> P)\<close>}
|
||||
|
||||
Our objective is to encapsulate this wishlist into a type constructed as a conservative
|
||||
theory extension in our theory \<^holcsp>.
|
||||
Therefore third, we define a pre-type for processes \<open>\<Sigma> process\<^sub>0\<close> by \<open> \<P>(\<Sigma>\<^sup>\<surd>\<^sup>* \<times> \<P>(\<Sigma>\<^sup>\<surd>)) \<times> \<P>(\<Sigma>\<^sup>\<surd>)\<close>.
|
||||
Therefore third, we define a pre-type for processes \<open>\<Sigma> process\<^sub>0\<close> by \<open> \<P>(\<Sigma>\<^sup>\<checkmark>\<^sup>* \<times> \<P>(\<Sigma>\<^sup>\<checkmark>)) \<times> \<P>(\<Sigma>\<^sup>\<checkmark>)\<close>.
|
||||
Forth, we turn our wishlist of "axioms" above into the definition of a predicate \<open>is_process P\<close>
|
||||
of type \<open>\<Sigma> process\<^sub>0 \<Rightarrow> bool\<close> deciding if its conditions are fulfilled. Since \<open>P\<close> is a pre-process,
|
||||
we replace \<open>\<F>\<close> by \<open>fst\<close> and \<open>\<D>\<close> by \<open>snd\<close> (the HOL projections into a pair).
|
||||
And last not least fifth, we use the following type definition:
|
||||
\<^item> \<^theory_text>\<open>typedef '\<alpha> process = "{P :: '\<alpha> process\<^sub>0 . is_process P}"\<close>
|
||||
|
||||
\<^item> \<^theory_text>\<open>typedef '\<alpha> process = "{P :: '\<alpha> process\<^sub>0 . is_process P}"\<close>
|
||||
|
||||
Isabelle requires a proof for the existence of a witness for this set,
|
||||
but this can be constructed in a straight-forward manner. Suitable definitions for
|
||||
\<open>\<T>\<close>, \<open>\<F>\<close> and \<open>\<D>\<close> lifting \<open>fst\<close> and \<open>snd\<close> on the new \<open>'\<alpha> process\<close>-type allows to derive
|
||||
the above properties for any \<open>P::'\<alpha> process\<close>. \<close>
|
||||
|
||||
subsection*["operator"::tc, main_author="Some(@{docitem ''lina''})"]
|
||||
subsection*["operator"::technical, main_author="Some(@{author ''lina''})"]
|
||||
\<open>\<^csp> Operators over the Process Type\<close>
|
||||
text\<open> Now, the operators of \<^csp> \<open>Skip\<close>, \<open>Stop\<close>, \<open>_\<sqinter>_\<close>, \<open>_\<box>_\<close>, \<open>_\<rightarrow>_\<close>,\<open>_\<lbrakk>_\<rbrakk>_\<close> etc.
|
||||
for internal choice, external choice, prefix and parallel composition, can
|
||||
|
@ -301,17 +286,18 @@ For example, we define \<open>_\<sqinter>_\<close> on the pre-process type as fo
|
|||
|
||||
\<^item> \<^theory_text>\<open>definition "P \<sqinter> Q \<equiv> Abs_process(\<F> P \<union> \<F> Q , \<D> P \<union> \<D> Q)"\<close>
|
||||
|
||||
where \<open>\<F> = fst \<circ> Rep_process\<close> and \<open>\<D> = snd \<circ> Rep_process\<close> and where \<open>Rep_process\<close> and
|
||||
\<open>Abs_process\<close> are the representation and abstraction morphisms resulting from the
|
||||
type definition linking \<open>'\<alpha> process\<close> isomorphically to \<open>'\<alpha> process\<^sub>0\<close>. Proving the above properties
|
||||
for \<open>\<F> (P \<sqinter> Q)\<close> and \<open>\<D> (P \<sqinter> Q)\<close> requires a proof that \<open>(\<F> P \<union> \<F> Q , \<D> P \<union> \<D> Q)\<close>
|
||||
satisfies the 9 "axioms", which is fairly simple in this case.
|
||||
where \<open>Rep_process\<close> and \<open>Abs_process\<close> are the representation and abstraction morphisms resulting
|
||||
from the type definition linking the type \<open>'\<alpha> process\<close> isomorphically to the set \<open>'\<alpha> process\<^sub>0\<close>.
|
||||
The projection into \<^emph>\<open>failures\<close> is defined by \<open>\<F> = fst \<circ> Rep_process\<close>, whereas the
|
||||
\<^emph>\<open>divergences\<close> are defined bz \<open>\<D> = snd \<circ> Rep_process\<close>. Proving the above properties for
|
||||
\<open>\<F> (P \<sqinter> Q)\<close> and \<open>\<D> (P \<sqinter> Q)\<close> requires a proof that \<open>(\<F> P \<union> \<F> Q , \<D> P \<union> \<D> Q)\<close>
|
||||
satisfies the well-formedness conditions of \<open>is_process\<close>, which is fairly simple in this case.
|
||||
|
||||
The definitional presentation of the \<^csp> process operators according to @{cite "roscoe:csp:1998"}
|
||||
follows always this scheme. This part of the theory comprises around 2000 loc.
|
||||
\<close>
|
||||
|
||||
subsection*["orderings"::tc, main_author="Some(@{docitem ''bu''})"]
|
||||
subsection*["orderings"::technical, main_author="Some(@{author ''bu''})"]
|
||||
\<open>Refinement Orderings\<close>
|
||||
|
||||
text\<open> \<^csp> is centered around the idea of process refinement; many critical properties,
|
||||
|
@ -320,15 +306,16 @@ a conversion of processes in terms of (finite) labelled transition systems leads
|
|||
model-checking techniques based on graph-exploration. Essentially, a process \<open>P\<close> \<^emph>\<open>refines\<close>
|
||||
another process \<open>Q\<close> if and only if it is more deterministic and more defined (has less divergences).
|
||||
Consequently, each of the three semantics models (trace, failure and failure/divergence)
|
||||
has its corresponding refinement orderings.
|
||||
has its corresponding refinement orderings.\<close>
|
||||
Theorem*[th1::"theorem", short_name="\<open>Refinement properties\<close>"]\<open>
|
||||
What we are interested in this paper is the following refinement orderings for the
|
||||
failure/divergence model.
|
||||
|
||||
\<^enum> \<open>P \<sqsubseteq>\<^sub>\<F>\<^sub>\<D> Q \<equiv> \<F> P \<supseteq> \<F> Q \<and> \<D> P \<supseteq> \<D> Q\<close>
|
||||
\<^enum> \<open>P \<sqsubseteq>\<^sub>\<T>\<^sub>\<D> Q \<equiv> \<T> P \<supseteq> \<T> Q \<and> \<D> P \<supseteq> \<D> Q\<close>
|
||||
\<^enum> \<open>P \<sqsubseteq>\<^sub>\<FF> Q \<equiv> \<FF> P \<supseteq> \<FF> Q, \<FF>\<in>{\<T>,\<F>,\<D>}\<close>
|
||||
\<^enum> \<open>P \<sqsubseteq>\<^sub>\<FF> Q \<equiv> \<FF> P \<supseteq> \<FF> Q, \<FF>\<in>{\<T>,\<F>,\<D>}\<close> \<close>
|
||||
|
||||
Notice that in the \<^csp> literature, only \<open>\<sqsubseteq>\<^sub>\<F>\<^sub>\<D>\<close> is well studied for failure/divergence model.
|
||||
text\<open> Notice that in the \<^csp> literature, only \<open>\<sqsubseteq>\<^sub>\<F>\<^sub>\<D>\<close> is well studied for failure/divergence model.
|
||||
Our formal analysis of different granularities on the refinement orderings
|
||||
allows deeper understanding of the same semantics model. For example, \<open>\<sqsubseteq>\<^sub>\<T>\<^sub>\<D>\<close> turns
|
||||
out to have in some cases better monotonicity properties and therefore allow for stronger proof
|
||||
|
@ -340,7 +327,7 @@ states, from which no internal progress is possible.
|
|||
\<close>
|
||||
|
||||
|
||||
subsection*["fixpoint"::tc, main_author="Some(@{docitem ''lina''})"]
|
||||
subsection*["fixpoint"::technical, main_author="Some(@{author ''lina''})"]
|
||||
\<open>Process Ordering and HOLCF\<close>
|
||||
text\<open> For any denotational semantics, the fixed point theory giving semantics to systems
|
||||
of recursive equations is considered as keystone. Its prerequisite is a complete partial ordering
|
||||
|
@ -352,17 +339,16 @@ Roscoe and Brooks @{cite "Roscoe1992AnAO"} finally proposed another ordering, ca
|
|||
that completeness could at least be assured for read-operations. This more complex ordering
|
||||
is based on the concept \<^emph>\<open>refusals after\<close> a trace \<open>s\<close> and defined by \<open>\<R> P s \<equiv> {X | (s, X) \<in> \<F> P}\<close>.\<close>
|
||||
|
||||
Definition*[process_ordering, short_name="''process ordering''"]\<open>
|
||||
Definition*[process_ordering, level= "Some 2", short_name="''process ordering''"]\<open>
|
||||
We define \<open>P \<sqsubseteq> Q \<equiv> \<psi>\<^sub>\<D> \<and> \<psi>\<^sub>\<R> \<and> \<psi>\<^sub>\<M> \<close>, where
|
||||
\<^enum> \<open>\<psi>\<^sub>\<D> = \<D> P \<supseteq> \<D> Q \<close>
|
||||
\<^enum> \<open>\<psi>\<^sub>\<D> = \<D> P \<supseteq> \<D> Q \<close>
|
||||
\<^enum> \<open>\<psi>\<^sub>\<R> = s \<notin> \<D> P \<Rightarrow> \<R> P s = \<R> Q s\<close>
|
||||
\<^enum> \<open>\<psi>\<^sub>\<M> = Mins(\<D> P) \<subseteq> \<T> Q \<close>
|
||||
\<close>
|
||||
\<^enum> \<open>\<psi>\<^sub>\<M> = Mins(\<D> P) \<subseteq> \<T> Q \<close> \<close>
|
||||
|
||||
text\<open>The third condition \<open>\<psi>\<^sub>\<M>\<close> implies that the set of minimal divergent traces
|
||||
(ones with no proper prefix that is also a divergence) in \<open>P\<close>, denoted by \<open>Mins(\<D> P)\<close>,
|
||||
should be a subset of the trace set of \<open>Q\<close>.
|
||||
%One may note that each element in \<open>Mins(\<D> P)\<close> do actually not contain the \<open>\<surd>\<close>,
|
||||
%One may note that each element in \<open>Mins(\<D> P)\<close> do actually not contain the \<open>\<checkmark>\<close>,
|
||||
%which can be deduced from the process invariants described
|
||||
%in the precedent @{technical "processinv"}. This can be explained by the fact that we are not
|
||||
%really concerned with what a process does after it terminates.
|
||||
|
@ -393,44 +379,45 @@ For most \<^csp> operators \<open>\<otimes>\<close> we derived rules of the form
|
|||
|
||||
These rules allow to automatically infer for any process term if it is continuous or not.
|
||||
The port of HOL-CSP 2 on HOLCF implied that the derivation of the entire continuity rules
|
||||
had to be completely re-done (3000 loc).
|
||||
|
||||
|
||||
HOL-CSP provides an important proof principle, the fixed-point induction:
|
||||
had to be completely re-done (3000 loc).\<close>
|
||||
|
||||
Theorem*[th2,short_name="\<open>Fixpoint Induction\<close>"]
|
||||
\<open>HOL-CSP provides an important proof principle, the fixed-point induction:
|
||||
@{cartouche [display, indent=5] \<open>cont f \<Longrightarrow> adm P \<Longrightarrow> P \<bottom> \<Longrightarrow> (\<And>X. P X \<Longrightarrow> P(f X)) \<Longrightarrow> P(\<mu>X. f X)\<close>}
|
||||
\<close>
|
||||
|
||||
Fixed-point induction requires a small side-calculus for establishing the admissibility
|
||||
text\<open>Fixed-point induction of @{theorem th2} requires a small side-calculus for establishing the admissibility
|
||||
of a predicate; basically, predicates are admissible if they are valid for any least upper bound
|
||||
of a chain \<open>x\<^sub>1 \<sqsubseteq> x\<^sub>2 \<sqsubseteq> x\<^sub>3 ... \<close> provided that \<open>\<forall>i. P(x\<^sub>i)\<close>. It turns out that \<open>_\<sqsubseteq>_\<close> and \<open>_\<sqsubseteq>\<^sub>F\<^sub>D_\<close> as
|
||||
well as all other refinement orderings that we introduce in this paper are admissible.
|
||||
Fixed-point inductions are the main proof weapon in verifications,
|
||||
together with monotonicities and the \<^csp> laws. Denotational arguments can be hidden as they are not
|
||||
needed in practical verifications. \<close>
|
||||
Fixed-point inductions are the main proof weapon in verifications, together with monotonicities
|
||||
and the \<^csp> laws. Denotational arguments can be hidden as they are not needed in practical
|
||||
verifications. \<close>
|
||||
|
||||
subsection*["law"::tc, main_author="Some(@{docitem ''lina''})"]
|
||||
subsection*["law"::technical, main_author="Some(@{author ''lina''})"]
|
||||
\<open>\<^csp> Rules: Improved Proofs and New Results\<close>
|
||||
|
||||
|
||||
text\<open> The \<^csp> operators enjoy a number of algebraic properties: commutativity,
|
||||
text\<open>The \<^csp> operators enjoy a number of algebraic properties: commutativity,
|
||||
associativities, and idempotence in some cases. Moreover, there is a rich body of distribution
|
||||
laws between these operators. Our new version HOL-CSP 2 not only shortens and restructures the
|
||||
proofs of @{cite "tej.ea:corrected:1997"}; the code reduces
|
||||
to 8000 loc from 25000 loc. Some illustrative examples of new established rules are:
|
||||
proofs of @{cite "tej.ea:corrected:1997"}; the code reduces to 8000 loc from 25000 loc. \<close>
|
||||
|
||||
Theorem*[th3, short_name="\<open>Examples of Derived Rules.\<close>"]\<open>
|
||||
\<^item> \<open>\<box>x\<in>A\<union>B\<rightarrow>P(x) = (\<box>x\<in>A\<rightarrow>P x) \<box> (\<box>x\<in>B\<rightarrow>P x)\<close>
|
||||
\<^item> \<open>A\<union>B\<subseteq>C \<Longrightarrow> (\<box>x\<in>A\<rightarrow>P x \<lbrakk>C\<rbrakk> \<box>x\<in>B\<rightarrow>Q x) = \<box>x\<in>A\<inter>B\<rightarrow>(P x \<lbrakk>C\<rbrakk> Q x)\<close>
|
||||
\<^item> @{cartouche [display]\<open>A\<subseteq>C \<Longrightarrow> B\<inter>C={} \<Longrightarrow>
|
||||
(\<box>x\<in>A\<rightarrow>P x \<lbrakk>C\<rbrakk> \<box>x\<in>B\<rightarrow>Q x) = \<box>x\<in>B\<rightarrow>(\<box>x\<in>A\<rightarrow>P x \<lbrakk>C\<rbrakk> Q x)\<close>}
|
||||
\<^item> \<open>finite A \<Longrightarrow> A\<inter>C = {} \<Longrightarrow> ((P \<lbrakk>C\<rbrakk> Q) \ A) = ((P \ A) \<lbrakk>C\<rbrakk> (Q \ A)) ...\<close>
|
||||
\<^item> \<open>finite A \<Longrightarrow> A\<inter>C = {} \<Longrightarrow> ((P \<lbrakk>C\<rbrakk> Q) \ A) = ((P \ A) \<lbrakk>C\<rbrakk> (Q \ A)) ...\<close>\<close>
|
||||
|
||||
The continuity proof of the hiding operator is notorious. The proof is known
|
||||
to involve the classical König's lemma stating that every infinite tree with finite branching
|
||||
has an infinite path. We adapt this lemma to our context as follows:
|
||||
|
||||
@{cartouche [display, indent=5]
|
||||
text\<open>The continuity proof of the hiding operator is notorious. The proof is known to involve the
|
||||
classical König's lemma stating that every infinite tree with finite branching has an infinite path.
|
||||
We adapt this lemma to our context as follows:
|
||||
|
||||
@{cartouche [display, indent=5]
|
||||
\<open>infinite tr \<Longrightarrow> \<forall>i. finite{t. \<exists>t'\<in>tr. t = take i t'}
|
||||
\<Longrightarrow> \<exists> f. strict_mono f \<and> range f \<subseteq> {t. \<exists>t'\<in>tr. t \<le> t'}\<close>}
|
||||
\<Longrightarrow> \<exists> f. strict_mono f \<and> range f \<subseteq> {t. \<exists>t'\<in>tr. t \<le> t'}\<close>}
|
||||
|
||||
in order to come up with the continuity rule: \<open>finite S \<Longrightarrow> cont P \<Longrightarrow> cont(\<lambda>X. P X \ S)\<close>.
|
||||
The original proof had been drastically shortened by a factor 10 and important immediate steps
|
||||
|
@ -449,12 +436,12 @@ cases to be considered as well as their complexity makes pen and paper proofs
|
|||
practically infeasible.
|
||||
\<close>
|
||||
|
||||
section*["newResults"::tc,main_author="Some(@{docitem ''safouan''}::author)",
|
||||
main_author="Some(@{docitem ''lina''}::author)", level= "Some 3"]
|
||||
section*["newResults"::technical,main_author="Some(@{author ''safouan''}::author)",
|
||||
main_author="Some(@{author ''lina''}::author)", level= "Some 3"]
|
||||
\<open>Theoretical Results on Refinement\<close>
|
||||
text\<open>\<close>
|
||||
subsection*["adm"::tc,main_author="Some(@{docitem ''safouan''}::author)",
|
||||
main_author="Some(@{docitem ''lina''}::author)"]
|
||||
subsection*["adm"::technical,main_author="Some(@{author ''safouan''}::author)",
|
||||
main_author="Some(@{author ''lina''}::author)"]
|
||||
\<open>Decomposition Rules\<close>
|
||||
text\<open>
|
||||
In our framework, we implemented the pcpo process refinement together with the five refinement
|
||||
|
@ -474,47 +461,23 @@ under all refinement orderings, while others are not.
|
|||
\<^item> Sequence operator is not monotonic under \<open>\<sqsubseteq>\<^sub>\<F>\<close>, \<open>\<sqsubseteq>\<^sub>\<D>\<close> or \<open>\<sqsubseteq>\<^sub>\<T>\<close>:
|
||||
@{cartouche [display,indent=5]
|
||||
\<open>P \<sqsubseteq>\<^sub>\<FF> P'\<Longrightarrow> Q \<sqsubseteq>\<^sub>\<FF> Q' \<Longrightarrow> (P ; Q) \<sqsubseteq>\<^sub>\<FF> (P' ; Q') where \<FF>\<in>{\<T>\<D>,\<F>\<D>}\<close>}
|
||||
%All refinements are right-side monotonic but \<open>\<sqsubseteq>\<^sub>\<F>\<close>, \<open>\<sqsubseteq>\<^sub>\<D>\<close> and \<open>\<sqsubseteq>\<^sub>\<T>\<close> are not left-side monotonic,
|
||||
%which can be explained by
|
||||
%the interdependence relationship of failure and divergence projections for the first component.
|
||||
%We thus proved:
|
||||
|
||||
All refinements are right-side monotonic but \<open>\<sqsubseteq>\<^sub>\<F>\<close>, \<open>\<sqsubseteq>\<^sub>\<D>\<close> and \<open>\<sqsubseteq>\<^sub>\<T>\<close> are not left-side monotonic,
|
||||
which can be explained by the interdependence relationship of failure and divergence projections
|
||||
for the first component. We thus proved:
|
||||
\<^item> Hiding operator is not monotonic under \<open>\<sqsubseteq>\<^sub>\<D>\<close>:
|
||||
@{cartouche [display,indent=5] \<open>P \<sqsubseteq>\<^sub>\<FF> Q \<Longrightarrow> P \ A \<sqsubseteq>\<^sub>\<FF> Q \ A where \<FF>\<in>{\<T>,\<F>,\<T>\<D>,\<F>\<D>}\<close>}
|
||||
%Intuitively, for the divergence refinement of the hiding operator, there may be
|
||||
%some trace \<open>s\<in>\<T> Q\<close> and \<open>s\<notin>\<T> P\<close> such that it becomes divergent in \<open>Q \ A\<close> but
|
||||
%not in \<open>P \ A\<close>.
|
||||
%when the condition in the corresponding projection laws is satisfied, which makes it is not monotonic.
|
||||
Intuitively, for the divergence refinement of the hiding operator, there may be
|
||||
some trace \<open>s\<in>\<T> Q\<close> and \<open>s\<notin>\<T> P\<close> such that it becomes divergent in \<open>Q \ A\<close> but
|
||||
not in \<open>P \ A\<close>.
|
||||
\<^item> Parallel composition is not monotonic under \<open>\<sqsubseteq>\<^sub>\<F>\<close>, \<open>\<sqsubseteq>\<^sub>\<D>\<close> or \<open>\<sqsubseteq>\<^sub>\<T>\<close>:
|
||||
@{cartouche [display,indent=5] \<open>P \<sqsubseteq>\<^sub>\<FF> P' \<Longrightarrow> Q \<sqsubseteq>\<^sub>\<FF> Q' \<Longrightarrow> (P \<lbrakk>A\<rbrakk> Q) \<sqsubseteq>\<^sub>\<FF> (P' \<lbrakk>A\<rbrakk> Q') where \<FF>\<in>{\<T>\<D>,\<F>\<D>}\<close>}
|
||||
%The failure and divergence projections of this operator are also interdependent, similar to the
|
||||
%sequence operator.
|
||||
%Hence, this operator is not monotonic with \<open>\<sqsubseteq>\<^sub>\<F>\<close>, \<open>\<sqsubseteq>\<^sub>\<D>\<close> and \<open>\<sqsubseteq>\<^sub>\<T>\<close>, but monotonic when their
|
||||
%combinations are considered.
|
||||
The failure and divergence projections of this operator are also interdependent, similar to the
|
||||
sequence operator. Hence, this operator is not monotonic with \<open>\<sqsubseteq>\<^sub>\<F>\<close>, \<open>\<sqsubseteq>\<^sub>\<D>\<close> and \<open>\<sqsubseteq>\<^sub>\<T>\<close>, but monotonic
|
||||
when their combinations are considered. \<close>
|
||||
|
||||
\<close>
|
||||
|
||||
(* Besides the monotonicity results on the above \<^csp> operators,
|
||||
we have also proved that for other \<^csp> operators, such as multi-prefix and non-deterministic choice,
|
||||
they are all monotonic with these five refinement orderings. Such theoretical results provide significant indicators
|
||||
for semantics choices when considering specification decomposition.
|
||||
We want to emphasize that this is the first work on such substantial
|
||||
analysis in a formal way, as far as we know.
|
||||
|
||||
%In the literature, these processes are defined in a way that does not distinguish the special event \<open>tick\<close>. To be consistent with the idea that ticks should be distinguished on the semantic level, besides the above
|
||||
three processes,
|
||||
|
||||
one can directly prove 3 since for both \<open>CHAOS\<close> and \<open>DF\<close>,
|
||||
the version with \<open>SKIP\<close> is constructed exactly in the same way from that without \<open>SKIP\<close>.
|
||||
And 4 is obtained based on the projection laws of internal choice \<open>\<sqinter>\<close>.
|
||||
Finally, for 5, the difference between \<open>DF\<close> and \<open>RUN\<close> is that the former applies internal choice
|
||||
while the latter with external choice. From the projection laws of both operators,
|
||||
the failure set of \<open>RUN\<close> has more constraints, thus being a subset of that of \<open>DF\<close>,
|
||||
when the divergence set is empty, which is true for both processes.
|
||||
|
||||
*)
|
||||
|
||||
subsection*["processes"::tc,main_author="Some(@{docitem ''safouan''}::author)",
|
||||
main_author="Some(@{docitem ''lina''}::author)"]
|
||||
subsection*["processes"::technical,main_author="Some(@{author ''safouan''}::author)",
|
||||
main_author="Some(@{author ''lina''}::author)"]
|
||||
\<open>Reference Processes and their Properties\<close>
|
||||
text\<open>
|
||||
We now present reference processes that exhibit basic behaviors, introduced in
|
||||
|
@ -528,10 +491,10 @@ To handle termination better, we added two new processes \<open>CHAOS\<^sub>S\<^
|
|||
\<close>
|
||||
|
||||
(*<*) (* a test ...*)
|
||||
text*[X22 ::math_content ]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
text*[X32::"definition", mcc=defn]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X42]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X52::"definition"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
text*[X22 ::math_content, level="Some 2" ]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
text*[X32::"definition", level="Some 2", mcc=defn]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X42, level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X52::"definition", level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
|
||||
text\<open> The \<open>RUN\<close>-process defined @{math_content X22} represents the process that accepts all
|
||||
events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in two variants shown in
|
||||
|
@ -539,51 +502,48 @@ events, but never stops nor deadlocks. The \<open>CHAOS\<close>-process comes in
|
|||
stops or accepts any offered event, whereas \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P\<close> can additionally terminate.\<close>
|
||||
(*>*)
|
||||
|
||||
Definition*[X2]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
Definition*[X3]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X4]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close>\<close>
|
||||
Definition*[X5]\<open>\<open>DF A \<equiv> \<mu> X. (\<sqinter> x \<in> A \<rightarrow> X)\<close> \<close>
|
||||
Definition*[X6]\<open>\<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. ((\<sqinter> x \<in> A \<rightarrow> X) \<sqinter> SKIP)\<close> \<close>
|
||||
Definition*[X2, level="Some 2"]\<open>\<open>RUN A \<equiv> \<mu> X. \<box> x \<in> A \<rightarrow> X\<close> \<close>
|
||||
Definition*[X3, level="Some 2"]\<open>\<open>CHAOS A \<equiv> \<mu> X. (STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close> \<close>
|
||||
Definition*[X4, level="Some 2"]\<open>\<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. (SKIP \<sqinter> STOP \<sqinter> (\<box> x \<in> A \<rightarrow> X))\<close>\<close>
|
||||
Definition*[X5, level="Some 2"]\<open>\<open>DF A \<equiv> \<mu> X. (\<sqinter> x \<in> A \<rightarrow> X)\<close> \<close>
|
||||
Definition*[X6, level="Some 2"]\<open>\<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<equiv> \<mu> X. ((\<sqinter> x \<in> A \<rightarrow> X) \<sqinter> SKIP)\<close> \<close>
|
||||
|
||||
text\<open>In the following, we denote \<open> \<R>\<P> = {DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P, DF, RUN, CHAOS, CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P}\<close>.
|
||||
All five reference processes are divergence-free.
|
||||
%which was done by using a particular lemma \<open>\<D> (\<mu> x. f x) = \<Inter>\<^sub>i\<^sub>\<in>\<^sub>\<nat> \<D> (f\<^sup>i \<bottom>)\<close>.
|
||||
which was proven by using a particular lemma \<open>\<D> (\<mu> x. f x) = \<Inter>\<^sub>i\<^sub>\<in>\<^sub>\<nat> \<D> (f\<^sup>i \<bottom>)\<close>.
|
||||
@{cartouche
|
||||
[display,indent=8] \<open> D (\<PP> UNIV) = {} where \<PP> \<in> \<R>\<P> and UNIV is the set of all events\<close>
|
||||
}
|
||||
Regarding the failure refinement ordering, the set of failures \<open>\<F> P\<close> for any process \<open>P\<close> is
|
||||
a subset of \<open>\<F> (CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV)\<close>.% and the following lemma was proved:
|
||||
% This proof is performed by induction, based on the failure projection of \<open>STOP\<close> and that of
|
||||
% internal choice.
|
||||
|
||||
a subset of \<open>\<F> (CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV)\<close>.
|
||||
|
||||
@{cartouche [display, indent=25] \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close>}
|
||||
|
||||
|
||||
\<^noindent> Furthermore, the following 5 relationships were demonstrated from monotonicity results and
|
||||
a denotational proof.
|
||||
%among which 1 and 2 are immediate corollaries,
|
||||
%4 and 5 are directly obtained from our monotonicity results while 3 requires a denotational proof.
|
||||
and thanks to transitivity, we can derive other relationships.
|
||||
Furthermore, the following 5 relationships were demonstrated from monotonicity results and
|
||||
a denotational proof.
|
||||
\<close>
|
||||
|
||||
|
||||
Corollary*[co1::"corollary", short_name="\<open>Corollaries on reference processes.\<close>",level="Some 2"]
|
||||
\<open> \<^hfill> \<^br> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^enum> \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<sqsubseteq>\<^sub>\<F> CHAOS A\<close>
|
||||
\<^enum> \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<sqsubseteq>\<^sub>\<F> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A\<close>
|
||||
\<^enum> \<open>CHAOS A \<sqsubseteq>\<^sub>\<F> DF A\<close>
|
||||
\<^enum> \<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P A \<sqsubseteq>\<^sub>\<F> DF A\<close>
|
||||
\<^enum> \<open>DF A \<sqsubseteq>\<^sub>\<F> RUN A\<close>
|
||||
\<^enum> \<open>DF A \<sqsubseteq>\<^sub>\<F> RUN A\<close> \<^vs>\<open>0.3cm\<close>
|
||||
|
||||
where 1 and 2 are immediate, and where 4 and 5 are directly obtained from our monotonicity
|
||||
results while 3 requires an argument over the denotational space.
|
||||
Thanks to transitivity, we can derive other relationships.\<close>
|
||||
|
||||
Last, regarding trace refinement, for any process P,
|
||||
text\<open> Lastly, regarding trace refinement, for any process P,
|
||||
its set of traces \<open>\<T> P\<close> is a subset of \<open>\<T> (CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV)\<close> and of \<open>\<T> (DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV)\<close> as well.
|
||||
%As we already proved that \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P\<close> covers all failures,
|
||||
%we can immediately infer that it also covers all traces.
|
||||
%The \<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P\<close> case requires a longer denotational proof.
|
||||
|
||||
|
||||
\<^enum> \<open>CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<T> P\<close>
|
||||
\<^enum> \<open>DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<T> P\<close>
|
||||
|
||||
\<close>
|
||||
|
||||
text\<open>
|
||||
|
@ -596,39 +556,34 @@ verification. For example, if one wants to establish that a protocol implementat
|
|||
a non-deterministic specification \<open>SPEC\<close> it suffices to ask if \<open>IMPL || SPEC\<close> is deadlock-free.
|
||||
In this setting, \<open>SPEC\<close> becomes a kind of observer that signals non-conformance of \<open>IMPL\<close> by
|
||||
deadlock.
|
||||
% A livelocked system looks similar to a deadlocked one from an external point of view.
|
||||
% However, livelock is sometimes considered as worse since the user may be able to observe the internal
|
||||
% activities and so hope that some output will happen eventually.
|
||||
|
||||
In the literature, deadlock and livelock are phenomena that are often
|
||||
handled separately. One contribution of our work is to establish their precise relationship inside
|
||||
the Failure/Divergence Semantics of \<^csp>.\<close>
|
||||
|
||||
(* bizarre: Definition* does not work for this single case *)
|
||||
text*[X10::"definition"]\<open> \<open>deadlock\<^sub>-free P \<equiv> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close> \<close>
|
||||
Definition*[X10::"definition", level="Some 2"]\<open> \<open>deadlock\<^sub>-free P \<equiv> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F> P\<close> \<close>
|
||||
|
||||
text\<open>\<^noindent> A process \<open>P\<close> is deadlock-free if and only if after any trace \<open>s\<close> without \<open>\<surd>\<close>, the union of \<open>\<surd>\<close>
|
||||
text\<open>\<^noindent> A process \<open>P\<close> is deadlock-free if and only if after any trace \<open>s\<close> without \<open>\<checkmark>\<close>, the union of \<open>\<checkmark>\<close>
|
||||
and all events of \<open>P\<close> can never be a refusal set associated to \<open>s\<close>, which means that \<open>P\<close> cannot
|
||||
be deadlocked after any non-terminating trace.
|
||||
\<close>
|
||||
|
||||
Theorem*[T1, short_name="\<open>DF definition captures deadlock-freeness\<close>"]
|
||||
\<open> \hfill \break \<open>deadlock_free P \<longleftrightarrow> (\<forall>s\<in>\<T> P. tickFree s \<longrightarrow> (s, {\<surd>}\<union>events_of P) \<notin> \<F> P)\<close> \<close>
|
||||
Definition*[X11]\<open> \<open>livelock\<^sub>-free P \<equiv> \<D> P = {} \<close> \<close>
|
||||
Theorem*[T1, short_name="\<open>DF definition captures deadlock-freeness\<close>", level="Some 2"]
|
||||
\<open> \<^hfill> \<^br> \<open>deadlock_free P \<longleftrightarrow> (\<forall>s\<in>\<T> P. tickFree s \<longrightarrow> (s, {\<checkmark>}\<union>events_of P) \<notin> \<F> P)\<close> \<close>
|
||||
Definition*[X11, level="Some 2"]\<open> \<open>livelock\<^sub>-free P \<equiv> \<D> P = {} \<close> \<close>
|
||||
|
||||
text\<open> Recall that all five reference processes are livelock-free.
|
||||
We also have the following lemmas about the
|
||||
livelock-freeness of processes:
|
||||
\<^enum> \<open>livelock\<^sub>-free P \<longleftrightarrow> \<PP> UNIV \<sqsubseteq>\<^sub>\<D> P where \<PP> \<in> \<R>\<P>\<close>
|
||||
\<^enum> @{cartouche [display]\<open>livelock\<^sub>-free P \<longleftrightarrow> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<T>\<^sub>\<D> P
|
||||
\<longleftrightarrow> CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<T>\<^sub>\<D> P\<close>}
|
||||
\<^enum> \<open>livelock\<^sub>-free P \<longleftrightarrow> DF\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<T>\<^sub>\<D> P \<longleftrightarrow> CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<T>\<^sub>\<D> P\<close>
|
||||
\<^enum> \<open>livelock\<^sub>-free P \<longleftrightarrow> CHAOS\<^sub>S\<^sub>K\<^sub>I\<^sub>P UNIV \<sqsubseteq>\<^sub>\<F>\<^sub>\<D> P\<close>
|
||||
\<close>
|
||||
text\<open>
|
||||
Finally, we proved the following theorem that confirms the relationship between the two vital
|
||||
properties:
|
||||
\<close>
|
||||
Theorem*[T2, short_name="''DF implies LF''"]
|
||||
Theorem*[T2, short_name="''DF implies LF''", level="Some 2"]
|
||||
\<open> \<open>deadlock_free P \<longrightarrow> livelock_free P\<close> \<close>
|
||||
|
||||
text\<open>
|
||||
|
@ -642,11 +597,11 @@ then it may still be livelock-free. % This makes sense since livelocks are worse
|
|||
|
||||
\<close>
|
||||
|
||||
section*["advanced"::tc,main_author="Some(@{docitem ''safouan''}::author)",level="Some 3"]
|
||||
section*["advanced"::technical,main_author="Some(@{author ''safouan''}::author)",level="Some 3"]
|
||||
\<open>Advanced Verification Techniques\<close>
|
||||
|
||||
text\<open>
|
||||
Based on the refinement framework discussed in @{docitem "newResults"}, we will now
|
||||
Based on the refinement framework discussed in @{technical "newResults"}, we will now
|
||||
turn to some more advanced proof principles, tactics and verification techniques.
|
||||
We will demonstrate them on two paradigmatic examples well-known in the \<^csp> literature:
|
||||
The CopyBuffer and Dijkstra's Dining Philosophers. In both cases, we will exploit
|
||||
|
@ -657,7 +612,7 @@ verification. In the latter case, we present an approach to a verification of a
|
|||
architecture, in this case a ring-structure of arbitrary size.
|
||||
\<close>
|
||||
|
||||
subsection*["illustration"::tc,main_author="Some(@{docitem ''safouan''}::author)", level="Some 3"]
|
||||
subsection*["illustration"::technical,main_author="Some(@{author ''safouan''}::author)", level="Some 3"]
|
||||
\<open>The General CopyBuffer Example\<close>
|
||||
text\<open>
|
||||
We consider the paradigmatic copy buffer example @{cite "Hoare:1985:CSP:3921" and "Roscoe:UCS:2010"}
|
||||
|
@ -705,7 +660,7 @@ of 2 lines proof-script involving the derived algebraic laws of \<^csp>.
|
|||
|
||||
After proving that \<open>SYSTEM\<close> implements \<open>COPY\<close> for arbitrary alphabets, we aim to profit from this
|
||||
first established result to check which relations \<open>SYSTEM\<close> has wrt. to the reference processes of
|
||||
@{docitem "processes"}. Thus, we prove that \<open>COPY\<close> is deadlock-free which implies livelock-free,
|
||||
@{technical "processes"}. Thus, we prove that \<open>COPY\<close> is deadlock-free which implies livelock-free,
|
||||
(proof by fixed-induction similar to \<open>lemma: COPY \<sqsubseteq> SYSTEM\<close>), from which we can immediately infer
|
||||
from transitivity that \<open>SYSTEM\<close> is. Using refinement relations, we killed four birds with one stone
|
||||
as we proved the deadlock-freeness and the livelock-freeness for both \<open>COPY\<close> and \<open>SYSTEM\<close> processes.
|
||||
|
@ -722,7 +677,7 @@ corollary deadlock_free COPY
|
|||
\<close>
|
||||
|
||||
|
||||
subsection*["inductions"::tc,main_author="Some(@{docitem ''safouan''}::author)"]
|
||||
subsection*["inductions"::technical,main_author="Some(@{author ''safouan''}::author)"]
|
||||
\<open>New Fixed-Point Inductions\<close>
|
||||
|
||||
text\<open>
|
||||
|
@ -739,9 +694,8 @@ For this reason, we derived a number of alternative induction schemes (which are
|
|||
in the HOLCF library), which are also relevant for our final Dining Philosophers example.
|
||||
These are essentially adaptions of k-induction schemes applied to domain-theoretic
|
||||
setting (so: requiring \<open>f\<close> continuous and \<open>P\<close> admissible; these preconditions are
|
||||
skipped here):
|
||||
\<^item> @{cartouche [display]\<open>... \<Longrightarrow> \<forall>i<k. P (f\<^sup>i \<bottom>) \<Longrightarrow> (\<forall>X. (\<forall>i<k. P (f\<^sup>i X)) \<longrightarrow> P (f\<^sup>k X))
|
||||
\<Longrightarrow> P (\<mu>X. f X)\<close>}
|
||||
skipped here):\<^vs>\<open>0.2cm\<close>
|
||||
\<^item> \<open>... \<Longrightarrow> \<forall>i<k. P (f\<^sup>i \<bottom>) \<Longrightarrow> (\<forall>X. (\<forall>i<k. P (f\<^sup>i X)) \<longrightarrow> P (f\<^sup>k X)) \<Longrightarrow> P (\<mu>X. f X)\<close>
|
||||
\<^item> \<open>... \<Longrightarrow> \<forall>i<k. P (f\<^sup>i \<bottom>) \<Longrightarrow> (\<forall>X. P X \<longrightarrow> P (f\<^sup>k X)) \<Longrightarrow> P (\<mu>X. f X)\<close>
|
||||
|
||||
|
||||
|
@ -749,10 +703,9 @@ skipped here):
|
|||
it reduces the goal size.
|
||||
|
||||
Another problem occasionally occurring in refinement proofs happens when the right side term
|
||||
involves more than one fixed-point process (\<^eg> \<open>P \<lbrakk>{A}\<rbrakk> Q \<sqsubseteq> S\<close>). In this situation,
|
||||
involves more than one fixed-point process (\<^eg> \<open>P \<lbrakk>A\<rbrakk> Q \<sqsubseteq> S\<close>). In this situation,
|
||||
we need parallel fixed-point inductions. The HOLCF library offers only a basic one:
|
||||
\<^item> @{cartouche [display]\<open>... \<Longrightarrow> P \<bottom> \<bottom> \<Longrightarrow> (\<forall>X Y. P X Y \<Longrightarrow> P (f X) (g Y))
|
||||
\<Longrightarrow> P (\<mu>X. f X) (\<mu>X. g X)\<close>}
|
||||
\<^item> \<open>... \<Longrightarrow> P \<bottom> \<bottom> \<Longrightarrow> (\<forall>X Y. P X Y \<Longrightarrow> P (f X) (g Y)) \<Longrightarrow> P (\<mu>X. f X) (\<mu>X. g X)\<close>
|
||||
|
||||
|
||||
\<^noindent> This form does not help in cases like in \<open>P \<lbrakk>\<emptyset>\<rbrakk> Q \<sqsubseteq> S\<close> with the interleaving operator on the
|
||||
|
@ -774,7 +727,7 @@ The astute reader may notice here that if the induction step is weakened (having
|
|||
the base steps require enforcement.
|
||||
\<close>
|
||||
|
||||
subsection*["norm"::tc,main_author="Some(@{docitem ''safouan''}::author)"]
|
||||
subsection*["norm"::technical,main_author="Some(@{author ''safouan''}::author)"]
|
||||
\<open>Normalization\<close>
|
||||
text\<open>
|
||||
Our framework can reason not only over infinite alphabets, but also over processes parameterized
|
||||
|
@ -795,7 +748,7 @@ This normal form is closed under deterministic and communication operators.
|
|||
The advantage of this format is that we can mimic the well-known product automata construction
|
||||
for an arbitrary number of synchronized processes under normal form.
|
||||
We only show the case of the synchronous product of two processes: \<close>
|
||||
text*[T3::"theorem", short_name="\<open>Product Construction\<close>"]\<open>
|
||||
Theorem*[T3, short_name="\<open>Product Construction\<close>", level="Some 2"]\<open>
|
||||
Parallel composition translates to normal form:
|
||||
@{cartouche [display,indent=5]\<open>(P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>1,\<upsilon>\<^sub>1\<rbrakk> \<sigma>\<^sub>1) || (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>\<^sub>2,\<upsilon>\<^sub>2\<rbrakk> \<sigma>\<^sub>2) =
|
||||
P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2). \<tau>\<^sub>1 \<sigma>\<^sub>1 \<inter> \<tau>\<^sub>2 \<sigma>\<^sub>2 , \<lambda>(\<sigma>\<^sub>1,\<sigma>\<^sub>2).\<lambda>e.(\<upsilon>\<^sub>1 \<sigma>\<^sub>1 e, \<upsilon>\<^sub>2 \<sigma>\<^sub>2 e)\<rbrakk> (\<sigma>\<^sub>1,\<sigma>\<^sub>2)\<close>}
|
||||
|
@ -815,7 +768,7 @@ states via the closure \<open>\<RR>\<close>, which is defined inductively over:
|
|||
Thus, normalization leads to a new characterization of deadlock-freeness inspired
|
||||
from automata theory. We formally proved the following theorem:\<close>
|
||||
|
||||
text*[T4::"theorem", short_name="\<open>DF vs. Reachability\<close>"]
|
||||
text*[T4::"theorem", short_name="\<open>DF vs. Reachability\<close>", level="Some 2"]
|
||||
\<open> If each reachable state \<open>s \<in> (\<RR> \<tau> \<upsilon>)\<close> has outgoing transitions,
|
||||
the \<^csp> process is deadlock-free:
|
||||
@{cartouche [display,indent=10] \<open>\<forall>\<sigma> \<in> (\<RR> \<tau> \<upsilon> \<sigma>\<^sub>0). \<tau> \<sigma> \<noteq> {} \<Longrightarrow> deadlock_free (P\<^sub>n\<^sub>o\<^sub>r\<^sub>m\<lbrakk>\<tau>,\<upsilon>\<rbrakk> \<sigma>\<^sub>0)\<close>}
|
||||
|
@ -834,7 +787,7 @@ Summing up, our method consists of four stages:
|
|||
|
||||
\<close>
|
||||
|
||||
subsection*["dining_philosophers"::tc,main_author="Some(@{docitem ''safouan''}::author)",level="Some 3"]
|
||||
subsection*["dining_philosophers"::technical,main_author="Some(@{author ''safouan''}::author)",level="Some 3"]
|
||||
\<open>Generalized Dining Philosophers\<close>
|
||||
|
||||
text\<open> The dining philosophers problem is another paradigmatic example in the \<^csp> literature
|
||||
|
@ -926,7 +879,7 @@ for a dozen of philosophers (on a usual machine) due to the exponential combinat
|
|||
Furthermore, our proof is fairly stable against modifications like adding non synchronized events like
|
||||
thinking or sitting down in contrast to model-checking techniques. \<close>
|
||||
|
||||
section*["relatedwork"::tc,main_author="Some(@{docitem ''lina''}::author)",level="Some 3"]
|
||||
section*["relatedwork"::technical,main_author="Some(@{author ''lina''}::author)",level="Some 3"]
|
||||
\<open>Related work\<close>
|
||||
|
||||
text\<open>
|
||||
|
@ -993,7 +946,7 @@ restrictions on the structure of components. None of our paradigmatic examples c
|
|||
be automatically proven with any of the discussed SMT techniques without restrictions.
|
||||
\<close>
|
||||
|
||||
section*["conclusion"::conclusion,main_author="Some(@{docitem ''bu''}::author)"]\<open>Conclusion\<close>
|
||||
section*["conclusion"::conclusion,main_author="Some(@{author ''bu''}::author)"]\<open>Conclusion\<close>
|
||||
text\<open>We presented a formalisation of the most comprehensive semantic model for \<^csp>, a 'classical'
|
||||
language for the specification and analysis of concurrent systems studied in a rich body of
|
||||
literature. For this purpose, we ported @{cite "tej.ea:corrected:1997"} to a modern version
|
|
@ -1,7 +1,6 @@
|
|||
theory PikeOS_ST (*Security Target *)
|
||||
|
||||
imports "../../../src/ontologies/CC_v3.1_R5/CC_v3_1_R5"
|
||||
(* Isabelle_DOF.CC_v3_1_R5 in the future. *)
|
||||
imports "Isabelle_DOF-Ontologies.CC_v3_1_R5"
|
||||
|
||||
begin
|
||||
|
||||
|
@ -18,18 +17,20 @@ text*[pkosstref::st_ref_cls, title="''PikeOS Security Target''", st_version ="(0
|
|||
It complies with the Common Criteria for Information Technology Security Evaluation
|
||||
Version 3.1 Revision 4.\<close>
|
||||
|
||||
|
||||
|
||||
subsection*[pkossttoerefsubsec::st_ref_cls]\<open>TOE Reference\<close>
|
||||
|
||||
text*[pkostoeref::toe_ref_cls, dev_name="''''", toe_name="''PikeOS''",
|
||||
toe_version= "(0,3,4)", prod_name="Some ''S3725''"]
|
||||
\<open>The @{docitem toe_def} is the operating system PikeOS version 3.4
|
||||
\<open>The @{docitem (unchecked) toeDef} is the operating system PikeOS version 3.4
|
||||
running on the microprocessor family x86 hosting different applications.
|
||||
The @{docitem toe_def} is referenced as PikeOS 3.4 base
|
||||
The @{docitem (unchecked) toeDef} is referenced as PikeOS 3.4 base
|
||||
product build S3725 for Linux and Windows development host with PikeOS 3.4
|
||||
Certification Kit build S4250 and PikeOS 3.4 Common Criteria Kit build S4388.\<close>
|
||||
|
||||
subsection*[pkossttoeovrvwsubsec::st_ref_cls]\<open> TOE Overview \<close>
|
||||
text*[pkosovrw1::toe_ovrw_cls]\<open>The @{definition \<open>toe\<close> } is a special kind of operating
|
||||
text*[pkosovrw1::toe_ovrw_cls]\<open>The @{docitem (unchecked) \<open>toeDef\<close> } is a special kind of operating
|
||||
system that allows one to effectively separate
|
||||
different applications running on the same platform from each other. The TOE can host
|
||||
user applications that can also be operating systems. User applications can also be
|
||||
|
@ -87,4 +88,4 @@ open_monitor*[PikosSR::SEC_REQ_MNT]
|
|||
close_monitor*[PikosSR]
|
||||
|
||||
close_monitor*[stpkos]
|
||||
end
|
||||
end
|
|
@ -0,0 +1,4 @@
|
|||
session "PikeOS_study" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = false]
|
||||
theories
|
||||
"PikeOS_ST"
|
|
@ -0,0 +1 @@
|
|||
PikeOS_study
|
0
examples/CENELEC_50128/ROOTS → Isabelle_DOF-Examples-Extra/CENELEC_50128/ROOTS
Executable file → Normal file
|
@ -0,0 +1,18 @@
|
|||
session "mini_odo" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
sessions
|
||||
"Physical_Quantities"
|
||||
theories
|
||||
"mini_odo"
|
||||
document_theories
|
||||
"Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
document_files
|
||||
"dof_session.tex"
|
||||
"preamble.tex"
|
||||
"root.bib"
|
||||
"root.mst"
|
||||
"figures/df-numerics-encshaft.png"
|
||||
"figures/odometer.jpeg"
|
||||
"figures/three-phase-odo.pdf"
|
||||
"figures/wheel-df.png"
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
\input{mini_odo}
|
||||
\input{CENELEC_50128}
|
||||
|
Before Width: | Height: | Size: 27 KiB After Width: | Height: | Size: 27 KiB |
Before Width: | Height: | Size: 407 KiB After Width: | Height: | Size: 407 KiB |
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
2
examples/CENELEC_50128/mini_odo/document/preamble.tex → Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/document/preamble.tex
Executable file → Normal file
|
@ -13,8 +13,6 @@
|
|||
%% SPDX-License-Identifier: LPPL-1.3c+ OR BSD-2-Clause
|
||||
|
||||
%% This is a placeholder for user-specific configuration and packages.
|
||||
\usepackage{listings}
|
||||
\usepackage{lstisadof}
|
||||
\usepackage{wrapfig}
|
||||
\usepackage{paralist}
|
||||
\usepackage{numprint}
|
0
examples/CENELEC_50128/mini_odo/document/root.bib → Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/document/root.bib
Executable file → Normal file
0
examples/CENELEC_50128/mini_odo/document/root.mst → Isabelle_DOF-Examples-Extra/CENELEC_50128/mini_odo/document/root.mst
Executable file → Normal file
|
@ -0,0 +1,675 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
* This program can be redistributed and/or modified under the terms
|
||||
* of the 2-clause BSD-style license.
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*************************************************************************)
|
||||
|
||||
(*<*)
|
||||
theory
|
||||
mini_odo
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
"Isabelle_DOF.technical_report"
|
||||
"Physical_Quantities.SI" "Physical_Quantities.SI_Pretty"
|
||||
begin
|
||||
use_template "scrreprt-modern"
|
||||
use_ontology technical_report and "Isabelle_DOF-Ontologies.CENELEC_50128"
|
||||
declare[[strict_monitor_checking=true]]
|
||||
define_shortcut* dof \<rightleftharpoons> \<open>\dof\<close>
|
||||
isadof \<rightleftharpoons> \<open>\isadof{}\<close>
|
||||
(*>*)
|
||||
|
||||
title*[title::title]\<open>The CENELEC 50128 Ontology\<close>
|
||||
subtitle*[subtitle::subtitle]\<open>Case Study: An Odometer-Subsystem\<close>
|
||||
|
||||
chapter*[casestudy::technical]\<open>An Odometer-Subsystem\<close>
|
||||
text\<open>
|
||||
In our case study, we will follow the phases of analysis, design, and implementation of the
|
||||
odometry function of a train. This \<^cenelec_term>\<open>SF\<close> processes data from an odometer to compute
|
||||
the position, speed, and acceleration of a train. This system provides the basis for many
|
||||
safety critical decisions, \<^eg>, the opening of the doors. Due to its relatively small size, it
|
||||
is a manageable, albeit realistic target for a comprehensive formal development: it covers a
|
||||
physical model of the environment, the physical and architectural model of the odometer,
|
||||
but also the \<^cenelec_term>\<open>SFRS\<close> aspects including the problem of numerical sampling and the
|
||||
boundaries of efficient computations. The interplay between environment and measuring-device as
|
||||
well as the implementation problems on a platform with limited resources makes the odometer a
|
||||
fairly typical \<^cenelec_term>\<open>safety\<close> critical \<^cenelec_term>\<open>component\<close> of an embedded system.
|
||||
|
||||
The case-study is presented in form of an \<^emph>\<open>integrated source\<close> in \<^isadof> containing all four
|
||||
reports from the phases:
|
||||
\<^item> \<^term>\<open>software_requirements\<close> with deliverable \<^doc_class>\<open>SWRS\<close>
|
||||
(or long:\<^typ>\<open>software_requirements_specification\<close>(-report))
|
||||
\<^item> \<^term>\<open>software_architecture_and_design\<close> with deliverable \<^doc_class>\<open>SWDS\<close>
|
||||
(or long: \<^typ>\<open>software_design_specification\<close>(-report))
|
||||
\<^item> \<^term>\<open>software_component_design\<close> with deliverable \<^doc_class>\<open>SWCDVR\<close>
|
||||
(or long: \<^typ>\<open>software_component_design_verification\<close>(-report).)
|
||||
\<^item> \<^term>\<open>component_implementation_and_testing\<close> with deliverable \<^doc_class>\<open>SWADVR\<close>
|
||||
(or long: \<^typ>\<open>software_architecture_and_design_verification\<close>(-report))
|
||||
|
||||
The objective of this case study is to demonstrate deep-semantical ontologies in
|
||||
software developments targeting certifications, and in particular, how \<^isadof>'s
|
||||
integrated source concept permits to assure \<^cenelec_term>\<open>traceability\<close>.
|
||||
|
||||
\<^bold>\<open>NOTE\<close> that this case study has aspects that were actually covered by CENELEC 50126 -
|
||||
the 'systems'-counterpart covering hardware aspects. Recall that the CENELEC 50128 covers
|
||||
software.
|
||||
|
||||
Due to space reasons, we will focus on the analysis part of the integrated
|
||||
document; the design and code parts will only be outlined in a final resume. The
|
||||
\<^emph>\<open>ontological embedding\<close>, which represents a main contribution of this paper, will be presented
|
||||
in the next two sections.
|
||||
|
||||
We start with the capture of a number of informal documents available at the beginning of the
|
||||
development.
|
||||
\<close>
|
||||
|
||||
section\<open>A CENELEC-conform development as an \<^emph>\<open>Integrated Source\<close>\<close>
|
||||
|
||||
text\<open>Accurate information of a train's location along a track is an important prerequisite
|
||||
to safe railway operation. Position, speed and acceleration measurement usually relies on a
|
||||
set of independent measurements based on different physical principles---as a way to enhance
|
||||
precision and availability. One of them is an \<^emph>\<open>odometer\<close>, which allows estimating a relative
|
||||
location while the train runs positions established by other measurements. \<close>
|
||||
|
||||
subsection\<open>Capturing ``Basic Principles of Motion and Motion Measurement.''\<close>
|
||||
text\<open>
|
||||
A rotary encoder measures the motion of a train. To achieve this, the encoder's shaft is fixed to
|
||||
the train's wheel axle. When the train moves, the encoder produces a signal pattern directly
|
||||
related to the train's progress. By measuring the fractional rotation of the encoder's shaft and
|
||||
considering the wheel's effective ratio, the relative movement of the train can be calculated.
|
||||
|
||||
\begin{wrapfigure}[8]{l}{4.6cm}
|
||||
\centering
|
||||
\vspace{-.5cm}
|
||||
\includegraphics[width=3.4cm]{figures/wheel-df}
|
||||
\caption{Motion sensing via an odometer.}
|
||||
\label{wheel-df}
|
||||
\end{wrapfigure}
|
||||
\autoref{wheel-df} shows that we model a train, seen from a pure kinematics standpoint, as physical
|
||||
system characterized by a one-dimensional continuous distance function, which represents the
|
||||
observable of the physical system. Concepts like speed and acceleration were derived concepts
|
||||
defined as their (gradient) derivatives. We assume the use of the meter, kilogram, and second
|
||||
(MKS) system.
|
||||
|
||||
This model is already based on several fundamental assumptions relevant for the correct
|
||||
functioning of the system and for its integration into the system as a whole. In
|
||||
particular, we need to make the following assumptions explicit: \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
|
||||
text*["perfect_wheel"::assumption]
|
||||
\<open>\<^item> the wheel is perfectly circular with a given, constant radius. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["no_slip"::assumption]
|
||||
\<open>\<^item> the slip between the train's wheel and the track is negligible. \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["constant_teeth_dist"::assumption]
|
||||
\<open>\<^item> the distance between all teeth of a wheel is the same and constant, and \<^vs>\<open>-0.3cm\<close>\<close>
|
||||
text*["constant_sampling_rate"::assumption]
|
||||
\<open>\<^item> the sampling rate of positions is a given constant.\<close>
|
||||
|
||||
text\<open>
|
||||
These assumptions have to be traced throughout the certification process as
|
||||
|
||||
\<^emph>\<open>derived requirements\<close> (or, in CENELEC terminology, as \<^emph>\<open>exported constraints\<close>), which is
|
||||
also reflected by their tracing throughout the body of certification documents. This may result
|
||||
in operational regulations, \<^eg>, regular checks for tolerable wheel defects. As for the
|
||||
\<^emph>\<open>no slip\<close>-assumption, this leads to the modeling of constraints under which physical
|
||||
slip can be neglected: the device can only produce reliable results under certain physical
|
||||
constraints (speed and acceleration limits). Moreover, the \<^emph>\<open>no slip\<close>-assumption motivates
|
||||
architectural arrangements for situations where this assumption cannot be assured (as is the
|
||||
case, for example, of an emergency breaking) together with error-detection and error-recovery.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing ``System Architecture.''\<close>
|
||||
|
||||
figure*["three_phase"::figure,relative_width="70",file_src="''figures/three-phase-odo.pdf''"]
|
||||
\<open>An odometer with three sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.\<close>
|
||||
|
||||
text\<open>
|
||||
The requirements analysis also contains a document \<^doc_class>\<open>SYSAD\<close>
|
||||
(\<^typ>\<open>system_architecture_description\<close>) that contains technical drawing of the odometer,
|
||||
a timing diagram (see \<^figure>\<open>three_phase\<close>), and tables describing the encoding of the position
|
||||
for the possible signal transitions of the sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close>.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing ``System Interfaces.''\<close>
|
||||
text\<open>
|
||||
The requirements analysis also contains a sub-document \<^doc_class>\<open>FnI\<close> (\<^typ>\<open>functions_and_interfaces\<close>)
|
||||
describing the technical format of the output of the odometry function.
|
||||
This section, \<^eg>, specifies the output \<^emph>\<open>speed\<close> as given by a \<^verbatim>\<open>int_32\<close> to be the
|
||||
``Estimation of the speed (in mm/sec) evaluated over the latest \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> samples''
|
||||
where the speed refers to the physical speed of the train and \<open>N\<^sub>a\<^sub>v\<^sub>g\<close> a parameter of the
|
||||
sub-system configuration. \<close>
|
||||
|
||||
(*<*)
|
||||
declare_reference*["df_numerics_encshaft"::figure]
|
||||
(*>*)
|
||||
subsection\<open>Capturing ``Required Performances.''\<close>
|
||||
text\<open>
|
||||
The given analysis document is relatively implicit on the expected precision of the measurements;
|
||||
however, certain interface parameters like \<open>Odometric_Position_TimeStamp\<close>
|
||||
(a counter on the number of samplings) and \<open>Relative_Position\<close> are defined as
|
||||
unsigned 32-bit integers. These definitions imply exported constraints concerning the acceptable
|
||||
time of service as well as the maximum distance before a necessary reboot of the subsystem.
|
||||
For our case-study, we assume a maximum deviation of the \<open>Relative_Position\<close> from the
|
||||
theoretical distance.
|
||||
|
||||
The requirement analysis document describes the physical environment, the architecture
|
||||
of the measuring device, and the required format and precision of the measurements of the odometry
|
||||
function as represented (see @{figure (unchecked) "df_numerics_encshaft"}).\<close>
|
||||
|
||||
figure*["df_numerics_encshaft"::figure,relative_width="76",file_src="''figures/df-numerics-encshaft.png''"]
|
||||
\<open>Real distance vs. discrete distance vs. shaft-encoder sequence\<close>
|
||||
|
||||
|
||||
subsection\<open>Capturing the ``Software Design Spec'' (Resume).\<close>
|
||||
text\<open>
|
||||
The design provides a function that manages an internal first-in-first-out buffer of
|
||||
shaft-encodings and corresponding positions. Central for the design is a step-function analyzing
|
||||
new incoming shaft encodings, checking them and propagating two kinds of error-states (one allowing
|
||||
recovery, another one, fatal, signaling, \<^eg>, a defect of the receiver hardware),
|
||||
calculating the relative position, speed and acceleration.
|
||||
\<close>
|
||||
|
||||
subsection\<open>Capturing the ``Software Implementation'' (Resume).\<close>
|
||||
text\<open>
|
||||
While the design is executable on a Linux system, it turns out that the generated code from an
|
||||
Isabelle model is neither executable on the resource-constrained target platform, an ARM-based
|
||||
Sabre-light card, nor certifiable, since the compilation chain via ML to C implies the
|
||||
inclusion of a run-time system and quite complex libraries.
|
||||
We adopted therefore a similar approach as used in the seL4 project~@{cite "Klein2014"}: we use a
|
||||
hand-written implementation in C and verify it via
|
||||
AutoCorres~@{cite "greenaway.ea:bridging:2012"} against
|
||||
the design model. The hand-written C-source is integrated into the Isabelle/HOL technically by
|
||||
registering it in the build-configuration and logically by a trusted C-to-HOL compiler included
|
||||
in AutoCorres.
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
(* Model parameters of the odometer hardware. The first two are underspecified
   via Hilbert choice: any positive value is admissible. *)
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
(* Wheel diameter in metres; positive but otherwise unconstrained. *)
definition wheel_diameter ::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||
(* Circumference derived from the diameter (the perfect_wheel assumption). *)
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||
(* Odometer resolution: the fraction 1/(2*3*tpw) of the circumference --
   presumably 2 edges x 3 sensors (C1,C2,C3) per tooth; TODO confirm against
   the shaft-encoder specification. *)
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s ::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||
(*>*)
|
||||
|
||||
|
||||
section\<open>Formal Enrichment of the Software Requirements Specification\<close>
|
||||
text\<open>
|
||||
After the \<^emph>\<open>capture\<close>-phase, where we converted/integrated existing informal analysis and design
|
||||
documents as well as code into an integrated Isabelle document, we entered into the phase of
|
||||
\<open>formal enrichment\<close>. For example, from the assumptions in the architecture follow
|
||||
the definitions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition teeth_per_wheelturn::nat ("tpw") where "tpw \<equiv> SOME x. x > 0"
|
||||
definition wheel_diameter::"real[m]" ("w\<^sub>d") where "w\<^sub>d \<equiv> SOME x. x > 0"
|
||||
definition wheel_circumference::"real[m]" ("w\<^sub>0") where "w\<^sub>0 \<equiv> pi *\<^sub>Q w\<^sub>d"
|
||||
definition \<delta>s\<^sub>r\<^sub>e\<^sub>s::"real[m]" where "\<delta>s\<^sub>r\<^sub>e\<^sub>s \<equiv> 1 / (2 * 3 * tpw) *\<^sub>Q w\<^sub>0 "
|
||||
\<close>}
|
||||
|
||||
Here, \<open>real\<close> refers to the real numbers as defined in the HOL-Analysis library, which provides
|
||||
concepts such as Cauchy Sequences, limits, differentiability, and a very substantial part of
|
||||
classical Calculus. \<open>SOME\<close> is the Hilbert choice operator from HOL; the definitions of the
|
||||
model parameters admit all possible positive values as uninterpreted constants. Our
|
||||
\<^assumption>\<open>perfect_wheel\<close> is translated into a calculation of the circumference of the
|
||||
wheel, while \<open>\<delta>s\<^sub>r\<^sub>e\<^sub>s\<close>, the resolution of the odometer, can be calculated
|
||||
from these parameters. HOL-Analysis permits us to formalize the fundamental physical observables:
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
(* A distance function maps time (seconds) to the distance travelled (metres). *)
type_synonym distance_function = "real[s] \<Rightarrow> real[m]"
|
||||
(* Speed of a distance function in m/s; declared uninterpreted here, intended
   as the derivative (cf. the displayed definition in the text below). *)
consts Speed::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
(* Acceleration in m/s^2; declared uninterpreted here, intended as the
   second derivative. *)
consts Accel::"distance_function \<Rightarrow> real[s] \<Rightarrow> real[m\<cdot>s\<^sup>-\<^sup>2]"
|
||||
(* Upper bound on the admissible speed; a system configuration parameter. *)
consts Speed\<^sub>M\<^sub>a\<^sub>x::"real[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
|
||||
(* Non-SI-conform but common abbreviations: km/h and kHz. *)
definition "kmh \<equiv> kilo *\<^sub>Q metre \<^bold>/ hour :: 'a::{field,ring_char_0}[m\<cdot>s\<^sup>-\<^sup>1]"
|
||||
definition "kHz \<equiv> kilo *\<^sub>Q hertz :: 'a::{field,ring_char_0}[s\<^sup>-\<^sup>1]"
|
||||
|
||||
(*>*)
|
||||
text\<open>
|
||||
@{theory_text [display]\<open>
|
||||
type_synonym distance_function = "real[s]\<Rightarrow>real[m]"
|
||||
definition Speed::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Speed f \<equiv> deriv f"
|
||||
definition Accel::"distance_function\<Rightarrow>real\<Rightarrow>real" where "Accel f \<equiv> deriv (deriv f)"
|
||||
\<close>}
|
||||
|
||||
which permits constraining the central observable \<open>distance_function\<close> in a
|
||||
way that they describe the space of ``normal behavior'' where we expect the odometer to produce
|
||||
reliable measurements over a \<open>distance_function df\<close>.
|
||||
|
||||
The essence of the physics of the train is covered by the following definition:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition normally_behaved_distance_function :: "(real \<Rightarrow> real) \<Rightarrow> bool"
|
||||
where normally_behaved_distance_function df =
|
||||
( \<forall> t. df(t) \<in> \<real>\<^sub>\<ge>\<^sub>0 \<and> (\<forall> t \<in> \<real>\<^sub>\<le>\<^sub>0. df(t) = 0)
|
||||
\<and> df differentiable on \<real>\<^sub>\<ge>\<^sub>0 \<and> (Speed df) differentiable on \<real>\<^sub>\<ge>\<^sub>0
|
||||
\<and> (Accel df) differentiable on \<real>\<^sub>\<ge>\<^sub>0
|
||||
\<and> (\<forall> t. (Speed df) t \<in> {Speed\<^sub>M\<^sub>i\<^sub>n .. Speed\<^sub>M\<^sub>a\<^sub>x})
|
||||
\<and> (\<forall> t. (Accel df) t \<in> {Accel\<^sub>M\<^sub>i\<^sub>n .. Accel\<^sub>M\<^sub>a\<^sub>x}))
|
||||
\<close>}
|
||||
|
||||
which constrains the distance functions to the bounds described in the informal descriptions and
|
||||
states them as three-fold differentiable functions within certain bounds concerning speed and
|
||||
acceleration. Note that violations, in particular of the constraints on speed and acceleration,
|
||||
\<^emph>\<open>do\<close> occur in practice. In such cases, the global system adapts recovery strategies that are out
|
||||
of the scope of our model. Concepts like \<open>shaft_encoder_state\<close> (a triple with the sensor values
|
||||
\<open>C1\<close>, \<open>C2\<close>, \<open>C3\<close>) were formalized as types, while tables were
|
||||
defined as recursive functions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
fun phase\<^sub>0 :: "nat \<Rightarrow> shaft_encoder_state" where
|
||||
"phase\<^sub>0 (0) = \<lparr> C1 = False, C2 = False, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 (1) = \<lparr> C1 = True, C2 = False, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 (2) = \<lparr> C1 = True, C2 = False, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (3) = \<lparr> C1 = True, C2 = True, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (4) = \<lparr> C1 = False, C2 = True, C3 = False\<rparr>"
|
||||
|"phase\<^sub>0 (5) = \<lparr> C1 = False, C2 = True, C3 = True \<rparr>"
|
||||
|"phase\<^sub>0 x = phase\<^sub>0(x - 6)"
|
||||
definition Phase ::"nat\<Rightarrow>shaft_encoder_state" where Phase(x) = phase\<^sub>0(x-1)
|
||||
\<close>}
|
||||
|
||||
We now define shaft encoder sequences as translations of distance functions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition encoding::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>shaft_encoder_state"
|
||||
where "encoding df init\<^sub>p\<^sub>o\<^sub>s \<equiv> \<lambda>x. Phase(nat\<lfloor>df(x) / \<delta>s\<^sub>r\<^sub>e\<^sub>s\<rfloor> + init\<^sub>p\<^sub>o\<^sub>s)"
|
||||
\<close>}
|
||||
|
||||
where \<open>init\<^sub>p\<^sub>o\<^sub>s\<close> is the initial position of the wheel.
|
||||
\<open>sampling\<close>'s were constructed from encoding sequences over discretized time points:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
definition sampling::"distance_function\<Rightarrow>nat\<Rightarrow>real\<Rightarrow>nat\<Rightarrow>shaft_encoder_state"
|
||||
where "sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t \<equiv> \<lambda>n::nat. encoding df init\<^sub>p\<^sub>o\<^sub>s (n * \<delta>t)"
|
||||
\<close>}
|
||||
|
||||
where \<open>\<delta>t\<close> is the sampling interval, a parameter of the configuration of a system.
|
||||
|
||||
Finally, we can formally define the required performances. From the interface description
|
||||
and the global model parameters such as wheel diameter, the number of teeth per wheel, the
|
||||
sampling frequency etc., we can infer the maximal time of service as well as the maximum distance
|
||||
the device can measure. As an example configuration, choosing:
|
||||
|
||||
\<^item> \<^term>\<open>(1 *\<^sub>Q metre):: real[m]\<close> for \<^term>\<open>w\<^sub>d\<close> (wheel-diameter),
|
||||
\<^item> \<^term>\<open>100 :: real\<close> for \<^term>\<open>tpw\<close> (teeth per wheel),
|
||||
\<^item> \<^term>\<open>80 *\<^sub>Q kmh :: real[m\<cdot>s\<^sup>-\<^sup>1]\<close> for \<^term>\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close>,
|
||||
\<^item> \<^term>\<open>14.4 *\<^sub>Q kHz :: real[s\<^sup>-\<^sup>1]\<close> for the sampling frequency,
|
||||
|
||||
results in an odometer resolution of \<^term>\<open>2.3 *\<^sub>Q milli *\<^sub>Q metre\<close>, a maximum distance of
|
||||
\<^term>\<open>9878 *\<^sub>Q kilo *\<^sub>Q metre\<close>, and a maximal system up-time of \<^term>\<open>123.4 *\<^sub>Q hour\<close>s.
|
||||
The required precision of an odometer can be defined by a constant describing
|
||||
the maximally allowed difference between \<open>df(n*\<delta>t)\<close> and
|
||||
\<open>sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t n\<close> for all \<open>init\<^sub>p\<^sub>o\<^sub>s \<in>{0..5}\<close>.
|
||||
\<close>
|
||||
(*<*)
|
||||
(* Scratch SML computation (suppressed from the document output) backing the
   figures quoted in the text above: a 32-bit position counter at 2.3 mm per
   count yields ~9878 km maximum distance and ~123.4 h up-time at 80 km/h. *)
ML\<open>val two_thirty2 = 1024 * 1024 * 1024 * 4;  (* 2^32 representable counter values *)
|
||||
val dist_max = 0.0023 * (real two_thirty2) / 1000.0;  (* max distance in km: 0.0023 m/count * 2^32 counts, m -> km *)
|
||||
val dist_h = dist_max / 80.0 (* maximal up-time in hours at 80 km/h *)\<close>
|
||||
(*>*)
|
||||
|
||||
section*[verific::technical]\<open>Verification of the Software Requirements Specification\<close>
|
||||
text\<open>The original documents contained already various statements that motivate certain safety
|
||||
properties of the device. For example, the \<open>Phase\<close>-table excludes situations in which
|
||||
all sensors \<open>C1\<close>, \<open>C2\<close>, and \<open>C3\<close> are all ``off'' or situations in
|
||||
which all sensors are ``on,'' reflecting a physical or electrical error in the odometer. It can be
|
||||
shown by a very small Isabelle case-distinction proof that this safety requirement follows indeed
|
||||
from the above definitions:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
lemma Encoder_Property_1:(C1(Phase x) \<and> C2(Phase x) \<and> C3(Phase x))=False
|
||||
proof (cases x)
|
||||
case 0 then show ?thesis by (simp add: Phase_def)
|
||||
next
|
||||
case (Suc n) then show ?thesis
|
||||
by(simp add: Phase_def,rule_tac n = n in cycle_case_split,simp_all)
|
||||
qed
|
||||
\<close>}
|
||||
|
||||
for all positions \<open>x\<close>. Similarly, it is proved that the table is indeed cyclic:
|
||||
|
||||
\<open>phase\<^sub>0 x = phase\<^sub>0(x mod 6)\<close>
|
||||
|
||||
and locally injective:
|
||||
|
||||
\<open>\<forall>x<6. \<forall>y<6. phase\<^sub>0 x = phase\<^sub>0 y \<longrightarrow> x = y\<close>
|
||||
|
||||
These lemmas, building the ``theory of an odometer,'' culminate in a theorem
|
||||
that we would like to present in more detail.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
theorem minimal_sampling :
|
||||
assumes * : normally_behaved_distance_function df
|
||||
and ** : \<delta>t * Speed\<^sub>M\<^sub>a\<^sub>x < \<delta>s\<^sub>r\<^sub>e\<^sub>s
|
||||
shows \<forall> \<delta>X\<le>\<delta>t. 0<\<delta>X \<longrightarrow>
|
||||
\<exists>f. retracting (f::nat\<Rightarrow>nat) \<and>
|
||||
sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>X = (sampling df init\<^sub>p\<^sub>o\<^sub>s \<delta>t) o f
|
||||
|
||||
\<close>}
|
||||
|
||||
This theorem states for \<open>normally_behaved_distance_function\<close>s that there is
|
||||
a minimal sampling frequency assuring the safety of the measurements; samplings on
|
||||
some \<open>df\<close> gained from this minimal sampling frequency can be ``pumped up''
|
||||
to samplings of these higher sampling frequencies; they do not contain more information.
|
||||
Of particular interest is the second assumption, labelled ``\<open>**\<close>'' which
|
||||
establishes a lower bound from \<open>w\<^sub>0\<close>, \<open>tpw\<close>,
|
||||
\<open>Speed\<^sub>M\<^sub>a\<^sub>x\<close> for the sampling frequency. Methodologically, this represents
|
||||
an exported constraint that can not be represented \<^emph>\<open>inside\<close> the design model: it means that the
|
||||
computations have to be fast enough on the computing platform in order to assure that the
|
||||
calculations are valid. It was in particular this exported constraint that forced us to give up
|
||||
the original plan to generate the code from the design model and to execute this directly on the
|
||||
target platform.
|
||||
|
||||
For our example configuration (1m diameter, 100 teeth per wheel, 80km/h max), this theorem justifies
|
||||
that 14.4 kHz is indeed enough to assure valid samplings. Such properties are called
|
||||
``internal consistency of the software requirements specification'' in the CENELEC
|
||||
standard~@{cite "bsi:50128:2014"}, 7.2.4.22, and are usually addressed in a separate report.
|
||||
\<close>
|
||||
|
||||
chapter*[ontomodeling::text_section]\<open>The CENELEC 50128 Ontology\<close>
|
||||
|
||||
text\<open>
|
||||
Modeling an ontology from a semi-formal text such as~@{cite"bsi:50128:2014"} is,
|
||||
like any other modeling activity, not a simple one-to-one translation of some
|
||||
concepts to some formalism. Rather, implicit and self-understood principles
|
||||
have to be made explicit, abstractions have to be made, and decisions about
|
||||
the kind of desirable user-interaction may have an influence similarly to
|
||||
design decisions influenced by strengths or weaknesses of a programming language.
|
||||
\<close>
|
||||
|
||||
section*[lhf::text_section]
|
||||
\<open>Tracking Concepts and Definitions\<close>
|
||||
|
||||
text\<open>
|
||||
\<^isadof> is designed to annotate text elements with structured meta-information and to reference
|
||||
these text elements throughout the integrated source. A classical application of this capability
|
||||
is the annotation of concepts and terms definitions---be them informal, semi-formal or formal---and
|
||||
their consistent referencing. In the context of our CENELEC ontology, \<^eg>, we can translate the
|
||||
third chapter of @{cite "bsi:50128:2014"} ``Terms, Definitions and Abbreviations'' directly
|
||||
into our Ontology Definition Language (ODL). Picking one example out of 49, consider the definition
|
||||
of the concept \<^cenelec_term>\<open>traceability\<close> in paragraphs 3.1.46 (a notion referenced 31 times in
|
||||
the standard), which we translated directly into:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
Definition*[traceability, short_name="''traceability''"]
|
||||
\<open>degree to which relationship can be established between two or more products of a
|
||||
development process, especially those having a predecessor/successor or
|
||||
master/subordinate relationship to one another.\<close>
|
||||
\<close>}
|
||||
|
||||
In the integrated source of the odometry study, we can reference in a text element to this
|
||||
concept as follows:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[...]\<open> ... to assure <@>{cenelec_term traceability} for
|
||||
<@>{requirement bitwiseAND}, we prove ... \<close>
|
||||
\<close>}
|
||||
|
||||
|
||||
\<^isadof> also uses the underlying ontology to generate the navigation markup inside the IDE, \<^ie>
|
||||
the presentation of this document element inside \<^isadof> is immediately hyperlinked against the
|
||||
@{theory_text \<open> Definition* \<close>}-element shown above; this serves as documentation of
|
||||
the standard for the development team working on the integrated source. The PDF presentation
|
||||
of such links depends on the actual configurations for the document generation; We will explain
|
||||
this later.
|
||||
CENELEC foresees also a number of roles, phases, safety integration levels, etc., which were
|
||||
directly translated into HOL enumeration types usable in ontological concepts of ODL.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
datatype role =
|
||||
PM (* Program Manager *) | RQM (* Requirements Manager *)
|
||||
| DES (* Designer *) | IMP (* Implementer *)
|
||||
| VER (* Verifier *) | VAL (* Validator *) | ...
|
||||
datatype phase =
|
||||
SYSDEV_ext (* System Development *) | SPl (* Software Planning *)
|
||||
| SR (* Software Requirement *) | SA (* Software Architecture *)
|
||||
| SDES (* Software Design *) | ...
|
||||
\<close>}
|
||||
|
||||
Similarly, we can formalize the Table A.5: Verification and Testing of @{cite "bsi:50128:2014"}:
|
||||
a classification of \<^emph>\<open>verification and testing techniques\<close>:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
datatype vnt_technique =
|
||||
formal_proof "thm list" | stat_analysis
|
||||
| dyn_analysis dyn_ana_kind | ...
|
||||
\<close>}
|
||||
|
||||
In contrast to the standard, we can parameterize \<open>formal_proof\<close> with a list of
|
||||
theorems, an entity known in the Isabelle kernel. Here, \<^isadof> assures for text elements
|
||||
annotated with theorem names, that they refer indeed to established theorems in the Isabelle
|
||||
environment. Additional checks could be added to make sure that these theorems have a particular
|
||||
form.
|
||||
|
||||
While we claim that this possibility to link to theorems (and test-results) is unique in the
|
||||
world of systems attempting to assure \<^cenelec_term>\<open>traceability\<close>, referencing a particular
|
||||
(proven) theorem is definitively not sufficient to satisfy the claimed requirement. Human
|
||||
evaluators will always have to check that the provided theorem \<open>adequately\<close> represents the claim;
|
||||
we do not in the slightest suggest that their work is superfluous. Our framework allows to
|
||||
statically check that tests or proofs have been provided, at places where the ontology requires
|
||||
them to be, and both assessors and developers can rely on this check and navigate through
|
||||
related information easily. It does not guarantee that intended concepts for, \<^eg>, safety
|
||||
or security have been adequately modeled.
|
||||
\<close>
|
||||
|
||||
section*[moe::text_section]
|
||||
\<open>Major Ontological Entities: Requirements and Evidence\<close>
|
||||
text\<open>
|
||||
We introduce central concept of a \<^emph>\<open>requirement\<close> as an ODL \<^theory_text>\<open>doc_class\<close>
|
||||
based on the generic basic library \<^doc_class>\<open>text_element\<close> providing basic layout attributes.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class requirement = text_element +
|
||||
long_name :: "string option"
|
||||
is_concerned :: "role set"
|
||||
\<close>}
|
||||
|
||||
The \<open>role set\<close> refers to the groups of stakeholders in the CENELEC process. Therefore, the \<open>is_concerned\<close>-attribute
|
||||
allows expressing who ``owns'' this text-element. \<^isadof> supports a role-based
|
||||
presentation, \<^eg>, different presentation styles of the integrated source may decide to highlight,
|
||||
to omit, to defer into an annex, text entities according to the role-set.
|
||||
|
||||
Since ODL supports single inheritance, we can express sub-requirements and therefore a style
|
||||
of requirement decomposition as advocated in GSN~@{cite "kelly.ea:goal:2004"}:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class sub_requirement =
|
||||
decomposes :: "requirement"
|
||||
relates_to :: "requirement set"
|
||||
\<close>}
|
||||
\<close>
|
||||
|
||||
section*[claimsreqevidence::text_section]\<open>Tracking Claims, Derived Requirements and Evidence\<close>
|
||||
text\<open>As an example of making implicit principles explicit,
|
||||
consider the following statement @{cite "bsi:50128:2014"}, pp. 25.: \<^vs>\<open>-0.15cm\<close>
|
||||
|
||||
\begin{quote}\small
|
||||
The objective of software verification is to examine and arrive at a judgment based on
|
||||
evidence that output items (process, documentation, software or application) of a specific
|
||||
development phase fulfill the requirements and plans with respect to completeness, correctness
|
||||
and consistency.
|
||||
\end{quote} \<^vs>\<open>-0.15cm\<close>
|
||||
|
||||
The terms \<^onto_class>\<open>judgement\<close> based on \<^term>\<open>evidence\<close> are used as a kind of leitmotif throughout
|
||||
the CENELEC standard, but they are neither explained nor even listed in the general glossary.
|
||||
However, the standard is fairly explicit on the \<^emph>\<open>phase\<close>s and the organizational roles that
|
||||
different stakeholders should have in the process. Our version to express this key concept of
|
||||
\<^onto_class>\<open>judgement\<close> is given, \<^eg>, by the following concept:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class judgement =
|
||||
refers_to :: requirement
|
||||
evidence :: "vnt_technique list"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\<close>}
|
||||
|
||||
As one can see, the role set is per default set to the verification team, the assessors and the
|
||||
validation team.
|
||||
|
||||
There are different views possible here: an alternative would be to define \<^term>\<open>evidence\<close>
|
||||
as ontological concept with \<^typ>\<open>vnt_technique\<close>'s (rather than an attribute of judgement)
|
||||
and consider it the basis of a summary containing the relation between requirements and evidence:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
doc_class summary =
|
||||
based_on :: "(requirement \<times> evidence) set"
|
||||
status :: status
|
||||
is_concerned :: "role set" <= "{VER,ASR,VAL}"
|
||||
\<close>}
|
||||
|
||||
More experimentation will be needed to find out what kind of ontological modeling is most
|
||||
adequate for developers in the context of \<^isadof>.
|
||||
\<close>
|
||||
|
||||
section*[ontocontrol::text_section]\<open>Ontological Compliance\<close>
|
||||
|
||||
text\<open>From the variety of different possibilities for adding CENELEC annotations to the
|
||||
integrated source, we will, in the following, point out three scenarios.\<close>
|
||||
|
||||
subsection\<open>Internal Verification of Claims in the Requirements Specification.\<close>
|
||||
text\<open>In our case, the \<^term>\<open>SR\<close>-team early on detected a property necessary
|
||||
for error-detection of the device (c.f. @{technical verific}):
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[encoder_props::requirement]\<open> The requirement specification team identifies the property:
|
||||
C1 & C2 & C3 = 0 (bitwise logical AND operation)
|
||||
C1 | C2 | C3 = 1 (bitwise logical OR operation) \<close>
|
||||
\<close>}
|
||||
|
||||
After the Isabelle proofs shown in @{technical verific}, we can either register the theorems
|
||||
directly in an evidence statement:
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[J1::judgement, refers_to="@{docitem <open>encoder_props<close>}",
|
||||
evidence="[formal_proof[@{thm <open>Encoder_Property_1<close>},
|
||||
@{thm <open>Encoder_Property_2<close>}]]"]
|
||||
\<open>The required encoder properties are in fact verified to be consistent
|
||||
with the formalization of @{term "phase\<^sub>0"}.\<close>
|
||||
\<close>}
|
||||
|
||||
The references \<open>@{...}\<close>, called antiquotation, allow us not only to reference to
|
||||
formal concepts, they are checked for consistency and there are also antiquotations that
|
||||
print the formally checked content (\<^eg>, the statement of a theorem).
|
||||
\<close>
|
||||
|
||||
subsection\<open>Exporting Claims of the Requirements Specification.\<close>
|
||||
|
||||
text\<open>By definition, the main purpose of the requirement specification is the identification of
|
||||
the safety requirements. As an example, we state the required precision of an odometric function:
|
||||
for any normally behaved distance function \<open>df\<close>, and any representable and valid
|
||||
sampling sequence that can be constructed for \<open>df\<close>, we require that the difference
|
||||
between the physical distance and distance calculable from the @{term Odometric_Position_Count}
|
||||
is bound by the minimal resolution of the odometer.
|
||||
|
||||
@{theory_text [display]\<open>
|
||||
text*[R5::safety_requirement]\<open>We can now state ... \<close>
|
||||
definition Odometric_Position_Count_precise :: "(shaft_encoder_state list\<Rightarrow>output)\<Rightarrow>bool"
|
||||
where "Odometric_Position_Count_precise odofunction \<equiv>
|
||||
(\<forall> df. \<forall>S. normally_behaved_distance_function df
|
||||
\<longrightarrow> representable S
|
||||
\<longrightarrow> valid_sampling S df
|
||||
\<longrightarrow> (let pos = uint(Odometric_Position_Count(odofunction S))
|
||||
in \<bar>df((length S - 1)*\<delta>t\<^sub>o\<^sub>d\<^sub>o) - (\<delta>s\<^sub>r\<^sub>e\<^sub>s * pos)\<bar> \<le> \<delta>s\<^sub>r\<^sub>e\<^sub>s))"
|
||||
|
||||
update_instance*[R5::safety_requirement,
|
||||
formal_definition:="[@{thm \<open>Odometric_Position_Count_precise_def\<close>}]"]
|
||||
\<close>}
|
||||
|
||||
By \<^theory_text>\<open>update_instance*\<close>, we book the property \<open>Position_Count_precise_def\<close> as
|
||||
\<^onto_class>\<open>safety_requirement\<close>, a specific sub-class of \<^onto_class>\<open>requirement\<close>s
|
||||
requesting a formal definition in Isabelle.\<close>
|
||||
|
||||
subsection\<open>Exporting Derived Requirements.\<close>
|
||||
|
||||
text\<open>Finally, we discuss the situation where the verification team discovered a critical side-condition
|
||||
for a major theorem necessary for the safety requirements; this was in our development the case for
|
||||
the condition labelled ``\<open>**\<close>'' in @{docitem verific}. The current CENELEC standard clearly separates
|
||||
``requirement specifications'' from ``verification reports,'' which is probably motivated
|
||||
by the overall concern of organizational separation and of document consistency. While this
|
||||
document organization is possible in \<^isadof>, it is in our experience often counter-productive
|
||||
in practice: organizations tend to defend their documents because the impact of changes is more and more
|
||||
difficult to oversee. This effect results in a dramatic development slow-down and an increase of
|
||||
costs. Furthermore, these barriers exclude situations where developers perfectly know, for example,
|
||||
invariants, but can not communicate them to the verification team because the precise formalization
|
||||
is not known in time. Rather than advocating document separation, we tend to integrate these documents,
|
||||
keep proof as close as possible to definitions, and plead for consequent version control of the
|
||||
integrated source, together with the proposed methods to strengthen the links between the informal
|
||||
and formal parts by anti-quotations and continuous ontological checking. Instead of separation
|
||||
of the documents, we would rather emphasize the \<^emph>\<open>separation of the views\<close> of the different
|
||||
document representations. Such views were systematically generated out of the integrated source in
|
||||
different PDF versions and for each version, document specific consistency guarantees can be
|
||||
automatically enforced.
|
||||
|
||||
In our case study, we define this condition as predicate, declare an explanation of it as
|
||||
\<^onto_class>\<open>SRAC\<close> (CENELEC for: safety-related application condition; ontologically, this is a
|
||||
derived class from \<^onto_class>\<open>requirement\<close>.) and add the definition of the predicate into the
|
||||
document instance as described in the previous section.\<close>
|
||||
|
||||
|
||||
|
||||
chapter\<open>Appendix\<close>
|
||||
text\<open>
|
||||
\<^item> \<open>@{thm refl}\<close> : @{thm refl}
|
||||
\<^item> \<open>@{thm [source] refl}\<close> : @{thm [source] refl}
|
||||
\<^item> \<open>@{thm[mode=Rule] conjI}\<close> : @{thm[mode=Rule] conjI}
|
||||
\<^item> \<open>@{file "mini_odo.thy"}\<close> : @{file "mini_odo.thy"}
|
||||
\<^item> \<open>@{value "3+4::int"}\<close> : @{value "3+4::int"}
|
||||
\<^item> \<open>@{const hd}\<close> : @{const hd}
|
||||
\<^item> \<open>@{theory HOL.List}\<close> : @{theory HOL.List}
|
||||
\<^item> \<open>@{term "3"}\<close> : @{term "3"}
|
||||
\<^item> \<open>@{type bool}\<close> : @{type bool}
|
||||
\<^item> \<open>@{term [show_types] "f x = a + x"}\<close> : @{term [show_types] "f x = a + x"}
|
||||
\<close>
|
||||
|
||||
text\<open>Examples for declaration of typed doc-classes "assumption" (sic!) and "hypothesis" (sic!!),
|
||||
concepts defined in the underlying ontology @{theory "Isabelle_DOF-Ontologies.CENELEC_50128"}. \<close>
|
||||
text*[ass2::assumption, long_name="Some ''assumption one''"] \<open> The subsystem Y is safe. \<close>
|
||||
text*[hyp1::hypothesis] \<open> \<open>P \<noteq> NP\<close> \<close>
|
||||
|
||||
text\<open>
|
||||
A real example fragment from a larger project, declaring a text-element as a
|
||||
"safety-related application condition", a concept defined in the
|
||||
@{theory "Isabelle_DOF-Ontologies.CENELEC_50128"} ontology:\<close>
|
||||
|
||||
text*[hyp2::hypothesis]\<open>Under the assumption @{assumption \<open>ass2\<close>} we establish the following: ... \<close>
|
||||
|
||||
text*[ass122::SRAC, long_name="Some ''ass122''"]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[ass123::SRAC]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[ass124::EC, long_name="Some ''ass124''"]
|
||||
\<open> The overall sampling frequency of the odometer subsystem is therefore 14 kHz,
|
||||
which includes sampling, computing and result communication times... \<close>
|
||||
|
||||
text*[t10::test_result]
|
||||
\<open> This is a meta-test. This could be an ML-command that governs the external
|
||||
test-execution via, \<^eg>, a makefile or specific calls to a test-environment or test-engine. \<close>
|
||||
|
||||
|
||||
text \<open> Finally some examples of references to doc-items, i.e. text-elements
|
||||
with declared meta-information and status. \<close>
|
||||
|
||||
text \<open> As established by @{test_result \<open>t10\<close>}\<close>
|
||||
text \<open> the @{test_result \<open>t10\<close>}
|
||||
as well as the @{SRAC \<open>ass122\<close>}\<close>
|
||||
text \<open> represent a justification of the safety related applicability
|
||||
condition @{SRAC \<open>ass122\<close>} aka exported constraint @{EC \<open>ass122\<close>}.\<close>
|
||||
|
||||
text \<open> due to notational conventions for antiquotations, one may even write:
|
||||
|
||||
"represent a justification of the safety related applicability
|
||||
condition \<^SRAC>\<open>ass122\<close> aka exported constraint \<^EC>\<open>ass122\<close>."\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -1,4 +1,5 @@
|
|||
scholarly_paper
|
||||
technical_report
|
||||
math_exam
|
||||
CENELEC_50128
|
||||
cytology
|
||||
CC_ISO15408
|
||||
beamerx
|
|
@ -0,0 +1,2 @@
|
|||
poster
|
||||
presentation
|
|
@ -0,0 +1,8 @@
|
|||
chapter AFP
|
||||
|
||||
session "poster-example" (AFP) = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
theories
|
||||
"poster"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1,2 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
(*<*)
|
||||
theory "poster"
|
||||
imports "Isabelle_DOF.scholarly_paper"
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
begin
|
||||
|
||||
use_template "beamerposter-UNSUPPORTED"
|
||||
use_ontology "scholarly_paper"
|
||||
(*>*)
|
||||
|
||||
title*[tit::title]\<open>Example Presentation\<close>
|
||||
|
||||
author*[safouan,email="\<open>example@example.org\<close>",affiliation="\<open>Example Org\<close>"]\<open>Eliza Example\<close>
|
||||
|
||||
text\<open>
|
||||
\vfill
|
||||
\begin{block}{\large Fontsizes}
|
||||
\centering
|
||||
{\tiny tiny}\par
|
||||
{\scriptsize scriptsize}\par
|
||||
{\footnotesize footnotesize}\par
|
||||
{\normalsize normalsize}\par
|
||||
{\large large}\par
|
||||
{\Large Large}\par
|
||||
{\LARGE LARGE}\par
|
||||
{\veryHuge veryHuge}\par
|
||||
{\VeryHuge VeryHuge}\par
|
||||
{\VERYHuge VERYHuge}\par
|
||||
\end{block}
|
||||
\vfill
|
||||
\<close>
|
||||
|
||||
text\<open>
|
||||
@{block (title = "\<open>Title\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close>") "\<open>Block content\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close>"}
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -0,0 +1,9 @@
|
|||
chapter AFP
|
||||
|
||||
session "presentation-example" (AFP) = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof, timeout = 300]
|
||||
theories
|
||||
"presentation"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"figures/A.png"
|
0
src/tests/figures/A.png → Isabelle_DOF-Examples-Extra/beamerx/presentation/document/figures/A.png
Executable file → Normal file
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 12 KiB |
|
@ -0,0 +1,2 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
(*<*)
|
||||
theory "presentation"
|
||||
imports "Isabelle_DOF.scholarly_paper"
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
begin
|
||||
|
||||
use_template "beamer-UNSUPPORTED"
|
||||
use_ontology "scholarly_paper"
|
||||
(*>*)
|
||||
|
||||
title*[tit::title]\<open>Example Presentation\<close>
|
||||
|
||||
author*[safouan,email="\<open>example@example.org\<close>",affiliation="\<open>Example Org\<close>"]\<open>Eliza Example\<close>
|
||||
|
||||
text\<open>
|
||||
\begin{frame}
|
||||
\frametitle{Example Slide}
|
||||
\centering\huge This is an example!
|
||||
\end{frame}
|
||||
\<close>
|
||||
|
||||
|
||||
frame*[test_frame
|
||||
, frametitle = \<open>\<open>\<open>Example Slide\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close> with items @{thm "HOL.refl"}\<close>\<close>
|
||||
, framesubtitle = "''Subtitle''"]
|
||||
\<open>This is an example!
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame\<close>}\<close>\<close>
|
||||
|
||||
frame*[test_frame2
|
||||
, frametitle = "''Example Slide''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>Test frame env \<^term>\<open>refl\<close>\<close>
|
||||
|
||||
frame*[test_frame3, frametitle = "''A slide with a Figure''"]
|
||||
\<open>A figure
|
||||
@{figure_content (width=45, caption=\<open>\<open>Figure\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close> is not the \<^term>\<open>refl\<close> theorem (@{thm "refl"}).\<close>)
|
||||
"figures/A.png"}\<close>
|
||||
|
||||
frame*[test_frame4
|
||||
, options = "''allowframebreaks''"
|
||||
, frametitle = "''Example Slide with frame break''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame4\<close>}\<close>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -0,0 +1,87 @@
|
|||
theory Cytology
|
||||
imports "Isabelle_DOF.scholarly_paper"
|
||||
begin
|
||||
|
||||
text\<open>A small example ontology for demonstration purposes.
|
||||
The presentation follows closely: \<^url>\<open>https://www.youtube.com/watch?v=URUJD5NEXC8\<close>.\<close>
|
||||
|
||||
|
||||
datatype protein = filaments | motor_proteins | rna | dna |nucleolus
|
||||
|
||||
type_synonym desc = "string"
|
||||
|
||||
onto_class organelles = description :: desc
|
||||
|
||||
find_theorems (60) name:"organelles"
|
||||
|
||||
term "Cytology.organelles.make"
|
||||
|
||||
onto_class ribosomes = organelles + description :: desc
|
||||
|
||||
onto_class mytochondria = organelles + description :: desc
|
||||
|
||||
onto_class golgi_apparatus = organelles + description :: desc
|
||||
|
||||
onto_class lysosome = organelles + description :: desc
|
||||
|
||||
text\<open>the control center of the cell:\<close>
|
||||
onto_class nucleus = organelles +
|
||||
description :: desc
|
||||
components :: "protein list" <= "[nucleolus]"
|
||||
|
||||
(* Not so nice construction to mimick inheritance on types useds in attribute positions. *)
|
||||
datatype organelles' = upcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s (get_ribosomes:ribosomes)
|
||||
| upcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a (get_mytochondria:mytochondria)
|
||||
| upcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s (get_golgi_apparatus: golgi_apparatus)
|
||||
| upcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e (get_lysosome : lysosome)
|
||||
| upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (get_nucleus : nucleus)
|
||||
|
||||
fun is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s where "is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s X) = True" | "is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s ( _) = False"
|
||||
(* ... *)
|
||||
fun downcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s
|
||||
where "downcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s (upcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s X) = X" | "downcast\<^sub>r\<^sub>i\<^sub>b\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e\<^sub>s _ = undefined"
|
||||
fun downcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a
|
||||
where "downcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a (upcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a X) = X" | "downcast\<^sub>m\<^sub>y\<^sub>t\<^sub>o\<^sub>c\<^sub>h\<^sub>o\<^sub>n\<^sub>d\<^sub>r\<^sub>i\<^sub>a _ = undefined"
|
||||
fun downcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s
|
||||
where "downcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s (upcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s X) = X" | "downcast\<^sub>g\<^sub>o\<^sub>l\<^sub>g\<^sub>i\<^sub>_\<^sub>a\<^sub>p\<^sub>p\<^sub>a\<^sub>r\<^sub>a\<^sub>t\<^sub>u\<^sub>s _ = undefined"
|
||||
fun downcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e
|
||||
where "downcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e (upcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e X) = X" | "downcast\<^sub>l\<^sub>y\<^sub>s\<^sub>o\<^sub>s\<^sub>o\<^sub>m\<^sub>e _ = undefined"
|
||||
fun downcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s
|
||||
where "downcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s X) = X" | "downcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s _ = undefined"
|
||||
|
||||
|
||||
|
||||
|
||||
onto_class cell =
|
||||
name :: string
|
||||
membrane :: desc <= "\<open>The outer boundary of the cell\<close>"
|
||||
cytoplasm :: desc <= "\<open>The liquid in the cell\<close>"
|
||||
cytoskeleton :: desc <= "\<open>includes the thread-like microfilaments\<close>"
|
||||
genetic_material :: "protein list" <= "[rna, dna]"
|
||||
|
||||
text\<open>Cells are devided into two categories: \<^emph>\<open>procaryotic\<close> cells (unicellular organisms some
|
||||
bacteria) without a substructuring in organelles and \<^emph>\<open>eucaryotic\<close> cells, as occurring in
|
||||
pluricellular organisms\<close>
|
||||
|
||||
onto_class procaryotic_cells = cell +
|
||||
name :: string
|
||||
|
||||
onto_class eucaryotic_cells = cell +
|
||||
organelles :: "organelles' list"
|
||||
invariant has_nucleus :: "\<exists> org \<in> set (organelles \<sigma>). is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s org"
|
||||
\<comment> \<open>Cells must have at least one nucleus. However, this should be executable.\<close>
|
||||
|
||||
find_theorems (70)name:"eucaryotic_cells"
|
||||
find_theorems name:has_nucleus
|
||||
|
||||
value "is\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (mk\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s X)"
|
||||
|
||||
term \<open>eucaryotic_cells.organelles\<close>
|
||||
|
||||
value \<open>(eucaryotic_cells.organelles(eucaryotic_cells.make X Y Z Z Z [] []))\<close>
|
||||
|
||||
value \<open>has_nucleus_inv(eucaryotic_cells.make X Y Z Z Z [] [])\<close>
|
||||
|
||||
value \<open>has_nucleus_inv(eucaryotic_cells.make X Y Z Z Z [] [upcast\<^sub>n\<^sub>u\<^sub>c\<^sub>l\<^sub>e\<^sub>u\<^sub>s (nucleus.make a b c )])\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,4 @@
|
|||
session "Cytology" = "Isabelle_DOF" +
|
||||
options [document = false]
|
||||
theories
|
||||
"Cytology"
|
1
examples/technical_report/ROOTS → Isabelle_DOF-Examples-Extra/technical_report/ROOTS
Executable file → Normal file
|
@ -1,2 +1 @@
|
|||
Isabelle_DOF-Manual
|
||||
TR_my_commented_isabelle
|
|
@ -1,13 +1,11 @@
|
|||
session "TR_MyCommentedIsabelle" = "Isabelle_DOF" +
|
||||
options [document = pdf, document_output = "output",quick_and_dirty = true]
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
theories
|
||||
"TR_MyCommentedIsabelle"
|
||||
document_files
|
||||
"root.bib"
|
||||
"isadof.cfg"
|
||||
"preamble.tex"
|
||||
"prooftree.sty"
|
||||
"build"
|
||||
"figures/markup-demo.png"
|
||||
"figures/text-element.pdf"
|
||||
"figures/isabelle-architecture.pdf"
|
|
@ -1,7 +1,7 @@
|
|||
(*************************************************************************
|
||||
* Copyright (C)
|
||||
* 2019 The University of Exeter
|
||||
* 2018-2019 The University of Paris-Saclay
|
||||
* 2019-2022 The University of Exeter
|
||||
* 2018-2022 The University of Paris-Saclay
|
||||
* 2018 The University of Sheffield
|
||||
*
|
||||
* License:
|
||||
|
@ -14,16 +14,18 @@
|
|||
(*<*)
|
||||
theory TR_MyCommentedIsabelle
|
||||
imports "Isabelle_DOF.technical_report"
|
||||
|
||||
begin
|
||||
|
||||
use_template "scrreprt"
|
||||
use_ontology "technical_report"
|
||||
|
||||
define_shortcut* isabelle \<rightleftharpoons> \<open>Isabelle/HOL\<close>
|
||||
|
||||
open_monitor*[this::report]
|
||||
(*>*)
|
||||
|
||||
title*[tit::title]\<open>My Personal, Ecclectic Isabelle Programming Manual\<close>
|
||||
subtitle*[stit::subtitle]\<open>Version : Isabelle 2020\<close>
|
||||
title*[tit::title]\<open>My Personal, Eclectic Isabelle Programming Manual\<close>
|
||||
subtitle*[stit::subtitle]\<open>Version : Isabelle 2022\<close>
|
||||
text*[bu::author,
|
||||
email = "''wolff@lri.fr''",
|
||||
affiliation = "\<open>Université Paris-Saclay, LRI, France\<close>"]\<open>Burkhart Wolff\<close>
|
||||
|
@ -46,7 +48,7 @@ text*[abs::abstract,
|
|||
This text is written itself in Isabelle/Isar using a specific document ontology
|
||||
for technical reports. It is intended to be a "living document", i.e. it is not only
|
||||
used for generating a static, conventional .pdf, but also for direct interactive
|
||||
exploration in Isabelle/jedit. This way, types, intermediate results of computations and
|
||||
exploration in Isabelle/jEdit. This way, types, intermediate results of computations and
|
||||
checks can be repeated by the reader who is invited to interact with this document.
|
||||
Moreover, the textual parts have been enriched with a maximum of formal content
|
||||
which makes this text re-checkable at each load and easier maintainable.
|
||||
|
@ -60,22 +62,28 @@ text\<open> \<^vs>\<open>-0.5cm\<close>
|
|||
spectrum of applications. A particular strength of the Isabelle framework is the combination
|
||||
of text editing, formal verification, and code generation. This is a programming-tutorial of
|
||||
Isabelle and Isabelle/HOL, a complementary text to the unfortunately somewhat outdated
|
||||
"The Isabelle Cookbook" in \<^url>\<open>https://nms.kcl.ac.uk/christian.urban/Cookbook/\<close>. The reader
|
||||
is encouraged not only to consider the generated .pdf, but also consult the loadable version
|
||||
"The Isabelle Cookbook" in \<^url>\<open>https://nms.kcl.ac.uk/christian.urban/Cookbook/\<close>.
|
||||
The present text is also complementary to the current version of
|
||||
\<^url>\<open>https://isabelle.in.tum.de/doc/isar-ref.pdf\<close>
|
||||
"The Isabelle/Isar Implementation" by Makarius Wenzel in that it focusses on subjects
|
||||
not covered there, or presents alternative explanations for which I believe, based on my
|
||||
experiences with students and Phds, that they are helpful.
|
||||
For the present programming manual, the reader is encouraged not only to consider the generated
|
||||
.pdf, but also consult the loadable version
|
||||
in Isabelle/jedit in order to make experiments on the running code. This text is written
|
||||
itself in Isabelle/Isar using a specific document ontology for technical reports. It is
|
||||
intended to be a "living document", i.e. it is not only used for generating a static,
|
||||
conventional .pdf, but also for direct interactive exploration in Isabelle/jedit. This way,
|
||||
conventional .pdf, but also for direct interactive exploration in Isabelle/jEdit. This way,
|
||||
types, intermediate results of computations and checks can be repeated by the reader who is
|
||||
invited to interact with this document. Moreover, the textual parts have been enriched with a
|
||||
maximum of formal content which makes this text re-checkable at each load and easier
|
||||
maintainable. \<close>
|
||||
|
||||
figure*[architecture::figure,relative_width="70",src="''figures/isabelle-architecture''"]\<open>
|
||||
figure*[architecture::figure,relative_width="70",file_src="''figures/isabelle-architecture.pdf''"]\<open>
|
||||
The system architecture of Isabelle (left-hand side) and the asynchronous communication
|
||||
between the Isabelle system and the IDE (right-hand side). \<close>
|
||||
|
||||
text\<open>This programming roughly follows the Isabelle system architecture shown in
|
||||
text\<open>This programming tutorial roughly follows the Isabelle system architecture shown in
|
||||
\<^figure>\<open>architecture\<close>, and, to be more precise, more or less in the bottom-up order.
|
||||
|
||||
We start from the basic underlying SML platform over the Kernels to the tactical layer
|
||||
|
@ -86,8 +94,8 @@ chapter*[t1::technical]\<open> SML and Fundamental SML libraries \<close>
|
|||
|
||||
section*[t11::technical] "ML, Text and Antiquotations"
|
||||
|
||||
text\<open>Isabelle is written in SML, the "Standard Meta-Language", which is is an impure functional
|
||||
programming language allowing, in principle, mutable variables and side-effects.
|
||||
text\<open>Isabelle is written in SML, the "Standard Meta-Language", which is an impure functional
|
||||
programming language allowing, in principle, mutable variables and side effects.
|
||||
The following Isabelle/Isar commands allow for accessing the underlying SML interpreter
|
||||
of Isabelle directly. In the example, a mutable variable X is declared, initialized to 0 and
|
||||
updated; and finally re-evaluated leading to output: \<close>
|
||||
|
@ -136,14 +144,14 @@ text\<open>\<^emph>\<open>This is a text.\<close>\<close>
|
|||
|
||||
text\<open>... is represented in the integrated source (the \<^verbatim>\<open>.thy\<close> file) by:\<close>
|
||||
|
||||
text\<open> *\<open>\<open>\<close>This is a text.\<open>\<close>\<close>\<close>
|
||||
text\<open> \<open>*\<open>This is a text.\<close>\<close>\<close>
|
||||
|
||||
text\<open>and displayed in the Isabelle/jedit front-end at the sceen by:\<close>
|
||||
text\<open>and displayed in the Isabelle/jEdit front-end at the sceen by:\<close>
|
||||
|
||||
figure*[fig2::figure, relative_width="60", placement="pl_h", src="''figures/text-element''"]
|
||||
\<open>A text-element as presented in Isabelle/jedit.\<close>
|
||||
figure*[fig2::figure, relative_width="60", file_src="''figures/text-element.pdf''"]
|
||||
\<open>A text-element as presented in Isabelle/jEdit.\<close>
|
||||
|
||||
text\<open>The text-commands, ML- commands (and in principle any other command) can be seen as
|
||||
text\<open>The text-commands, ML-commands (and in principle any other command) can be seen as
|
||||
\<^emph>\<open>quotations\<close> over the underlying SML environment (similar to OCaml or Haskell).
|
||||
Linking these different sorts of quotations with each other and the underlying SML-environment
|
||||
is supported via \<^emph>\<open>antiquotations\<close>'s. Generally speaking, antiquotations are a kind of semantic
|
||||
|
@ -186,7 +194,7 @@ text\<open>\<^vs>\<open>-1.0cm\<close>... which we will describe in more detail
|
|||
|
||||
text\<open>In a way, anti-quotations implement a kind of
|
||||
literate specification style in text, models, code, proofs, etc., which become alltogether
|
||||
elements of one global \<^emph>\<open>integrated document\<close> in which mutual dependencies can be machine=checked
|
||||
elements of one global \<^emph>\<open>integrated document\<close> in which mutual dependencies can be machine-checked
|
||||
(i.e. \<open>formal\<close> in this sense).
|
||||
Attempting to maximize the \<open>formal content\<close> is a way to ensure "Agile Development" (AD) of an
|
||||
integrated document development, in the sense that it allows to give immediate feedback
|
||||
|
@ -204,17 +212,21 @@ text\<open>It is instructive to study the fundamental bootstrapping sequence of
|
|||
it is written in the Isar format and gives an idea of the global module dependencies:
|
||||
\<^file>\<open>~~/src/Pure/ROOT.ML\<close>. Loading this file
|
||||
(for example by hovering over this hyperlink in the antiquotation holding control or
|
||||
command key in Isabelle/jedit and activating it) allows the Isabelle IDE
|
||||
command key in Isabelle/jEdit and activating it) allows the Isabelle IDE
|
||||
to support hyperlinking \<^emph>\<open>inside\<close> the Isabelle source.\<close>
|
||||
|
||||
text\<open>The bootstrapping sequence is also reflected in the following diagram @{figure "architecture"}.\<close>
|
||||
|
||||
|
||||
section*[t12::technical] "Elements of the SML library";
|
||||
section*[t12::technical] "Elements of the SML library"
|
||||
text\<open>Elements of the \<^file>\<open>~~/src/Pure/General/basics.ML\<close> SML library
|
||||
are basic exceptions. Note that exceptions should be catched individually, uncatched exceptions
|
||||
except those generated by the specific "error" function are discouraged in Isabelle
|
||||
source programming since they might produce races in the internal Isabelle evaluation.
|
||||
source programming since they might produce races in the internal Isabelle evaluation.
|
||||
% TODO:
|
||||
% The following exceptions are defined in $ML_SOURCES/basis/General.sml
|
||||
% and in $ISABELLE_HOME/src/Pure/general/scan.ml
|
||||
% ans not in \<^file>\<open>~~/src/Pure/General/basics.ML\<close>
|
||||
|
||||
\<^item> \<^ML>\<open>Bind : exn\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> \<^ML>\<open>Chr : exn\<close> \<^vs>\<open>-0.3cm\<close>
|
||||
|
@ -245,7 +257,11 @@ text*[squiggols::technical]
|
|||
\<^item> @{ML "op --| : ('a->'b*'c) * ('c->'d*'e)->'a->'b*'e"}, parse pair, forget left \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> @{ML "op ? : bool * ('a->'a)->'a->'a"}, if then else \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> @{ML "ignore : 'a->unit"}, force execution, but ignore result \<^vs>\<open>-0.3cm\<close>
|
||||
\<^item> @{ML "op before: ('a*unit) -> 'a"} \<^vs>\<open>-0.8cm\<close> \<close>
|
||||
\<^item> @{ML "op before: ('a*unit) -> 'a"} \<^vs>\<open>-0.8cm\<close>
|
||||
% TODO:
|
||||
% Again the definitions of these operators span multiple files
|
||||
% and not just \<^file>\<open>~~/src/Pure/General/basics.ML\<close>.
|
||||
\<close>
|
||||
|
||||
text\<open>\<^noindent> Some basic examples for the programming style using these combinators can be found in the
|
||||
"The Isabelle Cookbook" section 2.3.\<close>
|
||||
|
@ -294,7 +310,9 @@ text\<open> What I call the 'Nano-Kernel' in Isabelle can also be seen as an acy
|
|||
"Generic theory contexts with unique identity, arbitrarily typed data,
|
||||
monotonic development graph and history support. Generic proof
|
||||
contexts with arbitrarily typed data."
|
||||
|
||||
% NOTE:
|
||||
% Add the reference.
|
||||
|
||||
In my words: a context is essentially a container with
|
||||
\<^item> an id
|
||||
\<^item> a list of parents (so: the graph structure)
|
||||
|
@ -329,12 +347,10 @@ text\<open>
|
|||
\<^item> \<^ML>\<open>Context.proper_subthy : theory * theory -> bool\<close> subcontext test
|
||||
\<^item> \<^ML>\<open>Context.Proof: Proof.context -> Context.generic \<close> A constructor embedding local contexts
|
||||
\<^item> \<^ML>\<open>Context.proof_of : Context.generic -> Proof.context\<close> the inverse
|
||||
\<^item> \<^ML>\<open>Context.theory_name : theory -> string\<close>
|
||||
\<^item> \<^ML>\<open>Context.theory_name : {long:bool} -> theory -> string\<close>
|
||||
\<^item> \<^ML>\<open>Context.map_theory: (theory -> theory) -> Context.generic -> Context.generic\<close>
|
||||
\<close>
|
||||
|
||||
text\<open>\<^ML>\<open>3+4\<close>\<close>
|
||||
|
||||
text\<open>The structure \<^ML_structure>\<open>Proof_Context\<close> provides a key data-structures concerning contexts:
|
||||
|
||||
\<^item> \<^ML>\<open> Proof_Context.init_global: theory -> Proof.context\<close>
|
||||
|
@ -342,7 +358,7 @@ text\<open>The structure \<^ML_structure>\<open>Proof_Context\<close> provides a
|
|||
\<^item> \<^ML>\<open> Context.Proof: Proof.context -> Context.generic \<close>
|
||||
the path to a generic Context, i.e. a sum-type of global and local contexts
|
||||
in order to simplify system interfaces
|
||||
\<^item> \<^ML>\<open> Proof_Context.get_global: theory -> string -> Proof.context\<close>
|
||||
\<^item> \<^ML>\<open> Proof_Context.get_global: {long:bool} -> theory -> string -> Proof.context\<close>
|
||||
\<close>
|
||||
|
||||
|
||||
|
@ -350,23 +366,21 @@ subsection*[t213::example]\<open>Mechanism 2 : Extending the Global Context \<op
|
|||
|
||||
text\<open>A central mechanism for constructing user-defined data is by the \<^ML_functor>\<open>Generic_Data\<close>-functor.
|
||||
A plugin needing some data \<^verbatim>\<open>T\<close> and providing it with implementations for an
|
||||
\<^verbatim>\<open>empty\<close>, and operations \<^verbatim>\<open>merge\<close> and \<^verbatim>\<open>extend\<close>, can construct a lense with operations
|
||||
\<^verbatim>\<open>empty\<close>, and operation \<^verbatim>\<open>merge\<close>, can construct a lense with operations
|
||||
\<^verbatim>\<open>get\<close> and \<^verbatim>\<open>put\<close> that attach this data into the generic system context. Rather than using
|
||||
unsynchronized SML mutable variables, this is the mechanism to introduce component local
|
||||
data in Isabelle, which allows to manage this data for the necessary backtrack- and synchronization
|
||||
data in Isabelle, which allows to manage this data for the necessary backtrack and synchronization
|
||||
features in the pervasively parallel evaluation framework that Isabelle as a system represents.\<close>
|
||||
|
||||
ML \<open>
|
||||
datatype X = mt
|
||||
val init = mt;
|
||||
val ext = I
|
||||
fun merge (X,Y) = mt
|
||||
|
||||
structure Data = Generic_Data
|
||||
(
|
||||
type T = X
|
||||
val empty = init
|
||||
val extend = ext
|
||||
val merge = merge
|
||||
);
|
||||
\<close>
|
||||
|
@ -470,17 +484,21 @@ ML\<open> val Const ("HOL.implies", @{typ "bool \<Rightarrow> bool \<Rightarrow>
|
|||
|
||||
val "HOL.bool" = @{type_name "bool"};
|
||||
|
||||
(* three ways to write type bool:@ *)
|
||||
(* three ways to write type bool: *)
|
||||
val Type("fun",[s,Type("fun",[@{typ "bool"},Type(@{type_name "bool"},[])])]) = @{typ "bool \<Rightarrow> bool \<Rightarrow> bool"};
|
||||
|
||||
\<close>
|
||||
text\<open>
|
||||
% NOTE:
|
||||
% The quotes disappear in the pdf document output.
|
||||
|
||||
\<close>
|
||||
text\<open>Note that the SML interpreter is configured that he will actually print a type
|
||||
\<^verbatim>\<open>Type("HOL.bool",[])\<close> just as \<^verbatim>\<open>"bool": typ\<close>, so a compact notation looking
|
||||
pretty much like a string. This can be confusing at times.\<close>
|
||||
|
||||
text\<open>Note, furthermore, that there is a programming API for the HOL-instance of Isabelle:
|
||||
it is contained in @{file "$ISABELLE_HOME/src/HOL/Tools/hologic.ML"}. It offers for many
|
||||
it is contained in @{file "$ISABELLE_HOME/src/HOL/Tools/hologic.ML"}. It offers many
|
||||
operators of the HOL logic specific constructors and destructors:\<close>
|
||||
|
||||
text*[T2::technical]\<open>
|
||||
|
@ -594,32 +612,36 @@ subsection\<open>More operations on types\<close>
|
|||
text\<open>
|
||||
\<^item> \<^ML>\<open>Term_Subst.map_types_same : (typ -> typ) -> term -> term\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.map_aterms_same : (term -> term) -> term -> term\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.instantiate: ((indexname * sort) * typ) list * ((indexname * typ) * term) list
|
||||
-> term -> term\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.instantiateT: ((indexname * sort) * typ) list -> typ -> typ\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.generalizeT: string list -> int -> typ -> typ\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.instantiate: typ TVars.table * term Vars.table -> term -> term\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.instantiateT: typ TVars.table -> typ -> typ\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.generalizeT: Names.set -> int -> typ -> typ\<close>
|
||||
this is the standard type generalisation function !!!
|
||||
only type-frees in the string-list were taken into account.
|
||||
\<^item> \<^ML>\<open>Term_Subst.generalize: string list * string list -> int -> term -> term\<close>
|
||||
\<^item> \<^ML>\<open>Term_Subst.generalize: Names.set * Names.set -> int -> term -> term\<close>
|
||||
this is the standard term generalisation function !!!
|
||||
only type-frees and frees in the string-lists were taken
|
||||
into account.
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
text \<open>Apparently, a bizarre conversion between the old-style interface and
|
||||
the new-style \<^ML>\<open>tyenv\<close> is necessary. See the following example.\<close>
|
||||
ML\<open>
|
||||
val S = Vartab.dest tyenv;
|
||||
val S = Vartab.dest tyenv : (Vartab.key * (sort * typ)) list;
|
||||
val S' = (map (fn (s,(t,u)) => ((s,t),u)) S) : ((indexname * sort) * typ) list;
|
||||
(* it took me quite some time to find out that these two type representations,
|
||||
obscured by a number of type-synonyms, where actually identical. *)
|
||||
val S''= TVars.make S': typ TVars.table
|
||||
val ty = t_schematic;
|
||||
val ty' = Term_Subst.instantiateT S' t_schematic;
|
||||
val ty' = Term_Subst.instantiateT S'' t_schematic;
|
||||
|
||||
(* Don't know how to build a typ TVars.table *)
|
||||
val t = (generalize_term @{term "[]"});
|
||||
|
||||
val t' = Term_Subst.map_types_same (Term_Subst.instantiateT S') (t)
|
||||
val t' = Term_Subst.map_types_same (Term_Subst.instantiateT (TVars.make S')) (t)
|
||||
(* or alternatively : *)
|
||||
val t'' = Term.map_types (Term_Subst.instantiateT S') (t)
|
||||
val t'' = Term.map_types (Term_Subst.instantiateT S'') (t)
|
||||
\<close>
|
||||
|
||||
text\<open>A more abstract env for variable management in tactic proofs. A bit difficult to use
|
||||
|
@ -776,27 +798,22 @@ text\<open> We come now to the very heart of the LCF-Kernel of Isabelle, which
|
|||
\<^item> \<^ML>\<open> Thm.forall_intr: cterm -> thm -> thm\<close>
|
||||
\<^item> \<^ML>\<open> Thm.forall_elim: cterm -> thm -> thm\<close>
|
||||
\<^item> \<^ML>\<open> Thm.transfer : theory -> thm -> thm\<close>
|
||||
\<^item> \<^ML>\<open> Thm.generalize: string list * string list -> int -> thm -> thm\<close>
|
||||
\<^item> \<^ML>\<open> Thm.instantiate: ((indexname*sort)*ctyp)list * ((indexname*typ)*cterm) list -> thm -> thm\<close>
|
||||
\<^item> \<^ML>\<open> Thm.generalize: Names.set * Names.set -> int -> thm -> thm\<close>
|
||||
\<^item> \<^ML>\<open> Thm.instantiate: ctyp TVars.table * cterm Vars.table -> thm -> thm\<close>
|
||||
\<close>
|
||||
|
||||
text\<open> They reflect the Pure logic depicted in a number of presentations such as
|
||||
text\<open> They reflect the Pure logic depicted in a number of presentations such as
|
||||
M. Wenzel, \<^emph>\<open>Parallel Proof Checking in Isabelle/Isar\<close>, PLMMS 2009, or simiular papers.
|
||||
Notated as logical inference rules, these operations were presented as follows:
|
||||
\<close>
|
||||
|
||||
side_by_side_figure*["text-elements"::side_by_side_figure,anchor="''fig-kernel1''",
|
||||
caption="''Pure Kernel Inference Rules I ''",relative_width="48",
|
||||
src="''figures/pure-inferences-I''",anchor2="''fig-kernel2''",
|
||||
caption2="''Pure Kernel Inference Rules II''",relative_width2="47",
|
||||
src2="''figures/pure-inferences-II''"]\<open> \<close>
|
||||
text*["text_elements"::float,
|
||||
main_caption="\<open>Kernel Inference Rules.\<close>"]
|
||||
\<open>
|
||||
@{fig_content (width=48, caption="Pure Kernel Inference Rules I.") "figures/pure-inferences-I.pdf"
|
||||
}\<^hfill>@{fig_content (width=47, caption="Pure Kernel Inference Rules II.") "figures/pure-inferences-II.pdf"}
|
||||
\<close>
|
||||
|
||||
(*
|
||||
figure*[kir1::figure,relative_width="100",src="''figures/pure-inferences-I''"]
|
||||
\<open> Pure Kernel Inference Rules I.\<close>
|
||||
figure*[kir2::figure,relative_width="100",src="''figures/pure-inferences-II''"]
|
||||
\<open> Pure Kernel Inference Rules II. \<close>
|
||||
*)
|
||||
|
||||
text\<open>Note that the transfer rule:
|
||||
\[
|
||||
|
@ -869,7 +886,6 @@ datatype thy = Thy of
|
|||
\<^item> \<^ML>\<open>Theory.axiom_space: theory -> Name_Space.T\<close>
|
||||
\<^item> \<^ML>\<open>Theory.all_axioms_of: theory -> (string * term) list\<close>
|
||||
\<^item> \<^ML>\<open>Theory.defs_of: theory -> Defs.T\<close>
|
||||
\<^item> \<^ML>\<open>Theory.join_theory: theory list -> theory\<close>
|
||||
\<^item> \<^ML>\<open>Theory.at_begin: (theory -> theory option) -> theory -> theory\<close>
|
||||
\<^item> \<^ML>\<open>Theory.at_end: (theory -> theory option) -> theory -> theory\<close>
|
||||
\<^item> \<^ML>\<open>Theory.begin_theory: string * Position.T -> theory list -> theory\<close>
|
||||
|
@ -891,14 +907,10 @@ high-level component (more low-level components such as \<^ML>\<open>Global_Theo
|
|||
exist) for definitions and axiomatizations is here:
|
||||
\<close>
|
||||
|
||||
|
||||
text\<open>
|
||||
\<^item> \<^ML>\<open>Specification.definition: (binding * typ option * mixfix) option ->
|
||||
(binding * typ option * mixfix) list -> term list -> Attrib.binding * term ->
|
||||
local_theory -> (term * (string * thm)) * local_theory\<close>
|
||||
\<^item> \<^ML>\<open>Specification.definition': (binding * typ option * mixfix) option ->
|
||||
(binding * typ option * mixfix) list -> term list -> Attrib.binding * term ->
|
||||
bool -> local_theory -> (term * (string * thm)) * local_theory\<close>
|
||||
\<^item> \<^ML>\<open>Specification.definition_cmd: (binding * string option * mixfix) option ->
|
||||
(binding * string option * mixfix) list -> string list -> Attrib.binding * string ->
|
||||
bool -> local_theory -> (term * (string * thm)) * local_theory\<close>
|
||||
|
@ -941,8 +953,8 @@ fun mk_def name p =
|
|||
val ty_global = ty --> ty
|
||||
val args = (((SOME(nameb,SOME ty_global,NoSyn),(Binding.empty_atts,term_prop)),[]),[])
|
||||
val cmd = (fn (((decl, spec), prems), params) =>
|
||||
#2 oo Specification.definition' decl params prems spec)
|
||||
in cmd args true
|
||||
#2 o Specification.definition decl params prems spec)
|
||||
in cmd args
|
||||
end;
|
||||
in Named_Target.theory_map (mk_def "I" @{here} )
|
||||
end\<close>
|
||||
|
@ -957,9 +969,9 @@ text\<open>At this point, we leave the Pure-Kernel and start to describe the fir
|
|||
|
||||
text\<open> \<^ML_type>\<open>tactic\<close>'s are in principle \<^emph>\<open>relations\<close> on theorems \<^ML_type>\<open>thm\<close>; the relation is
|
||||
lazy and encoded as function of type \<^ML_type>\<open>thm -> thm Seq.seq\<close>.
|
||||
This gives a
|
||||
natural way to represent the fact that HO-Unification (and therefore the mechanism of rule-instan-
|
||||
tiation) are non-deterministic in principle. Heuristics may choose particular preferences between
|
||||
This gives a natural way to represent the fact that HO-Unification
|
||||
(and therefore the mechanism of rule-instantiation) are non-deterministic in principle.
|
||||
Heuristics may choose particular preferences between
|
||||
the theorems in the range of this relation, but the Isabelle Design accepts this fundamental
|
||||
fact reflected at this point in the prover architecture.
|
||||
This potentially infinite relation is implemented by a function of theorems to lazy lists
|
||||
|
@ -988,9 +1000,9 @@ text\<open>The next layer in the architecture describes \<^ML_type>\<open>tacti
|
|||
theorems in a backward reasoning style (bottom up development of proof-trees). An initial
|
||||
goal-state for some property \<^prop>\<open>A\<close> --- the \<^emph>\<open>goal\<close> --- is constructed via the kernel
|
||||
\<^ML>\<open>Thm.trivial\<close>-operation into \<^prop>\<open>A \<Longrightarrow> A\<close>, and tactics either refine the premises --- the
|
||||
\<^emph>\<open>subgoals\<close> the of this meta-implication --- producing more and more of them or eliminate them
|
||||
\<^emph>\<open>subgoals\<close> of this meta-implication --- producing more and more of them or eliminate them
|
||||
in subsequent goal-states. Subgoals of the form \<^prop>\<open>B\<^sub>1 \<Longrightarrow> B\<^sub>2 \<Longrightarrow> A \<Longrightarrow> B\<^sub>3 \<Longrightarrow> B\<^sub>4 \<Longrightarrow> A\<close> can be
|
||||
eliminated via the \<^ML>\<open>Tactic.assume_tac\<close>-tactic, and a subgoal \<^prop>\<open>C\<^sub>m\<close> can be refined via the
|
||||
eliminated via the \<^ML>\<open>Tactic.assume_tac\<close>-tactic, and a subgoal \<^prop>\<open>C\<^sub>m\<close> can be refined via the
|
||||
theorem \<^prop>\<open>E\<^sub>1 \<Longrightarrow> E\<^sub>2 \<Longrightarrow> E\<^sub>3 \<Longrightarrow> C\<^sub>m\<close> the \<^ML>\<open>Tactic.resolve_tac\<close> - tactic to new subgoals
|
||||
\<^prop>\<open>E\<^sub>1\<close>, \<^prop>\<open>E\<^sub>2\<close>, \<^prop>\<open>E\<^sub>3\<close>. In case that a theorem used for resolution has no premise \<^prop>\<open>E\<^sub>i\<close>,
|
||||
the subgoal \<^prop>\<open>C\<^sub>m\<close> is also eliminated ("closed").
|
||||
|
@ -1031,7 +1043,7 @@ text\<open>Note that "applying a rule" is a fairly complex operation in the Exte
|
|||
\<^prop>\<open>D\<^sub>2\<close> and \<^prop>\<open>A\<close> have been replaced by schematic variables (see phase one),
|
||||
they must be replaced by parameterized schematic variables, i. e. a kind of skolem function.
|
||||
For example, \<open>?x + ?y = ?y + ?x\<close> would be lifted to
|
||||
\<open>!! x y z. ?x x y z + ?y x y z = ?y x y z + ?x x y z\<close>. This way, the lifted theorem
|
||||
\<open>\<And> x y z. ?x x y z + ?y x y z = ?y x y z + ?x x y z\<close>. This way, the lifted theorem
|
||||
can be instantiated by the parameters \<open>x y z\<close> representing "fresh free variables"
|
||||
used for this sub-proof. This mechanism implements their logically correct bookkeeping via
|
||||
kernel primitives.
|
||||
|
@ -1116,18 +1128,17 @@ thm "thm111"
|
|||
section\<open>Toplevel --- aka. ''The Isar Engine''\<close>
|
||||
|
||||
text\<open> The main structure of the Isar-engine is \<^ML_structure>\<open>Toplevel\<close>.
|
||||
The Isar Toplevel (aka "the Isar engine" or the "Isar Interpreter") is an transaction
|
||||
The Isar Toplevel (aka "the Isar engine" or the "Isar Interpreter") is a transaction
|
||||
machine sitting over the Isabelle Kernel steering some asynchronous evaluation during the
|
||||
evaluation of Isabelle/Isar input, usually stemming from processing Isabelle \<^verbatim>\<open>.thy\<close>-files. \<close>
|
||||
|
||||
subsection*[tplstate::technical] \<open>Toplevel Transaction Management in the Isar-Engine\<close>
|
||||
text\<open>
|
||||
The structure \<^ML_structure>\<open>Toplevel\<close> provides and internal \<^ML_type>\<open>state\<close> with the
|
||||
The structure \<^ML_structure>\<open>Toplevel\<close> provides an internal \<^ML_type>\<open>state\<close> with the
|
||||
necessary infrastructure --- i.e. the operations to pack and unpack theories and
|
||||
queries on it:
|
||||
|
||||
\<^item> \<^ML>\<open> Toplevel.theory_toplevel: theory -> Toplevel.state\<close>
|
||||
\<^item> \<^ML>\<open> Toplevel.init_toplevel: unit -> Toplevel.state\<close>
|
||||
\<^item> \<^ML>\<open> Toplevel.make_state: theory option -> Toplevel.state\<close>
|
||||
\<^item> \<^ML>\<open> Toplevel.is_toplevel: Toplevel.state -> bool\<close>
|
||||
\<^item> \<^ML>\<open> Toplevel.is_theory: Toplevel.state -> bool\<close>
|
||||
\<^item> \<^ML>\<open> Toplevel.is_proof: Toplevel.state -> bool\<close>
|
||||
|
@ -1165,11 +1176,11 @@ text\<open> The extensibility of Isabelle as a system framework depends on a num
|
|||
\<^item> \<^ML>\<open>Toplevel.theory: (theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
adjoins a theory transformer.
|
||||
\<^item> \<^ML>\<open>Toplevel.generic_theory: (generic_theory -> generic_theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.theory': (bool -> theory -> theory) -> Toplevel.presentation option -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.exit: Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.ignored: Position.T -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
|
||||
(Toplevel.state -> unit) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
\<^item> \<^ML>\<open>Toplevel.present_local_theory: (xstring * Position.T) option ->
|
||||
(Toplevel.state -> Latex.text) -> Toplevel.transition -> Toplevel.transition\<close>
|
||||
|
||||
\<close>
|
||||
subsection*[cmdbinding::technical] \<open>Toplevel Transaction Management in the Isar-Engine\<close>
|
||||
|
@ -1180,10 +1191,9 @@ text\<open>
|
|||
with a few query operations on the state of the toplevel:
|
||||
\<^item> \<^ML>\<open>Outer_Syntax.command : Outer_Syntax.command_keyword -> string ->
|
||||
(Toplevel.transition -> Toplevel.transition) parser -> unit\<close>,
|
||||
\<^item> \<^ML>\<open>Document.state : unit -> Document.state\<close>, giving the state as a "collection of named
|
||||
\<^item> \<^ML>\<open>Document.state : unit -> Document.state\<close>, giving the state as a "collection" of named
|
||||
nodes, each consisting of an editable list of commands, associated with asynchronous
|
||||
execution process,
|
||||
\<^item> \<^ML>\<open>Session.get_keywords : unit -> Keyword.keywords\<close>, this looks to be session global,
|
||||
\<^item> \<^ML>\<open>Thy_Header.get_keywords : theory -> Keyword.keywords\<close> this looks to be just theory global.
|
||||
|
||||
|
||||
|
@ -1196,34 +1206,47 @@ text\<open>
|
|||
Registers \<^ML_type>\<open>Toplevel.transition -> Toplevel.transition\<close> parsers to the
|
||||
Isar interpreter.\<close>
|
||||
|
||||
text\<open>The file \<^file>\<open>~~/src/HOL/ex/Commands.thy\<close> shows some example Isar command definitions, with the
|
||||
text\<open>The file \<^file>\<open>~~/src/HOL/Examples/Commands.thy\<close> shows some example Isar command definitions, with the
|
||||
all-important theory header declarations for outer syntax keywords.\<close>
|
||||
|
||||
text\<open>@{ML_structure Pure_Syn}\<close>
|
||||
|
||||
subsubsection*[ex1137::example]\<open>Examples: \<^theory_text>\<open>text\<close>\<close>
|
||||
text\<open> The integration of the \<^theory_text>\<open>text\<close>-command is done as follows:
|
||||
|
||||
@{ML [display]\<open>
|
||||
Outer_Syntax.command ("text", @{here}) "formal comment (primary style)"
|
||||
(Parse.opt_target -- Parse.document_source >> Pure_Syn.document_command {markdown = true})
|
||||
(Parse.opt_target -- Parse.document_source >> Document_Output.document_output
|
||||
{markdown = true, markup = I})
|
||||
\<close>}
|
||||
|
||||
where \<^ML>\<open>Pure_Syn.document_command\<close> is the defining operation for the
|
||||
"diagnostic" (=side-effect-free) toplevel operation \<^ML>\<open>Pure_Syn.document_command\<close> looks as follows:
|
||||
where \<^ML>\<open>Document_Output.document_output\<close> is the defining operation for the
|
||||
"diagnostic" (=side-effect-free) toplevel operation.
|
||||
\<^ML>\<open>Document_Output.document_output\<close> looks as follows:
|
||||
|
||||
@{ML [display]\<open> let fun output_document state markdown txt =
|
||||
Thy_Output.output_document (Toplevel.presentation_context state) markdown txt
|
||||
fun document_command markdown (loc, txt) =
|
||||
Toplevel.keep (fn state =>
|
||||
(case loc of
|
||||
NONE => ignore (output_document state markdown txt)
|
||||
| SOME (_, pos) =>
|
||||
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
|
||||
Toplevel.present_local_theory loc (fn state =>
|
||||
ignore (output_document state markdown txt)) in () end
|
||||
|
||||
@{ML [display]\<open>let fun document_reports txt =
|
||||
let val pos = Input.pos_of txt in
|
||||
[(pos, Markup.language_document (Input.is_delimited txt)),
|
||||
(pos, Markup.plain_text)]
|
||||
end;
|
||||
fun document_output {markdown, markup} (loc, txt) =
|
||||
let
|
||||
fun output st =
|
||||
let
|
||||
val ctxt = Toplevel.presentation_context st;
|
||||
val _ = Context_Position.reports ctxt (document_reports txt);
|
||||
in txt |> Document_Output.output_document ctxt {markdown = markdown} |> markup end;
|
||||
in
|
||||
Toplevel.present (fn st =>
|
||||
(case loc of
|
||||
NONE => output st
|
||||
| SOME (_, pos) =>
|
||||
error ("Illegal target specification -- not a theory context" ^ Position.here pos))) o
|
||||
Toplevel.present_local_theory loc output
|
||||
end in () end
|
||||
\<close>}
|
||||
|
||||
\<close>
|
||||
|
||||
subsubsection*[ex1138::example]\<open>Examples: \<^theory_text>\<open>ML\<close>\<close>
|
||||
|
||||
text\<open>
|
||||
|
@ -1244,7 +1267,6 @@ subsection\<open>Miscellaneous\<close>
|
|||
|
||||
text\<open>Here are a few queries relevant for the global config of the isar engine:\<close>
|
||||
ML\<open> Document.state();\<close>
|
||||
ML\<open> Session.get_keywords(); (* this looks to be session global. *) \<close>
|
||||
ML\<open> Thy_Header.get_keywords @{theory};(* this looks to be really theory global. *) \<close>
|
||||
|
||||
|
||||
|
@ -1266,7 +1288,7 @@ text\<open>The toplevel also provides an infrastructure for managing configurati
|
|||
\<close>
|
||||
|
||||
|
||||
subsubsection*[ex::example]\<open>Example registration of an config attribute \<close>
|
||||
subsubsection*[ex::example]\<open>Example registration of a config attribute \<close>
|
||||
text\<open>The attribute XS232 is initialized by false:\<close>
|
||||
ML\<open>
|
||||
val (XS232, XS232_setup)
|
||||
|
@ -1298,10 +1320,12 @@ subsection*[ex213::example]\<open>A Definition Command (High-level)\<close>
|
|||
|
||||
text\<open>A quite complex example is drawn from the Theory \<^verbatim>\<open>Clean\<close>; it generates \<close>
|
||||
|
||||
ML\<open>Specification.definition\<close>
|
||||
|
||||
ML\<open>
|
||||
structure HLDefinitionSample =
|
||||
struct
|
||||
fun cmd (decl, spec, prems, params) = #2 oo Specification.definition' decl params prems spec
|
||||
fun cmd (decl, spec, prems, params) = #2 o Specification.definition decl params prems spec
|
||||
|
||||
fun MON_SE_T res state = state --> optionT(HOLogic.mk_prodT(res,state));
|
||||
|
||||
|
@ -1322,7 +1346,7 @@ fun mk_push_def binding sty lthy =
|
|||
val eq = push_eq binding (Binding.name_of name) rty sty lthy
|
||||
val mty = MON_SE_T rty sty
|
||||
val args = (SOME(name, SOME mty, NoSyn), (Binding.empty_atts,eq),[],[])
|
||||
in cmd args true lthy end;
|
||||
in cmd args lthy end;
|
||||
|
||||
val define_test = Named_Target.theory_map (mk_push_def (Binding.name "test") @{typ "'a"})
|
||||
|
||||
|
@ -1407,7 +1431,7 @@ text\<open>The document model forsees a number of text files, which are organize
|
|||
secondary formats can be \<^verbatim>\<open>.sty\<close>,\<^verbatim>\<open>.tex\<close>, \<^verbatim>\<open>.png\<close>, \<^verbatim>\<open>.pdf\<close>, or other files processed
|
||||
by Isabelle and listed in a configuration processed by the build system.\<close>
|
||||
|
||||
figure*[fig3::figure, relative_width="100",src="''figures/document-model''"]
|
||||
figure*[fig3::figure, relative_width="100",file_src="''figures/document-model.pdf''"]
|
||||
\<open>A Theory-Graph in the Document Model\<close>
|
||||
|
||||
text\<open>A \<^verbatim>\<open>.thy\<close> file consists of a \<^emph>\<open>header\<close>, a \<^emph>\<open>context-definition\<close> and
|
||||
|
@ -1430,31 +1454,31 @@ text\<open>A \<^verbatim>\<open>.thy\<close> file consists of a \<^emph>\<open>h
|
|||
predefined commands allow for the dynamic creation of new commands similarly
|
||||
to the definition of new functions in an interpreter shell (or: toplevel, see above.).
|
||||
A command starts with a pre-declared keyword and specific syntax of this command;
|
||||
the declaration of a keyword is only allowed in the same \<^verbatim>\<open>.thy\<close>-file where the
|
||||
the declaration of a keyword is only allowed in the same \<^verbatim>\<open>.thy\<close>-file where
|
||||
the corresponding new command is defined. The semantics of the command is expressed
|
||||
in ML and consists of a @{ML_type "Toplevel.transition -> Toplevel.transition"}
|
||||
function. Thus, the Isar-toplevel supports the generic document model
|
||||
and allows for user-programmed extensions.\<close>
|
||||
|
||||
text\<open>In the traditional literature, Isabelle \<^verbatim>\<open>.thy\<close>-files were
|
||||
were said to be processed by processed by two types of parsers:
|
||||
text\<open>In the traditional literature, Isabelle \<^verbatim>\<open>.thy\<close>-files
|
||||
were said to be processed by two types of parsers:
|
||||
\<^enum> the "outer-syntax" (i.e. the syntax for commands) is processed
|
||||
by a lexer-library and parser combinators built on top, and
|
||||
\<^enum> the "inner-syntax" (i.e. the syntax for @{term \<open>\<Lambda>\<close>} - terms)
|
||||
\<^enum> the "inner-syntax" (i.e. the syntax for @{term \<open>\<Lambda>\<close>}-terms)
|
||||
with an evolved, eight-layer parsing and pretty-printing process
|
||||
based on an Early-algorithm.
|
||||
based on an Earley-algorithm.
|
||||
\<close>
|
||||
|
||||
text\<open>This picture is less and less true for a number of reasons:
|
||||
\<^enum> With the advent of \<open>(\<open>)... (\<close>)\<close>, a mechanism for
|
||||
\<^emph>\<open>cascade-syntax\<close> came to the Isabelle platform that introduce a flexible means
|
||||
\<^enum> With the advent of \<open>\<open> ... \<close>\<close>, a mechanism for
|
||||
\<^emph>\<open>cascade-syntax\<close> came to the Isabelle platform that introduces a flexible means
|
||||
to change parsing contexts \<^emph>\<open>and\<close> parsing technologies.
|
||||
\<^enum> Inside the term-parser levels, the concept of \<^emph>\<open>cartouche\<close> can be used
|
||||
to escape the parser and its underlying parsing technology.
|
||||
\<^enum> Outside, in the traditional toplevel-parsers, the
|
||||
\<open>(\<open>)... (\<close>)\<close> is becoming more and more enforced
|
||||
\<open>\<open> ... \<close>\<close> is becoming more and more enforced
|
||||
(some years ago, syntax like \<open>term{* ... *}\<close> was replaced by
|
||||
syntax \<open>term(\<open>)... (\<close>)\<close>. This makes technical support of cascade syntax
|
||||
syntax \<open>term\<open> ... \<close>\<close>. This makes technical support of cascade syntax
|
||||
more and more easy.
|
||||
\<^enum> The Lexer infra-structure is already rather generic; nothing prevents to
|
||||
add beside the lexer - configurations for ML-Parsers, Toplevel Command Syntax
|
||||
|
@ -1468,8 +1492,8 @@ section\<open>Basics: string, bstring and xstring\<close>
|
|||
text\<open>\<^ML_type>\<open>string\<close> is the basic library type from the SML library
|
||||
in structure \<^ML_structure>\<open>String\<close>. Many Isabelle operations produce
|
||||
or require formats thereof introduced as type synonyms
|
||||
\<^ML_type>\<open>bstring\<close> (defined in structure \<^ML_structure>\<open>Binding\<close>
|
||||
and \<^ML_type>\<open>xstring\<close> (defined in structure \<^ML_structure>\<open>Name_Space\<close>.
|
||||
\<^ML_type>\<open>bstring\<close> (defined in structure \<^ML_structure>\<open>Binding\<close>)
|
||||
and \<^ML_type>\<open>xstring\<close> (defined in structure \<^ML_structure>\<open>Name_Space\<close>).
|
||||
Unfortunately, the abstraction is not tight and combinations with
|
||||
elementary routines might produce quite crappy results.\<close>
|
||||
|
||||
|
@ -1480,7 +1504,7 @@ text\<open>... produces the system output \<^verbatim>\<open>val it = "here": bs
|
|||
ML\<open>String.explode b\<close> (* is harmless, but *)
|
||||
ML\<open>String.explode(Binding.name_of
|
||||
(Binding.conglomerate[Binding.qualified_name "X.x", @{binding "here"}] ))\<close>
|
||||
text\<open>... whioch leads to the output \<^verbatim>\<open>val it = [#"x", #"_", #"h", #"e", #"r", #"e"]: char list\<close>\<close>
|
||||
text\<open>... which leads to the output \<^verbatim>\<open>val it = [#"x", #"_", #"h", #"e", #"r", #"e"]: char list\<close>\<close>
|
||||
|
||||
text\<open> However, there is an own XML parser for this format. See Section Markup. \<close>
|
||||
|
||||
|
@ -1502,7 +1526,7 @@ text\<open> ... uses the antiquotation @{ML "@{here}"} to infer from the system
|
|||
of itself in the global document, converts it to markup (a string-representation of it) and sends
|
||||
it via the usual @{ML "writeln"} to the interface. \<close>
|
||||
|
||||
figure*[hyplinkout::figure,relative_width="40",src="''figures/markup-demo''"]
|
||||
figure*[hyplinkout::figure,relative_width="40",file_src="''figures/markup-demo.png''"]
|
||||
\<open>Output with hyperlinked position.\<close>
|
||||
|
||||
text\<open>@{figure \<open>hyplinkout\<close>} shows the produced output where the little house-like symbol in the
|
||||
|
@ -1513,11 +1537,12 @@ text\<open>The structures @{ML_structure Markup} and @{ML_structure Properties}
|
|||
annotation data which is part of the protocol sent from Isabelle to the front-end.
|
||||
They are qualified as "quasi-abstract", which means they are intended to be an abstraction of
|
||||
the serialized, textual presentation of the protocol. Markups are structurally a pair of a key
|
||||
and properties; @{ML_structure Markup} provides a number of of such \<^emph>\<open>key\<close>s for annotation classes
|
||||
and properties; @{ML_structure Markup} provides a number of such \<^emph>\<open>key\<close>s for annotation classes
|
||||
such as "constant", "fixed", "cartouche", some of them quite obscure. Here is a code sample
|
||||
from \<^theory_text>\<open>Isabelle_DOF\<close>. A markup must be tagged with an id; this is done by the @{ML serial}-function
|
||||
discussed earlier. Markup Operations, were used for hyperlinking applications to binding
|
||||
occurrences, info for hovering, infors for type ... \<close>
|
||||
discussed earlier. Markup operations were used for hyperlinking applications to binding
|
||||
occurrences, info for hovering, infos for type ... \<close>
|
||||
|
||||
ML\<open>
|
||||
(* Position.report is also a type consisting of a pair of a position and markup. *)
|
||||
(* It would solve all my problems if I find a way to infer the defining Position.report
|
||||
|
@ -1533,20 +1558,18 @@ Markup.enclose : Markup.T -> string -> string;
|
|||
|
||||
(* example for setting a link, the def flag controls if it is a defining or a binding
|
||||
occurence of an item *)
|
||||
fun theory_markup (def:bool) (name:string) (id:serial) (pos:Position.T) =
|
||||
if id = 0 then Markup.empty
|
||||
else
|
||||
Markup.properties (Position.entity_properties_of def id pos)
|
||||
(Markup.entity Markup.theoryN name);
|
||||
Markup.theoryN : string;
|
||||
|
||||
serial(); (* A global, lock-guarded seriel counter used to produce unique identifiers,
|
||||
fun theory_markup refN (def:bool) (name:string) (id:serial) (pos:Position.T) =
|
||||
if id = 0 then Markup.empty
|
||||
else Position.make_entity_markup {def = def} id refN (name, pos);
|
||||
|
||||
serial(); (* A global, lock-guarded serial counter used to produce unique identifiers,
|
||||
be it on the level of thy-internal states or as reference in markup in
|
||||
PIDE *)
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
subsection\<open>A simple Example\<close>
|
||||
ML\<open>
|
||||
local
|
||||
|
@ -1555,10 +1578,7 @@ val docclassN = "doc_class";
|
|||
|
||||
(* derived from: theory_markup; def for "defining occurrence" (true) in contrast to
|
||||
"referring occurence" (false). *)
|
||||
fun docclass_markup def name id pos =
|
||||
if id = 0 then Markup.empty
|
||||
else Markup.properties (Position.entity_properties_of def id pos)
|
||||
(Markup.entity docclassN name);
|
||||
val docclass_markup = theory_markup docclassN
|
||||
|
||||
in
|
||||
|
||||
|
@ -1577,21 +1597,23 @@ text\<open>The @\<open>ML report_defining_occurrence\<close>-function above take
|
|||
|
||||
subsection\<open>A Slightly more Complex Example\<close>
|
||||
text\<open>Note that this example is only animated in the integrated source of this document;
|
||||
it is essential that is executed inside Isabelle/jedit. \<close>
|
||||
it is essential that is executed inside Isabelle/jEdit. \<close>
|
||||
ML \<open>
|
||||
|
||||
fun markup_tvar def_name ps (name, id) =
|
||||
let
|
||||
fun markup_elem name = (name, (name, []): Markup.T);
|
||||
val (tvarN, tvar) = markup_elem ((case def_name of SOME name => name | _ => "") ^ "'s nickname is");
|
||||
val entity = Markup.entity tvarN name
|
||||
val entity = Markup.entity tvarN name (* ??? *)
|
||||
val def = def_name = NONE
|
||||
in
|
||||
tvar ::
|
||||
(if def then I else cons (Markup.keyword_properties Markup.ML_keyword3))
|
||||
(map (fn pos => Markup.properties (Position.entity_properties_of def id pos) entity) ps)
|
||||
(map (fn pos => Position.make_entity_markup {def = def} id tvarN (name, pos) ) ps)
|
||||
end
|
||||
|
||||
(* Position.make_entity_markup {def = def} id refN (name, pos) *)
|
||||
|
||||
fun report [] _ _ = I
|
||||
| report ps markup x =
|
||||
let val ms = markup x
|
||||
|
@ -1606,7 +1628,7 @@ val data = \<comment> \<open>Derived from Yakoub's example ;-)\<close>
|
|||
, (\<open>Frédéric II\<close>, \<open>King of Sicily\<close>)
|
||||
, (\<open>Frédéric III\<close>, \<open>the Handsome\<close>)
|
||||
, (\<open>Frédéric IV\<close>, \<open>of the Empty Pockets\<close>)
|
||||
, (\<open>Frédéric V\<close>, \<open>King of Denmark–Norway\<close>)
|
||||
, (\<open>Frédéric V\<close>, \<open>King of Denmark-Norway\<close>)
|
||||
, (\<open>Frédéric VI\<close>, \<open>the Knight\<close>)
|
||||
, (\<open>Frédéric VII\<close>, \<open>Count of Toggenburg\<close>)
|
||||
, (\<open>Frédéric VIII\<close>, \<open>Count of Zollern\<close>)
|
||||
|
@ -1746,7 +1768,7 @@ text\<open>Parsing Combinators go back to monadic programming as advocated by Wa
|
|||
'a * (Context.generic * Token.T list)\<close>
|
||||
|
||||
Since the semantics of an Isabelle command is a \<^ML_type>\<open>Toplevel.transition -> Toplevel.transition \<close>
|
||||
or theory \<^ML_type>\<open>theory -> theory\<close> function, i.e. a global system transition.
|
||||
or theory \<^ML_type>\<open>theory -> theory\<close> function, i.e. a global system transition,
|
||||
"parsers" of that type can be constructed and be bound as call-back functions
|
||||
to a table in the Toplevel-structure of Isar.
|
||||
|
||||
|
@ -1766,7 +1788,7 @@ text\<open>Parsing Combinators go back to monadic programming as advocated by Wa
|
|||
\<^item> \<^ML>\<open>op |-- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> 'd * 'e\<close>
|
||||
concatenate and forget first parse result
|
||||
\<^item> \<^ML>\<open>op --| : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> 'b * 'e\<close>
|
||||
concatenation and forget second parse result
|
||||
concatenate and forget second parse result
|
||||
\<^item> \<^ML>\<open>op ^^ : ('a -> string * 'b) * ('b -> string * 'c) -> 'a -> string * 'c\<close>
|
||||
\<^item> \<^ML>\<open>op ::: : ('a -> 'b * 'c) * ('c -> 'b list * 'd) -> 'a -> 'b list * 'd\<close>
|
||||
\<^item> \<^ML>\<open>op @@@ : ('a -> 'b list * 'c) * ('c -> 'b list * 'd) -> 'a -> 'b list * 'd\<close>
|
||||
|
@ -1785,7 +1807,7 @@ fun parser2contextparser pars (ctxt, toks) = let val (a, toks') = pars toks
|
|||
in (a,(ctxt, toks')) end;
|
||||
val _ = parser2contextparser : 'a parser -> 'a context_parser;
|
||||
|
||||
(* bah, is the same as Scan.lift *)
|
||||
(* bah, it's the same as Scan.lift *)
|
||||
val _ = Scan.lift Args.cartouche_input : Input.source context_parser;\<close>
|
||||
|
||||
subsection\<open>Advanced Parser Library\<close>
|
||||
|
@ -1798,10 +1820,10 @@ text\<open>There are two parts. A general multi-purpose parsing combinator libra
|
|||
\<^item> \<^ML>\<open>Parse.nat: int parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.int: int parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.enum_positions: string -> 'a parser -> ('a list * Position.T list) parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.enum: string -> 'a parser -> 'a list parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.enum : string -> 'a parser -> 'a list parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.input: 'a parser -> Input.source parser\<close>
|
||||
|
||||
\<^item> \<^ML>\<open>Parse.enum : string -> 'a parser -> 'a list parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.enum': string -> 'a context_parser -> 'a list context_parser\<close>
|
||||
\<^item> \<^ML>\<open>Parse.!!! : (Token.T list -> 'a) -> Token.T list -> 'a\<close>
|
||||
\<^item> \<^ML>\<open>Parse.position: 'a parser -> ('a * Position.T) parser\<close>
|
||||
|
||||
|
@ -1810,10 +1832,11 @@ text\<open>There are two parts. A general multi-purpose parsing combinator libra
|
|||
|
||||
text\<open>The second part is much more high-level, and can be found under \<^ML_structure>\<open>Args\<close>.
|
||||
In parts, these combinators are again based on more low-level combinators, in parts they serve as
|
||||
an an interface to the underlying Earley-parser for mathematical notation used in types and terms.
|
||||
an interface to the underlying Earley-parser for mathematical notation used in types and terms.
|
||||
This is perhaps meant with the fairly cryptic comment:
|
||||
"Quasi-inner syntax based on outer tokens: concrete argument syntax of
|
||||
attributes, methods etc." at the beginning of this structure.\<close>
|
||||
ML\<open>open Args\<close>
|
||||
|
||||
text\<open> Some more combinators
|
||||
\<^item>\<^ML>\<open>Args.symbolic : Token.T parser\<close>
|
||||
|
@ -1834,17 +1857,11 @@ Common Isar Syntax
|
|||
\<^item>\<^ML>\<open>Args.name_position: (string * Position.T) parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_inner_syntax: string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_input: Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_token: Token.T parser \<close>
|
||||
|
||||
|
||||
Common Isar Syntax
|
||||
\<^item>\<^ML>\<open>Args.embedded_token : Token.T parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.embedded_inner_syntax: string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.embedded_input : Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.embedded : string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.embedded_position: (string * Position.T) parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_input: Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text : string parser\<close>
|
||||
\<^item>\<^ML>\<open>Parse.embedded_input: Input.source parser\<close>
|
||||
\<^item>\<^ML>\<open>Parse.embedded : string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.binding : Binding.binding parser\<close>
|
||||
|
||||
Common Stuff related to Inner Syntax Parsing
|
||||
|
@ -1856,19 +1873,17 @@ Common Stuff related to Inner Syntax Parsing
|
|||
\<^item>\<^ML>\<open>Args.internal_typ : typ parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.internal_term: term parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.internal_fact: thm list parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.internal_attribute: (morphism -> attribute) parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.internal_declaration: declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.internal_attribute: attribute Morphism.entity parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.alt_name : string parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.liberal_name: string parser\<close>
|
||||
|
||||
|
||||
|
||||
Common Isar Syntax
|
||||
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_term : (string -> term) -> term parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.embedded_declaration: (Input.source -> Morphism.declaration_entity) ->
|
||||
Morphism.declaration_entity parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.typ_abbrev : typ context_parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.typ: typ context_parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.term: term context_parser\<close>
|
||||
|
@ -1877,8 +1892,6 @@ Common Isar Syntax
|
|||
\<^item>\<^ML>\<open>Args.named_source: (Token.T -> Token.src) -> Token.src parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_typ : (string -> typ) -> typ parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.named_term: (string -> term) -> term parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.text_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
\<^item>\<^ML>\<open>Args.cartouche_declaration: (Input.source -> declaration) -> declaration parser\<close>
|
||||
|
||||
Syntax for some major Pure commands in Isar
|
||||
\<^item>\<^ML>\<open>Args.prop: term context_parser\<close>
|
||||
|
@ -1913,7 +1926,7 @@ subsubsection\<open> Example \<close>
|
|||
text\<open>Since this is so common in interface programming, there are a number of antiquotations\<close>
|
||||
ML\<open>
|
||||
val H = @{binding here}; (* There are "bindings" consisting of a text-span and a position,
|
||||
where \<dieresis>positions\<dieresis> are absolute references to a file *)
|
||||
where "positions" are absolute references to a file *)
|
||||
|
||||
Binding.pos_of H; (* clicking on "H" activates the hyperlink to the defining occ of "H" above *)
|
||||
(* {offset=23, end_offset=27, id=-17214}: Position.T *)
|
||||
|
@ -1935,7 +1948,7 @@ subsubsection \<open>Example :Input streams. \<close>
|
|||
|
||||
ML\<open> Input.source_explode (Input.string " f @{thm refl}");
|
||||
|
||||
(* If stemming from the input window, this can be s th like:
|
||||
(* If stemming from the input window, this can be something like:
|
||||
|
||||
[(" ", {offset=14, id=-2769}), ("f", {offset=15, id=-2769}), (" ", {offset=16, id=-2769}),
|
||||
("@", {offset=17, id=-2769}), ("{", {offset=18, id=-2769}), ("t", {offset=19, id=-2769}),
|
||||
|
@ -1949,17 +1962,17 @@ ML\<open> Input.source_explode (Input.string " f @{thm refl}");
|
|||
|
||||
section\<open>Term Parsing\<close>
|
||||
|
||||
text\<open>The heart of the parsers for mathematical notation, based on an Earley parser that can cope
|
||||
text\<open>The heart of the parsers for mathematical notation, based on an Earley-parser that can cope
|
||||
with incremental changes of the grammar as required for sophisticated mathematical output, is hidden
|
||||
behind the API described in this section.\<close>
|
||||
|
||||
text\<open> Note that the naming underlies the following convention :
|
||||
there are:
|
||||
text\<open> Note that the naming underlies the following convention.
|
||||
There are:
|
||||
\<^enum> "parser"s
|
||||
\<^enum> type-"checker"s, which usually also englobe the markup generation for PIDE
|
||||
\<^enum> "reader"s which do both together with pretty-printing
|
||||
|
||||
This is encapsulated the data structure @{ML_structure Syntax} ---
|
||||
This is encapsulated in the data structure @{ML_structure Syntax} ---
|
||||
the table with const symbols, print and ast translations, ... The latter is accessible, e.g.
|
||||
from a Proof context via @{ML Proof_Context.syn_of}.
|
||||
\<close>
|
||||
|
@ -2040,6 +2053,10 @@ text\<open>
|
|||
|
||||
|
||||
|
||||
(*
|
||||
Document_Antiquotation
|
||||
*)
|
||||
|
||||
subsection*[ex33::example] \<open>Example\<close>
|
||||
|
||||
ML\<open>
|
||||
|
@ -2052,10 +2069,10 @@ ML\<open>
|
|||
|
||||
(* Here is the code to register the above parsers as text antiquotations into the Isabelle
|
||||
Framework: *)
|
||||
Thy_Output.antiquotation_pretty_source \<^binding>\<open>theory\<close>
|
||||
(Scan.lift (Parse.position Args.embedded));
|
||||
Document_Output.antiquotation_pretty_source \<^binding>\<open>theory\<close>
|
||||
(Scan.lift (Parse.position Parse.embedded));
|
||||
|
||||
Thy_Output.antiquotation_raw \<^binding>\<open>file\<close>
|
||||
Document_Output.antiquotation_raw \<^binding>\<open>file\<close>
|
||||
(Scan.lift (Parse.position Parse.path)) ;
|
||||
|
||||
\<close>
|
||||
|
@ -2066,7 +2083,7 @@ text\<open>where we have the registration of the action
|
|||
transaction that, of course, has the type \<^ML_type>\<open>theory -> theory\<close> :
|
||||
|
||||
@{ML [display] \<open>
|
||||
(fn name => (Thy_Output.antiquotation_pretty_source
|
||||
(fn name => (Document_Output.antiquotation_pretty_source
|
||||
name
|
||||
(Scan.lift (Parse.position Args.cartouche_input))))
|
||||
: binding ->
|
||||
|
@ -2086,11 +2103,11 @@ ML\<open> Output.output "bla_1:" \<close>
|
|||
text\<open>It provides a number of hooks that can be used for redirection hacks ...\<close>
|
||||
|
||||
section \<open> Output: LaTeX \<close>
|
||||
text\<open>The heart of the LaTeX generator is to be found in the structure \<^ML_structure>\<open>Thy_Output\<close>.
|
||||
text\<open>The heart of the LaTeX generator is to be found in the structure \<^ML_structure>\<open>Document_Output\<close>.
|
||||
This is an own parsing and writing process, with the risc that a parsed file in the IDE parsing
|
||||
process can not be parsed for the LaTeX Generator. The reason is twofold:
|
||||
|
||||
\<^enum> The LaTeX Generator makes a rough attempt to mimic the LayOut if the thy-file; thus, its
|
||||
\<^enum> The LaTeX Generator makes a rough attempt to mimic the LayOut in the thy-file; thus, its
|
||||
spacing is relevant.
|
||||
\<^enum> there is a special bracket \<open>(*<*)\<close> ... \<open>(*>*)\<close> that allows to specify input that is checked by
|
||||
Isabelle, but excluded from the LaTeX generator (this is handled in an own sub-parser
|
||||
|
@ -2102,60 +2119,51 @@ Since Isabelle2018, an own AST is provided for the LaTeX syntax, analogously to
|
|||
\<^item>\<^ML>\<open>Latex.string: string -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Latex.text: string * Position.T -> Latex.text\<close>
|
||||
|
||||
\<^item>\<^ML>\<open>Latex.output_text: Latex.text list -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.output_positions: Position.T -> Latex.text list -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.output_name: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.output_ascii: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.output_symbols: Symbol.symbol list -> string\<close>
|
||||
|
||||
\<^item>\<^ML>\<open>Latex.begin_delim: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.end_delim: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.begin_tag: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.end_tag: string -> string\<close>
|
||||
\<^item>\<^ML>\<open>Latex.environment_block: string -> Latex.text list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Latex.environment: string -> string -> string\<close>
|
||||
|
||||
\<^item>\<^ML>\<open>Latex.block: Latex.text list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Latex.enclose_body: string -> string -> Latex.text list -> Latex.text list\<close>
|
||||
\<^item>\<^ML>\<open>Latex.enclose_block: string -> string -> Latex.text list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Latex.environment: string -> Latex.text -> Latex.text\<close>
|
||||
|
||||
\<^item>\<^ML>\<open>Latex.block: Latex.text -> XML.tree\<close>
|
||||
\<close>
|
||||
|
||||
|
||||
|
||||
ML\<open> Latex.output_ascii;
|
||||
Latex.environment "isa" "bg";
|
||||
Latex.environment "isa" (Latex.string "bg");
|
||||
Latex.output_ascii "a_b:c'é";
|
||||
(* Note: *)
|
||||
space_implode "sd &e sf dfg" ["qs","er","alpa"];
|
||||
\<close>
|
||||
|
||||
text\<open>Here is an abstract of the main interface to @{ML_structure Thy_Output}:\<close>
|
||||
text\<open>Here is an abstract of the main interface to @{ML_structure Document_Output}:\<close>
|
||||
|
||||
text\<open>
|
||||
\<^item>\<^ML>\<open>Thy_Output.output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.output_token: Proof.context -> Token.T -> Latex.text list\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.output_source: Proof.context -> string -> Latex.text list\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.present_thy: Options.T -> theory -> Thy_Output.segment list -> Latex.text list\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text \<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.output_token: Proof.context -> Token.T -> Latex.text \<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.output_source: Proof.context -> string -> Latex.text \<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.present_thy: Options.T -> Keyword.keywords -> string -> Document_Output.segment list -> Latex.text \<close>
|
||||
|
||||
\<^item>\<^ML>\<open>Thy_Output.isabelle: Proof.context -> Latex.text list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.typewriter: Proof.context -> string -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.verbatim: Proof.context -> string -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.pretty: Proof.context -> Pretty.T -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.pretty_items: Proof.context -> Pretty.T list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.pretty_items_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.isabelle: Proof.context -> Latex.text -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.isabelle_typewriter: Proof.context -> Latex.text -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.typewriter: Proof.context -> string -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.verbatim: Proof.context -> string -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.pretty: Proof.context -> Pretty.T -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.pretty_items: Proof.context -> Pretty.T list -> Latex.text\<close>
|
||||
\<^item>\<^ML>\<open>Document_Output.pretty_items_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T list -> Latex.text\<close>
|
||||
|
||||
Finally a number of antiquotation registries :
|
||||
|
||||
\<^item>\<^ML>\<open>Thy_Output.antiquotation_pretty:
|
||||
\<^item>\<^ML>\<open>Document_Output.antiquotation_pretty:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.antiquotation_pretty_source:
|
||||
\<^item>\<^ML>\<open>Document_Output.antiquotation_pretty_source:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.antiquotation_raw:
|
||||
\<^item>\<^ML>\<open>Document_Output.antiquotation_raw:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory\<close>
|
||||
\<^item>\<^ML>\<open>Thy_Output.antiquotation_verbatim:
|
||||
\<^item>\<^ML>\<open>Document_Output.antiquotation_verbatim:
|
||||
binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory\<close>
|
||||
\<close>
|
||||
|
Before Width: | Height: | Size: 162 KiB After Width: | Height: | Size: 162 KiB |
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 13 KiB |
Before Width: | Height: | Size: 91 KiB After Width: | Height: | Size: 91 KiB |
Before Width: | Height: | Size: 31 KiB After Width: | Height: | Size: 31 KiB |
|
@ -0,0 +1,9 @@
|
|||
template-beamerposter-UNSUPPORTED
|
||||
template-beamer-UNSUPPORTED
|
||||
template-lipics-v2021-UNSUPPORTED
|
||||
template-lncs
|
||||
template-scrartcl
|
||||
template-scrreprt
|
||||
template-scrreprt-modern
|
||||
template-sn-article-UNSUPPORTED
|
||||
template-svjour3-UNSUPPORTED
|
|
@ -0,0 +1,9 @@
|
|||
session "template-beamer-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-beamer-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,72 @@
|
|||
(*<*)
|
||||
theory
|
||||
"template-beamer-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "beamer-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
(*>*)
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
(*
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
*)
|
||||
|
||||
text\<open>
|
||||
\begin{frame}
|
||||
\frametitle{Example Slide}
|
||||
\centering\huge This is an example!
|
||||
\end{frame}
|
||||
\<close>
|
||||
|
||||
|
||||
frame*[test_frame
|
||||
, frametitle = \<open>\<open>\<open>Example Slide\<^sub>t\<^sub>e\<^sub>s\<^sub>t\<close> with items @{thm "HOL.refl"}\<close>\<close>
|
||||
, framesubtitle = "''Subtitle''"]
|
||||
\<open>This is an example!
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame\<close>}\<close>\<close>
|
||||
|
||||
frame*[test_frame2
|
||||
, frametitle = "''Example Slide''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>Test frame env \<^term>\<open>refl\<close>\<close>
|
||||
|
||||
frame*[test_frame3
|
||||
, options = "''allowframebreaks''"
|
||||
, frametitle = "''Example Slide with frame break''"
|
||||
, framesubtitle = \<open>\<open>\<open>Subtitle\<^sub>t\<^sub>e\<^sub>s\<^sub>t:\<close> the value of \<^term>\<open>(3::int) + 3\<close> is @{value "(3::int) + 3"}\<close>\<close>]
|
||||
\<open>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> and the term encoding the title of this frame is \<^term_>\<open>frametitle @{frame \<open>test_frame3\<close>}\<close>
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<^item> The term \<^term>\<open>refl\<close> is...
|
||||
\<close>
|
||||
|
||||
(*<*)
|
||||
end
|
||||
(*>*)
|
|
@ -0,0 +1,9 @@
|
|||
session "template-beamerposter-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-beamerposter-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,21 @@
|
|||
theory
|
||||
"template-beamerposter-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "beamerposter-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,10 @@
|
|||
session "template-lipics-v2021-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-lipics-v2021-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"lipics-v2021.cls"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,21 @@
|
|||
theory
|
||||
"template-lipics-v2021-UNSUPPORTED"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "lipics-v2021-UNSUPPORTED"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,9 @@
|
|||
session "template-lncs" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-lncs"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,21 @@
|
|||
theory
|
||||
"template-lncs"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "lncs"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,9 @@
|
|||
session "template-scrartcl" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-scrartcl"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,21 @@
|
|||
theory
|
||||
"template-scrartcl"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.scholarly_paper
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "scrartcl"
|
||||
list_ontologies
|
||||
use_ontology "scholarly_paper"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,9 @@
|
|||
session "template-scrreprt-modern" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-scrreprt-modern"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,21 @@
|
|||
theory
|
||||
"template-scrreprt-modern"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.technical_report
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "scrreprt-modern"
|
||||
list_ontologies
|
||||
use_ontology "technical_report"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,9 @@
|
|||
session "template-scrreprt" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-scrreprt"
|
||||
document_files
|
||||
"preamble.tex"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|
|
@ -0,0 +1,21 @@
|
|||
theory
|
||||
"template-scrreprt"
|
||||
imports
|
||||
"Isabelle_DOF-Ontologies.document_templates"
|
||||
Isabelle_DOF.technical_report
|
||||
begin
|
||||
|
||||
list_templates
|
||||
use_template "scrreprt"
|
||||
list_ontologies
|
||||
use_ontology "technical_report"
|
||||
|
||||
title* [tit::title]\<open>Formal Verification of Security Protocols\<close>
|
||||
author*[alice, email = "\<open>alice@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/alice\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Alice\<close>
|
||||
author*[bob, email = "\<open>bob@example.com\<close>",
|
||||
http_site = "\<open>https://example.com/bob\<close>",
|
||||
affiliation = "\<open>Wonderland University\<close>"]\<open>Bob\<close>
|
||||
|
||||
end
|
|
@ -0,0 +1,10 @@
|
|||
session "template-sn-article-UNSUPPORTED" = "Isabelle_DOF-Ontologies" +
|
||||
options [document = pdf, document_output = "output", document_build = dof]
|
||||
(*theories [document = false]
|
||||
A
|
||||
B*)
|
||||
theories
|
||||
"template-sn-article-UNSUPPORTED"
|
||||
document_files
|
||||
"preamble.tex"
|
||||
"sn-jnl.cls"
|
|
@ -0,0 +1 @@
|
|||
%% This is a placeholder for user-specific configuration and packages.
|