Theory CTranslationSetup

(*
 * Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
 * Copyright (c) 2022 Apple Inc. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-2-Clause
 *)

chapter ‹Setup Lex / Yacc and Translation from C to Simpl›

theory CTranslationSetup
  imports
  "UMM"
  "PackedTypes"
  "PrettyProgs"
  "IndirectCalls"
  "ModifiesProofs"
  "HOL-Eisbach.Eisbach"
  "Option_Scanner"
  "Misc_Antiquotation"
  "MkTermAntiquote"
  "TermPatternAntiquote"
  "CLocals"
keywords
  "cond_sorry_modifies_proofs" :: thy_decl
and
  "mllex"
  "mlyacc":: thy_load 
begin

ML ‹structure Conj_Elim =
struct

val ctxt0 = @{context}
structure Data = Theory_Data (
  type T = thm Inttab.table
  val empty = Inttab.empty;
  fun merge (t1, t2) = if pointer_eq (t1, t2) then t1 else Inttab.merge (K true) (t1, t2)
)

fun P' i = (("P", i), typbool)
fun P i = Var (P' i) |> Thm.cterm_of ctxt0
val thesis = Var (("thesis", 0), \<^typ>‹bool›) |> Thm.cterm_of ctxt0


val thm2 = \<^instantiate>‹P2 = ‹P 2› and P1 = ‹P 1› and thesis = thesis
  in lemma "P2 ∧ P1 ⟹ (P2 ⟹ P1 ⟹ thesis) ⟹ thesis" by (erule conjE)›

val _ = Theory.setup (Data.put (Inttab.make [(2, thm2)]))


fun non_atomic (Const ("Pure.imp", _) $ _ $ _) = true
  | non_atomic (Const ("Pure.all", _) $ _) = true
  | non_atomic _ = false;

val assume_rule_tac = CSUBGOAL (fn (goal, i) =>
  let 
    val Rs = filter (non_atomic o Thm.term_of) (Drule.strip_imp_prems goal);
    val tacs = Rs |> map (fn R =>
      Thm.bicompose NONE {flatten = false, match = true, incremented = true} (false, Thm.trivial R, 0));
  in fold_rev (curry op APPEND') tacs (K no_tac) i end);

fun ensure_upto n thy =
  if n <= 1 then error "Conj_Elim: expecting at least 2 conjuncts" 
  else
    let
      val tab = Data.get thy
    in
      case Inttab.lookup tab n of 
        SOME thm => (thm, thy)
      | NONE => 
         let
           val (thm', thy') = ensure_upto (n - 1) thy
           val start = Timing.start ();
           val [conj', elim']  = Thm.cprems_of thm'
           val conj = \<^instantiate>‹A = ‹P n› and B = ‹HOLogic.dest_judgment conj'› in cterm ‹A ∧ B››
           val elim = \<^instantiate>‹A = ‹P n› and B = elim' in cprop ‹A ⟹ PROP B››
           val prop = \<^instantiate>‹C = conj and E = elim and thesis = thesis in cprop ‹C ⟹ PROP E ⟹ thesis››
           val thm = Utils.timeit_msg 2 ctxt0 (fn _ => string_of_int n ^ ": prove: " ) (fn _ =>
                 Goal.prove_internal ctxt0 [] prop (fn _ =>
                 ematch_tac ctxt0 @{thms conjE} 1 THEN
                 ematch_tac ctxt0 [thm'] 1 THEN
                 assume_rule_tac 1))
           val thy'' = Utils.timeit_msg 2 ctxt0 (fn _ => string_of_int n ^ ": data: " ) (fn _ =>  
             Data.map (Inttab.update_new (n, Thm.trim_context thm)) thy')
           val _ = Utils.timing_msg' 1 ctxt0 (fn _ => string_of_int n ^ ": all") start
         in
           (thm, thy'')
         end
    end 

fun dest_conjs' ct = ct |> Match_Cterm.switch [
  @{cterm_match ‹?P ∧ ?Q›} #> (fn {P, Q, ...} => P :: dest_conjs' Q),
  (fn _ => [ct])]

fun dest_conjs ct = 
  case Thm.term_of ct of
    @{term_pat "?P  ?Q"} => let val (P, X) = Thm.dest_binop ct in P :: dest_conjs X end
  | _=> [ct]

fun proj ctxt i elim_thm =
  SINGLE (filter_prems_tac ctxt (fn (_ $ Var ((_, j), _)) => i = j | _ => false) 2) elim_thm

fun dest i elim_thm =
  let    
    val inst_thm = Thm.instantiate (TVars.empty, Vars.make1 ((("thesis", 0), \<^typ>‹bool›), P i)) elim_thm
    val dest = SINGLE (eq_assume_tac 2) inst_thm
  in
    dest
  end

fun conj_elims {parallel} ctxt thm =
  let
    val start = Timing.start ();
    val conjs = Utils.timeit_msg 2 ctxt (fn _ => "conj_elims: conjs") (fn _ =>  
      thm |> Thm.cconcl_of |> HOLogic.dest_judgment |> dest_conjs)
    val n = length conjs
    val _ = (n > 1) orelse raise Match
    val (elim_n, _) = ensure_upto n (Proof_Context.theory_of ctxt)
    val tagged_conjs = map_index (fn (i, c) => (n - i, c)) conjs
    val insts = Vars.make (map (fn (i, c) => (P' i, c)) tagged_conjs)
    fun mk_thm (i, conj) =
      let
       val dest = the (Utils.timeit_msg 2 ctxt (fn _ => "conj_elims: dest") (fn _ => dest i elim_n))
       val dest_inst = Utils.timeit_msg 2 ctxt (fn _ => "conj_elims: dest_inst") (fn _ =>
        Thm.instantiate (TVars.empty, insts) dest)
       val dest_inst' =  Utils.timeit_msg 2 ctxt (fn _ => "conj_elims: dest_inst'") (fn _ => Thm.implies_elim dest_inst thm)
      in
        Goal.prove_internal ctxt [] (HOLogic.mk_judgment conj) (fn _ =>          
          Utils.timeap_msg_tac 2 ctxt (fn _ => "conj_elims: match") (match_tac ctxt [dest_inst'] 1))
      end
    val map = if parallel then Par_List.map else map
    val thms = map mk_thm tagged_conjs
    val _ = Utils.timing_msg' 1 ctxt0 (fn _ => "Conj_Elim.conj_elims " ^ string_of_int n ^ ": ") start
  in thms end
  handle Match => [thm]


end›

setup ‹Conj_Elim.ensure_upto 1500 #> snd›

ML ‹structure Coerce_Syntax =
struct

  val show_types = Ptr_Syntax.show_ptr_types

  fun coerce_tr' cnst ctxt typ ts = if Config.get ctxt show_types then
      case Term.strip_type typ of
        ([S], T) =>
          list_comb
            (Syntax.const cnst $ Syntax_Phases.term_of_typ ctxt S
                               $ Syntax_Phases.term_of_typ ctxt T
            , ts)
        | _ => raise Match
  else raise Match
end›

definition coerce::"'a::mem_type ⇒ 'b::mem_type" where
  "coerce v = from_bytes (to_bytes_p v)"

syntax
  "_coerce" :: "type  type  logic"
    ((‹indent=1 notation=‹mixfix COERCE››COERCE/(‹indent=1 notation=‹infix coerce››'(_ → _'))))
syntax_consts
  "_coerce" == coerce
translations
  "COERCE('a  'b)" => "CONST coerce :: ('a   'b)"
typed_print_translation
  ‹[(@{const_syntax coerce}, Coerce_Syntax.coerce_tr' @{syntax_const "_coerce"})]›

lemma coerce_id[simp]: 
  shows "coerce v = v"
  by (simp add: coerce_def)

lemma coerce_cancel_packed[simp]:
  fixes v::"'a::packed_type"
  assumes sz_eq: "size_of (TYPE('a)) = size_of (TYPE('b))"
  shows "coerce ((coerce v)::'b::packed_type) = v"
  using assms
  apply (simp add: coerce_def)
  by (metis (mono_tags, lifting) access_ti0_def access_ti0_to_bytes 
      field_access_update_same(1) inv_p length_replicate length_to_bytes_p 
      packed_type_intro_simps(1) wf_fd)

definition coerce_map:: "('a::mem_type ⇒ 'a) ⇒ ('b::mem_type ⇒ 'b)" where
  "coerce_map f v = coerce (f (coerce v))"

lemma coerce_map_id[simp]: "coerce_map f (coerce v) = f v"
  by (simp add: coerce_map_def)

lemma coerce_coerce_map_cancel_packed[simp]: 
  fixes f::"'a::packed_type  'a"
  fixes v::"'b::packed_type" 
  assumes sz_eq[simp]: "size_of (TYPE('a)) = size_of (TYPE('b))"
  shows "((coerce (coerce_map f v))::'a) = f (coerce v)"
  by (simp add: coerce_map_def)




named_theorems global_const_defs and 
  global_const_array_selectors and 
  global_const_non_array_selectors and
  global_const_selectors

named_theorems fun_ptr_distinct 
named_theorems fun_ptr_subtree
named_theorems disjoint_𝒢_𝒮

named_bindings fun_ptr_map_of_default_eqs and fun_ptr_map_of_default_fallthrough_eqs and
  fun_ptr_guards and fun_ptr_not_NULL and fun_ptr_simps and fun_ptr_undefined_simps

text ‹
We integrate mllex and mlyacc directly into Isabelle:
▪ We compile the SML files according to the description in 🗏‹tools/mlyacc/src/FILES›.
▪ We export the necessary signatures and structures to the Isabelle/ML environment.
▪ As mllex / mlyacc operate directly on files we invoke them on temporary files and
  redirect stdout / stderr to display the messages within PIDE. We wrap this in the
  Isabelle commands @{command mllex} and @{command mlyacc}.
›
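
text ‹For illustration, the intended usage is (cf. the actual invocations for the C grammar
further below): mllex "StrictC.lex" produces StrictC.lex.sml next to its input, and
mlyacc "StrictC.grm" produces StrictC.grm.sig and StrictC.grm.sml; the generated SML
files are then loaded with ML_file.›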

SML_import ‹infix 1 |> val op |> = Basics.|>
  structure File_Stream = File_Stream
  val make_path = Path.explode o ML_System.standard_path›
SML_file ‹tools/mllex/mllex.ML›
SML_export ‹structure LexGen = LexGen›

SML_file ‹tools/mlyacc/mlyacclib/MLY_base-sig.ML›
SML_file ‹tools/mlyacc/mlyacclib/MLY_stream.ML›
SML_file ‹tools/mlyacc/mlyacclib/MLY_lrtable.ML›
SML_file ‹tools/mlyacc/mlyacclib/MLY_join.ML›
SML_file ‹tools/mlyacc/mlyacclib/MLY_parser2.ML›
SML_file ‹tools/mlyacc/src/utils.ML›
SML_file ‹tools/mlyacc/src/sigs.ML›
SML_file ‹tools/mlyacc/src/hdr.ML›
SML_file ‹tools/mlyacc/src/yacc-grm-sig.sml›
SML_file ‹tools/mlyacc/src/yacc-grm.sml›
SML_file ‹tools/mlyacc/src/yacc.lex.sml›
SML_file ‹tools/mlyacc/src/parse.ML›
SML_file ‹tools/mlyacc/src/grammar.ML›
SML_file ‹tools/mlyacc/src/core.ML›
SML_file ‹tools/mlyacc/src/coreutils.ML›
SML_file ‹tools/mlyacc/src/graph.ML›
SML_file ‹tools/mlyacc/src/look.ML›
SML_file ‹tools/mlyacc/src/lalr.ML›
SML_file ‹tools/mlyacc/src/mklrtable.ML›
SML_file ‹tools/mlyacc/src/mkprstruct.ML›
SML_file ‹tools/mlyacc/src/shrink.ML›
SML_file ‹tools/mlyacc/src/verbose.ML›
SML_file ‹tools/mlyacc/src/absyn-sig.ML›
SML_file ‹tools/mlyacc/src/absyn.ML›
SML_file ‹tools/mlyacc/src/yacc.ML›
SML_file ‹tools/mlyacc/src/link.ML›

SML_export ‹signature LR_TABLE = LR_TABLE›
SML_export ‹structure LrTable = LrTable›
SML_export ‹signature LR_PARSER = LR_PARSER›
SML_export ‹structure LrParser = LrParser›
SML_export ‹signature TOKEN = TOKEN›
SML_export ‹signature PARSER_DATA = PARSER_DATA›
SML_export ‹signature PARSE_GEN = PARSE_GEN›
SML_export ‹signature LEXER = LEXER›
SML_export ‹signature PARSER_DATA = PARSER_DATA›
SML_export ‹signature PARSER = PARSER›
SML_export ‹signature ARG_PARSER = ARG_PARSER›
SML_export ‹structure ParseGen = ParseGen›

SML_export ‹functor Join(structure Lex : LEXER
             structure ParserData: PARSER_DATA
             structure LrParser : LR_PARSER
             sharing ParserData.LrTable = LrParser.LrTable
             sharing ParserData.Token = LrParser.Token
             sharing type Lex.UserDeclarations.svalue = ParserData.svalue
             sharing type Lex.UserDeclarations.pos = ParserData.pos
             sharing type Lex.UserDeclarations.token = ParserData.Token.token): PARSER =
  Join(structure Lex = Lex 
       structure ParserData = ParserData 
       structure LrParser = LrParser)›
SML_export ‹functor JoinWithArg(structure Lex : ARG_LEXER
             structure ParserData: PARSER_DATA
             structure LrParser : LR_PARSER
             sharing ParserData.LrTable = LrParser.LrTable
             sharing ParserData.Token = LrParser.Token
             sharing type Lex.UserDeclarations.svalue = ParserData.svalue
             sharing type Lex.UserDeclarations.pos = ParserData.pos
             sharing type Lex.UserDeclarations.token = ParserData.Token.token)
                 : ARG_PARSER  = 
   JoinWithArg(structure Lex = Lex 
       structure ParserData = ParserData 
       structure LrParser = LrParser)›

ML ‹local

val tmp_io = Synchronized.var "tmp_io" ()

fun system_command cmd =
  if Isabelle_System.bash cmd <> 0 then error ("System command failed: " ^ cmd) else ();


fun copy_file qualify_ref src0 dst0 =
  let
    val src = Path.expand src0;
    val dst = Path.expand dst0;
    val target = if File.is_dir dst then Path.append dst (Path.base src) else dst;
  in
    if File.eq (src, target) then 
      ()
    else if qualify_ref then
      ignore (system_command ("sed 's/ ref / Unsynchronized.ref /g' " ^ File.bash_path src  ^ " > " ^ File.bash_path target))
    else 
      ignore (system_command ("cp -f " ^ File.bash_path src ^ " " ^ File.bash_path target))
  end;

fun file_command qualify_ref tag exts f files thy = 
  let
    val (files, thy) = files thy
    val {src_path, lines,...}: Token.file = the_single files
    val filename = Path.file_name src_path
    val full_src_path = Path.append (Resources.master_directory thy) src_path
    val _ = Isabelle_System.with_tmp_dir tag (fn tmp_dir => Synchronized.change tmp_io (fn _ =>
      let
        val orig_stdOut = TextIO.getOutstream TextIO.stdOut
        val orig_stdErr = TextIO.getOutstream TextIO.stdErr
        val out_file = (Utils.sanitized_path thy tmp_dir (Path.ext "out" src_path))
        val err_file = (Utils.sanitized_path thy tmp_dir (Path.ext "err" src_path))
        val dir = Path.dir out_file
        val _ = Isabelle_System.make_directory dir
        val stdOut = TextIO.openOut (File.platform_path out_file)
        val stdErr = TextIO.openOut (File.platform_path err_file)

        val _ = TextIO.setOutstream (TextIO.stdOut, TextIO.getOutstream stdOut)
        val _ = TextIO.setOutstream (TextIO.stdErr, TextIO.getOutstream stdErr)

        val tmp_file = Utils.sanitized_path thy tmp_dir src_path

        val _ = File.write tmp_file (cat_lines lines)
        val res = Exn.result f (File.platform_path tmp_file)


        val _ = TextIO.setOutstream (TextIO.stdOut, orig_stdOut)
        val _ = TextIO.setOutstream (TextIO.stdErr, orig_stdErr)
        val _ = TextIO.closeOut stdOut
        val _ = TextIO.closeOut stdErr
        val lines = filter (fn s => s <> "") (File.read_lines out_file @ File.read_lines err_file)
        val msg = if null lines then "" else ":\n" ^ (prefix_lines "  " (cat_lines lines))
      in 
        case res of
           Exn.Res () =>
            let 
              val result_files = map (fn ext => (Path.ext ext tmp_file, 
                      Path.ext ext full_src_path)) exts
              val _ = app (fn (src, dst) => copy_file qualify_ref src dst) result_files
              val _ = tracing (Markup.markup Markup.keyword1 tag ^ " " ^ quote filename ^ " succeeded" ^ msg)
            in () end 
        |  Exn.Exn exn =>
            error (Markup.markup Markup.keyword1 tag ^ " " ^ quote filename ^ " failed" ^ msg ^
              "\n" ^ Runtime.exn_message exn)
      end))
    
  in 
    thy
  end
in
val _ =
  Outer_Syntax.command
    @{command_keyword "mllex"} "generate lexer" 
   (Resources.provide_parse_files single >> (Toplevel.theory o (file_command true "mllex" ["sml"] LexGen.lexGen)))
val _ =
  Outer_Syntax.command
    @{command_keyword "mlyacc"} "generate parser" 
   (Resources.provide_parse_files single >> (Toplevel.theory o (file_command true "mlyacc" ["sig", "sml"] ParseGen.parseGen)))

end›


primrec map_of_default ::
  "('p ⇒ 'a) ⇒ ('p * 'a) list ⇒ 'p ⇒ 'a"
where
  "map_of_default d [] x = d x"
| "map_of_default d (x # xs) x' = (if fst x = x' then snd x else map_of_default d xs x')"

lemma map_of_default_fun_upd_conv: 
  "map_of_default d ((p, f)#fs) = (map_of_default d fs)(p := f)"
  by (auto split: if_split_asm)

lemma map_of_default_append: ‹map_of_default d (xs @ ys) = map_of_default (map_of_default d ys) xs›
  by (induction xs arbitrary: d ys) auto

lemma map_of_default_map_of_conv: 
  ‹map_of_default d xs p = (case map_of xs p of Some f ⇒ f | None ⇒ d p)›
  by (induction xs) (auto simp add: fun_upd_same fun_upd_other)

lemma map_of_default_fallthrough: 
  "p  set (map fst xs)  map_of_default d xs p = d p"
  by (induct xs) auto

lemma map_of_default_distinct:
  assumes "distinct (map fst xs)"
  shows "list_all (λ(p, f). map_of_default d xs p = f) xs"
  using assms
  apply  (induction xs)
   apply simp_all
  by (smt (verit, ccfv_SIG) image_iff list.pred_mono_strong prod.case_eq_if)

lemma map_of_default_default_conv:
  assumes "list_all (λ(p, f). d p = f) xs"
  shows "map_of_default d xs = d"
  using assms
  by (induct xs) auto

lemma map_of_default_monotone_cons[partial_function_mono]:
  assumes f1 [partial_function_mono]: "monotone R X f1"
  assumes [partial_function_mono]: "monotone R X (λf. map_of_default (d f) (xs f) p)"
  shows "monotone R X (λf. map_of_default (d f) ((p1, f1 f)#xs f) p)"
  apply (simp only: map_of_default.simps fst_conv snd_conv cong: if_cong)
  apply (intro partial_function_mono)
  done

lemma monotone_if_fun': "monotone orda (fun_ord ordb) F ⟹
  monotone orda (fun_ord ordb) G ⟹
  monotone orda (fun_ord ordb) (λf n. if c n then F f n else G f n)"
  by(simp add: monotone_def fun_ord_def)

lemma map_of_default_monotone_fun_ord_cons[partial_function_mono]:
  assumes f1 [partial_function_mono]: "monotone R X f1"
  assumes [partial_function_mono]: "monotone R (fun_ord X) (λf. map_of_default (d f) (xs f))"
  shows "monotone R (fun_ord X) (λf. map_of_default (d f) ((p1, f1 f)#(xs f)))"
  apply (simp only: map_of_default.simps [abs_def] fst_conv snd_conv cong: if_cong)
  apply (intro monotone_if_fun' partial_function_mono)
  using f1
  by (simp add: monotone_def fun_ord_def)

lemma map_of_default_monotone_fun_ord_nil[partial_function_mono]:
  assumes f1 [partial_function_mono]: "monotone R X d"
  shows "monotone R X (λf. map_of_default (d f) [])"
  apply (simp only: map_of_default.simps [abs_def] fst_conv snd_conv cong: if_cong)
  apply (intro monotone_if_fun' partial_function_mono)
  done

primrec tree_of :: "'a list ⇒ 'a tree"
where
  "tree_of [] = Tip"
| "tree_of (x#xs) = Node (Tip) x False (tree_of xs)"

lemma set_of_tree_of: "set_of (tree_of xs) = set xs"
  by (induct xs) auto
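
text ‹Illustrative example (added): the element set of the constructed tree is exactly the
set of list elements.›

lemma "set_of (tree_of [1::nat, 2, 3]) = {1, 2, 3}"
  by (simp add: set_of_tree_of)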

lemma all_distinct_tree_of:
  assumes "all_distinct (tree_of xs)"
  shows "distinct xs"
  using assms by (induct xs) (auto simp add: set_of_tree_of)

lemma all_distinct_tree_of': 
"all_distinct t  tree_of xs  t  distinct xs"
  by (simp add: all_distinct_tree_of)

lemma map_of_default_fallthrough': 
  "map fst xs  ps  tree_of ps  t  p  set_of t  map_of_default d xs p = d p"
  apply (rule map_of_default_fallthrough)
  using set_of_tree_of [of "map fst xs"]
  apply simp
  done

primrec list_of :: "'a tree ⇒ 'a list"
  where
  "list_of Tip = []"
| "list_of (Node l x d r) =  list_of l @ (if d then [] else [x]) @ list_of r"

lemma list_of_tree_of_conv [simp]: "list_of (tree_of xs) = xs"
  by (induct xs) auto

lemma set_list_of_set_of_conv: "set (list_of t) = set_of t"
  by (induct t) auto

lemma all_distinct_list_of:
  assumes "all_distinct t"
  shows "distinct (list_of t)"
  using assms by (induct t) (auto simp add: set_list_of_set_of_conv)

lemma map_of_default_distinct_lookup_list_all: 
  "distinct (map fst xs)  list_all (λ(p, f). map_of_default d xs p = f) xs"
  by (induct xs) (auto simp add: case_prod_beta' image_iff list.pred_mono_strong)

lemma map_of_default_distinct_lookup_list_all': 
  assumes ps: "map fst xs  ps" 
  assumes t: "tree_of ps  t"
  assumes dist: "all_distinct t"
  shows "list_all (λ(p, f). map_of_default d xs p = f) xs"
  using  all_distinct_tree_of' [OF dist t] ps
  by (simp add: map_of_default_distinct_lookup_list_all)

lemma map_of_default_block_cong: "map_of_default d xs p = map_of_default d xs p"
  by (rule refl)

lemma list_all_block_cong: "list_all P xs = list_all P xs"
  by (rule refl)

lemma map_of_default_distinct_lookup_list_all'': 
  assumes t: "list_of t  ps"
  assumes ps: "map fst xs  ps"
  assumes dist: "all_distinct t"
  shows "list_all (λ(p, f). map_of_default d xs p = f) xs"
  by (metis all_distinct_list_of dist map_of_default_distinct_lookup_list_all ps t)


lemma map_of_default_other_lookup_list_all: 
  "set ps  set (map fst xs) = {}  list_all (λp. map_of_default d xs p = d p) ps"
  using map_of_default_fallthrough [where d=d and xs= xs]
  apply (induct xs) 
   apply (clarsimp simp add: list.pred_True )
  by (smt (verit, best) IntI ball_empty list.pred_set)


lemma delete_Some_subset: "DistinctTreeProver.delete x t = Some t' ⟹ set_of t ⊆ {x} ∪ set_of t'"
  by (induct t arbitrary: t') (auto split: option.splits if_split_asm)

lemma delete_Some_set_of_union:
  assumes del: "DistinctTreeProver.delete x t = Some t'" shows "set_of t = {x}  set_of t'"
proof -
  from delete_Some_set_of [OF del] have "set_of t' ⊆ set_of t" .
  moreover
  from delete_Some_x_set_of [OF del] obtain "x ∈ set_of t" "x ∉ set_of t'"
    by simp
  ultimately have "{x}  set_of t'  set_of t"
    by blast
  with delete_Some_subset [OF del]
  show ?thesis by blast
qed

primrec undeleted :: "'a tree  bool"
  where
  "undeleted Tip = True"
| "undeleted (Node l y d r) = (¬ d  undeleted l  undeleted r)"

lemma undeleted_tree_of[simp]: "undeleted (tree_of xs)"
  by (induct xs) auto

lemma subtract_union_subset:
  "subtract t1 t2 = Some t ⟹ undeleted t1 ⟹ set_of t2 ⊆ set_of t1 ∪ set_of t"
proof (induct t1 arbitrary: t2 t)
  case Tip thus ?case by simp
next
  case (Node l x b r)
  have sub: "subtract (Node l x b r) t2 = Some t" by fact
  from Node.prems obtain not_b: "¬ b" and undeleted_l: "undeleted l" and undeleted_r: "undeleted r"
    by simp
  from subtract_Some_set_of [OF sub] have sub_t2: "set_of (Node l x b r) ⊆ set_of t2" .

  show ?case
  proof (cases "DistinctTreeProver.delete x t2")
    case (Some t2')
    note del_x_Some = this
    from delete_Some_set_of_union [OF Some] 
    have t2'_t2: "set_of t2 = {x}  set_of t2'" .
    show ?thesis
    proof (cases "subtract l t2'")
      case (Some t2'')
      note sub_l_Some = this
      from Node.hyps (1) [OF Some undeleted_l] 
      have t2''_t2': "set_of t2'  set_of l  set_of t2''" .
      show ?thesis
      proof (cases "subtract r t2''")
        case (Some t2''')
        from Node.hyps (2) [OF Some undeleted_r] 
        have "set_of t2''  set_of r  set_of t2'''" .
        with Some sub_l_Some del_x_Some sub t2''_t2' t2'_t2 not_b
        show ?thesis
          by auto
      next
        case None
        with del_x_Some sub_l_Some sub
        show ?thesis
          by simp
      qed
    next
      case None
      with del_x_Some sub 
      show ?thesis
        by simp
    qed
  next
    case None
    with sub show ?thesis by simp
  qed
qed


lemma subtract_union_eq: 
  assumes sub: "subtract t1 t2 = Some t"
  assumes und: "undeleted t1"
  shows "set_of t2 = set_of t1  set_of t"
proof
  from sub und
  show "set_of t2  set_of t1  set_of t"
    by (rule subtract_union_subset)
next
  from subtract_Some_set_of_res [OF sub] have "set_of t ⊆ set_of t2".
  moreover from subtract_Some_set_of [OF sub] have "set_of t1 ⊆ set_of t2" .
  ultimately
  show "set_of t1  set_of t  set_of t2" by blast
qed



lemma subtract_empty:
  assumes sub: "subtract t1 t2 = Some t" 
  assumes und: "undeleted t1"
  assumes empty: "set_of t = {}"
  shows "set_of t1 = set_of t2"
  using subtract_union_eq [OF sub und] empty by simp


lemma map_of_default_other_lookup_Ball:
  assumes ps: "list_of t  ps"
  assumes map_fst: "map fst xs  ps"
  assumes sub: "subtract t t_all = Some t_sub"
  shows "p  set_of t_sub. map_of_default d xs p = d p"
  by (metis disjoint_iff map_fst map_of_default_fallthrough ps set_list_of_set_of_conv sub subtract_Some_dist_res)


lemma map_of_default_other_lookup_list_all':
  assumes ps: "list_of t  ps"
  assumes map_fst: "map fst xs  ps"
  assumes sub: "subtract t t_all = Some t_sub"
  assumes qs: "list_of t_sub  qs"
  shows "list_all (λp. map_of_default d xs p = d p) qs"
proof -
  from qs have "set_of t_sub = set qs"
    by (metis set_list_of_set_of_conv)
  from map_of_default_other_lookup_Ball [OF ps map_fst sub] this
  show ?thesis 
    by (simp add: Ball_set)
qed

lemma map_of_default_fnptr_guard_NULL:
  assumes map_fst: "map fst xs  ps"
  assumes guard: "list_all (c_fnptr_guard) ps  True"
  shows "map_of_default d xs NULL = d NULL"
proof -
  have "NULL  set (map fst xs)"
    using map_fst guard
    by (metis c_fnptr_guard_NULL list.pred_set)
  from map_of_default_fallthrough [OF this]
  show ?thesis
    by simp
qed



lemma subtract_set_of_exchange_first:
  assumes sub1: "subtract t1 t = Some t'" 
  assumes sub2: "subtract t2 t = Some t''" 
  assumes und1: "undeleted t1" 
  assumes und2: "undeleted t2" 
  assumes seq: "set_of t1 = set_of t2" 
  shows "set_of t' = set_of t''"
  using subtract_union_eq [OF sub1 und1] subtract_union_eq [OF sub2 und2] seq
   subtract_Some_dist_res [OF sub1] subtract_Some_dist_res [OF sub2]
  by blast



lemma TWO: "Suc (Suc 0) = 2" by arith

definition
  fun_addr_of :: "int  unit ptr" where
  "fun_addr_of i  Ptr (word_of_int i)"

definition
  ptr_range :: "'a::c_type ptr  addr set" where
  "ptr_range p  {ptr_val (p::'a ptr) ..< ptr_val p + word_of_int(int(size_of (TYPE('a)))) }"

lemma guarded_spec_body_wp [vcg_hoare]:
  "P  {s. (t. (s,t)  R  t  Q)  (Ft  F  (t. (s,t)  R))}
   Γ,Θ⊢⇘/FP (guarded_spec_body Ft R) Q, A"
  apply (simp add: guarded_spec_body_def)
  apply (cases "Ft  F")
   apply (erule HoarePartialDef.Guarantee)
   apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
   apply auto[1]
  apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P=P])
   apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
   apply auto[1]
  apply simp
  apply (erule order_trans)
  apply (auto simp: image_def Bex_def)
  done

lemma spec_pre_post_wp [vcg_hoare]:
  "P  {s. (t. (s,t)  R  t  Q)  ((F_pre  F  s  Pre)  (F_exists_post  F  (t. (s,t)  R)))}
   Γ,Θ⊢⇘/FP (spec_pre_post F_pre F_exists_post Pre R) Q, A"
  apply (simp add: spec_pre_post_def)
  apply (cases "F_pre  F ")
  subgoal
    apply (erule HoarePartialDef.Guarantee)
    apply (cases "F_exists_post  F ")
     apply (erule HoarePartialDef.Guarantee)
     apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
    subgoal
      by auto
    subgoal
      apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P="(Pre  P)"])
      subgoal
        apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
        apply auto
        done
      subgoal
        by auto
      done
    done
  subgoal
    apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P=P])
    subgoal 
      apply (cases "F_exists_post  F ")
       apply (erule HoarePartialDef.Guarantee)
       apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
      subgoal
        by auto
      subgoal
        apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P="(Pre  P)"])
        subgoal
          apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
          apply auto
          done
        subgoal
          by auto
        done
      done
    subgoal by auto
    done
  done

lemma guarded_spec_pre_post_wp [vcg_hoare]:
  "P  {s. (t. (s,t)  R  t  Q)  ((F_pre  F  s  fst ` R)  (F_post  F  (t. (s,t)  R)))}
   Γ,Θ⊢⇘/FP (guarded_spec_pre_post F_pre F_post R) Q, A"
  apply (simp add: guarded_spec_pre_post_def)
  apply (cases "F_pre  F ")
  subgoal
    apply (erule HoarePartialDef.Guarantee)
    apply (cases "F_post  F ")
     apply (erule HoarePartialDef.Guarantee)
     apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
    subgoal
      by auto
    subgoal
      apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P="(fst ` R  P)"])
      subgoal
        apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
        apply auto
        done
      subgoal
        by auto
      done
    done
  subgoal
    apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P=P])
    subgoal 
      apply (cases "F_post  F ")
       apply (erule HoarePartialDef.Guarantee)
       apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
      subgoal
        by auto
      subgoal
        apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Guard[where P="(fst ` R  P)"])
        subgoal
          apply (rule HoarePartialDef.conseqPre, rule HoarePartialDef.Spec)
          apply auto
          done
        subgoal
          by auto
        done
      done
    subgoal by auto
    done
  done



named_theorems recursive_records_fold_congs and recursive_records_split_all_eqs

ML_file "../lib/ml-helpers/StringExtras.ML"
ML_file "topo_sort.ML"
ML_file "isa_termstypes.ML"

mllex "StrictC.lex"
mlyacc "StrictC.grm"

ML_file "StrictC.grm.sig"
ML_file "StrictC.grm.sml"
ML_file "StrictC.lex.sml"

ML_file "StrictCParser.ML"
ML_file "complit.ML"
ML_file "hp_termstypes.ML"
ML_file "termstypes-sig.ML"
term "word_of_int"
ML_file "termstypes.ML"
ML_file "recursive_records/recursive_record_pp.ML"
ML_file "recursive_records/recursive_record_package.ML"
ML_file "UMM_termstypes.ML"
ML_file "expression_typing.ML"
ML_file "program_analysis.ML"



context
begin
ML_file "cached_theory_simproc.ML"
ML_file "UMM_Proofs.ML"
end

simproc_setup size_of_bound (‹size_of TYPE('a::c_type) ≤ n›) = ‹K (fn ctxt => fn ct =>
  let
    val _ = (case Thm.term_of ct of
        @{term_pat "size_of ?T  _ "} => if UMM_Proofs.is_ctype T then () else raise Match
      | _ => raise Match)
    val ctxt' = ctxt addsimps (Named_Theorems.get ctxt @{named_theorems size_simps})
    val eq = Simplifier.asm_full_rewrite ctxt' ct
    val _ = Utils.verbose_msg 5 ctxt (fn _ => "size_of_bound: " ^ Thm.string_of_thm ctxt eq)
    val rhs = Thm.rhs_of eq |> Thm.term_of
  in
    if rhs aconv \<^term>‹True› orelse rhs aconv \<^term>‹False› then
      SOME eq
    else
      NONE
  end
  handle Match => NONE)›

named_theorems enum_defs

ML_file "heapstatetype.ML"
ML_file "MemoryModelExtras-sig.ML"
ML_file "MemoryModelExtras.ML"
ML_file "calculate_state.ML"
ML_file "syntax_transforms.ML"
ML_file "expression_translation.ML"
ML_file "modifies_proofs.ML"
ML_file "HPInter.ML"
ML_file "stmt_translation.ML"


method_setup vcg = ‹Hoare.vcg (*|> then_shorten_names*)›
    "Simpl 'vcg' followed by C parser 'shorten_names'"

method_setup vcg_step = ‹Hoare.vcg_step (*|> then_shorten_names*)›
    "Simpl 'vcg_step' followed by C parser 'shorten_names'"

declare typ_info_word [simp del]
declare typ_info_ptr [simp del]

lemma valid_call_Spec_eq_subset:
  "Γ' procname = Some (Spec R) 
  HoarePartialDef.valid Γ' NF P (Call procname) Q A = (P  fst ` R  (R  (- P) × UNIV  UNIV × Q))"
  apply (safe; simp?)
  subgoal for x
    apply (clarsimp simp: HoarePartialDef.valid_def)
    apply (rule ccontr)
    apply (drule_tac x="Normal x" in spec, elim allE)
    apply (drule mp, erule exec.Call, rule exec.SpecStuck)
     apply (auto simp: image_def)[2]
    done
   apply (clarsimp simp: HoarePartialDef.valid_def)
   apply (elim allE, drule mp, erule exec.Call, erule exec.Spec)
   apply auto[1]
  apply (clarsimp simp: HoarePartialDef.valid_def)
  apply (erule exec_Normal_elim_cases, simp_all)
  apply (erule exec_Normal_elim_cases, auto)
  done


lemma creturn_wp [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Return)) (rvupd (λ_. v s) s)  A}"
  shows "Γ,Θ⊢⇘/FP creturn exnupd rvupd v Q, A"
  unfolding creturn_def
  by vcg

lemma creturn_wp_total [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Return)) (rvupd (λ_. v s) s)  A}"
  shows "Γ,Θt⇘/FP creturn exnupd rvupd v Q, A"
  unfolding creturn_def
  by vcg


lemma creturn_void_wp [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Return)) s  A}"
  shows "Γ,Θ⊢⇘/FP creturn_void exnupd Q, A"
  unfolding creturn_void_def
  by vcg

lemma creturn_void_wp_total [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Return)) s  A}"
  shows "Γ,Θt⇘/FP creturn_void exnupd Q, A"
  unfolding creturn_void_def
  by vcg


lemma cbreak_wp [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Break)) s  A}"
  shows "Γ,Θ⊢⇘/FP cbreak exnupd Q, A"
  unfolding cbreak_def
  by vcg

lemma cbreak_wp_total [vcg_hoare]:
  assumes "P ⊆ {s. (exnupd (λ_. Break)) s ∈ A}"
  shows "Γ,Θ⊢⇩t⇘/F⇙ P cbreak exnupd Q, A"
  unfolding cbreak_def
  by vcg

lemma ccatchbrk_wp [vcg_hoare]:
  assumes "P  {s. (exnupd s = Break  s  Q) 
                    (exnupd s  Break  s  A)}"
  shows "Γ,Θ⊢⇘/FP ccatchbrk exnupd Q, A"
  unfolding ccatchbrk_def
  by vcg

lemma ccatchbrk_wp_total [vcg_hoare]:
  assumes "P  {s. (exnupd s = Break  s  Q) 
                    (exnupd s  Break  s  A)}"
  shows "Γ,Θt⇘/FP ccatchbrk exnupd Q, A"
  unfolding ccatchbrk_def
  by vcg

lemma cgoto_wp [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Goto l)) s  A}"
  shows "Γ,Θ⊢⇘/FP cgoto l exnupd Q, A"
  unfolding cgoto_def
  by vcg

lemma cgoto_wp_total [vcg_hoare]:
  assumes "P  {s. (exnupd (λ_. Goto l)) s  A}"
  shows "Γ,Θt⇘/FP cgoto l exnupd Q, A"
  unfolding cgoto_def
  by vcg

lemma ccatchgoto_wp [vcg_hoare]:
  assumes "P  {s. (exnupd s = Goto l  s  Q) 
                    (exnupd s  Goto l  s  A)}"
  shows "Γ,Θ⊢⇘/FP ccatchgoto l exnupd Q, A"
  unfolding ccatchgoto_def
  by vcg

lemma ccatchgoto_wp_total [vcg_hoare]:
  assumes "P  {s. (exnupd s = Goto l  s  Q) 
                    (exnupd s  Goto l  s  A)}"
  shows "Γ,Θt⇘/FP ccatchgoto l exnupd Q, A"
  unfolding ccatchgoto_def
  by vcg

lemma ccatchreturn_wp [vcg_hoare]:
  assumes "P  {s. (is_local (exnupd s)  s  Q) 
                    (¬ is_local (exnupd s)  s  A)}"
  shows "Γ,Θ⊢⇘/FP ccatchreturn exnupd Q, A"
  unfolding ccatchreturn_def 
  by vcg

lemma ccatchreturn_wp_total [vcg_hoare]:
  assumes "P  {s. (is_local (exnupd s)  s  Q) 
                    (¬ is_local (exnupd s)  s  A)}"
  shows "Γ,Θt⇘/FP ccatchreturn exnupd Q, A"
  unfolding ccatchreturn_def 
  by vcg

lemma cexit_wp [vcg_hoare]:
  assumes "P  {s. exnupd s  A}"
  shows "Γ,Θ⊢⇘/FP cexit exnupd Q, A"
  unfolding cexit_def 
  by vcg

lemma cexit_wp_total [vcg_hoare]:
  assumes "P  {s. exnupd s  A}"
  shows "Γ,Θt⇘/FP cexit exnupd Q, A"
  unfolding cexit_def 
  by vcg

lemma cchaos_wp [vcg_hoare]:
  assumes "P   {s. x. (v x s)  Q }"
  shows "Γ,Θ⊢⇘/FP cchaos v Q, A"
  unfolding cchaos_def
  using assms by (blast intro: HoarePartial.Spec)

lemma cchaos_wp_total [vcg_hoare]:
  assumes "P   {s. x. (v x s)  Q }"
  shows "Γ,Θt⇘/FP cchaos v Q, A"
  unfolding cchaos_def
  using assms by (blast intro: HoareTotal.Spec)

lemma lvar_nondet_init_wp [vcg_hoare]:
  "P  {s. v. (upd (λ_. v)) s  Q}  Γ,Θ⊢⇘/FP lvar_nondet_init upd Q, A"
  unfolding lvar_nondet_init_def
  by (rule HoarePartialDef.conseqPre, vcg, auto)

lemma lvar_nondet_init_wp_total [vcg_hoare]:
  "P  {s. v. (upd (λ_. v)) s  Q}  Γ,Θt⇘/FP lvar_nondet_init upd Q, A"
  unfolding lvar_nondet_init_def
  by (rule HoareTotalDef.conseqPre, vcg, auto)

lemma Seq_propagate_precond:
  "Γ,Θ⊢⇘/FP c1 P,A; Γ,Θ⊢⇘/FP c2 Q,A  Γ,Θ⊢⇘/FP (Seq c1 c2) Q,A"
  apply (rule hoarep.Seq)
   apply assumption
  apply assumption
  done

lemma Seq_propagate_precond_total:
  "Γ,Θt⇘/FP c1 P,A; Γ,Θt⇘/FP c2 Q,A  Γ,Θt⇘/FP (Seq c1 c2) Q,A"
  apply (rule hoaret.Seq)
   apply assumption
  apply assumption
  done

lemma mem_safe_lvar_init [simp,intro]:
  assumes upd: "g v s. globals_update g (upd (λ_. v) s) = upd (λ_. v) (globals_update g s)"
  assumes acc: "v s. globals (upd (λ_. v) s) = globals s"
  shows "mem_safe (lvar_nondet_init upd) x"
  apply (clarsimp simp: mem_safe_def lvar_nondet_init_def)
  apply (erule exec.cases; clarsimp simp: restrict_safe_def)
   apply (fastforce simp: restrict_safe_OK_def restrict_htd_def upd acc intro: exec.Spec)
  apply (simp add: exec_fatal_def)
  apply (fastforce simp: exec_fatal_def restrict_htd_def upd acc intro!: exec.SpecStuck)
  done

lemma intra_safe_lvar_nondet_init [simp]:
  "intra_safe (lvar_nondet_init upd :: (('a::heap_state_type','l,'e,'x) state_scheme,'p,'f) com) =
   (∀Γ. mem_safe (lvar_nondet_init upd :: (('a::heap_state_type','l,'e,'x) state_scheme,'p,'f) com)
                 (Γ :: (('a,'l,'e,'x) state_scheme,'p,'f) body))"
  by (simp add: lvar_nondet_init_def)


lemma proc_deps_lvar_nondet_init [simp]:
  "proc_deps (lvar_nondet_init upd) Γ = {}"
  by (simp add: lvar_nondet_init_def)

declare word_neq_0_conv[simp]


declare [[hoare_use_generalise=true]]
end