section ‹Lens Instances›

theory Lens_Instances
  imports Lens_Order Lens_Symmetric Scene_Spaces "HOL-Eisbach.Eisbach" "HOL-Library.Stream"
  keywords "alphabet" "statespace" :: "thy_defn"
begin

text ‹In this section we define a number of concrete instantiations of the lens locales, including
  function lenses, list lenses, and record lenses.›

subsection ‹Function Lens›

text ‹A function lens views the valuation associated with a particular domain element @{typ "'a"}.
      We require that the range type of a function lens has a cardinality of at least 2; this ensures
      that properties of independence are provable.›

definition fun_lens :: "'a ⇒ ('b::two ⟹ ('a ⇒ 'b))" where
[lens_defs]: "fun_lens x = ⦇ lens_get = (λ f. f x), lens_put = (λ f u. f(x := u)) ⦈"

lemma fun_vwb_lens: "vwb_lens (fun_lens x)"
  by (unfold_locales, simp_all add: fun_lens_def)
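
text ‹ As an illustrative sanity check (not needed by the rest of the development), the get and
  put functions of a function lens simply apply, respectively update, the function at the chosen
  point: ›

lemma "lens_get (fun_lens k) f = f k" and "lens_put (fun_lens k) f v = f(k := v)"
  by (simp_all add: fun_lens_def)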

text ‹Two function lenses are independent if and only if the domain elements are different.›
    
lemma fun_lens_indep:
  "fun_lens x  fun_lens y  x  y"
proof -
  obtain u v :: "'a::two" where "u ≠ v"
    using two_diff by auto
  thus ?thesis
    by (auto simp add: fun_lens_def lens_indep_def)
qed

subsection ‹Function Range Lens›
  
text ‹The function range lens allows us to focus on a particular region of a function's range.›

definition fun_ran_lens :: "('c ⟹ 'b) ⇒ (('a ⇒ 'b) ⟹ 'α) ⇒ (('a ⇒ 'c) ⟹ 'α)" where
[lens_defs]: "fun_ran_lens X Y = ⦇ lens_get = λ s. get⇘X⇙ ∘ get⇘Y⇙ s
                                 , lens_put = λ s v. put⇘Y⇙ s (λ x::'a. put⇘X⇙ (get⇘Y⇙ s x) (v x)) ⦈"

lemma fun_ran_mwb_lens: "⟦ mwb_lens X; mwb_lens Y ⟧ ⟹ mwb_lens (fun_ran_lens X Y)"
  by (unfold_locales, auto simp add: fun_ran_lens_def)

lemma fun_ran_wb_lens: "⟦ wb_lens X; wb_lens Y ⟧ ⟹ wb_lens (fun_ran_lens X Y)"
  by (unfold_locales, auto simp add: fun_ran_lens_def)

lemma fun_ran_vwb_lens: "⟦ vwb_lens X; vwb_lens Y ⟧ ⟹ vwb_lens (fun_ran_lens X Y)"
  by (unfold_locales, auto simp add: fun_ran_lens_def)
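
text ‹ As an informal usage sketch: taking ‹Y› to be the identity lens ‹1⇩L›, the lens
  ‹fun_ran_lens X 1⇩L› applies ‹X› pointwise to every element of a function's range. ›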

subsection ‹Map Lens›

text ‹The map lens allows us to focus on a particular region of a partial function's range. It
  is only a mainly well-behaved lens, because it does not satisfy the GetPut law when the given
  key is not in the domain of the map.› 
  
definition map_lens :: "'a ⇒ ('b ⟹ ('a ⇀ 'b))" where
[lens_defs]: "map_lens x = ⦇ lens_get = (λ f. the (f x)), lens_put = (λ f u. f(x ↦ u)) ⦈"

lemma map_mwb_lens: "mwb_lens (map_lens x)"
  by (unfold_locales, simp_all add: map_lens_def)
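
text ‹ For example (a small illustrative check), on a singleton map the get and put functions
  behave as expected: ›

lemma "lens_get (map_lens k) [k ↦ v] = v" and "lens_put (map_lens k) f v = f(k ↦ v)"
  by (simp_all add: map_lens_def)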

lemma source_map_lens: "𝒮⇘map_lens x⇙ = {f. x ∈ dom(f)}"
  by (force simp add: map_lens_def lens_source_def)

lemma pget_map_lens: "pget⇘map_lens k⇙ f = f k"
  by (auto simp add: lens_partial_get_def source_map_lens)
     (auto simp add: map_lens_def, metis not_Some_eq)

subsection ‹List Lens›

text ‹The list lens allows us to view a particular element of a list. In order to show it is mainly
  well-behaved we need to define two additional list functions. The following function adds 
  a number of undefined elements to the end of a list.›
  
definition list_pad_out :: "'a list ⇒ nat ⇒ 'a list" where
"list_pad_out xs k = xs @ replicate (k + 1 - length xs) undefined"

text ‹The following function is like @{term "list_update"} but it adds additional elements to the
  list if the list isn't long enough first.›

definition list_augment :: "'a list ⇒ nat ⇒ 'a ⇒ 'a list" where
"list_augment xs k v = (list_pad_out xs k)[k := v]"

text ‹The following function is like @{term nth} but it expressly returns @{term undefined} when
  the list isn't long enough.›

definition nth' :: "'a list ⇒ nat ⇒ 'a" where
"nth' xs i = (if (length xs > i) then xs ! i else undefined)"

text ‹We can prove some additional laws about list update and append.›

lemma list_update_append_lemma1: "i < length xs ⟹ xs[i := v] @ ys = (xs @ ys)[i := v]"
  by (simp add: list_update_append)

lemma list_update_append_lemma2: "i < length ys ⟹ xs @ ys[i := v] = (xs @ ys)[i + length xs := v]"
  by (simp add: list_update_append)

text ‹We can also prove some laws about our new operators.›
    
lemma nth'_0 [simp]: "nth' (x # xs) 0 = x"
  by (simp add: nth'_def)

lemma nth'_Suc [simp]: "nth' (x # xs) (Suc n) = nth' xs n"
  by (simp add: nth'_def)

lemma list_augment_0 [simp]:
  "list_augment (x # xs) 0 y = y # xs"
  by (simp add: list_augment_def list_pad_out_def)

lemma list_augment_Suc [simp]:
  "list_augment (x # xs) (Suc n) y = x # list_augment xs n y"
  by (simp add: list_augment_def list_pad_out_def)

lemma list_augment_twice:
  "list_augment (list_augment xs i u) j v = (list_pad_out xs (max i j))[i:=u, j:=v]"
  apply (auto simp add: list_augment_def list_pad_out_def list_update_append_lemma1 replicate_add[THEN sym] max_def)
  apply (metis Suc_le_mono add.commute diff_diff_add diff_le_mono le_add_diff_inverse2)
done

lemma list_augment_last [simp]:
  "list_augment (xs @ [y]) (length xs) z = xs @ [z]"
  by (induct xs, simp_all)

lemma list_augment_idem [simp]:
  "i < length xs  list_augment xs i (xs ! i) = xs"
  by (simp add: list_augment_def list_pad_out_def)
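
text ‹ As a brief sanity check, two schematic examples: augmenting within the bounds of the list
  behaves like @{term list_update}, and ‹nth'› retrieves elements positionally: ›

lemma "list_augment [a, b, c] (Suc 0) v = [a, v, c]"
  by simp

lemma "nth' [a, b, c] (Suc (Suc 0)) = c"
  by simp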

text ‹We can now prove that @{term list_augment} is commutative for different (arbitrary) indices.›
  
lemma list_augment_commute:
  "i  j  list_augment (list_augment σ j v) i u = list_augment (list_augment σ i u) j v"
  by (simp add: list_augment_twice list_update_swap max.commute)

text ‹We can also prove that we can always retrieve an element we have added to the list, since
  @{term list_augment} extends the list when necessary. This isn't true of @{term list_update}. ›
    
lemma nth_list_augment: "list_augment xs k v ! k = v"
  by (simp add: list_augment_def list_pad_out_def)
    
lemma nth'_list_augment: "nth' (list_augment xs k v) k = v"
  by (auto simp add: nth'_def nth_list_augment list_augment_def list_pad_out_def)

text ‹ The length is expanded if not already long enough, or otherwise left as it is. ›

lemma length_list_augment_1: "k ≥ length xs ⟹ length (list_augment xs k v) = Suc k"
  by (simp add: list_augment_def list_pad_out_def)

lemma length_list_augment_2: "k < length xs ⟹ length (list_augment xs k v) = length xs"
  by (simp add: list_augment_def list_pad_out_def)

text ‹We also show that a later @{term list_augment} at an index overrides an earlier one at the
  same index.›
    
lemma list_augment_same_twice: "list_augment (list_augment xs k u) k v = list_augment xs k v"
  by (simp add: list_augment_def list_pad_out_def)

lemma nth'_list_augment_diff: "i ≠ j ⟹ nth' (list_augment σ i v) j = nth' σ j"
  by (auto simp add: list_augment_def list_pad_out_def nth_append nth'_def)

text ‹The definition of @{const list_augment} is not good for code generation,
      since it produces undefined values even when padding out is not required.
      Here, we define a code equation that avoids this.›

lemma list_augment_code [code]:
  "list_augment xs k v = (if (k < length xs) then list_update xs k v else list_update (list_pad_out xs k) k v)"
  by (simp add:list_pad_out_def list_augment_def)

text ‹Finally we can create the list lenses, of which there are three varieties: one that allows
  us to view an index, one that allows us to view the head, and one that allows us to view the tail.
  They are all mainly well-behaved lenses.›
    
definition list_lens :: "nat ⇒ ('a::two ⟹ 'a list)" where
[lens_defs]: "list_lens i = ⦇ lens_get = (λ xs. nth' xs i)
                            , lens_put = (λ xs x. list_augment xs i x) ⦈"

abbreviation hd_lens ("hd⇩L") where "hd_lens ≡ list_lens 0"

definition tl_lens :: "'a list ⟹ 'a list" ("tl⇩L") where
[lens_defs]: "tl_lens = ⦇ lens_get = (λ xs. tl xs)
                        , lens_put = (λ xs xs'. hd xs # xs') ⦈"

lemma list_mwb_lens: "mwb_lens (list_lens x)"
  by (unfold_locales, simp_all add: list_lens_def nth'_list_augment list_augment_same_twice)
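
text ‹ For instance (an illustrative example only), viewing and updating index 1 of a
  three-element list: ›

lemma "lens_get (list_lens (Suc 0)) [a, b, c] = b"
  and "lens_put (list_lens (Suc 0)) [a, b, c] v = [a, v, c]"
  by (simp_all add: list_lens_def)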

text ‹ The set of constructible sources is precisely those where the length is greater than the
  given index. ›

lemma source_list_lens: "𝒮⇘list_lens i⇙ = {xs. length xs > i}"
  apply (auto simp add: lens_source_def list_lens_def)
   apply (metis length_list_augment_1 length_list_augment_2 lessI not_less)
  apply (metis list_augment_idem)
  done

lemma tail_lens_mwb:
  "mwb_lens tlL"
  by (unfold_locales, simp_all add: tl_lens_def)

lemma source_tail_lens: "𝒮⇘tl⇩L⇙ = {xs. xs ≠ []}"
  using list.exhaust_sel by (auto simp add: tl_lens_def lens_source_def)
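
text ‹ A small illustrative check of the tail lens on a non-empty list: ›

lemma "lens_get tl_lens (x # xs) = xs" and "lens_put tl_lens (x # xs) ys = x # ys"
  by (simp_all add: tl_lens_def)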
  
text ‹Independence of list lenses follows when the two indices are different.›
    
lemma list_lens_indep:
  "i  j  list_lens i  list_lens j"
  by (simp add: list_lens_def lens_indep_def list_augment_commute nth'_list_augment_diff)

lemma hd_tl_lens_indep [simp]:
  "hdL  tlL"
  apply (rule lens_indepI)
    apply (simp_all add: list_lens_def tl_lens_def)
    apply (metis hd_conv_nth hd_def length_greater_0_conv list.case(1) nth'_def nth'_list_augment)
   apply (metis (full_types) hd_conv_nth hd_def length_greater_0_conv list.case(1) nth'_def)
  apply (metis One_nat_def diff_Suc_Suc diff_zero length_0_conv length_list_augment_1 length_tl linorder_not_less list.exhaust list.sel(2) list.sel(3) list_augment_0 not_less_zero)
done

lemma hd_tl_lens_pbij: "pbij_lens (hd⇩L +⇩L tl⇩L)"
  by (unfold_locales, auto simp add: lens_defs)

subsection ‹Stream Lenses›

primrec stream_update :: "'a stream ⇒ nat ⇒ 'a ⇒ 'a stream" where
"stream_update xs 0 a = a##(stl xs)" |
"stream_update xs (Suc n) a = shd xs ## (stream_update (stl xs) n a)"

lemma stream_update_snth: "(stream_update xs n a) !! n = a"
proof (induction n arbitrary: xs a)
  case 0
  then show ?case by simp
next
  case (Suc n)
  then show ?case by simp
qed

lemma stream_update_unchanged: "i ≠ j ⟹ (stream_update xs i a) !! j = xs !! j"
  using gr0_conv_Suc by (induct i j arbitrary: xs rule: diff_induct; fastforce)

lemma stream_update_override: "stream_update (stream_update xs n a) n b = stream_update xs n b"
  by (induction n arbitrary: xs a; simp)

lemma stream_update_nth: "stream_update σ i (σ !! i) = σ"
  by (metis stream.map_cong stream_smap_nats stream_update_snth stream_update_unchanged)

definition stream_lens :: "nat ⇒ ('a::two ⟹ 'a stream)" where
[lens_defs]: "stream_lens i = ⦇ lens_get = (λ xs. snth xs i)
                              , lens_put = (λ xs x. stream_update xs i x)⦈"

lemma stream_vwb_lens: "vwb_lens (stream_lens i)"
  apply (unfold_locales; simp add: stream_lens_def)
    apply (rule stream_update_snth)
    apply (rule stream_update_nth)
    apply (rule stream_update_override)
  done
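
text ‹ As a brief illustrative example, the stream lens at index 0 views the head of a stream: ›

lemma "lens_get (stream_lens 0) (x ## s) = x"
  by (simp add: stream_lens_def)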

subsection ‹Record Field Lenses›

text ‹We also add support for record lenses. Every record created can yield a lens for each field.
  These cannot be created generically and thus must be defined case by case as new records are
  created. We thus create a new Isabelle outer syntax command \textbf{alphabet} which enables this. 
  We first create syntax that allows us to obtain a lens from a given field using the internal
  record syntax translations.›
  
abbreviation (input) "fld_put f  (λ σ u. f (λ_. u) σ)"

syntax 
  "_FLDLENS" :: "id ⇒ logic"  ("FLDLENS _")
translations 
  "FLDLENS x" => "⦇ lens_get = x, lens_put = CONST fld_put (_update_name x) ⦈"

text ‹ We also allow the extraction of the "base lens", which characterises all the fields added
  by a record without the extension. ›

syntax
  "_BASELENS" :: "id  logic"  ("BASELENS _")

abbreviation (input) "base_lens t e m   lens_get = t, lens_put = λ s v. e v (m s) "

ML ‹fun baselens_tr [Free (name, _)] =
    let
      val extend = Free (name ^ ".extend", dummyT);
      val truncate = Free (name ^ ".truncate", dummyT);
      val more = Free (name ^ ".more", dummyT);
    in
      Const (@{const_syntax "base_lens"}, dummyT) $ truncate $ extend $ more
    end
  | baselens_tr _ = raise Match;›

parse_translation ‹[(@{syntax_const "_BASELENS"}, K baselens_tr)]›
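
text ‹ As a usage sketch (with a hypothetical record): given ‹record person = name :: string  age :: nat›,
  the term ‹FLDLENS age› denotes the lens focusing the ‹age› field of such a record, while
  ‹BASELENS person› focuses all of its declared fields at once, excluding the extension slot. ›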

text ‹We also introduce the \textbf{alphabet} command that creates a record with lenses for each field.
  For each field a lens is created together with a proof that it is very well-behaved, and for each 
  pair of lenses an independence theorem is generated. Alphabets can also be extended which yields 
  sublens proofs between the extension field lens and record extension lenses. ›

named_theorems lens

ML_file ‹Lens_Lib.ML›
ML_file ‹Lens_Record.ML›
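
text ‹ As a usage sketch (with hypothetical names), a declaration such as
  ‹alphabet st = x :: nat  y :: "bool list"› creates a record type for ‹st› together with field
  lenses ‹x› and ‹y›, theorems stating that each is very well-behaved, and the independence
  theorem ‹x ⨝ y›. ›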

text ‹The following theorem attribute stores splitting theorems for alphabet types, which is useful
  for proof automation.›

named_theorems alpha_splits

text ‹ We supply a helpful tactic to remove the subscripted v characters from subgoals. These
  exist because the internal names of record fields have them. ›

method rename_alpha_vars = tactic ‹ Lens_Utils.rename_alpha_vars ›

subsection ‹Locale State Spaces ›

text ‹ As an alternative to the alphabet command, we also introduce the statespace command, which
  implements Schirmer and Wenzel's locale-based approach to state space modelling~\<^cite>‹"Schirmer2009"›. 

  It has the advantage of allowing multiple inheritance of state spaces, and also variable names are 
  fully internalised with the locales. The approach is also far simpler than record-based state
  spaces. 

  It has the disadvantage that variables may not be fully polymorphic, unlike for the
  alphabet command. This makes it in general unsuitable for UTP theory alphabets. ›

ML_file ‹Lens_Statespace.ML›
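
text ‹ As a usage sketch (with hypothetical names), a declaration such as
  ‹statespace st = x :: nat  y :: "bool list"› creates a locale ‹st› that fixes lenses ‹x› and ‹y›
  over an abstract state type, together with assumptions that they are very well-behaved and
  pairwise independent. ›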

subsection ‹Type Definition Lens›

text ‹ Every type defined by a ‹typedef› command induces a partial bijective lens constructed
  using the abstraction and representation functions. ›

context type_definition
begin

definition typedef_lens :: "'b ⟹ 'a" ("typedef⇩L") where
[lens_defs]: "typedef⇩L = ⦇ lens_get = Abs, lens_put = (λ s. Rep) ⦈"

lemma pbij_typedef_lens [simp]: "pbij_lens typedef⇩L"
  by (unfold_locales, simp_all add: lens_defs Rep_inverse)

lemma source_typedef_lens: "𝒮⇘typedef⇩L⇙ = A"
  using Rep_cases by (auto simp add: lens_source_def lens_defs Rep)

lemma bij_typedef_lens_UNIV: "A = UNIV ⟹ bij_lens typedef⇩L"
  by (auto intro: pbij_vwb_is_bij_lens simp add: mwb_UNIV_src_is_vwb_lens source_typedef_lens)

end
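
text ‹ For example (an informal sketch), any type introduced by ‹typedef› interprets the
  ‹type_definition› locale, and so immediately yields a partial bijective lens from the new
  abstract type into its representation type, whose source is the defining set ‹A›. ›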

subsection ‹Mapper Lenses›

definition lmap_lens :: 
  "(('α ⇒ 'β) ⇒ ('γ ⇒ 'δ)) ⇒ 
   (('β ⇒ 'α) ⇒ 'δ ⇒ 'γ) ⇒ 
   ('γ ⇒ 'α) ⇒ 
   ('β ⟹ 'α) ⇒ 
   ('δ ⟹ 'γ)" where
  [lens_defs]:
  "lmap_lens f g h l = ⦇
  lens_get = f (get⇘l⇙),
  lens_put = g o (put⇘l⇙) o h ⦈"
  
text ‹
  The parse translation below yields a heterogeneous mapping lens for any
  record type. This is achieved through the utility function above that
  constructs a functorial lens. This takes as input a heterogeneous mapping
  function that lifts a function on a record's extension type to an update
  on the entire record, and also the record's ``more'' function. The first input
  is given twice as it has different polymorphic types, being effectively
  a type functor construction, which is not explicitly supported by HOL. We note 
  that the ‹more_update› function does something similar to the extension lifting, 
  but is not precisely suitable here since it only considers homogeneous functions, 
  namely of type ‹'a ⇒ 'a› rather than ‹'a ⇒ 'b›.
›
  
syntax 
  "_lmap" :: "id  logic" ("lmap[_]")

ML ‹fun lmap_tr [Free (name, _)] =
    let
      val extend = Free (name ^ ".extend", dummyT);
      val truncate = Free (name ^ ".truncate", dummyT);
      val more = Free (name ^ ".more", dummyT);
      val map_ext = Abs ("f", dummyT,
                    Abs ("r", dummyT,
                      extend $ (truncate $ Bound 0) $ (Bound 1 $ (more $ (Bound 0)))))

    in
      Const (@{const_syntax "lmap_lens"}, dummyT) $ map_ext $ map_ext $ more
    end
  | lmap_tr _ = raise Match;›

parse_translation ‹[(@{syntax_const "_lmap"}, K lmap_tr)]›
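
text ‹ As a usage sketch (with a hypothetical record name), ‹lmap[person]› applied to a lens
  between possible extension types of the ‹person› record yields a lens between the
  correspondingly extended record types; its get and put act only on the extension field,
  leaving the base fields unchanged. ›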

subsection ‹Lens Interpretation›

named_theorems lens_interp_laws

locale lens_interp = interp
begin
declare meta_interp_law [lens_interp_laws]
declare all_interp_law [lens_interp_laws]
declare exists_interp_law [lens_interp_laws]

end

subsection ‹ Tactic ›

text ‹ A simple tactic for simplifying lens expressions ›

declare split_paired_all [alpha_splits]

method lens_simp = (simp add: alpha_splits lens_defs prod.case_eq_if)

end