diff --git a/talk/cmu-fall-2018/Wadler-Findler-operational-semantics.png b/talk/cmu-fall-2018/Wadler-Findler-operational-semantics.png
new file mode 100644
index 0000000000000000000000000000000000000000..65b867e25145f042008d2423010b0842af442850
Binary files /dev/null and b/talk/cmu-fall-2018/Wadler-Findler-operational-semantics.png differ
diff --git a/talk/cmu-fall-2018/cmu-talk.tex b/talk/cmu-fall-2018/cmu-talk.tex
new file mode 100644
index 0000000000000000000000000000000000000000..a3b2434bc043c2670f19e69986ee4f223ad1e70d
--- /dev/null
+++ b/talk/cmu-fall-2018/cmu-talk.tex
@@ -0,0 +1,1479 @@
+\documentclass{beamer}
+\usetheme{boxes}
+
+\newcommand{\notepr}[1]{}
+
+%% \newif\ifshort
+%% \newif\iflong
+
+%% \shorttrue
+%% %% \longtrue
+
+\usepackage{rotating}
+\usepackage{listings}
+\usepackage{xcolor}
+\lstset{
+  basicstyle=\ttfamily,
+  escapeinside=~~
+}
+
+\usepackage{microtype}
+\usepackage{latexsym,amsmath,amssymb,amsthm}
+\usepackage{mathpartir}
+\usepackage{tikz}
+\usepackage{tikz-cd}
+\input{talk-defs}
+\newcommand{\ltdyn}{\dynr}
+\newcommand{\uarrow}{\mathrel{\rotatebox[origin=c]{-30}{$\leftarrowtail$}}}
+\newcommand{\darrow}{\mathrel{\rotatebox[origin=c]{30}{$\twoheadleftarrow$}}}
+\renewcommand{\upcast}[2]{\langle{#2}\uarrow{#1}\rangle}
+\renewcommand{\dncast}[2]{\langle{#1}\darrow{#2}\rangle}
+\newcommand{\gtdyn}{\sqsupseteq}
+\AtBeginSection[]
+{
+  \begin{frame}<beamer>
+    %% \frametitle{Outline for section \thesection}
+    \tableofcontents[currentsection]
+  \end{frame}
+}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% WARNING: I changed these commands
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\renewcommand{\u}{\underline}
+
+\newcommand{\pipe}{\,\,|\,\,}
+\newcommand{\dynv}{{?}}
+\newcommand{\dync}{\u {\text{?`}}}
+\newcommand{\result}{\text{result}}
+\newcommand{\Set}{\text{Set}}
+\newcommand{\relto}{\to}
+\newcommand{\M}{\mathcal{M}}
+\newcommand{\sq}{\square}
+\newcommand{\lett}{\kw{let}}
+\newcommand{\letXbeYinZ}[2]{\lett#2 = #1;}
+\newcommand{\bindXtoYinZ}[2]{\kw{bind}#2 \leftarrow #1;}
+\newcommand{\too}{\kw{to}}
+\newcommand{\case}{\kw{case}}
+\newcommand{\kw}[1]{\texttt{#1}\,\,}
+\newcommand{\absurd}{\kw{absurd}}
+\newcommand{\caseofXthenYelseZ}[3]{\case #1 \{ #2 \pipe #3 \}}
+\newcommand{\caseofX}[1]{\case #1}
+\newcommand{\thenY}{\{}
+\newcommand{\elseZ}[1]{\pipe #1 \}}
+\newcommand{\pmpairWtoXYinZ}[4]{\kw{split} #1\,\kw{to} (#2,#3). #4}
+\newcommand{\pmpairWtoinZ}[2]{\kw{split} #1\,\kw{to} (). #2}
+\newcommand{\pmmuXtoYinZ}[3]{\kw{unroll} #1 \,\kw{to} \roll #2. #3}
+\newcommand{\ret}{\kw{ret}}
+\newcommand{\thunk}{\kw{thunk}}
+\newcommand{\force}{\kw{force}}
+\newcommand{\abort}{\kw {abort}}
+\newcommand{\with}{\mathbin{\&}}
+\DeclareMathOperator*{\With}{\&}
+
+
+\title{A Type Theoretic Approach to Gradual Typing}
+\author{Max S. New}
+\institute{Northeastern University}
+\date{\today}
+
+\begin{document}
+
+\begin{frame}
+  \titlepage
+  \begin{small}
+    Joint work with\\
+    Amal Ahmed (Northeastern),\\
+    Dan Licata (Wesleyan)
+  \end{small}
+\end{frame}
+
+\begin{frame}
+  \frametitle{Outline}
+  \tableofcontents
+\end{frame}
+
+\section{Intro to Gradual Typing}
+
+\begin{frame}[fragile]{Gradual Typing is about Program Evolution}
+\begin{columns}
+\begin{column}{0.5\textwidth}
+\begin{onlyenv}<1>
+\begin{footnotesize}
+\begin{lstlisting}
+view(st) -> { ... }
+
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Valid Data
+}
+
+main() -> {
+  st = initial_state;
+  while true {
+    evt = input();
+    state = update(evt,st);
+    display(view(st));
+  }
+}
+\end{lstlisting}        
+\end{footnotesize}
+\end{onlyenv}
+\begin{onlyenv}<2->
+\begin{footnotesize}
+\begin{lstlisting}
+view(st) -> { ... }
+
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Valid Data
+}
+
+main() -> { .. }
+\end{lstlisting}
+\end{footnotesize}
+\end{onlyenv}
+\end{column}
+\begin{column}{0.1\textwidth}
+  \begin{onlyenv}<3->
+  $\Longrightarrow$
+\end{onlyenv}
+\end{column}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<3->
+\begin{lstlisting}
+view(st) -> { ... }
+
+~\colorbox{green}{type State = (Boolean, ?)}~
+~\colorbox{green}{update : (?, State) -> State}~
+update(event, state) -> {
+  if first(state)
+    then ... ; Loading
+    else ... ; Valid Data
+}
+                
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+\end{footnotesize}} \end{column}
+\end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing is about Program Evolution}
+\begin{columns}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<1->
+\begin{lstlisting}
+view(st) -> { ... }
+
+~\colorbox{green}{type State = (Boolean, ?)}~
+~\colorbox{green}{update : (?, State) -> State}~
+update(event, state) -> {
+  if first(state)
+    then ... ; Loading
+    else ... ; Valid Data
+}
+                
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+\end{footnotesize}}
+\end{column}
+\begin{column}{0.1\textwidth}
+\begin{onlyenv}<2->
+  $\Longrightarrow$
+\end{onlyenv}
+\end{column}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<2->
+\begin{lstlisting}
+view(st) -> { ... }
+~\colorbox{green}{type State = (Boolean, Num)}~
+~\colorbox{green}{type Evt = NewData Num }~
+        ~\colorbox{green}{| Cancel}~
+~\colorbox{green}{update : (Evt, State) -> State}~
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Up to date
+}
+            
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+\end{footnotesize}}
+\end{column}
+\end{columns}
+\begin{onlyenv}<3->
+  Code runs at every point, even though only \texttt{update} is typed.
+\end{onlyenv}
+\end{frame}
+
+\begin{frame}{Gradual Typing Goals}
+  Gradually Typed languages let dynamically typed and statically typed
+  code interoperate within a single language.
+  \begin{enumerate}
+  \item Can write scripts, experimental code in the dynamically typed
+    fragment.
+  \item Can later add types to enforce interfaces, add documentation,
+    prevent future bugs.
+  \item But don't have to add types to the \emph{entire} program to
+    run it!
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{What do the Types Mean?}
+  How do the types affect the semantics of the program?
+  Two answers.
+  \begin{enumerate}
+  \item Optional Typing: they don't, just run the type erased
+    programs. Dynamic typing is already safe anyway.
+  \item Gradual Typing: a type system should give us a type
+    \emph{soundness} theorem, so we need to check types at runtime in
+    order to satisfy it.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Cast Insertion}
+  \begin{mathpar}
+  \inferrule
+  {\Gamma \vdash t : A \to B \and \Gamma \vdash u : A' \and
+    A \sim A'}
+  {\Gamma \vdash t u : B}
+  \end{mathpar}
+  elaborates to a term with an explicit cast
+  \[ t (\obcast{A'}{A}u) \]
+\end{frame}
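+
+%% Backup sketch: a concrete instance of the elaboration above.  The
+%% base type Num and the function f are made up for illustration; the
+%% cast syntax is the \obcast notation used on this slide.
+%% \begin{frame}{Cast Insertion: Example}
+%%   If $f : \texttt{Num} \to \texttt{Num}$ and $x : \dyn$, then
+%%   $\texttt{Num} \sim \dyn$, so the surface application $f\,x$
+%%   elaborates to
+%%   \[ f\,(\obcast{\dyn}{\texttt{Num}} x) \]
+%%   and the inserted cast checks at runtime that $x$ is a number.
+%% \end{frame}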
+
+\begin{frame}{Semantics of Casts}
+  \begin{center}
+    \includegraphics[width=\textwidth,height=0.8\textheight,keepaspectratio]{Wadler-Findler-operational-semantics}
+  \end{center}
+\end{frame}
+
+\begin{frame}{Semantics of Casts}
+  \begin{align*}
+    \obcast{A}{A}v &\mapsto v\\
+    \obcast{A\to B}{A'\times B'}v & \mapsto \err\\
+    \obcast{A\to B}{A' \to B'}v &\mapsto \lambda x. \obcast{B}{B'}(v (\obcast{A'}{A}x))\\
+    \cdots
+  \end{align*}
+\end{frame}
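+
+%% Backup sketch: a sample reduction using the function-cast rule above.
+%% The function f and the literal 5 are illustrative; Num stands in for
+%% any base type.
+%% \begin{frame}{Semantics of Casts: Example}
+%%   \begin{align*}
+%%     (\obcast{\dyn\to\dyn}{\texttt{Num}\to\texttt{Num}} f)\, 5
+%%     &\mapsto (\lambda x. \obcast{\dyn}{\texttt{Num}}(f\,(\obcast{\texttt{Num}}{\dyn} x)))\, 5\\
+%%     &\mapsto \obcast{\dyn}{\texttt{Num}}(f\,(\obcast{\texttt{Num}}{\dyn} 5))
+%%   \end{align*}
+%%   The argument is cast up to $\dyn$ before the call and the result is
+%%   checked against $\texttt{Num}$ afterwards.
+%% \end{frame}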
+
+\begin{frame}{}
+  \begin{itemize}
+  \item Many different semantics of casts
+    \begin{itemize}
+    \item For simple types alone: lazy, eager, up-down, partially
+      eager, transient
+    \end{itemize}
+  \item Some seem especially natural: functorial interpretation.
+  \end{itemize}
+  What are the underlying principles?
+  Which of them give us sound typing?
+\end{frame}
+
+\begin{frame}{A Type Theoretic Approach}
+  \begin{enumerate}
+  \item Simple axiomatization of the desired properties of types and
+    casts.
+  \item \emph{Derive} semantics of casts from these properties.
+  \item We prove certain cast semantics are the only ones that satisfy
+    our desired properties.
+  \end{enumerate}
+  Caveat: applies directly to the cast calculus, not the surface
+  gradual language. See Abstracting Gradual Typing for a good
+  explanation of the surface type checking.
+\end{frame}
+
+\begin{frame}{Properties of Gradual Typing}
+  \begin{enumerate}
+  \item \pause Dynamic Typing: Have a dynamic type into which all types can
+    be encoded.
+  \item \pause Type Soundness: Types should give associated
+    reasoning principles: $\beta, \eta$ equality.
+  \item \pause \emph{Graduality}: Transition from dynamic to static
+    typing should only add type checking, not otherwise change
+    behavior.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}[fragile]{Graduality}
+\begin{columns}
+\begin{column}{0.5\textwidth}
+\begin{onlyenv}<1->
+\begin{footnotesize}
+\begin{lstlisting}
+view(st) -> { ... }
+
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Valid Data
+}
+
+main() -> { .. }
+\end{lstlisting}
+\end{footnotesize}
+\end{onlyenv}
+\end{column}
+\begin{column}{0.1\textwidth}
+  \begin{onlyenv}<1>
+  $\Longrightarrow$
+  \end{onlyenv}
+  \begin{onlyenv}<2->
+  $\gtdyn$
+\end{onlyenv}
+\end{column}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<1->
+\begin{lstlisting}
+view(st) -> { ... }
+
+~\colorbox{green}{type State = (Boolean, ?)}~
+~\colorbox{green}{update : (?, State) -> State}~
+update(event, state) -> {
+  if first(state)
+    then ... ; Loading
+    else ... ; Valid Data
+}
+                
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+  \end{footnotesize}}
+\end{column}
+\end{columns}
+  \begin{onlyenv}<2->
+    Adding types \emph{refines} program behavior
+  \end{onlyenv}
+\end{frame}
+
+\begin{frame}{Graduality}
+  Syntactically,
+  \[ t \ltdyn t' \]
+  if $t$ and $t'$ have the same type erasure, and every type in $t$ is
+  ``less dynamic'' than the corresponding type in $t'$.
+
+  \pause Operationally, if $t, t' : \texttt{Bool}$, then one of the
+  following holds:
+  \begin{enumerate}
+  \item $t$ runs to a type error (and $t'$ can do anything)
+  \item $t$ and $t'$ diverge
+  \item $t \mapsto \texttt{true}$ and $t' \mapsto \texttt{true}$
+  \item $t \mapsto \texttt{false}$ and $t' \mapsto \texttt{false}$
+  \end{enumerate}
+
+  \pause
+  \begin{itemize}
+  \item Originally called the ``gradual guarantee'' in SNAPL '15
+    (Siek-Vitousek-Cimini-Boyland)
+  \item See ``Graduality from Embedding-Projection Pairs'' (ICFP '18,
+    with Amal Ahmed) for more on the operational semantics POV
+  \end{itemize}
+\end{frame}
+
+%% \begin{frame}{Evaluation in Gradual Typing}
+%%   Say we have a function $\lambda x. t : \texttt{bool} \to \texttt{string}$ and
+%%   we apply it to a dynamically typed value $v : \dyn$.
+%%   \pause
+%%   If $v$ is a boolean, fine
+%%   \[ (\lambda x. t)(\texttt{true}) \mapsto t[\texttt{true}/x]
+%%   \]
+%%   \pause
+%%   Otherwise, we raise an error
+%%   \[ (\lambda x. t)(5) \mapsto \texttt{type error: expected ...}
+%%   \]
+%% \end{frame}
+
+%% \begin{frame}{Evaluation in Gradual Typing}
+%%   What if it's is a higher order function $\lambda x.t :
+%%   (\texttt{number} \to \texttt{string})\to \texttt{num}$?
+%%   \pause
+%%   If $v$ is not a function, we error
+%%   \[ (\lambda x.t)(\texttt{true}) \mapsto \texttt{type error:...}\]
+%%   But if $v$ is a function? How can we verify that it takes numbers
+%%   to strings?
+%%   \begin{enumerate}
+%%   \item\pause Run the type checker at runtime :(
+%%   \item\pause Findler-Felleisen wrapping implementation:
+%%     \[ (\lambda x.t)(\lambda y. u) \mapsto t[\lambda y. \texttt{string?}(u)/x]\]
+%%   \item \pause Some combination: Annotate every value with best-known type
+%%     information, use wrapping if the type information is not precise
+%%     enough to tell.
+%%   \end{enumerate}
+%% \end{frame}
+
+%% \begin{frame}{Type Soundness}
+%%   \begin{enumerate}
+%%   \item For simple types, usually defined as a safety property: if $t
+%%     : A$ then $t$ diverges or $t$ runs to one of the allowed errors or
+%%     $t$ runs to a value that has type $A$.
+%%   \item \pause But there are also stronger, \emph{relational} soundness
+%%     theorems
+%%     \begin{enumerate}
+%%     \item Function extensionality: $f_1 = f_2 : A \to B$ if for every
+%%       $x : A$,  $f_1(x)$ has the same behavior as $f_2(x)$.
+%%     \item Universal Polymorphism: $v : \forall \alpha. A$, then
+%%       $v[B_1]$ and $v[B_2]$ ``do the same thing''.
+%%     \item Local State/Existential Polymorphism: If I don't expose my
+%%       representation, I can change it without breaking the program.
+%%     \end{enumerate}
+%%   \end{enumerate}
+%% \end{frame}
+
+\section{CBN Gradual Type Theory}
+
+\begin{frame}{Call-by-name Gradual Type Theory}
+  Start simple: $\lambda$ calculus with only negative types $\to,
+  \times, 1$. Since effects are involved, this is call-by-name
+  evaluation. \\
+
+  ``Call-by-name Gradual Type Theory'', FSCD 2018 with Dan Licata.
+\end{frame}
+
+\subsection{Syntax of GTT}
+
+\begin{frame}{Syntax of Gradual Type Theory}
+  4 sorts in Gradual Type Theory
+  \begin{enumerate}
+  \item Types $A,B,C$
+  \item Terms $x_1 : A_1,\ldots, x_n:A_n \vdash t : A$
+  \item \pause Type Dynamism: $A \ltdyn B$
+  \item Term Dynamism: $x_1 \ltdyn y_1 : A_1 \ltdyn B_1,\ldots \vdash t \ltdyn u : A \ltdyn B$
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Types and Terms}
+  Add dynamic type, casts, error to typed lambda calculus.
+  \begin{mathpar}
+    \begin{array}{rrl}
+      \text{types} & A,B,C \bnfdef & \colorbox{lightgray}{$\dyn$} \bnfalt A \to B \bnfalt A \times B \bnfalt 1\\
+      \text{terms} & s,t,u \bnfdef& x \bnfalt \lambda (x : A). t \bnfalt t u\\&&
+      \bnfalt (t, u) \bnfalt \pi_i t \bnfalt ()\\&&
+      \bnfalt \colorbox{lightgray}{$\err_{A}$} \bnfalt \colorbox{lightgray}{$\upcast A B t$} \bnfalt \colorbox{lightgray}{$\dncast A B t$}
+    \end{array}
+  \end{mathpar}
+\end{frame}
+%% \begin{frame}{Terms}
+%%   Terms are all intrinsically typed, we have variables and
+%%   substitution as usual.
+%%   %% and we can also add function symbols as
+%%   %% primitives.
+
+%%   \begin{mathpar}
+%%     \inferrule
+%%     {x:A \in \Gamma}
+%%     {\Gamma \vdash x : A}
+
+%%     \inferrule
+%%     {\Gamma \vdash u : B\and
+%%       \Gamma, x: B \vdash t : A}
+%%     {\Gamma \vdash t[u/x] : A}
+
+%%     \inferrule
+%%     {\Gamma \vdash t_1 : A_1,\ldots, \Gamma \vdash t_n : A_n \and f : (A_1,\ldots,A_n) \to B}
+%%     {\Gamma \vdash f(t_1,\ldots,t_n) : B}
+%%   \end{mathpar}
+%% \end{frame}
+
+\begin{frame}{Type Dynamism}
+  \[ A \ltdyn B \]
+  read: $A$ is ``less dynamic''/``more precise'' than $B$. \\
+  \begin{itemize}
+  \item Syntactic intuition: $B$ has more $\dyn$s in it.
+  \item Semantic intuition: $A$ supports fewer interactions than $B$.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Type Dynamism Axioms}
+  \begin{mathpar}
+  \inferrule{~}{A \ltdyn A}\and
+  \inferrule{A \ltdyn B \and B \ltdyn C}{A \ltdyn C}\and
+  \inferrule{~}{A \ltdyn \dyn}\\
+  \inferrule{A \ltdyn A' \and B \ltdyn B'}{A \to B \ltdyn A' \to B'}\and
+  \inferrule{A \ltdyn A' \and B \ltdyn B'}{A \times B \ltdyn A' \times B'}
+  \end{mathpar}
+  NB: Covariant function rule
+\end{frame}
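+
+%% Backup sketch: a small derivation from the axioms above, using only
+%% the types in the grammar ($1$, $\to$, $\dyn$).
+%% \begin{frame}{Type Dynamism: Example Derivation}
+%%   \begin{mathpar}
+%%     \inferrule
+%%     {\inferrule{~}{1 \ltdyn \dyn} \and \inferrule{~}{1 \ltdyn \dyn}}
+%%     {1 \to 1 \ltdyn \dyn \to \dyn}
+%%   \end{mathpar}
+%%   and since $\dyn \to \dyn \ltdyn \dyn$, transitivity gives
+%%   $1 \to 1 \ltdyn \dyn$.
+%% \end{frame}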
+
+%% \begin{frame}{Term Dynamism INTUITION}
+%%   Models the refinement relation of gradual guarantee. Here's an example
+%%   \emph{model} for intuition.\\
+%%   \pause
+%%   For every $A \ltdyn A'$, if
+%%   \begin{mathpar}
+%%     \cdot  \vdash t \ltdyn t' : A \ltdyn A'
+%%   \end{mathpar}
+%%   Then either
+%%   \begin{enumerate}
+%%   \item $t$ runs to a type error.
+%%   \item $t,t'$ both diverge
+%%   \item $t \Downarrow v$ and $t' \Downarrow v'$ and $v \ltdyn v'$.
+%%   \end{enumerate}
+  
+%%   \notepr{
+%%   The $A \ltdyn A'$ restriction ensures we have a sensible
+%%   way to interpret that.
+%%   }
+%% \end{frame}
+
+\begin{frame}{Term Dynamism}
+  \begin{mathpar}
+    x_1 \ltdyn x_1' : A_1 \ltdyn A_1' ,\ldots \vdash t \ltdyn t' : B \ltdyn B' 
+  \end{mathpar}
+  read: ``if all the $x_i$s are less than $x_i'$s then $t$ is less than $t'$''.\\
+  \pause
+  Note that it is only well formed if all of the $A_i \ltdyn A_i'$ and
+  $B \ltdyn B'$ hold, and
+  \begin{mathpar}
+    x_1:A_1,\ldots\vdash t : B\\
+    x_1':A_1',\ldots\vdash t' : B'\\
+  \end{mathpar}
+  %% We sometimes abbreviate the context ordering as $\Phi \vdash t
+  %% \ltdyn t' : B \ltdyn B'$.
+  \pause
+  This very explicit notation gets unwieldy, so I'll simplify it in the
+  talk and abbreviate the above as:
+  \[
+  \Phi \vdash t \ltdyn t' : B \ltdyn B'
+  \]
+\end{frame}
+
+\begin{frame}{Type Error}
+  The type error is the \emph{least} dynamic term, modeling
+  graduality.
+
+  \[ \Phi \vdash \err_A \ltdyn t : A \ltdyn A' \]
+\end{frame}
+
+\begin{frame}{Term Dynamism Axioms}
+  Identity and substitution are monotone.
+  %% For open terms, identity and substitution properties that should
+  %% evoke a logical relation interpretation. Substitution says that
+  %% every term is monotone in its free variables.
+  \begin{mathpar}
+    \inferrule
+    {(x \ltdyn x' : A \ltdyn A') \in \Phi}
+    {\Phi \vdash x \ltdyn x' : A \ltdyn A'}
+
+    \inferrule
+        {x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' \and
+      u \ltdyn u' : A \ltdyn A'}
+    {t[u/x] \ltdyn t'[u'/x']}
+  \end{mathpar}
+  %% \pause Note that these are the same as ordinary identity and
+  %% substitution but ``monotonically''.
+\end{frame}
+
+\begin{frame}{Term Dynamism}
+  Like type dynamism, term dynamism has reflexivity and transitivity
+  rules:
+  %% note
+  %% how well-formedness relies on the reflexivity and transitivity of
+  %% type dynamism
+
+  \begin{mathpar}
+    \inferrule
+    {\vdash t : B}
+    {t \ltdyn t : B \ltdyn B}
+
+    \inferrule
+    {t \ltdyn t' : B \ltdyn B'\\
+     t' \ltdyn t'' : B' \ltdyn B''}
+    {t \ltdyn t'' : B \ltdyn B''}
+  \end{mathpar}
+\end{frame}
+
+%% \begin{frame}[fragile]{Term Dynamism Squares}
+%%   These can be nicely visualized as squares:
+%%   For $\Phi : \Gamma \ltdyn \Gamma'$, we can think of 
+%%   \[ \Phi \vdash t \ltdyn t' : A \ltdyn A' \]
+%%   as
+%%   \[
+%%   \begin{tikzcd}
+%%     \Gamma \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "t" ]
+%%     &
+%%     \Gamma' \arrow[d , "t'"]
+%%     \\
+%%     A \arrow[ dashed , r, "\preciser" ]
+%%     &
+%%     A'
+%%   \end{tikzcd}
+%%   \]
+  
+%% \end{frame}
+
+\begin{frame}[fragile]{Graphical View of Dynamism}
+  Can help to visualize these as \emph{squares}.
+  \[
+  x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' : B \ltdyn B'
+  \]
+
+  becomes
+  \begin{tikzcd}
+    x:A \arrow[dd] \arrow[rr, "t"] &  & B \arrow[dd] \\
+    & \sqsubseteq &  \\
+    x':A' \arrow[rr, "t'"] &  & B'
+  \end{tikzcd}        
+\end{frame}
+
+\begin{frame}[fragile]{Graphical View of Dynamism}
+  Substitution principle/monotonicity corresponds to horizontal
+  composition of squares.
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+    A \arrow[r,"u"]\arrow[d] & B \arrow[r,"t"]\arrow[d] & C\arrow[d]\\
+    A' \arrow[r,"u'"] & B'\arrow[r,"t'"]& C'
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{tikzcd}
+        A \arrow[d] \arrow[r, "id"] & A \arrow[d] \\
+        B \arrow[r, "id"] & B
+      \end{tikzcd}      
+    \end{column}
+    \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+        \inferrule{x \ltdyn x' \vdash t \ltdyn t'\\ u \ltdyn u'}
+                  {t[u/x] \ltdyn t'[u'/x']}
+      \end{mathpar}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+          \inferrule
+          {}
+          {x\ltdyn x' : A \ltdyn A' \vdash x \ltdyn x' : A \ltdyn A'}
+      \end{mathpar}
+    \end{column}
+    \end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Graphical View of Dynamism}
+  Vertical composition of squares corresponds to \emph{transitivity and reflexivity}.
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+\Gamma \arrow[d] \arrow[r,"t"] & A \arrow[d] \\
+\Gamma' \arrow[d] \arrow[r,"t'"] & A' \arrow[d] \\
+\Gamma'' \arrow[r,"t''"] & A''
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{tikzcd}
+A \arrow[d, "id"] \arrow[r, "t"] & B \arrow[d, "id"] \\
+A \arrow[r, "t"] & B
+\end{tikzcd}
+    \end{column}
+    \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+        \inferrule{\Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A'\\
+          \Gamma' \ltdyn \Gamma'' \vdash t' \ltdyn t'' : A' \ltdyn A''}
+                  {\Gamma \ltdyn \Gamma'' \vdash t \ltdyn t'' : A \ltdyn A''}
+      \end{mathpar}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+          \inferrule
+          {\Gamma \vdash t : A}
+          {\Gamma \ltdyn \Gamma \vdash t \ltdyn t : A \ltdyn A}
+      \end{mathpar}
+    \end{column}
+    \end{columns}
+    \pause Aside: Squares can be thought of as 2-cells in a double
+    category, which was the inspiration for the whole approach.
+\end{frame}
+
+\begin{frame}{Function Type}
+  First, we have the usual term structure for the function type:
+  \begin{mathpar}
+    \inferrule
+    {\Gamma, x : A \vdash t : B}
+    {\Gamma \vdash \lambda x:A. t : A \to B}
+
+    \inferrule
+    {\Gamma \vdash t : A \to B\and \Gamma \vdash u : A}
+    {\Gamma \vdash t u : B}
+  \end{mathpar}
+  \pause
+  And we add the $\beta, \eta$ laws as ``equi-dynamism'' axioms:
+
+  \begin{mathpar}
+    \inferrule
+    {}
+    {(\lambda x:A. t) u \equidyn t[u/x]}
+
+    \inferrule
+    {t : A \to B}
+    {t \equidyn (\lambda x:A. t x)}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Function Type}
+  Next, to model graduality, we add \emph{congruence} rules.
+  \begin{mathpar}
+    \inferrule
+    {A \ltdyn A' \and B \ltdyn B'}
+    {A \to B \ltdyn A' \to B'}\\
+    \inferrule
+    {x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' : B \ltdyn B'}
+    {\lambda x:A. t \ltdyn \lambda x':A'.t' }
+
+    \inferrule
+    {t \ltdyn t' : A \to B \ltdyn A' \to B'\and
+      u \ltdyn u' : A \ltdyn A' %% \and B \ltdyn B'
+    }
+    {t\, u \ltdyn t'\, u' : B \ltdyn B'}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Product Type}
+  Product (and unit) types get a similar treatment.
+  \begin{mathpar}
+    \inferrule
+    {\Gamma  \vdash t_1 : A_1\\ \Gamma \vdash t_2 : A_2}
+    {\Gamma \vdash (t_1,t_2) : A_1 \times A_2}
+
+    \inferrule
+    {\Gamma \vdash t : A_1 \times A_2}
+    {\Gamma \vdash \pi_i t : A_i}\\
+
+    \inferrule
+    {}
+    {\pi_i(t_1,t_2) \equidyn t_i}
+
+    \inferrule
+    {t : A_1 \times A_2}
+    {t \equidyn (\pi_1 t, \pi_2 t)}\\
+
+    \inferrule{t_1 \ltdyn t_1' : A_1 \ltdyn A_1' \and t_2 \ltdyn t_2' : A_2 \ltdyn A_2'}{(t_1,t_2) \ltdyn (t_1',t_2') : A_1 \times A_2 \ltdyn A_1' \times A_2'}
+    
+    \inferrule{t \ltdyn t' : A_1 \times A_2 \ltdyn A_1' \times A_2' \and A_i \ltdyn A_i'}{\pi_i t \ltdyn \pi_i t' : A_i \ltdyn A_i'}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Casts}
+  How to define the casts axiomatically?
+  \begin{itemize}
+  \item We \emph{characterize} them by a property with respect to $\ltdyn$.
+  \item We can do this when our casts are \emph{upcasts} or
+    \emph{downcasts}.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Casts}
+  When $A \ltdyn B$, we get two functions: the \emph{upcast} $\upcast
+  A B$ and the \emph{downcast} $\dncast A B$.
+
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash t : A \and A \ltdyn B}
+    {\Gamma \vdash \upcast A B t : B}
+
+    \inferrule
+    {\Gamma \vdash u : B \and A \ltdyn B}
+    {\Gamma \vdash \dncast A B u : A}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Oblique Casts}
+  Not every cast in gradual typing is an upcast or a downcast, for
+  instance the cast
+
+  \[ \obcast {\texttt{Bool} \times \dyn} {\dyn \times \texttt{Bool}} \]
+
+  But in almost every gradual system, these are equivalent to casting
+  through the dynamic type:
+
+  \[ \obcast{A}{B}t = \dncast B \dyn \upcast A \dyn t\]
+\end{frame}
+
+\begin{frame}{Casts}
+  How should casts behave?
+  %% Intuition: the casted term should act as much like the original term
+  %% as possible while still satisfying the new type.
+  \pause
+
+  So for a downcast, the casted term should refine the original:
+  %% ``Elimination Rule''
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\dncast A B u \ltdyn u : A \ltdyn B}
+  \end{mathpar}
+
+  \pause But this would be satisfied even by the type error $\err_A$.
+  \pause
+  We want to make sure that the casted term has \emph{as much}
+  behavior as possible: i.e., it's the \emph{biggest} term below
+  $u$.
+  \begin{mathpar}
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash t \ltdyn \dncast A B u : A}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\dncast A B u$ is the greatest
+  lower bound of $u$ in $A$.
+\end{frame}
+
+\begin{frame}{Casts}
+  Upcasts are the exact dual.
+
+  For an upcast, the casted term should be \emph{more} dynamic than
+  the original:
+  \begin{mathpar}
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \upcast A B t : A \ltdyn B}
+  \end{mathpar}
+
+  But it has no more behaviors than dictated by the original term:
+  it's the \emph{least} term above $t$.
+
+  \begin{mathpar}
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash \upcast A B t \ltdyn u : B}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\upcast A B t$ is the least upper
+  bound of $t$ in $B$.
+\end{frame}
+
+\begin{frame}{Casts}
+  \begin{enumerate}
+  \item Note that this is a \emph{precise} specification because lubs
+    and glbs are unique (up to $\equidyn$).
+  \item Category-theoretic terminology: this is a \emph{universal
+    property} for upcasts and downcasts.
+  \item Since they are exact \emph{duals}, most theorems about one
+    dualize for the other.
+  \end{enumerate}
+\end{frame}
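+
+%% Backup sketch: why the specification is precise, spelled out for
+%% downcasts (the names d_1, d_2 are just for this argument).
+%% \begin{frame}{Uniqueness from the Universal Property}
+%%   Suppose $d_1$ and $d_2$ both satisfy the downcast rules for some
+%%   $u : B$ with $A \ltdyn B$.
+%%   \begin{enumerate}
+%%   \item $d_1 \ltdyn u : A \ltdyn B$, so by the second rule for $d_2$,
+%%     $d_1 \ltdyn d_2 : A$.
+%%   \item Symmetrically, $d_2 \ltdyn d_1 : A$.
+%%   \end{enumerate}
+%%   So $d_1 \equidyn d_2$: greatest lower bounds are unique.  Upcasts
+%%   are dual.
+%% \end{frame}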
+
+\subsection{Theorems in GTT}
+
+\begin{frame}{Consequences of GTT}
+  \begin{itemize}
+  \item Now that we have a simple ``logic'' of term dynamism, what can
+    we \emph{prove} in the theory?
+  \item Theorems in the theory apply to any model, e.g., a simple lazy
+    gradually typed language.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Derivation of Function Casts}
+  We can prove that the ``function contract'' is the unique
+  implementation of casts between function types. I.e. if $A \ltdyn
+  A'$ and $B \ltdyn B'$ and $t : A' \to B'$ then
+
+  \[
+  \dncast {A \to B} {A' \to B'} t \equidyn \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  and
+  \[
+  \upcast {A \to B} {A' \to B'} t \equidyn \lambda x':A'. \upcast{B}{B'}(t (\dncast A {A'} x'))
+  \]
+\end{frame}
+
+\begin{frame}{Derivation of Function Casts}
+  One direction:
+  To prove
+  \[ \dncast {A \to B} {A' \to B'} t \ltdyn \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  By $\eta$, it is sufficient to show
+  \[
+  x : A \vdash (\dncast {A \to B} {A' \to B'} t) x \ltdyn \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  Then it follows by congruence because:
+  \begin{enumerate}
+  \item $(\dncast {A \to B} {A' \to B'} t)\ltdyn t$
+  \item $x \ltdyn \upcast A {A'} x$
+  \item if $u \ltdyn u'$ then $u \ltdyn \dncast {B} {B'} u'$
+  \end{enumerate}
+  \pause
+  The other three cases are similar/dual.
+  \pause
+  All just $\beta,\eta$ and congruence. Applies to any type defined
+  this way: $\times$, $1$.
+\end{frame}
+
+\begin{frame}{Casts are Galois Connections}
+  \notepr{  First, we can show that every pair of upcast and downcast forms a
+  Galois connection, which}
+
+  Two ``round-trip'' properties:
+  \pause
+  Casting down and back up has ``more errors''
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\upcast A B \dncast A B u \ltdyn u : B}
+  \end{mathpar}
+  \pause
+  And casting up and back down has ``fewer errors''
+  \begin{mathpar}
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \dncast A B \upcast A B t : A}
+  \end{mathpar}
+  \pause In practice, we strengthen this to an
+  equality, so we can add the ``retract axiom'' as well to the system
+  (though none of the theorems I'll show depend on it):
+  \begin{mathpar}
+    \inferrule*[right=RETRACT-AXIOM]
+    {t : A \and A \ltdyn B}
+    {\dncast A B \upcast A B t \ltdyn t : A}
+  \end{mathpar}
+  We only need to add it as an axiom for ``base casts''.
+\end{frame}
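+
+%% Backup sketch: the second round-trip property follows from the cast
+%% specifications alone.
+%% \begin{frame}{Round Trips from the Specifications}
+%%   For $t : A$ with $A \ltdyn B$:
+%%   \begin{enumerate}
+%%   \item By the upcast rule, $t \ltdyn \upcast A B t : A \ltdyn B$.
+%%   \item Applying the downcast rule to that square,
+%%     $t \ltdyn \dncast A B \upcast A B t : A$.
+%%   \end{enumerate}
+%%   The other round trip, $\upcast A B \dncast A B u \ltdyn u : B$, is
+%%   dual.
+%% \end{frame}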
+
+\begin{frame}{Identity and Composition}
+  Reflexivity gives the identity cast and transitivity gives the
+  composition of casts:
+  \begin{enumerate}
+  \item $\dncast A A t \equidyn t$
+  \item If $A \ltdyn B \ltdyn C$, then $\dncast A C t \equidyn \dncast A B \dncast B C t$
+  \end{enumerate}
+  \pause
+  These are part of the operational semantics, for instance:
+  \begin{align*}
+    \dncast{\dyn}{\dyn} v &\mapsto v\\
+    \dncast{A \to B}{\dyn} v &\mapsto \dncast {A \to B}{\dyn \to \dyn}\dncast {\dyn \to \dyn} \dyn v
+  \end{align*}
+\end{frame}
+
+%% \begin{frame}{Identity Cast}
+%%   The upcast/downcast from $A$ to $A$ is the identity function,
+%%   i.e. $\upcast A A t \equidyn t$.  Each direction is an instance of
+%%   the specification of upcasts:
+
+%%   \begin{mathpar}
+%%   \inferrule*[right=UR]
+%%       {~}
+%%       {t \ltdyn \upcast{A}{A}{t} : A}\and
+
+%%   \inferrule*[right=UL]
+%%       {t \ltdyn t : A \ltdyn A}
+%%       {\upcast{A}{A}{t} \ltdyn t : A}
+%%   \end{mathpar}
+
+%%   Downcast proof is precise dual.
+%% \end{frame}
+
+%% \begin{frame}{Casts (de)compose}
+%%   If $A \ltdyn B \ltdyn C$ then a cast between $A$ and $B$ factors
+%%   through $C$:
+
+%%   \begin{mathpar}
+%%     \inferrule
+%%     {A \ltdyn B \ltdyn C}
+%%     {\dncast A B \dncast B C t \equidyn \dncast A C t}
+%%   \end{mathpar}
+
+%%   Example: casts between dynamic type and function types factor
+%%   through ``most dynamic'' function type:
+%%   \begin{mathpar}
+%%     \dncast {A \to B}{\dyn \to \dyn}\dncast \dyn {\dyn \to \dyn} t \equidyn \dncast{A \to B}{\dyn} t
+%%   \end{mathpar}
+%%   This is often a rule of the operational semantics.
+%% \end{frame}
+
+\begin{frame}{Non-theorem: Disjointness of Constructors}
+  The only standard operational rules we cannot prove are the ones like
+  \[
+  \obcast {A \times B} {A' \to B'} t \mapsto \err
+  \]
+  that say that the type constructors $\times, \to, 1$ are all
+  disjoint.
+
+  These are independent of the axioms: there are models where they hold
+  and models where they fail. This is the actual ``design space''.
+
+  %% In fact, written as equidynamism instead of stepping, these are
+  %% \emph{independent} of the other axioms: we can construct adequate
+  %% models where it's true and ones where it's false(!)
+
+  %% The reason is that our definition of function types, product types
+  %% et cetera only defines them \emph{up to isomorphism}, not up to cast
+  %% equivalence. And we know very little about the dynamic type, just
+  %% that everything embeds in it.
+\end{frame}
+
+%% \begin{frame}{Term Dynamism Reduces to Globular Dynamism}
+%%   The presence of the casts means that we can reduce all type dynamism
+%%   to ordering \emph{at the same type}, which is more typical for
+%%   logical relations.
+%%   \begin{mathpar}
+%%     {x \ltdyn y : A \ltdyn B \vdash t \ltdyn u : C \ltdyn D}\\
+%%     \iff\\
+%%     {x \ltdyn x : A \ltdyn A \vdash t \ltdyn \dncast C D u[\upcast A B x/y] : C \ltdyn C}\\
+%%     \iff\\
+%%     {x \ltdyn x : A \ltdyn A \vdash \upcast C D t \ltdyn u[\upcast A B x/y] : D \ltdyn D}\\
+%%     \iff\\
+%%     {y \ltdyn y : B \ltdyn B \vdash \upcast C D t[\dncast A B y/x] \ltdyn u : D \ltdyn D}\\
+%%     \iff\\
+%%     {y \ltdyn y : B \ltdyn B \vdash t[\dncast A B y/x] \ltdyn \dncast C D u : C \ltdyn C}\\
+%%   \end{mathpar}
+%% \end{frame}
+
+\subsection{Models of GTT}
+
+\begin{frame}{Models of GTT}
+  \begin{enumerate}
+  \item In the FSCD paper:
+    \begin{enumerate}
+    \item Sound and complete category-theoretic semantics using double
+      categories.
+    \item Adequate domain-theoretic model where types denote domains
+      equipped with an error ordering modeling $\ltdyn$.
+    \end{enumerate}
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Summary}
+  \begin{enumerate}
+  \item Can derive that the functorial interpretation of casts is the
+    only one that satisfies graduality and $\beta, \eta$.
+  \item In practice, other cast semantics break $\eta$, breaking
+    type-based reasoning!
+  \end{enumerate}
+\end{frame}
+
+\section{CBPV Gradual Type Theory}
+
+\subsection{Overview of Call-by-push-value}
+
+\begin{frame}{Call-by-push-value GTT}
+  Want to extend theorems to CBV, but still want a simple axiomatic
+  presentation of the type theory that works with positives, negatives
+  and effects.
+
+  The answer is a polarized type theory: call-by-push-value.\\
+
+  Conditionally accepted to POPL 2019: ``Gradual Type Theory'' with
+  Dan Licata and Amal Ahmed.
+\end{frame}
+
+\begin{frame}{Types, Judgments of CBPV}
+  \begin{mathpar}
+    \begin{array}{rl}
+      \text{value types}\,\,A ::= & \colorbox{lightgray}{$\dynv$} \mid U \u B \mid 0 \mid A_1 + A_2 \mid 1 \mid A_1 \times A_2\\
+      \text{computation types}\,\,\u B ::= & \colorbox{lightgray}{$\dync$} \mid \u F A \mid \top \mid \u B_1 \with \u B_2 \mid A \to \u B\\
+      \text{types}\,\, T ::= & A \mid \u B\\
+      \Gamma ::= & x:A,\ldots\\
+      \Delta ::= & \bullet : \u B\\
+    \end{array}
+  \end{mathpar}
+  Three judgments
+  \begin{enumerate}
+  \item Computations $\Gamma \vdash M : \u B$: effectful terms
+  \item Values $\Gamma \vdash V : A$: pure terms that produce an $A$
+  \item Stacks $\Gamma \pipe \bullet:\u B \vdash S : \u B'$: strict
+    contexts that force the hole $\bullet : \u B$ whenever the stack
+    itself is forced.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Functions}
+  \begin{mathpar}
+    \inferrule
+    {\Gamma, x : A \vdash M : \u B}
+    {\Gamma \vdash \lambda x. M : A \to \u B}
+
+    \inferrule
+    {\Gamma \vdash M : A \to \u B \and
+    \Gamma \vdash V : A}
+    {\Gamma \vdash M V : \u B}
+
+    (\lambda x. M) V \equiv M[V/x]\\
+    (M : A \to \u B) \equiv \lambda x. M x\\
+
+    \pause
+    \inferrule
+    {\Gamma, x: A \pipe \Delta \vdash S : \u B }
+    {\Gamma \pipe \Delta \vdash \lambda x. S : A \to \u B}
+
+    \inferrule
+    {\Gamma \pipe \Delta \vdash S : A \to \u B\and
+      \Gamma \vdash V : A}
+    {\Gamma \pipe \Delta \vdash S V : \u B}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Strict Pairs}
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash V_i : A_i}
+    {\Gamma \vdash (V_1,V_2) : A_1\times A_2}
+
+    \inferrule
+    {\Gamma \vdash V : A_1\times A_2\and
+      \Gamma, x_1:A_1,x_2:A_2 \vdash E : T}
+    {\Gamma \vdash \pmpairWtoXYinZ V {x_1} {x_2} E : T}
+
+    \pmpairWtoXYinZ {(V_1,V_2)} {x_1} {x_2} E \equiv E[V_1/x_1][V_2/x_2]\\
+    x : A_1\times A_2 \vdash E \equiv \pmpairWtoXYinZ x {x_1}{x_2} E[(x_1,x_2)/x]
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Shifts}
+  A computation of type $\u F A$ is something that performs effects
+  and returns $A$s.
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash V : A}
+    {\Gamma \vdash \ret V : \u F A}
+
+    \inferrule
+    {\Gamma \vdash M : \u F A\and
+      \Gamma, x : A \vdash N : \u B}
+    {\Gamma \vdash \bindXtoYinZ M x N : \u B}
+
+    \inferrule
+    {\Gamma\pipe\Delta \vdash S : \u F A\and
+      \Gamma, x : A \vdash N : \u B}
+    {\Gamma\pipe\Delta \vdash \bindXtoYinZ S x N : \u B}
+
+    \bindXtoYinZ {\ret V} x N \equiv N[V/x]\\
+    \bullet : \u F A \vdash S \equiv \bindXtoYinZ \bullet x S[\ret x/\bullet]
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Shifts}
+  A value of type $U \u B$ is a thunk that when forced behaves as $\u B$.
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash M : \u B}
+    {\Gamma \vdash \thunk M : U \u B}
+
+    \inferrule
+    {\Gamma \vdash V : U \u B}
+    {\Gamma \vdash \force V : \u B}\\
+
+    \force \thunk M \equiv M\\
+    V : U \u B \equiv \thunk \force V
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Embedding CBV, CBN}
+  CBV types become value types:
+  \begin{align*}
+    (A \mathop{+_{cbv}} A') &= A + A'\\
+    (A \mathop{\to_{cbv}} A') &= U(A \to \u F A')
+  \end{align*}
+  And CBV computations:
+  \[ \Gamma \mathrel{\vdash^{cbv}} M : A \Rightarrow \Gamma \vdash M : \u F A \]
+  CBN types become computation types:
+  \begin{align*}
+    (\u B \mathop{+_{cbn}} \u B') &= \u F (U\u B + U \u B')\\
+    (\u B \mathop{\to_{cbn}} \u B') &= U\u B \to \u B'
+  \end{align*}
+  And CBN computations:
+  \[
+  x_1:\u B_1,\ldots \vdash M : \u B \Rightarrow x_1:U\u B_1,\ldots \vdash M : \u B
+  \]
+\end{frame}
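+
+%% Backup sketch: what this embedding does to a CBV application (the
+%% standard left-to-right CBPV translation; as on this slide, we write
+%% the translated term with the same metavariables).
+%% \begin{frame}{Embedding CBV: Application}
+%%   If $\Gamma \mathrel{\vdash^{cbv}} t : A \mathop{\to_{cbv}} A'$ and
+%%   $\Gamma \mathrel{\vdash^{cbv}} u : A$, then the application becomes
+%%   \[
+%%   \Gamma \vdash \bindXtoYinZ t f \bindXtoYinZ u x (\force f)\, x : \u F A'
+%%   \]
+%%   Evaluate the function, then the argument, then force the thunk and
+%%   apply.
+%% \end{frame}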
+
+\subsection{Casts and Dynamic Types}
+
+\begin{frame}{CBPV Gradual Type Theory}
+  Goal: gradualize CBPV. Should reproduce our CBN work and the
+  existing CBV semantics.
+
+  \begin{enumerate}
+  \item Add dynamism ordering $\ltdyn$.
+  \item Define casts by universal property.
+  \item Add dynamic type(s?)
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Dynamism Ordering}
+  Easy: new ordering judgment for types and values/stacks/computations.
+  Everything is monotone.
+
+  \begin{mathpar}
+    \inferrule
+    {A \ltdyn A'}
+    {\u F A \ltdyn \u F A'}
+
+    \inferrule
+    {A_1 \ltdyn A_1' \and A_2 \ltdyn A_2'}
+    {A_1 \times A_2 \ltdyn A_1' \times A_2'}\\
+
+    \inferrule
+    {\u B \ltdyn \u B'}
+    {U \u B \ltdyn U \u B'}
+
+    \inferrule
+    {A \ltdyn A' \and \u B \ltdyn \u B'}
+    {A \to \u B \ltdyn A' \to \u B'}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Errors}
+  \begin{mathpar}
+    \Gamma \vdash \err : \u B\\
+
+    \inferrule
+    {}
+    {\Gamma \vdash \err \ltdyn M : \u B}
+
+    \inferrule
+    {}
+    {\Gamma \vdash S[\err/\bullet] \equidyn \err : \u B}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Which casts?}
+  If we embed from CBV and CBN, we get ``Kleisli'' and ``co-Kleisli''
+  casts.
+
+  From CBV:
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash V : A \and A \ltdyn A'}
+    {\Gamma \vdash \upcast{A}{A'}V : \u F A'}
+
+    \inferrule
+    {\Gamma \vdash V : A' \and A \ltdyn A'}
+    {\Gamma \vdash \dncast{A}{A'}V : \u F A}
+  \end{mathpar}
+
+  From CBN:
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash V : U \u B \and \u B \ltdyn \u B'}
+    {\Gamma \vdash \upcast{\u B}{\u B'}V : \u B'}
+
+    \inferrule
+    {\Gamma \vdash V : U \u B' \and \u B \ltdyn \u B'}
+    {\Gamma \vdash \dncast{\u B}{\u B'}V : \u B}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Embeddings are Values, Projections are Stacks}
+  Observation from CBV: in practice the upcasts are ``pure'': the
+  basic ones are sum type injections, others are functorial liftings
+  of pure functions.
+
+  CBPV lets us axiomatize this directly, along with the \emph{dual}:
+  downcasts are \emph{stacks} between computation types:
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash V : A}
+    {\Gamma \vdash \upcast{A}{A'}V : A'}
+
+    %% \inferrule
+    %% {\Gamma \vdash M : \u B}
+    %% {\Gamma \vdash \dncast{\u B}{\u B'}M : \u B'}
+
+    \inferrule
+    {\Gamma\pipe \Delta \vdash S : \u B'}
+    {\Gamma\pipe \Delta \vdash \dncast{\u B}{\u B'}S : \u B}
+  \end{mathpar}
+  Should be useful for compilation/optimization.
+\end{frame}
+
+\begin{frame}{Embeddings are Values, Projections are Stacks}
+  ``Missing'' casts come from the shifts:
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash V : A \and A \ltdyn A'}
+    {\Gamma \vdash \upcast{A}{A'}V : A'}
+
+    %% \inferrule
+    %% {\Gamma \vdash M : \u B}
+    %% {\Gamma \vdash \dncast{\u B}{\u B'}M : \u B'}
+
+    \inferrule
+    {\Gamma\pipe \Delta \vdash S : \u B' \and \u B \ltdyn \u B'}
+    {\Gamma\pipe \Delta \vdash \dncast{\u B}{\u B'}S : \u B}
+
+    \inferrule
+    {\Gamma \vdash M : \u F A' \and \inferrule{A \ltdyn A'}{\u F A \ltdyn \u F A'}}
+    {\Gamma \vdash \dncast{\u F A}{\u F A'}M : \u F A}
+
+    \inferrule
+    {\Gamma \vdash V : U \u B \and
+      \inferrule{\u B \ltdyn \u B'}{U \u B \ltdyn U \u B'}}
+    {\Gamma \vdash \upcast{U \u B}{U \u B'}V : U\u B'}
+  \end{mathpar}
+\end{frame}
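+
+%% Backup sketch: one natural way to recover the ``Kleisli'' casts of
+%% the previous slides from the value upcasts, stack downcasts, and the
+%% shifts.
+%% \begin{frame}{Recovering the Kleisli Casts}
+%%   \begin{mathpar}
+%%     \dncast{A}{A'}V \equidyn \dncast{\u F A}{\u F A'}(\ret V)
+%%
+%%     \upcast{\u B}{\u B'}V \equidyn \force (\upcast{U \u B}{U \u B'} V)
+%%   \end{mathpar}
+%%   The CBV-style downcast of a value goes through $\u F$; the CBN-style
+%%   upcast of a thunk goes through $U$.
+%% \end{frame}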
+
+\begin{frame}{Cast Universal Properties}
+  Just like for CBN:
+  \begin{mathpar}
+    \inferrule
+    {V : A \and A \ltdyn A'}
+    {V \ltdyn \upcast A {A'} V : A \ltdyn A'}
+
+    \inferrule
+    {V \ltdyn V' : A \ltdyn A'}
+    {\upcast A {A'} V \ltdyn V' : A'}
+
+    \inferrule
+    {M : \u B' \and \u B \ltdyn \u B'}
+    {\dncast{\u B}{\u B'}M \ltdyn M : \u B \ltdyn \u B'}
+
+    \inferrule
+    {M \ltdyn M' : \u B \ltdyn \u B'}
+    {M \ltdyn \dncast {\u B}{\u B'} M' : \u B}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Strict Product Cast}
+  Derivable:
+  \begin{mathpar}
+    \upcast{A_1 \times A_2}{A_1'\times A_2'}V \equidyn\\
+    \pmpairWtoXYinZ V {x_1}{x_2} (\upcast{A_1}{A_1'}x_1, \upcast{A_2}{A_2'}x_2)\\
+    \end{mathpar}
+
+    \begin{mathpar}
+      
+    \dncast{\u F (A_1 \times A_2)}{\u F (A_1' \times A_2')}{M}\\
+    \begin{array}{rcl}
+&
+      \equidyn & 
+      \bindXtoYinZ{M}{p'} {\pmpairWtoXYinZ{p'}{x_1'}{x_2'}{}}\\
+      & & \bindXtoYinZ{\dncast{\u F A_1}{\u F A_1'}{\ret x_1'}}{x_1}\\
+      & & \bindXtoYinZ{\dncast{\u F A_2}{\u F A_2'}{\ret x_2'}}{x_2} {\ret (x_1,x_2) }\\
+    \end{array}
+    \end{mathpar}
+    Note: evaluation order of the two casts doesn't matter.
+\end{frame}
+
+\begin{frame}{CBV and CBN Uniqueness Principles}
+  \begin{mathpar}
+    \dncast{A \to \u B}{A' \to \u B'}M \equidyn
+    \lambda x. \dncast{\u B}{\u B'}(M (\upcast{A}{A'}x))
+  \end{mathpar}
+  \begin{mathpar}
+      \upcast{U (A \to \u B)}{U (A' \to \u B')}{f} \equidyn
+    \begin{array}{ll}
+      \thunk (\lambda x'. & \bindXtoYinZ{\dncast{\u F A}{\u F A'}{(\ret x')}}{x}{} \\
+      & { \force{(\upcast{U \u B}{U \u B'}{(\thunk{(\force{f}\,x)})})}})
+    \end{array}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Two Dynamic Types}
+  Add a dynamic value type $\dynv$ and a dynamic computation type
+  $\dync$.
+
+  \begin{mathpar}
+    \inferrule
+    {}
+    {A \ltdyn \dynv}
+
+    \inferrule
+    {}
+    {\u B \ltdyn \dync}
+  \end{mathpar}
+
+  So we always have casts:
+  \begin{mathpar}
+    \inferrule{V : A}{\upcast{A}{\dynv}V : \dynv}\and
+    \inferrule{M : \dync}{\dncast{\u B}{\dync}M : \u B}
+  \end{mathpar}
+\end{frame}
+
+%% \begin{frame}{Translation to CBPV}
+%%   A model of GTT in CBPV
+%%   \begin{enumerate}
+%%   \item A gradual value type $A$ denotes a pair of a type $[A]$ and an
+%%     interpretation of the upcast $\upcast{[A]}{[\dynv]}$ and the
+%%     downcast $\dncast{\u F [A]}\u F{\dynv}$ that form an ep pair.
+%%   \item A gradual computation type $\u B$ is a pair of type $[\u B]$
+%%     and interpretations of $\dncast{\u B}{\dync}$ and $\upcast{U \u
+%%       B}{U\u \dync}$ that form an ep pair.
+%%   \item $A \ltdyn A'$ is modeled by an ep pair $\upcast{[A]}{[A']}$
+%%     and $\dncast{\u F [A]}{\u F [A']}$ that factorizes the casts for
+%%     $A, A'$.
+%%   \end{enumerate}
+%% \end{frame}
+
+\begin{frame}{Implementing Two Dynamic Types}
+  Can use CBPV as a metalanguage to define models of CBPV GTT.
+  \pause
+  \begin{mathpar}
+    \dynv \cong (1+1) + U\dync + (\dynv \times \dynv)\\
+    \dync \cong (\dynv \to \u F \dynv)
+  \end{mathpar}
+
+  $\dynv$: S-expressions.\\
+
+  $\dync$: supports call-by-value single argument functions as the
+  only computation type, similar to ML.
+\end{frame}
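+
+%% Backup sketch: how the casts for one summand could be implemented
+%% through this isomorphism.  The injection/case syntax for the ternary
+%% sum is written informally here.
+%% \begin{frame}{Ground Casts from the Isomorphism}
+%%   Writing $\mathrm{inj}_{1+1}$ for the evident injection into $\dynv$:
+%%   \begin{mathpar}
+%%     \upcast{1+1}{\dynv}V \equidyn \mathrm{inj}_{1+1}\, V
+%%
+%%     \dncast{\u F (1+1)}{\u F \dynv}(\ret V) \equidyn
+%%     \caseofXthenYelseZ{V}{\mathrm{inj}_{1+1}\, x.\, \ret x}{\mathrm{otherwise}.\, \err}
+%%   \end{mathpar}
+%%   Thunks ($U \dync$) and pairs ($\dynv \times \dynv$) are handled
+%%   analogously.
+%% \end{frame}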
+
+\begin{frame}{Implementing Two Dynamic Types}
+  Can use CBPV as a metalanguage to define models of CBPV GTT.
+  \begin{mathpar}
+    \dynv \cong (1+1) + U\dync + (\dynv \times \dynv)\\
+    \dync \cong (\dynv \to \dync) \with \u F \dynv
+  \end{mathpar}
+
+  $\dynv$: S-expressions.\\
+
+  $\dync$: Supports Scheme's multi-argument and variable-arity
+  functions.
+\end{frame}
+
+\section{Conclusion}
+
+\begin{frame}{Summary}
+  \begin{enumerate}
+  \item Deconstructed gradual typing to basic axioms
+  \item By axiomatizing term precision/dynamism, we can give casts a
+    specification.
+  \item Graduality + $\beta\eta$ = functorial interpretation of casts,
+    so all other cast semantics sacrifice one or the other.
+  \item Polarized languages make effectful program equations as easy as
+    pure ones.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Conclusion}
+  \begin{itemize}
+  \item Type theoretic approach justifies the ``most canonical''
+    version of gradual typing. Can other semantics be similarly
+    justified?
+  \item Plays nicely with good type theories: what to gradualize next?
+    Polarized session types?
+  \item Papers: FSCD '18, ICFP '18, POPL '19
+  \end{itemize}
+  
+  \pause
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\dncast A B u \ltdyn u : A \ltdyn B}
+
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash t \ltdyn \dncast A B u : A}\\
+
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \upcast A B t : A \ltdyn B}
+
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash \upcast A B t \ltdyn u : B}
+  \end{mathpar}
+
+\end{frame}
+\end{document}
+
+%% Local Variables:
+%% compile-command: "pdflatex talk.tex"
+%% End:
diff --git a/talk/cmu-fall-2018/fscd-talk.tex b/talk/cmu-fall-2018/fscd-talk.tex
new file mode 100644
index 0000000000000000000000000000000000000000..224583f53348431ac18e58c3e4fc720e63b6598e
--- /dev/null
+++ b/talk/cmu-fall-2018/fscd-talk.tex
@@ -0,0 +1,797 @@
+\documentclass{beamer}
+\usetheme{boxes}
+
+\newcommand{\notepr}[1]{}
+
+\usepackage{rotating}
+\usepackage{listings}
+\usepackage{xcolor}
+%% \lstset{
+%%   basicstyle=\ttfamily,
+%%   escapeinside=~~
+%% }
+
+\usepackage{microtype}
+\usepackage{latexsym,amsmath,amssymb,amsthm}
+\usepackage{mathpartir}
+\usepackage{tikz}
+\usepackage{tikz-cd}
+\input{talk-defs}
+\newcommand{\ltdyn}{\dynr}
+\newcommand{\uarrow}{\mathrel{\rotatebox[origin=c]{-30}{$\leftarrowtail$}}}
+\newcommand{\darrow}{\mathrel{\rotatebox[origin=c]{30}{$\twoheadleftarrow$}}}
+\renewcommand{\upcast}[2]{\langle{#2}\uarrow{#1}\rangle}
+\renewcommand{\dncast}[2]{\langle{#1}\darrow{#2}\rangle}
+\newcommand{\gtdyn}{\sqsupseteq}
+
+\newcommand{\converge}{\Downarrow}
+\newcommand{\diverge}{\Uparrow}
+
+\title{Call-by-Name Gradual Type Theory}
+\author{Max S. New\inst{1} \and Daniel R. Licata\inst{2}}
+\institute{\inst{1}Northeastern University \and \inst{2}{Wesleyan University}}
+\date[]{FSCD 2018}
+
+\begin{document}
+\begin{frame}
+  \titlepage
+\end{frame}
+
+\section{What is Gradual Typing?}
+
+\begin{frame}{Gradual Typing}
+  Gradually Typed languages support a \emph{mix} of static and dynamic
+  typing, while maintaining benefits of each.
+  \begin{enumerate}
+  \item Dynamically typed parts of the language have minimal syntactic
+    discipline.
+  \item Statically typed parts of the language have sound type-based
+    reasoning.
+  \item Allow for safe/sound interaction by inserting dynamic type
+    checks at the boundary.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Gradual Typing}
+  Applications
+  \begin{enumerate}
+  \item Allow static code to interact with legacy/untrusted code.
+  \item Gradually strengthen the types in an existing codebase
+    (Scripts to Programs)
+    \begin{enumerate}
+    \item \pause Dynamic $\to$ Partially Static $\to$ Static
+    \item \pause ADT $\to$ GADT $\to$ Dependent Types
+    \end{enumerate}
+  \end{enumerate}
+  We already have to write this kind of glue code; gradual typing
+  is about doing it compositionally and automatically.
+\end{frame}
+
+\begin{frame}{The Problem with Gradual Typing}
+  \begin{itemize}
+  \item Many implementations: Typed Racket, Thorn, Safe Typescript,
+    Reticulated Python
+  \item Useful type systems have strong type-based reasoning
+    principles, and categorical semantics!
+    \begin{enumerate}
+    \item Extensionality principles ($\eta$ laws), Representation
+      independence, Parametricity.
+    \item Need to respect these if we want to add gradual typing to
+      existing \emph{typed} languages.
+    \end{enumerate}
+  \item \pause But the state of the art$\ldots$
+    \begin{enumerate}
+    \item Many incompatible semantics of casts have been proposed:
+      \begin{itemize}
+      \item For simple types: lazy, eager, up-down, partially eager,
+        transient
+      \item for state: monotonic state, proxied state
+      \item Are these really ``sound''? What does that mean exactly?
+      \end{itemize}
+    \item Blame Safety Theorem: 
+      \begin{enumerate}
+      \item Gradually typed programs \emph{do} go wrong, but
+      \item \pause If a type error occurs, the well-typed
+      part of the program is never \emph{blamed}.
+      \end{enumerate}
+    \end{enumerate}
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Semantic Gradual Typing}
+  Wanted: semantic approach to gradual typing.
+  \begin{enumerate}
+  \item Which semantics enable which type-based reasoning principles?
+  \item Help guide design of gradual typing with different programming
+    features.
+  \end{enumerate}
+\end{frame}
+
+\section{Gradual CCCs and GTT}
+\begin{frame}{First Step: Gradual CCCs and Gradual Type Theory}
+  \begin{footnotesize}
+  \begin{center}
+    \begin{tabular}{|c|c|}
+      \hline
+      \textbf{Type Theory} & \textbf{Category} \\
+      \hline
+      \hline
+      Negative Typed Lambda Calculus & Cartesian Closed Categories\\
+      \hline
+      Negative \textbf{Gradual} Lambda Calculus? & Gradual CCC?\\
+      \hline
+    \end{tabular}
+  \end{center}
+  \end{footnotesize}
+  \begin{itemize}
+  \item Model the runtime semantics of gradual typing, not the surface
+    syntax.
+  \item Type Theory: syntax for proving results 
+  \item Category Theory: Develop concrete models based on domains and
+    logical relations.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{A Guide: Domain Theory}
+  \begin{itemize}
+  \item Gradual Typing is very similar to domain theory with universal
+    types
+    \begin{enumerate}
+    \item Dynamic type $\sim$ Universal object
+    \item Type casts $\sim$ Embedding-projection pairs
+    \end{enumerate}
+  \item But the usual notion of ep pair is not an \emph{adequate}
+    model of gradual typing:
+    \begin{enumerate}
+    \item Models type errors as $\bot$, same as diverging computation.
+    \item Come back to this later
+    \end{enumerate}
+  \item Analogy exploited in work on semantics of contracts in the mid
+    2000s by Findler, Blume and Felleisen (some unpublished), but
+    abandoned.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Desiderata}
+  What should GCCC/GTT be?
+  \begin{itemize}
+  \item Have a fully dynamically typed sub-language that is
+    computationally complete for the language.
+    \begin{itemize}
+    \item \pause There should be a \emph{universal type} $\dyn$ such that
+      every type is a \emph{retract} of it.
+    \end{itemize}
+  \item \pause Have static types, with type-based reasoning principles.
+    \begin{itemize}
+    \item \pause The category should be a CCC (satisfies $\beta, \eta$)
+    \end{itemize}
+  \item \pause Have a smooth transition from dynamic to static typing.
+    \begin{itemize}
+    \item \pause ???
+    \end{itemize}
+  \end{itemize}  
+\end{frame}
+
+\begin{frame}{Graduality}
+  How do we formalize the idea of a ``smooth transition''?
+  \begin{itemize}
+  \item Siek-Vitousek-Cimini-Boyland, SNAPL 2015, propose the
+    \emph{gradual guarantee}.
+  \item \pause Intuition: changing the precision of the types of the
+    program should only add or remove checking and not otherwise
+    change behavior.
+  \item \pause More Formally: refining the \emph{types} of a gradual
+    program should result in a refinement of \emph{behavior}.
+  \item \pause Analogous to parametricity: changing the type can only result
+    in certain changes to behavior.
+  \end{itemize}
+\end{frame}
+
+% Refining a type means...
+\begin{frame}{Type Dynamism Ordering}
+  $A \ltdyn B$ means $A$ is ``less dynamic''/ ``more precise'' than $B$.
+  \pause
+  \begin{mathpar}
+    \begin{array}{rrl}
+      \text{types} & A,B,C \bnfdef & \dyn \bnfalt A \to B \bnfalt A \times B \bnfalt 1
+    \end{array}\\
+    \pause
+    \inferrule
+    {}
+    {A \ltdyn \dyn}
+
+    \inferrule
+    {}
+    {A \ltdyn A}
+
+    \inferrule
+    {A \ltdyn B \ltdyn C}
+    {A \ltdyn C}\\
+
+    \inferrule
+    {A \ltdyn A' \and B \ltdyn B'}
+    {A \to B \ltdyn A' \to B'}
+
+    \inferrule
+    {A \ltdyn A' \and B \ltdyn B'}
+    {A \times B \ltdyn A' \times B'}
+  \end{mathpar}
+  \pause
+  Why covariant function space? Consider
+  \[ N \to N \sqsubseteq \dyn \to \dyn \]
+\end{frame}
+
+% Now what does it mean to refine the behavior of a program... 
+\begin{frame}{Operational Definition of Graduality}
+  Extend type ordering to terms by congruence:
+  \begin{itemize}
+  \item Define $t \ltdyn u$ to hold if $t$ is formed by replacing the
+    type annotations in $u$ with less dynamic types.
+  \end{itemize}
+  \begin{theorem}[Gradual Guarantee]
+    If $t \ltdyn u$ and $\cdot \vdash t : A$ and $\cdot \vdash u : B$,
+    then either
+    \begin{enumerate}
+    \item $t \converge \err$
+    \item $t \diverge, u \diverge$
+    \item $t \converge v, u \converge v'$ and $v \ltdyn v'$
+    \end{enumerate}
+  \end{theorem}
+  \pause Defines an ordering on effects, but where error is the least
+  element, not divergence.
+\end{frame}
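+
+%% Backup sketch: a concrete instance of the theorem.  Assume a base
+%% type Num and a dynamically typed value v : ? that happens to be a
+%% boolean.
+%% \begin{frame}{Gradual Guarantee: Example}
+%%   With $v : \dyn$ holding $\texttt{true}$,
+%%   \[ (\lambda x{:}\texttt{Num}.\, x)\,v \ltdyn (\lambda x{:}\dyn.\, x)\,v \]
+%%   since $\texttt{Num} \ltdyn \dyn$.  The right-hand side returns
+%%   $\texttt{true}$; the left-hand side fails its inserted cast and
+%%   raises $\err$, which is case (1) of the theorem.  Making the type
+%%   more precise can introduce an error, but can never change a
+%%   successful answer.
+%% \end{frame}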
+
+\begin{frame}{Graduality Violations}
+  Rules out:
+  \begin{itemize}
+  \item Making the program more static and thereby turning a type error
+    into a success(!)
+  \item Or changing the successful result
+  \item Or causing a terminating program to diverge
+  \end{itemize}
+
+  Nota Bene:
+  \begin{itemize}
+  \item Rules out unrestricted \emph{catching} of type errors
+    \emph{within} the gradual language.
+  \item So if the language is sequential, type errors must crash.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Extensional Term Dynamism}
+  To model this theorem, we want an \emph{axiomatic, extensional}
+  version of the term ordering. I.e., we include the rules:
+  
+  \begin{mathpar}
+    \inferrule*
+    {~}
+    {\err \ltdyn t}
+
+    \inferrule*[Right=$\beta\to$]
+    {~}
+    {(\lambda x.t) u \equidyn t[u/x]}
+
+    \inferrule*[Right=$\eta\to$]
+    {t : A \to B}
+    {t \equidyn \lambda x. t x}
+
+    \inferrule*[Right=$\beta\times$]
+    {~}
+    {\pi_i (t_1, t_2) \equidyn t_i}
+
+    \inferrule*[Right=$\eta\times$]
+    {t : A \times B}
+    {t \equidyn (\pi_1 t, \pi_2 t)}
+
+    \inferrule*[Right=$\eta 1$]
+    {t : 1}
+    {t \equidyn ()}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Term Dynamism Ordering}
+  \alt<2>{\emph{Heterogeneously typed congruence rules}}{Also add \emph{congruence rules}}
+  \begin{mathpar}
+    \alt<2>{\inferrule {t \ltdyn t' : A \to B \ltdyn A' \to B' \and u
+        \ltdyn u' : A \ltdyn A'} {t\, u \ltdyn t' \, u' : B \ltdyn
+        B'}} {\inferrule {t \ltdyn t' \and u \ltdyn u'} {t\, u \ltdyn
+        t' \, u'}}
+    
+    \alt<2>{\inferrule
+    {\Gamma,x:A\ltdyn \Gamma',y:B \vdash t \ltdyn u : C \ltdyn C'}
+    {\Gamma \ltdyn \Gamma' \vdash \lambda x : A. t \ltdyn \lambda y : B . u : A \to C \ltdyn B \to C'}}
+    {\inferrule
+    {x:A\ltdyn y:B \vdash t \ltdyn u \and A \ltdyn B}
+    {\lambda x : A. t \ltdyn \lambda y : B . u}}
+
+    \alt<2>{
+    {\inferrule{t_1 \ltdyn u_1 : A_1 \ltdyn B_1 \and t_2\ltdyn u_2 : A_2 \ltdyn B_2}{(t_1,t_2)\ltdyn (u_1,u_2) : A_1 \times A_2 \ltdyn B_1 \times B_2}}
+    }
+    {\inferrule{t_1 \ltdyn u_1 \and t_2\ltdyn u_2}{(t_1,t_2)\ltdyn (u_1,u_2)}}
+
+    \alt<2>
+    {\inferrule{t \ltdyn u : A_1 \times A_2 \ltdyn B_1 \times B_2}{\pi_i t \ltdyn \pi_i u : A_i \ltdyn B_i}}
+    {\inferrule{t \ltdyn u}{\pi_i t \ltdyn \pi_i u}\\}
+
+    \alt<2>{
+    \inferrule
+    {\Gamma \vdash t : A}
+    {\Gamma \ltdyn \Gamma \vdash t \ltdyn t}}
+    {\inferrule{~}{t \ltdyn t}}
+
+    \alt<2>{
+      \inferrule{\Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A'\\
+        \Gamma' \ltdyn \Gamma'' \vdash t' \ltdyn t'' : A' \ltdyn A''}
+                {\Gamma \ltdyn \Gamma'' \vdash t \ltdyn t'' : A \ltdyn A''}
+    }{\inferrule{t \ltdyn t' \ltdyn t''}{t \ltdyn t''}}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Formalizing Graduality}
+  How to formalize this in our categories?
+  \begin{enumerate}
+  \item Types and terms are ordered, type constructors and term
+    constructors are monotone\ldots\pause Use internal categories!
+  \item \pause Want categories internal to preorders, i.e., thin categories.
+  \item \pause Categories internal to categories are well-studied and called
+    \emph{double categories}.
+  \end{enumerate}
+\end{frame}
+
+%\subsection{Double Categories}
+\begin{frame}{Double Categories}
+  A double category is a kind of 2-category with two different kinds
+  of 1-arrows between the same objects.\\
+  Examples:
+  \begin{itemize}
+  \item Functions and Relations
+  \item EP Pairs and Continuous functions
+  \item Functors and Profunctors
+  \item Adjunctions and Functors
+  \end{itemize}
+\end{frame}
+
+\begin{frame}[fragile]{Double Categories}
+  A double category consists of
+  \begin{itemize}
+  \item A set of objects $O$
+  \item A set of \emph{vertical arrows} $p,q,r$, with category structure. 
+    \begin{tikzcd}
+      A \arrow[d, "p"]\\ B
+    \end{tikzcd}
+  \item A set of \emph{horizontal arrows}, with category structure.
+    \[ 
+    \begin{tikzcd}
+      A \arrow[r, "f"] & B
+    \end{tikzcd}
+    \]
+  \item A set of \emph{squares} which are ``framed'' by horizontal, vertical arrows:
+    \[\begin{tikzcd}
+      A \arrow[dd,"p"] \arrow[rr, "f"] &  & B \arrow[dd,"r"] \\
+      & \alpha &  \\
+      C \arrow[rr, "g"] &  & D
+    \end{tikzcd}\]
+  \end{itemize}
+\end{frame}
+
+\begin{frame}[fragile]{Double Categories}
+  Squares can be composed in 2 ways\\
+  \begin{columns}
+    \begin{column}{0.45\textwidth}
+      First, \emph{horizontally}\\
+  \begin{tikzcd}
+    A \arrow[r]\arrow[d] & B \arrow[r]\arrow[d] & C\arrow[d]\\
+    A' \arrow[r] & B'\arrow[r]& C'
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      And for every vertical arrow, a horizontal identity square:
+      \begin{tikzcd}
+A \arrow[d, "p"] \arrow[r, "id"] & A \arrow[d, "p"] \\
+B \arrow[r, "id"] & B
+\end{tikzcd}
+    \end{column}
+  \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      Second, \emph{vertically}\\
+  \begin{tikzcd}
+A \arrow[d] \arrow[r] & B \arrow[d] \\
+A' \arrow[d] \arrow[r] & B' \arrow[d] \\
+A'' \arrow[r] & B''
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      And for every horizontal arrow, a vertical identity square:
+      \begin{tikzcd}
+A \arrow[d, "id"] \arrow[r, "f"] & B \arrow[d, "id"] \\
+A \arrow[r, "f"] & B
+\end{tikzcd}
+    \end{column}
+  \end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing as Double Categories}
+  \begin{itemize}
+  \item The objects are the types
+  \item The \emph{vertical arrows} are the type dynamism judgments $A
+    \ltdyn B$ (so a thin category)
+  \item The \emph{horizontal arrows} are the terms $x :A \vdash t : B$
+  \item The \emph{squares} are the term dynamism:
+    \begin{columns}
+      \begin{column}{0.45\textwidth}
+        \begin{tikzcd}
+          x:A \arrow[dd] \arrow[rr, "t"] &  & B \arrow[dd] \\
+          & \sqsubseteq &  \\
+          x':A' \arrow[rr, "t'"] &  & B'
+    \end{tikzcd}        
+      \end{column}
+      \begin{column}{0.45\textwidth}
+        \[
+        x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' : B \ltdyn B'
+        \]
+      \end{column}
+    \end{columns}
+
+  \end{itemize}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing as Double Categories}
+  Horizontal composition of squares is a \emph{substitution principle}:
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+    A \arrow[r,"u"]\arrow[d] & B \arrow[r,"t"]\arrow[d] & C\arrow[d]\\
+    A' \arrow[r,"u'"] & B'\arrow[r,"t'"]& C'
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{tikzcd}
+        A \arrow[d] \arrow[r, "id"] & A \arrow[d] \\
+        B \arrow[r, "id"] & B
+      \end{tikzcd}      
+    \end{column}
+    \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+        \inferrule{x \ltdyn x' \vdash t \ltdyn t'\\ u \ltdyn u'}
+                  {t[u/x] \ltdyn t'[u'/x]}
+      \end{mathpar}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+          \inferrule
+          {}
+          {x\ltdyn x' : A \ltdyn A' \vdash x \ltdyn x' : A \ltdyn A'}
+      \end{mathpar}
+    \end{column}
+    \end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing as Double Categories}
+  Vertical composition of squares is \emph{transitivity and reflexivity}.
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+\Gamma \arrow[d] \arrow[r,"t"] & A \arrow[d] \\
+\Gamma' \arrow[d] \arrow[r,"t'"] & A' \arrow[d] \\
+\Gamma'' \arrow[r,"t''"] & A''
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{tikzcd}
+A \arrow[d, "id"] \arrow[r, "t"] & B \arrow[d, "id"] \\
+A \arrow[r, "t"] & B
+\end{tikzcd}
+    \end{column}
+    \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+        \inferrule{\Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A'\\
+          \Gamma' \ltdyn \Gamma'' \vdash t' \ltdyn t'' : A' \ltdyn A''}
+                  {\Gamma \ltdyn \Gamma'' \vdash t \ltdyn t'' : A \ltdyn A''}
+      \end{mathpar}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+          \inferrule
+          {\Gamma \vdash t : A}
+          {\Gamma \ltdyn \Gamma \vdash t \ltdyn t : A \ltdyn A}
+      \end{mathpar}
+    \end{column}
+    \end{columns}
+\end{frame}
+
+%\subsection{A Universal Property for Casts}
+
+\begin{frame}{Type Casts}
+  To actually get dynamic type checking, we need casts.
+
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash t : A \and A \ltdyn B}
+    {\Gamma \vdash \upcast A B t : B}
+
+    \inferrule
+    {\Gamma \vdash u : B \and A \ltdyn B}
+    {\Gamma \vdash \dncast A B u : A}
+  \end{mathpar}
+
+  \pause By having the term ordering defined \emph{first}, we can
+  \emph{characterize} them by a universal property.
+\end{frame}
+
+\begin{frame}{Universal Property of Casts}
+  How should casts behave?
+  Intuition: the casted term should act as much like the original term
+  as possible while still satisfying the new type.
+
+  So for a downcast, the casted term should refine the original:
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\dncast A B u \ltdyn u : A \ltdyn B}
+  \end{mathpar}
+
+  \pause But this would be satisfied even by the type error $\err_A$.
+  \pause
+  We want to make sure that the casted term has \emph{as much}
+  behavior as possible: i.e., it's the \emph{biggest} term below
+  $u$.
+  \begin{mathpar}
+    \inferrule
+    {t \ltdyn u : A \ltdyn B}
+    {t \ltdyn \dncast A B u : A}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\dncast A B u$ is the greatest
+  lower bound of $u$ in $A$.
+\end{frame}
+
+\begin{frame}{Universal Property of Casts}
+  Upcasts are the exact dual
+
+  For an upcast, the casted term should be \emph{more} dynamic than
+  the original:
+  \begin{mathpar}
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \upcast A B t : A \ltdyn B}
+  \end{mathpar}
+
+  But it has no more behaviors than dictated by the original term,
+  it's the \emph{least} term above $t$.
+
+  \begin{mathpar}
+    \inferrule
+    {t \ltdyn u : A \ltdyn B}
+    {\upcast A B t \ltdyn u : B}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\upcast A B t$ is the least upper
+  bound of $t$ in $B$.
+\end{frame}
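+% Added sketch: the two displayed rules pin the upcast down uniquely
+% (up to equi-dynamism); only those rules and transitivity are used.
+\begin{frame}{Casts are Determined Uniquely (sketch)}
+  Suppose $u_1$ and $u_2$ both satisfy the upcast specification for
+  $t : A$ along $A \ltdyn B$, i.e., for $i = 1,2$:
+  \begin{mathpar}
+    t \ltdyn u_i : A \ltdyn B
+    \and
+    \inferrule
+    {t \ltdyn u : A \ltdyn B}
+    {u_i \ltdyn u : B}
+  \end{mathpar}
+  Then $t \ltdyn u_2 : A \ltdyn B$ gives $u_1 \ltdyn u_2 : B$, and
+  symmetrically $u_2 \ltdyn u_1 : B$, so $u_1 \equidyn u_2$.
+
+  The same argument applies to downcasts.
+\end{frame}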
+% TODO: ugly
+\begin{frame}[fragile]{Universal Property for Casts}
+  The upcast is the universal way to turn a vertical arrow horizontal
+  \emph{covariantly} and the downcast is the universal way to do it
+  contravariantly.
+
+  \[\begin{tikzcd}
+A' \arrow[d] & A \arrow[r, "f_!"] \arrow[l, "f^*"'] \arrow[d, "f"] & A' \arrow[d] \\
+A' & A' \arrow[r] \arrow[l] & A'
+  \end{tikzcd}
+  \]
+
+  \begin{itemize}
+  \item Can prove that this makes $f_!$ a left adjoint of $f^*$.
+  \item A double category where every vertical arrow induces these
+    universal horizontal arrows is called a \textbf{(proarrow)
+      equipment} or a \textbf{framed bicategory}.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Gradual CCC}
+  A Gradual CCC is (morally) a cartesian closed category internal to
+  thin categories with
+  \begin{itemize}
+  \item A vertically terminal object ($\dyn$)
+  \item A least element of every hom set (type error), satisfying $\ldots$
+  \item For every $A \ltdyn A'$, an upcast and downcast making the
+    double category an equipment, such that the downcast is a retract
+    of the upcast.
+  \end{itemize}
+\end{frame}
+\begin{frame}{Gradual CCC}
+  A Gradual CCC is a representable cartesian multicategory internal to
+  thin categories with
+  \begin{itemize}
+  \item A vertically terminal object ($\dyn$)
+  \item A least element of every hom set (type error), satisfying $\ldots$
+  \item For every $A \ltdyn A'$, an upcast and downcast making the
+    double category an equipment, such that the downcast is a retract
+    of the upcast (spelled out on the next slide).
+  \end{itemize}
+\end{frame}
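+% Added sketch: the retraction axiom written as an equation, plus the
+% derivable projection inequality.
+\begin{frame}{Casts Form EP Pairs (sketch)}
+  The retraction condition says casting up and then back down is the
+  identity:
+  \[ \dncast{A}{A'}(\upcast{A}{A'} t) \equidyn t \]
+  The other composite is a \emph{projection}, i.e., below the
+  identity; this is derivable from the universal properties:
+  \begin{itemize}
+  \item $\dncast{A}{A'} u \ltdyn u : A \ltdyn A'$ by the downcast rule,
+  \item so $\upcast{A}{A'}(\dncast{A}{A'} u) \ltdyn u : A'$ by the
+    upcast rule.
+  \end{itemize}
+  So every $A \ltdyn A'$ is interpreted by an embedding-projection
+  pair $(\upcast{A}{A'}, \dncast{A}{A'})$ with respect to $\ltdyn$.
+\end{frame}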
+
+\begin{frame}{Gradual Type Theory}
+  \begin{itemize}
+  \item Made a type theory with types, terms, and type and term orderings.
+  \item Proved soundness and completeness theorems for our Gradual
+    Type Theory.
+  \end{itemize}
+\end{frame}
+
+\section{Applications}
+%\subsection{Theorems about Gradual Languages}
+\begin{frame}{New Results in Gradual Typing}
+  Can prove results that hold in any ``reasonable'' gradually typed
+  language.\pause  
+  \begin{itemize}
+  \item Validate optimizations: elimination of redundant casts.
+  \item Uniqueness Theorems
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Uniqueness Theorem}
+  \begin{theorem}[Uniqueness of Function Casts]
+    In Gradual Type Theory if $A \ltdyn A'$ and $B \ltdyn B'$, the
+    following equivalences hold:
+      \[
+  \dncast {A \to B} {A' \to B'} t \equiv \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  and
+  \[
+  \upcast {A \to B} {A' \to B'} t \equiv \lambda x':A'. \upcast{B}{B'}(t (\dncast A {A'} x'))
+  \]
+  \end{theorem}
+
+  Proof uses specification for casts, and $\beta, \eta$ axioms for
+  function types.
+
+  \begin{corollary}
+    Any \emph{observably different} implementation violates $\beta$,
+    $\eta$ or graduality!
+  \end{corollary}
+\end{frame}
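+% Added proof sketch (assumption: only the cast rules, congruence,
+% transitivity, and eta are used). The wrapped term is shown to satisfy
+% the upcast's universal property, which pins it down up to equi-dynamism.
+\begin{frame}{Uniqueness of Function Casts: Proof Sketch}
+  Write $w = \lambda x':A'.\, \upcast{B}{B'}(t\,(\dncast{A}{A'} x'))$
+  and check that $w$ satisfies the specification of
+  $\upcast{A \to B}{A' \to B'} t$:
+  \begin{enumerate}
+  \item $t \ltdyn w : A \to B \ltdyn A' \to B'$: by $\eta$,
+    $t \equidyn \lambda x:A.\, t\,x$, so by $\lambda$-congruence it
+    suffices that for $x \ltdyn x' : A \ltdyn A'$,
+    \[ t\,x \ltdyn \upcast{B}{B'}(t\,(\dncast{A}{A'} x')) : B \ltdyn B' \]
+    which follows from $x \ltdyn \dncast{A}{A'}x' : A$ (downcast rule),
+    application congruence, the upcast rule, and transitivity.
+  \item $w$ is least: if $t \ltdyn u : A \to B \ltdyn A' \to B'$ then,
+    using $\eta$ for $u$ and $\lambda$-congruence, it suffices that
+    for $x' : A'$,
+    \[ \upcast{B}{B'}(t\,(\dncast{A}{A'} x')) \ltdyn u\,x' : B' \]
+    which follows from $\dncast{A}{A'}x' \ltdyn x' : A \ltdyn A'$
+    (downcast rule), application congruence, and the upcast rule.
+  \end{enumerate}
+  The downcast equivalence is dual.
+\end{frame}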
+
+\begin{frame}{Uniqueness Theorems}
+  Uniqueness theorems say ``there is only one way to do it'':
+  \begin{itemize}
+  \item Can prove them for casts between the \emph{same} connective
+    using $\beta, \eta$
+  \item Only casts that don't have uniqueness theorems are the ``tag types'':
+    \begin{mathpar}
+      (\dyn \to \dyn) \ltdyn \dyn \and
+      (\dyn \times \dyn) \ltdyn \dyn
+    \end{mathpar}
+    The actual design flexibility is here.
+  \end{itemize}
+\end{frame}
+
+%\subsection{Model Construction}
+
+%% \begin{frame}{Semantic Contract Translation}
+%%   \begin{itemize}
+%%   \item Start with a locally thin cartesian closed 2-category $C$
+%%     local bottoms and with a $D \in C$ which has specified ep pairs:
+%%     \begin{enumerate}
+%%     \item $(D \to D) \triangleleft D$
+%%     \item $(D \times D) \triangleleft D$
+%%     \end{enumerate}
+%%   \item Make $C$ into a vertically thin double category whose vertical
+%%     arrows are ep pairs: $EP(C)$
+%%   \item Take a \emph{vertical slice} of $EP(C)$ over $D$ $EP(C)/D$
+%%   \end{itemize}
+%% \end{frame}
+
+%% \begin{frame}{Semantic Contract Translation}
+%%   \begin{itemize}
+%%   \item A gradual type $A$ is an ep pair $A \triangleleft D$
+%%   \item If $A \ltdyn B$, then there exists a (unique) ep pair $A
+%%     \triangleleft B$ that factorizes:
+%%     \[ A \triangleleft D \equiv A \triangleleft B \triangleleft D \]
+%%   \item A term $\Gamma \vdash t : A$ is a morphism $\times \Gamma \to A$
+%%   \item A term ordering
+%%     \[ \Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A' \]
+%%     is an ordering
+%%     \[ \upcast{A}{A'} \circ t \circ \dncast{\Gamma}{\Gamma'} \ltdyn t' \]
+%%     (or one of 3 other equivalent placements)
+%%   \end{itemize}
+%% \end{frame}
+
+\begin{frame}[fragile]{A Domain Model}
+  Need domains with 2 notions of error (divergence, crash) and 2
+  notions of order.
+  \begin{itemize}
+  \item A gradual pre-domain is a set $X$ with an $\omega$CPO
+    structure $(X,\leq)$ and a preorder $(X, \ltdyn)$ with $\ltdyn$
+    closed under suprema of $\leq$-$\omega$-chains.
+  \item A gradual domain is a gradual pre-domain with a $\leq$-least
+    element $\bot$ and a $\ltdyn$-least element $\err$
+  \item A morphism $f : X \to Y$ is continuous in $\leq$ and monotone
+    in $\ltdyn$.
+  \end{itemize}
+  $1_{\bot,\err}$:
+  \begin{columns}
+    $\leq$:
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+* \arrow[d, no head] & \mho \arrow[ld, no head] \\
+\bot & 
+\end{tikzcd}      
+    \end{column}
+    $\ltdyn$:
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+* \arrow[d, no head] & \bot \arrow[ld, no head] \\
+\err & 
+\end{tikzcd}      
+    \end{column}
+  \end{columns}
+\end{frame}
+
+\begin{frame}{A Domain Model}
+  \begin{itemize}
+  \item Can construct a suitable reflexive object using the domain
+    structure and standard techniques (using $\leq$ ep pairs).
+  \item Combine with the earlier construction to get a
+    domain-theoretic model of gradual typing (using $\sqsubseteq$ ep
+    pairs).
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Domain Contract Translation}
+  \begin{itemize}
+  \item A gradual type $A$ is an ep pair $A \triangleleft D$ with
+    respect to the \emph{error ordering}
+  \item If $A \ltdyn B$, then there exists a (unique) ep pair $A
+    \triangleleft B$ that factorizes:
+    \[ A \triangleleft D \equiv A \triangleleft B \triangleleft D \]
+  \item A term $\Gamma \vdash t : A$ is a morphism $\times \Gamma \to A$
+  \item A term ordering
+    \[ \Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A' \]
+    is an $\ltdyn$ ordering
+    \[ \upcast{A}{A'} \circ t \circ \dncast{\Gamma}{\Gamma'} \ltdyn t' \]
+    (or one of 3 other equivalent placements; see the sketch on the next slide)
+  \end{itemize}
+\end{frame}
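+% Added sketch of why two of the ``equivalent placements'' coincide,
+% using the retraction and projection properties of the ep pairs.
+\begin{frame}{Equivalent Placements (sketch)}
+  Write $e = \upcast{A}{A'}$, $p = \dncast{A}{A'}$ and $e_\Gamma,
+  p_\Gamma$ for the casts on contexts. Then
+  \[ e \circ t \circ p_\Gamma \ltdyn t'
+     \qquad\text{iff}\qquad
+     t \circ p_\Gamma \ltdyn p \circ t' \]
+  \begin{itemize}
+  \item If $e \circ t \circ p_\Gamma \ltdyn t'$ then
+    $t \circ p_\Gamma = p \circ e \circ t \circ p_\Gamma \ltdyn p \circ t'$
+    (retraction, then monotonicity of composition).
+  \item If $t \circ p_\Gamma \ltdyn p \circ t'$ then
+    $e \circ t \circ p_\Gamma \ltdyn e \circ p \circ t' \ltdyn t'$
+    (monotonicity, then the projection inequality $e \circ p \ltdyn \mathrm{id}$).
+  \end{itemize}
+  The remaining placements are handled the same way with $e_\Gamma, p_\Gamma$.
+\end{frame}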
+
+\section{Conclusion}
+
+%% \begin{frame}{Questions}
+%%   \begin{itemize}
+%%   \item Double Categories for Domain Theory?
+%%   \item Strong analogy to parametricity
+%%   \item Intuition for Gradual Typing has a refinement typing flavor,
+%%     but GTT is Church-style, so doesn't show up yet.
+%%   \end{itemize}
+%% \end{frame}
+
+\begin{frame}{Conclusion}
+  \begin{itemize}
+  \item Using double categories, type casts get a
+    specification/universal property.
+  \item Type-theoretic presentation makes it easy to combine with
+    other features.
+    \begin{itemize}
+    \item In progress: Gradual Call-by-push-value with positives and
+      negatives.
+    \end{itemize}
+    % Present
+  \item More inside!
+    \begin{itemize}
+    \item More theorems in GTT
+    \item Formulation using Cartesian Multicategories internal to
+      Preorders.
+    \item Category-theoretic construction generalizing the domain
+      model as a \emph{vertical} slice category.
+    \end{itemize}
+  \end{itemize}
+\end{frame}
+ 
+\end{document}
+
+%% Local Variables:
+%% compile-command: "latexmk -C talk.tex; latexmk talk.tex"
+%% End:
diff --git a/talk/cmu-fall-2018/mfps-talk.tex b/talk/cmu-fall-2018/mfps-talk.tex
new file mode 100644
index 0000000000000000000000000000000000000000..54b671da01a4513bd25c438b098ef0b1f1c4c289
--- /dev/null
+++ b/talk/cmu-fall-2018/mfps-talk.tex
@@ -0,0 +1,779 @@
+\documentclass{beamer}
+\usetheme{boxes}
+
+\newcommand{\notepr}[1]{}
+
+\usepackage{rotating}
+\usepackage{listings}
+\usepackage{xcolor}
+%% \lstset{
+%%   basicstyle=\ttfamily,
+%%   escapeinside=~~
+%% }
+
+\usepackage{microtype}
+\usepackage{latexsym,amsmath,amssymb,amsthm}
+\usepackage{mathpartir}
+\usepackage{tikz}
+\usepackage{tikz-cd}
+\input{talk-defs}
+\newcommand{\ltdyn}{\dynr}
+\newcommand{\uarrow}{\mathrel{\rotatebox[origin=c]{-30}{$\leftarrowtail$}}}
+\newcommand{\darrow}{\mathrel{\rotatebox[origin=c]{30}{$\twoheadleftarrow$}}}
+\renewcommand{\upcast}[2]{\langle{#2}\uarrow{#1}\rangle}
+\renewcommand{\dncast}[2]{\langle{#1}\darrow{#2}\rangle}
+\newcommand{\gtdyn}{\sqsupseteq}
+
+\newcommand{\converge}{\Downarrow}
+\newcommand{\diverge}{\Uparrow}
+
+\title{Semantic Foundations for Gradual Typing}
+\author{Max S. New}
+\institute{Northeastern University}
+\date{\today}
+
+\begin{document}
+
+\begin{frame}
+  \titlepage
+  \begin{small}
+    Joint work with\\
+    Amal Ahmed (Northeastern Uni.),\\
+    Dan Licata (Wesleyan Uni.)
+  \end{small}
+\end{frame}
+
+\begin{frame}
+  \frametitle{Outline}
+  \tableofcontents
+\end{frame}
+
+\section{Why Semantic Gradual Typing}
+
+\begin{frame}{My Dream of Gradual Typing}
+  % Don't dwell too long, 3rd talk in the session.
+  Why should people that like \emph{static typing} like \emph{gradual
+    typing}?
+  \begin{itemize}
+  \item We need to interact with unverified inputs
+    %% \begin{enumerate}
+    %% \item External environment/user interface
+    %% \item Legacy code
+    %% \item Untrusted, possibly adversarial code
+    %% \end{enumerate}
+  \item Want to be able to add \emph{new} typing features to
+    languages, but still easily interact with old code.
+    %% \begin{enumerate}
+    %% %% \item Types were unclear before, but are clear now
+    %% \item Maybe the type features didn't exist before!
+    %% %% \item Didn't know how/didn't have time to convince the type
+    %% %%   checker.
+    %% \end{enumerate}
+  \end{itemize}
+  Want language-level support for these use cases.
+\end{frame}
+
+\begin{frame}{The Problem with Gradual Typing}
+  \begin{itemize}
+  \item Useful type systems have strong type-based reasoning
+    principles, and categorical semantics!
+    \begin{enumerate}
+    \item Extensionality principles ($\eta$ laws)
+    \item Representation independence
+    \item Parametricity
+    \end{enumerate}
+  \item \pause But the state of the art$\ldots$
+    \begin{enumerate}
+    \item Many incompatible semantics of casts have been proposed:
+      Lazy, Eager, UD, Transient, Monotonic state, Proxied state
+      \begin{itemize}
+      \item Which ones give which reasoning principles?
+      \end{itemize}
+    \item Blame Safety Theorem: 
+      \begin{enumerate}
+      \item Gradually typed programs \emph{do} go wrong, but
+      \item \pause If a type error occurs, the well-typed
+      part of the program is never \emph{blamed}.
+      \end{enumerate}
+    %% \item Other work: Parametricity for Gradual Typing, but ``ad
+    %%   hoc'': what was the principle behind it.
+    \end{enumerate}
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{A Guide: Domain Theory}
+  \begin{itemize}
+  \item Gradual Typing is very similar to domain theory with universal
+    types
+    \begin{enumerate}
+    \item Dynamic type $\sim$ Universal object
+    \item Type casts $\sim$ Embedding-projection pairs
+    \end{enumerate}
+  \item But the usual notion of ep pair is not an \emph{adequate}
+    model of gradual typing:
+    \begin{enumerate}
+    \item Models type errors as $\bot$, same as diverging computation.
+    \item Come back to this later
+    \end{enumerate}
+  \item Analogy exploited in work on semantics of contracts in the
+    2000s by Findler, Blume and Felleisen (some unpublished).
+  \end{itemize}
+\end{frame}
+
+\section{Gradual CCCs and GTT}
+\begin{frame}{First Step: Gradual CCCs and Gradual Type Theory}
+  \begin{footnotesize}
+  \begin{center}
+    \begin{tabular}{|c|c|}
+      \hline
+      \textbf{Type Theory} & \textbf{Category} \\
+      \hline
+      \hline
+      Negative Typed Lambda Calculus & Cartesian Closed Categories\\
+      \hline
+      Negative \textbf{Gradual} Lambda Calculus? & Gradual CCC?\\
+      \hline
+    \end{tabular}
+  \end{center}
+  \end{footnotesize}
+  \begin{itemize}
+  \item Model the ``cast calculus'' of gradual typing, not the surface
+    syntax.
+  \item Develop concrete models based on domains and logical
+    relations.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Desiderata}
+  What should GCCC/GTT be?
+  \begin{itemize}
+  \item Have a fully dynamically typed sub-language that is
+    computationally complete for the language.
+    \begin{itemize}
+    \item \pause There should be a \emph{universal type} $\dyn$ such that
+      every type is a \emph{retract} of it.
+    \end{itemize}
+  \item \pause Have static types, that \emph{mean the right thing}.
+    \begin{itemize}
+    \item \pause The category should be a CCC (satisfies $\beta, \eta$)
+    \end{itemize}
+  \item \pause Have a smooth transition from dynamic to static typing.
+    \begin{itemize}
+    \item \pause ???
+    \end{itemize}
+  \end{itemize}  
+\end{frame}
+
+%\subsection{Graduality}
+\begin{frame}{Graduality}
+  How do we formalize the idea of a ``smooth transition''?
+  \begin{itemize}
+  \item Siek-Vitousek-Cimini-Boyland, SNAPL 2015, propose the
+    \emph{gradual guarantee}.
+  \item \pause Intuition: changing the precision of the types of the
+    program should only add or remove checking and not otherwise
+    change behavior.
+  \item \pause More Formally: refining the \emph{types} of a gradual
+    program should result in a refinement of \emph{behavior}.
+  \item \pause Analogous to parametricity: changing the type can only result
+    in certain changes to behavior.
+  \end{itemize}
+\end{frame}
+
+% Refining a type means...
+\begin{frame}{Type Dynamism Ordering}
+  $A \ltdyn B$ means $A$ is ``less dynamic''/ ``more precise'' than $B$.
+  \pause
+  \begin{mathpar}
+    \begin{array}{rrl}
+      \text{types} & A,B,C \bnfdef & \dyn \bnfalt A \to B \bnfalt A \times B \bnfalt 1
+    \end{array}\\
+    \pause
+    \inferrule
+    {}
+    {A \ltdyn \dyn}
+
+    \inferrule
+    {}
+    {A \ltdyn A}
+
+    \inferrule
+    {A \ltdyn B \ltdyn C}
+    {A \ltdyn C}\\
+
+    \inferrule
+    {A \ltdyn A' \and B \ltdyn B'}
+    {A \to B \ltdyn A' \to B'}
+
+    \inferrule
+    {A \ltdyn A' \and B \ltdyn B'}
+    {A \times B \ltdyn A' \times B'}
+  \end{mathpar}
+  \pause
+  Why? Consider
+  \[ N \to N \sqsubseteq \dyn \to \dyn \]
+\end{frame}
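+% Added elaboration of the example above (an assumption about how to
+% read it): dynamism is covariant in the domain, unlike subtyping,
+% because casts go in both directions.
+\begin{frame}{Why Covariant in the Domain?}
+  Unlike subtyping, $A \to B \ltdyn A' \to B'$ is covariant in
+  \emph{both} positions: $A \ltdyn A'$ means there are casts in both
+  directions, not a one-way coercion.
+
+  E.g., $N \to N \ltdyn \dyn \to \dyn$: a function $f : N \to N$ can
+  be used at $\dyn \to \dyn$ by casting in both directions,
+  \[ \lambda x : \dyn.\, \upcast{N}{\dyn}(f\,(\dncast{N}{\dyn} x)) \]
+  and a $g : \dyn \to \dyn$ can be checked against $N \to N$ dually.
+\end{frame}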
+
+% Now what does it mean to refine the behavior of a program... 
+\begin{frame}{Operational Definition of Graduality}
+  Extend type ordering to terms by congruence:
+  \begin{itemize}
+  \item Define $t \ltdyn u$ to hold if $t$ is formed by replacing all
+    type annotations in $u$ with a less dynamic type.
+  \end{itemize}
+  \begin{theorem}[Gradual Guarantee]
+    If $t \ltdyn u$ and $\cdot \vdash t : A$ and $\cdot \vdash u : B$,
+    then either
+    \begin{enumerate}
+    \item $t \converge \err$
+    \item $t \diverge, u \diverge$
+    \item $t \converge v, u \converge v'$ and $v \ltdyn v'$
+    \end{enumerate}
+  \end{theorem}
+  \pause Defines an ordering on effects, but where error is the least
+  element, not divergence.
+\end{frame}
+
+\begin{frame}{Graduality Violations}
+  Rules out:
+  \begin{itemize}
+  \item Making a program more static turning a type error into a
+    success(!)
+  \item Or changing the successful result
+  \item Or causing a terminating program to diverge
+  \end{itemize}
+
+  Nota Bene:
+  \begin{itemize}
+  \item Rules out unrestricted \emph{catching} of type errors
+    \emph{within} the gradual language.
+  \item So if the language is sequential, type errors must crash.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Extensional Term Dynamism}
+  To model this theorem, we want an \emph{axiomatic, extensional}
+  version of the term ordering. I.e., we include the rules:
+  
+  \begin{mathpar}
+    \inferrule*
+    {~}
+    {\err \ltdyn t}
+
+    \inferrule*[Right=$\beta\to$]
+    {~}
+    {(\lambda x.t) u \equidyn t[u/x]}
+
+    \inferrule*[Right=$\eta\to$]
+    {t : A \to B}
+    {t \equidyn \lambda x. t x}
+
+    \inferrule*[Right=$\beta\times$]
+    {~}
+    {\pi_i (t_1, t_2) \equidyn t_i}
+
+    \inferrule*[Right=$\eta\times$]
+    {t : A \times B}
+    {t \equidyn (\pi_1 t, \pi_2 t)}
+
+    \inferrule*[Right=$\eta 1$]
+    {t : 1}
+    {t \equidyn ()}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Term Dynamism Ordering}
+  \alt<2>{\emph{Heterogeneously typed congruence rules}}{Also add \emph{congruence rules}}
+  \begin{mathpar}
+    \alt<2>{\inferrule {t \ltdyn t' : A \to B \ltdyn A' \to B' \and u
+        \ltdyn u' : A \ltdyn A'} {t\, u \ltdyn t' \, u' : B \ltdyn
+        B'}} {\inferrule {t \ltdyn t' \and u \ltdyn u'} {t\, u \ltdyn
+        t' \, u'}}
+    
+    \alt<2>{\inferrule
+    {\Gamma,x:A\ltdyn \Gamma',x':A' \vdash t \ltdyn u : B \ltdyn B'}
+    {\Gamma \ltdyn \Gamma' \vdash \lambda x : A. t \ltdyn \lambda x' : A' . u : A \to B \ltdyn A' \to B'}}
+    {\inferrule
+    {x:A\ltdyn x':A' \vdash t \ltdyn u \and A \ltdyn A'}
+    {\lambda x : A. t \ltdyn \lambda x' : A' . u}}
+
+    \alt<2>{
+    {\inferrule{t_1 \ltdyn u_1 : A_1 \ltdyn B_1 \and t_2\ltdyn u_2 : A_2 \ltdyn B_2}{(t_1,t_2)\ltdyn (u_1,u_2) : A_1 \times A_2 \ltdyn B_1 \times B_2}}
+    }
+    {\inferrule{t_1 \ltdyn u_1 \and t_2\ltdyn u_2}{(t_1,t_2)\ltdyn (u_1,u_2)}}
+
+    \alt<2>
+    {\inferrule{t \ltdyn u : A_1 \times A_2 \ltdyn B_1 \times B_2}{\pi_i t \ltdyn \pi_i u : A_i \ltdyn B_i}}
+    {\inferrule{t \ltdyn u}{\pi_i t \ltdyn \pi_i u}\\}
+
+    \alt<2>{
+    \inferrule
+    {\Gamma \vdash t : A}
+    {\Gamma \ltdyn \Gamma \vdash t \ltdyn t}}
+    {\inferrule{~}{t \ltdyn t}}
+
+    \alt<2>{
+      \inferrule{\Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A'\\
+        \Gamma' \ltdyn \Gamma'' \vdash t' \ltdyn t'' : A' \ltdyn A''}
+                {\Gamma \ltdyn \Gamma'' \vdash t \ltdyn t'' : A \ltdyn A''}
+    }{\inferrule{t \ltdyn t' \ltdyn t''}{t \ltdyn t''}}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Formalizing Graduality}
+  How to formalize this in our categories?
+  \begin{enumerate}
+  \item Types and terms are ordered, type constructors and term
+    constructors are monotone\ldots\pause Use internal categories!
+  \item \pause Want categories internal to preorders, i.e., thin categories.
+  \item \pause Categories internal to categories are well-studied and called
+    \emph{double categories}.
+  \end{enumerate}
+\end{frame}
+
+%\subsection{Double Categories}
+\begin{frame}{Double Categories}
+  A double category is a kind of 2-category with two different kinds
+  of 1-arrows between the same objects.\\
+  Examples:
+  \begin{itemize}
+  \item Functions and Relations
+  \item EP Pairs and Continuous functions
+  \item Functors and Profunctors
+  \item Adjunctions and Functors
+  \end{itemize}
+\end{frame}
+
+\begin{frame}[fragile]{Double Categories}
+  A double category consists of
+  \begin{itemize}
+  \item A set of objects $O$
+  \item A set of \emph{vertical arrows} $p,q,r$, with category structure. 
+    \begin{tikzcd}
+      A \arrow[d, "p"]\\ B
+    \end{tikzcd}
+  \item A set of \emph{horizontal arrows}, with category structure.
+    \[ 
+    \begin{tikzcd}
+      A \arrow[r, "f"] & B
+    \end{tikzcd}
+    \]
+  \item A set of \emph{squares} which are ``framed'' by horizontal, vertical arrows:
+    \[\begin{tikzcd}
+      A \arrow[dd,"p"] \arrow[rr, "f"] &  & B \arrow[dd,"r"] \\
+      & \alpha &  \\
+      C \arrow[rr, "g"] &  & D
+    \end{tikzcd}\]
+  \end{itemize}
+\end{frame}
+
+\begin{frame}[fragile]{Double Categories}
+  Squares can be composed in 2 ways\\
+  \begin{columns}
+    \begin{column}{0.45\textwidth}
+      First, \emph{horizontally}\\
+  \begin{tikzcd}
+    A \arrow[r]\arrow[d] & B \arrow[r]\arrow[d] & C\arrow[d]\\
+    A' \arrow[r] & B'\arrow[r]& C'
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      And for every vertical arrow, a horizontal identity square:
+      \begin{tikzcd}
+A \arrow[d, "p"] \arrow[r, "id"] & A \arrow[d, "p"] \\
+B \arrow[r, "id"] & B
+\end{tikzcd}
+    \end{column}
+  \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      Second, \emph{vertically}\\
+  \begin{tikzcd}
+A \arrow[d] \arrow[r] & B \arrow[d] \\
+A' \arrow[d] \arrow[r] & B' \arrow[d] \\
+A'' \arrow[r] & B''
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      And for every horizontal arrow, a vertical identity square:
+      \begin{tikzcd}
+A \arrow[d, "id"] \arrow[r, "f"] & B \arrow[d, "id"] \\
+A \arrow[r, "f"] & B
+\end{tikzcd}
+    \end{column}
+  \end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing as Double Categories}
+  \begin{itemize}
+  \item The objects are the types
+  \item The \emph{vertical arrows} are the type dynamism judgments $A
+    \ltdyn B$ (so a thin category)
+  \item The \emph{horizontal arrows} are the terms $x :A \vdash t : B$
+  \item The \emph{squares} are the term dynamism:
+    \begin{columns}
+      \begin{column}{0.45\textwidth}
+        \begin{tikzcd}
+          x:A \arrow[dd] \arrow[rr, "t"] &  & B \arrow[dd] \\
+          & \sqsubseteq &  \\
+          x':A' \arrow[rr, "t'"] &  & B'
+    \end{tikzcd}        
+      \end{column}
+      \begin{column}{0.45\textwidth}
+        \[
+        x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' : B \ltdyn B'
+        \]
+      \end{column}
+    \end{columns}
+
+  \end{itemize}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing as Double Categories}
+  Horizontal composition of squares is a \emph{substitution principle}:
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+    A \arrow[r,"u"]\arrow[d] & B \arrow[r,"t"]\arrow[d] & C\arrow[d]\\
+    A' \arrow[r,"u'"] & B'\arrow[r,"t'"]& C'
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{tikzcd}
+        A \arrow[d] \arrow[r, "id"] & A \arrow[d] \\
+        B \arrow[r, "id"] & B
+      \end{tikzcd}      
+    \end{column}
+    \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+        \inferrule{x \ltdyn x' \vdash t \ltdyn t'\\ u \ltdyn u'}
+                  {t[u/x] \ltdyn t'[u'/x]}
+      \end{mathpar}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+          \inferrule
+          {}
+          {x\ltdyn x' : A \ltdyn A' \vdash x \ltdyn x' : A \ltdyn A'}
+      \end{mathpar}
+    \end{column}
+    \end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing as Double Categories}
+  Vertical composition of squares is \emph{transitivity and reflexivity}.
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+\Gamma \arrow[d] \arrow[r,"t"] & A \arrow[d] \\
+\Gamma' \arrow[d] \arrow[r,"t'"] & A' \arrow[d] \\
+\Gamma'' \arrow[r,"t''"] & A''
+  \end{tikzcd}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{tikzcd}
+A \arrow[d, "id"] \arrow[r, "t"] & B \arrow[d, "id"] \\
+A \arrow[r, "t"] & B
+\end{tikzcd}
+    \end{column}
+    \end{columns}
+    \begin{columns}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+        \inferrule{\Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A'\\
+          \Gamma' \ltdyn \Gamma'' \vdash t' \ltdyn t'' : A' \ltdyn A''}
+                  {\Gamma \ltdyn \Gamma'' \vdash t \ltdyn t'' : A \ltdyn A''}
+      \end{mathpar}
+    \end{column}
+    \begin{column}{0.45\textwidth}
+      \begin{mathpar}
+          \inferrule
+          {\Gamma \vdash t : A}
+          {\Gamma \ltdyn \Gamma \vdash t \ltdyn t : A \ltdyn A}
+      \end{mathpar}
+    \end{column}
+    \end{columns}
+\end{frame}
+
+%\subsection{A Universal Property for Casts}
+
+\begin{frame}{Type Casts}
+  To actually get dynamic type checking, we need casts.
+
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash t : A \and A \ltdyn B}
+    {\Gamma \vdash \upcast A B t : B}
+
+    \inferrule
+    {\Gamma \vdash u : B \and A \ltdyn B}
+    {\Gamma \vdash \dncast A B u : A}
+  \end{mathpar}
+
+  \pause By having the term ordering defined \emph{first}, we can
+  \emph{characterize} them.
+\end{frame}
+
+\begin{frame}{Casts bend the Rules}
+  How should casts behave?
+  Intuition: the casted term should act as much like the original term
+  as possible while still satisfying the new type.
+
+  So for a downcast, the casted term should refine the original:
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\dncast A B u \ltdyn u : A \ltdyn B}
+  \end{mathpar}
+
+  \pause But this would be satisfied even by the type error $\err_A$.
+  \pause
+  We want to make sure that the casted term has \emph{as much}
+  behavior as possible: i.e., it's the \emph{biggest} term below
+  $u$.
+  \begin{mathpar}
+    \inferrule
+    {t \ltdyn u : A \ltdyn B}
+    {t \ltdyn \dncast A B u : A}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\dncast A B u$ is the greatest
+  lower bound of $u$ in $A$.
+\end{frame}
+
+\begin{frame}{Casts bend the Rules}
+  Upcasts are the exact dual
+
+  For an upcast, the casted term should be \emph{more} dynamic than
+  the original:
+  \begin{mathpar}
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \upcast A B t : A \ltdyn B}
+  \end{mathpar}
+
+  But it has no more behaviors than dictated by the original term,
+  it's the \emph{least} term above $t$.
+
+  \begin{mathpar}
+    \inferrule
+    {t \ltdyn u : A \ltdyn B}
+    {\upcast A B t \ltdyn u : B}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\upcast A B t$ is the least upper
+  bound of $t$ in $B$.
+\end{frame}
+% TODO: ugly
+\begin{frame}[fragile]{A Universal Property for Casts}
+  The upcast is the universal way to turn a vertical arrow horizontal
+  \emph{covariantly} and the downcast is the universal way to do it
+  contravariantly.
+
+  \[\begin{tikzcd}
+A' \arrow[d] & A \arrow[r, "upcast"] \arrow[l, "dncast"'] \arrow[d] & A' \arrow[d] \\
+A' & A' \arrow[r] \arrow[l] & A'
+  \end{tikzcd}
+  \]
+
+  A double category where every vertical arrow induces these universal
+  horizontal arrows is called an \textbf{equipment}.
+\end{frame}
+
+\begin{frame}{Gradual CCC}
+  A Gradual CCC is a cartesian closed category internal to thin
+  categories with
+  \begin{itemize}
+  \item A vertically terminal object ($\dyn$)
+  \item A least element of every hom set (type error), satisfying $\ldots$
+  \item For every $A \ltdyn A'$, an upcast and downcast making the
+    double category an equipment, such that the downcast is a retract
+    of the upcast.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Gradual Type Theory}
+  \begin{itemize}
+  \item Made a type theory with types, terms, and type and term orderings.
+  \item Proved soundness and completeness theorems for our Gradual
+    Type Theory.
+  \item Slightly simplifying here: we used multicategories, rather
+    than CCCs directly, but should be essentially the same.
+  \end{itemize}
+\end{frame}
+
+\section{Applications}
+%\subsection{Theorems about Gradual Languages}
+\begin{frame}{New Results in Gradual Typing}
+  Can prove results that hold in any ``reasonable'' gradually typed
+  language.\pause  
+  \begin{itemize}
+  \item Validate optimizations: elimination of redundant casts.
+  \item Uniqueness Theorems
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Uniqueness Theorem}
+  \begin{theorem}[Uniqueness of Function Casts]
+    In Gradual Type Theory if $A \ltdyn A'$ and $B \ltdyn B'$, the
+    following equivalences hold:
+      \[
+  \dncast {A \to B} {A' \to B'} t \equiv \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  and
+  \[
+  \upcast {A \to B} {A' \to B'} t \equiv \lambda x':A'. \upcast{B}{B'}(t (\dncast A {A'} x'))
+  \]
+  \end{theorem}
+
+  Proof uses specification for casts, and $\beta, \eta$ axioms for
+  function types.
+
+  \begin{corollary}
+    Any \emph{observably different} implementation violates $\beta$,
+    $\eta$ or graduality!
+  \end{corollary}
+\end{frame}
+
+\begin{frame}{Uniqueness Theorems}
+  Uniqueness theorems say ``there is only one way to do it'':
+  \begin{itemize}
+  \item Can prove them for casts between the \emph{same} connective
+    using $\beta, \eta$
+  \item Only casts that don't have uniqueness theorems are the ``tag types'':
+    \begin{mathpar}
+      (\dyn \to \dyn) \ltdyn \dyn \and
+      (\dyn \times \dyn) \ltdyn \dyn
+    \end{mathpar}
+    The actual design flexibility is here.
+  \end{itemize}
+\end{frame}
+
+%\subsection{Model Construction}
+\begin{frame}{Semantic Contract Translation}
+  \begin{itemize}
+  \item Start with a locally thin cartesian closed 2-category $C$ with
+    local bottoms and a $D \in C$ which has specified ep pairs:
+    \begin{enumerate}
+    \item $(D \to D) \triangleleft D$
+    \item $(D \times D) \triangleleft D$
+    \end{enumerate}
+  \item Make $C$ into a vertically thin double category whose vertical
+    arrows are ep pairs: $EP(C)$
+  \item Take a \emph{vertical slice} of $EP(C)$ over $D$: $EP(C)/D$
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Semantic Contract Translation}
+  \begin{itemize}
+  \item A gradual type $A$ is an ep pair $A \triangleleft D$
+  \item If $A \ltdyn B$, then there exists a (unique) ep pair $A
+    \triangleleft B$ that factorizes:
+    \[ A \triangleleft D \equiv A \triangleleft B \triangleleft D \]
+  \item A term $\Gamma \vdash t : A$ is a morphism $\times \Gamma \to A$
+  \item A term ordering
+    \[ \Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A' \]
+    is an ordering
+    \[ \upcast{A}{A'} \circ t \circ \dncast{\Gamma}{\Gamma'} \ltdyn t' \]
+    (or one of 3 other equivalent placements)
+  \end{itemize}
+\end{frame}
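+% Added sketch (assumption: the standard action of ep pairs on
+% function types) showing how composite types inherit ep pairs into D
+% from the specified ones.
+\begin{frame}{Example: EP Pair for $A \to B$}
+  Given $(e_A, p_A) : A \triangleleft D$ and
+  $(e_B, p_B) : B \triangleleft D$, build
+  $A \to B \triangleleft (D \to D) \triangleleft D$, where the first
+  ep pair acts by pre- and post-composition:
+  \begin{align*}
+    e_{A \to B}(f) &= e_B \circ f \circ p_A\\
+    p_{A \to B}(g) &= p_B \circ g \circ e_A
+  \end{align*}
+  Retraction: $p_{A \to B}(e_{A \to B}(f)) =
+  p_B \circ e_B \circ f \circ p_A \circ e_A = f$.\\
+  Projection: $e_{A \to B}(p_{A \to B}(g)) \ltdyn g$, using the
+  projection inequalities for $A$ and $B$.
+\end{frame}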
+
+\begin{frame}[fragile]{A Domain Model}
+  Need domains with 2 notions of error (divergence, crash) and 2
+  notions of order.
+  \begin{itemize}
+  \item A gradual pre-domain is a set $X$ with an $\omega$CPO
+    structure $(X,\leq)$ and a preorder $(X, \ltdyn)$ with $\ltdyn$
+    closed under suprema of $\leq$-$\omega$-chains.
+  \item A gradual domain is a gradual pre-domain with a $\leq$-least
+    element $\bot$ and a $\ltdyn$-least element $\err$
+  \item A continuous function $f : X \to Y$ must be continuous in
+    $\leq$ and monotone in $\ltdyn$.
+  \end{itemize}
+  $1_{\bot,\err}$:
+  \begin{columns}
+    $\leq$:
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+* \arrow[d, no head] & \mho \arrow[ld, no head] \\
+\bot & 
+\end{tikzcd}      
+    \end{column}
+    $\ltdyn$:
+    \begin{column}{0.45\textwidth}
+  \begin{tikzcd}
+* \arrow[d, no head] & \bot \arrow[ld, no head] \\
+\err & 
+\end{tikzcd}      
+    \end{column}
+  \end{columns}
+\end{frame}
+
+\begin{frame}{A Domain Model}
+  \begin{itemize}
+  \item Can construct a suitable reflexive object using the domain
+    structure and standard techniques (using $\leq$ ep pairs).
+  \item Combine with the earlier construction to get a
+    domain-theoretic model of gradual typing (using $\sqsubseteq$ ep
+    pairs).
+  \end{itemize}
+\end{frame}
+
+%% \begin{frame}{Domain Contract Translation}
+%%   \begin{itemize}
+%%   \item A gradual type $A$ is an ep pair $A \triangleleft D$ with
+%%     respect to the \emph{error ordering}
+%%   \item If $A \ltdyn B$, then there exists a (unique) ep pair $A
+%%     \triangleleft B$ that factorizes:
+%%     \[ A \triangleleft D \equiv A \triangleleft B \triangleleft D \]
+%%   \item A term $\Gamma \vdash t : A$ is a morphism $\times \Gamma \to A$
+%%   \item A term ordering
+%%     \[ \Gamma \ltdyn \Gamma' \vdash t \ltdyn t' : A \ltdyn A' \]
+%%     is an $\ltdyn$ ordering
+%%     \[ \upcast{A}{A'} \circ t \circ \dncast{\Gamma}{\Gamma'} \ltdyn t' \]
+%%     (or one of 3 other equivalent placements)
+%%   \end{itemize}
+%% \end{frame}
+
+\section{Conclusion}
+
+%% \begin{frame}{Questions}
+%%   \begin{itemize}
+%%   \item Double Categories for Domain Theory?
+%%   \item Strong analogy to parametricity
+%%   \item Intuition for Gradual Typing has a refinement typing flavor,
+%%     but GTT is Church-style, so doesn't show up yet.
+%%   \end{itemize}
+%% \end{frame}
+
+\begin{frame}{Conclusion}
+  \begin{itemize}
+    % Past
+  \item Using double categories, type casts get a
+    specification/universal property.
+    \begin{itemize}
+    \item Can prove theorems about \emph{many} gradual languages: only
+      one way to do function, product casts with $\eta$.
+    \end{itemize}
+    % Future
+  \item Internal category semantics means it's easy to combine with
+    other type theoretic features.
+    \begin{itemize}
+    \item In progress: Gradual Call-by-push-value with positives and
+      negatives.
+    \end{itemize}
+    % Present
+  \item Two papers
+    \begin{itemize}
+    \item ``Call-by-Name Gradual Type Theory'', New and Licata, FSCD 2018
+    \item ``Graduality from Embedding-projection Pairs'', New and Ahmed, ICFP 2018
+    \end{itemize}
+  \end{itemize}
+\end{frame}
+ 
+\end{document}
+
+%% Local Variables:
+%% compile-command: "latexmk -C talk.tex; latexmk talk.tex"
+%% End:
diff --git a/talk/cmu-fall-2018/neu-talk.tex b/talk/cmu-fall-2018/neu-talk.tex
new file mode 100644
index 0000000000000000000000000000000000000000..ade92ee04193819b302dcdd84c8f743930e7e8c4
--- /dev/null
+++ b/talk/cmu-fall-2018/neu-talk.tex
@@ -0,0 +1,1134 @@
+\documentclass{beamer}
+\usetheme{boxes}
+
+\newcommand{\notepr}[1]{}
+
+%% \newif\ifshort
+%% \newif\iflong
+
+%% \shorttrue
+%% %% \longtrue
+
+\usepackage{rotating}
+\usepackage{listings}
+\usepackage{xcolor}
+\lstset{
+  basicstyle=\ttfamily,
+  escapeinside=~~
+}
+
+\usepackage{microtype}
+\usepackage{latexsym,amsmath,amssymb,amsthm}
+\usepackage{mathpartir}
+\usepackage{tikz}
+\usepackage{tikz-cd}
+\input{talk-defs}
+\newcommand{\ltdyn}{\dynr}
+\newcommand{\uarrow}{\mathrel{\rotatebox[origin=c]{-30}{$\leftarrowtail$}}}
+\newcommand{\darrow}{\mathrel{\rotatebox[origin=c]{30}{$\twoheadleftarrow$}}}
+\renewcommand{\upcast}[2]{\langle{#2}\uarrow{#1}\rangle}
+\renewcommand{\dncast}[2]{\langle{#1}\darrow{#2}\rangle}
+\newcommand{\gtdyn}{\sqsupseteq}
+
+\title{Call-by-name Gradual Type Theory}
+\author{Max S. New}
+\institute{Northeastern University}
+\date{\today}
+
+\begin{document}
+
+\begin{frame}
+  \titlepage
+  Joint work with Dan Licata, Amal Ahmed
+
+  \notepr{I know the mere mention of call-by-name probably puts you to
+    sleep so before I show you my type theory I want to explain at the
+    bigger picture level what I'm trying to do}
+\end{frame}
+
+\begin{frame}{Call-by-name Gradual Type Theory}
+  \begin{enumerate}
+  \item A specific type theory for call-by-name gradual typing.
+  \item A \emph{method} for developing new gradually typed languages.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}
+  \frametitle{Outline}
+  \tableofcontents
+\end{frame}
+
+\section{Intro to Gradual Typing}
+
+\begin{frame}[fragile]{Gradual Typing is about Program Evolution}
+\begin{columns}
+\begin{column}{0.5\textwidth}
+\begin{onlyenv}<1>
+\begin{footnotesize}
+\begin{lstlisting}
+view(st) -> { ... }
+
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Valid Data
+}
+
+main() -> {
+  st = initial_state;
+  while true {
+    evt = input();
+    state = update(evt,st);
+    display(view(st));
+  }
+}
+\end{lstlisting}        
+\end{footnotesize}
+\end{onlyenv}
+\begin{onlyenv}<2->
+\begin{footnotesize}
+\begin{lstlisting}
+view(st) -> { ... }
+
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Valid Data
+}
+
+main() -> { .. }
+\end{lstlisting}
+\end{footnotesize}
+\end{onlyenv}
+\end{column}
+\begin{column}{0.1\textwidth}
+  \begin{onlyenv}<3->
+  $\Longrightarrow$
+\end{onlyenv}
+\end{column}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<3->
+\begin{lstlisting}
+view(st) -> { ... }
+
+~\colorbox{green}{type State = (Boolean, ?)}~
+~\colorbox{green}{update : (?, State) -> State}~
+update(event, state) -> {
+  if first(state)
+    then ... ; Loading
+    else ... ; Valid Data
+}
+                
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+\end{footnotesize}} \end{column}
+\end{columns}
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Typing is about Program Evolution}
+\begin{columns}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<1->
+\begin{lstlisting}
+view(st) -> { ... }
+
+~\colorbox{green}{type State = (Boolean, ?)}~
+~\colorbox{green}{update : (?, State) -> State}~
+update(event, state) -> {
+  if first(state)
+    then ... ; Loading
+    else ... ; Valid Data
+}
+                
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+\end{footnotesize}}
+\end{column}
+\begin{column}{0.1\textwidth}
+\begin{onlyenv}<2->
+  $\Longrightarrow$
+\end{onlyenv}
+\end{column}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<2->
+\begin{lstlisting}
+view(st) -> { ... }
+~\colorbox{green}{type State = (Boolean, Num)}~
+~\colorbox{green}{type Evt = NewData Num }~
+        ~\colorbox{green}{| Cancel}~
+~\colorbox{green}{update : (Evt, State) -> State}~
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Up to date
+}
+            
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+\end{footnotesize}}
+\end{column}
+\end{columns}
+\begin{onlyenv}<3->
+  Code runs at every point, even though only \texttt{update} is typed.
+\end{onlyenv}
+\end{frame}
+
+\begin{frame}{Gradual Typing Goals}
+  Gradually Typed languages let dynamically typed and statically typed
+  code interoperate within a single language.
+  \begin{enumerate}
+  \item Can write scripts, experimental code in the dynamically typed
+    fragment.
+  \item Can later add types for documentation, refactoring, bug
+    squashing.
+  \item But don't have to add types to the \emph{entire} program to
+    run it!
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{What do the Types Mean?}
+  How do the types affect the semantics of the program?
+  Two answers.
+  \begin{enumerate}
+  \item Optional Typing: they don't; just run the type-erased
+    program. Dynamic typing is already safe anyway.
+  \item Gradual Typing: a type system should give us a type
+    \emph{soundness} theorem, need to check types at runtime in order
+    to satisfy that.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Evaluation in Gradual Typing}
+  Say we have a function $\lambda x. t : \texttt{bool} \to \texttt{string}$ and
+  we apply it to a dynamically typed value $v : \dyn$.
+  \pause
+  If $v$ is a boolean, fine
+  \[ (\lambda x. t)(\texttt{true}) \mapsto t[\texttt{true}/x]
+  \]
+  \pause
+  Otherwise, we raise an error
+  \[ (\lambda x. t)(5) \mapsto \texttt{type error: expected ...}
+  \]
+\end{frame}
+
+\begin{frame}{Evaluation in Gradual Typing}
+  What if it is a higher-order function $\lambda x.t :
+  (\texttt{number} \to \texttt{string})\to \texttt{num}$?
+  \pause
+  If $v$ is not a function, we error
+  \[ (\lambda x.t)(\texttt{true}) \mapsto \texttt{type error:...}\]
+  But what if $v$ is a function? How can we verify that it takes
+  numbers to strings?
+  \begin{enumerate}
+  \item\pause Run the type checker at runtime :(
+  \item\pause Findler-Felleisen wrapping implementation:
+    \[ (\lambda x.t)(\lambda y. u) \mapsto t[\lambda y. \texttt{string?}(u)/x]\]
+  \item \pause Some combination: Annotate every value with best-known type
+    information, use wrapping if the type information is not precise
+    enough to tell.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Evaluation in Gradual Typing}
+  \begin{enumerate}
+  \item Which implementation should we choose?
+  \item How do we implement checking for \texttt{\$NEXT\_BIG\_TYPE\_FEATURE}?
+  \end{enumerate}
+  \pause
+  The type soundness theorem can help!
+\end{frame}
+
+\begin{frame}{Type Soundness}
+  \begin{enumerate}
+  \item For simple types, usually defined as a safety property: if $t
+    : A$ then $t$ diverges or $t$ runs to one of the allowed errors or
+    $t$ runs to a value that has type $A$.
+  \item \pause But there are also stronger, \emph{relational} soundness
+    theorems
+    \begin{enumerate}
+    \item Function extensionality: $f_1 = f_2 : A \to B$ if for every
+      $x : A$,  $f_1(x)$ has the same behavior as $f_2(x)$.
+    \item Universal Polymorphism: $v : \forall \alpha. A$, then
+      $v[B_1]$ and $v[B_2]$ ``do the same thing''.
+    \item Local State/Existential Polymorphism: If I don't expose my
+      representation, I can change it without breaking the program.
+    \end{enumerate}
+  \end{enumerate}
+  \pause
+  The stronger the type soundness theorem, the more constrained we are
+  in design.
+\end{frame}
+
+\begin{frame}{Designing Gradual Languages}
+  \begin{enumerate}
+  \item Previous work (Gradualizer, Abstracting Gradual Typing) can
+    ``automatically'' define gradual languages with a \emph{safety}
+    theorem.
+  \item But both fail on relational soundness theorems: they produce
+    violations of \emph{all} our examples: extensionality,
+    parametricity, representation independence.
+  \end{enumerate}
+  \pause
+  Conclusion: need to think beyond operational semantics
+\end{frame}
+
+\section{CBN Gradual Type Theory}
+
+\begin{frame}{Gradual Type Theory}
+  Idea: using \emph{axiomatic semantics}, flip language design on its
+  head.\\
+  \pause
+  Operational view:
+  \begin{enumerate}
+  \item Design operational semantics
+  \item Try to prove soundness
+  \end{enumerate}
+  \pause
+  Axiomatic view:
+  \begin{enumerate}
+  \item Declare by fiat that the relational type soundness theorem and
+    the gradual guarantee hold.
+  \item See if we can \emph{derive} operational rules as program
+    equivalences.
+  \end{enumerate}
+  \pause Allows us to explore the limits of the design space: many
+  languages as models, any theorems we can prove will apply to all of
+  them.
+\end{frame}
+
+\begin{frame}{Gradual Type Theory: Main Theorem}
+  \begin{block}{Theorem: Function Casts Uniqueness Principle}
+    For a call-by-name gradual language, to satisfy the $\eta$ law
+    and the gradual guarantee, casts between function types
+    \emph{must be} equivalent to the Findler-Felleisen ``wrapping''
+    implementation.
+  \end{block}
+  \pause
+  Two components:
+  \begin{itemize}
+  \item Axiomatic Semantics
+  \item Gradual Guarantee
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Axiomatic Semantics of $\lambda$-calculus}
+  In call-by-name/need:
+  \begin{mathpar}
+    \inferrule*[right=$\beta$]
+    {~}
+    {(\lambda x. t) t' \equiv t[t'/x]}
+
+    \inferrule*[right=$\eta$]
+    {t : A \to B}
+    {t \equiv (\lambda x. t \,x)}
+  \end{mathpar}
+
+  \pause
+  $\beta$ says how to run the program.\\\pause
+  $\eta$ is equivalent to the following
+  \begin{mathpar}
+    \inferrule*[right=extensionality]
+    {\Gamma, x : A \vdash t\,x \equiv t'\,x : B}
+    {\Gamma \vdash t \equiv t' : A \to B}
+  \end{mathpar}
+\end{frame}
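+% Added derivation of the equivalence claimed above, using only beta,
+% eta, and congruence for lambda.
+\begin{frame}{$\eta$ $\Leftrightarrow$ Extensionality (sketch)}
+  From $\eta$ to extensionality: if $\Gamma, x:A \vdash t\,x \equiv
+  t'\,x : B$ with $x$ not free in $t, t'$, then
+  \[ t \equiv \lambda x.\, t\,x \equiv \lambda x.\, t'\,x \equiv t' \]
+  by $\eta$, congruence for $\lambda$, and $\eta$ again.
+
+  From extensionality to $\eta$: for $t : A \to B$,
+  \[ \Gamma, x:A \vdash (\lambda x.\, t\,x)\,x \equiv t\,x : B \]
+  by $\beta$, so extensionality gives $t \equiv \lambda x.\, t\,x$.
+\end{frame}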
+
+\begin{frame}{Axiomatic Semantics of $\lambda$-calculus}
+  In call-by-value, we need to modify the equations:
+  \begin{mathpar}
+    \inferrule*[right=$cbv-\beta$]
+    {~}
+    {(\lambda x. t) v \equiv t[v/x]}
+
+    \inferrule*[right=$cbv-\eta$]
+    {v : A \to B}
+    {v \equiv (\lambda x. v \,x)}
+
+  \end{mathpar}
+  \pause
+  We did call-by-name first because we don't need to introduce this
+  distinction to handle functions.\\
+  Note that we don't treat sums or booleans.
+\end{frame}
+
+\begin{frame}{Gradual Guarantee}
+  A formal property saying that there is a ``smooth'' transition from
+  dynamic to static typing: adding types changes behavior predictably.
+\end{frame}
+
+\begin{frame}[fragile]{Gradual Guarantee}
+\begin{columns}
+\begin{column}{0.5\textwidth}
+\begin{onlyenv}<1->
+\begin{footnotesize}
+\begin{lstlisting}
+view(st) -> { ... }
+
+update(event, state) -> {
+  if first(state)
+  then ... ; Loading
+  else ... ; Valid Data
+}
+
+main() -> { .. }
+\end{lstlisting}
+\end{footnotesize}
+\end{onlyenv}
+\end{column}
+\begin{column}{0.1\textwidth}
+  \begin{onlyenv}<1>
+  $\Longrightarrow$
+  \end{onlyenv}
+  \begin{onlyenv}<2->
+  $\gtdyn$
+\end{onlyenv}
+\end{column}
+\begin{column}{0.5\textwidth}{%
+\begin{footnotesize}
+\begin{onlyenv}<1->
+\begin{lstlisting}
+view(st) -> { ... }
+
+~\colorbox{green}{type State = (Boolean, ?)}~
+~\colorbox{green}{update : (?, State) -> State}~
+update(event, state) -> {
+  if first(state)
+    then ... ; Loading
+    else ... ; Valid Data
+}
+                
+main() -> { ... }
+\end{lstlisting}
+\end{onlyenv}
+  \end{footnotesize}}
+\end{column}
+\end{columns}
+  \begin{onlyenv}<2->
+    Adding types \emph{refines} program behavior
+
+  \end{onlyenv}
+
+\end{frame}
+
+\begin{frame}{Gradual Guarantee}
+  Syntactically,
+  \[ t \ltdyn t' \]
+  if $t$ and $t'$ have the same type erasure, and every type in $t$ is
+  ``less dynamic'' than the corresponding type in $t'$
+
+  \pause Semantically, if $t \ltdyn t'$ and $t, t' : \texttt{Bool}$, then one of:
+  \begin{enumerate}
+  \item $t$ runs to a type error (and $t'$ can do anything)
+  \item $t$ and $t'$ diverge
+  \item $t \mapsto \texttt{true}$ and $t' \mapsto \texttt{true}$
+  \item $t \mapsto \texttt{false}$ and $t' \mapsto \texttt{false}$
+  \end{enumerate}
+\end{frame}
+
+\subsection{Syntax of GTT}
+
+\begin{frame}{Syntax of Gradual Type Theory}
+  4 sorts in Gradual Type Theory
+  \begin{enumerate}
+  \item Types $A,B,C$
+  \item Terms $x_1 : A_1,\ldots, x_n:A_n \vdash t : A$
+  \item Type Dynamism: $A \ltdyn B$ means $A$ is less dynamic than $B$
+  \item Term Dynamism: $x_1 \ltdyn y_1 : A_1 \ltdyn B_1,\ldots \vdash t \ltdyn u : A \ltdyn B$
+  \end{enumerate}
+  Type and term dynamism axiomatize gradual guarantee.
+\end{frame}
+
+\begin{frame}{Types and Terms}
+  Add dynamic type, casts, error to typed lambda calculus.
+  \begin{mathpar}
+    \begin{array}{rrl}
+      \text{types} & A,B,C \bnfdef & \dyn \bnfalt A \to B \bnfalt A \times B \bnfalt 1\\
+      \text{terms} & s,t,u \bnfdef& x \bnfalt \lambda (x : A). t \bnfalt t u\\&&
+      \bnfalt (t, u) \bnfalt \pi_i t \bnfalt ()\\&&
+      \bnfalt \err_{A} \bnfalt \upcast A B t \bnfalt \dncast A B t
+    \end{array}
+  \end{mathpar}
+\end{frame}
+%% \begin{frame}{Terms}
+%%   Terms are all intrinsically typed, we have variables and
+%%   substitution as usual.
+%%   %% and we can also add function symbols as
+%%   %% primitives.
+
+%%   \begin{mathpar}
+%%     \inferrule
+%%     {x:A \in \Gamma}
+%%     {\Gamma \vdash x : A}
+
+%%     \inferrule
+%%     {\Gamma \vdash u : B\and
+%%       \Gamma, x: B \vdash t : A}
+%%     {\Gamma \vdash t[u/x] : A}
+
+%%     \inferrule
+%%     {\Gamma \vdash t_1 : A_1,\ldots, \Gamma \vdash t_n : A_n \and f : (A_1,\ldots,A_n) \to B}
+%%     {\Gamma \vdash f(t_1,\ldots,t_n) : B}
+%%   \end{mathpar}
+%% \end{frame}
+
+\begin{frame}{Type Dynamism}
+  \[ A \ltdyn B \]
+  read: $A$ is ``less dynamic''/``more precise'' than $B$. \\
+  \begin{itemize}
+  \item Syntactic intuition: $B$ has more $\dyn$s in it.
+  \item Semantic intuition: the type given by $A$ has a more
+    restrictive interface than $B$.
+  \end{itemize}
+
+\end{frame}
+
+\begin{frame}{Type Dynamism Axioms}
+  \begin{mathpar}
+  \inferrule{~}{A \dynr A}\and
+  \inferrule{A \dynr B \and B \dynr C}{A \dynr C}\and
+  \inferrule{~}{A \ltdyn \dyn}\\
+  \inferrule{A \ltdyn A' \and B \ltdyn B'}{A \to B \ltdyn A' \to B'}\and
+  \inferrule{A \ltdyn A' \and B \ltdyn B'}{A \times B \ltdyn A' \times B'}
+  \end{mathpar}
+  
+\end{frame}
+
+%% \begin{frame}{Term Dynamism INTUITION}
+%%   Models the refinement relation of gradual guarantee. Here's an example
+%%   \emph{model} for intuition.\\
+%%   \pause
+%%   For every $A \ltdyn A'$, if
+%%   \begin{mathpar}
+%%     \cdot  \vdash t \ltdyn t' : A \ltdyn A'
+%%   \end{mathpar}
+%%   Then either
+%%   \begin{enumerate}
+%%   \item $t$ runs to a type error.
+%%   \item $t,t'$ both diverge
+%%   \item $t \Downarrow v$ and $t' \Downarrow v'$ and $v \ltdyn v'$.
+%%   \end{enumerate}
+  
+%%   \notepr{
+%%   The $A \ltdyn A'$ restriction ensures we have a sensible
+%%   way to interpret that.
+%%   }
+%% \end{frame}
+
+\begin{frame}{Term Dynamism}
+  \begin{mathpar}
+    x_1 \ltdyn x_1' : A_1 \ltdyn A_1' ,\ldots \vdash t \ltdyn t' : B \ltdyn B' 
+  \end{mathpar}
+  read: ``if all the $x_i$s are less than $x_i'$s then $t$ is less than $t'$''.\\
+  \pause
+  Note that it is only well-formed if all of the $A_i \ltdyn A_i'$ and
+  $B \ltdyn B'$, and
+  \begin{mathpar}
+    x_1:A_1,\ldots\vdash t : B\\
+    x_1':A_1',\ldots\vdash t' : B'\\
+  \end{mathpar}
+  %% We sometimes abbreviate the context ordering as $\Phi \vdash t
+  %% \ltdyn t' : B \ltdyn B'$.
+  \pause
+  This fully explicit notation gets quite long, so in the talk I'll
+  abbreviate the above as:
+  \[
+  \Phi \vdash t \ltdyn t' : B \ltdyn B'
+  \]
+\end{frame}
+
+\begin{frame}{Term Dynamism Axioms}
+  Identity and substitution are monotone.
+  %% For open terms, identity and substitution properties that should
+  %% evoke a logical relation interpretation. Substitution says that
+  %% every term is monotone in its free variables.
+  \begin{mathpar}
+    \inferrule
+    {(x \ltdyn x' : A \ltdyn A') \in \Phi}
+    {\Phi \vdash x \ltdyn x' : A \ltdyn A'}
+
+    \inferrule
+        {x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' \and
+      u \ltdyn u' : A \ltdyn A'}
+    {t[u/x] \ltdyn t'[u'/x']}
+  \end{mathpar}
+  %% \pause Note that these are the same as ordinary identity and
+  %% substitution but ``monotonically''.
+\end{frame}
+
+\begin{frame}{Term Dynamism}
+  Like type dynamism, term dynamism has reflexivity and transitivity
+  rules:
+  %% note
+  %% how well-formedness relies on the reflexivity and transitivity of
+  %% type dynamism
+
+  \begin{mathpar}
+    \inferrule
+    {\vdash t : B}
+    {t \ltdyn t : B \ltdyn B}
+
+    \inferrule
+    {t \ltdyn t' : B \ltdyn B'\\
+     t' \ltdyn t'' : B' \ltdyn B''}
+    {t \ltdyn t'' : B \ltdyn B''}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}[fragile]{Term Dynamism Squares}
+  These can be nicely visualized as squares:
+  For $\Phi : \Gamma \ltdyn \Gamma'$, we can think of 
+  \[ \Phi \vdash t \ltdyn t' : A \ltdyn A' \]
+  as
+  \[
+  \begin{tikzcd}
+    \Gamma \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "t" ]
+    &
+    \Gamma' \arrow[d , "t'"]
+    \\
+    A \arrow[ dashed , r, "\preciser" ]
+    &
+    A'
+  \end{tikzcd}
+  \]
+  
+\end{frame}
+
+\begin{frame}[fragile]{Term Dynamism Squares}
+  Then transitivity and substitution are horizontal and vertical pasting of
+  squares, with reflexivity and variables as the respective identity squares:
+  \[
+\begin{tikzcd}
+\Gamma \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "t" ]
+ &
+\Gamma \arrow[d , "t"]
+ \\
+A \arrow[ dashed , r, "\preciser" ]
+&
+A
+\end{tikzcd}
+\qquad
+\begin{tikzcd}
+\Gamma \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "t" ]
+ &
+\Gamma' \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "t'" ]
+&
+\Gamma'' \arrow[d , "t''"]
+ \\
+A \arrow[ dashed , r, "\preciser" ]
+&
+A' \arrow[ dashed , r, "\preciser" ]
+&
+A'' 
+\end{tikzcd}
+\]\[
+\begin{tikzcd}
+\Gamma,x:A \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "x" ]
+ &
+\Gamma',x:A' \arrow[d , "x"]
+ \\
+A \arrow[ dashed , r, "\preciser" ]
+&
+A'
+\end{tikzcd}
+\qquad
+\begin{tikzcd}
+\Gamma \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "u" ]
+ &
+\Gamma' \arrow[ d, swap, "u'" ]
+\\
+\Gamma,x:A \arrow[ dashed , r, "\preciser" ] \arrow[ d, swap, "t" ]
+ &
+\Gamma',x:A' \arrow[ d, swap, "t'" ]
+\\ 
+B \arrow[ dashed , r, "\preciser" ]
+&
+B'
+\end{tikzcd}
+\]
+\end{frame}
+
+\begin{frame}{Dynamic Type and Type Error}
+  The dynamic type is the most dynamic type.
+
+  \[ A \ltdyn \dyn \]
+  
+  The type error is the \emph{least} dynamic term
+  \notepr{recall the intuition I gave before}
+
+  \[ \Phi \vdash \err_A \ltdyn t : A \ltdyn A' \]
+\end{frame}
+\begin{frame}{Function Type}
+  First, we have the usual term structure for the function type:
+  \begin{mathpar}
+    \inferrule
+    {\Gamma, x : A \vdash t : B}
+    {\Gamma \vdash \lambda x:A. t : A \to B}
+
+    \inferrule
+    {\Gamma \vdash t : A \to B\and \Gamma \vdash u : A}
+    {\Gamma \vdash t u : B}
+  \end{mathpar}
+  \pause
+  And we add the $\beta, \eta$ laws as ``equi-dynamism'' axioms:
+
+  \begin{mathpar}
+    \inferrule
+    {}
+    {(\lambda x:A. t) u \equidyn t[u/x]}
+
+    \inferrule
+    {t : A \to B}
+    {t \equidyn (\lambda x:A. t x)}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Function Type}
+  Next, to model the gradual guarantee, we assert that everything is monotone.
+  \begin{mathpar}
+    \inferrule
+    {A \ltdyn A' \and B \ltdyn B'}
+    {A \to B \ltdyn A' \to B'}\\
+    \inferrule
+    {x \ltdyn x' : A \ltdyn A' \vdash t \ltdyn t' : B \ltdyn B'}
+    {\lambda x:A. t \ltdyn \lambda x':A'.t' }
+
+    \inferrule
+    {t \ltdyn t' : A \to B \ltdyn A' \to B'\and
+      u \ltdyn u' : A \ltdyn A' %% \and B \ltdyn B'
+    }
+    {t\, u \ltdyn t'\, u' : B \ltdyn B'}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Product Type}
+  Product (and unit) types get a similar treatment.
+  \begin{mathpar}
+    \inferrule
+    {\Gamma  \vdash t_1 : A_1\\ \Gamma \vdash t_2 : A_2}
+    {\Gamma \vdash (t_1,t_2) : A_1 \times A_2}
+
+    \inferrule
+    {\Gamma \vdash t : A_1 \times A_2}
+    {\Gamma \vdash \pi_i t : A_i}\\
+
+    \inferrule
+    {}
+    {\pi_i(t_1,t_2) \equidyn t_i}
+
+    \inferrule
+    {t : A_1 \times A_2}
+    {t \equidyn (\pi_1 t, \pi_2 t)}\\
+
+    \inferrule{t_1 \ltdyn t_1' : A_1 \ltdyn A_1' \and t_2 \ltdyn t_2' : A_2 \ltdyn A_2'}{(t_1,t_2) \ltdyn (t_1',t_2') : A_1 \times A_2 \ltdyn A_1' \times A_2'}
+    
+    \inferrule{t \ltdyn t' : A_1 \times A_2 \ltdyn A_1' \times A_2' \and A_i \ltdyn A_i'}{\pi_i t \ltdyn \pi_i t' : A_i \ltdyn A_i'}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Casts}
+  How to define the casts axiomatically?
+\end{frame}
+
+\begin{frame}{Operational View of Casts}
+  \begin{center}
+    The Wadler--Findler operational semantics for casts:
+    \includegraphics[width=\textwidth,height=0.8\textheight,keepaspectratio]{Wadler-Findler-operational-semantics}
+  \end{center}
+\end{frame}
+
+\begin{frame}{Casts}
+  How to define the casts axiomatically?
+  \begin{itemize}
+  \item We \emph{characterize} them by a property with respect to $\ltdyn$.
+  \item We can do this when our casts are \emph{upcasts} or
+    \emph{downcasts}.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Casts}
+  When $A \ltdyn B$, we get two functions: the \emph{upcast} $\upcast
+  A B$ and the \emph{downcast} $\dncast A B$.
+
+  \begin{mathpar}
+    \inferrule
+    {\Gamma \vdash t : A \and A \ltdyn B}
+    {\Gamma \vdash \upcast A B t : B}
+
+    \inferrule
+    {\Gamma \vdash u : B \and A \ltdyn B}
+    {\Gamma \vdash \dncast A B u : A}
+  \end{mathpar}
+\end{frame}
+
+\begin{frame}{Oblique Casts}
+  Not every cast in gradual typing is an upcast or a downcast; for
+  instance, the cast
+
+  \[ \obcast {\texttt{Bool} \times \dyn} {\dyn \times \texttt{Bool}} \]
+
+  But we can use a trick due to Henglein: factor through the dynamic type:
+
+  \[ \obcast{A}{B}t = \dncast B \dyn \upcast A \dyn t\]
+\end{frame}
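+
+%% Small example added for concreteness: the factorization above instantiated
+%% at the oblique cast shown on this slide.
+\begin{frame}{Oblique Casts: Example}
+  For the cast above, since $\texttt{Bool} \times \dyn \ltdyn \dyn$ and
+  $\dyn \times \texttt{Bool} \ltdyn \dyn$, the factorization gives
+  \[ \obcast {\texttt{Bool} \times \dyn} {\dyn \times \texttt{Bool}} t
+     = \dncast {\dyn \times \texttt{Bool}} {\dyn} \upcast {\texttt{Bool} \times \dyn} {\dyn} t \]
+\end{frame}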
+
+\begin{frame}{Casts}
+  How should casts behave?
+  Intuition: the casted term should act as much like the original term
+  as possible while still satisfying the new type.
+  \pause
+
+  So for a downcast, the casted term should refine the original:
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\dncast A B u \ltdyn u : A \ltdyn B}
+  \end{mathpar}
+
+  \pause But this would be satisfied even by the type error $\err_A$.
+  \pause
+  We want to make sure that the casted term has \emph{as much}
+  behavior as possible: i.e., it's the \emph{biggest} term below
+  $u$.
+  \begin{mathpar}
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash t \ltdyn \dncast A B u : A}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\dncast A B u$ is the greatest
+  lower bound of $u$ in $A$.
+\end{frame}
+
+\begin{frame}{Casts}
+  Upcasts are the exact dual.
+
+  For an upcast, the casted term should be \emph{more} dynamic than
+  the original:
+  \begin{mathpar}
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \upcast A B t : A \ltdyn B}
+  \end{mathpar}
+
+  But it has no more behaviors than dictated by the original term:
+  it's the \emph{least} term above $t$.
+
+  \begin{mathpar}
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash \upcast A B t \ltdyn u : B}
+  \end{mathpar}
+  \pause Order-theoretic summary: $\upcast A B t$ is the least upper
+  bound of $t$ in $B$.
+\end{frame}
+
+\begin{frame}{Casts}
+  \begin{enumerate}
+  \item Note that this is a \emph{precise} specification because lubs
+    and glbs are unique (up to $\equidyn$).
+  \item Since they are exact \emph{duals}, most theorems about one
+    dualize to the other.
+  \end{enumerate}
+\end{frame}
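+
+%% Proof sketch added to spell out the uniqueness claim above; $d_1$ and $d_2$
+%% are hypothetical terms both satisfying the downcast specification for
+%% $A \ltdyn B$.
+\begin{frame}{Why the Specification is Precise}
+  Suppose $d_1$ and $d_2$ both satisfy the downcast specification for
+  $A \ltdyn B$, and let $u : B$. Then:
+  \begin{enumerate}
+  \item By the first rule for $d_1$:\quad $d_1\,u \ltdyn u : A \ltdyn B$.
+  \item By the second rule for $d_2$:\quad $d_1\,u \ltdyn d_2\,u : A$.
+  \item Symmetrically $d_2\,u \ltdyn d_1\,u : A$, so $d_1\,u \equidyn d_2\,u$.
+  \end{enumerate}
+  The argument for upcasts is dual.
+\end{frame}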
+
+\subsection{Theorems in GTT}
+
+\begin{frame}{Consequences of GTT}
+  \begin{itemize}
+  \item Now that we have a somewhat rich logic of term dynamism, what can
+    we \emph{prove} in the theory?
+  \item Theorems in the theory hold in any model, e.g.\ a simple lazy
+    gradually typed language.
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Wrapping Implementation}
+  We can prove that the wrapping
+  implementation of contracts is the unique implementation of casts
+  between function types. I.e., if $A \ltdyn A'$ and $B \ltdyn B'$, then for $t : A' \to B'$:
+
+  \[
+  \dncast {A \to B} {A' \to B'} t \equidyn \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  and, for $t : A \to B$,
+  \[
+  \upcast {A \to B} {A' \to B'} t \equidyn \lambda x':A'. \upcast{B}{B'}(t (\dncast A {A'} x'))
+  \]
+\end{frame}
+
+\begin{frame}{Wrapping Implementation}
+  One direction:
+  To prove
+  \[ \dncast {A \to B} {A' \to B'} t \ltdyn \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  By extensionality, it is sufficient to show
+  \[
+  x : A \vdash (\dncast {A \to B} {A' \to B'} t) x \ltdyn \dncast{B}{B'}(t (\upcast A {A'} x))
+  \]
+  Then it follows by congruence because:
+  \begin{enumerate}
+  \item $(\dncast {A \to B} {A' \to B'} t)\ltdyn t$
+  \item $x \ltdyn \upcast A {A'} x$
+  \item if $u \ltdyn u'$ then $u \ltdyn \dncast {B} {B'} u'$
+  \end{enumerate}
+  \pause
+  The other three cases are similar/dual.
+\end{frame}
+\begin{frame}{Wrapping Implementation}
+  If we weaken $\eta$ to an inequality:
+  \begin{itemize}
+  \item If $t \ltdyn (\lambda x. t\,x)$:
+    \[
+    \dncast {A \to B} {A' \to B'} t \ltdyn \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+    \]
+    \[
+    \upcast {A \to B} {A' \to B'} t \ltdyn \lambda x':A'. \upcast{B}{B'}(t (\dncast A {A'} x'))
+    \]
+  \item If $t \gtdyn (\lambda x. t\,x)$:
+    \[
+    \dncast {A \to B} {A' \to B'} t \gtdyn \lambda x:A. \dncast{B}{B'}(t (\upcast A {A'} x))
+    \]
+    \[
+    \upcast {A \to B} {A' \to B'} t \gtdyn \lambda x':A'. \upcast{B}{B'}(t (\dncast A {A'} x'))
+    \]
+  \end{itemize}
+\end{frame}
+
+\begin{frame}{Casts are Galois Connections}
+  \notepr{  First, we can show that every pair of upcast and downcast forms a
+  Galois connection, which}
+
+  Two ``round-trip'' properties:
+  \pause
+  Casting down and back up has ``more errors''
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\upcast A B \dncast A B u \ltdyn u : B}
+  \end{mathpar}
+  \pause
+  And casting up and back down has ``fewer errors''
+  \begin{mathpar}
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \dncast A B \upcast A B t : A}
+  \end{mathpar}
+  \pause In practice, we probably want to strengthen this to an
+  equality, so we can add the ``retract axiom'' as well to the system
+  (though none of the theorems I'll show depend on it):
+  \begin{mathpar}
+    \inferrule*[right=RETRACT-AXIOM]
+    {t : A \and A \ltdyn B}
+    {\dncast A B \upcast A B t \ltdyn t : A}
+  \end{mathpar}
+\end{frame}
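+
+%% Derivation added for exposition: the first round-trip property follows
+%% directly from the cast specifications given earlier.
+\begin{frame}{Round-trips from the Specification}
+  The first round-trip property is a two-step consequence of the
+  specifications:
+  \begin{enumerate}
+  \item Downcast spec:\quad $\dncast A B u \ltdyn u : A \ltdyn B$.
+  \item Upcast spec applied to that ordering:\quad $\upcast A B \dncast A B u \ltdyn u : B$.
+  \end{enumerate}
+  Dually, $t \ltdyn \upcast A B t : A \ltdyn B$ gives
+  $t \ltdyn \dncast A B \upcast A B t : A$.
+\end{frame}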
+
+\begin{frame}{Identity and Composition}
+  Reflexivity gives the identity cast and transitivity gives the
+  composition of casts
+  \begin{enumerate}
+  \item $\dncast A A t \equidyn t$
+  \item If $A \ltdyn B \ltdyn C$, then $\dncast A C t \equidyn \dncast A B \dncast B C t$
+  \end{enumerate}
+  \pause
+  These appear as rules of the operational semantics, for instance:
+  \begin{align*}
+    \dncast{\dyn}{\dyn} v &\mapsto v\\
+    \dncast{A \to B}{\dyn} v &\mapsto \dncast {A \to B}{\dyn \to \dyn}\dncast {\dyn \to \dyn} \dyn v
+  \end{align*}
+\end{frame}
+
+%% \begin{frame}{Identity Cast}
+%%   The upcast/downcast from $A$ to $A$ is the identity function,
+%%   i.e. $\upcast A A t \equidyn t$.  Each direction is an instance of
+%%   the specification of upcasts:
+
+%%   \begin{mathpar}
+%%   \inferrule*[right=UR]
+%%       {~}
+%%       {t \ltdyn \upcast{A}{A}{t} : A}\and
+
+%%   \inferrule*[right=UL]
+%%       {t \ltdyn t : A \ltdyn A}
+%%       {\upcast{A}{A}{t} \ltdyn t : A}
+%%   \end{mathpar}
+
+%%   Downcast proof is precise dual.
+%% \end{frame}
+
+%% \begin{frame}{Casts (de)compose}
+%%   If $A \ltdyn B \ltdyn C$ then a cast between $A$ and $B$ factors
+%%   through $C$:
+
+%%   \begin{mathpar}
+%%     \inferrule
+%%     {A \ltdyn B \ltdyn C}
+%%     {\dncast A B \dncast B C t \equidyn \dncast A C t}
+%%   \end{mathpar}
+
+%%   Example: casts between dynamic type and function types factor
+%%   through ``most dynamic'' function type:
+%%   \begin{mathpar}
+%%     \dncast {A \to B}{\dyn \to \dyn}\dncast \dyn {\dyn \to \dyn} t \equidyn \dncast{A \to B}{\dyn} t
+%%   \end{mathpar}
+%%   This is often a rule of the operational semantics.
+%% \end{frame}
+
+\begin{frame}{Non-theorem: Disjointness of Constructors}
+  The only standard operational rules we cannot prove are the ones like
+  \[
+  \obcast {A \times B} {A' \to B'} t \mapsto \err
+  \]
+  that say that the type constructors $\times, \to, 1$ are all
+  disjoint.
+
+  We also have a model in which it is false. We can always add axioms to
+  make it true, though.
+
+  %% In fact, written as equidynamism instead of stepping, these are
+  %% \emph{independent} of the other axioms: we can construct adequate
+  %% models where it's true and ones where it's false(!)
+
+  %% The reason is that our definition of function types, product types
+  %% et cetera only defines them \emph{up to isomorphism}, not up to cast
+  %% equivalence. And we know very little about the dynamic type, just
+  %% that everything embeds in it.
+\end{frame}
+
+%% \begin{frame}{Term Dynamism Reduces to Globular Dynamism}
+%%   The presence of the casts means that we can reduce all type dynamism
+%%   to ordering \emph{at the same type}, which is more typical for
+%%   logical relations.
+%%   \begin{mathpar}
+%%     {x \ltdyn y : A \ltdyn B \vdash t \ltdyn u : C \ltdyn D}\\
+%%     \iff\\
+%%     {x \ltdyn x : A \ltdyn A \vdash t \ltdyn \dncast C D u[\upcast A B x/y] : C \ltdyn C}\\
+%%     \iff\\
+%%     {x \ltdyn x : A \ltdyn A \vdash \upcast C D t \ltdyn u[\upcast A B x/y] : D \ltdyn D}\\
+%%     \iff\\
+%%     {y \ltdyn y : B \ltdyn B \vdash \upcast C D t[\dncast A B y/x] \ltdyn u : D \ltdyn D}\\
+%%     \iff\\
+%%     {y \ltdyn y : B \ltdyn B \vdash t[\dncast A B y/x] \ltdyn \dncast C D u : C \ltdyn C}\\
+%%   \end{mathpar}
+%% \end{frame}
+
+\subsection{Models of GTT}
+
+\begin{frame}{Models of GTT}
+  So far this is just a theory; we need models to show it's any good.
+  \begin{enumerate}
+  \item In the paper we give a denotational model similar to Typed
+    Racket and to Scott's model of the untyped lambda calculus.
+  \item We've made a logical relation model for a call-by-value
+    language, but don't have the axiomatic call-by-value theory yet.
+  \end{enumerate}
+\end{frame}
+
+%% \begin{frame}{Models of GTT}
+%%   Just a theory...
+%%   \begin{enumerate}
+%%   \item We give models in which $0 \not\ltdyn 1$ and even better
+%%     $\Omega \not\ltdyn \err$. Shows the theory is \emph{consistent}.
+%%   \item We give a domain theoretic model that is an analogue of
+%%     Scott's semantics of untyped lambda calculus, but where \emph{type
+%%       error} is not modeled as divergence.
+%%   \end{enumerate}
+%% \end{frame}
+
+%% \begin{frame}{Categorical Model}
+%%   We define an abstract notion of model using categories enriched in
+%%   preorders: encodes the idea that ``everything is monotone'' using
+%%   enrichment.
+%% \end{frame}
+
+%% \begin{frame}{Types as EP Pairs}
+%%   Using that, we define an abstract model construction theorem broken
+%%   up into 3 steps.
+%%   We summarize the model here.
+
+%%   TODO: table
+%%   input: language/category with notion of ordering *within each type*, a specified dynamic type and the upcsat/downcast ep pairs
+%%   output: model of gradual type theory
+%%   Types: ep pairs into a specific dynamic type
+%%   Type Dynamism: ep pair factorization
+%%   Terms: terms
+%%   Term Dynamism: insert ep pairs, then ordering
+%% \end{frame}
+
+\section{Conclusion}
+
+\begin{frame}{Summary}
+  \begin{enumerate}
+  \item By axiomatizing term precision/dynamism, we can give casts a
+    specification.
+  \item Gradual guarantee + extensionality = wrapping, so an implementation
+    other than wrapping gives up either the gradual guarantee or extensionality.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Objections}
+  \begin{enumerate}
+  \item Should the language have $\eta$ for arbitrary terms or just
+    those that never error?
+    \begin{enumerate}
+    \item Violated by ``eager'' cast semantics.
+    \item Does it make sense for eta to be an expansion? Then maybe
+      wrapping is the ``maximal'' implementation instead of the unique
+      one.
+    \end{enumerate}
+  \item The gradual guarantee is, like parametricity, brittle: it can't
+    catch type errors! But maybe it is still helpful for designing cast
+    calculi even if we throw it away later.
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Future Work}
+  \begin{enumerate}
+  \item Call-by-value! Already have a logical relation, but want the
+    nice axiomatic presentation.
+  \item Relation to Abstracting Gradual Typing: is AGT an abstract
+    interpretation of GTT?
+  \item Polymorphism: can we use the techniques here to ``explain''
+    the complex cast calculi for dynamic typing?
+  \item State: what are the \emph{program equivalence} tradeoffs
+    between monotonic and wrapping references?
+  \end{enumerate}
+\end{frame}
+
+\begin{frame}{Conclusion}
+  \begin{itemize}
+  \item Gradualize Axiomatic Semantics
+  \item\pause Paper accepted to FSCD '18, extended version available on
+    arXiv: ``Call-by-Name Gradual Type Theory'' by New and Licata
+  \end{itemize}
+  
+  \pause
+  \begin{mathpar}
+    \inferrule
+    {u : B \and A \ltdyn B}
+    {\dncast A B u \ltdyn u : A \ltdyn B}
+
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash t \ltdyn \dncast A B u : A}\\
+
+    \inferrule
+    {t : A \and A \ltdyn B}
+    {t \ltdyn \upcast A B t : A \ltdyn B}
+
+    \inferrule
+    {\Phi \vdash t \ltdyn u : A \ltdyn B}
+    {\Phi \vdash \upcast A B t \ltdyn u : B}
+  \end{mathpar}
+
+\end{frame}
+\end{document}
+
+%% Local Variables:
+%% compile-command: "pdflatex talk.tex"
+%% End:
diff --git a/talk/cmu-fall-2018/outline.org b/talk/cmu-fall-2018/outline.org
new file mode 100644
index 0000000000000000000000000000000000000000..b299c0f446ff826bcae897f9609dc4afea9a3290
--- /dev/null
+++ b/talk/cmu-fall-2018/outline.org
@@ -0,0 +1,66 @@
+* Outline
+  1) The problem: gradual typing is good, but the state of the art is
+     questionable. Cast semantics is in flux. Re-use old slides
+     - Introduce graduality as a key 
+  2) CBN GTT: simple axiomatics, show the proof for function types,
+     mention category theoretic connections. Re-use old slides
+     - Mention double category semantics briefly, which are closely
+       related to parametricity
+  3) CBPV GTT: captures CBV and CBN, refines analysis of casts:
+     upcasts are pure values, downcasts are stacks. Make new slides!
+* TODO Today
+  - [ ] (Before the talk). Re-org for parts 1 and 2 of the talk. Merge
+    NEU and FSCD talks
+  - [ ] (This afternoon) flesh out intro
+  - [ ] (This afternoon) Add some CBPV slides at the end
+  - [ ] (Sunday) Finish slides, practice.
+* Expanded Outline
+** Intro
+   - Gradual typing is about migration from dynamic to static typing
+     styles
+     - the "dynamics" of GT
+   - Very basic account of mechanics
+     - very simple overview of the "mechanics" of GT, no details
+   - But the state of the art is *nasty*
+     - many different cast semantics have been proposed
+     - just present a pile of rules
+   - Our approach: type theoretic, axiomatic. Take the properties of
+     types and casts that we want, decompose complex structures,
+     *derive* the complex definitions from a small type-theoretic core
+     - Key ingredient: *graduality*
+     - Key obstacle: dynamic typing inherently involves *effects*, so
+       we need equational theories that are sound even in the presence
+       of effects
+** CBN GTT
+   - Motivate:
+     - Based on FSCD 2018
+     - start with a simple, CBN lambda calculus, only do the negative
+       types. This works even with effects, but not 
+   - Syntax
+     - types, type precision
+     - terms, term precision
+     - Casts
+   - Theorems
+     - Casts form galois connections, in practice strengthen this to
+       ep pair
+     - Uniqueness theorems: casts must be their functorial actions
+   - Semantics
+     - Category internal to preorders: well-studied under the name
+       "double categories"
+     - A double category that has all upcasts/downcasts is called a
+       "proarrow equipment" or a "framed bicategory", some nice work
+       by Mike Shulman on this
+** CBPV GTT (conditionally accepted to POPL 2019)
+   - Motivate:
+     - most gradual typing is CBV
+     - want to prove program equivalences in the presence of effects
+   - Intro to CBPV
+   - Casts as values/stacks
+     - same definition(!)
+     - recover 
+   - Used non-gradual CBPV as a metalanguage for defining models
+     - Dynamic type for Scheme in CBPV
+   - Experience
+     - CBPV rocks: keep the nice equational theory, but handle effects
+     - The syntax guided us naturally to the right solution
+     - type theoretic approach plays nicely with others
diff --git a/talk/cmu-fall-2018/talk-defs.tex b/talk/cmu-fall-2018/talk-defs.tex
new file mode 100644
index 0000000000000000000000000000000000000000..28094d922d43d3e33813c126cb6f808fcab1cab3
--- /dev/null
+++ b/talk/cmu-fall-2018/talk-defs.tex
@@ -0,0 +1,55 @@
+\usepackage{stmaryrd}
+\usepackage{graphicx}
+
+\newcommand{\preciser}{\sqsubseteq}
+\newcommand{\dynr}{\sqsubseteq}
+\newcommand{\equiprecise}{\mathrel{\sqsupseteq\sqsubseteq}}
+\newcommand{\equidyn}{\mathrel{\sqsupseteq\sqsubseteq}}
+\newcommand{\vpreciser}{\rotatebox[origin=c]{-90}{$\preciser$}}
+
+\newcommand{\dyn}{{?}}
+\newcommand{\obcast}[2]{\langle{#2}\Leftarrow{#1}\rangle}
+\newcommand{\upcast}[2]{\langle{#2}\leftarrowtail{#1}\rangle}
+\newcommand{\upcastpr}[2]{\langle{#2}\hookleftarrow{#1}\rangle}
+\newcommand{\dncast}[2]{\langle{#1}\twoheadleftarrow{#2}\rangle}
+
+\newcommand{\type}{\,\text{type}}
+\newcommand{\ctx}{\,\text{ctx}}
+\newcommand{\dom}{\text{dom}}
+
+\newcommand{\cat}{\mathbb}
+\newcommand{\Ctx}{\text{Ctx}}
+\newcommand{\Multi}{\text{Multi}}
+\newcommand{\PTT}{\text{PTT}}
+\newcommand{\PTTS}{PttS }
+\newcommand{\CPM}{CPM }
+\newcommand{\GTT}{\text{GTT}}
+\newcommand{\pocat}{\text{PoCat}}
+\newcommand{\sem}[1]{\llbracket{#1}\rrbracket}
+\newcommand{\interp}[1]{\llparenthesis{#1}\rrparenthesis}
+\newcommand{\floor}[1]{\lfloor{#1}\rfloor}
+
+\newcommand{\err}{\mho}
+\newcommand{\id}{\text{id}}
+\newcommand{\app}{\text{app}}
+\newcommand{\pair}{\text{pair}}
+\newcommand{\unit}{\text{unit}}
+\newcommand{\tl}{\triangleleft}
+
+\newcommand{\coreflection}{\text{CoReflect}}
+
+% mathpartir hacks
+\newcommand{\axiom}[2]{\inferrule*[Right=#1]{~}{#2}}
+\newcommand{\invinferrule}{\mprset{fraction={===}}\inferrule}
+
+\newcommand{\funupcast}[6]{\lambda #6 : #3. \upcast{#2}{#4}{(#5 (\dncast{#1}{#3}{#6}))}}
+\newcommand{\fundncast}[6]{\lambda #6 : #3. \dncast{#2}{#4}{(#5 (\upcast{#1}{#3}{#6}))}}
+
+\newcommand{\produpcast}[5]{(\upcast{#1}{#3}\pi_0{#5}, \upcast{#2}{#4}\pi_1{#5})}
+
+\newcommand{\bnfalt}{\mathrel{\bf \,\mid\,}}
+\newcommand{\bnfdef}{\mathrel{\bf ::=}}
+
+%% Local Variables:
+%% compile-command: "latexmk -c main.tex; latexmk -pdf main.tex"
+%% End:
diff --git a/talk/popl-winter-2019/outline.org b/talk/popl-winter-2019/outline.org
new file mode 100644
index 0000000000000000000000000000000000000000..c3e982431bff44460b4d39fb2457bd77836903f8
--- /dev/null
+++ b/talk/popl-winter-2019/outline.org
@@ -0,0 +1,88 @@
+* Technical Bits
+  - How much CBPV do we discuss? (little)
+  - Do we show the specification for casts (yes)
+  - Do we show the dynamic computation type? (maybe)
+* Outline
+** Intro
+   - Goals of GT (copy the ICFP slides here)
+   - Issue: what should the dynamic semantics be? We want the types to
+     remain meaningful
+   - This paper: Axiomatize our desires: typed reasoning/optimization
+     and gradual evolution. Explore the consequences
+     - Main result: Strong typed based reasoning *uniquely determines*
+       most of the cast semantics.
+     - Prove for CBV, CBN and mix of these semantics for simple types.
+** Background
+*** Graduality
+    - Graduality says that changing the types of a program to be more
+      *precise* means that the resulting program should have more
+      refined behavior.
+*** CBPV
+    - Gradual Typing inherently involves
+      - Higher-order dynamic types ? -> ? <| ? so we have recursion
+      - Runtime errors
+      - Need an axiomatic framework that accommodates effects
+      - Our savior: CBPV
+      - subsumes CBV and CBN in a strong sense
+    - What is CBPV?
+      - An effect calculus into which call-by-value and call-by-name
+        languages can be embedded
+      - Based on beta, eta axioms: lambda calculus for effects
+      - Generalizes Moggi's metalanguage from (strong) monads to
+        adjunctions
+** Axioms (Theory)
+** Theorems (Consequences)
+** Models?
+** Conclusion
+* Real Outline
+** Intro
+*** What is Gradual Typing for?
+*** The problem: many different semantics, trade off soundness
+*** Design Principles have emerged
+*** Our Contribution: Axiomatize these design principles, show that some of them have *unique solutions*
+** What are the Principles?
+*** Graduality
+*** Type Based Reasoning
+**** The Humble Eta Principle(s)
+     - Naive Eta Principle
+     - CBV Eta Principle for functions
+     - Show Sum or Bool Eta principle
+       - fails in CBN
+** Axiomatizing the Principles
+*** Extend CBPV
+    - 3 ingredients: ordering, dynamic and casts
+    - For each type add congruence, beta, eta
+    - Emphasize *modularity*
+** Consequences of the Axioms
+*** Laundry List of Theorems
+    - Purity of upcasts/dual purity of downcasts (related to
+      Wadler-Findler/TH-Felleisen's blame soundness property)
+      - applications to optimization?
+    - Compositionality
+      - identity, composition
+    - EP Pairs
+    - Uniqueness Principles
+    - End on a bang: this works for call-by-value, call-by-name, etc
+** More Inside
+   - More theorems
+   - Soundness of the axioms
+     - We construct (many) models by translation to CBPV. Implement
+       the dynamic types using recursive value/computation types
+     - Dynamic value type as a sum, Dynamic comp type as a product
+     - Logical Relation to prove soundness of the ordering wrt an
+       operational semantics
+** Related/Future Work
+   - AGT: doesn't satisfy eta principles, can it be modified to do so?
+   - Greenman-Felleisen: show weakened principles but not blah
+   - Degen-Thiemann?
+** Future Work
+   - Inductive, Coinductive, Recursive types should all have a similar
+     theorem.
+   - One of our models is quite similar to Scheme, maybe CBPV should
+     be taken more seriously as an intermediate lang?
+** Conclusions
+   - Axiomatic Approach shows us the strength of the graduality concept
+     - but also that it is a reimagining of EP pairs
+   - CBPV is a convenient metalanguage to prove results for many
+     different evaluation orders
+     - Our models suggest new 
diff --git a/talk/popl-winter-2019/talk-from-scratch.key b/talk/popl-winter-2019/talk-from-scratch.key
new file mode 100644
index 0000000000000000000000000000000000000000..c7f45945ed9b841186b1e1657cd42438157e858f
Binary files /dev/null and b/talk/popl-winter-2019/talk-from-scratch.key differ