mirror of
https://github.com/leanprover/lean4.git
synced 2026-03-30 08:44:07 +00:00
Compare commits
43 Commits
hbv/fix_re
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9fc62b7042 | ||
|
|
583c223b16 | ||
|
|
ccc7157c08 | ||
|
|
05046dc3d7 | ||
|
|
43f18fd502 | ||
|
|
b06eb981a3 | ||
|
|
f72137f53a | ||
|
|
96dbc324f3 | ||
|
|
d6e69649b6 | ||
|
|
337f1c455b | ||
|
|
6871abaa44 | ||
|
|
8c0bb68ee5 | ||
|
|
ae19b3e248 | ||
|
|
d0d135dbe2 | ||
|
|
088b299343 | ||
|
|
82c35eb517 | ||
|
|
abcf400e90 | ||
|
|
42854412c3 | ||
|
|
c84aa086c7 | ||
|
|
7168289c57 | ||
|
|
febd1caf36 | ||
|
|
79ac2d93b0 | ||
|
|
210d4d00fa | ||
|
|
938c19aace | ||
|
|
e06fc0b5e8 | ||
|
|
f2d36227cf | ||
|
|
0b401cd17c | ||
|
|
fda4793215 | ||
|
|
215aa4010b | ||
|
|
142ca24192 | ||
|
|
c71a0ea9a5 | ||
|
|
439c3c5544 | ||
|
|
2bc7a77806 | ||
|
|
e55f69acd0 | ||
|
|
50785098d8 | ||
|
|
fee2d7a6e8 | ||
|
|
bc5210d52a | ||
|
|
12c547122f | ||
|
|
f9c8b5e93d | ||
|
|
f8f12fdbc8 | ||
|
|
7f424b371e | ||
|
|
d56424b587 | ||
|
|
144db355ea |
6
.github/workflows/build-template.yml
vendored
6
.github/workflows/build-template.yml
vendored
@@ -276,10 +276,10 @@ jobs:
|
||||
- name: Check rebootstrap
|
||||
run: |
|
||||
set -e
|
||||
# clean rebuild in case of Makefile changes/Lake does not detect uncommited stage 0
|
||||
# changes yet
|
||||
git config user.email "stage0@lean-fro.org"
|
||||
git config user.name "update-stage0"
|
||||
make -C build update-stage0
|
||||
make -C build/stage1 clean-stdlib
|
||||
git commit --allow-empty -m "chore: update-stage0"
|
||||
time make -C build -j$NPROC
|
||||
time ctest --preset ${{ matrix.CMAKE_PRESET || 'release' }} --test-dir build/stage1 -j$NPROC
|
||||
if: matrix.check-rebootstrap
|
||||
|
||||
@@ -527,6 +527,14 @@ theorem castLE_of_eq {m n : Nat} (h : m = n) {h' : m ≤ n} : castLE h' = Fin.ca
|
||||
|
||||
@[simp, grind =] theorem val_castAdd (m : Nat) (i : Fin n) : (castAdd m i : Nat) = i := rfl
|
||||
|
||||
/-
|
||||
**Note**
|
||||
The current pattern inference heuristic includes the implicit term `n + m` as pattern of the pattern,
|
||||
but arithmetic is problematic in patterns because it is an interpreted symbol. For example,
|
||||
we will fail to match `@val n (castNat 0 i)`. Thus, we mark the implicit subterm with `no_index`
|
||||
-/
|
||||
grind_pattern val_castAdd => @val (no_index _) (castAdd m i)
|
||||
|
||||
@[deprecated val_castAdd (since := "2025-11-21")]
|
||||
theorem coe_castAdd (m : Nat) (i : Fin n) : (castAdd m i : Nat) = i := rfl
|
||||
|
||||
@@ -637,7 +645,15 @@ theorem exists_castSucc_eq {n : Nat} {i : Fin (n + 1)} : (∃ j, castSucc j = i)
|
||||
|
||||
theorem succ_castSucc {n : Nat} (i : Fin n) : i.castSucc.succ = i.succ.castSucc := rfl
|
||||
|
||||
@[simp, grind =] theorem val_addNat (m : Nat) (i : Fin n) : (addNat i m : Nat) = i + m := rfl
|
||||
@[simp] theorem val_addNat (m : Nat) (i : Fin n) : (addNat i m : Nat) = i + m := rfl
|
||||
|
||||
/-
|
||||
**Note**
|
||||
The current pattern inference heuristic includes the implicit term `n + m` as pattern of the pattern,
|
||||
but arithmetic is problematic in patterns because it is an interpreted symbol. For example,
|
||||
we will fail to match `@val n (addNat i 0)`. Thus, we mark the implicit subterm with `no_index`
|
||||
-/
|
||||
grind_pattern val_addNat => @val (no_index _) (addNat i m)
|
||||
|
||||
@[deprecated val_addNat (since := "2025-11-21")]
|
||||
theorem coe_addNat (m : Nat) (i : Fin n) : (addNat i m : Nat) = i + m := rfl
|
||||
|
||||
@@ -30,7 +30,13 @@ simpMatchDiscrsOnly (match 0 with | 0 => true | _ => false) = true
|
||||
```
|
||||
using `eq_self`.
|
||||
-/
|
||||
def simpMatchDiscrsOnly {α : Sort u} (a : α) : α := a
|
||||
@[expose] def simpMatchDiscrsOnly {α : Sort u} (a : α) : α := a
|
||||
|
||||
/--
|
||||
Gadget for protecting lambda abstractions created by `abstractGroundMismatches?`
|
||||
from beta reduction during preprocessing. See `ProveEq.lean` for details.
|
||||
-/
|
||||
@[expose] def abstractFn {α : Sort u} (a : α) : α := a
|
||||
|
||||
/-- Gadget for representing offsets `t+k` in patterns. -/
|
||||
def offset (a b : Nat) : Nat := a + b
|
||||
|
||||
@@ -185,13 +185,9 @@ example : foo.default = (default, default) :=
|
||||
abbrev inferInstance {α : Sort u} [i : α] : α := i
|
||||
|
||||
set_option checkBinderAnnotations false in
|
||||
/-- `inferInstanceAs α` synthesizes an instance of type `α`, transporting it from a
|
||||
definitionally equal type if necessary. This is useful when `α` is definitionally equal to
|
||||
some `α'` for which instances are registered, as it prevents leaking the definition's RHS
|
||||
at lower transparencies.
|
||||
|
||||
`inferInstanceAs` requires an expected type from context. If you just need to synthesize an
|
||||
instance without transporting between types, use `inferInstance` instead.
|
||||
/--
|
||||
`inferInstanceAs α` synthesizes an instance of type `α` and then adjusts it to conform to the
|
||||
expected type `β`, which must be inferable from context.
|
||||
|
||||
Example:
|
||||
```
|
||||
@@ -199,7 +195,26 @@ def D := Nat
|
||||
instance : Inhabited D := inferInstanceAs (Inhabited Nat)
|
||||
```
|
||||
|
||||
See `Lean.Meta.WrapInstance` for details.
|
||||
The adjustment will make sure that when the resulting instance will not "leak" the RHS `Nat` when
|
||||
reduced at transparency levels below `semireducible`, i.e. where `D` would not be unfolded either,
|
||||
preventing "defeq abuse".
|
||||
|
||||
More specifically, given the "source type" (the argument) and "target type" (the expected type),
|
||||
`inferInstanceAs` synthesizes an instance for the source type and then unfolds and rewraps its
|
||||
components (fields, nested instances) as necessary to make them compatible with the target type. The
|
||||
individual steps are represented by the following options, which all default to enabled and can be
|
||||
disabled to help with porting:
|
||||
|
||||
* `backward.inferInstanceAs.wrap`: master switch for instance adjustment in both `inferInstanceAs`
|
||||
and the default deriving handler
|
||||
* `backward.inferInstanceAs.wrap.reuseSubInstances`: reuse existing instances for the target type
|
||||
for sub-instance fields to avoid non-defeq instance diamonds
|
||||
* `backward.inferInstanceAs.wrap.instances`: wrap non-reducible instances in auxiliary definitions
|
||||
* `backward.inferInstanceAs.wrap.data`: wrap data fields in auxiliary definitions (proof fields are
|
||||
always wrapped)
|
||||
|
||||
If you just need to synthesize an instance without transporting between types, use `inferInstance`
|
||||
instead, potentially with a type annotation for the expected type.
|
||||
-/
|
||||
abbrev «inferInstanceAs» (α : Sort u) [i : α] : α := i
|
||||
|
||||
@@ -3673,7 +3688,7 @@ def panic {α : Sort u} [Inhabited α] (msg : String) : α :=
|
||||
panicCore msg
|
||||
|
||||
-- TODO: this be applied directly to `Inhabited`'s definition when we remove the above workaround
|
||||
attribute [nospecialize] Inhabited
|
||||
attribute [weak_specialize] Inhabited
|
||||
|
||||
/--
|
||||
The `>>=` operator is overloaded via instances of `bind`.
|
||||
|
||||
@@ -186,11 +186,11 @@ def registerTagAttribute (name : Name) (descr : String)
|
||||
mkInitial := pure {}
|
||||
addImportedFn := fun _ _ => pure {}
|
||||
addEntryFn := fun (s : NameSet) n => s.insert n
|
||||
exportEntriesFnEx := fun env es _ =>
|
||||
let r : Array Name := es.foldl (fun a e => a.push e) #[]
|
||||
-- Do not export info for private defs
|
||||
let r := r.filter (env.contains (skipRealize := false))
|
||||
r.qsort Name.quickLt
|
||||
exportEntriesFnEx := fun env es =>
|
||||
let all : Array Name := es.foldl (fun a e => a.push e) #[] |>.qsort Name.quickLt
|
||||
-- Do not export info for private defs at exported/server levels
|
||||
let exported := all.filter ((env.setExporting true).contains (skipRealize := false))
|
||||
{ exported, server := exported, «private» := all }
|
||||
statsFn := fun s => "tag attribute" ++ Format.line ++ "number of local entries: " ++ format s.size
|
||||
asyncMode := asyncMode
|
||||
replay? := some fun _ newState newConsts s =>
|
||||
@@ -266,15 +266,14 @@ def registerParametricAttribute (impl : ParametricAttributeImpl α) : IO (Parame
|
||||
mkInitial := pure ([], {})
|
||||
addImportedFn := fun _ => pure ([], {})
|
||||
addEntryFn := fun (decls, m) (p : Name × α) => (p.1 :: decls, m.insert p.1 p.2)
|
||||
exportEntriesFnEx := fun env (decls, m) lvl => Id.run do
|
||||
let mut r := if impl.preserveOrder then
|
||||
exportEntriesFnEx := fun env (decls, m) => Id.run do
|
||||
let all := if impl.preserveOrder then
|
||||
decls.toArray.reverse.filterMap (fun n => return (n, ← m.find? n))
|
||||
else
|
||||
let r := m.foldl (fun a n p => a.push (n, p)) #[]
|
||||
r.qsort (fun a b => Name.quickLt a.1 b.1)
|
||||
if lvl != .private then
|
||||
r := r.filter (fun ⟨n, a⟩ => impl.filterExport env n a)
|
||||
r
|
||||
let exported := all.filter (fun ⟨n, a⟩ => impl.filterExport env n a)
|
||||
{ exported, server := exported, «private» := all }
|
||||
statsFn := fun (_, m) => "parametric attribute" ++ Format.line ++ "number of local entries: " ++ format m.size
|
||||
}
|
||||
let attrImpl : AttributeImpl := {
|
||||
@@ -333,11 +332,11 @@ def registerEnumAttributes (attrDescrs : List (Name × String × α))
|
||||
mkInitial := pure {}
|
||||
addImportedFn := fun _ _ => pure {}
|
||||
addEntryFn := fun (s : NameMap α) (p : Name × α) => s.insert p.1 p.2
|
||||
exportEntriesFnEx := fun env m _ =>
|
||||
let r : Array (Name × α) := m.foldl (fun a n p => a.push (n, p)) #[]
|
||||
-- Do not export info for private defs
|
||||
let r := r.filter (env.contains (skipRealize := false) ·.1)
|
||||
r.qsort (fun a b => Name.quickLt a.1 b.1)
|
||||
exportEntriesFnEx := fun env m =>
|
||||
let all : Array (Name × α) := m.foldl (fun a n p => a.push (n, p)) #[] |>.qsort (fun a b => Name.quickLt a.1 b.1)
|
||||
-- Do not export info for private defs at exported/server levels
|
||||
let exported := all.filter ((env.setExporting true).contains (skipRealize := false) ·.1)
|
||||
{ exported, server := exported, «private» := all }
|
||||
statsFn := fun s => "enumeration attribute extension" ++ Format.line ++ "number of local entries: " ++ format s.size
|
||||
-- We assume (and check in `modifyState`) that, if used asynchronously, enum attributes are set
|
||||
-- only in the same context in which the tagged declaration was created
|
||||
|
||||
@@ -55,11 +55,6 @@ private def syntaxToExternAttrData (stx : Syntax) : AttrM ExternAttrData := do
|
||||
entries := entries.push <| ExternEntry.inline backend str
|
||||
return { entries := entries.toList }
|
||||
|
||||
-- Forward declaration
|
||||
set_option compiler.ignoreBorrowAnnotation true in
|
||||
@[extern "lean_add_extern"]
|
||||
opaque addExtern (declName : Name) (externAttrData : ExternAttrData) : CoreM Unit
|
||||
|
||||
builtin_initialize externAttr : ParametricAttribute ExternAttrData ←
|
||||
registerParametricAttribute {
|
||||
name := `extern
|
||||
@@ -71,7 +66,7 @@ builtin_initialize externAttr : ParametricAttribute ExternAttrData ←
|
||||
if let some (.thmInfo ..) := env.find? declName then
|
||||
-- We should not mark theorems as extern
|
||||
return ()
|
||||
addExtern declName externAttrData
|
||||
compileDecls #[declName]
|
||||
}
|
||||
|
||||
def getExternAttrData? (env : Environment) (n : Name) : Option ExternAttrData :=
|
||||
|
||||
@@ -6,7 +6,6 @@ Authors: Leonardo de Moura
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Lean.Compiler.IR.AddExtern
|
||||
public import Lean.Compiler.IR.Basic
|
||||
public import Lean.Compiler.IR.Format
|
||||
public import Lean.Compiler.IR.CompilerM
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Cameron Zwarich
|
||||
-/
|
||||
|
||||
module
|
||||
|
||||
prelude
|
||||
import Init.While
|
||||
import Lean.Compiler.IR.ToIR
|
||||
import Lean.Compiler.LCNF.ToImpureType
|
||||
import Lean.Compiler.LCNF.ToImpure
|
||||
import Lean.Compiler.LCNF.ExplicitBoxing
|
||||
import Lean.Compiler.LCNF.Internalize
|
||||
public import Lean.Compiler.ExternAttr
|
||||
import Lean.Compiler.LCNF.ExplicitRC
|
||||
import Lean.Compiler.Options
|
||||
|
||||
public section
|
||||
|
||||
namespace Lean.IR
|
||||
|
||||
set_option compiler.ignoreBorrowAnnotation true in
|
||||
@[export lean_add_extern]
|
||||
def addExtern (declName : Name) (externAttrData : ExternAttrData) : CoreM Unit := do
|
||||
if !isPrivateName declName then
|
||||
modifyEnv (Compiler.LCNF.setDeclPublic · declName)
|
||||
let monoDecl ← addMono declName
|
||||
let impureDecls ← addImpure monoDecl
|
||||
addIr impureDecls
|
||||
where
|
||||
addMono (declName : Name) : CoreM (Compiler.LCNF.Decl .pure) := do
|
||||
let type ← Compiler.LCNF.getOtherDeclMonoType declName
|
||||
let mut typeIter := type
|
||||
let mut params := #[]
|
||||
let ignoreBorrow := Compiler.compiler.ignoreBorrowAnnotation.get (← getOptions)
|
||||
repeat
|
||||
let .forallE binderName ty b _ := typeIter | break
|
||||
let borrow := !ignoreBorrow && isMarkedBorrowed ty
|
||||
params := params.push {
|
||||
fvarId := (← mkFreshFVarId)
|
||||
type := ty,
|
||||
binderName,
|
||||
borrow
|
||||
}
|
||||
typeIter := b
|
||||
let decl := {
|
||||
name := declName,
|
||||
levelParams := [],
|
||||
value := .extern externAttrData,
|
||||
inlineAttr? := some .noinline,
|
||||
type,
|
||||
params,
|
||||
}
|
||||
decl.saveMono
|
||||
return decl
|
||||
|
||||
addImpure (decl : Compiler.LCNF.Decl .pure) : CoreM (Array (Compiler.LCNF.Decl .impure)) := do
|
||||
let type ← Compiler.LCNF.lowerResultType decl.type decl.params.size
|
||||
let params ← decl.params.mapM fun param =>
|
||||
return { param with type := ← Compiler.LCNF.toImpureType param.type }
|
||||
let decl : Compiler.LCNF.Decl .impure := {
|
||||
name := decl.name,
|
||||
levelParams := decl.levelParams,
|
||||
value := .extern externAttrData
|
||||
inlineAttr? := some .noinline,
|
||||
type,
|
||||
params
|
||||
}
|
||||
Compiler.LCNF.CompilerM.run (phase := .impure) do
|
||||
let decl ← decl.internalize
|
||||
decl.saveImpure
|
||||
let decls ← Compiler.LCNF.addBoxedVersions #[decl]
|
||||
let decls ← Compiler.LCNF.runExplicitRc decls
|
||||
for decl in decls do
|
||||
decl.saveImpure
|
||||
modifyEnv fun env => Compiler.LCNF.recordFinalImpureDecl env decl.name
|
||||
return decls
|
||||
|
||||
addIr (decls : Array (Compiler.LCNF.Decl .impure)) : CoreM Unit := do
|
||||
let decls ← toIR decls
|
||||
logDecls `result decls
|
||||
addDecls decls
|
||||
|
||||
end Lean.IR
|
||||
@@ -86,11 +86,11 @@ builtin_initialize declMapExt : SimplePersistentEnvExtension Decl DeclMap ←
|
||||
addEntryFn := fun s d => s.insert d.name d
|
||||
-- Store `meta` closure only in `.olean`, turn all other decls into opaque externs.
|
||||
-- Leave storing the remainder for `meta import` and server `#eval` to `exportIREntries` below.
|
||||
exportEntriesFnEx? := some fun env s entries _ =>
|
||||
exportEntriesFnEx? := some fun env s entries =>
|
||||
let decls := entries.foldl (init := #[]) fun decls decl => decls.push decl
|
||||
let entries := sortDecls decls
|
||||
-- Do not save all IR even in .olean.private as it will be in .ir anyway
|
||||
if env.header.isModule then
|
||||
.uniform <| if env.header.isModule then
|
||||
entries.filterMap fun d => do
|
||||
if isDeclMeta env d.name then
|
||||
return d
|
||||
@@ -126,12 +126,12 @@ private def exportIREntries (env : Environment) : Array (Name × Array EnvExtens
|
||||
-- save all initializers independent of meta/private. Non-meta initializers will only be used when
|
||||
-- .ir is actually loaded, and private ones iff visible.
|
||||
let initDecls : Array (Name × Name) :=
|
||||
regularInitAttr.ext.exportEntriesFn env (regularInitAttr.ext.getState env) .private
|
||||
(regularInitAttr.ext.exportEntriesFn env (regularInitAttr.ext.getState env)).private
|
||||
-- safety: cast to erased type
|
||||
let initDecls : Array EnvExtensionEntry := unsafe unsafeCast initDecls
|
||||
|
||||
-- needed during initialization via interpreter
|
||||
let modPkg : Array (Option PkgId) := modPkgExt.exportEntriesFn env (modPkgExt.getState env) .private
|
||||
let modPkg : Array (Option PkgId) := (modPkgExt.exportEntriesFn env (modPkgExt.getState env)).private
|
||||
-- safety: cast to erased type
|
||||
let modPkg : Array EnvExtensionEntry := unsafe unsafeCast modPkg
|
||||
|
||||
|
||||
104
src/Lean/Compiler/LCNF/CoalesceRC.lean
Normal file
104
src/Lean/Compiler/LCNF/CoalesceRC.lean
Normal file
@@ -0,0 +1,104 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Lean.Compiler.LCNF.CompilerM
|
||||
public import Lean.Compiler.LCNF.PassManager
|
||||
|
||||
namespace Lean.Compiler.LCNF
|
||||
|
||||
/-!
|
||||
# Coalesce Reference Counting Operations
|
||||
|
||||
This pass coalesces multiple `inc`/`dec` operations on the same variable within a basic block.
|
||||
Within a basic block, it is always safe to:
|
||||
- Move all increments on a variable to the first `inc` location (summing the counts). Because if
|
||||
there are later `inc`s no intermediate operation can observe RC=1 (as the value must stay alive
|
||||
until the later inc) and thus doing all relevant `inc` in the beginning doesn't change
|
||||
semantics.
|
||||
- Move all decrements on a variable to the last `dec` location (summing the counts). Because the
|
||||
value is guaranteed to stay alive until at least the last `dec` anyway so a similar argument to
|
||||
`inc` holds.
|
||||
|
||||
Crucially this pass must be placed after `expandResetReuse` as that one relies on `inc`s still being
|
||||
present in their original location for optimization purposes.
|
||||
-/
|
||||
|
||||
private structure State where
|
||||
/-- Total inc count per variable in the current basic block (accumulated going forward). -/
|
||||
incTotal : Std.HashMap FVarId Nat := {}
|
||||
/-- Total dec count per variable in the current basic block (accumulated going forward). -/
|
||||
decTotal : Std.HashMap FVarId Nat := {}
|
||||
/--
|
||||
Inc count seen so far per variable going backward. When this equals `incTotal`, we've
|
||||
reached the first inc and should emit the coalesced operation.
|
||||
-/
|
||||
incAccum : Std.HashMap FVarId Nat := {}
|
||||
/--
|
||||
Whether we've already emitted the coalesced dec for a variable (going backward, the first
|
||||
dec encountered is the last in the block).
|
||||
-/
|
||||
decPlaced : Std.HashSet FVarId := {}
|
||||
|
||||
private abbrev M := StateRefT State CompilerM
|
||||
|
||||
/--
|
||||
Coalesce inc/dec operations within individual basic blocks.
|
||||
-/
|
||||
partial def Code.coalesceRC (code : Code .impure) : CompilerM (Code .impure) := do
|
||||
go code |>.run' {}
|
||||
where
|
||||
go (code : Code .impure) : M (Code .impure) := do
|
||||
match code with
|
||||
| .inc fvarId n check persistent k _ =>
|
||||
modify fun s => { s with incTotal := s.incTotal.alter fvarId (fun v? => some ((v?.getD 0) + n)) }
|
||||
let k ← go k
|
||||
modify fun s => { s with incAccum := s.incAccum.alter fvarId (fun v? => some ((v?.getD 0) + n)) }
|
||||
let s ← get
|
||||
if s.incAccum[fvarId]! == s.incTotal[fvarId]! then
|
||||
return .inc fvarId s.incTotal[fvarId]! check persistent k
|
||||
else
|
||||
return k
|
||||
| .dec fvarId n check persistent k _ =>
|
||||
modify fun s => { s with decTotal := s.decTotal.alter fvarId (fun v? => some ((v?.getD 0) + n)) }
|
||||
let k ← go k
|
||||
let s ← get
|
||||
if !s.decPlaced.contains fvarId then
|
||||
modify fun s => { s with decPlaced := s.decPlaced.insert fvarId }
|
||||
return .dec fvarId s.decTotal[fvarId]! check persistent k
|
||||
else
|
||||
return k
|
||||
| .let _ k =>
|
||||
let k ← go k
|
||||
return code.updateCont! k
|
||||
| .jp decl k =>
|
||||
let value ← decl.value.coalesceRC
|
||||
let decl ← decl.updateValue value
|
||||
let k ← go k
|
||||
return code.updateFun! decl k
|
||||
| .cases c =>
|
||||
let alts ← c.alts.mapMonoM (·.mapCodeM (·.coalesceRC))
|
||||
return code.updateAlts! alts
|
||||
| .del _ k _ =>
|
||||
let k ← go k
|
||||
return code.updateCont! k
|
||||
| .oset (k := k) .. | .uset (k := k) .. | .sset (k := k) .. | .setTag (k := k) .. =>
|
||||
let k ← go k
|
||||
return code.updateCont! k
|
||||
| .return .. | .jmp .. | .unreach .. => return code
|
||||
|
||||
def Decl.coalesceRC (decl : Decl .impure) : CompilerM (Decl .impure) := do
|
||||
let value ← decl.value.mapCodeM Code.coalesceRC
|
||||
return { decl with value }
|
||||
|
||||
public def coalesceRC : Pass :=
|
||||
.mkPerDeclaration `coalesceRc .impure Decl.coalesceRC
|
||||
|
||||
builtin_initialize
|
||||
registerTraceClass `Compiler.coalesceRc (inherited := true)
|
||||
|
||||
end Lean.Compiler.LCNF
|
||||
@@ -291,10 +291,9 @@ builtin_initialize functionSummariesExt : SimplePersistentEnvExtension (Name ×
|
||||
registerSimplePersistentEnvExtension {
|
||||
addImportedFn := fun _ => {}
|
||||
addEntryFn := fun s ⟨e, n⟩ => s.insert e n
|
||||
exportEntriesFnEx? := some fun _ s _ => fun
|
||||
exportEntriesFnEx? := some fun _ s _ =>
|
||||
-- preserved for non-modules, make non-persistent at some point?
|
||||
| .private => s.toArray.qsort decLt
|
||||
| _ => #[]
|
||||
{ exported := #[], server := #[], «private» := s.toArray.qsort decLt }
|
||||
asyncMode := .sync -- compilation is non-parallel anyway
|
||||
replay? := some <| SimplePersistentEnvExtension.replayOfFilter (!·.contains ·.1) (fun s ⟨e, n⟩ => s.insert e n)
|
||||
}
|
||||
|
||||
@@ -69,8 +69,8 @@ open ImpureType
|
||||
abbrev Mask := Array (Option FVarId)
|
||||
|
||||
/--
|
||||
Try to erase `inc` instructions on projections of `targetId` occuring in the tail of `ds`.
|
||||
Return the updated `ds` and mask contianing the `FVarId`s whose `inc` was removed.
|
||||
Try to erase `inc` instructions on projections of `targetId` occurring in the tail of `ds`.
|
||||
Return the updated `ds` and mask containing the `FVarId`s whose `inc` was removed.
|
||||
-/
|
||||
partial def eraseProjIncFor (nFields : Nat) (targetId : FVarId) (ds : Array (CodeDecl .impure)) :
|
||||
CompilerM (Array (CodeDecl .impure) × Mask) := do
|
||||
|
||||
@@ -31,9 +31,12 @@ namespace Lean.Compiler.LCNF
|
||||
open ImpureType
|
||||
|
||||
/-!
|
||||
The following section contains the derived value analysis. It figures out a dependency tree of
|
||||
The following section contains the derived value analysis. It figures out a dependency graph of
|
||||
values that were derived from other values through projections or `Array` accesses. This information
|
||||
is later used in the derived borrow analysis to reduce reference counting pressure.
|
||||
|
||||
When a derived value has more than one parent, it is derived from one of the parent values but we
|
||||
cannot statically determine which one.
|
||||
-/
|
||||
|
||||
/--
|
||||
@@ -41,10 +44,10 @@ Contains information about values derived through various forms of projection fr
|
||||
-/
|
||||
structure DerivedValInfo where
|
||||
/--
|
||||
The variable this value was derived from. This is always set except for parameters as they have no
|
||||
value to be derived from.
|
||||
The set of variables this value may derive from. This is always set except for parameters as they
|
||||
have no value to be derived from.
|
||||
-/
|
||||
parent? : Option FVarId
|
||||
parents : Array FVarId
|
||||
/--
|
||||
The set of variables that were derived from this value.
|
||||
-/
|
||||
@@ -56,59 +59,85 @@ abbrev DerivedValMap := Std.HashMap FVarId DerivedValInfo
|
||||
namespace CollectDerivedValInfo
|
||||
|
||||
structure State where
|
||||
/--
|
||||
The dependency graph of values.
|
||||
-/
|
||||
varMap : DerivedValMap := {}
|
||||
borrowedParams : FVarIdHashSet := {}
|
||||
/--
|
||||
The set of values that are to be interpreted as being borrowed by nature. This currently includes:
|
||||
- borrowed parameters
|
||||
- variables that are initialized from constants
|
||||
-/
|
||||
borrowedValues : FVarIdHashSet := {}
|
||||
|
||||
abbrev M := StateRefT State CompilerM
|
||||
|
||||
@[inline]
|
||||
def visitParam (p : Param .impure) : M Unit :=
|
||||
def addDerivedValue (parents : Array FVarId) (child : FVarId) : M Unit := do
|
||||
modify fun s => { s with
|
||||
varMap := s.varMap.insert p.fvarId {
|
||||
parent? := none
|
||||
children := {}
|
||||
}
|
||||
borrowedParams :=
|
||||
if p.borrow && p.type.isPossibleRef then
|
||||
s.borrowedParams.insert p.fvarId
|
||||
else
|
||||
s.borrowedParams
|
||||
varMap :=
|
||||
let varMap := parents.foldl (init := s.varMap)
|
||||
(·.modify · (fun info => { info with children := info.children.insert child }))
|
||||
varMap.insert child { parents := parents, children := {} }
|
||||
}
|
||||
|
||||
@[inline]
|
||||
def addDerivedValue (parent : FVarId) (child : FVarId) : M Unit := do
|
||||
modify fun s => { s with
|
||||
varMap :=
|
||||
s.varMap
|
||||
|>.modify parent (fun info => { info with children := info.children.insert child })
|
||||
|>.insert child { parent? := some parent, children := {} }
|
||||
}
|
||||
def addBorrowedValue (fvarId : FVarId) : M Unit := do
|
||||
modify fun s => { s with borrowedValues := s.borrowedValues.insert fvarId }
|
||||
|
||||
def removeFromParent (child : FVarId) : M Unit := do
|
||||
if let some parent := (← get).varMap.get? child |>.bind (·.parent?) then
|
||||
modify fun s => { s with
|
||||
varMap := s.varMap.modify parent fun info =>
|
||||
{ info with children := info.children.erase child }
|
||||
}
|
||||
def addDerivedLetValue (parents : Array FVarId) (child : FVarId) : M Unit := do
|
||||
let type ← getType child
|
||||
if !type.isPossibleRef then
|
||||
return ()
|
||||
let parents ← parents.filterM fun fvarId => do
|
||||
let type ← getType fvarId
|
||||
return type.isPossibleRef
|
||||
addDerivedValue parents child
|
||||
if parents.isEmpty then
|
||||
addBorrowedValue child
|
||||
|
||||
@[inline]
|
||||
def visitParam (p : Param .impure) : M Unit := do
|
||||
addDerivedValue #[] p.fvarId
|
||||
if p.borrow && p.type.isPossibleRef then
|
||||
addBorrowedValue p.fvarId
|
||||
|
||||
def removeFromParents (child : FVarId) : M Unit := do
|
||||
if let some entry := (← get).varMap.get? child then
|
||||
for parent in entry.parents do
|
||||
modify fun s => { s with
|
||||
varMap := s.varMap.modify parent fun info =>
|
||||
{ info with children := info.children.erase child }
|
||||
}
|
||||
|
||||
partial def collectCode (code : Code .impure) : M Unit := do
|
||||
match code with
|
||||
| .let decl k =>
|
||||
match decl.value with
|
||||
| .oproj _ parent =>
|
||||
addDerivedValue parent decl.fvarId
|
||||
addDerivedLetValue #[parent] decl.fvarId
|
||||
-- Keep in sync with PropagateBorrow, InferBorrow
|
||||
| .fap ``Array.getInternal args =>
|
||||
if let .fvar parent := args[1]! then
|
||||
addDerivedValue parent decl.fvarId
|
||||
addDerivedLetValue #[parent] decl.fvarId
|
||||
| .fap ``Array.get!Internal args =>
|
||||
let mut parents := #[]
|
||||
/-
|
||||
Because execution may continue after a panic, the value resulting from a get!InternalBorrowed
|
||||
may be derived from either the `Inhabited` instance or the `Array` argument.
|
||||
-/
|
||||
if let .fvar parent := args[1]! then
|
||||
parents := parents.push parent
|
||||
if let .fvar parent := args[2]! then
|
||||
addDerivedValue parent decl.fvarId
|
||||
parents := parents.push parent
|
||||
addDerivedLetValue parents decl.fvarId
|
||||
| .fap ``Array.uget args =>
|
||||
if let .fvar parent := args[1]! then
|
||||
addDerivedValue parent decl.fvarId
|
||||
addDerivedLetValue #[parent] decl.fvarId
|
||||
| .fap _ #[] =>
|
||||
addDerivedLetValue #[] decl.fvarId
|
||||
| .reset _ target =>
|
||||
removeFromParent target
|
||||
removeFromParents target
|
||||
| _ => pure ()
|
||||
collectCode k
|
||||
| .jp decl k =>
|
||||
@@ -125,8 +154,8 @@ Collect the derived value tree as well as the set of parameters that take object
|
||||
-/
|
||||
def collect (ps : Array (Param .impure)) (code : Code .impure) :
|
||||
CompilerM (DerivedValMap × FVarIdHashSet) := do
|
||||
let ⟨_, { varMap, borrowedParams }⟩ ← go |>.run {}
|
||||
return ⟨varMap, borrowedParams⟩
|
||||
let ⟨_, { varMap, borrowedValues }⟩ ← go |>.run {}
|
||||
return ⟨varMap, borrowedValues⟩
|
||||
where
|
||||
go : M Unit := do
|
||||
ps.forM visitParam
|
||||
@@ -170,13 +199,21 @@ def LiveVars.erase (liveVars : LiveVars) (fvarId : FVarId) : LiveVars :=
|
||||
let borrows := liveVars.borrows.erase fvarId
|
||||
{ vars, borrows }
|
||||
|
||||
@[inline]
|
||||
def LiveVars.insertBorrow (liveVars : LiveVars) (fvarId : FVarId) : LiveVars :=
|
||||
{ liveVars with borrows := liveVars.borrows.insert fvarId }
|
||||
|
||||
@[inline]
|
||||
def LiveVars.insertLive (liveVars : LiveVars) (fvarId : FVarId) : LiveVars :=
|
||||
{ liveVars with vars := liveVars.vars.insert fvarId }
|
||||
|
||||
abbrev JPLiveVarMap := FVarIdMap LiveVars
|
||||
|
||||
structure Context where
|
||||
/--
|
||||
The set of all parameters that are borrowed and take potential objects as arguments.
|
||||
The set of all values that are borrowed and potentially objects
|
||||
-/
|
||||
borrowedParams : FVarIdHashSet
|
||||
borrowedValues : FVarIdHashSet
|
||||
/--
|
||||
The derived value tree.
|
||||
-/
|
||||
@@ -277,18 +314,21 @@ def withCollectLiveVars (x : RcM α) : RcM (α × LiveVars) := do
|
||||
return (ret, collected)
|
||||
|
||||
/--
|
||||
Traverse the transitive closure of values derived from `fvarId` and add them to `s` if they pass
|
||||
`shouldAdd`.
|
||||
Traverse the transitive closure of values derived from `fvarId` and add them to `s` if:
|
||||
- they pass `shouldAdd`.
|
||||
- all their parents are accessible
|
||||
-/
|
||||
@[specialize]
|
||||
partial def addDescendants (fvarId : FVarId) (derivedValMap : DerivedValMap) (s : FVarIdHashSet)
|
||||
(shouldAdd : FVarId → Bool := fun _ => true) : FVarIdHashSet :=
|
||||
partial def addDescendants (fvarId : FVarId) (derivedValMap : DerivedValMap) (liveVars : LiveVars)
|
||||
(shouldAdd : FVarId → Bool := fun _ => true) : LiveVars :=
|
||||
if let some info := derivedValMap.get? fvarId then
|
||||
info.children.fold (init := s) fun s child =>
|
||||
let s := if shouldAdd child then s.insert child else s
|
||||
addDescendants child derivedValMap s shouldAdd
|
||||
info.children.fold (init := liveVars) fun liveVars child =>
|
||||
let cinfo := derivedValMap.get! child
|
||||
let parentsOk := cinfo.parents.all fun fvarId => (liveVars.vars.contains fvarId || liveVars.borrows.contains fvarId)
|
||||
let liveVars := if parentsOk && shouldAdd child then liveVars.insertBorrow child else liveVars
|
||||
addDescendants child derivedValMap liveVars shouldAdd
|
||||
else
|
||||
s
|
||||
liveVars
|
||||
|
||||
/--
|
||||
Mark `fvarId` as live from here on out and if there are any derived values that are not live anymore
|
||||
@@ -299,20 +339,21 @@ alive after all).
|
||||
def useVar (fvarId : FVarId) (shouldBorrow : FVarId → Bool := fun _ => true) : RcM Unit := do
|
||||
if !(← isLive fvarId) then
|
||||
let derivedValMap := (← read).derivedValMap
|
||||
modifyLive fun liveVars => { liveVars with vars := liveVars.vars.insert fvarId }
|
||||
modifyLive fun liveVars =>
|
||||
{ liveVars with
|
||||
borrows := addDescendants fvarId derivedValMap liveVars.borrows fun y =>
|
||||
!liveVars.vars.contains y && shouldBorrow y
|
||||
vars := liveVars.vars.insert fvarId
|
||||
}
|
||||
addDescendants fvarId derivedValMap liveVars fun y =>
|
||||
!liveVars.vars.contains y && shouldBorrow y
|
||||
|
||||
def useArgs (args : Array (Arg .impure)) : RcM Unit := do
|
||||
args.forM fun arg =>
|
||||
match arg with
|
||||
| .fvar fvarId =>
|
||||
useVar fvarId fun y =>
|
||||
-- If a value is used as an argument we are going to mark it live anyways so don't mark it
|
||||
-- as borrowed.
|
||||
/-
|
||||
If we are in a situation like `f x y` where `x` would imply that `y` remains borrowed we are
|
||||
going to mark `y` as being live instead of borrowed later on anyways. Instead we skip this
|
||||
intermediate state and don't even begin to consider it as borrowed.
|
||||
-/
|
||||
args.all fun arg =>
|
||||
match arg with
|
||||
| .fvar z => y != z
|
||||
@@ -341,9 +382,9 @@ def setRetLiveVars : RcM Unit := do
|
||||
let derivedValMap := (← read).derivedValMap
|
||||
-- At the end of a function no values are live and all borrows derived from parameters will still
|
||||
-- be around.
|
||||
let borrows := (← read).borrowedParams.fold (init := {}) fun borrows x =>
|
||||
addDescendants x derivedValMap (borrows.insert x)
|
||||
modifyLive fun _ => { vars := {}, borrows }
|
||||
let liveVars := (← read).borrowedValues.fold (init := {}) fun liveVars x =>
|
||||
addDescendants x derivedValMap (liveVars.insertBorrow x)
|
||||
modifyLive (fun _ => liveVars)
|
||||
|
||||
@[inline]
|
||||
def addInc (fvarId : FVarId) (k : Code .impure) (n : Nat := 1) : RcM (Code .impure) := do
|
||||
@@ -625,9 +666,9 @@ partial def Code.explicitRc (code : Code .impure) : RcM (Code .impure) := do
|
||||
def Decl.explicitRc (decl : Decl .impure) :
|
||||
CompilerM (Decl .impure) := do
|
||||
let value ← decl.value.mapCodeM fun code => do
|
||||
let ⟨derivedValMap, borrowedParams⟩ ← CollectDerivedValInfo.collect decl.params code
|
||||
let ⟨derivedValMap, borrowedValues⟩ ← CollectDerivedValInfo.collect decl.params code
|
||||
go code |>.run {
|
||||
borrowedParams,
|
||||
borrowedValues,
|
||||
derivedValMap,
|
||||
} |>.run' {}
|
||||
return { decl with value }
|
||||
|
||||
@@ -375,7 +375,6 @@ where
|
||||
match v with
|
||||
| .reset _ x => ownFVar z (.resetReuse z); ownFVar x (.resetReuse z)
|
||||
| .reuse x _ _ args => ownFVar z (.resetReuse z); ownFVar x (.resetReuse z); ownArgsIfParam z args
|
||||
| .ctor _ args => ownFVar z (.constructorResult z); ownArgsIfParam z args
|
||||
| .oproj _ x _ =>
|
||||
if ← isOwned x then ownFVar z (.forwardProjectionProp z)
|
||||
if ← isOwned z then ownFVar x (.backwardProjectionProp z)
|
||||
@@ -384,6 +383,8 @@ where
|
||||
if let .fvar parent := args[1]! then
|
||||
if ← isOwned parent then ownFVar z (.forwardProjectionProp z)
|
||||
| .fap ``Array.get!Internal args =>
|
||||
if let .fvar parent := args[1]! then
|
||||
if ← isOwned parent then ownFVar z (.forwardProjectionProp z)
|
||||
if let .fvar parent := args[2]! then
|
||||
if ← isOwned parent then ownFVar z (.forwardProjectionProp z)
|
||||
| .fap ``Array.uget args =>
|
||||
@@ -396,6 +397,9 @@ where
|
||||
let ps ← getParamInfo (.decl f)
|
||||
ownFVar z (.functionCallResult z)
|
||||
ownArgsUsingParams args ps (.functionCallArg z)
|
||||
| .ctor i args =>
|
||||
if !i.isScalar then
|
||||
ownFVar z (.constructorResult z); ownArgsIfParam z args
|
||||
| .fvar x args =>
|
||||
ownFVar z (.functionCallResult z); ownFVar x (.fvarCall z); ownArgs (.fvarCall z) args
|
||||
| .pap _ args => ownFVar z (.functionCallResult z); ownArgs (.partialApplication z) args
|
||||
|
||||
@@ -96,9 +96,9 @@ builtin_initialize postponedCompileDeclsExt : SimplePersistentEnvExtension Postp
|
||||
asyncMode := .sync
|
||||
replay? := some <| SimplePersistentEnvExtension.replayOfFilter
|
||||
(fun s e => !e.declNames.any s.contains) (fun s e => e.declNames.foldl (·.insert · e) s)
|
||||
exportEntriesFnEx? := some fun _ _ es lvl =>
|
||||
exportEntriesFnEx? := some fun _ _ es =>
|
||||
-- `leanir` imports the target module privately
|
||||
if lvl == .private then es.toArray else #[]
|
||||
{ exported := #[], server := #[], «private» := es.toArray }
|
||||
}
|
||||
|
||||
def resumeCompilation (declName : Name) : CoreM Unit := do
|
||||
|
||||
@@ -26,6 +26,7 @@ public import Lean.Compiler.LCNF.SimpCase
|
||||
public import Lean.Compiler.LCNF.InferBorrow
|
||||
public import Lean.Compiler.LCNF.ExplicitBoxing
|
||||
public import Lean.Compiler.LCNF.ExplicitRC
|
||||
public import Lean.Compiler.LCNF.CoalesceRC
|
||||
public import Lean.Compiler.LCNF.Toposort
|
||||
public import Lean.Compiler.LCNF.ExpandResetReuse
|
||||
public import Lean.Compiler.LCNF.SimpleGroundExpr
|
||||
@@ -149,6 +150,7 @@ def builtinPassManager : PassManager := {
|
||||
explicitBoxing,
|
||||
explicitRc,
|
||||
expandResetReuse,
|
||||
coalesceRC,
|
||||
pushProj (occurrence := 1),
|
||||
detectSimpleGround,
|
||||
inferVisibility (phase := .impure),
|
||||
|
||||
@@ -93,16 +93,15 @@ def mkDeclExt (phase : Phase) (name : Name := by exact decl_name%) :
|
||||
mkInitial := pure {},
|
||||
addImportedFn := fun _ => pure {},
|
||||
addEntryFn := fun s decl => s.insert decl.name decl
|
||||
exportEntriesFnEx env s level := Id.run do
|
||||
let mut entries := sortedEntries s declLt
|
||||
if level != .private then
|
||||
entries := entries.filterMap fun decl => do
|
||||
guard <| isDeclPublic env decl.name
|
||||
if isDeclTransparent env phase decl.name then
|
||||
some decl
|
||||
else
|
||||
some { decl with value := .extern { entries := [.opaque] } }
|
||||
return entries
|
||||
exportEntriesFnEx env s := Id.run do
|
||||
let all := sortedEntries s declLt
|
||||
let exported := all.filterMap fun decl => do
|
||||
guard <| isDeclPublic env decl.name
|
||||
if isDeclTransparent env phase decl.name then
|
||||
some decl
|
||||
else
|
||||
some { decl with value := .extern { entries := [.opaque] } }
|
||||
return { exported, server := exported, «private» := all }
|
||||
statsFn := statsFn,
|
||||
asyncMode := .sync,
|
||||
replay? := some (replayFn phase)
|
||||
@@ -138,13 +137,12 @@ def mkSigDeclExt (phase : Phase) (name : Name := by exact decl_name%) :
|
||||
mkInitial := pure {},
|
||||
addImportedFn := fun _ => pure {},
|
||||
addEntryFn := fun s sig => s.insert sig.name sig
|
||||
exportEntriesFnEx env s level := Id.run do
|
||||
let mut entries := sortedEntries s sigLt
|
||||
if level != .private then
|
||||
entries := entries.filterMap fun sig => do
|
||||
guard <| isDeclPublic env sig.name
|
||||
some sig
|
||||
return entries
|
||||
exportEntriesFnEx env s := Id.run do
|
||||
let all := sortedEntries s sigLt
|
||||
let exported := all.filterMap fun sig => do
|
||||
guard <| isDeclPublic env sig.name
|
||||
some sig
|
||||
return { exported, server := exported, «private» := all }
|
||||
statsFn := statsFn,
|
||||
asyncMode := .sync,
|
||||
replay? := some (replayFn phase)
|
||||
|
||||
@@ -114,6 +114,9 @@ where
|
||||
let parentVal ← getOwnedness parent
|
||||
join z parentVal
|
||||
| .fap ``Array.get!Internal args =>
|
||||
if let .fvar parent := args[1]! then
|
||||
let parentVal ← getOwnedness parent
|
||||
join z parentVal
|
||||
if let .fvar parent := args[2]! then
|
||||
let parentVal ← getOwnedness parent
|
||||
join z parentVal
|
||||
@@ -124,7 +127,10 @@ where
|
||||
| .fap _ args =>
|
||||
let value := if args.isEmpty then .borrow else .own
|
||||
join z value
|
||||
| .ctor .. | .fvar .. | .pap .. | .sproj .. | .uproj .. | .erased .. | .lit .. =>
|
||||
| .ctor i _ =>
|
||||
let value := if i.isScalar then .borrow else .own
|
||||
join z value
|
||||
| .fvar .. | .pap .. | .sproj .. | .uproj .. | .erased .. | .lit .. =>
|
||||
join z .own
|
||||
| _ => unreachable!
|
||||
|
||||
|
||||
@@ -178,10 +178,11 @@ partial def compileToSimpleGroundExpr (code : Code .impure) : CompilerM (Option
|
||||
where
|
||||
go (code : Code .impure) : DetectM SimpleGroundExpr := do
|
||||
match code with
|
||||
| .let decl (.return fvarId) =>
|
||||
| .let decl (.return fvarId) | .let decl (.inc _ _ _ true (.return fvarId)) =>
|
||||
guard <| decl.fvarId == fvarId
|
||||
compileFinalLet decl.value
|
||||
| .let decl k => compileNonFinalLet decl k
|
||||
| .inc (persistent := true) (k := k) .. => go k
|
||||
| _ => failure
|
||||
|
||||
@[inline]
|
||||
|
||||
@@ -20,8 +20,10 @@ inductive SpecParamInfo where
|
||||
/--
|
||||
A parameter that is an type class instance (or an arrow that produces a type class instance),
|
||||
and is fixed in recursive declarations. By default, Lean always specializes this kind of argument.
|
||||
If the `weak` parameter is set we only specialize for this parameter iff another parameter causes
|
||||
specialization as well.
|
||||
-/
|
||||
| fixedInst
|
||||
| fixedInst (weak : Bool)
|
||||
/--
|
||||
A parameter that is a function and is fixed in recursive declarations. If the user tags a declaration
|
||||
with `@[specialize]` without specifying which arguments should be specialized, Lean will specialize
|
||||
@@ -49,14 +51,15 @@ namespace SpecParamInfo
|
||||
|
||||
@[inline]
|
||||
def causesSpecialization : SpecParamInfo → Bool
|
||||
| .fixedInst | .fixedHO | .user => true
|
||||
| .fixedNeutral | .other => false
|
||||
| .fixedInst false | .fixedHO | .user => true
|
||||
| .fixedInst true | .fixedNeutral | .other => false
|
||||
|
||||
end SpecParamInfo
|
||||
|
||||
instance : ToMessageData SpecParamInfo where
|
||||
toMessageData
|
||||
| .fixedInst => "I"
|
||||
| .fixedInst false => "I"
|
||||
| .fixedInst true => "W"
|
||||
| .fixedHO => "H"
|
||||
| .fixedNeutral => "N"
|
||||
| .user => "U"
|
||||
@@ -130,6 +133,18 @@ private def isNoSpecType (env : Environment) (type : Expr) : Bool :=
|
||||
else
|
||||
false
|
||||
|
||||
/--
|
||||
Return `true` if `type` is a type tagged with `@[weak_specialize]` or an arrow that produces this kind of type.
|
||||
-/
|
||||
private def isWeakSpecType (env : Environment) (type : Expr) : Bool :=
|
||||
match type with
|
||||
| .forallE _ _ b _ => isWeakSpecType env b
|
||||
| _ =>
|
||||
if let .const declName _ := type.getAppFn then
|
||||
hasWeakSpecializeAttribute env declName
|
||||
else
|
||||
false
|
||||
|
||||
/-!
|
||||
*Note*: `fixedNeutral` must have forward dependencies.
|
||||
|
||||
@@ -160,7 +175,7 @@ See comment at `.fixedNeutral`.
|
||||
private def hasFwdDeps (decl : Decl .pure) (paramsInfo : Array SpecParamInfo) (j : Nat) : Bool := Id.run do
|
||||
let param := decl.params[j]!
|
||||
for h : k in (j+1)...decl.params.size do
|
||||
if paramsInfo[k]!.causesSpecialization then
|
||||
if paramsInfo[k]!.causesSpecialization || paramsInfo[k]! matches .fixedInst .. then
|
||||
let param' := decl.params[k]
|
||||
if param'.type.containsFVar param.fvarId then
|
||||
return true
|
||||
@@ -199,7 +214,7 @@ def computeSpecEntries (decls : Array (Decl .pure)) (autoSpecialize : Name → O
|
||||
else if isTypeFormerType param.type then
|
||||
pure .fixedNeutral
|
||||
else if (← isArrowClass? param.type).isSome then
|
||||
pure .fixedInst
|
||||
pure (.fixedInst (weak := isWeakSpecType (← getEnv) param.type))
|
||||
/-
|
||||
Recall that if `specArgs? == some #[]`, then user annotated function with `@[specialize]`, but did not
|
||||
specify which arguments must be specialized besides instances. In this case, we try to specialize
|
||||
|
||||
@@ -31,11 +31,8 @@ builtin_initialize specCacheExt : SimplePersistentEnvExtension CacheEntry Cache
|
||||
registerSimplePersistentEnvExtension {
|
||||
addEntryFn := addEntry
|
||||
addImportedFn := fun es => (mkStateFromImportedEntries addEntry {} es).switch
|
||||
exportEntriesFnEx? := some fun _ _ entries level =>
|
||||
if level == .private then
|
||||
entries.toArray
|
||||
else
|
||||
#[]
|
||||
exportEntriesFnEx? := some fun _ _ entries =>
|
||||
{ exported := #[], server := #[], «private» := entries.toArray }
|
||||
asyncMode := .sync
|
||||
replay? := some <| SimplePersistentEnvExtension.replayOfFilter
|
||||
(!·.contains ·.key) addEntry
|
||||
@@ -209,7 +206,7 @@ def collect (paramsInfo : Array SpecParamInfo) (args : Array (Arg .pure)) :
|
||||
match paramInfo with
|
||||
| .other =>
|
||||
argMask := argMask.push none
|
||||
| .fixedNeutral | .user | .fixedInst | .fixedHO =>
|
||||
| .fixedNeutral | .user | .fixedInst .. | .fixedHO =>
|
||||
argMask := argMask.push (some arg)
|
||||
Closure.collectArg arg
|
||||
return argMask
|
||||
@@ -257,7 +254,8 @@ def shouldSpecialize (specEntry : SpecEntry) (args : Array (Arg .pure)) : Specia
|
||||
match paramInfo with
|
||||
| .other => pure ()
|
||||
| .fixedNeutral => pure () -- If we want to monomorphize types such as `Array`, we need to change here
|
||||
| .fixedInst | .user => if ← isGround arg then return true
|
||||
| .fixedInst true => pure () -- weak: don't trigger specialization on its own
|
||||
| .fixedInst false | .user => if ← isGround arg then return true
|
||||
| .fixedHO => if ← hoCheck arg then return true
|
||||
|
||||
return false
|
||||
@@ -509,7 +507,7 @@ def updateLocalSpecParamInfo : SpecializeM Unit := do
|
||||
for entry in infos do
|
||||
if let some mask := (← get).parentMasks[entry.declName]? then
|
||||
let maskInfo info :=
|
||||
mask.zipWith info (f := fun b i => if !b && i.causesSpecialization then .other else i)
|
||||
mask.zipWith info (f := fun b i => if !b && (i.causesSpecialization || i matches .fixedInst ..) then .other else i)
|
||||
let entry := { entry with paramsInfo := maskInfo entry.paramsInfo }
|
||||
modify fun s => {
|
||||
s with
|
||||
|
||||
@@ -39,11 +39,9 @@ private builtin_initialize declMetaExt : SimplePersistentEnvExtension Name NameS
|
||||
addEntryFn := fun s n => s.insert n
|
||||
asyncMode := .sync
|
||||
replay? := some <| SimplePersistentEnvExtension.replayOfFilter (!·.contains ·) (·.insert ·)
|
||||
exportEntriesFnEx? := some fun env s entries => fun
|
||||
| .private =>
|
||||
let decls := entries.foldl (init := #[]) fun decls decl => decls.push decl
|
||||
decls.qsort Name.quickLt
|
||||
| _ => #[]
|
||||
exportEntriesFnEx? := some fun env s entries =>
|
||||
let decls := entries.foldl (init := #[]) fun decls decl => decls.push decl
|
||||
{ exported := #[], server := #[], «private» := decls.qsort Name.quickLt }
|
||||
}
|
||||
|
||||
/-- Whether a declaration should be exported for interpretation. -/
|
||||
|
||||
@@ -24,6 +24,17 @@ Marks a definition to never be specialized during code generation.
|
||||
builtin_initialize nospecializeAttr : TagAttribute ←
|
||||
registerTagAttribute `nospecialize "mark definition to never be specialized"
|
||||
|
||||
/--
|
||||
Marks a type for weak specialization: Parameters of this type are only specialized when
|
||||
another argument already triggers specialization. Unlike `@[nospecialize]`, if specialization
|
||||
happens for other reasons, parameters of this type will participate in the specialization
|
||||
rather than being ignored.
|
||||
-/
|
||||
@[builtin_doc]
|
||||
builtin_initialize weakSpecializeAttr : TagAttribute ←
|
||||
registerTagAttribute `weak_specialize
|
||||
"mark type for weak specialization: instances are only specialized when another argument already triggers specialization"
|
||||
|
||||
private def elabSpecArgs (declName : Name) (args : Array Syntax) : MetaM (Array Nat) := do
|
||||
if args.isEmpty then return #[]
|
||||
let info ← getConstInfo declName
|
||||
@@ -82,4 +93,7 @@ def hasSpecializeAttribute (env : Environment) (declName : Name) : Bool :=
|
||||
def hasNospecializeAttribute (env : Environment) (declName : Name) : Bool :=
|
||||
nospecializeAttr.hasTag env declName
|
||||
|
||||
def hasWeakSpecializeAttribute (env : Environment) (declName : Name) : Bool :=
|
||||
weakSpecializeAttr.hasTag env declName
|
||||
|
||||
end Lean.Compiler
|
||||
|
||||
@@ -18,10 +18,9 @@ namespace Lean
|
||||
|
||||
builtin_initialize builtinDeclRanges : IO.Ref (NameMap DeclarationRanges) ← IO.mkRef {}
|
||||
builtin_initialize declRangeExt : MapDeclarationExtension DeclarationRanges ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun _ s level =>
|
||||
if level < .server then
|
||||
#[]
|
||||
else s.toArray)
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun _ s =>
|
||||
let ents := s.toArray
|
||||
{ exported := #[], server := ents, «private» := ents })
|
||||
|
||||
def addBuiltinDeclarationRanges (declName : Name) (declRanges : DeclarationRanges) : IO Unit :=
|
||||
builtinDeclRanges.modify (·.insert declName declRanges)
|
||||
|
||||
@@ -78,27 +78,21 @@ private builtin_initialize builtinDocStrings : IO.Ref (NameMap String) ← IO.mk
|
||||
builtin_initialize docStringExt : MapDeclarationExtension String ←
|
||||
mkMapDeclarationExtension
|
||||
(asyncMode := .async .asyncEnv)
|
||||
(exportEntriesFn := fun _ s level =>
|
||||
if level < .server then
|
||||
{}
|
||||
else
|
||||
s.toArray)
|
||||
(exportEntriesFn := fun _ s =>
|
||||
let ents := s.toArray
|
||||
{ exported := #[], server := ents, «private» := ents })
|
||||
private builtin_initialize inheritDocStringExt : MapDeclarationExtension Name ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun _ s level =>
|
||||
if level < .server then
|
||||
{}
|
||||
else
|
||||
s.toArray)
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun _ s =>
|
||||
let ents := s.toArray
|
||||
{ exported := #[], server := ents, «private» := ents })
|
||||
|
||||
private builtin_initialize builtinVersoDocStrings : IO.Ref (NameMap VersoDocString) ← IO.mkRef {}
|
||||
builtin_initialize versoDocStringExt : MapDeclarationExtension VersoDocString ←
|
||||
mkMapDeclarationExtension
|
||||
(asyncMode := .async .asyncEnv)
|
||||
(exportEntriesFn := fun _ s level =>
|
||||
if level < .server then
|
||||
{}
|
||||
else
|
||||
s.toArray)
|
||||
(exportEntriesFn := fun _ s =>
|
||||
let ents := s.toArray
|
||||
{ exported := #[], server := ents, «private» := ents })
|
||||
|
||||
/--
|
||||
Adds a builtin docstring to the compiler.
|
||||
@@ -196,11 +190,9 @@ private builtin_initialize moduleDocExt :
|
||||
SimplePersistentEnvExtension ModuleDoc (PersistentArray ModuleDoc) ← registerSimplePersistentEnvExtension {
|
||||
addImportedFn := fun _ => {}
|
||||
addEntryFn := fun s e => s.push e
|
||||
exportEntriesFnEx? := some fun _ _ es level =>
|
||||
if level < .server then
|
||||
#[]
|
||||
else
|
||||
es.toArray
|
||||
exportEntriesFnEx? := some fun _ _ es =>
|
||||
let ents := es.toArray
|
||||
{ exported := #[], server := ents, «private» := ents }
|
||||
}
|
||||
|
||||
def addMainModuleDoc (env : Environment) (doc : ModuleDoc) : Environment :=
|
||||
@@ -407,11 +399,9 @@ private builtin_initialize versoModuleDocExt :
|
||||
SimplePersistentEnvExtension VersoModuleDocs.Snippet VersoModuleDocs ← registerSimplePersistentEnvExtension {
|
||||
addImportedFn := fun _ => {}
|
||||
addEntryFn := fun s e => s.add! e
|
||||
exportEntriesFnEx? := some fun _ _ es level =>
|
||||
if level < .server then
|
||||
#[]
|
||||
else
|
||||
es.toArray
|
||||
exportEntriesFnEx? := some fun _ _ es =>
|
||||
let ents := es.toArray
|
||||
{ exported := #[], server := ents, «private» := ents }
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -43,7 +43,7 @@ builtin_initialize
|
||||
|
||||
Upon such rewrite, the code for adding flat inductives does not diverge much from the usual
|
||||
way its done for inductive declarations, but we omit applying attributes/modifiers and
|
||||
we do not set the syntax references to track those declarations (as this is auxillary piece of
|
||||
we do not set the syntax references to track those declarations (as this is auxiliary piece of
|
||||
data hidden from the user).
|
||||
|
||||
Then, upon adding such flat inductives for each definition in the mutual block to the environment,
|
||||
@@ -345,7 +345,7 @@ private def mkCasesOnCoinductive (infos : Array InductiveVal) : MetaM Unit := do
|
||||
| throwError "expected to be quantifier"
|
||||
let motiveMVar ← mkFreshExprMVar type
|
||||
/-
|
||||
We intro all the indices and the occurence of the coinductive predicate
|
||||
We intro all the indices and the occurrence of the coinductive predicate
|
||||
-/
|
||||
let (fvars, subgoal) ← motiveMVar.mvarId!.introN (info.numIndices + 1)
|
||||
subgoal.withContext do
|
||||
@@ -373,7 +373,7 @@ private def mkCasesOnCoinductive (infos : Array InductiveVal) : MetaM Unit := do
|
||||
-/
|
||||
let originalCasesOn := mkAppN originalCasesOn indices
|
||||
/-
|
||||
The next argument is the occurence of the coinductive predicate.
|
||||
The next argument is the occurrence of the coinductive predicate.
|
||||
The original `casesOn` of the flat inductive mentions it in
|
||||
unrolled form, so we need to rewrite it.
|
||||
-/
|
||||
@@ -447,7 +447,7 @@ public def elabCoinductive (coinductiveElabData : Array CoinductiveElabData) : T
|
||||
let consts := namesAndTypes.map fun (name, _) => (mkConst name levelParams)
|
||||
/-
|
||||
We create values of each of PreDefinitions, by taking existential (see `Meta.SumOfProducts`)
|
||||
form of the associated flat inductives and applying paramaters, as well as recursive calls
|
||||
form of the associated flat inductives and applying parameters, as well as recursive calls
|
||||
(with their parameters passed).
|
||||
-/
|
||||
let preDefVals ← forallBoundedTelescope infos[0]!.type originalNumParams fun params _ => do
|
||||
|
||||
@@ -26,9 +26,11 @@ public structure EqnInfo where
|
||||
deriving Inhabited
|
||||
|
||||
public builtin_initialize eqnInfoExt : MapDeclarationExtension EqnInfo ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s _ =>
|
||||
-- Do not export for non-exposed defs
|
||||
s.filter (fun n _ => env.find? n |>.any (·.hasValue)) |>.toArray)
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s =>
|
||||
let all := s.toArray
|
||||
-- Do not export for non-exposed defs at exported/server levels
|
||||
let exported := s.filter (fun n _ => (env.setExporting true).find? n |>.any (·.hasValue)) |>.toArray
|
||||
{ exported, server := exported, «private» := all })
|
||||
|
||||
public def registerEqnsInfo (preDefs : Array PreDefinition) (declNameNonRec : Name)
|
||||
(fixedParamPerms : FixedParamPerms) (fixpointType : Array PartialFixpointType): MetaM Unit := do
|
||||
|
||||
@@ -148,9 +148,11 @@ where
|
||||
throwError "no progress at goal\n{MessageData.ofGoal mvarId}"
|
||||
|
||||
public builtin_initialize eqnInfoExt : MapDeclarationExtension EqnInfo ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s _ =>
|
||||
-- Do not export for non-exposed defs
|
||||
s.filter (fun n _ => env.find? n |>.any (·.hasValue)) |>.toArray)
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s =>
|
||||
let all := s.toArray
|
||||
-- Do not export for non-exposed defs at exported/server levels
|
||||
let exported := s.filter (fun n _ => (env.setExporting true).find? n |>.any (·.hasValue)) |>.toArray
|
||||
{ exported, server := exported, «private» := all })
|
||||
|
||||
public def registerEqnsInfo (preDef : PreDefinition) (declNames : Array Name) (recArgPos : Nat)
|
||||
(fixedParamPerms : FixedParamPerms) : CoreM Unit := do
|
||||
|
||||
@@ -24,9 +24,11 @@ public structure EqnInfo where
|
||||
deriving Inhabited
|
||||
|
||||
public builtin_initialize eqnInfoExt : MapDeclarationExtension EqnInfo ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s _ =>
|
||||
-- Do not export for non-exposed defs
|
||||
s.filter (fun n _ => env.find? n |>.any (·.hasValue)) |>.toArray)
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s =>
|
||||
let all := s.toArray
|
||||
-- Do not export for non-exposed defs at exported/server levels
|
||||
let exported := s.filter (fun n _ => (env.setExporting true).find? n |>.any (·.hasValue)) |>.toArray
|
||||
{ exported, server := exported, «private» := all })
|
||||
|
||||
public def registerEqnsInfo (preDefs : Array PreDefinition) (declNameNonRec : Name) (fixedParamPerms : FixedParamPerms)
|
||||
(argsPacker : ArgsPacker) : MetaM Unit := do
|
||||
|
||||
@@ -243,10 +243,6 @@ private def printAxiomsOf (constName : Name) : CommandElabM Unit := do
|
||||
|
||||
@[builtin_command_elab «printAxioms»] def elabPrintAxioms : CommandElab
|
||||
| `(#print%$tk axioms $id) => withRef tk do
|
||||
if (← getEnv).header.isModule then
|
||||
throwError "cannot use `#print axioms` in a `module`; consider temporarily removing the \
|
||||
`module` header or placing the command in a separate file"
|
||||
|
||||
let cs ← liftCoreM <| realizeGlobalConstWithInfos id
|
||||
cs.forM printAxiomsOf
|
||||
| _ => throwUnsupportedSyntax
|
||||
|
||||
@@ -31,7 +31,7 @@ open Lean.Parser.Command
|
||||
def allRecommendedSpellings : MetaM (Array RecommendedSpelling) := do
|
||||
let all := recommendedSpellingExt.toEnvExtension.getState (← getEnv)
|
||||
|>.importedEntries
|
||||
|>.push (recommendedSpellingExt.exportEntriesFn (← getEnv) (recommendedSpellingExt.getState (← getEnv)) .exported)
|
||||
|>.push ((recommendedSpellingExt.exportEntriesFn (← getEnv) (recommendedSpellingExt.getState (← getEnv))).exported)
|
||||
return all.flatMap id
|
||||
|
||||
end Lean.Elab.Term.Doc
|
||||
|
||||
@@ -256,15 +256,15 @@ Marks a type as an invariant type for the `mvcgen` tactic.
|
||||
Goals whose type is an application of a tagged type will be classified
|
||||
as invariants rather than verification conditions.
|
||||
-/
|
||||
builtin_initialize mvcgenInvariantAttr : TagAttribute ←
|
||||
registerTagAttribute `mvcgen_invariant_type
|
||||
builtin_initialize specInvariantAttr : TagAttribute ←
|
||||
registerTagAttribute `spec_invariant_type
|
||||
"marks a type as an invariant type for the `mvcgen` tactic"
|
||||
|
||||
/--
|
||||
Returns `true` if `ty` is an application of a type tagged with `@[mvcgen_invariant_type]`.
|
||||
Returns `true` if `ty` is an application of a type tagged with `@[spec_invariant_type]`.
|
||||
-/
|
||||
def isMVCGenInvariantType (env : Environment) (ty : Expr) : Bool :=
|
||||
def isSpecInvariantType (env : Environment) (ty : Expr) : Bool :=
|
||||
if let .const name .. := ty.getAppFn then
|
||||
mvcgenInvariantAttr.hasTag env name
|
||||
specInvariantAttr.hasTag env name
|
||||
else
|
||||
false
|
||||
|
||||
@@ -75,7 +75,7 @@ def elabSpec (stx? : Option (TSyntax `term)) (wp : Expr) : TacticM SpecTheorem :
|
||||
| none => findSpec (← getSpecTheorems) wp
|
||||
| some stx => elabTermIntoSpecTheorem stx expectedTy
|
||||
|
||||
variable {n} [Monad n] [MonadControlT MetaM n] [MonadLiftT MetaM n]
|
||||
variable {n} [Monad n] [MonadControlT MetaM n] [MonadLiftT MetaM n] [MonadEnv n]
|
||||
|
||||
private def mkProj' (n : Name) (i : Nat) (Q : Expr) : MetaM Expr := do
|
||||
return (← projectCore? Q i).getD (mkProj n i Q)
|
||||
@@ -181,11 +181,12 @@ public def mSpec (goal : MGoal) (elabSpecAtWP : Expr → n SpecTheorem) (goalTag
|
||||
-- Instantiation creates `.natural` MVars, which possibly get instantiated by the def eq checks
|
||||
-- below when they occur in `P` or `Q`.
|
||||
-- That's good for many such as MVars ("schematic variables"), but problematic for MVars
|
||||
-- corresponding to `Invariant`s, which should end up as user goals.
|
||||
-- To prevent accidental instantiation, we mark all `Invariant` MVars as synthetic opaque.
|
||||
-- corresponding to invariant types, which should end up as user goals.
|
||||
-- To prevent accidental instantiation, we mark all invariant MVars as synthetic opaque.
|
||||
let env ← getEnv
|
||||
for mvar in mvars do
|
||||
let ty ← mvar.mvarId!.getType
|
||||
if ty.isAppOf ``Invariant then mvar.mvarId!.setKind .syntheticOpaque
|
||||
if isSpecInvariantType env ty then mvar.mvarId!.setKind .syntheticOpaque
|
||||
|
||||
-- Apply the spec to the excess arguments of the `wp⟦e⟧ Q` application
|
||||
let T := goal.target.consumeMData
|
||||
|
||||
@@ -364,7 +364,7 @@ def elabInvariants (stx : Syntax) (invariants : Array MVarId) (suggestInvariant
|
||||
for h : n in 0...alts.size do
|
||||
let alt := alts[n]
|
||||
match alt with
|
||||
| `(goalDotAlt| · $rhs) =>
|
||||
| `(invariantDotAlt| · $rhs) =>
|
||||
if dotOrCase matches .false then
|
||||
logErrorAt alt m!"Alternation between labelled and bulleted invariants is not supported."
|
||||
break
|
||||
@@ -374,7 +374,7 @@ def elabInvariants (stx : Syntax) (invariants : Array MVarId) (suggestInvariant
|
||||
continue
|
||||
withRef rhs do
|
||||
discard <| evalTacticAt (← `(tactic| exact $rhs)) mv
|
||||
| `(goalCaseAlt| | $tag $args* => $rhs) =>
|
||||
| `(invariantCaseAlt| | $tag $args* => $rhs) =>
|
||||
if dotOrCase matches .true then
|
||||
logErrorAt alt m!"Alternation between labelled and bulleted invariants is not supported."
|
||||
break
|
||||
@@ -391,7 +391,7 @@ def elabInvariants (stx : Syntax) (invariants : Array MVarId) (suggestInvariant
|
||||
continue
|
||||
withRef rhs do
|
||||
discard <| evalTacticAt (← `(tactic| rename_i $args*; exact $rhs)) mv
|
||||
| _ => logErrorAt alt m!"Expected `goalDotAlt`, got {alt}"
|
||||
| _ => logErrorAt alt m!"Expected `invariantDotAlt`, got {alt}"
|
||||
|
||||
if let `(invariantsKW| invariants) := invariantsKW then
|
||||
if alts.size < invariants.size then
|
||||
@@ -405,7 +405,7 @@ def elabInvariants (stx : Syntax) (invariants : Array MVarId) (suggestInvariant
|
||||
if ← mv.isAssigned then
|
||||
continue
|
||||
let invariant ← suggestInvariant mv
|
||||
suggestions := suggestions.push (← `(goalDotAlt| · $invariant))
|
||||
suggestions := suggestions.push (← `(invariantDotAlt| · $invariant))
|
||||
let alts' := alts ++ suggestions
|
||||
let stx' ← `(invariantAlts|invariants $alts'*)
|
||||
if suggestions.size > 0 then
|
||||
|
||||
@@ -104,7 +104,7 @@ def addSubGoalAsVC (goal : MVarId) : VCGenM PUnit := do
|
||||
-- VC to the user as-is, without abstracting any variables in the local context.
|
||||
-- This only makes sense for synthetic opaque metavariables.
|
||||
goal.setKind .syntheticOpaque
|
||||
if isMVCGenInvariantType (← getEnv) ty then
|
||||
if isSpecInvariantType (← getEnv) ty then
|
||||
modify fun s => { s with invariants := s.invariants.push goal }
|
||||
else
|
||||
modify fun s => { s with vcs := s.vcs.push goal }
|
||||
|
||||
@@ -52,7 +52,7 @@ def firstTacticTokens [Monad m] [MonadEnv m] : m (NameMap String) := do
|
||||
let mut firstTokens : NameMap String :=
|
||||
tacticNameExt.toEnvExtension.getState env
|
||||
|>.importedEntries
|
||||
|>.push (tacticNameExt.exportEntriesFn env (tacticNameExt.getState env) .exported)
|
||||
|>.push ((tacticNameExt.exportEntriesFn env (tacticNameExt.getState env)).exported)
|
||||
|>.foldl (init := {}) fun names inMods =>
|
||||
inMods.foldl (init := names) fun names (k, n) =>
|
||||
names.insert k n
|
||||
@@ -108,7 +108,7 @@ Displays all available tactic tags, with documentation.
|
||||
@[builtin_command_elab printTacTags] def elabPrintTacTags : CommandElab := fun _stx => do
|
||||
let all :=
|
||||
tacticTagExt.toEnvExtension.getState (← getEnv)
|
||||
|>.importedEntries |>.push (tacticTagExt.exportEntriesFn (← getEnv) (tacticTagExt.getState (← getEnv)) .exported)
|
||||
|>.importedEntries |>.push ((tacticTagExt.exportEntriesFn (← getEnv) (tacticTagExt.getState (← getEnv))).exported)
|
||||
let mut mapping : NameMap NameSet := {}
|
||||
for arr in all do
|
||||
for (tac, tag) in arr do
|
||||
@@ -160,7 +160,7 @@ def allTacticDocs (includeUnnamed : Bool := true) : MetaM (Array TacticDoc) := d
|
||||
let env ← getEnv
|
||||
let allTags :=
|
||||
tacticTagExt.toEnvExtension.getState env |>.importedEntries
|
||||
|>.push (tacticTagExt.exportEntriesFn env (tacticTagExt.getState env) .exported)
|
||||
|>.push ((tacticTagExt.exportEntriesFn env (tacticTagExt.getState env)).exported)
|
||||
let mut tacTags : NameMap NameSet := {}
|
||||
for arr in allTags do
|
||||
for (tac, tag) in arr do
|
||||
|
||||
@@ -26,7 +26,7 @@ structure SimplePersistentEnvExtensionDescr (α σ : Type) where
|
||||
addImportedFn : Array (Array α) → σ
|
||||
toArrayFn : List α → Array α := fun es => es.toArray
|
||||
exportEntriesFnEx? :
|
||||
Option (Environment → σ → List α → OLeanLevel → Array α) := none
|
||||
Option (Environment → σ → List α → OLeanEntries (Array α)) := none
|
||||
asyncMode : EnvExtension.AsyncMode := .mainOnly
|
||||
replay? : Option ((newEntries : List α) → (newState : σ) → σ → List α × σ) := none
|
||||
|
||||
@@ -48,9 +48,9 @@ def registerSimplePersistentEnvExtension {α σ : Type} [Inhabited σ] (descr :
|
||||
addImportedFn := fun as => pure ([], descr.addImportedFn as),
|
||||
addEntryFn := fun s e => match s with
|
||||
| (entries, s) => (e::entries, descr.addEntryFn s e),
|
||||
exportEntriesFnEx env s level := match descr.exportEntriesFnEx? with
|
||||
| some fn => fn env s.2 s.1.reverse level
|
||||
| none => descr.toArrayFn s.1.reverse
|
||||
exportEntriesFnEx env s := match descr.exportEntriesFnEx? with
|
||||
| some fn => fn env s.2 s.1.reverse
|
||||
| none => .uniform (descr.toArrayFn s.1.reverse)
|
||||
statsFn := fun s => format "number of local entries: " ++ format s.1.length
|
||||
asyncMode := descr.asyncMode
|
||||
replay? := descr.replay?.map fun replay oldState newState _ (entries, s) =>
|
||||
@@ -131,16 +131,18 @@ deriving Inhabited
|
||||
|
||||
def mkMapDeclarationExtension (name : Name := by exact decl_name%)
|
||||
(asyncMode : EnvExtension.AsyncMode := .async .mainEnv)
|
||||
(exportEntriesFn : Environment → NameMap α → OLeanLevel → Array (Name × α) :=
|
||||
(exportEntriesFn : Environment → NameMap α → OLeanEntries (Array (Name × α)) :=
|
||||
-- Do not export info for private defs by default
|
||||
fun env s _ => s.toArray.filter (fun (n, _) => env.contains (skipRealize := false) n)) :
|
||||
fun env s =>
|
||||
let all := s.toArray.filter (fun (n, _) => env.contains (skipRealize := false) n)
|
||||
.uniform all) :
|
||||
IO (MapDeclarationExtension α) :=
|
||||
.mk <$> registerPersistentEnvExtension {
|
||||
name := name,
|
||||
mkInitial := pure {}
|
||||
addImportedFn := fun _ => pure {}
|
||||
addEntryFn := fun s (n, v) => s.insert n v
|
||||
exportEntriesFnEx env s level := exportEntriesFn env s level
|
||||
exportEntriesFnEx env s := exportEntriesFn env s
|
||||
asyncMode
|
||||
replay? := some fun _ newState newConsts s =>
|
||||
newConsts.foldl (init := s) fun s c =>
|
||||
|
||||
@@ -1540,6 +1540,23 @@ deriving DecidableEq, Ord, Repr
|
||||
instance : LE OLeanLevel := leOfOrd
|
||||
instance : LT OLeanLevel := ltOfOrd
|
||||
|
||||
/-- Data computed once per extension for all three olean levels. Avoids calling the export function
|
||||
three separate times so that expensive computations can be shared. -/
|
||||
structure OLeanEntries (α : Type) where
|
||||
exported : α
|
||||
server : α
|
||||
«private» : α
|
||||
deriving Inhabited
|
||||
|
||||
/-- Create `OLeanEntries` with the same value for all levels. -/
|
||||
def OLeanEntries.uniform (a : α) : OLeanEntries α := ⟨a, a, a⟩
|
||||
|
||||
/-- Look up the entry for a given level. -/
|
||||
def OLeanEntries.get (e : OLeanEntries α) : OLeanLevel → α
|
||||
| .exported => e.exported
|
||||
| .server => e.server
|
||||
| .private => e.private
|
||||
|
||||
/--
|
||||
An environment extension with support for storing/retrieving entries from a .olean file.
|
||||
- α is the type of the entries that are stored in .olean files.
|
||||
@@ -1591,16 +1608,17 @@ structure PersistentEnvExtension (α : Type) (β : Type) (σ : Type) where
|
||||
addImportedFn : Array (Array α) → ImportM σ
|
||||
addEntryFn : σ → β → σ
|
||||
/--
|
||||
Function to transform state into data that should be imported into other modules. When using the
|
||||
Function to transform state into data that should be imported into other modules. Returns entries
|
||||
for all three olean levels at once so that expensive computations can be shared. When using the
|
||||
module system without `import all`, `OLeanLevel.exported` is imported, else `OLeanLevel.private`.
|
||||
Additionally, when using the module system in the language server, the `OLeanLevel.server` data is
|
||||
accessible via `getModuleEntries (level := .server)`. By convention, each level should include all
|
||||
data of previous levels.
|
||||
|
||||
This function is run after elaborating the file and joining all asynchronous threads. It is run
|
||||
once for each level when the module system is enabled, otherwise once for `private`.
|
||||
This function is run once after elaborating the file and joining all asynchronous threads.
|
||||
For non-module files, only the `private` field is used.
|
||||
-/
|
||||
exportEntriesFn : Environment → σ → OLeanLevel → Array α
|
||||
exportEntriesFn : Environment → σ → OLeanEntries (Array α)
|
||||
statsFn : σ → Format
|
||||
|
||||
instance {α σ} [Inhabited σ] : Inhabited (PersistentEnvExtensionState α σ) :=
|
||||
@@ -1612,7 +1630,7 @@ instance {α β σ} [Inhabited σ] : Inhabited (PersistentEnvExtension α β σ)
|
||||
name := default,
|
||||
addImportedFn := fun _ => default,
|
||||
addEntryFn := fun s _ => s,
|
||||
exportEntriesFn := fun _ _ _ => #[],
|
||||
exportEntriesFn := fun _ _ => .uniform #[],
|
||||
statsFn := fun _ => Format.nil
|
||||
}
|
||||
|
||||
@@ -1668,7 +1686,7 @@ structure PersistentEnvExtensionDescrCore (α β σ : Type) where
|
||||
mkInitial : IO σ
|
||||
addImportedFn : Array (Array α) → ImportM σ
|
||||
addEntryFn : σ → β → σ
|
||||
exportEntriesFnEx : Environment → σ → OLeanLevel → Array α
|
||||
exportEntriesFnEx : Environment → σ → OLeanEntries (Array α)
|
||||
statsFn : σ → Format := fun _ => Format.nil
|
||||
asyncMode : EnvExtension.AsyncMode := .mainOnly
|
||||
replay? : Option (ReplayFn σ) := none
|
||||
@@ -1687,11 +1705,11 @@ def useDefaultIfOtherFieldGiven (default : α) (_otherField : β) : α :=
|
||||
structure PersistentEnvExtensionDescr (α β σ : Type) extends PersistentEnvExtensionDescrCore α β σ where
|
||||
-- The cyclic default values force the user to specify at least one of the two following fields.
|
||||
/--
|
||||
Obsolete simpler version of `exportEntriesFnEx`. Its value is ignored if the latter is also
|
||||
specified.
|
||||
Obsolete simpler version of `exportEntriesFnEx` that returns the same entries for all levels.
|
||||
Its value is ignored if the latter is also specified.
|
||||
-/
|
||||
exportEntriesFn : σ → Array α := useDefaultIfOtherFieldGiven (fun _ => #[]) exportEntriesFnEx
|
||||
exportEntriesFnEx := fun _ s _ => exportEntriesFn s
|
||||
exportEntriesFnEx := fun _ s => .uniform (exportEntriesFn s)
|
||||
|
||||
unsafe def registerPersistentEnvExtensionUnsafe {α β σ : Type} [Inhabited σ] (descr : PersistentEnvExtensionDescr α β σ) : IO (PersistentEnvExtension α β σ) := do
|
||||
let pExts ← persistentEnvExtensionsRef.get
|
||||
@@ -1779,19 +1797,40 @@ set_option compiler.ignoreBorrowAnnotation true in
|
||||
@[extern "lean_get_ir_extra_const_names"]
|
||||
private opaque getIRExtraConstNames (env : Environment) (level : OLeanLevel) (includeDecls := false) : Array Name
|
||||
|
||||
def mkModuleData (env : Environment) (level : OLeanLevel := .private) : IO ModuleData := do
|
||||
let env := env.setExporting (level != .private)
|
||||
/--
|
||||
Compute extension entries for all levels at once by calling `exportEntriesFn` once per extension.
|
||||
Returns an `OLeanEntries` of arrays mapping extension names to their exported data.
|
||||
-/
|
||||
private def computeExtEntries (env : Environment) :
|
||||
IO (OLeanEntries (Array (Name × Array EnvExtensionEntry))) := do
|
||||
let pExts ← persistentEnvExtensionsRef.get
|
||||
let entries := pExts.filterMap fun pExt => do
|
||||
-- get state from `checked` at the end if `async`; it would otherwise panic
|
||||
let mut asyncMode := pExt.toEnvExtension.asyncMode
|
||||
if asyncMode matches .async _ then
|
||||
asyncMode := .sync
|
||||
let allEntries := pExts.map fun pExt =>
|
||||
let asyncMode := match pExt.toEnvExtension.asyncMode with
|
||||
| .async _ => .sync
|
||||
| m => m
|
||||
let state := pExt.getState (asyncMode := asyncMode) env
|
||||
let ents := pExt.exportEntriesFn env state level
|
||||
-- no need to export empty entries
|
||||
guard !ents.isEmpty
|
||||
return (pExt.name, ents)
|
||||
let oe := pExt.exportEntriesFn env state
|
||||
(pExt.name, oe)
|
||||
let filterNonEmpty (level : OLeanLevel) :=
|
||||
allEntries.filterMap fun (name, oe) => do
|
||||
let ents := oe.get level
|
||||
guard !ents.isEmpty
|
||||
pure (name, ents)
|
||||
return {
|
||||
exported := filterNonEmpty .exported
|
||||
server := filterNonEmpty .server
|
||||
«private» := filterNonEmpty .private
|
||||
}
|
||||
|
||||
def mkModuleData (env : Environment) (level : OLeanLevel := .private)
|
||||
(extEntries? : Option (OLeanEntries (Array (Name × Array EnvExtensionEntry))) := none) :
|
||||
IO ModuleData := do
|
||||
let env := env.setExporting (level != .private)
|
||||
let entries ← match extEntries? with
|
||||
| some ee => pure (ee.get level)
|
||||
| none => do
|
||||
let ee ← computeExtEntries env
|
||||
pure (ee.get level)
|
||||
let kenv := env.toKernelEnv
|
||||
let constNames := kenv.constants.foldStage2 (fun names name _ => names.push name) #[]
|
||||
-- not all kernel constants may be exported at `level < .private`
|
||||
@@ -1828,8 +1867,9 @@ private def mkIRData (env : Environment) : ModuleData :=
|
||||
|
||||
def writeModule (env : Environment) (fname : System.FilePath) (writeIR := true) : IO Unit := do
|
||||
if env.header.isModule then
|
||||
let extEntries ← computeExtEntries env
|
||||
let mkPart (level : OLeanLevel) :=
|
||||
return (level.adjustFileName fname, (← mkModuleData env level))
|
||||
return (level.adjustFileName fname, (← mkModuleData env level extEntries))
|
||||
saveModuleDataParts env.mainModule #[
|
||||
(← mkPart .exported),
|
||||
(← mkPart .server),
|
||||
|
||||
@@ -1751,6 +1751,12 @@ def isFalse (e : Expr) : Bool :=
|
||||
def isTrue (e : Expr) : Bool :=
|
||||
e.cleanupAnnotations.isConstOf ``True
|
||||
|
||||
def isBoolFalse (e : Expr) : Bool :=
|
||||
e.cleanupAnnotations.isConstOf ``false
|
||||
|
||||
def isBoolTrue (e : Expr) : Bool :=
|
||||
e.cleanupAnnotations.isConstOf ``true
|
||||
|
||||
/--
|
||||
`getForallArity type` returns the arity of a `forall`-type. This function consumes nested annotations,
|
||||
and performs pending beta reductions. It does **not** use whnf.
|
||||
|
||||
@@ -66,9 +66,8 @@ builtin_initialize extraModUses : SimplePersistentEnvExtension ExtraModUse (PHas
|
||||
registerSimplePersistentEnvExtension {
|
||||
addEntryFn m k := m.insert k
|
||||
addImportedFn _ := {}
|
||||
exportEntriesFnEx? := some fun _ _ entries => fun
|
||||
| .private => entries.toArray
|
||||
| _ => #[]
|
||||
exportEntriesFnEx? := some fun _ _ entries =>
|
||||
{ exported := #[], server := #[], «private» := entries.toArray }
|
||||
asyncMode := .sync
|
||||
replay? := some <| SimplePersistentEnvExtension.replayOfFilter (·.contains ·) (·.insert ·)
|
||||
}
|
||||
|
||||
@@ -91,10 +91,10 @@ end FoldRelevantConstantsImpl
|
||||
@[implemented_by FoldRelevantConstantsImpl.foldUnsafe]
|
||||
public opaque foldRelevantConstants {α : Type} (e : Expr) (init : α) (f : Name → α → MetaM α) : MetaM α := pure init
|
||||
|
||||
/-- Collect the constants occuring in `e` (once each), skipping instance arguments and proofs. -/
|
||||
/-- Collect the constants occurring in `e` (once each), skipping instance arguments and proofs. -/
|
||||
public def relevantConstants (e : Expr) : MetaM (Array Name) := foldRelevantConstants e #[] (fun n ns => return ns.push n)
|
||||
|
||||
/-- Collect the constants occuring in `e` (once each), skipping instance arguments and proofs. -/
|
||||
/-- Collect the constants occurring in `e` (once each), skipping instance arguments and proofs. -/
|
||||
public def relevantConstantsAsSet (e : Expr) : MetaM NameSet := foldRelevantConstants e ∅ (fun n ns => return ns.insert n)
|
||||
|
||||
end Lean.Expr
|
||||
|
||||
@@ -105,7 +105,9 @@ builtin_initialize sineQuaNonExt : PersistentEnvExtension (NameMap (List (Name
|
||||
addImportedFn := fun mapss _ => pure mapss
|
||||
addEntryFn := nofun
|
||||
-- TODO: it would be nice to avoid the `toArray` here, e.g. via iterators.
|
||||
exportEntriesFnEx := fun env _ _ => unsafe env.unsafeRunMetaM do return #[← prepareTriggers (env.constants.map₂.toArray.map (·.1))]
|
||||
exportEntriesFnEx := fun env _ => unsafe
|
||||
let ents := env.unsafeRunMetaM do return #[← prepareTriggers (env.constants.map₂.toArray.map (·.1))]
|
||||
.uniform ents
|
||||
statsFn := fun _ => "sine qua non premise selection extension"
|
||||
}
|
||||
|
||||
|
||||
@@ -86,7 +86,9 @@ builtin_initialize symbolFrequencyExt : PersistentEnvExtension (NameMap Nat) Emp
|
||||
mkInitial := pure ∅
|
||||
addImportedFn := fun mapss _ => pure mapss
|
||||
addEntryFn := nofun
|
||||
exportEntriesFnEx := fun env _ _ => unsafe env.unsafeRunMetaM do return #[← cachedLocalSymbolFrequencyMap]
|
||||
exportEntriesFnEx := fun env _ => unsafe
|
||||
let ents := env.unsafeRunMetaM do return #[← cachedLocalSymbolFrequencyMap]
|
||||
.uniform ents
|
||||
statsFn := fun _ => "symbol frequency extension"
|
||||
}
|
||||
|
||||
|
||||
@@ -717,20 +717,25 @@ def recordSynthPendingFailure (type : Expr) : MetaM Unit := do
|
||||
modifyDiag fun { unfoldCounter, unfoldAxiomCounter, heuristicCounter, instanceCounter, synthPendingFailures } =>
|
||||
{ unfoldCounter, unfoldAxiomCounter, heuristicCounter, instanceCounter, synthPendingFailures := synthPendingFailures.insert type msg }
|
||||
|
||||
@[inline]
|
||||
def getLocalInstances : MetaM LocalInstances :=
|
||||
return (← read).localInstances
|
||||
|
||||
@[inline]
|
||||
def getConfig : MetaM Config :=
|
||||
return (← read).config
|
||||
|
||||
@[inline]
|
||||
def getConfigWithKey : MetaM ConfigWithKey :=
|
||||
return (← getConfig).toConfigWithKey
|
||||
|
||||
/-- Return the array of postponed universe level constraints. -/
|
||||
@[inline]
|
||||
def getPostponed : MetaM (PersistentArray PostponedEntry) :=
|
||||
return (← get).postponed
|
||||
|
||||
/-- Set the array of postponed universe level constraints. -/
|
||||
@[inline]
|
||||
def setPostponed (postponed : PersistentArray PostponedEntry) : MetaM Unit :=
|
||||
modify fun s => { s with postponed := postponed }
|
||||
|
||||
@@ -904,12 +909,15 @@ def mkConstWithFreshMVarLevels (declName : Name) : MetaM Expr := do
|
||||
return mkConst declName (← mkFreshLevelMVarsFor info)
|
||||
|
||||
/-- Return current transparency setting/mode. -/
|
||||
@[inline]
|
||||
def getTransparency : MetaM TransparencyMode :=
|
||||
return (← getConfig).transparency
|
||||
|
||||
@[inline]
|
||||
def shouldReduceAll : MetaM Bool :=
|
||||
return (← getTransparency) == TransparencyMode.all
|
||||
|
||||
@[inline]
|
||||
def shouldReduceReducibleOnly : MetaM Bool :=
|
||||
return (← getTransparency) == TransparencyMode.reducible
|
||||
|
||||
|
||||
@@ -35,9 +35,11 @@ public structure SparseCasesOnInfo where
|
||||
deriving Inhabited
|
||||
|
||||
private builtin_initialize sparseCasesOnInfoExt : MapDeclarationExtension SparseCasesOnInfo ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s _ =>
|
||||
-- Do not export for non-exposed defs
|
||||
s.filter (fun n _ => env.find? n |>.any (·.hasValue)) |>.toArray)
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s =>
|
||||
let all := s.toArray
|
||||
-- Do not export for non-exposed defs at exported/server levels
|
||||
let exported := s.filter (fun n _ => (env.setExporting true).find? n |>.any (·.hasValue)) |>.toArray
|
||||
{ exported, server := exported, «private» := all })
|
||||
|
||||
/--
|
||||
This module creates sparse variants of `casesOn` that have arms only for some of the constructors,
|
||||
|
||||
@@ -127,9 +127,11 @@ builtin_initialize extension : SimplePersistentEnvExtension Entry State ←
|
||||
addEntryFn := State.addEntry
|
||||
addImportedFn := fun es => (mkStateFromImportedEntries State.addEntry {} es).switch
|
||||
asyncMode := .async .mainEnv
|
||||
exportEntriesFnEx? := some fun env _ entries _ =>
|
||||
-- Do not export info for private defs
|
||||
entries.filter (env.contains (skipRealize := false) ·.name) |>.toArray
|
||||
exportEntriesFnEx? := some fun env _ entries =>
|
||||
let all := entries.toArray
|
||||
-- Do not export info for private defs at exported/server levels
|
||||
let exported := all.filter ((env.setExporting true).contains (skipRealize := false) ·.name)
|
||||
{ exported, server := exported, «private» := all }
|
||||
}
|
||||
|
||||
def addMatcherInfo (env : Environment) (matcherName : Name) (info : MatcherInfo) : Environment :=
|
||||
|
||||
@@ -105,7 +105,7 @@ private def isNatZero (e : Expr) : MetaM Bool := do
|
||||
| some v => return v == 0
|
||||
| _ => return false
|
||||
|
||||
private def mkOffset (e : Expr) (offset : Nat) : MetaM Expr := do
|
||||
def mkOffset (e : Expr) (offset : Nat) : MetaM Expr := do
|
||||
if offset == 0 then
|
||||
return e
|
||||
else if (← isNatZero e) then
|
||||
|
||||
@@ -24,7 +24,9 @@ public import Lean.Meta.Sym.InferType
|
||||
public import Lean.Meta.Sym.Simp
|
||||
public import Lean.Meta.Sym.Util
|
||||
public import Lean.Meta.Sym.Eta
|
||||
public import Lean.Meta.Sym.Canon
|
||||
public import Lean.Meta.Sym.Grind
|
||||
public import Lean.Meta.Sym.SynthInstance
|
||||
|
||||
/-!
|
||||
# Symbolic computation support.
|
||||
|
||||
433
src/Lean/Meta/Sym/Canon.lean
Normal file
433
src/Lean/Meta/Sym/Canon.lean
Normal file
@@ -0,0 +1,433 @@
|
||||
/-
|
||||
Copyright (c) 2026 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Sym.SymM
|
||||
import Lean.Meta.Sym.ExprPtr
|
||||
import Lean.Meta.SynthInstance
|
||||
import Lean.Meta.Sym.SynthInstance
|
||||
import Lean.Meta.IntInstTesters
|
||||
import Lean.Meta.NatInstTesters
|
||||
import Lean.Meta.Sym.Eta
|
||||
import Lean.Meta.WHNF
|
||||
import Init.Grind.Util
|
||||
namespace Lean.Meta.Sym
|
||||
namespace Canon
|
||||
|
||||
builtin_initialize registerTraceClass `sym.debug.canon
|
||||
|
||||
/-!
|
||||
# Type-directed canonicalizer
|
||||
|
||||
Canonicalizes expressions by normalizing types and instances. At the top level, it traverses
|
||||
applications, foralls, lambdas, and let-bindings, classifying each argument as a type, instance,
|
||||
implicit, or value using `shouldCanon`. Values are recursively visited but not normalized.
|
||||
Types and instances receive targeted reductions.
|
||||
|
||||
## Reductions (applied only in type positions)
|
||||
|
||||
- **Eta**: `fun x => f x` → `f`
|
||||
- **Projection**: `⟨a, b⟩.1` → `a` (structure projections, not class projections)
|
||||
- **Match/ite/cond**: reduced when discriminant is a constructor or literal
|
||||
- **Nat arithmetic**: ground evaluation (`2 + 1` → `3`) and offset normalization
|
||||
(`n.succ + 1` → `n + 2`)
|
||||
|
||||
## Instance canonicalization
|
||||
|
||||
Instances are re-synthesized via `synthInstance`. The instance type is first normalized
|
||||
using the type-level reductions above, so that `OfNat (Fin (2+1)) 0` and `OfNat (Fin 3) 0`
|
||||
produce the same canonical instance.
|
||||
|
||||
## Two caches
|
||||
|
||||
The canonicalizer maintains separate caches for type-level and value-level contexts.
|
||||
The same expression may canonicalize differently depending on whether it appears in a
|
||||
type position (where reductions are applied) or a value position (where it is only traversed).
|
||||
Caches are keyed by `Expr` (structural equality), not pointer equality, because
|
||||
the canonicalizer runs before `shareCommon` and enters binders using locally nameless
|
||||
representation.
|
||||
|
||||
## Future work
|
||||
|
||||
If needed we should add support for user-defined extensions.
|
||||
-/
|
||||
|
||||
structure Context where
|
||||
/-- `true` if we are visiting a type. -/
|
||||
insideType : Bool := false
|
||||
|
||||
abbrev CanonM := ReaderT Context SymM
|
||||
|
||||
/--
|
||||
Auxiliary function for normalizing the arguments of `OfNat.ofNat` during canonicalization.
|
||||
This is needed because satellite solvers create `Nat` and `Int` numerals using the
|
||||
APIs `mkNatLit` and `mkIntLit`, which produce terms of the form
|
||||
`@OfNat.ofNat Nat <num> inst` and `@OfNat.ofNat Int <num> inst`.
|
||||
This becomes a problem when a term in the input goal has already been canonicalized
|
||||
and its type is not exactly `Nat` or `Int`. For example, in issue #9477, we have:
|
||||
```
|
||||
structure T where
|
||||
upper_bound : Nat
|
||||
def T.range (a : T) := 0...a.upper_bound
|
||||
theorem range_lower (a : T) : a.range.lower = 0 := by rfl
|
||||
```
|
||||
Here, the `0` in `range_lower` is actually represented as:
|
||||
```
|
||||
(@OfNat.ofNat
|
||||
(Std.PRange.Bound (Std.PRange.RangeShape.lower (Std.PRange.RangeShape.mk Std.PRange.BoundShape.closed Std.PRange.BoundShape.open)) Nat)
|
||||
(nat_lit 0)
|
||||
(instOfNatNat (nat_lit 0)))
|
||||
```
|
||||
Without this normalization step, the satellite solver would need to handle multiple
|
||||
representations for `(0 : Nat)` and `(0 : Int)`, complicating reasoning.
|
||||
-/
|
||||
-- Remark: This is not a great solution. We should consider writing a custom canonicalizer for
|
||||
-- `OfNat.ofNat` and other constants with built-in support in `grind`.
|
||||
private def normOfNatArgs? (args : Array Expr) : MetaM (Option (Array Expr)) := do
|
||||
if h : args.size = 3 then
|
||||
let mut args : Vector Expr 3 := h ▸ args.toVector
|
||||
let mut modified := false
|
||||
if args[1].isAppOf ``OfNat.ofNat then
|
||||
-- If nested `OfNat.ofNat`, convert to raw nat literal
|
||||
let some val ← getNatValue? args[1] | pure ()
|
||||
args := args.set 1 (mkRawNatLit val)
|
||||
modified := true
|
||||
let inst := args[2]
|
||||
if (← Structural.isInstOfNatNat inst) && !args[0].isConstOf ``Nat then
|
||||
return some (args.set 0 Nat.mkType |>.toArray)
|
||||
else if (← Structural.isInstOfNatInt inst) && !args[0].isConstOf ``Int then
|
||||
return some (args.set 0 Int.mkType |>.toArray)
|
||||
else if modified then
|
||||
return some args.toArray
|
||||
return none
|
||||
|
||||
abbrev withCaching (e : Expr) (k : CanonM Expr) : CanonM Expr := do
|
||||
if (← read).insideType then
|
||||
if let some r := (← get).canon.cacheInType.get? e then
|
||||
return r
|
||||
else
|
||||
let r ← k
|
||||
modify fun s => { s with canon.cacheInType := s.canon.cacheInType.insert e r }
|
||||
return r
|
||||
else
|
||||
if let some r := (← get).canon.cache.get? e then
|
||||
return r
|
||||
else
|
||||
let r ← k
|
||||
modify fun s => { s with canon.cache := s.canon.cache.insert e r }
|
||||
return r
|
||||
|
||||
def isTrueCond (e : Expr) : Bool :=
|
||||
match_expr e with
|
||||
| True => true
|
||||
| Eq _ a b => a.isBoolTrue && b.isBoolTrue
|
||||
| _ => false
|
||||
|
||||
def isFalseCond (e : Expr) : Bool :=
|
||||
match_expr e with
|
||||
| False => true
|
||||
| Eq _ a b => a.isBoolFalse && b.isBoolTrue
|
||||
| _ => false
|
||||
|
||||
/--
|
||||
Return type for the `shouldCanon` function.
|
||||
-/
|
||||
inductive ShouldCanonResult where
|
||||
| /- Nested types (and type formers) are canonicalized. -/
|
||||
canonType
|
||||
| /- Nested instances are canonicalized. -/
|
||||
canonInst
|
||||
| /- Implicit argument that is not an instance nor a type. -/
|
||||
canonImplicit
|
||||
| /-
|
||||
Term is not a proof, type (former), nor an instance.
|
||||
Thus, it must be recursively visited by the canonicalizer.
|
||||
-/
|
||||
visit
|
||||
deriving Inhabited
|
||||
|
||||
instance : Repr ShouldCanonResult where
|
||||
reprPrec r _ := private match r with
|
||||
| .canonType => "canonType"
|
||||
| .canonInst => "canonInst"
|
||||
| .canonImplicit => "canonImplicit"
|
||||
| .visit => "visit"
|
||||
|
||||
/--
|
||||
See comments at `ShouldCanonResult`.
|
||||
-/
|
||||
def shouldCanon (pinfos : Array ParamInfo) (i : Nat) (arg : Expr) : MetaM ShouldCanonResult := do
|
||||
if h : i < pinfos.size then
|
||||
let pinfo := pinfos[i]
|
||||
if pinfo.isInstance then
|
||||
return .canonInst
|
||||
else if pinfo.isProp then
|
||||
return .visit
|
||||
else if pinfo.isImplicit then
|
||||
if (← isTypeFormer arg) then
|
||||
return .canonType
|
||||
else
|
||||
return .canonImplicit
|
||||
if (← isProp arg) then
|
||||
return .visit
|
||||
else if (← isTypeFormer arg) then
|
||||
return .canonType
|
||||
else
|
||||
return .visit
|
||||
|
||||
/--
|
||||
Reduce a projection function application (e.g., `@Sigma.fst _ _ ⟨a, b⟩` → `a`).
|
||||
Class projections are not reduced — they are support elements handled by instance synthesis.
|
||||
-/
|
||||
def reduceProjFn? (info : ProjectionFunctionInfo) (e : Expr) : SymM (Option Expr) := do
|
||||
if info.fromClass then
|
||||
return none
|
||||
let some e ← unfoldDefinition? e | return none
|
||||
match (← reduceProj? e.getAppFn) with
|
||||
| some f => return some <| mkAppN f e.getAppArgs
|
||||
| none => return none
|
||||
|
||||
def isNat (e : Expr) := e.isConstOf ``Nat
|
||||
|
||||
/-- Returns `true` if `e` is a Nat arithmetic expression that should be normalized
|
||||
(ground evaluation or offset normalization). -/
|
||||
def isNatArithApp (e : Expr) : Bool :=
|
||||
match_expr e with
|
||||
| Nat.zero => true
|
||||
| Nat.succ _ => true
|
||||
| HAdd.hAdd α _ _ _ _ _ => isNat α
|
||||
| HMul.hMul α _ _ _ _ _ => isNat α
|
||||
| HSub.hSub α _ _ _ _ _ => isNat α
|
||||
| HDiv.hDiv α _ _ _ _ _ => isNat α
|
||||
| HMod.hMod α _ _ _ _ _ => isNat α
|
||||
| _ => false
|
||||
|
||||
/-- Check that `e` is definitionally equal to `inst` at instance transparency.
|
||||
Returns `inst` on success, `e` with a reported issue on failure. -/
|
||||
def checkDefEqInst (e : Expr) (inst : Expr) : SymM Expr := do
|
||||
unless (← isDefEqI e inst) do
|
||||
reportIssue! "failed to canonicalize instance{indentExpr e}\nsynthesized instance is not definitionally equal{indentExpr inst}"
|
||||
return e
|
||||
return inst
|
||||
|
||||
/-- Canonicalize `e`. Applies targeted reductions in type positions; recursively visits value positions. -/
|
||||
partial def canon (e : Expr) : CanonM Expr := do
|
||||
match e with
|
||||
| .forallE .. => withCaching e <| canonForall #[] e
|
||||
| .lam .. => withCaching e <| canonLambda e
|
||||
| .letE .. => withCaching e <| canonLet #[] e
|
||||
| .app .. => withCaching e <| canonApp e
|
||||
| .proj .. => withCaching e <| canonProj e
|
||||
| .mdata _ b => return e.updateMData! (← canon b)
|
||||
| _ => return e
|
||||
where
|
||||
canonInsideType (e : Expr) : CanonM Expr := do
|
||||
if (← read).insideType then
|
||||
canon e
|
||||
else if (← isProp e) then
|
||||
/-
|
||||
If the body is a proposition (like `a ∈ m → ...`), normalizing inside it could change the
|
||||
shape of the proposition and confuse grind's proposition handling.
|
||||
-/
|
||||
canon e
|
||||
else
|
||||
withReader (fun ctx => { ctx with insideType := true }) <| canon e
|
||||
|
||||
/--
|
||||
Similar to `canonInsideType`, but skips the `isProp` check.
|
||||
Use only when `e` is known not to be a proposition.
|
||||
-/
|
||||
canonInsideType' (e : Expr) : CanonM Expr := do
|
||||
if (← read).insideType then
|
||||
canon e
|
||||
else
|
||||
withReader (fun ctx => { ctx with insideType := true }) <| canon e
|
||||
|
||||
canonInst (e : Expr) : CanonM Expr := do
|
||||
if let some inst := (← get).canon.cacheInsts.get? e then
|
||||
checkDefEqInst e inst
|
||||
else
|
||||
/-
|
||||
We normalize the type to make sure `OfNat (Fin (2+1)) 1` and `OfNat (Fin 3) 1` will produce
|
||||
the same instances.
|
||||
-/
|
||||
let type ← inferType e
|
||||
let type' ← canonInsideType' type
|
||||
let some inst ← Sym.synthInstance? type' |
|
||||
reportIssue! "failed to canonicalize instance{indentExpr e}\nfailed to synthesize{indentExpr type'}"
|
||||
return e
|
||||
let inst ← checkDefEqInst e inst
|
||||
-- Remark: we cache result using the type **before** canonicalization.
|
||||
modify fun s => { s with canon.cacheInsts := s.canon.cacheInsts.insert e inst }
|
||||
return inst
|
||||
|
||||
canonLambda (e : Expr) : CanonM Expr := do
|
||||
if (← read).insideType then
|
||||
canonLambdaLoop #[] (etaReduce e)
|
||||
else
|
||||
canonLambdaLoop #[] e
|
||||
|
||||
canonLambdaLoop (fvars : Array Expr) (e : Expr) : CanonM Expr := do
|
||||
match e with
|
||||
| .lam n d b c =>
|
||||
withLocalDecl n c (← canonInsideType (d.instantiateRev fvars)) fun x =>
|
||||
canonLambdaLoop (fvars.push x) b
|
||||
| e =>
|
||||
mkLambdaFVars fvars (← canon (e.instantiateRev fvars))
|
||||
|
||||
canonForall (fvars : Array Expr) (e : Expr) : CanonM Expr := do
|
||||
match e with
|
||||
| .forallE n d b c =>
|
||||
withLocalDecl n c (← canonInsideType (d.instantiateRev fvars)) fun x =>
|
||||
canonForall (fvars.push x) b
|
||||
| e =>
|
||||
mkForallFVars fvars (← canonInsideType (e.instantiateRev fvars))
|
||||
|
||||
canonLet (fvars : Array Expr) (e : Expr) : CanonM Expr := do
|
||||
match e with
|
||||
| .letE n t v b nondep =>
|
||||
withLetDecl n (← canonInsideType (t.instantiateRev fvars)) (← canon (v.instantiateRev fvars)) (nondep := nondep) fun x =>
|
||||
canonLet (fvars.push x) b
|
||||
| e =>
|
||||
mkLetFVars (generalizeNondepLet := false) fvars (← canon (e.instantiateRev fvars))
|
||||
|
||||
canonAppDefault (e : Expr) : CanonM Expr := e.withApp fun f args => do
|
||||
if f.isConstOf ``Grind.nestedProof && args.size == 2 then
|
||||
let prop := args[0]!
|
||||
let prop' ← canon prop
|
||||
let e' := if isSameExpr prop prop' then e else mkAppN f (args.set! 0 prop')
|
||||
return e'
|
||||
else if f.isConstOf ``Grind.nestedDecidable && args.size == 2 then
|
||||
let prop := args[0]!
|
||||
let prop' ← canon prop
|
||||
let e' := if isSameExpr prop prop' then e else mkAppN f (args.set! 0 prop')
|
||||
return e'
|
||||
else
|
||||
let mut modified := false
|
||||
let args ← if f.isConstOf ``OfNat.ofNat then
|
||||
let some args ← normOfNatArgs? args | pure args
|
||||
modified := true
|
||||
pure args
|
||||
else
|
||||
pure args
|
||||
let mut f := f
|
||||
let f' ← canon f
|
||||
unless isSameExpr f f' do
|
||||
f := f'
|
||||
modified := true
|
||||
let pinfos := (← getFunInfo f).paramInfo
|
||||
let mut args := args.toVector
|
||||
for h : i in *...args.size do
|
||||
let arg := args[i]
|
||||
trace[sym.debug.canon] "[{repr (← shouldCanon pinfos i arg)}]: {arg} : {← inferType arg}"
|
||||
let arg' ← match (← shouldCanon pinfos i arg) with
|
||||
| .canonType =>
|
||||
/-
|
||||
The type may have nested propositions and terms that may need to be canonicalized too.
|
||||
So, we must recurse over it. See issue #10232
|
||||
-/
|
||||
canonInsideType' arg
|
||||
| .canonImplicit => canon arg
|
||||
| .visit => canon arg
|
||||
| .canonInst =>
|
||||
if arg.isAppOfArity ``Grind.nestedDecidable 2 then
|
||||
let prop := arg.appFn!.appArg!
|
||||
let prop' ← canon prop
|
||||
if isSameExpr prop prop' then pure arg else pure (mkApp2 arg.appFn!.appFn! prop' arg.appArg!)
|
||||
else
|
||||
canonInst arg
|
||||
unless isSameExpr arg arg' do
|
||||
args := args.set i arg'
|
||||
modified := true
|
||||
return if modified then mkAppN f args.toArray else e
|
||||
|
||||
canonIte (f : Expr) (α c inst a b : Expr) : CanonM Expr := do
|
||||
let c ← canon c
|
||||
if isTrueCond c then canon a
|
||||
else if isFalseCond c then canon b
|
||||
else return mkApp5 f (← canonInsideType α) c (← canonInst inst) (← canon a) (← canon b)
|
||||
|
||||
canonCond (f : Expr) (α c a b : Expr) : CanonM Expr := do
|
||||
let c ← canon c
|
||||
if c.isBoolTrue then canon a
|
||||
else if c.isBoolFalse then canon b
|
||||
else return mkApp4 f (← canonInsideType α) c (← canon a) (← canon b)
|
||||
|
||||
postReduce (e : Expr) : CanonM Expr := do
|
||||
if isNatArithApp e then
|
||||
if let some e ← evalNat e |>.run then
|
||||
return mkNatLit e
|
||||
else if let some (e, k) ← isOffset? e |>.run then
|
||||
mkOffset e k
|
||||
else
|
||||
return e
|
||||
else
|
||||
let f := e.getAppFn
|
||||
let .const declName _ := f | return e
|
||||
if let some info ← getProjectionFnInfo? declName then
|
||||
return (← reduceProjFn? info e).getD e
|
||||
else
|
||||
return e
|
||||
|
||||
/-- Canonicalize `e` and apply post reductions. -/
|
||||
canonAppAndPost (e : Expr) : CanonM Expr := do
|
||||
let e ← canonAppDefault e
|
||||
postReduce e
|
||||
|
||||
canonMatch (e : Expr) : CanonM Expr := do
|
||||
if let .reduced e ← reduceMatcher? e then
|
||||
canon e
|
||||
else
|
||||
let e ← canonAppDefault e
|
||||
-- Remark: try again, discriminants may have been simplified.
|
||||
if let .reduced e ← reduceMatcher? e then
|
||||
canon e
|
||||
else
|
||||
return e
|
||||
|
||||
canonApp (e : Expr) : CanonM Expr := do
|
||||
if (← read).insideType then
|
||||
match_expr e with
|
||||
| f@ite α c i a b => canonIte f α c i a b
|
||||
| f@cond α c a b => canonCond f α c a b
|
||||
-- Remark: We currently don't normalize dependent-if-then-else occurring in types.
|
||||
| _ =>
|
||||
let f := e.getAppFn
|
||||
let .const declName _ := f | canonAppAndPost e
|
||||
if (← isMatcher declName) then
|
||||
canonMatch e
|
||||
else
|
||||
canonAppAndPost e
|
||||
else
|
||||
canonAppDefault e
|
||||
|
||||
canonProj (e : Expr) : CanonM Expr := do
|
||||
let e := e.updateProj! (← canon e.projExpr!)
|
||||
if (← read).insideType then
|
||||
return (← reduceProj? e).getD e
|
||||
else
|
||||
return e
|
||||
|
||||
/--
|
||||
Returns `true` if `shouldCannon pinfos i arg` is not `.visit`.
|
||||
This is a helper function used to implement mbtc.
|
||||
-/
|
||||
public def isSupport (pinfos : Array ParamInfo) (i : Nat) (arg : Expr) : MetaM Bool := do
|
||||
let r ← Canon.shouldCanon pinfos i arg
|
||||
return !r matches .visit
|
||||
|
||||
end Canon
|
||||
|
||||
/--
|
||||
Canonicalize `e` by normalizing types, instances, and support arguments.
|
||||
Types receive targeted reductions (eta, projection, match/ite, Nat arithmetic).
|
||||
Instances are re-synthesized. Values are traversed but not reduced.
|
||||
Runs at reducible transparency.
|
||||
-/
|
||||
public def canon (e : Expr) : SymM Expr := do profileitM Exception "sym canon" (← getOptions) do
|
||||
withReducible do Canon.canon e {}
|
||||
|
||||
end Lean.Meta.Sym
|
||||
@@ -41,13 +41,34 @@ where
|
||||
public def isEtaReducible (e : Expr) : Bool :=
|
||||
!isSameExpr e (etaReduce e)
|
||||
|
||||
public partial def etaReduceWithCache (e : Expr) (c : Std.HashMap ExprPtr Expr) : CoreM (Expr × Std.HashMap ExprPtr Expr) := do
|
||||
visit e |>.run c
|
||||
where
|
||||
cache (e e' : Expr) : StateRefT (Std.HashMap ExprPtr Expr) CoreM Expr := do
|
||||
modify fun s => s.insert { expr := e } e'
|
||||
return e'
|
||||
|
||||
visit (e : Expr) : StateRefT (Std.HashMap ExprPtr Expr) CoreM Expr := withIncRecDepth do
|
||||
if let some e' := (← get).get? { expr := e } then
|
||||
return e'
|
||||
match e with
|
||||
| .forallE _ d b _ => cache e (e.updateForallE! (← visit d) (← visit b))
|
||||
| .lam _ d b _ =>
|
||||
let e' := etaReduce.go e e 0
|
||||
if isSameExpr e e' then
|
||||
cache e (e.updateLambdaE! (← visit d) (← visit b))
|
||||
else
|
||||
cache e (← visit e')
|
||||
| .letE _ t v b _ => cache e (e.updateLetE! (← visit t) (← visit v) (← visit b))
|
||||
| .app f a => cache e (e.updateApp! (← visit f) (← visit a))
|
||||
| .mdata _ b => cache e (e.updateMData! (← visit b))
|
||||
| .proj _ _ b => cache e (e.updateProj! (← visit b))
|
||||
| _ => return e
|
||||
|
||||
/-- Applies `etaReduce` to all subexpressions. Returns `e` unchanged if no subexpression is eta-reducible. -/
|
||||
public def etaReduceAll (e : Expr) : MetaM Expr := do
|
||||
public def etaReduceAll (e : Expr) : CoreM Expr := do
|
||||
unless Option.isSome <| e.find? isEtaReducible do return e
|
||||
let pre (e : Expr) : MetaM TransformStep := do
|
||||
let e' := etaReduce e
|
||||
if isSameExpr e e' then return .continue
|
||||
else return .visit e'
|
||||
Meta.transform e (pre := pre)
|
||||
let (e, _) ← etaReduceWithCache e {}
|
||||
return e
|
||||
|
||||
end Lean.Meta.Sym
|
||||
|
||||
@@ -147,6 +147,14 @@ structure Context where
|
||||
sharedExprs : SharedExprs
|
||||
config : Config := {}
|
||||
|
||||
structure Canon.State where
|
||||
/-- Cache for value-level canonicalization (no type reductions applied). -/
|
||||
cache : Std.HashMap Expr Expr := {}
|
||||
/-- Cache for type-level canonicalization (reductions applied). -/
|
||||
cacheInType : Std.HashMap Expr Expr := {}
|
||||
/-- Cache mapping instances to their canonical synthesized instances. -/
|
||||
cacheInsts : Std.HashMap Expr Expr := {}
|
||||
|
||||
/-- Mutable state for the symbolic computation framework. -/
|
||||
structure State where
|
||||
/-- `ShareCommon` (aka `Hash-consing`) state. -/
|
||||
@@ -191,6 +199,7 @@ structure State where
|
||||
within a `sym =>` block and reported when a tactic fails.
|
||||
-/
|
||||
issues : List MessageData := []
|
||||
canon : Canon.State := {}
|
||||
debug : Bool := false
|
||||
|
||||
abbrev SymM := ReaderT Context <| StateRefT State MetaM
|
||||
|
||||
76
src/Lean/Meta/Sym/SynthInstance.lean
Normal file
76
src/Lean/Meta/Sym/SynthInstance.lean
Normal file
@@ -0,0 +1,76 @@
|
||||
/-
|
||||
Copyright (c) 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Sym.SymM
|
||||
import Lean.Meta.SynthInstance
|
||||
public section
|
||||
namespace Lean.Meta.Sym
|
||||
/--
|
||||
Some modules in grind use builtin instances defined directly in core (e.g., `lia`),
|
||||
while others synthesize them using `synthInstance` (e.g., `ring`).
|
||||
This inconsistency is problematic, as it may introduce mismatches and result in
|
||||
two different representations for the same term.
|
||||
|
||||
The following table is used to bypass synthInstance for the builtin cases.
|
||||
-/
|
||||
private def builtinInsts : Std.HashMap Expr Expr :=
|
||||
let nat := Nat.mkType
|
||||
let int := Int.mkType
|
||||
let us := [Level.zero, Level.zero, Level.zero]
|
||||
Std.HashMap.ofList [
|
||||
(mkApp3 (mkConst ``HAdd us) nat nat nat, Nat.mkInstHAdd),
|
||||
(mkApp3 (mkConst ``HSub us) nat nat nat, Nat.mkInstHSub),
|
||||
(mkApp3 (mkConst ``HMul us) nat nat nat, Nat.mkInstHMul),
|
||||
(mkApp3 (mkConst ``HDiv us) nat nat nat, Nat.mkInstHDiv),
|
||||
(mkApp3 (mkConst ``HMod us) nat nat nat, Nat.mkInstHMod),
|
||||
(mkApp3 (mkConst ``HPow us) nat nat nat, Nat.mkInstHPow),
|
||||
(mkApp (mkConst ``LT [0]) nat, Nat.mkInstLT),
|
||||
(mkApp (mkConst ``LE [0]) nat, Nat.mkInstLE),
|
||||
|
||||
(mkApp3 (mkConst ``HAdd us) int int int, Int.mkInstHAdd),
|
||||
(mkApp3 (mkConst ``HSub us) int int int, Int.mkInstHSub),
|
||||
(mkApp3 (mkConst ``HMul us) int int int, Int.mkInstHMul),
|
||||
(mkApp3 (mkConst ``HDiv us) int int int, Int.mkInstHDiv),
|
||||
(mkApp3 (mkConst ``HMod us) int int int, Int.mkInstHMod),
|
||||
(mkApp3 (mkConst ``HPow us) int nat int, Int.mkInstHPow),
|
||||
(mkApp (mkConst ``LT [0]) int, Int.mkInstLT),
|
||||
(mkApp (mkConst ``LE [0]) int, Int.mkInstLE),
|
||||
]
|
||||
|
||||
/--
|
||||
Some modules in grind use builtin instances defined directly in core (e.g., `lia`).
|
||||
Users may provide nonstandard instances that are definitionally equal to the ones in core.
|
||||
Given a type, such as `HAdd Int Int Int`, this function returns the instance defined in
|
||||
core.
|
||||
-/
|
||||
def getBuiltinInstance? (type : Expr) : Option Expr :=
|
||||
builtinInsts[type]?
|
||||
|
||||
def synthInstanceMeta? (type : Expr) : MetaM (Option Expr) := do profileitM Exception "sym typeclass inference" (← getOptions) (decl := type.getAppFn.constName?.getD .anonymous) do
|
||||
if let some inst := getBuiltinInstance? type then
|
||||
return inst
|
||||
catchInternalId isDefEqStuckExceptionId
|
||||
(synthInstanceCore? type none)
|
||||
(fun _ => pure none)
|
||||
|
||||
abbrev synthInstance? (type : Expr) : SymM (Option Expr) :=
|
||||
synthInstanceMeta? type
|
||||
|
||||
def synthInstance (type : Expr) : SymM Expr := do
|
||||
let some inst ← synthInstance? type
|
||||
| throwError "`sym` failed to find instance{indentExpr type}"
|
||||
return inst
|
||||
|
||||
/--
|
||||
Helper function for instantiating a type class `type`, and
|
||||
then using the result to perform `isDefEq x val`.
|
||||
-/
|
||||
def synthInstanceAndAssign (x type : Expr) : SymM Bool := do
|
||||
let some val ← synthInstance? type | return false
|
||||
isDefEq x val
|
||||
|
||||
end Lean.Meta.Sym
|
||||
@@ -12,7 +12,6 @@ public import Lean.Meta.Tactic.Grind.Util
|
||||
public import Lean.Meta.Tactic.Grind.Cases
|
||||
public import Lean.Meta.Tactic.Grind.Injection
|
||||
public import Lean.Meta.Tactic.Grind.Core
|
||||
public import Lean.Meta.Tactic.Grind.Canon
|
||||
public import Lean.Meta.Tactic.Grind.MarkNestedSubsingletons
|
||||
public import Lean.Meta.Tactic.Grind.Inv
|
||||
public import Lean.Meta.Tactic.Grind.Proof
|
||||
|
||||
@@ -6,8 +6,11 @@ Authors: Leonardo de Moura
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Basic
|
||||
import Lean.Meta.Tactic.Grind.SynthInstance
|
||||
import Init.Grind.FieldNormNum
|
||||
import Lean.Meta.Tactic.Grind.SynthInstance
|
||||
import Lean.Meta.AppBuilder
|
||||
import Lean.Meta.LitValues
|
||||
import Lean.Util.SafeExponentiation
|
||||
namespace Lean.Meta.Grind.Arith
|
||||
namespace FieldNormNum
|
||||
|
||||
|
||||
@@ -8,8 +8,13 @@ prelude
|
||||
public import Init.Grind.Ring.Basic
|
||||
public import Init.Simproc
|
||||
public import Lean.Meta.Tactic.Grind.SynthInstance
|
||||
public import Init.Simproc
|
||||
public import Lean.Meta.Tactic.Simp.BuiltinSimprocs.Util
|
||||
import Lean.Meta.LitValues
|
||||
import Init.Grind.Ring.Field
|
||||
import Lean.Meta.DecLevel
|
||||
import Lean.Meta.Tactic.Grind.Arith.FieldNormNum
|
||||
import Lean.Util.SafeExponentiation
|
||||
public section
|
||||
namespace Lean.Meta.Grind.Arith
|
||||
|
||||
|
||||
@@ -1,314 +0,0 @@
|
||||
/-
|
||||
Copyright (c) 2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Tactic.Grind.Types
|
||||
import Init.Grind.Util
|
||||
import Lean.Meta.IntInstTesters
|
||||
import Lean.Meta.NatInstTesters
|
||||
import Lean.Meta.Tactic.Grind.SynthInstance
|
||||
public section
|
||||
namespace Lean.Meta.Grind
|
||||
namespace Canon
|
||||
|
||||
/-!
|
||||
A canonicalizer module for the `grind` tactic. The canonicalizer defined in `Meta/Canonicalizer.lean` is
|
||||
not suitable for the `grind` tactic. It was designed for tactics such as `omega`, where the goal is
|
||||
to detect when two structurally different atoms are definitionally equal.
|
||||
|
||||
The `grind` tactic, on the other hand, uses congruence closure. Moreover, types, type formers, proofs, and instances
|
||||
are considered supporting elements and are not factored into congruence detection.
|
||||
|
||||
This module minimizes the number of `isDefEq` checks by comparing two terms `a` and `b` only if they are instances,
|
||||
types, or type formers and are the `i`-th arguments of two different `f`-applications. This approach is
|
||||
sufficient for the congruence closure procedure used by the `grind` tactic.
|
||||
|
||||
To further optimize `isDefEq` checks, instances are compared using `TransparencyMode.instances`, which reduces
|
||||
the number of constants that need to be unfolded. If diagnostics are enabled, instances are compared using
|
||||
the default transparency mode too for sanity checking, and discrepancies are reported.
|
||||
Types and type formers are always checked using default transparency.
|
||||
|
||||
Remark:
|
||||
The canonicalizer minimizes issues with non-canonical instances and structurally different but definitionally equal types,
|
||||
but it does not solve all problems. For example, consider a situation where we have `(a : BitVec n)`
|
||||
and `(b : BitVec m)`, along with instances `inst1 n : Add (BitVec n)` and `inst2 m : Add (BitVec m)` where `inst1`
|
||||
and `inst2` are structurally different. Now consider the terms `a + a` and `b + b`. After canonicalization, the two
|
||||
additions will still use structurally different (and definitionally different) instances: `inst1 n` and `inst2 m`.
|
||||
Furthermore, `grind` will not be able to infer that `a + a ≍ b + b` even if we add the assumptions `n = m` and `a ≍ b`.
|
||||
-/
|
||||
|
||||
@[inline] private def get' : GoalM State :=
|
||||
return (← get).canon
|
||||
|
||||
@[inline] private def modify' (f : State → State) : GoalM Unit :=
|
||||
modify fun s => { s with canon := f s.canon }
|
||||
|
||||
/--
|
||||
Helper function for `canonElemCore`. It tries `isDefEq a b` with default transparency, but using
|
||||
at most `canonHeartbeats` heartbeats. It reports an issue if the threshold is reached.
|
||||
Remark: `parent` is use only to report an issue.
|
||||
-/
|
||||
private def isDefEqBounded (a b : Expr) (parent : Expr) : GoalM Bool := do
|
||||
withCurrHeartbeats do
|
||||
let curr := (← getConfig).canonHeartbeats
|
||||
tryCatchRuntimeEx
|
||||
(withTheReader Core.Context (fun ctx => { ctx with maxHeartbeats := curr*1000 }) do
|
||||
isDefEqD a b)
|
||||
fun ex => do
|
||||
if ex.isRuntime then
|
||||
reportIssue! "failed to show that{indentExpr a}\nis definitionally equal to{indentExpr b}\nwhile canonicalizing{indentExpr parent}\nusing `{curr}*1000` heartbeats, `(canonHeartbeats := {curr})`"
|
||||
return false
|
||||
else
|
||||
throw ex
|
||||
|
||||
/--
|
||||
Helper function for canonicalizing `e` occurring as the `i`th argument of an `f`-application.
|
||||
If `useIsDefEqBounded` is `true`, we try `isDefEqBounded` before returning false.
|
||||
|
||||
Remark: `isInst` is `true` if element is an instance.
|
||||
-/
|
||||
private def canonElemCore (parent : Expr) (f : Expr) (i : Nat) (e : Expr) (useIsDefEqBounded : Bool) (isInst := false) : GoalM Expr := do
|
||||
let s ← get'
|
||||
let key := { f, i, arg := e : CanonArgKey }
|
||||
/-
|
||||
**Note**: We used to use `s.canon.find? e` instead of `s.canonArg.find? key`. This was incorrect.
|
||||
First, for types and implicit arguments, we recursively visit `e` before invoking this function.
|
||||
Thus, `s.canon.find? e` always returns some value `c`, causing us to miss possible canonicalization opportunities.
|
||||
Moreover, `e` may be the argument of two different `f` functions.
|
||||
-/
|
||||
if let some c := s.canonArg.find? key then
|
||||
return c
|
||||
let c ← go
|
||||
modify' fun s => { s with canonArg := s.canonArg.insert key c }
|
||||
return c
|
||||
where
|
||||
checkDefEq (e c : Expr) : GoalM Bool := do
|
||||
if (← isDefEq e c) then
|
||||
-- We used to check `c.fvarsSubset e` because it is not
|
||||
-- in general safe to replace `e` with `c` if `c` has more free variables than `e`.
|
||||
-- However, we don't revert previously canonicalized elements in the `grind` tactic.
|
||||
-- Moreover, we store the canonicalizer state in the `Goal` because we case-split
|
||||
-- and different locals are added in different branches.
|
||||
modify' fun s => { s with canon := s.canon.insert e c }
|
||||
trace_goal[grind.debug.canon] "found {e} ===> {c}"
|
||||
return true
|
||||
if useIsDefEqBounded then
|
||||
-- If `e` and `c` are not types, we use `isDefEqBounded`
|
||||
if (← isDefEqBounded e c parent) then
|
||||
modify' fun s => { s with canon := s.canon.insert e c }
|
||||
trace_goal[grind.debug.canon] "found using `isDefEqBounded`: {e} ===> {c}"
|
||||
return true
|
||||
return false
|
||||
|
||||
go : GoalM Expr := do
|
||||
let eType ← inferType e
|
||||
if isInst then
|
||||
/-
|
||||
**Note**: Recall that some `grind` modules (e.g., `lia`) rely on instances defined directly in core.
|
||||
This test ensures we use them as the canonical representative.
|
||||
-/
|
||||
if let some c := getBuiltinInstance? eType then
|
||||
if (← checkDefEq e c) then
|
||||
return c
|
||||
let s ← get'
|
||||
let key := (f, i)
|
||||
let cs := s.argMap.find? key |>.getD []
|
||||
for (c, cType) in cs do
|
||||
/-
|
||||
We first check the types
|
||||
The following checks are a performance bottleneck.
|
||||
For example, in the test `grind_ite.lean`, there are many checks of the form:
|
||||
```
|
||||
w_4 ∈ assign.insert v true → Prop =?= w_1 ∈ assign.insert v false → Prop
|
||||
```
|
||||
where `grind` unfolds the definition of `DHashMap.insert` and `TreeMap.insert`.
|
||||
-/
|
||||
if (← isDefEqD eType cType) then
|
||||
if (← checkDefEq e c) then
|
||||
return c
|
||||
trace_goal[grind.debug.canon] "({f}, {i}) ↦ {e}"
|
||||
modify' fun s => { s with canon := s.canon.insert e e, argMap := s.argMap.insert key ((e, eType)::cs) }
|
||||
return e
|
||||
|
||||
private abbrev canonType (parent f : Expr) (i : Nat) (e : Expr) :=
|
||||
withDefault <| canonElemCore parent f i e (useIsDefEqBounded := false)
|
||||
|
||||
private abbrev canonInst (parent f : Expr) (i : Nat) (e : Expr) :=
|
||||
withReducibleAndInstances <| canonElemCore parent f i e (useIsDefEqBounded := true) (isInst := true)
|
||||
|
||||
private abbrev canonImplicit (parent f : Expr) (i : Nat) (e : Expr) :=
|
||||
withReducible <| canonElemCore parent f i e (useIsDefEqBounded := true)
|
||||
|
||||
/--
|
||||
Return type for the `shouldCanon` function.
|
||||
-/
|
||||
private inductive ShouldCanonResult where
|
||||
| /- Nested types (and type formers) are canonicalized. -/
|
||||
canonType
|
||||
| /- Nested instances are canonicalized. -/
|
||||
canonInst
|
||||
| /- Implicit argument that is not an instance nor a type. -/
|
||||
canonImplicit
|
||||
| /-
|
||||
Term is not a proof, type (former), nor an instance.
|
||||
Thus, it must be recursively visited by the canonicalizer.
|
||||
-/
|
||||
visit
|
||||
deriving Inhabited
|
||||
|
||||
private instance : Repr ShouldCanonResult where
|
||||
reprPrec r _ := private match r with
|
||||
| .canonType => "canonType"
|
||||
| .canonInst => "canonInst"
|
||||
| .canonImplicit => "canonImplicit"
|
||||
| .visit => "visit"
|
||||
|
||||
/--
|
||||
See comments at `ShouldCanonResult`.
|
||||
-/
|
||||
private def shouldCanon (pinfos : Array ParamInfo) (i : Nat) (arg : Expr) : MetaM ShouldCanonResult := do
|
||||
if h : i < pinfos.size then
|
||||
let pinfo := pinfos[i]
|
||||
if pinfo.isInstance then
|
||||
return .canonInst
|
||||
else if pinfo.isProp then
|
||||
return .visit
|
||||
else if pinfo.isImplicit then
|
||||
if (← isTypeFormer arg) then
|
||||
return .canonType
|
||||
else
|
||||
return .canonImplicit
|
||||
if (← isProp arg) then
|
||||
return .visit
|
||||
else if (← isTypeFormer arg) then
|
||||
return .canonType
|
||||
else
|
||||
return .visit
|
||||
|
||||
/--
|
||||
Returns `true` if `shouldCannon pinfos i arg` is not `.visit`.
|
||||
This is a helper function used to implement mbtc.
|
||||
-/
|
||||
def isSupport (pinfos : Array ParamInfo) (i : Nat) (arg : Expr) : MetaM Bool := do
|
||||
let r ← shouldCanon pinfos i arg
|
||||
return !r matches .visit
|
||||
|
||||
/--
|
||||
Auxiliary function for normalizing the arguments of `OfNat.ofNat` during canonicalization.
|
||||
This is needed because satellite solvers create `Nat` and `Int` numerals using the
|
||||
APIs `mkNatLit` and `mkIntLit`, which produce terms of the form
|
||||
`@OfNat.ofNat Nat <num> inst` and `@OfNat.ofNat Int <num> inst`.
|
||||
This becomes a problem when a term in the input goal has already been canonicalized
|
||||
and its type is not exactly `Nat` or `Int`. For example, in issue #9477, we have:
|
||||
```
|
||||
structure T where
|
||||
upper_bound : Nat
|
||||
def T.range (a : T) := 0...a.upper_bound
|
||||
theorem range\_lower (a : T) : a.range.lower = 0 := by rfl
|
||||
```
|
||||
Here, the `0` in `range_lower` is actually represented as:
|
||||
```
|
||||
(@OfNat.ofNat
|
||||
(Std.PRange.Bound (Std.PRange.RangeShape.lower (Std.PRange.RangeShape.mk Std.PRange.BoundShape.closed Std.PRange.BoundShape.open)) Nat)
|
||||
(nat_lit 0)
|
||||
(instOfNatNat (nat_lit 0)))
|
||||
```
|
||||
Without this normalization step, the satellite solver would need to handle multiple
|
||||
representations for `(0 : Nat)` and `(0 : Int)`, complicating reasoning.
|
||||
-/
|
||||
-- Remark: This is not a great solution. We should consider writing a custom canonicalizer for
|
||||
-- `OfNat.ofNat` and other constants with built-in support in `grind`.
|
||||
private def normOfNatArgs? (args : Array Expr) : MetaM (Option (Array Expr)) := do
|
||||
if h : args.size = 3 then
|
||||
let mut args : Vector Expr 3 := h ▸ args.toVector
|
||||
let mut modified := false
|
||||
if args[1].isAppOf ``OfNat.ofNat then
|
||||
-- If nested `OfNat.ofNat`, convert to raw nat literal
|
||||
let some val ← getNatValue? args[1] | pure ()
|
||||
args := args.set 1 (mkRawNatLit val)
|
||||
modified := true
|
||||
let inst := args[2]
|
||||
if (← Structural.isInstOfNatNat inst) && !args[0].isConstOf ``Nat then
|
||||
return some (args.set 0 Nat.mkType |>.toArray)
|
||||
else if (← Structural.isInstOfNatInt inst) && !args[0].isConstOf ``Int then
|
||||
return some (args.set 0 Int.mkType |>.toArray)
|
||||
else if modified then
|
||||
return some args.toArray
|
||||
return none
|
||||
|
||||
set_option compiler.ignoreBorrowAnnotation true in
|
||||
@[export lean_grind_canon]
|
||||
partial def canonImpl (e : Expr) : GoalM Expr := do profileitM Exception "grind canon" (← getOptions) do
|
||||
trace_goal[grind.debug.canon] "{e}"
|
||||
visit e |>.run' {}
|
||||
where
|
||||
visit (e : Expr) : StateRefT (Std.HashMap ExprPtr Expr) GoalM Expr := do
|
||||
unless e.isApp || e.isForall do return e
|
||||
-- Check whether it is cached
|
||||
if let some r := (← get).get? { expr := e } then
|
||||
return r
|
||||
let e' ← match e with
|
||||
| .app .. => e.withApp fun f args => do
|
||||
if f.isConstOf ``Grind.nestedProof && args.size == 2 then
|
||||
let prop := args[0]!
|
||||
let prop' ← visit prop
|
||||
if let some r := (← get').proofCanon.find? prop' then
|
||||
pure r
|
||||
else
|
||||
let e' := if isSameExpr prop prop' then e else mkAppN f (args.set! 0 prop')
|
||||
modify' fun s => { s with proofCanon := s.proofCanon.insert prop' e' }
|
||||
pure e'
|
||||
else if f.isConstOf ``Grind.nestedDecidable && args.size == 2 then
|
||||
let prop := args[0]!
|
||||
let prop' ← visit prop
|
||||
let e' := if isSameExpr prop prop' then e else mkAppN f (args.set! 0 prop')
|
||||
pure e'
|
||||
else
|
||||
let mut modified := false
|
||||
let args ← if f.isConstOf ``OfNat.ofNat then
|
||||
let some args ← normOfNatArgs? args | pure args
|
||||
modified := true
|
||||
pure args
|
||||
else
|
||||
pure args
|
||||
let pinfos := (← getFunInfo f).paramInfo
|
||||
let mut args := args.toVector
|
||||
for h : i in *...args.size do
|
||||
let arg := args[i]
|
||||
trace_goal[grind.debug.canon] "[{repr (← shouldCanon pinfos i arg)}]: {arg} : {← inferType arg}"
|
||||
let arg' ← match (← shouldCanon pinfos i arg) with
|
||||
| .canonType =>
|
||||
/-
|
||||
The type may have nested propositions and terms that may need to be canonicalized too.
|
||||
So, we must recurse over it. See issue #10232
|
||||
-/
|
||||
canonType e f i (← visit arg)
|
||||
| .canonImplicit => canonImplicit e f i (← visit arg)
|
||||
| .visit => visit arg
|
||||
| .canonInst =>
|
||||
if arg.isAppOfArity ``Grind.nestedDecidable 2 then
|
||||
let prop := arg.appFn!.appArg!
|
||||
let prop' ← visit prop
|
||||
if isSameExpr prop prop' then pure arg else pure (mkApp2 arg.appFn!.appFn! prop' arg.appArg!)
|
||||
else
|
||||
canonInst e f i arg
|
||||
unless isSameExpr arg arg' do
|
||||
args := args.set i arg'
|
||||
modified := true
|
||||
pure <| if modified then mkAppN f args.toArray else e
|
||||
| .forallE _ d b _ =>
|
||||
-- Recall that we have `ForallProp.lean`.
|
||||
let d' ← visit d
|
||||
-- Remark: users may not want to convert `p → q` into `¬p ∨ q`
|
||||
let b' ← if b.hasLooseBVars then pure b else visit b
|
||||
pure <| e.updateForallE! d' b'
|
||||
| _ => unreachable!
|
||||
modify fun s => s.insert { expr := e } e'
|
||||
return e'
|
||||
|
||||
end Canon
|
||||
|
||||
end Lean.Meta.Grind
|
||||
@@ -51,7 +51,7 @@ private def propagateCtorHomo (α : Expr) (a b : Expr) : GoalM Unit := do
|
||||
There is no guarantee that `inferType (← mkEqProof a b)` is structurally equal to `a = b`.
|
||||
-/
|
||||
let mask := mask.set! (n-1) (some (← mkExpectedTypeHint (← mkEqProof a b) (← mkEq a b)))
|
||||
let injLemma ← mkAppOptM injDeclName mask
|
||||
let injLemma ← withDefault <| mkAppOptM injDeclName mask
|
||||
let injLemmaType ← inferType injLemma
|
||||
let gen := max (← getGeneration a) (← getGeneration b)
|
||||
propagateInjEqs injLemmaType injLemma gen
|
||||
|
||||
@@ -6,7 +6,6 @@ Authors: Leonardo de Moura
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Tactic.Grind.Types
|
||||
import Lean.Meta.Tactic.Grind.Canon
|
||||
import Lean.Meta.Tactic.Grind.CastLike
|
||||
public section
|
||||
namespace Lean.Meta.Grind
|
||||
@@ -66,7 +65,7 @@ private def mkKey (e : Expr) (i : Nat) : MetaM Key :=
|
||||
let arg := args[j]
|
||||
if i == j then
|
||||
args := args.set j mainMark
|
||||
else if !(← Canon.isSupport info.paramInfo j arg) then
|
||||
else if !(← Sym.Canon.isSupport info.paramInfo j arg) then
|
||||
args := args.set j otherMark
|
||||
let mask := mkAppN f args.toArray
|
||||
return { mask }
|
||||
|
||||
@@ -5,7 +5,7 @@ Authors: Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Tactic.Grind.SynthInstance
|
||||
public import Lean.Meta.Tactic.Grind.Types
|
||||
public section
|
||||
namespace Lean.Meta.Grind
|
||||
/-!
|
||||
|
||||
@@ -6,6 +6,7 @@ Authors: Leonardo de Moura
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Tactic.Grind.Types
|
||||
import Init.Grind.Util
|
||||
import Lean.Meta.Tactic.Grind.Simp
|
||||
public section
|
||||
namespace Lean.Meta.Grind
|
||||
@@ -112,6 +113,9 @@ private partial def abstractGroundMismatches? (lhs rhs : Expr) : GoalM (Option (
|
||||
if s.lhss.isEmpty then
|
||||
return none
|
||||
let f := mkLambdaWithBodyAndVarType s.varTypes f
|
||||
let fType ← inferType f
|
||||
let u ← getLevel fType
|
||||
let f := mkApp2 (.const ``Grind.abstractFn [u]) fType f
|
||||
return some (mkAppN f s.lhss, mkAppN f s.rhss)
|
||||
where
|
||||
goCore (lhs rhs : Expr) : AbstractM Expr := do
|
||||
|
||||
@@ -5,71 +5,10 @@ Authors: Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
prelude
|
||||
public import Lean.Meta.Tactic.Grind.Types
|
||||
public import Lean.Meta.Sym.SynthInstance
|
||||
public section
|
||||
namespace Lean.Meta.Grind
|
||||
/--
|
||||
Some modules in grind use builtin instances defined directly in core (e.g., `lia`),
|
||||
while others synthesize them using `synthInstance` (e.g., `ring`).
|
||||
This inconsistency is problematic, as it may introduce mismatches and result in
|
||||
two different representations for the same term.
|
||||
|
||||
The following table is used to bypass synthInstance for the builtin cases.
|
||||
-/
|
||||
private def builtinInsts : Std.HashMap Expr Expr :=
|
||||
let nat := Nat.mkType
|
||||
let int := Int.mkType
|
||||
let us := [Level.zero, Level.zero, Level.zero]
|
||||
Std.HashMap.ofList [
|
||||
(mkApp3 (mkConst ``HAdd us) nat nat nat, Nat.mkInstHAdd),
|
||||
(mkApp3 (mkConst ``HSub us) nat nat nat, Nat.mkInstHSub),
|
||||
(mkApp3 (mkConst ``HMul us) nat nat nat, Nat.mkInstHMul),
|
||||
(mkApp3 (mkConst ``HDiv us) nat nat nat, Nat.mkInstHDiv),
|
||||
(mkApp3 (mkConst ``HMod us) nat nat nat, Nat.mkInstHMod),
|
||||
(mkApp3 (mkConst ``HPow us) nat nat nat, Nat.mkInstHPow),
|
||||
(mkApp (mkConst ``LT [0]) nat, Nat.mkInstLT),
|
||||
(mkApp (mkConst ``LE [0]) nat, Nat.mkInstLE),
|
||||
|
||||
(mkApp3 (mkConst ``HAdd us) int int int, Int.mkInstHAdd),
|
||||
(mkApp3 (mkConst ``HSub us) int int int, Int.mkInstHSub),
|
||||
(mkApp3 (mkConst ``HMul us) int int int, Int.mkInstHMul),
|
||||
(mkApp3 (mkConst ``HDiv us) int int int, Int.mkInstHDiv),
|
||||
(mkApp3 (mkConst ``HMod us) int int int, Int.mkInstHMod),
|
||||
(mkApp3 (mkConst ``HPow us) int nat int, Int.mkInstHPow),
|
||||
(mkApp (mkConst ``LT [0]) int, Int.mkInstLT),
|
||||
(mkApp (mkConst ``LE [0]) int, Int.mkInstLE),
|
||||
]
|
||||
|
||||
/--
|
||||
Some modules in grind use builtin instances defined directly in core (e.g., `lia`).
|
||||
Users may provide nonstandard instances that are definitionally equal to the ones in core.
|
||||
Given a type, such as `HAdd Int Int Int`, this function returns the instance defined in
|
||||
core.
|
||||
-/
|
||||
def getBuiltinInstance? (type : Expr) : Option Expr :=
|
||||
builtinInsts[type]?
|
||||
|
||||
def synthInstanceMeta? (type : Expr) : MetaM (Option Expr) := do profileitM Exception "grind typeclass inference" (← getOptions) (decl := type.getAppFn.constName?.getD .anonymous) do
|
||||
if let some inst := getBuiltinInstance? type then
|
||||
return inst
|
||||
catchInternalId isDefEqStuckExceptionId
|
||||
(synthInstanceCore? type none)
|
||||
(fun _ => pure none)
|
||||
|
||||
abbrev synthInstance? (type : Expr) : GoalM (Option Expr) :=
|
||||
synthInstanceMeta? type
|
||||
|
||||
def synthInstance (type : Expr) : GoalM Expr := do
|
||||
let some inst ← synthInstance? type
|
||||
| throwError "`grind` failed to find instance{indentExpr type}"
|
||||
return inst
|
||||
|
||||
/--
|
||||
Helper function for instantiating a type class `type`, and
|
||||
then using the result to perform `isDefEq x val`.
|
||||
-/
|
||||
def synthInstanceAndAssign (x type : Expr) : GoalM Bool := do
|
||||
let some val ← synthInstance? type | return false
|
||||
isDefEq x val
|
||||
export Sym (synthInstance synthInstance? synthInstanceMeta? synthInstanceAndAssign)
|
||||
|
||||
end Lean.Meta.Grind
|
||||
|
||||
@@ -5,15 +5,16 @@ Authors: Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
prelude
|
||||
public import Init.Data.Queue
|
||||
public import Init.Grind.Config
|
||||
public import Lean.Meta.Sym.SymM
|
||||
public import Lean.Meta.Tactic.Grind.Attr
|
||||
public import Lean.Meta.Tactic.Grind.CheckResult
|
||||
public import Init.Data.Queue
|
||||
public import Lean.Meta.Sym.Canon
|
||||
meta import Init.Data.String.Basic
|
||||
import Lean.Meta.AbstractNestedProofs
|
||||
import Lean.Meta.Match.MatchEqsExt
|
||||
public import Init.Grind.Config
|
||||
import Init.Data.Nat.Linear
|
||||
meta import Init.Data.String.Basic
|
||||
import Init.Omega
|
||||
import Lean.Util.ShareCommon
|
||||
public section
|
||||
@@ -715,14 +716,6 @@ structure CanonArgKey where
|
||||
arg : Expr
|
||||
deriving BEq, Hashable
|
||||
|
||||
/-- Canonicalizer state. See `Canon.lean` for additional details. -/
|
||||
structure Canon.State where
|
||||
argMap : PHashMap (Expr × Nat) (List (Expr × Expr)) := {}
|
||||
canon : PHashMap Expr Expr := {}
|
||||
proofCanon : PHashMap Expr Expr := {}
|
||||
canonArg : PHashMap CanonArgKey Expr := {}
|
||||
deriving Inhabited
|
||||
|
||||
/-- Trace information for a case split. -/
|
||||
structure CaseTrace where
|
||||
expr : Expr
|
||||
@@ -922,7 +915,6 @@ accumulated facts.
|
||||
structure GoalState where
|
||||
/-- Next local declaration index to process. -/
|
||||
nextDeclIdx : Nat := 0
|
||||
canon : Canon.State := {}
|
||||
enodeMap : ENodeMap := default
|
||||
exprs : PArray Expr := {}
|
||||
parents : ParentMap := {}
|
||||
@@ -1735,10 +1727,7 @@ def withoutModifyingState (x : GoalM α) : GoalM α := do
|
||||
finally
|
||||
set saved
|
||||
|
||||
set_option compiler.ignoreBorrowAnnotation true in
|
||||
/-- Canonicalizes nested types, type formers, and instances in `e`. -/
|
||||
@[extern "lean_grind_canon"] -- Forward definition
|
||||
opaque canon (e : Expr) : GoalM Expr
|
||||
export Sym (canon)
|
||||
|
||||
/-!
|
||||
`Action` is the *control interface* for `grind`’s search steps. It is defined in
|
||||
|
||||
@@ -99,7 +99,7 @@ where
|
||||
if (← withReducibleAndInstances <| isDefEq x val) then
|
||||
return true
|
||||
else
|
||||
trace[Meta.Tactic.simp.discharge] "{← ppOrigin thmId}, failed to assign instance{indentExpr type}\nsythesized value{indentExpr val}\nis not definitionally equal to{indentExpr x}"
|
||||
trace[Meta.Tactic.simp.discharge] "{← ppOrigin thmId}, failed to assign instance{indentExpr type}\nsynthesized value{indentExpr val}\nis not definitionally equal to{indentExpr x}"
|
||||
return false
|
||||
| _ =>
|
||||
trace[Meta.Tactic.simp.discharge] "{← ppOrigin thmId}, failed to synthesize instance{indentExpr type}"
|
||||
|
||||
@@ -445,27 +445,33 @@ unsafe def MethodsRef.toMethodsImpl (m : MethodsRef) : Methods :=
|
||||
@[implemented_by MethodsRef.toMethodsImpl]
|
||||
opaque MethodsRef.toMethods (m : MethodsRef) : Methods
|
||||
|
||||
@[inline]
|
||||
def getMethods : SimpM Methods :=
|
||||
return MethodsRef.toMethods (← read)
|
||||
|
||||
@[inline]
|
||||
def pre (e : Expr) : SimpM Step := do
|
||||
(← getMethods).pre e
|
||||
|
||||
@[inline]
|
||||
def post (e : Expr) : SimpM Step := do
|
||||
(← getMethods).post e
|
||||
|
||||
@[inline] def getContext : SimpM Context :=
|
||||
readThe Context
|
||||
|
||||
@[inline]
|
||||
def getConfig : SimpM Config :=
|
||||
return (← getContext).config
|
||||
|
||||
@[inline] def withParent (parent : Expr) (f : SimpM α) : SimpM α :=
|
||||
withTheReader Context (fun ctx => { ctx with parent? := parent }) f
|
||||
|
||||
@[inline]
|
||||
def getSimpTheorems : SimpM SimpTheoremsArray :=
|
||||
return (← readThe Context).simpTheorems
|
||||
|
||||
@[inline]
|
||||
def getSimpCongrTheorems : SimpM SimpCongrTheorems :=
|
||||
return (← readThe Context).congrTheorems
|
||||
|
||||
@@ -473,6 +479,7 @@ def getSimpCongrTheorems : SimpM SimpCongrTheorems :=
|
||||
Returns `true` if `simp` is in `dsimp` mode.
|
||||
That is, only transformations that preserve definitional equality should be applied.
|
||||
-/
|
||||
@[inline]
|
||||
def inDSimp : SimpM Bool :=
|
||||
return (← readThe Context).inDSimp
|
||||
|
||||
|
||||
@@ -592,6 +592,8 @@ See also: `#reduce e` for evaluation by term reduction.
|
||||
"#print " >> (ident <|> strLit)
|
||||
@[builtin_command_parser] def printSig := leading_parser
|
||||
"#print " >> nonReservedSymbol "sig " >> ident
|
||||
/-- Prints the axioms used by a declaration, directly or indirectly.
|
||||
Please consult [the reference manual](lean-manual://section/validating-proofs) to understand the significance of the output. -/
|
||||
@[builtin_command_parser] def printAxioms := leading_parser
|
||||
"#print " >> nonReservedSymbol "axioms " >> ident
|
||||
@[builtin_command_parser] def printEqns := leading_parser
|
||||
|
||||
@@ -774,15 +774,35 @@ In particular, it is like a unary operation with a fixed parameter `b`, where on
|
||||
@[builtin_term_parser] def noImplicitLambda := leading_parser
|
||||
"no_implicit_lambda% " >> termParser maxPrec
|
||||
/--
|
||||
`inferInstanceAs α` synthesizes an instance of type `α`, transporting it from a
|
||||
definitionally equal type if necessary. This is useful when `α` is definitionally equal to
|
||||
some `α'` for which instances are registered, as it prevents leaking the definition's RHS
|
||||
at lower transparencies.
|
||||
`inferInstanceAs α` synthesizes an instance of type `α` and then adjusts it to conform to the
|
||||
expected type `β`, which must be inferable from context.
|
||||
|
||||
`inferInstanceAs` requires an expected type from context. If you just need to synthesize an
|
||||
instance without transporting between types, use `inferInstance` instead.
|
||||
Example:
|
||||
```
|
||||
def D := Nat
|
||||
instance : Inhabited D := inferInstanceAs (Inhabited Nat)
|
||||
```
|
||||
|
||||
See `Lean.Meta.WrapInstance` for details.
|
||||
The adjustment will make sure that when the resulting instance will not "leak" the RHS `Nat` when
|
||||
reduced at transparency levels below `semireducible`, i.e. where `D` would not be unfolded either,
|
||||
preventing "defeq abuse".
|
||||
|
||||
More specifically, given the "source type" (the argument) and "target type" (the expected type),
|
||||
`inferInstanceAs` synthesizes an instance for the source type and then unfolds and rewraps its
|
||||
components (fields, nested instances) as necessary to make them compatible with the target type. The
|
||||
individual steps are represented by the following options, which all default to enabled and can be
|
||||
disabled to help with porting:
|
||||
|
||||
* `backward.inferInstanceAs.wrap`: master switch for instance adjustment in both `inferInstanceAs`
|
||||
and the default deriving handler
|
||||
* `backward.inferInstanceAs.wrap.reuseSubInstances`: reuse existing instances for the target type
|
||||
for sub-instance fields to avoid non-defeq instance diamonds
|
||||
* `backward.inferInstanceAs.wrap.instances`: wrap non-reducible instances in auxiliary definitions
|
||||
* `backward.inferInstanceAs.wrap.data`: wrap data fields in auxiliary definitions (proof fields are
|
||||
always wrapped)
|
||||
|
||||
If you just need to synthesize an instance without transporting between types, use `inferInstance`
|
||||
instead, potentially with a type annotation for the expected type.
|
||||
-/
|
||||
@[builtin_term_parser] def «inferInstanceAs» := leading_parser
|
||||
"inferInstanceAs" >> (((" $ " <|> " <| ") >> termParser minPrec) <|> (ppSpace >> termParser argPrec))
|
||||
|
||||
@@ -93,10 +93,12 @@ def addEntryFn (descr : Descr α β σ) (s : StateStack α β σ) (e : Entry β)
|
||||
s
|
||||
}
|
||||
|
||||
def exportEntriesFn (descr : Descr α β σ) (level : OLeanLevel) (s : StateStack α β σ) : Array (Entry α) :=
|
||||
s.newEntries.toArray.reverse.filterMap fun
|
||||
| .global e => .global <$> descr.exportEntry? level e
|
||||
| .scoped ns e => .scoped ns <$> descr.exportEntry? level e
|
||||
def exportEntriesFn (descr : Descr α β σ) (s : StateStack α β σ) : OLeanEntries (Array (Entry α)) :=
|
||||
let forLevel (level : OLeanLevel) :=
|
||||
s.newEntries.toArray.reverse.filterMap fun
|
||||
| .global e => .global <$> descr.exportEntry? level e
|
||||
| .scoped ns e => .scoped ns <$> descr.exportEntry? level e
|
||||
{ exported := forLevel .exported, server := forLevel .server, «private» := forLevel .private }
|
||||
|
||||
end ScopedEnvExtension
|
||||
|
||||
@@ -115,7 +117,7 @@ unsafe def registerScopedEnvExtensionUnsafe (descr : Descr α β σ) : IO (Scope
|
||||
mkInitial := mkInitial descr
|
||||
addImportedFn := addImportedFn descr
|
||||
addEntryFn := addEntryFn descr
|
||||
exportEntriesFnEx := fun _ s level => exportEntriesFn descr level s
|
||||
exportEntriesFnEx := fun _ s => exportEntriesFn descr s
|
||||
statsFn := fun s => format "number of local entries: " ++ format s.newEntries.length
|
||||
-- We restrict addition of global and `scoped` entries to the main thread but allow addition of
|
||||
-- scopes and local entries in any thread, which are visible only in that thread (see uses of
|
||||
|
||||
@@ -46,7 +46,7 @@ structure LeanSemanticToken where
|
||||
stx : Syntax
|
||||
/-- Type of the semantic token. -/
|
||||
type : SemanticTokenType
|
||||
/-- In case of overlap, higher-priority tokens will take precendence -/
|
||||
/-- In case of overlap, higher-priority tokens will take precedence -/
|
||||
priority : Nat := 5
|
||||
|
||||
/-- Semantic token information with absolute LSP positions. -/
|
||||
@@ -57,7 +57,7 @@ structure AbsoluteLspSemanticToken where
|
||||
tailPos : Lsp.Position
|
||||
/-- Start position of the semantic token. -/
|
||||
type : SemanticTokenType
|
||||
/-- In case of overlap, higher-priority tokens will take precendence -/
|
||||
/-- In case of overlap, higher-priority tokens will take precedence -/
|
||||
priority : Nat := 5
|
||||
deriving BEq, Hashable, FromJson, ToJson
|
||||
|
||||
|
||||
@@ -8,44 +8,149 @@ module
|
||||
prelude
|
||||
public import Lean.MonadEnv
|
||||
|
||||
public section
|
||||
|
||||
namespace Lean
|
||||
|
||||
namespace CollectAxioms
|
||||
|
||||
structure State where
|
||||
visited : NameSet := {}
|
||||
axioms : Array Name := #[]
|
||||
/-- Cache mapping constants to their (sorted) axiom dependencies. -/
|
||||
seen : NameMap (Array Name) := {}
|
||||
/-- Axioms accumulated for the current constant being processed. -/
|
||||
axioms : NameSet := {}
|
||||
|
||||
abbrev M := ReaderT Environment $ StateM State
|
||||
|
||||
partial def collect (c : Name) : M Unit := do
|
||||
let collectExpr (e : Expr) : M Unit := e.getUsedConstants.forM collect
|
||||
def runM (env : Environment) (x : M α) : α :=
|
||||
x.run env |>.run' {}
|
||||
|
||||
private def insertArray (s : NameSet) (axs : Array Name) : NameSet :=
|
||||
axs.foldl (init := s) fun acc ax => acc.insert ax
|
||||
|
||||
/--
|
||||
Collect axioms reachable from constant `c`, using `extFind?` to look up pre-computed axioms
|
||||
for imported declarations. Results are cached in `State.seen`.
|
||||
|
||||
When processing a constant not found in `extFind?` or the cache, the function temporarily
|
||||
clears the axiom accumulator, recurses into the constant's dependencies, caches the result
|
||||
in `seen`, and merges the collected axioms back.
|
||||
-/
|
||||
private partial def collect
|
||||
(extFind? : Environment → Name → Option (Array Name))
|
||||
(c : Name) : M Unit := do
|
||||
let env ← read
|
||||
-- Check extension for pre-computed axioms (imported declarations)
|
||||
if let some axs := extFind? env c then
|
||||
modify fun s => { s with axioms := insertArray s.axioms axs, seen := s.seen.insert c axs }
|
||||
return
|
||||
-- Check local cache
|
||||
let s ← get
|
||||
unless s.visited.contains c do
|
||||
modify fun s => { s with visited := s.visited.insert c }
|
||||
let env ← read
|
||||
-- We should take the constant from the kernel env, which may differ from the one in the elab
|
||||
-- env in case of (async) errors.
|
||||
match env.checked.get.find? c with
|
||||
| some (ConstantInfo.axiomInfo v) =>
|
||||
modify fun s => { s with axioms := (s.axioms.push c) }
|
||||
collectExpr v.type
|
||||
| some (ConstantInfo.defnInfo v) => collectExpr v.type *> collectExpr v.value
|
||||
| some (ConstantInfo.thmInfo v) => collectExpr v.type *> collectExpr v.value
|
||||
| some (ConstantInfo.opaqueInfo v) => collectExpr v.type *> collectExpr v.value
|
||||
| some (ConstantInfo.quotInfo _) => pure ()
|
||||
| some (ConstantInfo.ctorInfo v) => collectExpr v.type
|
||||
| some (ConstantInfo.recInfo v) => collectExpr v.type
|
||||
| some (ConstantInfo.inductInfo v) => collectExpr v.type *> v.ctors.forM collect
|
||||
| none => pure ()
|
||||
if let some axs := s.seen.find? c then
|
||||
modify fun s => { s with axioms := insertArray s.axioms axs }
|
||||
return
|
||||
-- Recurse: temporarily clear axioms to isolate this constant's contribution.
|
||||
-- Insert sentinel to prevent infinite recursion (e.g., inductives ↔ constructors).
|
||||
let savedAxioms := s.axioms
|
||||
modify fun s => { s with axioms := {}, seen := s.seen.insert c #[] }
|
||||
let collectExpr (e : Expr) : M Unit := e.getUsedConstants.forM (collect extFind?)
|
||||
-- Take constants from the kernel env, which may differ from the elab env for (async) errors.
|
||||
match env.checked.get.find? c with
|
||||
| some (.axiomInfo v) =>
|
||||
modify fun s => { s with axioms := s.axioms.insert c }
|
||||
collectExpr v.type
|
||||
| some (.defnInfo v) => collectExpr v.type *> collectExpr v.value
|
||||
| some (.thmInfo v) => collectExpr v.type *> collectExpr v.value
|
||||
| some (.opaqueInfo v) => collectExpr v.type *> collectExpr v.value
|
||||
| some (.quotInfo _) => pure ()
|
||||
| some (.ctorInfo v) => collectExpr v.type
|
||||
| some (.recInfo v) => collectExpr v.type
|
||||
| some (.inductInfo v) => collectExpr v.type *> v.ctors.forM (collect extFind?)
|
||||
| none => pure ()
|
||||
-- Cache result (sorted for canonical order) and merge back into saved axioms
|
||||
let collected := (← get).axioms
|
||||
let result := collected.toArray.qsort Name.lt
|
||||
modify fun s => { s with
|
||||
seen := s.seen.insert c result
|
||||
axioms := insertArray savedAxioms result
|
||||
}
|
||||
|
||||
/-- Collect axioms for `c` and return its sorted axiom list from the cache. -/
|
||||
private def collectAndGet
|
||||
(extFind? : Environment → Name → Option (Array Name))
|
||||
(c : Name) : M (Array Name) := do
|
||||
collect extFind? c
|
||||
let some axs := (← get).seen.find? c | panic! s!"collectAndGet: '{c}' not in seen after collect"
|
||||
return axs
|
||||
|
||||
end CollectAxioms
|
||||
|
||||
def collectAxioms [Monad m] [MonadEnv m] (constName : Name) : m (Array Name) := do
|
||||
/--
|
||||
Extension state holding imported module entries for efficient lookup of
|
||||
pre-computed axiom data.
|
||||
|
||||
We use `registerPersistentEnvExtension` with manual lookup instead of `MapDeclarationExtension`
|
||||
because `exportEntriesFnEx` needs to call `collect`, which needs the extension's `find?`, but
|
||||
`exportEntriesFnEx` is defined inside the `builtin_initialize` that creates the extension and
|
||||
thus cannot reference it. This state replicates `MapDeclarationExtension.find?`'s per-module
|
||||
binary search without requiring the extension object.
|
||||
-/
|
||||
private structure ExportedAxiomsState where
|
||||
importedModuleEntries : Array (Array (Name × Array Name)) := #[]
|
||||
|
||||
instance : Inhabited ExportedAxiomsState := ⟨{}⟩
|
||||
|
||||
/-- Look up pre-computed axioms for an imported declaration. -/
|
||||
private def ExportedAxiomsState.find? (s : ExportedAxiomsState) (env : Environment)
|
||||
(c : Name) : Option (Array Name) :=
|
||||
match env.getModuleIdxFor? c with
|
||||
| some modIdx =>
|
||||
if h : modIdx.toNat < s.importedModuleEntries.size then
|
||||
match s.importedModuleEntries[modIdx].binSearch (c, #[]) (fun a b => Name.quickLt a.1 b.1) with
|
||||
| some entry => some entry.2
|
||||
| none => none
|
||||
else none
|
||||
| none => none
|
||||
|
||||
/--
|
||||
Environment extension that records axiom dependencies for all declarations in a module.
|
||||
Entries are computed once by `beforeExportFn` when the olean is serialized, not during
|
||||
elaboration. During elaboration, `collectAxioms` walks bodies directly. Downstream modules
|
||||
look up pre-computed entries for imported declarations, so axiom collection never crosses
|
||||
module boundaries.
|
||||
-/
|
||||
private builtin_initialize exportedAxiomsExt :
|
||||
PersistentEnvExtension (Name × Array Name) (Name × Array Name) ExportedAxiomsState ←
|
||||
registerPersistentEnvExtension {
|
||||
mkInitial := pure {}
|
||||
addImportedFn := fun importedEntries => pure { importedModuleEntries := importedEntries }
|
||||
addEntryFn := fun s _ => s
|
||||
exportEntriesFnEx := fun env s =>
|
||||
let exportedEnv := env.setExporting true
|
||||
let privateEnv := env.setExporting false
|
||||
-- Collect current-module declarations visible in the exported view.
|
||||
-- By pre-computing axiom data for every exported declaration, downstream modules can
|
||||
-- look up any imported declaration without walking its body, keeping collection
|
||||
-- module-local.
|
||||
let allNames := env.checked.get.constants.foldStage2
|
||||
(fun names name _ =>
|
||||
if (exportedEnv.find? name).isSome then names.push name
|
||||
else names) #[]
|
||||
-- Compute axioms within a shared state (for caching across declarations).
|
||||
-- Use `privateEnv` so that `collect` can see all constant bodies.
|
||||
let entries := CollectAxioms.runM privateEnv do
|
||||
allNames.mapM fun name =>
|
||||
return (name, ← CollectAxioms.collectAndGet s.find? name)
|
||||
-- Sort by name for binary search at import time.
|
||||
let entries := entries.qsort fun a b => Name.quickLt a.1 b.1
|
||||
.uniform entries
|
||||
asyncMode := .mainOnly
|
||||
}
|
||||
|
||||
/-- Collect all axioms transitively used by a constant. -/
|
||||
public def collectAxioms [Monad m] [MonadEnv m] (constName : Name) : m (Array Name) := do
|
||||
let env ← getEnv
|
||||
let (_, s) := ((CollectAxioms.collect constName).run env).run {}
|
||||
pure s.axioms
|
||||
let privateEnv := env.setExporting false
|
||||
let s := exportedAxiomsExt.getState (asyncMode := .mainOnly) env
|
||||
return CollectAxioms.runM privateEnv do
|
||||
CollectAxioms.collectAndGet s.find? constName
|
||||
|
||||
end Lean
|
||||
|
||||
@@ -114,6 +114,7 @@ where
|
||||
false
|
||||
|
||||
/-- Determine if tracing is available for a given class, checking ancestor classes if appropriate. -/
|
||||
@[inline]
|
||||
def isTracingEnabledFor (cls : Name) : m Bool := do
|
||||
return checkTraceOption (← MonadTrace.getInheritedTraceOptions) (← getOptions) cls
|
||||
|
||||
|
||||
@@ -697,7 +697,7 @@ After leaving the loop, the cursor's prefix is `xs` and the suffix is empty.
|
||||
During the induction step, the invariant holds for a suffix with head element `x`.
|
||||
After running the loop body, the invariant then holds after shifting `x` to the prefix.
|
||||
-/
|
||||
@[mvcgen_invariant_type]
|
||||
@[spec_invariant_type]
|
||||
abbrev Invariant {α : Type u₁} (xs : List α) (β : Type u₂) (ps : PostShape.{max u₁ u₂}) :=
|
||||
PostCond (List.Cursor xs × β) ps
|
||||
|
||||
@@ -2027,7 +2027,7 @@ A loop invariant is a `PostCond` that takes as parameters
|
||||
* A state tuple of type `β`, which will be a nesting of `MProd`s representing the elaboration of
|
||||
`let mut` variables and early return.
|
||||
-/
|
||||
@[mvcgen_invariant_type]
|
||||
@[spec_invariant_type]
|
||||
abbrev StringInvariant (s : String) (β : Type u) (ps : PostShape.{u}) :=
|
||||
PostCond (s.Pos × β) ps
|
||||
|
||||
@@ -2112,7 +2112,7 @@ A loop invariant is a `PostCond` that takes as parameters
|
||||
* A state tuple of type `β`, which will be a nesting of `MProd`s representing the elaboration of
|
||||
`let mut` variables and early return.
|
||||
-/
|
||||
@[mvcgen_invariant_type]
|
||||
@[spec_invariant_type]
|
||||
abbrev StringSliceInvariant (s : String.Slice) (β : Type u) (ps : PostShape.{u}) :=
|
||||
PostCond (s.Pos × β) ps
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ public import Std.Internal.Http.Data.Status
|
||||
public import Std.Internal.Http.Data.Chunk
|
||||
public import Std.Internal.Http.Data.Headers
|
||||
public import Std.Internal.Http.Data.URI
|
||||
public import Std.Internal.Http.Data.Body
|
||||
|
||||
/-!
|
||||
# HTTP Data Types
|
||||
|
||||
24
src/Std/Internal/Http/Data/Body.lean
Normal file
24
src/Std/Internal/Http/Data/Body.lean
Normal file
@@ -0,0 +1,24 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data.Body.Basic
|
||||
public import Std.Internal.Http.Data.Body.Length
|
||||
public import Std.Internal.Http.Data.Body.Any
|
||||
public import Std.Internal.Http.Data.Body.Stream
|
||||
public import Std.Internal.Http.Data.Body.Empty
|
||||
public import Std.Internal.Http.Data.Body.Full
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body
|
||||
|
||||
This module re-exports all HTTP body types: `Body.Empty`, `Body.Full`, `Body.Stream`,
|
||||
`Body.Any`, and `Body.Length`, along with the `Http.Body` typeclass and conversion
|
||||
utilities (`ToByteArray`, `FromByteArray`).
|
||||
-/
|
||||
83
src/Std/Internal/Http/Data/Body/Any.lean
Normal file
83
src/Std/Internal/Http/Data/Body/Any.lean
Normal file
@@ -0,0 +1,83 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data.Body.Basic
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body.Any
|
||||
|
||||
A type-erased body backed by closures. Implements `Http.Body` and can be constructed from any
|
||||
type that also implements `Http.Body`. Used as the default handler response body type.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Body
|
||||
open Std Internal IO Async
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
A type-erased body handle. Operations are stored as closures, making it open to any body type
|
||||
that implements `Http.Body`.
|
||||
-/
|
||||
structure Any where
|
||||
|
||||
/--
|
||||
Receives the next body chunk. Returns `none` at end-of-stream.
|
||||
-/
|
||||
recv : Async (Option Chunk)
|
||||
|
||||
/--
|
||||
Closes the body stream.
|
||||
-/
|
||||
close : Async Unit
|
||||
|
||||
/--
|
||||
Returns `true` when the body stream is closed.
|
||||
-/
|
||||
isClosed : Async Bool
|
||||
|
||||
/--
|
||||
Selector that resolves when a chunk is available or EOF is reached.
|
||||
-/
|
||||
recvSelector : Selector (Option Chunk)
|
||||
|
||||
/--
|
||||
Returns the declared size.
|
||||
-/
|
||||
getKnownSize : Async (Option Body.Length)
|
||||
|
||||
/--
|
||||
Sets the size of the body.
|
||||
-/
|
||||
setKnownSize : Option Body.Length → Async Unit
|
||||
namespace Any
|
||||
|
||||
/--
|
||||
Erases a body of any `Http.Body` instance into a `Body.Any`.
|
||||
-/
|
||||
def ofBody [Http.Body α] (body : α) : Any where
|
||||
recv := Http.Body.recv body
|
||||
close := Http.Body.close body
|
||||
isClosed := Http.Body.isClosed body
|
||||
recvSelector := Http.Body.recvSelector body
|
||||
getKnownSize := Http.Body.getKnownSize body
|
||||
setKnownSize := Http.Body.setKnownSize body
|
||||
|
||||
end Any
|
||||
|
||||
instance : Http.Body Any where
|
||||
recv := Any.recv
|
||||
close := Any.close
|
||||
isClosed := Any.isClosed
|
||||
recvSelector := Any.recvSelector
|
||||
getKnownSize := Any.getKnownSize
|
||||
setKnownSize := Any.setKnownSize
|
||||
|
||||
end Std.Http.Body
|
||||
102
src/Std/Internal/Http/Data/Body/Basic.lean
Normal file
102
src/Std/Internal/Http/Data/Body/Basic.lean
Normal file
@@ -0,0 +1,102 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Async
|
||||
public import Std.Internal.Async.ContextAsync
|
||||
public import Std.Internal.Http.Data.Chunk
|
||||
public import Std.Internal.Http.Data.Headers
|
||||
public import Std.Internal.Http.Data.Body.Length
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body.Basic
|
||||
|
||||
This module defines the `Body` typeclass for HTTP body streams, and shared conversion types
|
||||
`ToByteArray` and `FromByteArray` used for encoding and decoding body content.
|
||||
-/
|
||||
|
||||
namespace Std.Http
|
||||
open Std Internal IO Async
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Typeclass for values that can be read as HTTP body streams.
|
||||
-/
|
||||
class Body (α : Type) where
|
||||
/--
|
||||
Receives the next body chunk. Returns `none` at end-of-stream.
|
||||
-/
|
||||
recv : α → Async (Option Chunk)
|
||||
|
||||
/--
|
||||
Closes the body stream.
|
||||
-/
|
||||
close : α → Async Unit
|
||||
|
||||
/--
|
||||
Returns `true` when the body stream is closed.
|
||||
-/
|
||||
isClosed : α → Async Bool
|
||||
|
||||
/--
|
||||
Selector that resolves when a chunk is available or EOF is reached.
|
||||
-/
|
||||
recvSelector : α → Selector (Option Chunk)
|
||||
|
||||
/--
|
||||
Gets the declared size of the body.
|
||||
-/
|
||||
getKnownSize : α → Async (Option Body.Length)
|
||||
|
||||
/--
|
||||
Sets the declared size of a body.
|
||||
-/
|
||||
setKnownSize : α → Option Body.Length → Async Unit
|
||||
end Std.Http
|
||||
|
||||
namespace Std.Http.Body
|
||||
|
||||
/--
|
||||
Typeclass for types that can be converted to a `ByteArray`.
|
||||
-/
|
||||
class ToByteArray (α : Type) where
|
||||
|
||||
/--
|
||||
Transforms into a `ByteArray`.
|
||||
-/
|
||||
toByteArray : α → ByteArray
|
||||
|
||||
instance : ToByteArray ByteArray where
|
||||
toByteArray := id
|
||||
|
||||
instance : ToByteArray String where
|
||||
toByteArray := String.toUTF8
|
||||
|
||||
/--
|
||||
Typeclass for types that can be decoded from a `ByteArray`. The conversion may fail with an error
|
||||
message if the bytes are not valid for the target type.
|
||||
-/
|
||||
class FromByteArray (α : Type) where
|
||||
|
||||
/--
|
||||
Attempts to decode a `ByteArray` into the target type, returning an error message on failure.
|
||||
-/
|
||||
fromByteArray : ByteArray → Except String α
|
||||
|
||||
instance : FromByteArray ByteArray where
|
||||
fromByteArray := .ok
|
||||
|
||||
instance : FromByteArray String where
|
||||
fromByteArray bs :=
|
||||
match String.fromUTF8? bs with
|
||||
| some s => .ok s
|
||||
| none => .error "invalid UTF-8 encoding"
|
||||
|
||||
end Std.Http.Body
|
||||
116
src/Std/Internal/Http/Data/Body/Empty.lean
Normal file
116
src/Std/Internal/Http/Data/Body/Empty.lean
Normal file
@@ -0,0 +1,116 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data.Request
|
||||
public import Std.Internal.Http.Data.Response
|
||||
public import Std.Internal.Http.Data.Body.Any
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body.Empty
|
||||
|
||||
Represents an always-empty, already-closed body handle.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Body
|
||||
open Std Internal IO Async
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
An empty body handle.
|
||||
-/
|
||||
structure Empty where
|
||||
deriving Inhabited, BEq
|
||||
|
||||
namespace Empty
|
||||
|
||||
/--
|
||||
Receives from an empty body, always returning end-of-stream.
|
||||
-/
|
||||
@[inline]
|
||||
def recv (_ : Empty) : Async (Option Chunk) :=
|
||||
pure none
|
||||
|
||||
/--
|
||||
Closes an empty body (no-op).
|
||||
-/
|
||||
@[inline]
|
||||
def close (_ : Empty) : Async Unit :=
|
||||
pure ()
|
||||
|
||||
/--
|
||||
Empty bodies are always closed for reading.
|
||||
-/
|
||||
@[inline]
|
||||
def isClosed (_ : Empty) : Async Bool :=
|
||||
pure true
|
||||
|
||||
/--
|
||||
Selector that immediately resolves with end-of-stream for an empty body.
|
||||
-/
|
||||
@[inline]
|
||||
def recvSelector (_ : Empty) : Selector (Option Chunk) where
|
||||
tryFn := pure (some none)
|
||||
registerFn waiter := do
|
||||
let lose := pure ()
|
||||
let win promise := do
|
||||
promise.resolve (.ok none)
|
||||
waiter.race lose win
|
||||
unregisterFn := pure ()
|
||||
|
||||
end Empty
|
||||
|
||||
instance : Http.Body Empty where
|
||||
recv := Empty.recv
|
||||
close := Empty.close
|
||||
isClosed := Empty.isClosed
|
||||
recvSelector := Empty.recvSelector
|
||||
getKnownSize _ := pure (some <| .fixed 0)
|
||||
setKnownSize _ _ := pure ()
|
||||
|
||||
|
||||
instance : Coe Empty Any := ⟨Any.ofBody⟩
|
||||
|
||||
instance : Coe (Response Empty) (Response Any) where
|
||||
coe f := { f with }
|
||||
|
||||
instance : Coe (ContextAsync (Response Empty)) (ContextAsync (Response Any)) where
|
||||
coe action := do
|
||||
let response ← action
|
||||
pure (response : Response Any)
|
||||
|
||||
instance : Coe (Async (Response Empty)) (ContextAsync (Response Any)) where
|
||||
coe action := do
|
||||
let response ← action
|
||||
pure (response : Response Any)
|
||||
|
||||
end Body
|
||||
|
||||
namespace Request.Builder
|
||||
open Internal.IO.Async
|
||||
|
||||
/--
|
||||
Builds a request with no body.
|
||||
-/
|
||||
def empty (builder : Builder) : Async (Request Body.Empty) :=
|
||||
pure <| builder.body {}
|
||||
|
||||
end Request.Builder
|
||||
|
||||
namespace Response.Builder
|
||||
open Internal.IO.Async
|
||||
|
||||
/--
|
||||
Builds a response with no body.
|
||||
-/
|
||||
def empty (builder : Builder) : Async (Response Body.Empty) :=
|
||||
pure <| builder.body {}
|
||||
|
||||
end Response.Builder
|
||||
232
src/Std/Internal/Http/Data/Body/Full.lean
Normal file
232
src/Std/Internal/Http/Data/Body/Full.lean
Normal file
@@ -0,0 +1,232 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Sync
|
||||
public import Std.Internal.Http.Data.Request
|
||||
public import Std.Internal.Http.Data.Response
|
||||
public import Std.Internal.Http.Data.Body.Any
|
||||
public import Init.Data.ByteArray
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body.Full
|
||||
|
||||
A body backed by a fixed `ByteArray` held in a `Mutex`.
|
||||
|
||||
The byte array is consumed at most once: the first call to `recv` atomically takes the data
|
||||
and returns it as a single chunk; subsequent calls return `none` (end-of-stream).
|
||||
Closing the body discards any unconsumed data.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Body
|
||||
open Std Internal IO Async
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
A body backed by a fixed, mutex-protected `ByteArray`.
|
||||
|
||||
The data is consumed on the first read. Once consumed (or explicitly closed), the body
|
||||
behaves as a closed, empty channel.
|
||||
-/
|
||||
structure Full where
|
||||
private mk ::
|
||||
private state : Mutex (Option ByteArray)
|
||||
deriving Nonempty
|
||||
|
||||
namespace Full
|
||||
|
||||
private def takeChunk : AtomicT (Option ByteArray) Async (Option Chunk) := do
|
||||
match ← get with
|
||||
| none =>
|
||||
pure none
|
||||
| some data =>
|
||||
set (none : Option ByteArray)
|
||||
if data.isEmpty then
|
||||
pure none
|
||||
else
|
||||
pure (some (Chunk.ofByteArray data))
|
||||
|
||||
/--
|
||||
Creates a `Full` body from a `ByteArray`.
|
||||
-/
|
||||
def ofByteArray (data : ByteArray) : Async Full := do
|
||||
let state ← Mutex.new (some data)
|
||||
return { state }
|
||||
|
||||
/--
|
||||
Creates a `Full` body from a `String`.
|
||||
-/
|
||||
def ofString (data : String) : Async Full := do
|
||||
let state ← Mutex.new (some data.toUTF8)
|
||||
return { state }
|
||||
|
||||
/--
|
||||
Receives the body data. Returns the full byte array on the first call as a single chunk,
|
||||
then `none` on all subsequent calls.
|
||||
-/
|
||||
def recv (full : Full) : Async (Option Chunk) :=
|
||||
full.state.atomically do
|
||||
takeChunk
|
||||
|
||||
/--
|
||||
Closes the body, discarding any unconsumed data.
|
||||
-/
|
||||
def close (full : Full) : Async Unit :=
|
||||
full.state.atomically do
|
||||
set (none : Option ByteArray)
|
||||
|
||||
/--
|
||||
Returns `true` when the data has been consumed or the body has been closed.
|
||||
-/
|
||||
def isClosed (full : Full) : Async Bool :=
|
||||
full.state.atomically do
|
||||
return (← get).isNone
|
||||
|
||||
/--
|
||||
Returns the known size of the remaining data.
|
||||
Returns `some (.fixed n)` with the current byte count, or `some (.fixed 0)` if the body has
|
||||
already been consumed or closed.
|
||||
-/
|
||||
def getKnownSize (full : Full) : Async (Option Body.Length) :=
|
||||
full.state.atomically do
|
||||
match ← get with
|
||||
| none => pure (some (.fixed 0))
|
||||
| some data => pure (some (.fixed data.size))
|
||||
|
||||
/--
Selector that immediately resolves to the remaining chunk (or EOF).
-/
def recvSelector (full : Full) : Selector (Option Chunk) where
  tryFn := do
    -- A `Full` body is always ready: either data is still present or we are at EOF,
    -- so `tryFn` always produces a value.
    let chunk ← full.state.atomically do
      takeChunk
    pure (some chunk)

  registerFn waiter := do
    full.state.atomically do
      -- Losing the race needs no cleanup: the data is only consumed by the winner.
      let lose := pure ()

      -- The winner consumes the chunk while still holding the mutex, so the take and
      -- the resolution are atomic with respect to other receivers.
      let win promise := do
        let chunk ← takeChunk
        promise.resolve (.ok chunk)

      waiter.race lose win

  -- Nothing is retained across registration, so there is nothing to unregister.
  unregisterFn := pure ()
|
||||
|
||||
end Full
|
||||
|
||||
-- `Full` implements the `Http.Body` interface by delegating to the definitions above.
instance : Http.Body Full where
  recv := Full.recv
  close := Full.close
  isClosed := Full.isClosed
  recvSelector := Full.recvSelector
  getKnownSize := Full.getKnownSize
  -- The size of a `Full` body is determined by its data at creation time,
  -- so externally supplied size metadata is ignored.
  setKnownSize _ _ := pure ()
|
||||
|
||||
instance : Coe Full Any := ⟨Any.ofBody⟩
|
||||
|
||||
instance : Coe (Response Full) (Response Any) where
|
||||
coe f := { f with }
|
||||
|
||||
instance : Coe (ContextAsync (Response Full)) (ContextAsync (Response Any)) where
|
||||
coe action := do
|
||||
let response ← action
|
||||
pure (response : Response Any)
|
||||
|
||||
instance : Coe (Async (Response Full)) (ContextAsync (Response Any)) where
|
||||
coe action := do
|
||||
let response ← action
|
||||
pure (response : Response Any)
|
||||
|
||||
end Body
|
||||
|
||||
namespace Request.Builder
|
||||
|
||||
open Internal.IO.Async
|
||||
|
||||
/--
Builds a request body from raw bytes without setting any headers.
Use `bytes` instead if you want `Content-Type: application/octet-stream` set automatically.
-/
def fromBytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := do
  let body ← Body.Full.ofByteArray content
  return builder.body body
|
||||
|
||||
/--
|
||||
Builds a request with a binary body.
|
||||
Sets `Content-Type: application/octet-stream`.
|
||||
Use `fromBytes` instead if you need to set a different `Content-Type` or none at all.
|
||||
-/
|
||||
def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream")) content
|
||||
|
||||
/--
|
||||
Builds a request with a text body.
|
||||
Sets `Content-Type: text/plain; charset=utf-8`.
|
||||
-/
|
||||
def text (builder : Builder) (content : String) : Async (Request Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8")) content.toUTF8
|
||||
|
||||
/--
|
||||
Builds a request with a JSON body.
|
||||
Sets `Content-Type: application/json`.
|
||||
-/
|
||||
def json (builder : Builder) (content : String) : Async (Request Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/json")) content.toUTF8
|
||||
|
||||
/--
|
||||
Builds a request with an HTML body.
|
||||
Sets `Content-Type: text/html; charset=utf-8`.
|
||||
-/
|
||||
def html (builder : Builder) (content : String) : Async (Request Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8")) content.toUTF8
|
||||
|
||||
end Request.Builder
|
||||
|
||||
namespace Response.Builder
|
||||
open Internal.IO.Async
|
||||
|
||||
/--
|
||||
Builds a response body from raw bytes without setting any headers.
|
||||
Use `bytes` instead if you want `Content-Type: application/octet-stream` set automatically.
|
||||
-/
|
||||
def fromBytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := do
|
||||
return builder.body (← Body.Full.ofByteArray content)
|
||||
|
||||
/--
|
||||
Builds a response with a binary body.
|
||||
Sets `Content-Type: application/octet-stream`.
|
||||
Use `fromBytes` instead if you need to set a different `Content-Type` or none at all.
|
||||
-/
|
||||
def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream")) content
|
||||
|
||||
/--
|
||||
Builds a response with a text body.
|
||||
Sets `Content-Type: text/plain; charset=utf-8`.
|
||||
-/
|
||||
def text (builder : Builder) (content : String) : Async (Response Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8")) content.toUTF8
|
||||
|
||||
/--
|
||||
Builds a response with a JSON body.
|
||||
Sets `Content-Type: application/json`.
|
||||
-/
|
||||
def json (builder : Builder) (content : String) : Async (Response Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/json")) content.toUTF8
|
||||
|
||||
/--
|
||||
Builds a response with an HTML body.
|
||||
Sets `Content-Type: text/html; charset=utf-8`.
|
||||
-/
|
||||
def html (builder : Builder) (content : String) : Async (Response Body.Full) :=
|
||||
fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8")) content.toUTF8
|
||||
|
||||
end Response.Builder
|
||||
60
src/Std/Internal/Http/Data/Body/Length.lean
Normal file
60
src/Std/Internal/Http/Data/Body/Length.lean
Normal file
@@ -0,0 +1,60 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.Repr
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body.Length
|
||||
|
||||
This module defines the `Length` type, that represents the Content-Length or Transfer-Encoding
|
||||
of an HTTP request or response.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Body
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Size of the body of a response or request.
|
||||
-/
|
||||
inductive Length
|
||||
/--
|
||||
Indicates that the HTTP message body uses **chunked transfer encoding**.
|
||||
-/
|
||||
| chunked
|
||||
|
||||
/--
|
||||
Indicates that the HTTP message body has a **fixed, known length**, as specified by the
|
||||
`Content-Length` header.
|
||||
-/
|
||||
| fixed (n : Nat)
|
||||
deriving Repr, BEq
|
||||
|
||||
namespace Length
|
||||
|
||||
/--
Checks if the `Length` is chunked.
-/
@[inline]
def isChunked : Length → Bool
  | .fixed _ => false
  | .chunked => true
|
||||
|
||||
/--
Checks if the `Length` is a fixed size.
-/
@[inline]
def isFixed : Length → Bool
  | .chunked => false
  | .fixed _ => true
|
||||
|
||||
end Length
|
||||
|
||||
end Std.Http.Body
|
||||
650
src/Std/Internal/Http/Data/Body/Stream.lean
Normal file
650
src/Std/Internal/Http/Data/Body/Stream.lean
Normal file
@@ -0,0 +1,650 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Sync
|
||||
public import Std.Internal.Async
|
||||
public import Std.Internal.Http.Data.Request
|
||||
public import Std.Internal.Http.Data.Response
|
||||
public import Std.Internal.Http.Data.Chunk
|
||||
public import Std.Internal.Http.Data.Body.Basic
|
||||
public import Std.Internal.Http.Data.Body.Any
|
||||
public import Init.Data.ByteArray
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Body.Stream
|
||||
|
||||
This module defines a zero-buffer rendezvous body channel (`Body.Stream`) that supports
|
||||
both sending and receiving chunks.
|
||||
|
||||
There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender.
|
||||
At most one blocked producer and one blocked consumer are supported.
|
||||
-/
|
||||
|
||||
namespace Std.Http
|
||||
|
||||
namespace Body
|
||||
|
||||
open Std Internal IO Async
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
namespace Channel
|
||||
|
||||
open Internal.IO.Async in
|
||||
private inductive Consumer where
|
||||
| normal (promise : IO.Promise (Option Chunk))
|
||||
| select (finished : Waiter (Option Chunk))
|
||||
|
||||
private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := do
|
||||
match c with
|
||||
| .normal promise =>
|
||||
promise.resolve x
|
||||
return true
|
||||
| .select waiter =>
|
||||
let lose := return false
|
||||
let win promise := do
|
||||
promise.resolve (.ok x)
|
||||
return true
|
||||
waiter.race lose win
|
||||
|
||||
private structure Producer where
|
||||
chunk : Chunk
|
||||
|
||||
/--
|
||||
Resolved with `true` when consumed by a receiver, `false` when the channel closes.
|
||||
-/
|
||||
done : IO.Promise Bool
|
||||
|
||||
open Internal.IO.Async in
|
||||
private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Bool := do
|
||||
let lose := return false
|
||||
let win promise := do
|
||||
promise.resolve (.ok x)
|
||||
return true
|
||||
waiter.race lose win
|
||||
|
||||
private structure State where
|
||||
/--
|
||||
A single blocked producer waiting for a receiver.
|
||||
-/
|
||||
pendingProducer : Option Producer
|
||||
|
||||
/--
|
||||
A single blocked consumer waiting for a producer.
|
||||
-/
|
||||
pendingConsumer : Option Consumer
|
||||
|
||||
/--
|
||||
A waiter for `Stream.interestSelector`.
|
||||
-/
|
||||
interestWaiter : Option (Internal.IO.Async.Waiter Bool)
|
||||
|
||||
/--
|
||||
Whether the channel is closed.
|
||||
-/
|
||||
closed : Bool
|
||||
|
||||
/--
|
||||
Known size of the stream if available.
|
||||
-/
|
||||
knownSize : Option Body.Length
|
||||
|
||||
/--
|
||||
Buffered partial chunk data accumulated from `Stream.send ... (incomplete := true)`.
|
||||
These partial pieces are collapsed and emitted as a single chunk on the next complete send.
|
||||
-/
|
||||
pendingIncompleteChunk : Option Chunk := none
|
||||
deriving Nonempty
|
||||
|
||||
end Channel
|
||||
|
||||
/--
|
||||
A zero-buffer rendezvous body channel that supports both sending and receiving chunks.
|
||||
-/
|
||||
structure Stream where
|
||||
private mk ::
|
||||
private state : Mutex Channel.State
|
||||
deriving Nonempty, TypeName
|
||||
|
||||
/--
Creates a rendezvous body stream.
-/
def mkStream : Async Stream := do
  -- Fresh channel: no parked parties, open, and no size metadata yet.
  let initial : Channel.State := {
    pendingProducer := none
    pendingConsumer := none
    interestWaiter := none
    closed := false
    knownSize := none
  }
  return { state := (← Mutex.new initial) }
|
||||
|
||||
namespace Channel
|
||||
|
||||
-- Subtracts a delivered chunk's byte count from a fixed known size; chunked or unknown
-- sizes pass through unchanged. `Nat` subtraction truncates at zero.
private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length :=
  if let some (.fixed remaining) := knownSize then
    some (.fixed (remaining - chunk.data.size))
  else
    knownSize
|
||||
|
||||
private def pruneFinishedWaiters [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
|
||||
AtomicT State m Unit := do
|
||||
let st ← get
|
||||
|
||||
let pendingConsumer ←
|
||||
match st.pendingConsumer with
|
||||
| some (.select waiter) =>
|
||||
if ← waiter.checkFinished then
|
||||
pure none
|
||||
else
|
||||
pure st.pendingConsumer
|
||||
| _ =>
|
||||
pure st.pendingConsumer
|
||||
|
||||
let interestWaiter ←
|
||||
match st.interestWaiter with
|
||||
| some waiter =>
|
||||
if ← waiter.checkFinished then
|
||||
pure none
|
||||
else
|
||||
pure st.interestWaiter
|
||||
| none =>
|
||||
pure none
|
||||
|
||||
set { st with pendingConsumer, interestWaiter }
|
||||
|
||||
-- Wakes a registered `interestSelector` waiter (if any) with `true` and clears the slot.
-- The race result is discarded: if the waiter already completed elsewhere there is
-- nothing further to do.
private def signalInterest [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  if let some waiter := st.interestWaiter then
    discard <| resolveInterestWaiter waiter true
    set { st with interestWaiter := none }
|
||||
|
||||
-- A receive can complete without blocking when either a producer is parked
-- or the channel has been closed (in which case `recv` yields EOF).
private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  let st ← get
  return st.closed || st.pendingProducer.isSome
|
||||
|
||||
-- `true` exactly when a consumer is currently parked waiting for data.
private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  return (← get).pendingConsumer.isSome
|
||||
|
||||
-- If a producer is parked, consume its chunk: clear the producer slot, account for the
-- consumed bytes in `knownSize`, and acknowledge the producer (resolving `done` with
-- `true` unblocks its `send`). The state is updated before resolving so the producer
-- never observes itself still parked.
private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m (Option Chunk) := do
  let st ← get
  if let some producer := st.pendingProducer then
    set {
      st with
      pendingProducer := none
      knownSize := decreaseKnownSize st.knownSize producer.chunk
    }
    discard <| producer.done.resolve true
    return some producer.chunk
  else
    return none
|
||||
|
||||
-- Closes the channel. Idempotent: closing an already-closed channel is a no-op.
private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  if st.closed then
    return ()

  -- Wake every parked party with the "channel closed" outcome:
  -- a blocked consumer observes EOF (`none`), ...
  if let some consumer := st.pendingConsumer then
    discard <| consumer.resolve none

  -- ... an interest selector learns no consumer will arrive (`false`), ...
  if let some waiter := st.interestWaiter then
    discard <| resolveInterestWaiter waiter false

  -- ... and a blocked producer learns its chunk was never delivered (`false`).
  if let some producer := st.pendingProducer then
    discard <| producer.done.resolve false

  -- Clear every slot (including buffered incomplete data) and mark closed.
  set {
    st with
    pendingProducer := none
    pendingConsumer := none
    interestWaiter := none
    pendingIncompleteChunk := none
    closed := true
  }
|
||||
|
||||
end Channel
|
||||
|
||||
namespace Stream
|
||||
|
||||
/--
|
||||
Attempts to receive a chunk from the channel without blocking.
|
||||
Returns `some chunk` only when a producer is already waiting.
|
||||
-/
|
||||
def tryRecv (stream : Stream) : Async (Option Chunk) :=
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
Channel.tryRecv'
|
||||
|
||||
private def recv' (stream : Stream) : BaseIO (AsyncTask (Option Chunk)) := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
|
||||
if let some chunk ← Channel.tryRecv' then
|
||||
return AsyncTask.pure (some chunk)
|
||||
|
||||
let st ← get
|
||||
if st.closed then
|
||||
return AsyncTask.pure none
|
||||
|
||||
if st.pendingConsumer.isSome then
|
||||
return Task.pure (.error (IO.Error.userError "only one blocked consumer is allowed"))
|
||||
|
||||
let promise ← IO.Promise.new
|
||||
set { st with pendingConsumer := some (.normal promise) }
|
||||
Channel.signalInterest
|
||||
return promise.result?.map (sync := true) fun
|
||||
| none => .error (IO.Error.userError "the promise linked to the consumer was dropped")
|
||||
| some res => .ok res
|
||||
|
||||
/--
|
||||
Receives a chunk from the channel. Blocks until a producer sends one.
|
||||
Returns `none` if the channel is closed and no producer is waiting.
|
||||
-/
|
||||
def recv (stream : Stream) : Async (Option Chunk) := do
|
||||
Async.ofAsyncTask (← recv' stream)
|
||||
|
||||
/--
|
||||
Closes the channel.
|
||||
-/
|
||||
def close (stream : Stream) : Async Unit :=
|
||||
stream.state.atomically do
|
||||
Channel.close'
|
||||
|
||||
/--
Checks whether the channel is closed.
-/
@[always_inline, inline]
def isClosed (stream : Stream) : Async Bool :=
  stream.state.atomically do
    let st ← get
    return st.closed
|
||||
|
||||
/--
Gets the known size if available.
-/
@[always_inline, inline]
def getKnownSize (stream : Stream) : Async (Option Body.Length) :=
  stream.state.atomically do
    let st ← get
    return st.knownSize
|
||||
|
||||
/--
Sets known size metadata.
-/
@[always_inline, inline]
def setKnownSize (stream : Stream) (size : Option Body.Length) : Async Unit :=
  stream.state.atomically do
    let st ← get
    set { st with knownSize := size }
|
||||
|
||||
open Internal.IO.Async in
|
||||
|
||||
/--
|
||||
Creates a selector that resolves when a producer is waiting (or the channel closes).
|
||||
-/
|
||||
def recvSelector (stream : Stream) : Selector (Option Chunk) where
|
||||
tryFn := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
if ← Channel.recvReady' then
|
||||
return some (← Channel.tryRecv')
|
||||
else
|
||||
return none
|
||||
|
||||
registerFn waiter := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
if ← Channel.recvReady' then
|
||||
let lose := return ()
|
||||
let win promise := do
|
||||
promise.resolve (.ok (← Channel.tryRecv'))
|
||||
waiter.race lose win
|
||||
else
|
||||
let st ← get
|
||||
if st.pendingConsumer.isSome then
|
||||
throw (.userError "only one blocked consumer is allowed")
|
||||
|
||||
set { st with pendingConsumer := some (.select waiter) }
|
||||
Channel.signalInterest
|
||||
|
||||
unregisterFn := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
|
||||
/--
|
||||
Iterates over chunks until the channel closes.
|
||||
-/
|
||||
@[inline]
|
||||
protected partial def forIn
|
||||
{β : Type} (stream : Stream) (acc : β)
|
||||
(step : Chunk → β → Async (ForInStep β)) : Async β := do
|
||||
|
||||
let rec @[specialize] loop (stream : Stream) (acc : β) : Async β := do
|
||||
if let some chunk ← stream.recv then
|
||||
match ← step chunk acc with
|
||||
| .done res => return res
|
||||
| .yield res => loop stream res
|
||||
else
|
||||
return acc
|
||||
|
||||
loop stream acc
|
||||
|
||||
/--
|
||||
Context-aware iteration over chunks until the channel closes.
|
||||
-/
|
||||
@[inline]
|
||||
protected partial def forIn'
|
||||
{β : Type} (stream : Stream) (acc : β)
|
||||
(step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do
|
||||
|
||||
let rec @[specialize] loop (stream : Stream) (acc : β) : ContextAsync β := do
|
||||
let data ← Selectable.one #[
|
||||
.case stream.recvSelector pure,
|
||||
.case (← ContextAsync.doneSelector) (fun _ => pure none),
|
||||
]
|
||||
|
||||
if let some chunk := data then
|
||||
match ← step chunk acc with
|
||||
| .done res => return res
|
||||
| .yield res => loop stream res
|
||||
else
|
||||
return acc
|
||||
|
||||
loop stream acc
|
||||
|
||||
/--
|
||||
Abstracts over how the next chunk is received, allowing `readAll` to work in both `Async`
|
||||
(no cancellation) and `ContextAsync` (races with cancellation via `doneSelector`).
|
||||
-/
|
||||
class NextChunk (m : Type → Type) where
|
||||
/--
|
||||
Receives the next chunk, stopping at EOF or (in `ContextAsync`) when the context is cancelled.
|
||||
-/
|
||||
nextChunk : Stream → m (Option Chunk)
|
||||
|
||||
instance : NextChunk Async where
|
||||
nextChunk := Stream.recv
|
||||
|
||||
instance : NextChunk ContextAsync where
|
||||
nextChunk stream := do
|
||||
Selectable.one #[
|
||||
.case stream.recvSelector pure,
|
||||
.case (← ContextAsync.doneSelector) (fun _ => pure none),
|
||||
]
|
||||
|
||||
/--
|
||||
Reads all remaining chunks and decodes them into `α`.
|
||||
|
||||
Works in both `Async` (reads until EOF, no cancellation) and `ContextAsync` (also stops if the
|
||||
context is cancelled).
|
||||
-/
|
||||
partial def readAll
|
||||
[FromByteArray α]
|
||||
[Monad m] [MonadExceptOf IO.Error m] [NextChunk m]
|
||||
(stream : Stream)
|
||||
(maximumSize : Option UInt64 := none) :
|
||||
m α := do
|
||||
|
||||
let rec loop (result : ByteArray) : m ByteArray := do
|
||||
match ← NextChunk.nextChunk stream with
|
||||
| none => return result
|
||||
| some chunk =>
|
||||
let result := result ++ chunk.data
|
||||
if let some max := maximumSize then
|
||||
if result.size.toUInt64 > max then
|
||||
throw (.userError s!"body exceeded maximum size of {max} bytes")
|
||||
loop result
|
||||
|
||||
let result ← loop ByteArray.empty
|
||||
|
||||
match FromByteArray.fromByteArray result with
|
||||
| .ok a => return a
|
||||
| .error msg => throw (.userError msg)
|
||||
|
||||
private def collapseForSend
|
||||
(stream : Stream)
|
||||
(chunk : Chunk)
|
||||
(incomplete : Bool) : BaseIO (Except IO.Error (Option Chunk)) := do
|
||||
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
let st ← get
|
||||
|
||||
if st.closed then
|
||||
return .error (.userError "channel closed")
|
||||
|
||||
let merged := match st.pendingIncompleteChunk with
|
||||
| some pending =>
|
||||
{
|
||||
data := pending.data ++ chunk.data
|
||||
extensions := if pending.extensions.isEmpty then chunk.extensions else pending.extensions
|
||||
}
|
||||
| none => chunk
|
||||
|
||||
if incomplete then
|
||||
set { st with pendingIncompleteChunk := some merged }
|
||||
return .ok none
|
||||
else
|
||||
set { st with pendingIncompleteChunk := none }
|
||||
return .ok (some merged)
|
||||
|
||||
/--
|
||||
Sends a chunk, retrying if a select-mode consumer races and loses. If no consumer is ready,
|
||||
installs the chunk as a pending producer and awaits acknowledgement from the receiver.
|
||||
-/
|
||||
private partial def send' (stream : Stream) (chunk : Chunk) : Async Unit := do
|
||||
let done ← IO.Promise.new
|
||||
let result : Except IO.Error (Option Bool) ← stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
let st ← get
|
||||
|
||||
if st.closed then
|
||||
return .error (IO.Error.userError "channel closed")
|
||||
|
||||
if let some consumer := st.pendingConsumer then
|
||||
let success ← consumer.resolve (some chunk)
|
||||
|
||||
if success then
|
||||
set {
|
||||
st with
|
||||
pendingConsumer := none
|
||||
knownSize := Channel.decreaseKnownSize st.knownSize chunk
|
||||
}
|
||||
return .ok (some true)
|
||||
else
|
||||
set { st with pendingConsumer := none }
|
||||
return .ok (some false)
|
||||
else if st.pendingProducer.isSome then
|
||||
return .error (IO.Error.userError "only one blocked producer is allowed")
|
||||
else
|
||||
set { st with pendingProducer := some { chunk, done } }
|
||||
return .ok none
|
||||
|
||||
match result with
|
||||
| .error err =>
|
||||
throw err
|
||||
| .ok (some true) =>
|
||||
return ()
|
||||
| .ok (some false) =>
|
||||
-- The select-mode consumer raced and lost; recurse to allocate a fresh `done` promise.
|
||||
send' stream chunk
|
||||
| .ok none =>
|
||||
match ← await done.result? with
|
||||
| some true => return ()
|
||||
| _ => throw (IO.Error.userError "channel closed")
|
||||
|
||||
/--
Sends a chunk.

If `incomplete := true`, the chunk is buffered and collapsed with subsequent chunks, and is not
delivered to the receiver yet.
If `incomplete := false`, any buffered incomplete pieces are collapsed with this chunk and the
single merged chunk is sent.
-/
def send (stream : Stream) (chunk : Chunk) (incomplete : Bool := false) : Async Unit := do
  match (← collapseForSend stream chunk incomplete) with
  | .error err => throw err
  | .ok none => pure ()
  | .ok (some toSend) =>
    -- Boolean `&&` instead of the propositional `∧`: the operands are plain `Bool`s,
    -- so the boolean connective avoids the detour through coercion and `Decidable`.
    -- Empty chunks carry no information, so they are dropped rather than delivered.
    if toSend.data.isEmpty && toSend.extensions.isEmpty then
      return ()

    send' stream toSend
|
||||
|
||||
/--
|
||||
Returns `true` when a consumer is currently blocked waiting for data.
|
||||
-/
|
||||
def hasInterest (stream : Stream) : Async Bool :=
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
Channel.hasInterest'
|
||||
|
||||
open Internal.IO.Async in
|
||||
/--
|
||||
Creates a selector that resolves when consumer interest is present.
|
||||
Returns `true` when a consumer is waiting, `false` when the channel closes first.
|
||||
-/
|
||||
def interestSelector (stream : Stream) : Selector Bool where
|
||||
tryFn := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
let st ← get
|
||||
if st.pendingConsumer.isSome then
|
||||
return some true
|
||||
else if st.closed then
|
||||
return some false
|
||||
else
|
||||
return none
|
||||
|
||||
registerFn waiter := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
let st ← get
|
||||
|
||||
if st.pendingConsumer.isSome then
|
||||
let lose := return ()
|
||||
let win promise := do
|
||||
promise.resolve (.ok true)
|
||||
waiter.race lose win
|
||||
else if st.closed then
|
||||
let lose := return ()
|
||||
let win promise := do
|
||||
promise.resolve (.ok false)
|
||||
waiter.race lose win
|
||||
else if st.interestWaiter.isSome then
|
||||
throw (.userError "only one blocked interest selector is allowed")
|
||||
else
|
||||
set { st with interestWaiter := some waiter }
|
||||
|
||||
unregisterFn := do
|
||||
stream.state.atomically do
|
||||
Channel.pruneFinishedWaiters
|
||||
|
||||
end Stream
|
||||
|
||||
/--
Creates a body from a producer function.
Returns the stream immediately and runs `gen` in a detached task.
The channel is always closed when `gen` returns or throws.
Errors from `gen` are not rethrown here; consumers observe end-of-stream via `recv = none`.
-/
def stream (gen : Stream → Async Unit) : Async Stream := do
  let s ← mkStream
  -- The producer runs detached; `finally` guarantees consumers are unblocked
  -- (via `close`) even when `gen` throws.
  background <| do
    try
      gen s
    finally
      s.close
  return s
|
||||
|
||||
/--
Creates a body from a fixed byte array.
-/
def fromBytes (content : ByteArray) : Async Stream := do
  stream fun s => do
    -- The whole payload is known upfront, so advertise its exact size.
    s.setKnownSize (some (.fixed content.size))
    -- Empty payloads send nothing; the stream simply closes.
    if content.size > 0 then
      s.send (Chunk.ofByteArray content)
|
||||
|
||||
/--
Creates an empty `Stream` body channel (already closed, no data).

Prefer `Body.Empty` when you need a concrete zero-cost type. Use this when the calling
context requires a `Stream` specifically.
-/
def empty : Async Stream := do
  let closedStream ← mkStream
  closedStream.setKnownSize (some (.fixed 0))
  closedStream.close
  return closedStream
|
||||
|
||||
instance : ForIn Async Stream Chunk where
|
||||
forIn := Stream.forIn
|
||||
|
||||
instance : ForIn ContextAsync Stream Chunk where
|
||||
forIn := Stream.forIn'
|
||||
|
||||
instance : Http.Body Stream where
|
||||
recv := Stream.recv
|
||||
close := Stream.close
|
||||
isClosed := Stream.isClosed
|
||||
recvSelector := Stream.recvSelector
|
||||
getKnownSize := Stream.getKnownSize
|
||||
setKnownSize := Stream.setKnownSize
|
||||
|
||||
instance : Coe Stream Any := ⟨Any.ofBody⟩
|
||||
|
||||
instance : Coe (Response Stream) (Response Any) where
|
||||
coe f := { f with }
|
||||
|
||||
instance : Coe (ContextAsync (Response Stream)) (ContextAsync (Response Any)) where
|
||||
coe action := do
|
||||
let response ← action
|
||||
pure (response : Response Any)
|
||||
|
||||
instance : Coe (Async (Response Stream)) (ContextAsync (Response Any)) where
|
||||
coe action := do
|
||||
let response ← action
|
||||
pure (response : Response Any)
|
||||
|
||||
end Body
|
||||
|
||||
namespace Request.Builder
|
||||
|
||||
open Internal.IO.Async
|
||||
|
||||
/--
|
||||
Builds a request with a streaming body generator.
|
||||
-/
|
||||
def stream
|
||||
(builder : Builder)
|
||||
(gen : Body.Stream → Async Unit) :
|
||||
Async (Request Body.Stream) := do
|
||||
let s ← Body.stream gen
|
||||
return Request.Builder.body builder s
|
||||
|
||||
end Request.Builder
|
||||
|
||||
namespace Response.Builder
|
||||
open Internal.IO.Async
|
||||
|
||||
/--
|
||||
Builds a response with a streaming body generator.
|
||||
-/
|
||||
def stream
|
||||
(builder : Builder)
|
||||
(gen : Body.Stream → Async Unit) :
|
||||
Async (Response Body.Stream) := do
|
||||
let s ← Body.stream gen
|
||||
return Response.Builder.body builder s
|
||||
|
||||
end Response.Builder
|
||||
@@ -124,12 +124,6 @@ def new : Builder := { }
|
||||
|
||||
namespace Builder
|
||||
|
||||
/--
|
||||
Creates a new HTTP request builder with the default head
|
||||
(method: GET, version: HTTP/1.1, target: `*`).
|
||||
-/
|
||||
def empty : Builder := { }
|
||||
|
||||
/--
|
||||
Sets the HTTP method for the request being built.
|
||||
-/
|
||||
|
||||
@@ -111,7 +111,7 @@ namespace Builder
|
||||
/--
|
||||
Creates a new HTTP Response builder with default head (status: 200 OK, version: HTTP/1.1).
|
||||
-/
|
||||
def empty : Builder := { }
|
||||
def new : Builder := { }
|
||||
|
||||
/--
|
||||
Sets the HTTP status code for the response being built.
|
||||
@@ -173,66 +173,66 @@ end Builder
|
||||
Creates a new HTTP Response builder with the 200 status code.
|
||||
-/
|
||||
def ok : Builder :=
|
||||
.empty |>.status .ok
|
||||
.new |>.status .ok
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the provided status.
|
||||
-/
|
||||
def withStatus (status : Status) : Builder :=
|
||||
.empty |>.status status
|
||||
.new |>.status status
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 404 status code.
|
||||
-/
|
||||
def notFound : Builder :=
|
||||
.empty |>.status .notFound
|
||||
.new |>.status .notFound
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 500 status code.
|
||||
-/
|
||||
def internalServerError : Builder :=
|
||||
.empty |>.status .internalServerError
|
||||
.new |>.status .internalServerError
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 400 status code.
|
||||
-/
|
||||
def badRequest : Builder :=
|
||||
.empty |>.status .badRequest
|
||||
.new |>.status .badRequest
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 201 status code.
|
||||
-/
|
||||
def created : Builder :=
|
||||
.empty |>.status .created
|
||||
.new |>.status .created
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 202 status code.
|
||||
-/
|
||||
def accepted : Builder :=
|
||||
.empty |>.status .accepted
|
||||
.new |>.status .accepted
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 401 status code.
|
||||
-/
|
||||
def unauthorized : Builder :=
|
||||
.empty |>.status .unauthorized
|
||||
.new |>.status .unauthorized
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 403 status code.
|
||||
-/
|
||||
def forbidden : Builder :=
|
||||
.empty |>.status .forbidden
|
||||
.new |>.status .forbidden
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 409 status code.
|
||||
-/
|
||||
def conflict : Builder :=
|
||||
.empty |>.status .conflict
|
||||
.new |>.status .conflict
|
||||
|
||||
/--
|
||||
Creates a new HTTP Response builder with the 503 status code.
|
||||
-/
|
||||
def serviceUnavailable : Builder :=
|
||||
.empty |>.status .serviceUnavailable
|
||||
.new |>.status .serviceUnavailable
|
||||
|
||||
end Response
|
||||
|
||||
@@ -94,4 +94,3 @@ def parseOrRoot (s : String) : Std.Http.URI.Path :=
|
||||
parse? s |>.getD { segments := #[], absolute := true }
|
||||
|
||||
end Std.Http.URI.Path
|
||||
|
||||
|
||||
@@ -366,12 +366,12 @@ macro "mvcgen_trivial" : tactic =>
|
||||
/--
|
||||
An invariant alternative of the form `· term`, one per invariant goal.
|
||||
-/
|
||||
syntax goalDotAlt := ppDedent(ppLine) cdotTk (colGe term)
|
||||
syntax invariantDotAlt := ppDedent(ppLine) cdotTk (colGe term)
|
||||
|
||||
/--
|
||||
An invariant alternative of the form `| inv<n> a b c => term`, one per invariant goal.
|
||||
-/
|
||||
syntax goalCaseAlt := ppDedent(ppLine) "| " caseArg " => " (colGe term)
|
||||
syntax invariantCaseAlt := ppDedent(ppLine) "| " caseArg " => " (colGe term)
|
||||
|
||||
/--
|
||||
Either the contextual keyword ` invariants ` or its tracing form ` invariants? ` which suggests
|
||||
@@ -387,7 +387,7 @@ After `mvcgen [...]`, there can be an optional `invariants` followed by either
|
||||
The tracing variant ` invariants? ` will suggest a skeleton for missing invariants; see the
|
||||
docstring for `mvcgen`.
|
||||
-/
|
||||
syntax invariantAlts := invariantsKW withPosition((colGe (goalDotAlt <|> goalCaseAlt))*)
|
||||
syntax invariantAlts := invariantsKW withPosition((colGe (invariantDotAlt <|> invariantCaseAlt))*)
|
||||
|
||||
/--
|
||||
An induction alternative, which can have 1 or more cases on the left
|
||||
|
||||
@@ -361,6 +361,11 @@ COMMANDS:
|
||||
clean removes ALL files from the local Lake cache
|
||||
services print configured remote cache services
|
||||
|
||||
STAGING COMMANDS:
|
||||
stage <map> <dir> copy build outputs from the cache to a directory
|
||||
unstage <dir> cache build outputs from a staging directory
|
||||
put-staged <dir> upload build outputs from a staging directory
|
||||
|
||||
See `lake cache help <command>` for more information on a specific command."
|
||||
|
||||
def helpCacheGet :=
|
||||
@@ -381,7 +386,7 @@ OPTIONS:
|
||||
--force-download redownload existing files
|
||||
|
||||
Downloads build outputs for packages in the workspace from a remote cache
|
||||
service. The cache service used can be specifed via the `--service` option.
|
||||
service. The cache service used can be specified via the `--service` option.
|
||||
Otherwise, Lake will the system default, or, if none is configured, Reservoir.
|
||||
See `lake cache services` for more information on how to configure services.
|
||||
|
||||
@@ -424,7 +429,7 @@ USAGE:
|
||||
|
||||
Uploads the input-to-output mappings contained in the specified file along
|
||||
with the corresponding output artifacts to a remote cache. The cache service
|
||||
used via be specified via `--service` option. If not specifed, Lake will used
|
||||
used can be specified via the `--service` option. If not specified, Lake will use
|
||||
the system default, or error if none is configured. See the help page of
|
||||
`lake cache services` for more information on how to configure services.
|
||||
|
||||
@@ -454,7 +459,7 @@ full scope). As such, the command will warn if the work tree currently
|
||||
has changes."
|
||||
|
||||
def helpCacheAdd :=
|
||||
"Addd input-to-output mappings to the Lake cache
|
||||
"Add input-to-output mappings to the Lake cache
|
||||
|
||||
USAGE:
|
||||
lake cache add <mappings>
|
||||
@@ -476,6 +481,48 @@ to avoid clashes. For Reservoir, this scope can either be a package (set via
|
||||
`--scope`) or a repository (set via `--repo`). For S3 services, both options
|
||||
are synonymous."
|
||||
|
||||
def helpCacheStage :=
|
||||
"Copy build outputs from the cache to a staging directory
|
||||
|
||||
USAGE:
|
||||
lake cache stage <mappings> <staging-directory>
|
||||
|
||||
Creates the staging directory and copies the mappings file to it. Then, it
|
||||
copies all artifacts described within the mappings file from the cache to the
|
||||
staging directory. Errors if any of the artifacts described cannot be found in
|
||||
the cache."
|
||||
|
||||
def helpCacheUnstage :=
|
||||
"Cache build outputs from a staging directory
|
||||
|
||||
USAGE:
|
||||
lake cache unstage <staging-directory>
|
||||
|
||||
Copies the mappings and artifacts stored in staging directory (e.g., via
|
||||
`lake cache stage`) back into the cache.
|
||||
|
||||
Reads the mappings file located at `outputs.jsonl` within the staging
|
||||
directory and writes the mappings to the Lake cache. Then, it copies the
|
||||
described artifacts from the staging directory into the cache."
|
||||
|
||||
def helpCachePutStaged :=
|
||||
"Upload build outputs from a staging directory to a remote service
|
||||
|
||||
USAGE:
|
||||
lake cache put-staged <staging-directory>
|
||||
|
||||
OPTIONS:
|
||||
--scope=<remote-scope> verbatim scope
|
||||
--repo=<github-repo> scope with repository + toolchain & platform
|
||||
--toolchain=<name> with --repo, sets the toolchain
|
||||
--platform=<target-triple> with --repo, sets the platform
|
||||
|
||||
Works like `lake cache put` but uploads outputs from the staging directory
|
||||
instead of the Lake cache. Does not configure the workspace and thus does not
|
||||
execute arbitrary user code. However, because of this, the package's platform
|
||||
and toolchain settings will not be automatically detected and must be
|
||||
specified manually via `--platform` and `--toolchain` (if desired)."
|
||||
|
||||
def helpCacheClean :=
|
||||
"Removes ALL files from the local Lake cache
|
||||
|
||||
@@ -641,6 +688,9 @@ public def helpCache : (cmd : String) → String
|
||||
| "get" => helpCacheGet
|
||||
| "put" => helpCachePut
|
||||
| "add" => helpCacheAdd
|
||||
| "stage" => helpCacheStage
|
||||
| "unstage" => helpCacheUnstage
|
||||
| "put-staged" => helpCachePutStaged
|
||||
| "clean" => helpCacheClean
|
||||
| "services" => helpCacheServices
|
||||
| _ => helpCacheCli
|
||||
|
||||
@@ -446,7 +446,7 @@ protected def get : CliM PUnit := do
|
||||
logWarning endpointDeprecation
|
||||
if opts.mappingsOnly then
|
||||
error "`--mappings-only` requires services to be configured
|
||||
via the Lake system configuration (not enviroment variables)"
|
||||
via the Lake system configuration (not environment variables)"
|
||||
return .downloadService artifactEndpoint revisionEndpoint ws.lakeEnv.cacheService?
|
||||
| none, none =>
|
||||
return ws.defaultCacheService
|
||||
@@ -523,61 +523,82 @@ where
|
||||
error s!"{remoteScope}: no outputs found {revisions}"
|
||||
return map
|
||||
|
||||
protected def put : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
let file ← takeArg "mappings"
|
||||
let opts ← getThe LakeOptions
|
||||
let some scope := opts.scope?
|
||||
| error "the `--scope` or `--repo` option must be set for `cache put`"
|
||||
noArgsRem do
|
||||
let cfg ← mkLoadConfig opts
|
||||
let ws ← loadWorkspace cfg
|
||||
let pkg := ws.root
|
||||
let platform := cachePlatform pkg (opts.platform?.getD .system)
|
||||
let toolchain := cacheToolchain pkg (opts.toolchain?.getD ws.cacheToolchain)
|
||||
let service : CacheService ← id do
|
||||
if let some service := opts.service? then
|
||||
let some service := ws.findCacheService? service
|
||||
| error (serviceNotFound service ws.lakeConfig.config.cache.services)
|
||||
let some key := ws.lakeEnv.cacheKey?
|
||||
| error "uploads require an authentication key configured through `LAKE_CACHE_KEY`"
|
||||
return service.withKey key
|
||||
else
|
||||
match ws.lakeEnv.cacheKey?, ws.lakeEnv.cacheArtifactEndpoint?, ws.lakeEnv.cacheRevisionEndpoint? with
|
||||
| some key, some artifactEndpoint, some revisionEndpoint =>
|
||||
logWarning endpointDeprecation
|
||||
return .uploadService key artifactEndpoint revisionEndpoint
|
||||
| key?, none, none =>
|
||||
if let some service := ws.defaultCacheUploadService? then
|
||||
let some key := key?
|
||||
| error "uploads require an authentication key configured through `LAKE_CACHE_KEY`"
|
||||
return service.withKey key
|
||||
else
|
||||
error "no default upload service configured; the `--service` option must be set for `cache put`"
|
||||
| key?, artifactEndpoint?, revisionEndpoint? =>
|
||||
logWarning endpointDeprecation
|
||||
error (invalidEndpointConfig key? artifactEndpoint? revisionEndpoint?)
|
||||
let repo := GitRepo.mk pkg.dir
|
||||
if (← repo.hasDiff) then
|
||||
logWarning s!"{pkg.prettyName}: package has changes; \
|
||||
artifacts will be uploaded for the most recent commit"
|
||||
if opts.failLv ≤ .warning then
|
||||
exit 1
|
||||
let rev ← repo.getHeadRevision
|
||||
let map ← CacheMap.load file
|
||||
let descrs ← map.collectOutputDescrs
|
||||
let paths ← ws.lakeCache.getArtifactPaths descrs
|
||||
service.uploadArtifacts ⟨descrs, rfl⟩ paths scope
|
||||
-- Mappings are uploaded after artifacts to allow downloads to assume that
|
||||
-- if the mappings exist, the artifacts should also exist
|
||||
service.uploadRevisionOutputs rev file scope platform toolchain
|
||||
private def computeUploadService
|
||||
(service? : Option String) (lakeEnv : Env) (lakeCfg : LoadedLakeConfig)
|
||||
: CliStateM CacheService := do
|
||||
if let some service := service? then
|
||||
let some service := lakeCfg.cacheServices.find? (.mkSimple service)
|
||||
| error (serviceNotFound service lakeCfg.config.cache.services)
|
||||
let some key := lakeEnv.cacheKey?
|
||||
| error "uploads require an authentication key configured through `LAKE_CACHE_KEY`"
|
||||
return service.withKey key
|
||||
else
|
||||
match lakeEnv.cacheKey?, lakeEnv.cacheArtifactEndpoint?, lakeEnv.cacheRevisionEndpoint? with
|
||||
| some key, some artifactEndpoint, some revisionEndpoint =>
|
||||
return .uploadService key artifactEndpoint revisionEndpoint
|
||||
| key?, none, none =>
|
||||
if let some service := lakeCfg.defaultCacheUploadService? then
|
||||
let some key := key?
|
||||
| error "uploads require an authentication key configured through `LAKE_CACHE_KEY`"
|
||||
return service.withKey key
|
||||
else
|
||||
error "no default upload service configured; the `--service` option must be set"
|
||||
| key?, artifactEndpoint?, revisionEndpoint? =>
|
||||
error (invalidEndpointConfig key? artifactEndpoint? revisionEndpoint?)
|
||||
where
|
||||
invalidEndpointConfig key? artifactEndpoint? revisionEndpoint? :=
|
||||
s!"invalid endpoint configuration:\
|
||||
\n LAKE_CACHE_KEY is {if key?.isNone then "unset" else "set"}\
|
||||
\n LAKE_CACHE_ARTIFACT_ENDPOINT={artifactEndpoint?.getD ""}\
|
||||
\n LAKE_CACHE_REVISION_ENDPOINT={revisionEndpoint?.getD ""}\n\
|
||||
To use `cache put`, these environment variables must be set to non-empty strings."
|
||||
To upload, these environment variables must be set to non-empty strings."
|
||||
|
||||
private def computePackageRev (pkgDir : FilePath) : CliStateM String := do
|
||||
let repo := GitRepo.mk pkgDir
|
||||
if (← repo.hasDiff) then
|
||||
logWarning s!"package has changes; \
|
||||
artifacts will be uploaded for the most recent commit"
|
||||
if (← getThe LakeOptions).failLv ≤ .warning then
|
||||
exit 1
|
||||
repo.getHeadRevision
|
||||
|
||||
private def putCore
|
||||
(rev : String) (outputs : FilePath) (artDir : FilePath)
|
||||
(service : CacheService) (scope : CacheServiceScope)
|
||||
(platform := CachePlatform.none) (toolchain := CacheToolchain.none)
|
||||
: LoggerIO Unit := do
|
||||
let map ← CacheMap.load outputs
|
||||
let descrs ← map.collectOutputDescrs
|
||||
let paths ← computeArtifactPaths descrs
|
||||
service.uploadArtifacts ⟨descrs, rfl⟩ paths scope
|
||||
-- Mappings are uploaded after artifacts to allow downloads to assume that
|
||||
-- if the mappings exist, the artifacts should also exist
|
||||
service.uploadRevisionOutputs rev outputs scope platform toolchain
|
||||
where
|
||||
computeArtifactPaths (descrs : Array ArtifactDescr) : LogIO (Vector FilePath descrs.size) := throwIfLogs do
|
||||
(Vector.mk descrs rfl).mapM fun out => do
|
||||
let art := artDir / out.relPath
|
||||
unless (← art.pathExists) do
|
||||
logError s!"artifact not found in cache: {art}"
|
||||
return art
|
||||
|
||||
protected def put : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
let file ← takeArg "mappings"
|
||||
let opts ← getThe LakeOptions
|
||||
let some scope := opts.scope?
|
||||
| error "the `--scope` or `--repo` option must be set"
|
||||
noArgsRem do
|
||||
let cfg ← mkLoadConfig opts
|
||||
let lakeEnv := cfg.lakeEnv
|
||||
let pkg ← loadPackage cfg
|
||||
let lakeCfg ← loadLakeConfig lakeEnv
|
||||
let lakeCache := computeLakeCache pkg lakeEnv
|
||||
let platform := cachePlatform pkg (opts.platform?.getD .system)
|
||||
let toolchain := cacheToolchain pkg (opts.toolchain?.getD lakeEnv.cacheToolchain)
|
||||
let service ← computeUploadService opts.service? lakeEnv lakeCfg
|
||||
let rev ← opts.rev?.getDM (computePackageRev pkg.dir)
|
||||
putCore rev file lakeCache.artifactDir service scope platform toolchain
|
||||
|
||||
protected def add : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
@@ -602,6 +623,99 @@ protected def add : CliM PUnit := do
|
||||
let map ← CacheMap.load file
|
||||
ws.lakeCache.writeMap localScope map service? opts.scope?
|
||||
|
||||
private def stagingOutputsFile := "outputs.jsonl"
|
||||
|
||||
protected def stage : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
let opts ← getThe LakeOptions
|
||||
let mappingsFile ← FilePath.mk <$> takeArg "mappings"
|
||||
let stagingDir ← FilePath.mk <$> takeArg "staging directory"
|
||||
noArgsRem do
|
||||
let cfg ← mkLoadConfig opts
|
||||
let cache ← id do
|
||||
if (← configFileExists cfg.configFile) then
|
||||
return (← loadWorkspaceRoot cfg).lakeCache
|
||||
else if let some cache := cfg.lakeEnv.lakeCache? then
|
||||
return cache
|
||||
else
|
||||
error "no workspace configuration found and no system cache detected"
|
||||
let map ← CacheMap.load mappingsFile
|
||||
let descrs ← map.collectOutputDescrs
|
||||
IO.FS.createDirAll stagingDir
|
||||
copyFile mappingsFile (stagingDir / stagingOutputsFile)
|
||||
let ok ← descrs.foldlM (init := true) fun ok descr => do
|
||||
let cachePath := cache.artifactDir / descr.relPath
|
||||
let stagingPath := stagingDir / descr.relPath
|
||||
match (← copyFile cachePath stagingPath |>.toBaseIO) with
|
||||
| .ok _ =>
|
||||
return ok
|
||||
| .error (.noFileOrDirectory ..) =>
|
||||
logError s!"artifact not found in cache: {cachePath}"
|
||||
return false
|
||||
| .error e =>
|
||||
logError s!"failed to copy artifact: {e}"
|
||||
return false
|
||||
unless ok do
|
||||
logError "failed to copy all outputs to the staging directory"
|
||||
exit 1
|
||||
|
||||
protected def unstage : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
let opts ← getThe LakeOptions
|
||||
let stagingDir ← FilePath.mk <$> takeArg "staging directory"
|
||||
let pkg? ← takeArg?
|
||||
noArgsRem do
|
||||
let cfg ← mkLoadConfig opts
|
||||
let ws ← loadWorkspace cfg
|
||||
let pkg ← match pkg? with
|
||||
| some pkg => parsePackageSpec ws pkg
|
||||
| _ => pure ws.root
|
||||
let localScope := pkg.cacheScope
|
||||
if opts.scope?.isSome && opts.service?.isNone then
|
||||
error "`--scope` and `--repo` require `--service`"
|
||||
let service? ← id do
|
||||
let some service := opts.service?
|
||||
| return none
|
||||
unless (ws.findCacheService? service).isSome do
|
||||
error (serviceNotFound service ws.lakeConfig.config.cache.services)
|
||||
return some (.ofString service)
|
||||
let map ← CacheMap.load (stagingDir / stagingOutputsFile)
|
||||
let descrs ← map.collectOutputDescrs
|
||||
let artDir := ws.lakeCache.artifactDir
|
||||
IO.FS.createDirAll artDir
|
||||
let ok ← descrs.foldlM (init := true) fun ok descr => do
|
||||
let cachePath := artDir/ descr.relPath
|
||||
let stagingPath := stagingDir / descr.relPath
|
||||
match (← copyFile stagingPath cachePath |>.toBaseIO) with
|
||||
| .ok _ =>
|
||||
return ok
|
||||
| .error (.noFileOrDirectory ..) =>
|
||||
logError s!"output artifact not found in staging directory: {stagingPath}"
|
||||
return false
|
||||
| .error e =>
|
||||
logError s!"failed to copy artifact: {e}"
|
||||
return false
|
||||
unless ok do
|
||||
logError "failed to copy all outputs to the staging directory"
|
||||
exit 1
|
||||
ws.lakeCache.writeMap localScope map service? opts.scope?
|
||||
|
||||
protected def putStaged : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
let opts ← getThe LakeOptions
|
||||
let stagingDir ← FilePath.mk <$> takeArg "staging directory"
|
||||
let some scope := opts.scope?
|
||||
| error "the `--scope` or `--repo` option must be set"
|
||||
noArgsRem do
|
||||
let cfg ← mkLoadConfig opts
|
||||
let lakeCfg ← loadLakeConfig cfg.lakeEnv
|
||||
let platform := opts.platform?.getD .none
|
||||
let toolchain := opts.toolchain?.getD .none
|
||||
let service ← computeUploadService opts.service? cfg.lakeEnv lakeCfg
|
||||
let rev ← opts.rev?.getDM (computePackageRev cfg.wsDir)
|
||||
let outputsFile := stagingDir / stagingOutputsFile
|
||||
putCore rev outputsFile stagingDir service scope platform toolchain
|
||||
|
||||
protected def services : CliM PUnit := do
|
||||
processOptions lakeOption
|
||||
let opts ← getThe LakeOptions
|
||||
@@ -630,13 +744,16 @@ protected def help : CliM PUnit := do
|
||||
end cache
|
||||
|
||||
def cacheCli : (cmd : String) → CliM PUnit
|
||||
| "add" => cache.add
|
||||
| "get" => cache.get
|
||||
| "put" => cache.put
|
||||
| "clean" => cache.clean
|
||||
| "services" => cache.services
|
||||
| "help" => cache.help
|
||||
| cmd => throw <| CliError.unknownCommand cmd
|
||||
| "add" => cache.add
|
||||
| "get" => cache.get
|
||||
| "put" => cache.put
|
||||
| "stage" => cache.stage
|
||||
| "unstage" => cache.unstage
|
||||
| "put-staged" => cache.putStaged
|
||||
| "clean" => cache.clean
|
||||
| "services" => cache.services
|
||||
| "help" => cache.help
|
||||
| cmd => throw <| CliError.unknownCommand cmd
|
||||
|
||||
/-! ### `lake script` CLI -/
|
||||
|
||||
|
||||
@@ -421,19 +421,6 @@ public def getArtifact (cache : Cache) (descr : ArtifactDescr) : EIO String Arti
|
||||
| .error e =>
|
||||
error s!"failed to retrieve artifact from cache: {e}"
|
||||
|
||||
/--
|
||||
**For internal use only.**
|
||||
Returns path to the artifact for each output. Errors if any are missing.
|
||||
-/
|
||||
public def getArtifactPaths
|
||||
(cache : Cache) (descrs : Array ArtifactDescr)
|
||||
: LogIO (Vector FilePath descrs.size) := throwIfLogs do
|
||||
(Vector.mk descrs rfl).mapM fun out => do
|
||||
let art := cache.artifactDir / out.relPath
|
||||
unless (← art.pathExists) do
|
||||
logError s!"artifact not found in cache: {art}"
|
||||
return art
|
||||
|
||||
/-- The directory where input-to-output mappings are stored in the Lake cache. -/
|
||||
@[inline] public def outputsDir (cache : Cache) : FilePath :=
|
||||
cache.dir / "outputs"
|
||||
@@ -778,12 +765,13 @@ where
|
||||
\n remote URL: {info.url}"
|
||||
match cfg.kind with
|
||||
| .get =>
|
||||
if let .ok size := out.getAs Nat "size_download" then
|
||||
if size > 0 then
|
||||
if let .ok contentType := out.getAs String "content_type" then
|
||||
if contentType != artifactContentType then
|
||||
if let .ok resp ← IO.FS.readFile info.path |>.toBaseIO then
|
||||
msg := s!"{msg}\nunexpected response:\n{resp}"
|
||||
unless code? matches .ok 404 do -- ignore response bodies on 404s
|
||||
if let .ok size := out.getAs Nat "size_download" then
|
||||
if size > 0 then
|
||||
if let .ok contentType := out.getAs String "content_type" then
|
||||
if contentType != artifactContentType then
|
||||
if let .ok resp ← IO.FS.readFile info.path |>.toBaseIO then
|
||||
msg := s!"{msg}\nunexpected response:\n{resp}"
|
||||
removeFileIfExists info.path
|
||||
| .put =>
|
||||
if let .ok size := out.getAs Nat "size_download" then
|
||||
@@ -800,7 +788,7 @@ private def transferArtifacts
|
||||
match cfg.kind with
|
||||
| .get =>
|
||||
cfg.infos.forM fun info => do
|
||||
h.putStrLn s!"url = {info.url}"
|
||||
h.putStrLn s!"url = {info.url.quote}"
|
||||
h.putStrLn s!"-o {info.path.toString.quote}"
|
||||
h.flush
|
||||
return #[
|
||||
@@ -811,7 +799,7 @@ private def transferArtifacts
|
||||
| .put =>
|
||||
cfg.infos.forM fun info => do
|
||||
h.putStrLn s!"-T {info.path.toString.quote}"
|
||||
h.putStrLn s!"url = {info.url}"
|
||||
h.putStrLn s!"url = {info.url.quote}"
|
||||
h.flush
|
||||
return #[
|
||||
"-Z", "-X", "PUT", "-L",
|
||||
@@ -840,6 +828,13 @@ private def transferArtifacts
|
||||
if s.didError then
|
||||
failure
|
||||
|
||||
private def reservoirArtifactsUrl (service : CacheService) (scope : CacheServiceScope) : String :=
|
||||
let endpoint :=
|
||||
match scope.impl with
|
||||
| .repo scope => appendScope s!"{service.impl.apiEndpoint}/repositories" scope
|
||||
| .str scope => appendScope s!"{service.impl.apiEndpoint}/packages" scope
|
||||
s!"{endpoint}/artifacts"
|
||||
|
||||
public def downloadArtifacts
|
||||
(descrs : Array ArtifactDescr) (cache : Cache)
|
||||
(service : CacheService) (scope : CacheServiceScope) (force := false)
|
||||
@@ -857,8 +852,68 @@ public def downloadArtifacts
|
||||
return s.push {url, path, descr}
|
||||
if infos.isEmpty then
|
||||
return
|
||||
let infos ← id do
|
||||
if service.isReservoir then
|
||||
-- Artifact cloud storage URLs are fetched in a single request
|
||||
-- to avoid hammering the Reservoir web host
|
||||
fetchUrls (service.reservoirArtifactsUrl scope) infos
|
||||
else return infos
|
||||
IO.FS.createDirAll cache.artifactDir
|
||||
transferArtifacts {scope, infos, kind := .get}
|
||||
where
|
||||
fetchUrls url infos := IO.FS.withTempFile fun h path => do
|
||||
let body := Json.arr <| infos.map (toJson ·.descr.hash)
|
||||
h.putStr body.compress
|
||||
h.flush
|
||||
let args := #[
|
||||
"-X", "POST", "-L", "-d", s!"@{path}",
|
||||
"--retry", "3", -- intermittent network errors can occur
|
||||
"-s", "-w", "%{stderr}%{json}\n",
|
||||
"-H", "Content-Type: application/json",
|
||||
]
|
||||
let args := Reservoir.lakeHeaders.foldl (· ++ #["-H", ·]) args
|
||||
let spawnArgs := {
|
||||
cmd := "curl", args := args.push url
|
||||
stdout := .piped, stderr := .piped
|
||||
}
|
||||
logVerbose (mkCmdLog spawnArgs)
|
||||
let {stdout, stderr, exitCode} ← IO.Process.output spawnArgs
|
||||
match Json.parse stdout >>= fromJson? with
|
||||
| .ok (resp : ReservoirResp (Array String)) =>
|
||||
match resp with
|
||||
| .data urls =>
|
||||
if h : infos.size = urls.size then
|
||||
let s := infos.size.fold (init := infos.toVector) fun i hi s =>
|
||||
s.set i {s[i] with url := urls[i]'(h ▸ hi)}
|
||||
return s.toArray
|
||||
else
|
||||
error s!"failed to fetch artifact URLs\
|
||||
\n POST {url}\
|
||||
\nIncorrect number of results: expected {infos.size}, got {urls.size}"
|
||||
| .error status message =>
|
||||
error s!"failed to fetch artifact URLs (status code: {status})\
|
||||
\n POST {url}\
|
||||
\nReservoir error: {message}"
|
||||
| .error _ =>
|
||||
match Json.parse stderr >>= fromJson? with
|
||||
| .ok (out : JsonObject) =>
|
||||
let mut msg := "failed to fetch artifact URLs"
|
||||
if let .ok code := out.getAs Nat "http_code" then
|
||||
msg := s!"{msg} (status code: {code})"
|
||||
msg := s!"{msg}\n POST {url}"
|
||||
if let .ok errMsg := out.getAs String "errormsg" then
|
||||
msg := s!"{msg}\n Transfer error: {errMsg}"
|
||||
unless stdout.isEmpty do
|
||||
msg := s!"{msg}\nstdout:\n{stdout.trimAsciiEnd}"
|
||||
logError msg
|
||||
logVerbose s!"curl JSON:\n{stderr.trimAsciiEnd}"
|
||||
| .error e =>
|
||||
logError s!"failed to fetch artifact URLs\
|
||||
\n POST {url}
|
||||
\nInvalid curl JSON: {e}; received: {stderr.trimAscii}"
|
||||
unless stdout.isEmpty do
|
||||
logWarning s!"curl produced unexpected output:\n{stdout.trimAsciiEnd}"
|
||||
error s!"curl exited with code {exitCode}"
|
||||
|
||||
@[deprecated "Deprecated without replacement." (since := "2026-02-27")]
|
||||
public def downloadOutputArtifacts
|
||||
|
||||
@@ -50,6 +50,6 @@ public abbrev CacheServiceMap := NameMap CacheService
|
||||
public structure LoadedLakeConfig where
|
||||
config : LakeConfig
|
||||
defaultCacheService : CacheService
|
||||
defaultUploadCacheService? : Option CacheService
|
||||
defaultCacheUploadService? : Option CacheService
|
||||
cacheServices : CacheServiceMap
|
||||
deriving Nonempty
|
||||
|
||||
@@ -22,6 +22,13 @@ open Lean (Name LeanOptions)
|
||||
|
||||
namespace Lake
|
||||
|
||||
/-- **For internal use only.** Computes the cache to use for the package based on the environment. -/
|
||||
public def computeLakeCache (pkg : Package) (lakeEnv : Lake.Env) : Cache :=
|
||||
if pkg.bootstrap then
|
||||
lakeEnv.lakeSystemCache?.getD ⟨pkg.lakeDir / "cache"⟩
|
||||
else
|
||||
lakeEnv.lakeCache?.getD ⟨pkg.lakeDir / "cache"⟩
|
||||
|
||||
/-- A Lake workspace -- the top-level package directory. -/
|
||||
public structure Workspace : Type where
|
||||
/-- The root package of the workspace. -/
|
||||
@@ -31,9 +38,7 @@ public structure Workspace : Type where
|
||||
/-- The Lake configuration from the system configuration file. -/
|
||||
lakeConfig : LoadedLakeConfig
|
||||
/-- The Lake cache. -/
|
||||
lakeCache : Cache :=
|
||||
if root.bootstrap then lakeEnv.lakeSystemCache?.getD ⟨root.lakeDir / "cache"⟩
|
||||
else lakeEnv.lakeCache?.getD ⟨root.lakeDir / "cache"⟩
|
||||
lakeCache : Cache := private_decl% computeLakeCache root lakeEnv
|
||||
/--
|
||||
The CLI arguments Lake was run with.
|
||||
Used by {lit}`lake update` to perform a restart of Lake on a toolchain update.
|
||||
@@ -128,7 +133,7 @@ Returns the cache service (if any) used by default for uploads (e.g., for {lit}`
|
||||
This is configured through {lit}`cache.defaultUploadService` in the system Lake configuration.
|
||||
-/
|
||||
@[inline] public def defaultCacheUploadService? (ws : Workspace) : Option CacheService :=
|
||||
ws.lakeConfig.defaultUploadCacheService?
|
||||
ws.lakeConfig.defaultCacheUploadService?
|
||||
|
||||
/--
|
||||
Returns the configured cache service with the given name.
|
||||
|
||||
@@ -33,6 +33,11 @@ def updateGitPkg
|
||||
else
|
||||
logInfo s!"{name}: checking out revision '{rev}'"
|
||||
repo.checkoutDetach rev
|
||||
-- Remove untracked files from tracked folders the package.
|
||||
-- This helps ensure reproducible behavior by removing leftovers.
|
||||
-- For example, Lake will trust leftover `.hash` files unconditionally,
|
||||
-- so stale ones from the previous revision cause incorrect trace computations.
|
||||
repo.clean
|
||||
|
||||
/-- Clone the Git package as `repo`. -/
|
||||
def cloneGitPkg
|
||||
|
||||
@@ -545,7 +545,7 @@ private def loadLakeConfigCore (path : FilePath) (lakeEnv : Lake.Env) : LogIO Lo
|
||||
| error s!"the configured default cache service `{name}` is not defined; \
|
||||
please add a `cache.service` with that name"
|
||||
return service
|
||||
let defaultUploadCacheService? ← id do
|
||||
let defaultCacheUploadService? ← id do
|
||||
let name := config.cache.defaultUploadService
|
||||
if name.isEmpty then
|
||||
return none
|
||||
@@ -555,12 +555,12 @@ private def loadLakeConfigCore (path : FilePath) (lakeEnv : Lake.Env) : LogIO Lo
|
||||
please add a `cache.service` with that name"
|
||||
return some service
|
||||
if cacheServices.contains `reservoir then
|
||||
return {config, defaultCacheService, defaultUploadCacheService?, cacheServices}
|
||||
return {config, defaultCacheService, defaultCacheUploadService?, cacheServices}
|
||||
else
|
||||
let cacheServices := cacheServices.insert `reservoir defaultService
|
||||
let defaultServiceConfig := {name := "reservoir", kind := .reservoir, apiEndpoint := lakeEnv.reservoirApiUrl}
|
||||
let config := {config with cache.services := config.cache.services.push defaultServiceConfig}
|
||||
return {config, defaultCacheService, defaultUploadCacheService?, cacheServices}
|
||||
return {config, defaultCacheService, defaultCacheUploadService?, cacheServices}
|
||||
else
|
||||
errorWithLog <| errs.forM fun {ref, msg} =>
|
||||
let pos := ictx.fileMap.toPosition <| ref.getPos?.getD 0
|
||||
@@ -574,7 +574,7 @@ private def LoadedLakeConfig.mkDefault (lakeEnv : Lake.Env) : LoadedLakeConfig :
|
||||
{
|
||||
config.cache.services := #[defaultServiceConfig]
|
||||
defaultCacheService := defaultService
|
||||
defaultUploadCacheService? := none
|
||||
defaultCacheUploadService? := none
|
||||
cacheServices := NameMap.empty.insert `reservoir defaultService
|
||||
}
|
||||
|
||||
|
||||
@@ -103,24 +103,6 @@ public instance : FromJson RegistryPkg := ⟨RegistryPkg.fromJson?⟩
|
||||
|
||||
end RegistryPkg
|
||||
|
||||
/-- A Reservoir API response object. -/
|
||||
public inductive ReservoirResp (α : Type u)
|
||||
| data (a : α)
|
||||
| error (status : Nat) (message : String)
|
||||
|
||||
public protected def ReservoirResp.fromJson? [FromJson α] (val : Json) : Except String (ReservoirResp α) := do
|
||||
let obj ← JsonObject.fromJson? val
|
||||
if let some (err : JsonObject) ← obj.get? "error" then
|
||||
let status ← err.get "status"
|
||||
let message ← err.get "message"
|
||||
return .error status message
|
||||
else if let some (val : Json) ← obj.get? "data" then
|
||||
.data <$> fromJson? val
|
||||
else
|
||||
.data <$> fromJson? val
|
||||
|
||||
public instance [FromJson α] : FromJson (ReservoirResp α) := ⟨ReservoirResp.fromJson?⟩
|
||||
|
||||
public def Reservoir.pkgApiUrl (lakeEnv : Lake.Env) (owner pkg : String) :=
|
||||
s!"{lakeEnv.reservoirApiUrl}/packages/{uriEncode owner}/{uriEncode pkg}"
|
||||
|
||||
|
||||
@@ -83,6 +83,10 @@ public def checkoutBranch (branch : String) (repo : GitRepo) : LogIO PUnit :=
|
||||
public def checkoutDetach (hash : String) (repo : GitRepo) : LogIO PUnit :=
|
||||
repo.execGit #["checkout", "--detach", hash, "--"]
|
||||
|
||||
/-- Remove untracked files from tracked folders in the repository. -/
|
||||
public def clean (repo : GitRepo) : LogIO PUnit :=
|
||||
repo.execGit #["clean", "-xf"]
|
||||
|
||||
public def resolveRevision? (rev : String) (repo : GitRepo) : BaseIO (Option String) := do
|
||||
repo.captureGit? #["rev-parse", "--verify", "--end-of-options", rev]
|
||||
|
||||
|
||||
@@ -36,8 +36,10 @@ public def mkCmdLog (args : IO.Process.SpawnArgs) : String :=
|
||||
public def proc (args : IO.Process.SpawnArgs) (quiet := false) : LogIO Unit := do
|
||||
withLogErrorPos do
|
||||
let out ← rawProc args
|
||||
logOutput out (if quiet then logVerbose else logInfo)
|
||||
if out.exitCode ≠ 0 then
|
||||
if out.exitCode = 0 then
|
||||
logOutput out (if quiet then logVerbose else logInfo)
|
||||
else
|
||||
logOutput out logInfo
|
||||
error s!"external command '{args.cmd}' exited with code {out.exitCode}"
|
||||
|
||||
public def captureProc' (args : IO.Process.SpawnArgs) : LogIO (IO.Process.Output) := do
|
||||
|
||||
@@ -6,8 +6,9 @@ Authors: Mac Malone
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Prelude
|
||||
import Init.Data.Array.Basic
|
||||
public import Lake.Util.JsonObject
|
||||
|
||||
open Lean
|
||||
|
||||
namespace Lake
|
||||
|
||||
@@ -15,3 +16,23 @@ public def Reservoir.lakeHeaders : Array String := #[
|
||||
"X-Reservoir-Api-Version:1.0.0",
|
||||
"X-Lake-Registry-Api-Version:0.1.0"
|
||||
]
|
||||
|
||||
/-- A Reservoir API response object. -/
|
||||
public inductive ReservoirResp (α : Type u)
|
||||
| data (a : α)
|
||||
| error (status : Nat) (message : String)
|
||||
|
||||
public protected def ReservoirResp.fromJson? [FromJson α] (val : Json) : Except String (ReservoirResp α) := do
|
||||
if let .ok obj := JsonObject.fromJson? val then
|
||||
if let some (err : JsonObject) ← obj.get? "error" then
|
||||
let status ← err.get "status"
|
||||
let message ← err.get "message"
|
||||
return .error status message
|
||||
else if let some (val : Json) ← obj.get? "data" then
|
||||
.data <$> fromJson? val
|
||||
else
|
||||
.data <$> fromJson? val
|
||||
else
|
||||
.data <$> fromJson? val
|
||||
|
||||
public instance [FromJson α] : FromJson (ReservoirResp α) := ⟨ReservoirResp.fromJson?⟩
|
||||
|
||||
@@ -882,9 +882,6 @@ private:
|
||||
auto cached_entry = m_constant_cache.find(fn);
|
||||
if (cached_entry != m_constant_cache.end()) {
|
||||
auto cached = cached_entry->second;
|
||||
if (!cached.m_is_scalar) {
|
||||
inc(cached.m_val.m_obj);
|
||||
}
|
||||
return cached.m_val;
|
||||
}
|
||||
auto o_entry = g_init_globals->find(fn);
|
||||
@@ -931,9 +928,6 @@ private:
|
||||
lean_always_assert(fn_body_tag(decl_fun_body(e.m_decl)) != fn_body_kind::Unreachable);
|
||||
value r = eval_body(decl_fun_body(e.m_decl));
|
||||
pop_frame(r, decl_type(e.m_decl));
|
||||
if (!type_is_scalar(t)) {
|
||||
inc(r.m_obj);
|
||||
}
|
||||
m_constant_cache.insert({ fn, constant_cache_entry { type_is_scalar(t), r } });
|
||||
return r;
|
||||
}
|
||||
@@ -1073,7 +1067,11 @@ public:
|
||||
unsigned arity = decl_params(e.m_decl).size();
|
||||
object * r;
|
||||
if (arity == 0) {
|
||||
r = box_t(load(fn, decl_type(e.m_decl)), decl_type(e.m_decl));
|
||||
type t = decl_type(e.m_decl);
|
||||
r = box_t(load(fn, t), t);
|
||||
if (!type_is_scalar(t)) {
|
||||
inc(r);
|
||||
}
|
||||
} else {
|
||||
// First allocate a closure with zero fixed parameters. This is slightly wasteful in the under-application
|
||||
// case, but simpler to handle.
|
||||
|
||||
BIN
stage0/src/library/ir_interpreter.cpp
generated
BIN
stage0/src/library/ir_interpreter.cpp
generated
Binary file not shown.
BIN
stage0/stdlib/Init/BinderPredicates.c
generated
BIN
stage0/stdlib/Init/BinderPredicates.c
generated
Binary file not shown.
BIN
stage0/stdlib/Init/ByCases.c
generated
BIN
stage0/stdlib/Init/ByCases.c
generated
Binary file not shown.
BIN
stage0/stdlib/Init/CbvSimproc.c
generated
BIN
stage0/stdlib/Init/CbvSimproc.c
generated
Binary file not shown.
BIN
stage0/stdlib/Init/Control/Basic.c
generated
BIN
stage0/stdlib/Init/Control/Basic.c
generated
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user