mirror of
https://github.com/leanprover/lean4.git
synced 2026-04-13 23:54:10 +00:00
Compare commits
14 Commits
sg/fix-get
...
sofia/fix-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b3d1fa9b4d | ||
|
|
4dced0287e | ||
|
|
c4d9573342 | ||
|
|
9db52c7fa6 | ||
|
|
ea209d19e0 | ||
|
|
f0c999a668 | ||
|
|
c769515d94 | ||
|
|
1b81a9889b | ||
|
|
1c9e26420f | ||
|
|
cd697eac81 | ||
|
|
c54f691f4a | ||
|
|
0d7e76ea88 | ||
|
|
2b8c273687 | ||
|
|
ff19ad9c38 |
@@ -7,11 +7,6 @@ To build Lean you should use `make -j$(nproc) -C build/release`.
|
||||
The build uses `ccache`, and in a sandbox `ccache` may complain about read-only file systems.
|
||||
Use `CCACHE_READONLY` and `CCACHE_TEMPDIR` instead of disabling ccache completely.
|
||||
|
||||
To rebuild individual modules without a full build, use Lake directly:
|
||||
```
|
||||
cd src && lake build Init.Prelude
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
See `tests/README.md` for full documentation. Quick reference:
|
||||
|
||||
@@ -3096,13 +3096,13 @@ theorem foldl_eq_foldlM {f : β → α → β} {b} {xs : Array α} {start stop :
|
||||
theorem foldr_eq_foldrM {f : α → β → β} {b} {xs : Array α} {start stop : Nat} :
|
||||
xs.foldr f b start stop = (xs.foldrM (m := Id) (pure <| f · ·) b start stop).run := rfl
|
||||
|
||||
public theorem foldl_eq_foldl_extract {xs : Array α} {f : β → α → β} {init : β} :
|
||||
theorem foldl_eq_foldl_extract {xs : Array α} {f : β → α → β} {init : β} :
|
||||
xs.foldl (init := init) (start := start) (stop := stop) f =
|
||||
(xs.extract start stop).foldl (init := init) f := by
|
||||
simp only [foldl_eq_foldlM]
|
||||
rw [foldlM_start_stop]
|
||||
|
||||
public theorem foldr_eq_foldr_extract {xs : Array α} {f : α → β → β} {init : β} :
|
||||
theorem foldr_eq_foldr_extract {xs : Array α} {f : α → β → β} {init : β} :
|
||||
xs.foldr (init := init) (start := start) (stop := stop) f =
|
||||
(xs.extract stop start).foldr (init := init) f := by
|
||||
simp only [foldr_eq_foldrM]
|
||||
|
||||
@@ -80,7 +80,7 @@ instance : SliceSize (Internal.SubarrayData α) where
|
||||
size s := s.internalRepresentation.stop - s.internalRepresentation.start
|
||||
|
||||
@[grind =, suggest_for Subarray.size]
|
||||
public theorem size_eq {xs : Subarray α} :
|
||||
theorem size_eq {xs : Subarray α} :
|
||||
xs.size = xs.stop - xs.start := by
|
||||
simp [Std.Slice.size, SliceSize.size, start, stop]
|
||||
|
||||
|
||||
@@ -74,7 +74,7 @@ protected theorem isGE_compare {a b : Int} :
|
||||
rw [← Int.compare_swap, Ordering.isGE_swap]
|
||||
exact Int.isLE_compare
|
||||
|
||||
public instance : Std.LawfulOrderOrd Int where
|
||||
instance : Std.LawfulOrderOrd Int where
|
||||
isLE_compare _ _ := Int.isLE_compare
|
||||
isGE_compare _ _ := Int.isGE_compare
|
||||
|
||||
|
||||
@@ -42,29 +42,29 @@ The conversion functions {name (scope := "Init.Data.Iterators.Basic")}`Shrink.de
|
||||
{name (scope := "Init.Data.Iterators.Basic")}`Shrink.inflate` form an equivalence between
|
||||
{name}`α` and {lean}`Shrink α`, but this equivalence is intentionally not definitional.
|
||||
-/
|
||||
public def Shrink (α : Type u) : Type u := Internal.idOpaque.1 α
|
||||
def Shrink (α : Type u) : Type u := Internal.idOpaque.1 α
|
||||
|
||||
/-- Converts elements of {name}`α` into elements of {lean}`Shrink α`. -/
|
||||
@[always_inline]
|
||||
public def Shrink.deflate {α} (x : α) : Shrink α :=
|
||||
def Shrink.deflate {α} (x : α) : Shrink α :=
|
||||
cast (by simp [Shrink, Internal.idOpaque.property]) x
|
||||
|
||||
/-- Converts elements of {lean}`Shrink α` into elements of {name}`α`. -/
|
||||
@[always_inline]
|
||||
public def Shrink.inflate {α} (x : Shrink α) : α :=
|
||||
def Shrink.inflate {α} (x : Shrink α) : α :=
|
||||
cast (by simp [Shrink, Internal.idOpaque.property]) x
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem Shrink.deflate_inflate {α} {x : Shrink α} :
|
||||
theorem Shrink.deflate_inflate {α} {x : Shrink α} :
|
||||
Shrink.deflate x.inflate = x := by
|
||||
simp [deflate, inflate]
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem Shrink.inflate_deflate {α} {x : α} :
|
||||
theorem Shrink.inflate_deflate {α} {x : α} :
|
||||
(Shrink.deflate x).inflate = x := by
|
||||
simp [deflate, inflate]
|
||||
|
||||
public theorem Shrink.inflate_inj {α} {x y : Shrink α} :
|
||||
theorem Shrink.inflate_inj {α} {x y : Shrink α} :
|
||||
x.inflate = y.inflate ↔ x = y := by
|
||||
apply Iff.intro
|
||||
· intro h
|
||||
@@ -72,7 +72,7 @@ public theorem Shrink.inflate_inj {α} {x y : Shrink α} :
|
||||
· rintro rfl
|
||||
rfl
|
||||
|
||||
public theorem Shrink.deflate_inj {α} {x y : α} :
|
||||
theorem Shrink.deflate_inj {α} {x y : α} :
|
||||
Shrink.deflate x = Shrink.deflate y ↔ x = y := by
|
||||
apply Iff.intro
|
||||
· intro h
|
||||
|
||||
@@ -190,7 +190,7 @@ def Append.instFinitenessRelation [Monad m] [Iterator α₁ m β] [Iterator α
|
||||
exact IterM.TerminationMeasures.Finite.rel_of_skip ‹_›
|
||||
|
||||
@[no_expose]
|
||||
public instance Append.instFinite [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
|
||||
instance Append.instFinite [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
|
||||
[Finite α₁ m] [Finite α₂ m] : Finite (Append α₁ α₂ m β) m :=
|
||||
.of_finitenessRelation instFinitenessRelation
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ open Nat
|
||||
@[simp, grind =] theorem min?_nil [Min α] : ([] : List α).min? = none := rfl
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem min?_singleton [Min α] {x : α} : [x].min? = some x :=
|
||||
theorem min?_singleton [Min α] {x : α} : [x].min? = some x :=
|
||||
(rfl)
|
||||
|
||||
-- We don't put `@[simp]` on `min?_cons'`,
|
||||
@@ -52,7 +52,7 @@ theorem min?_cons' [Min α] {xs : List α} : (x :: xs).min? = some (foldl min x
|
||||
cases xs <;> simp [min?]
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem isSome_min?_iff [Min α] {xs : List α} : xs.min?.isSome ↔ xs ≠ [] := by
|
||||
theorem isSome_min?_iff [Min α] {xs : List α} : xs.min?.isSome ↔ xs ≠ [] := by
|
||||
cases xs <;> simp [min?]
|
||||
|
||||
@[grind .]
|
||||
@@ -247,7 +247,7 @@ theorem foldl_min_eq_min [Min α] [Std.IdempotentOp (min : α → α → α)] [S
|
||||
@[simp, grind =] theorem max?_nil [Max α] : ([] : List α).max? = none := rfl
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem max?_singleton [Max α] {x : α} : [x].max? = some x :=
|
||||
theorem max?_singleton [Max α] {x : α} : [x].max? = some x :=
|
||||
(rfl)
|
||||
|
||||
-- We don't put `@[simp]` on `max?_cons'`,
|
||||
@@ -262,7 +262,7 @@ theorem max?_cons' [Max α] {xs : List α} : (x :: xs).max? = some (foldl max x
|
||||
cases xs <;> simp [max?]
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem isSome_max?_iff [Max α] {xs : List α} : xs.max?.isSome ↔ xs ≠ [] := by
|
||||
theorem isSome_max?_iff [Max α] {xs : List α} : xs.max?.isSome ↔ xs ≠ [] := by
|
||||
cases xs <;> simp [max?]
|
||||
|
||||
@[grind .]
|
||||
|
||||
@@ -70,7 +70,7 @@ protected theorem isGE_compare {a b : Nat} :
|
||||
rw [← Nat.compare_swap, Ordering.isGE_swap]
|
||||
exact Nat.isLE_compare
|
||||
|
||||
public instance : Std.LawfulOrderOrd Nat where
|
||||
instance : Std.LawfulOrderOrd Nat where
|
||||
isLE_compare _ _ := Nat.isLE_compare
|
||||
isGE_compare _ _ := Nat.isGE_compare
|
||||
|
||||
|
||||
@@ -444,7 +444,7 @@ instance : MonadAttach Option where
|
||||
CanReturn x a := x = some a
|
||||
attach x := x.attach
|
||||
|
||||
public instance : LawfulMonadAttach Option where
|
||||
instance : LawfulMonadAttach Option where
|
||||
map_attach {α} x := by simp [MonadAttach.attach]
|
||||
canReturn_map_imp {α P x a} := by
|
||||
cases x
|
||||
@@ -455,7 +455,7 @@ end Option
|
||||
|
||||
namespace OptionT
|
||||
|
||||
public instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] :
|
||||
instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] :
|
||||
WeaklyLawfulMonadAttach (OptionT m) where
|
||||
map_attach {α} x := by
|
||||
apply OptionT.ext
|
||||
@@ -466,7 +466,7 @@ public instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAtta
|
||||
| ⟨some a, _⟩ => simp [OptionT.pure, OptionT.mk]
|
||||
| ⟨none, _⟩ => simp
|
||||
|
||||
public instance [Monad m] [MonadAttach m] [LawfulMonad m] [LawfulMonadAttach m] :
|
||||
instance [Monad m] [MonadAttach m] [LawfulMonad m] [LawfulMonadAttach m] :
|
||||
LawfulMonadAttach (OptionT m) where
|
||||
canReturn_map_imp {α P x a} h := by
|
||||
simp only [MonadAttach.CanReturn, OptionT.run_map] at h
|
||||
|
||||
@@ -152,7 +152,7 @@ public theorem max_eq_if_isGE_compare {α : Type u} [Ord α] [LE α] {_ : Max α
|
||||
{a b : α} : max a b = if (compare a b).isGE then a else b := by
|
||||
open Classical in simp [max_eq_if, isGE_compare]
|
||||
|
||||
private theorem min_le_min [LE α] [Min α] [Std.LawfulOrderLeftLeaningMin α] [IsLinearOrder α] (a b : α) : min a b ≤ min b a := by
|
||||
theorem min_le_min [LE α] [Min α] [Std.LawfulOrderLeftLeaningMin α] [IsLinearOrder α] (a b : α) : min a b ≤ min b a := by
|
||||
apply (LawfulOrderInf.le_min_iff (min a b) b a).2
|
||||
rw [And.comm]
|
||||
by_cases h : a ≤ b
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -248,11 +248,11 @@ instance : HasModel Int8 (BitVec 8) where
|
||||
le_iff_encode_le := by simp [LE.le, Int8.le]
|
||||
lt_iff_encode_lt := by simp [LT.lt, Int8.lt]
|
||||
|
||||
private theorem succ?_eq_minValueSealed {x : Int8} :
|
||||
theorem succ?_eq_minValueSealed {x : Int8} :
|
||||
UpwardEnumerable.succ? x = if x + 1 = minValueSealed then none else some (x + 1) :=
|
||||
(rfl)
|
||||
|
||||
private theorem succMany?_eq_maxValueSealed {i : Int8} :
|
||||
theorem succMany?_eq_maxValueSealed {i : Int8} :
|
||||
UpwardEnumerable.succMany? n i =
|
||||
have := i.minValue_le_toInt
|
||||
if h : i.toInt + n ≤ maxValueSealed.toInt then some (.ofIntLE _ (by omega) (maxValueSealed_def ▸ h)) else none :=
|
||||
@@ -605,12 +605,12 @@ theorem minValueSealed_def : minValueSealed = ISize.minValue := (rfl)
|
||||
theorem maxValueSealed_def : maxValueSealed = ISize.maxValue := (rfl)
|
||||
seal minValueSealed maxValueSealed
|
||||
|
||||
private theorem toBitVec_minValueSealed_eq_intMinSealed :
|
||||
theorem toBitVec_minValueSealed_eq_intMinSealed :
|
||||
minValueSealed.toBitVec = BitVec.Signed.intMinSealed System.Platform.numBits := by
|
||||
rw [minValueSealed_def, BitVec.Signed.intMinSealed_def, toBitVec_minValue]
|
||||
have := System.Platform.numBits_eq; generalize System.Platform.numBits = a at this ⊢
|
||||
rcases this with rfl | rfl <;> rfl
|
||||
private theorem toBitVec_maxValueSealed_eq_intMaxSealed :
|
||||
theorem toBitVec_maxValueSealed_eq_intMaxSealed :
|
||||
maxValueSealed.toBitVec = BitVec.Signed.intMaxSealed System.Platform.numBits := by
|
||||
rw [maxValueSealed_def, BitVec.Signed.intMaxSealed_def, toBitVec_maxValue]
|
||||
have := System.Platform.numBits_eq; generalize System.Platform.numBits = a at this ⊢
|
||||
|
||||
@@ -233,7 +233,7 @@ public theorem Subarray.toList_eq {xs : Subarray α} :
|
||||
simp [this, ListSlice.toList_eq, lslice]
|
||||
|
||||
-- TODO: The current `List.extract_eq_drop_take` should be called `List.extract_eq_take_drop`
|
||||
private theorem Std.Internal.List.extract_eq_drop_take' {l : List α} {start stop : Nat} :
|
||||
theorem Std.Internal.List.extract_eq_drop_take' {l : List α} {start stop : Nat} :
|
||||
l.extract start stop = (l.take stop).drop start := by
|
||||
simp [List.take_drop]
|
||||
by_cases start ≤ stop
|
||||
|
||||
@@ -94,7 +94,7 @@ public def String.utf8EncodeCharFast (c : Char) : List UInt8 :=
|
||||
(v >>> 6).toUInt8 &&& 0x3f ||| 0x80,
|
||||
v.toUInt8 &&& 0x3f ||| 0x80]
|
||||
|
||||
private theorem Nat.add_two_pow_eq_or_of_lt {b : Nat} (i : Nat) (b_lt : b < 2 ^ i) (a : Nat) :
|
||||
theorem Nat.add_two_pow_eq_or_of_lt {b : Nat} (i : Nat) (b_lt : b < 2 ^ i) (a : Nat) :
|
||||
b + 2 ^ i * a = b ||| 2 ^ i * a := by
|
||||
rw [Nat.add_comm, Nat.or_comm, Nat.two_pow_add_eq_or_of_lt b_lt]
|
||||
|
||||
|
||||
@@ -56,11 +56,11 @@ def markSparseCasesOn (env : Environment) (declName : Name) : Environment :=
|
||||
sparseCasesOnExt.tag env declName
|
||||
|
||||
/-- Is this a constructor elimination or a sparse casesOn? -/
|
||||
public def isSparseCasesOn (env : Environment) (declName : Name) : Bool :=
|
||||
def isSparseCasesOn (env : Environment) (declName : Name) : Bool :=
|
||||
sparseCasesOnExt.isTagged env declName
|
||||
|
||||
/-- Is this a `.casesOn`, a constructor elimination or a sparse casesOn? -/
|
||||
public def isCasesOnLike (env : Environment) (declName : Name) : Bool :=
|
||||
def isCasesOnLike (env : Environment) (declName : Name) : Bool :=
|
||||
isCasesOnRecursor env declName || isSparseCasesOn env declName
|
||||
|
||||
/--
|
||||
|
||||
@@ -774,7 +774,7 @@ where
|
||||
|
||||
mutual
|
||||
|
||||
private partial def emitBasicBlock (code : Code .impure) : EmitM Unit := do
|
||||
partial def emitBasicBlock (code : Code .impure) : EmitM Unit := do
|
||||
match code with
|
||||
| .jp (k := k) .. => emitBasicBlock k
|
||||
| .let decl k =>
|
||||
@@ -896,7 +896,7 @@ where
|
||||
emitUnreach : EmitM Unit := do
|
||||
emitLn "lean_internal_panic_unreachable();"
|
||||
|
||||
private partial def emitJoinPoints (code : Code .impure) : EmitM Unit := do
|
||||
partial def emitJoinPoints (code : Code .impure) : EmitM Unit := do
|
||||
match code with
|
||||
| .jp decl k =>
|
||||
emit decl.binderName; emitLn ":"
|
||||
@@ -906,7 +906,7 @@ private partial def emitJoinPoints (code : Code .impure) : EmitM Unit := do
|
||||
| .sset (k := k) .. | .uset (k := k) .. | .oset (k := k) .. => emitJoinPoints k
|
||||
| .cases .. | .return .. | .jmp .. | .unreach .. => return ()
|
||||
|
||||
private partial def emitCode (code : Code .impure) : EmitM Unit := do
|
||||
partial def emitCode (code : Code .impure) : EmitM Unit := do
|
||||
withEmitBlock do
|
||||
let declared ← declareVars code
|
||||
if declared then emitLn ""
|
||||
|
||||
@@ -12,7 +12,7 @@ import Lean.Compiler.InitAttr
|
||||
|
||||
namespace Lean.Compiler.LCNF
|
||||
|
||||
private structure CollectUsedDeclsState where
|
||||
structure CollectUsedDeclsState where
|
||||
visited : NameSet := {}
|
||||
localDecls : Array (Decl .impure) := #[]
|
||||
extSigs : Array (Signature .impure) := #[]
|
||||
|
||||
@@ -29,7 +29,7 @@ public def mkOrderedDeclSetExt : IO (EnvExtension (List Name × NameSet)) :=
|
||||
/--
|
||||
Set of declarations to be exported to other modules; visibility shared by base/mono/IR phases.
|
||||
-/
|
||||
private builtin_initialize publicDeclsExt : EnvExtension (List Name × NameSet) ← mkOrderedDeclSetExt
|
||||
builtin_initialize publicDeclsExt : EnvExtension (List Name × NameSet) ← mkOrderedDeclSetExt
|
||||
|
||||
public def isDeclPublic (env : Environment) (declName : Name) : Bool := Id.run do
|
||||
if !env.header.isModule then
|
||||
|
||||
@@ -142,10 +142,10 @@ partial def Code.toExprM (code : Code pu) : ToExprM Expr := do
|
||||
return .letE `dummy (mkConst ``Unit) value body true
|
||||
end
|
||||
|
||||
public def Code.toExpr (code : Code pu) (xs : Array FVarId := #[]) : Expr :=
|
||||
def Code.toExpr (code : Code pu) (xs : Array FVarId := #[]) : Expr :=
|
||||
run' code.toExprM xs
|
||||
|
||||
public def FunDecl.toExpr (decl : FunDecl pu) (xs : Array FVarId := #[]) : Expr :=
|
||||
def FunDecl.toExpr (decl : FunDecl pu) (xs : Array FVarId := #[]) : Expr :=
|
||||
run' decl.toExprM xs
|
||||
|
||||
end Lean.Compiler.LCNF
|
||||
|
||||
@@ -213,11 +213,22 @@ structure Context where
|
||||
-/
|
||||
expectedType : Option Expr
|
||||
|
||||
/--
|
||||
Key for the LCNF translation cache. `ignoreNoncomputable` is part of the key
|
||||
because entries cached in irrelevant positions skip the `checkComputable`
|
||||
check and must not be reused in relevant positions.
|
||||
-/
|
||||
structure CacheKey where
|
||||
expr : Expr
|
||||
expectedType? : Option Expr
|
||||
ignoreNoncomputable : Bool
|
||||
deriving BEq, Hashable
|
||||
|
||||
structure State where
|
||||
/-- Local context containing the original Lean types (not LCNF ones). -/
|
||||
lctx : LocalContext := {}
|
||||
/-- Cache from Lean regular expression to LCNF argument. -/
|
||||
cache : PHashMap (Expr × Option Expr) (Arg .pure) := {}
|
||||
cache : PHashMap CacheKey (Arg .pure) := {}
|
||||
/--
|
||||
Determines whether caching has been disabled due to finding a use of
|
||||
a constant marked with `never_extract`.
|
||||
@@ -473,7 +484,9 @@ partial def toLCNF (e : Expr) (eType : Expr) : CompilerM (Code .pure) := do
|
||||
where
|
||||
visitCore (e : Expr) : M (Arg .pure) := withIncRecDepth do
|
||||
let eType? := (← read).expectedType
|
||||
if let some arg := (← get).cache.find? (e, eType?) then
|
||||
let ignoreNoncomputable := (← read).ignoreNoncomputable
|
||||
let key : CacheKey := { expr := e, expectedType? := eType?, ignoreNoncomputable }
|
||||
if let some arg := (← get).cache.find? key then
|
||||
return arg
|
||||
let r : Arg .pure ← match e with
|
||||
| .app .. => visitApp e
|
||||
@@ -485,7 +498,7 @@ where
|
||||
| .lit lit => visitLit lit
|
||||
| .fvar fvarId => if (← get).toAny.contains fvarId then pure .erased else pure (.fvar fvarId)
|
||||
| .forallE .. | .mvar .. | .bvar .. | .sort .. => unreachable!
|
||||
modify fun s => if s.shouldCache then { s with cache := s.cache.insert (e, eType?) r } else s
|
||||
modify fun s => if s.shouldCache then { s with cache := s.cache.insert key r } else s
|
||||
return r
|
||||
|
||||
visit (e : Expr) : M (Arg .pure) := withIncRecDepth do
|
||||
|
||||
@@ -37,7 +37,7 @@ public def getStateByIdx? [Inhabited σ] (ext : ModuleEnvExtension σ) (env : En
|
||||
|
||||
end ModuleEnvExtension
|
||||
|
||||
private initialize modPkgExt : ModuleEnvExtension (Option PkgId) ←
|
||||
initialize modPkgExt : ModuleEnvExtension (Option PkgId) ←
|
||||
registerModuleEnvExtension (pure none)
|
||||
|
||||
/-- Returns the package (if any) of an imported module (by its index). -/
|
||||
|
||||
@@ -20,13 +20,13 @@ line parsing. Called from the C runtime via `@[export]` for backtrace display. -
|
||||
namespace Lean.Name.Demangle
|
||||
|
||||
/-- `String.dropPrefix?` returning a `String` instead of a `Slice`. -/
|
||||
private def dropPrefix? (s : String) (pre : String) : Option String :=
|
||||
def dropPrefix? (s : String) (pre : String) : Option String :=
|
||||
(s.dropPrefix? pre).map (·.toString)
|
||||
|
||||
private def isAllDigits (s : String) : Bool :=
|
||||
def isAllDigits (s : String) : Bool :=
|
||||
!s.isEmpty && s.all (·.isDigit)
|
||||
|
||||
private def nameToNameParts (n : Name) : Array NamePart :=
|
||||
def nameToNameParts (n : Name) : Array NamePart :=
|
||||
go n [] |>.toArray
|
||||
where
|
||||
go : Name → List NamePart → List NamePart
|
||||
@@ -34,17 +34,17 @@ where
|
||||
| .str pre s, acc => go pre (NamePart.str s :: acc)
|
||||
| .num pre n, acc => go pre (NamePart.num n :: acc)
|
||||
|
||||
private def namePartsToName (parts : Array NamePart) : Name :=
|
||||
def namePartsToName (parts : Array NamePart) : Name :=
|
||||
parts.foldl (fun acc p =>
|
||||
match p with
|
||||
| .str s => acc.mkStr s
|
||||
| .num n => acc.mkNum n) .anonymous
|
||||
|
||||
/-- Format name parts using `Name.toString` for correct escaping. -/
|
||||
private def formatNameParts (comps : Array NamePart) : String :=
|
||||
def formatNameParts (comps : Array NamePart) : String :=
|
||||
if comps.isEmpty then "" else (namePartsToName comps).toString
|
||||
|
||||
private def matchSuffix (c : NamePart) : Option String :=
|
||||
def matchSuffix (c : NamePart) : Option String :=
|
||||
match c with
|
||||
| NamePart.str s =>
|
||||
if s == "_redArg" then some "arity↓"
|
||||
@@ -58,12 +58,12 @@ private def matchSuffix (c : NamePart) : Option String :=
|
||||
else none
|
||||
| _ => none
|
||||
|
||||
private def isSpecIndex (c : NamePart) : Bool :=
|
||||
def isSpecIndex (c : NamePart) : Bool :=
|
||||
match c with
|
||||
| NamePart.str s => (dropPrefix? s "spec_").any isAllDigits
|
||||
| _ => false
|
||||
|
||||
private def stripPrivate (comps : Array NamePart) (start stop : Nat) :
|
||||
def stripPrivate (comps : Array NamePart) (start stop : Nat) :
|
||||
Nat × Bool :=
|
||||
if stop - start >= 3 && comps[start]? == some (NamePart.str "_private") then
|
||||
Id.run do
|
||||
@@ -75,11 +75,11 @@ private def stripPrivate (comps : Array NamePart) (start stop : Nat) :
|
||||
else
|
||||
(start, false)
|
||||
|
||||
private structure SpecEntry where
|
||||
structure SpecEntry where
|
||||
name : String
|
||||
flags : Array String
|
||||
|
||||
private def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
|
||||
def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
|
||||
let mut begin_ := 0
|
||||
if comps.size >= 3 && comps[0]? == some (NamePart.str "_private") then
|
||||
for i in [1:comps.size] do
|
||||
@@ -99,7 +99,7 @@ private def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
|
||||
else parts := parts.push c
|
||||
{ name := formatNameParts parts, flags }
|
||||
|
||||
private def postprocessNameParts (components : Array NamePart) : String := Id.run do
|
||||
def postprocessNameParts (components : Array NamePart) : String := Id.run do
|
||||
if components.isEmpty then return ""
|
||||
|
||||
let (privStart, isPrivate) := stripPrivate components 0 components.size
|
||||
@@ -206,14 +206,14 @@ private def postprocessNameParts (components : Array NamePart) : String := Id.ru
|
||||
|
||||
return result
|
||||
|
||||
private def demangleBody (body : String) : String :=
|
||||
def demangleBody (body : String) : String :=
|
||||
let name := Name.demangle body
|
||||
postprocessNameParts (nameToNameParts name)
|
||||
|
||||
/-- Split a `lp_`-prefixed symbol into (demangled body, package name).
|
||||
Tries each underscore as a split point; the first valid split (shortest single-component
|
||||
package where the remainder is a valid mangled name) is correct. -/
|
||||
private def demangleWithPkg (s : String) : Option (String × String) := do
|
||||
def demangleWithPkg (s : String) : Option (String × String) := do
|
||||
for ⟨pos, h⟩ in s.positions do
|
||||
if pos.get h == '_' && pos ≠ s.startPos then
|
||||
let nextPos := pos.next h
|
||||
@@ -230,12 +230,12 @@ private def demangleWithPkg (s : String) : Option (String × String) := do
|
||||
return (demangleBody body, pkgName)
|
||||
none
|
||||
|
||||
private def stripColdSuffix (s : String) : String × String :=
|
||||
def stripColdSuffix (s : String) : String × String :=
|
||||
match s.find? ".cold" with
|
||||
| some pos => (s.extract s.startPos pos, s.extract pos s.endPos)
|
||||
| none => (s, "")
|
||||
|
||||
private def demangleCore (s : String) : Option String := do
|
||||
def demangleCore (s : String) : Option String := do
|
||||
-- _init_l_
|
||||
if let some body := dropPrefix? s "_init_l_" then
|
||||
if !body.isEmpty then return s!"[init] {demangleBody body}"
|
||||
@@ -291,17 +291,17 @@ public def demangleSymbol (symbol : String) : Option String := do
|
||||
if coldSuffix.isEmpty then return result
|
||||
else return s!"{result} {coldSuffix}"
|
||||
|
||||
private def skipWhile (s : String) (pos : s.Pos) (pred : Char → Bool) : s.Pos :=
|
||||
def skipWhile (s : String) (pos : s.Pos) (pred : Char → Bool) : s.Pos :=
|
||||
if h : pos = s.endPos then pos
|
||||
else if pred (pos.get h) then skipWhile s (pos.next h) pred
|
||||
else pos
|
||||
termination_by pos
|
||||
|
||||
private def splitAt₂ (s : String) (p₁ p₂ : s.Pos) : String × String × String :=
|
||||
def splitAt₂ (s : String) (p₁ p₂ : s.Pos) : String × String × String :=
|
||||
(s.extract s.startPos p₁, s.extract p₁ p₂, s.extract p₂ s.endPos)
|
||||
|
||||
/-- Extract the symbol from a backtrace line (Linux glibc or macOS format). -/
|
||||
private def extractSymbol (line : String) :
|
||||
def extractSymbol (line : String) :
|
||||
Option (String × String × String) :=
|
||||
tryLinux line |>.orElse (fun _ => tryMacOS line)
|
||||
where
|
||||
|
||||
@@ -400,7 +400,7 @@ Namely:
|
||||
def parseMessageMetaData (input : String) : Except String MessageMetaData :=
|
||||
messageMetaDataParser input |>.run input
|
||||
|
||||
public inductive MessageDirection where
|
||||
inductive MessageDirection where
|
||||
| clientToServer
|
||||
| serverToClient
|
||||
deriving Inhabited, FromJson, ToJson
|
||||
|
||||
@@ -43,14 +43,14 @@ public structure State where
|
||||
/-- Footnotes -/
|
||||
footnotes : Array (String × String) := #[]
|
||||
|
||||
private def combineBlocks (prior current : String) :=
|
||||
def combineBlocks (prior current : String) :=
|
||||
if prior.isEmpty then current
|
||||
else if current.isEmpty then prior
|
||||
else if prior.endsWith "\n\n" then prior ++ current
|
||||
else if prior.endsWith "\n" then prior ++ "\n" ++ current
|
||||
else prior ++ "\n\n" ++ current
|
||||
|
||||
private def State.endBlock (state : State) : State :=
|
||||
def State.endBlock (state : State) : State :=
|
||||
{ state with
|
||||
priorBlocks :=
|
||||
combineBlocks state.priorBlocks state.currentBlock ++
|
||||
@@ -60,13 +60,13 @@ private def State.endBlock (state : State) : State :=
|
||||
footnotes := #[]
|
||||
}
|
||||
|
||||
private def State.render (state : State) : String :=
|
||||
def State.render (state : State) : String :=
|
||||
state.endBlock.priorBlocks
|
||||
|
||||
private def State.push (state : State) (txt : String) : State :=
|
||||
def State.push (state : State) (txt : String) : State :=
|
||||
{ state with currentBlock := state.currentBlock ++ txt }
|
||||
|
||||
private def State.endsWith (state : State) (txt : String) : Bool :=
|
||||
def State.endsWith (state : State) (txt : String) : Bool :=
|
||||
state.currentBlock.endsWith txt || (state.currentBlock.isEmpty && state.priorBlocks.endsWith txt)
|
||||
|
||||
end MarkdownM
|
||||
@@ -150,7 +150,7 @@ public class MarkdownBlock (i : Type u) (b : Type v) where
|
||||
public instance : MarkdownBlock i Empty where
|
||||
toMarkdown := nofun
|
||||
|
||||
private def escape (s : String) : String := Id.run do
|
||||
def escape (s : String) : String := Id.run do
|
||||
let mut s' := ""
|
||||
let mut iter := s.startPos
|
||||
while h : ¬iter.IsAtEnd do
|
||||
@@ -163,7 +163,7 @@ private def escape (s : String) : String := Id.run do
|
||||
where
|
||||
isSpecial c := "*_`-+.!<>[]{}()#".any (· == c)
|
||||
|
||||
private def quoteCode (str : String) : String := Id.run do
|
||||
def quoteCode (str : String) : String := Id.run do
|
||||
let mut longest := 0
|
||||
let mut current := 0
|
||||
let mut iter := str.startPos
|
||||
@@ -179,7 +179,7 @@ private def quoteCode (str : String) : String := Id.run do
|
||||
let str := if str.startsWith "`" || str.endsWith "`" then " " ++ str ++ " " else str
|
||||
backticks ++ str ++ backticks
|
||||
|
||||
private partial def trimLeft (inline : Inline i) : (String × Inline i) := go [inline]
|
||||
partial def trimLeft (inline : Inline i) : (String × Inline i) := go [inline]
|
||||
where
|
||||
go : List (Inline i) → String × Inline i
|
||||
| [] => ("", .empty)
|
||||
@@ -194,7 +194,7 @@ where
|
||||
| .concat xs :: more => go (xs.toList ++ more)
|
||||
| here :: more => ("", here ++ .concat more.toArray)
|
||||
|
||||
private partial def trimRight (inline : Inline i) : (Inline i × String) := go [inline]
|
||||
partial def trimRight (inline : Inline i) : (Inline i × String) := go [inline]
|
||||
where
|
||||
go : List (Inline i) → Inline i × String
|
||||
| [] => (.empty, "")
|
||||
@@ -209,13 +209,13 @@ where
|
||||
| .concat xs :: more => go (xs.reverse.toList ++ more)
|
||||
| here :: more => (.concat more.toArray.reverse ++ here, "")
|
||||
|
||||
private def trim (inline : Inline i) : (String × Inline i × String) :=
|
||||
def trim (inline : Inline i) : (String × Inline i × String) :=
|
||||
let (pre, more) := trimLeft inline
|
||||
let (mid, post) := trimRight more
|
||||
(pre, mid, post)
|
||||
|
||||
open MarkdownM in
|
||||
private partial def inlineMarkdown [MarkdownInline i] : Inline i → MarkdownM Unit
|
||||
partial def inlineMarkdown [MarkdownInline i] : Inline i → MarkdownM Unit
|
||||
| .text s =>
|
||||
push (escape s)
|
||||
| .linebreak s => do
|
||||
@@ -271,7 +271,7 @@ private partial def inlineMarkdown [MarkdownInline i] : Inline i → MarkdownM U
|
||||
public instance [MarkdownInline i] : ToMarkdown (Inline i) where
|
||||
toMarkdown inline := private inlineMarkdown inline
|
||||
|
||||
private def quoteCodeBlock (indent : Nat) (str : String) : String := Id.run do
|
||||
def quoteCodeBlock (indent : Nat) (str : String) : String := Id.run do
|
||||
let mut longest := 2
|
||||
let mut current := 0
|
||||
let mut iter := str.startPos
|
||||
@@ -292,7 +292,7 @@ private def quoteCodeBlock (indent : Nat) (str : String) : String := Id.run do
|
||||
backticks ++ "\n" ++ out ++ backticks ++ "\n"
|
||||
|
||||
open MarkdownM in
|
||||
private partial def blockMarkdown [MarkdownInline i] [MarkdownBlock i b] : Block i b → MarkdownM Unit
|
||||
partial def blockMarkdown [MarkdownInline i] [MarkdownBlock i b] : Block i b → MarkdownM Unit
|
||||
| .para xs => do
|
||||
for i in xs do
|
||||
ToMarkdown.toMarkdown i
|
||||
@@ -345,7 +345,7 @@ public instance [MarkdownInline i] [MarkdownBlock i b] : ToMarkdown (Block i b)
|
||||
|
||||
open MarkdownM in
|
||||
open ToMarkdown in
|
||||
private partial def partMarkdown [MarkdownInline i] [MarkdownBlock i b] (level : Nat) (part : Part i b p) : MarkdownM Unit := do
|
||||
partial def partMarkdown [MarkdownInline i] [MarkdownBlock i b] (level : Nat) (part : Part i b p) : MarkdownM Unit := do
|
||||
push ("".pushn '#' (level + 1))
|
||||
push " "
|
||||
for i in part.title do
|
||||
|
||||
@@ -18,7 +18,7 @@ open Lean.Doc.Syntax
|
||||
local instance : Coe Char ParserFn where
|
||||
coe := chFn
|
||||
|
||||
private partial def atLeastAux (n : Nat) (p : ParserFn) : ParserFn := fun c s => Id.run do
|
||||
partial def atLeastAux (n : Nat) (p : ParserFn) : ParserFn := fun c s => Id.run do
|
||||
let iniSz := s.stackSize
|
||||
let iniPos := s.pos
|
||||
let mut s := p c s
|
||||
@@ -30,7 +30,7 @@ private partial def atLeastAux (n : Nat) (p : ParserFn) : ParserFn := fun c s =>
|
||||
s := s.mkNode nullKind iniSz
|
||||
atLeastAux (n - 1) p c s
|
||||
|
||||
private def atLeastFn (n : Nat) (p : ParserFn) : ParserFn := fun c s =>
|
||||
def atLeastFn (n : Nat) (p : ParserFn) : ParserFn := fun c s =>
|
||||
let iniSz := s.stackSize
|
||||
let s := atLeastAux n p c s
|
||||
s.mkNode nullKind iniSz
|
||||
@@ -40,9 +40,9 @@ A parser that does nothing.
|
||||
-/
|
||||
public def skipFn : ParserFn := fun _ s => s
|
||||
|
||||
private def eatSpaces := takeWhileFn (· == ' ')
|
||||
def eatSpaces := takeWhileFn (· == ' ')
|
||||
|
||||
private def repFn : Nat → ParserFn → ParserFn
|
||||
def repFn : Nat → ParserFn → ParserFn
|
||||
| 0, _ => skipFn
|
||||
| n+1, p => p >> repFn n p
|
||||
|
||||
@@ -55,7 +55,7 @@ partial def satisfyFn' (p : Char → Bool)
|
||||
else if p (c.get' i h) then s.next' c i h
|
||||
else s.mkUnexpectedError errorMsg
|
||||
|
||||
private partial def atMostAux (n : Nat) (p : ParserFn) (msg : String) : ParserFn :=
|
||||
partial def atMostAux (n : Nat) (p : ParserFn) (msg : String) : ParserFn :=
|
||||
fun c s => Id.run do
|
||||
let iniSz := s.stackSize
|
||||
let iniPos := s.pos
|
||||
@@ -70,13 +70,13 @@ private partial def atMostAux (n : Nat) (p : ParserFn) (msg : String) : ParserFn
|
||||
s := s.mkNode nullKind iniSz
|
||||
atMostAux (n - 1) p msg c s
|
||||
|
||||
private def atMostFn (n : Nat) (p : ParserFn) (msg : String) : ParserFn := fun c s =>
|
||||
def atMostFn (n : Nat) (p : ParserFn) (msg : String) : ParserFn := fun c s =>
|
||||
let iniSz := s.stackSize
|
||||
let s := atMostAux n p msg c s
|
||||
s.mkNode nullKind iniSz
|
||||
|
||||
/-- Like `satisfyFn`, but allows any escape sequence through -/
|
||||
private partial def satisfyEscFn (p : Char → Bool)
|
||||
partial def satisfyEscFn (p : Char → Bool)
|
||||
(errorMsg : String := "unexpected character") :
|
||||
ParserFn := fun c s =>
|
||||
let i := s.pos
|
||||
@@ -89,7 +89,7 @@ private partial def satisfyEscFn (p : Char → Bool)
|
||||
else if p (c.get' i h) then s.next' c i h
|
||||
else s.mkUnexpectedError errorMsg
|
||||
|
||||
private partial def takeUntilEscFn (p : Char → Bool) : ParserFn := fun c s =>
|
||||
partial def takeUntilEscFn (p : Char → Bool) : ParserFn := fun c s =>
|
||||
let i := s.pos
|
||||
if h : c.atEnd i then s
|
||||
else if c.get' i h == '\\' then
|
||||
@@ -100,7 +100,7 @@ private partial def takeUntilEscFn (p : Char → Bool) : ParserFn := fun c s =>
|
||||
else if p (c.get' i h) then s
|
||||
else takeUntilEscFn p c (s.next' c i h)
|
||||
|
||||
private partial def takeWhileEscFn (p : Char → Bool) : ParserFn := takeUntilEscFn (not ∘ p)
|
||||
partial def takeWhileEscFn (p : Char → Bool) : ParserFn := takeUntilEscFn (not ∘ p)
|
||||
|
||||
/--
|
||||
Parses as `p`, but discards the result.
|
||||
@@ -111,7 +111,7 @@ public def ignoreFn (p : ParserFn) : ParserFn := fun c s =>
|
||||
s'.shrinkStack iniSz
|
||||
|
||||
|
||||
private def withInfoSyntaxFn (p : ParserFn) (infoP : SourceInfo → ParserFn) : ParserFn := fun c s =>
|
||||
def withInfoSyntaxFn (p : ParserFn) (infoP : SourceInfo → ParserFn) : ParserFn := fun c s =>
|
||||
let iniSz := s.stxStack.size
|
||||
let startPos := s.pos
|
||||
let s := p c s
|
||||
@@ -121,7 +121,7 @@ private def withInfoSyntaxFn (p : ParserFn) (infoP : SourceInfo → ParserFn) :
|
||||
let info := SourceInfo.original leading startPos trailing stopPos
|
||||
infoP info c (s.shrinkStack iniSz)
|
||||
|
||||
private def unescapeStr (str : String) : String := Id.run do
|
||||
def unescapeStr (str : String) : String := Id.run do
|
||||
let mut out := ""
|
||||
let mut iter := str.startPos
|
||||
while h : ¬iter.IsAtEnd do
|
||||
@@ -135,7 +135,7 @@ private def unescapeStr (str : String) : String := Id.run do
|
||||
out := out.push c
|
||||
out
|
||||
|
||||
private def asStringAux (quoted : Bool) (startPos : String.Pos.Raw) (transform : String → String) :
|
||||
def asStringAux (quoted : Bool) (startPos : String.Pos.Raw) (transform : String → String) :
|
||||
ParserFn := fun c s =>
|
||||
let stopPos := s.pos
|
||||
let leading := c.mkEmptySubstringAt startPos
|
||||
@@ -156,26 +156,26 @@ public def asStringFn (p : ParserFn) (quoted := false) (transform : String → S
|
||||
if s.hasError then s
|
||||
else asStringAux quoted startPos transform c (s.shrinkStack iniSz)
|
||||
|
||||
private def checkCol0Fn (errorMsg : String) : ParserFn := fun c s =>
|
||||
def checkCol0Fn (errorMsg : String) : ParserFn := fun c s =>
|
||||
let pos := c.fileMap.toPosition s.pos
|
||||
if pos.column = 1 then s
|
||||
else s.mkError errorMsg
|
||||
|
||||
private def _root_.Lean.Parser.ParserContext.currentColumn
|
||||
def _root_.Lean.Parser.ParserContext.currentColumn
|
||||
(c : ParserContext) (s : ParserState) : Nat :=
|
||||
c.fileMap.toPosition s.pos |>.column
|
||||
|
||||
private def pushColumn : ParserFn := fun c s =>
|
||||
def pushColumn : ParserFn := fun c s =>
|
||||
let col := c.fileMap.toPosition s.pos |>.column
|
||||
s.pushSyntax <| Syntax.mkLit `column (toString col) (SourceInfo.synthetic s.pos s.pos)
|
||||
|
||||
private def guardColumn (p : Nat → Bool) (message : String) : ParserFn := fun c s =>
|
||||
def guardColumn (p : Nat → Bool) (message : String) : ParserFn := fun c s =>
|
||||
if p (c.currentColumn s) then s else s.mkErrorAt message s.pos
|
||||
|
||||
private def guardMinColumn (min : Nat) (description : String := s!"expected column at least {min}") : ParserFn :=
|
||||
def guardMinColumn (min : Nat) (description : String := s!"expected column at least {min}") : ParserFn :=
|
||||
guardColumn (· ≥ min) description
|
||||
|
||||
private def withCurrentColumn (p : Nat → ParserFn) : ParserFn := fun c s =>
|
||||
def withCurrentColumn (p : Nat → ParserFn) : ParserFn := fun c s =>
|
||||
p (c.currentColumn s) c s
|
||||
|
||||
|
||||
@@ -183,7 +183,7 @@ private def withCurrentColumn (p : Nat → ParserFn) : ParserFn := fun c s =>
|
||||
We can only start a nestable block if we're immediately after a newline followed by a sequence of
|
||||
nestable block openers
|
||||
-/
|
||||
private def onlyBlockOpeners : ParserFn := fun c s =>
|
||||
def onlyBlockOpeners : ParserFn := fun c s =>
|
||||
let position := c.fileMap.toPosition s.pos
|
||||
let lineStart := c.fileMap.lineStart position.line
|
||||
let ok : Bool := Id.run do
|
||||
@@ -206,7 +206,7 @@ private def onlyBlockOpeners : ParserFn := fun c s =>
|
||||
if ok then s
|
||||
else s.mkErrorAt "beginning of line or sequence of nestable block openers" s.pos
|
||||
|
||||
private def nl := satisfyFn (· == '\n') "newline"
|
||||
def nl := satisfyFn (· == '\n') "newline"
|
||||
|
||||
/--
|
||||
Construct a “fake” atom with the given string content and source information.
|
||||
@@ -225,13 +225,13 @@ current position.
|
||||
Normally, atoms are always substrings of the original input; however, Verso's concrete syntax is
|
||||
different enough from Lean's that this isn't always a good match.
|
||||
-/
|
||||
private def fakeAtomHere (str : String) : ParserFn :=
|
||||
def fakeAtomHere (str : String) : ParserFn :=
|
||||
withInfoSyntaxFn skip.fn (fun info => fakeAtom str (info := info))
|
||||
|
||||
private def pushMissing : ParserFn := fun _c s =>
|
||||
def pushMissing : ParserFn := fun _c s =>
|
||||
s.pushSyntax .missing
|
||||
|
||||
private def strFn (str : String) : ParserFn := asStringFn <| fun c s =>
|
||||
def strFn (str : String) : ParserFn := asStringFn <| fun c s =>
|
||||
let rec go (iter : str.Pos) (s : ParserState) :=
|
||||
if h : iter.IsAtEnd then s
|
||||
else
|
||||
@@ -260,10 +260,10 @@ public instance : Ord OrderedListType where
|
||||
| .parenAfter, .numDot => .gt
|
||||
| .parenAfter, .parenAfter => .eq
|
||||
|
||||
private def OrderedListType.all : List OrderedListType :=
|
||||
def OrderedListType.all : List OrderedListType :=
|
||||
[.numDot, .parenAfter]
|
||||
|
||||
private theorem OrderedListType.all_complete : ∀ x : OrderedListType, x ∈ all := by
|
||||
theorem OrderedListType.all_complete : ∀ x : OrderedListType, x ∈ all := by
|
||||
unfold all; intro x; cases x <;> repeat constructor
|
||||
|
||||
/--
|
||||
@@ -288,40 +288,40 @@ public instance : Ord UnorderedListType where
|
||||
| .plus, .plus => .eq
|
||||
| .plus, _ => .gt
|
||||
|
||||
private def UnorderedListType.all : List UnorderedListType :=
|
||||
def UnorderedListType.all : List UnorderedListType :=
|
||||
[.asterisk, .dash, .plus]
|
||||
|
||||
private theorem UnorderedListType.all_complete : ∀ x : UnorderedListType, x ∈ all := by
|
||||
theorem UnorderedListType.all_complete : ∀ x : UnorderedListType, x ∈ all := by
|
||||
unfold all; intro x; cases x <;> repeat constructor
|
||||
|
||||
private def unorderedListIndicator (type : UnorderedListType) : ParserFn :=
|
||||
def unorderedListIndicator (type : UnorderedListType) : ParserFn :=
|
||||
asStringFn <|
|
||||
match type with
|
||||
| .asterisk => chFn '*'
|
||||
| .dash => chFn '-'
|
||||
| .plus => chFn '+'
|
||||
|
||||
private def orderedListIndicator (type : OrderedListType) : ParserFn :=
|
||||
def orderedListIndicator (type : OrderedListType) : ParserFn :=
|
||||
asStringFn <|
|
||||
takeWhile1Fn (·.isDigit) "digits" >>
|
||||
match type with
|
||||
| .numDot => chFn '.'
|
||||
| .parenAfter => chFn ')'
|
||||
|
||||
private def blankLine : ParserFn :=
|
||||
def blankLine : ParserFn :=
|
||||
nodeFn `blankLine <| atomicFn <| asStringFn <| takeWhileFn (· == ' ') >> nl
|
||||
|
||||
private def endLine : ParserFn :=
|
||||
def endLine : ParserFn :=
|
||||
ignoreFn <| atomicFn <| asStringFn <| takeWhileFn (· == ' ') >> eoiFn
|
||||
|
||||
private def bullet := atomicFn (go UnorderedListType.all)
|
||||
def bullet := atomicFn (go UnorderedListType.all)
|
||||
where
|
||||
go
|
||||
| [] => fun _ s => s.mkError "no list type"
|
||||
| [x] => atomicFn (unorderedListIndicator x)
|
||||
| x :: xs => atomicFn (unorderedListIndicator x) <|> go xs
|
||||
|
||||
private def numbering := atomicFn (go OrderedListType.all)
|
||||
def numbering := atomicFn (go OrderedListType.all)
|
||||
where
|
||||
go
|
||||
| [] => fun _ s => s.mkError "no list type"
|
||||
@@ -374,7 +374,7 @@ public def inlineTextChar : ParserFn := fun c s =>
|
||||
/-- Return some inline text up to the next inline opener or the end of
|
||||
the line, whichever is first. Always consumes at least one
|
||||
logical character on success, taking escaping into account. -/
|
||||
private def inlineText : ParserFn :=
|
||||
def inlineText : ParserFn :=
|
||||
asStringFn (transform := unescapeStr) <| atomicFn inlineTextChar >> manyFn inlineTextChar
|
||||
|
||||
/--
|
||||
@@ -410,23 +410,23 @@ public def val : ParserFn := fun c s =>
|
||||
else
|
||||
s.mkError "expected identifier, string, or number"
|
||||
|
||||
private def withCurrentStackSize (p : Nat → ParserFn) : ParserFn := fun c s =>
|
||||
def withCurrentStackSize (p : Nat → ParserFn) : ParserFn := fun c s =>
|
||||
p s.stxStack.size c s
|
||||
|
||||
/-- Match the character indicated, pushing nothing to the stack in case of success -/
|
||||
private def skipChFn (c : Char) : ParserFn :=
|
||||
def skipChFn (c : Char) : ParserFn :=
|
||||
satisfyFn (· == c) c.toString
|
||||
|
||||
private def skipToNewline : ParserFn :=
|
||||
def skipToNewline : ParserFn :=
|
||||
takeUntilFn (· == '\n')
|
||||
|
||||
private def skipToSpace : ParserFn :=
|
||||
def skipToSpace : ParserFn :=
|
||||
takeUntilFn (· == ' ')
|
||||
|
||||
private def skipRestOfLine : ParserFn :=
|
||||
def skipRestOfLine : ParserFn :=
|
||||
skipToNewline >> (eoiFn <|> nl)
|
||||
|
||||
private def skipBlock : ParserFn :=
|
||||
def skipBlock : ParserFn :=
|
||||
skipToNewline >> manyFn nonEmptyLine >> takeWhileFn (· == '\n')
|
||||
where
|
||||
nonEmptyLine : ParserFn :=
|
||||
@@ -444,7 +444,7 @@ public def recoverBlock (p : ParserFn) (final : ParserFn := skipFn) : ParserFn :
|
||||
ignoreFn skipBlock >> final
|
||||
|
||||
-- Like `recoverBlock` but stores recovered errors at the original error position.
|
||||
private def recoverBlockAtErrPos (p : ParserFn) : ParserFn := fun c s =>
|
||||
def recoverBlockAtErrPos (p : ParserFn) : ParserFn := fun c s =>
|
||||
let s := p c s
|
||||
if let some msg := s.errorMsg then
|
||||
let errPos := s.pos
|
||||
@@ -457,36 +457,36 @@ private def recoverBlockAtErrPos (p : ParserFn) : ParserFn := fun c s =>
|
||||
recoveredErrors := s.recoveredErrors.push (errPos, s'.stxStack, msg)}
|
||||
else s
|
||||
|
||||
private def recoverBlockWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
def recoverBlockWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun rctx =>
|
||||
ignoreFn skipBlock >>
|
||||
show ParserFn from
|
||||
fun _ s => stxs.foldl (init := s.shrinkStack rctx.initialSize) (·.pushSyntax ·)
|
||||
|
||||
private def recoverLine (p : ParserFn) : ParserFn :=
|
||||
def recoverLine (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun _ =>
|
||||
ignoreFn skipRestOfLine
|
||||
|
||||
private def recoverWs (p : ParserFn) : ParserFn :=
|
||||
def recoverWs (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun _ =>
|
||||
ignoreFn <| takeUntilFn (fun c => c == ' ' || c == '\n')
|
||||
|
||||
private def recoverNonSpace (p : ParserFn) : ParserFn :=
|
||||
def recoverNonSpace (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun rctx =>
|
||||
ignoreFn (takeUntilFn (fun c => c != ' ')) >>
|
||||
show ParserFn from
|
||||
fun _ s => s.shrinkStack rctx.initialSize
|
||||
|
||||
private def recoverWsWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
def recoverWsWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun rctx =>
|
||||
ignoreFn <| takeUntilFn (fun c => c == ' ' || c == '\n') >>
|
||||
show ParserFn from
|
||||
fun _ s => stxs.foldl (init := s.shrinkStack rctx.initialSize) (·.pushSyntax ·)
|
||||
|
||||
private def recoverEol (p : ParserFn) : ParserFn :=
|
||||
def recoverEol (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun _ => ignoreFn <| skipToNewline
|
||||
|
||||
private def recoverEolWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
def recoverEolWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun rctx =>
|
||||
ignoreFn skipToNewline >>
|
||||
show ParserFn from
|
||||
@@ -494,7 +494,7 @@ private def recoverEolWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
|
||||
-- Like `recoverEol` but stores recovered errors at the original error position
|
||||
-- rather than the post-recovery position.
|
||||
private def recoverEolAtErrPos (p : ParserFn) : ParserFn := fun c s =>
|
||||
def recoverEolAtErrPos (p : ParserFn) : ParserFn := fun c s =>
|
||||
let s := p c s
|
||||
if let some msg := s.errorMsg then
|
||||
let errPos := s.pos
|
||||
@@ -509,7 +509,7 @@ private def recoverEolAtErrPos (p : ParserFn) : ParserFn := fun c s =>
|
||||
|
||||
-- Like `recoverEolWith` but stores recovered errors at the original error position
|
||||
-- rather than the post-recovery position.
|
||||
private def recoverEolWithAtErrPos (stxs : Array Syntax) (p : ParserFn) : ParserFn := fun c s =>
|
||||
def recoverEolWithAtErrPos (stxs : Array Syntax) (p : ParserFn) : ParserFn := fun c s =>
|
||||
let iniSz := s.stxStack.size
|
||||
let s := p c s
|
||||
if let some msg := s.errorMsg then
|
||||
@@ -521,10 +521,10 @@ private def recoverEolWithAtErrPos (stxs : Array Syntax) (p : ParserFn) : Parser
|
||||
{s' with recoveredErrors := s.recoveredErrors.push (errPos, s'.stxStack, msg)}
|
||||
else s
|
||||
|
||||
private def recoverSkip (p : ParserFn) : ParserFn :=
|
||||
def recoverSkip (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun _ => skipFn
|
||||
|
||||
private def recoverSkipWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
def recoverSkipWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun rctx =>
|
||||
show ParserFn from
|
||||
fun _ s => stxs.foldl (init := s.shrinkStack rctx.initialSize) (·.pushSyntax ·)
|
||||
@@ -535,7 +535,7 @@ def recoverHereWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
|
||||
show ParserFn from
|
||||
fun _ s => stxs.foldl (init := s.restore rctx.initialSize rctx.initialPos) (·.pushSyntax ·)
|
||||
|
||||
private def recoverHereWithKeeping (stxs : Array Syntax) (keep : Nat) (p : ParserFn) : ParserFn :=
|
||||
def recoverHereWithKeeping (stxs : Array Syntax) (keep : Nat) (p : ParserFn) : ParserFn :=
|
||||
recoverFn p fun rctx =>
|
||||
show ParserFn from
|
||||
fun _ s => stxs.foldl (init := s.restore (rctx.initialSize + keep) rctx.initialPos) (·.pushSyntax ·)
|
||||
@@ -584,7 +584,7 @@ it's in a single-line context and whitespace may only be the space
|
||||
character. If it's `some N`, then newlines are allowed, but `N` is the
|
||||
minimum indentation column.
|
||||
-/
|
||||
private def nameArgWhitespace : (multiline : Option Nat) → ParserFn
|
||||
def nameArgWhitespace : (multiline : Option Nat) → ParserFn
|
||||
| none => eatSpaces
|
||||
| some n => takeWhileFn (fun c => c == ' ' || c == '\n') >> guardMinColumn n
|
||||
|
||||
@@ -598,7 +598,7 @@ each sub-parser of `delimitedInline` contributes a clear expected-token name, an
|
||||
unhelpful generic "unexpected" messages from inner parsers so that the more informative message
|
||||
from `inlineTextChar` survives error merging via `<|>`.
|
||||
-/
|
||||
private def expectedFn (msg : String) (p : ParserFn) : ParserFn := fun c s =>
|
||||
def expectedFn (msg : String) (p : ParserFn) : ParserFn := fun c s =>
|
||||
let iniPos := s.pos
|
||||
let s := p c s
|
||||
if s.hasError && s.pos == iniPos then
|
||||
@@ -649,18 +649,18 @@ def linebreak (ctxt : InlineCtxt) : ParserFn :=
|
||||
else
|
||||
errorFn "Newlines not allowed here"
|
||||
|
||||
private partial def notInLink (ctxt : InlineCtxt) : ParserFn := fun _ s =>
|
||||
partial def notInLink (ctxt : InlineCtxt) : ParserFn := fun _ s =>
|
||||
if ctxt.inLink then s.mkError "Already in a link" else s
|
||||
|
||||
-- Like `satisfyFn (· == '\n')` but with a better error message that mentions what was expected.
|
||||
private def newlineOrUnexpected (msg : String) : ParserFn := fun c s =>
|
||||
def newlineOrUnexpected (msg : String) : ParserFn := fun c s =>
|
||||
let i := s.pos
|
||||
if h : c.atEnd i then s.mkEOIError
|
||||
else if c.get' i h == '\n' then s.next' c i h
|
||||
else s.mkUnexpectedError s!"unexpected '{c.get' i h}'" [msg]
|
||||
|
||||
mutual
|
||||
private partial def emphLike
|
||||
partial def emphLike
|
||||
(name : SyntaxNodeKind) (char : Char) (what plural : String)
|
||||
(getter : InlineCtxt → Option Nat) (setter : InlineCtxt → Option Nat → InlineCtxt)
|
||||
(ctxt : InlineCtxt) : ParserFn :=
|
||||
@@ -799,7 +799,7 @@ mutual
|
||||
nodeFn `str (asStringFn (quoted := true) (many1Fn (satisfyEscFn (fun c => c != ']' && c != '\n') "other than ']' or newline"))) >>
|
||||
strFn "]")
|
||||
|
||||
private partial def linkTarget : ParserFn := fun c s =>
|
||||
partial def linkTarget : ParserFn := fun c s =>
|
||||
let s := (ref <|> url) c s
|
||||
if s.hasError then
|
||||
match s.errorMsg with
|
||||
@@ -922,7 +922,7 @@ deriving Inhabited, Repr
|
||||
Finds the minimum column of the first non-whitespace character on each non-empty content line
|
||||
between `startPos` and `endPos`, returning `init` if no such line exists.
|
||||
-/
|
||||
private def minContentIndent (text : FileMap) (startPos endPos : String.Pos.Raw)
|
||||
def minContentIndent (text : FileMap) (startPos endPos : String.Pos.Raw)
|
||||
(init : Nat) : Nat := Id.run do
|
||||
let mut result := init
|
||||
let mut thisLineCol := 0
|
||||
@@ -980,13 +980,13 @@ public def BlockCtxt.forDocString (text : FileMap)
|
||||
else text.source.rawEndPos
|
||||
{ docStartPosition := text.toPosition pos, baseColumn }
|
||||
|
||||
private def bol (ctxt : BlockCtxt) : ParserFn := fun c s =>
|
||||
def bol (ctxt : BlockCtxt) : ParserFn := fun c s =>
|
||||
let pos := c.fileMap.toPosition s.pos
|
||||
if pos.column ≤ ctxt.baseColumn then s
|
||||
else if pos.line == ctxt.docStartPosition.line && pos.column ≤ ctxt.docStartPosition.column then s
|
||||
else s.mkErrorAt s!"beginning of line at {pos}" s.pos
|
||||
|
||||
private def bolThen (ctxt : BlockCtxt) (p : ParserFn) (description : String) : ParserFn := fun c s =>
|
||||
def bolThen (ctxt : BlockCtxt) (p : ParserFn) (description : String) : ParserFn := fun c s =>
|
||||
let position := c.fileMap.toPosition s.pos
|
||||
let positionOk :=
|
||||
position.column ≤ ctxt.baseColumn ||
|
||||
@@ -1075,16 +1075,16 @@ public def lookaheadUnorderedListIndicator (ctxt : BlockCtxt) (p : UnorderedList
|
||||
if s.hasError then s.setPos iniPos
|
||||
else p type c (s.shrinkStack iniSz |>.setPos bulletPos)
|
||||
|
||||
private def skipUntilDedent (indent : Nat) : ParserFn :=
|
||||
def skipUntilDedent (indent : Nat) : ParserFn :=
|
||||
skipRestOfLine >>
|
||||
manyFn (chFn ' ' >> takeWhileFn (· == ' ') >> guardColumn (· ≥ indent) s!"indentation at {indent}" >> skipRestOfLine)
|
||||
|
||||
private def recoverUnindent (indent : Nat) (p : ParserFn) (finish : ParserFn := skipFn) :
|
||||
def recoverUnindent (indent : Nat) (p : ParserFn) (finish : ParserFn := skipFn) :
|
||||
ParserFn :=
|
||||
recoverFn p (fun _ => ignoreFn (skipUntilDedent indent) >> finish)
|
||||
|
||||
|
||||
private def blockSep := ignoreFn (manyFn blankLine >> optionalFn endLine)
|
||||
def blockSep := ignoreFn (manyFn blankLine >> optionalFn endLine)
|
||||
|
||||
mutual
|
||||
/-- Parses a list item according to the current nesting context. -/
|
||||
|
||||
@@ -74,7 +74,7 @@ def isValidAutoBoundLevelName (n : Name) (relaxed : Bool) : Bool :=
|
||||
/--
|
||||
Tracks extra context needed within the scope of `Lean.Elab.Term.withAutoBoundImplicit`
|
||||
-/
|
||||
public structure AutoBoundImplicitContext where
|
||||
structure AutoBoundImplicitContext where
|
||||
/--
|
||||
This always matches the `autoImplicit` option; it is duplicated here in
|
||||
order to support the behavior of the deprecated `Lean.Elab.Term.Context.autoImplicit`
|
||||
@@ -95,7 +95,7 @@ instance : EmptyCollection AutoBoundImplicitContext where
|
||||
Pushes a new variable onto the autoImplicit context, indicating that it needs
|
||||
to be bound as an implicit parameter.
|
||||
-/
|
||||
public def AutoBoundImplicitContext.push (ctx : AutoBoundImplicitContext) (x : Expr) :=
|
||||
def AutoBoundImplicitContext.push (ctx : AutoBoundImplicitContext) (x : Expr) :=
|
||||
{ ctx with boundVariables := ctx.boundVariables.push x }
|
||||
|
||||
end Lean.Elab
|
||||
|
||||
@@ -116,8 +116,9 @@ private def checkEndHeader : Name → List Scope → Option Name
|
||||
addScope (isNewNamespace := false) (isNoncomputable := ncTk.isSome) (isPublic := publicTk.isSome) (isMeta := metaTk.isSome) (attrs := attrs) "" (← getCurrNamespace)
|
||||
| _ => throwUnsupportedSyntax
|
||||
|
||||
@[builtin_command_elab InternalSyntax.end_local_scope] def elabEndLocalScope : CommandElab := fun _ => do
|
||||
setDelimitsLocal
|
||||
@[builtin_command_elab InternalSyntax.end_local_scope] def elabEndLocalScope : CommandElab := fun stx => do
|
||||
let depth := stx[1].toNat
|
||||
setDelimitsLocal depth
|
||||
|
||||
/--
|
||||
Produces a `Name` composed of the names of at most the innermost `n` scopes in `ss`, truncating if an
|
||||
@@ -528,7 +529,7 @@ open Lean.Parser.Command.InternalSyntax in
|
||||
@[builtin_macro Lean.Parser.Command.«in»] def expandInCmd : Macro
|
||||
| `($cmd₁ in%$tk $cmd₂) =>
|
||||
-- Limit ref variability for incrementality; see Note [Incremental Macros]
|
||||
withRef tk `(section $cmd₁:command $endLocalScopeSyntax:command $cmd₂ end)
|
||||
withRef tk `(section $cmd₁:command $(endLocalScopeSyntax 1):command $cmd₂ end)
|
||||
| _ => Macro.throwUnsupported
|
||||
|
||||
@[builtin_command_elab Parser.Command.addDocString] def elabAddDeclDoc : CommandElab := fun stx => do
|
||||
|
||||
@@ -314,6 +314,23 @@ private def mkSilentAnnotationIfHole (e : Expr) : TermElabM Expr := do
|
||||
return val
|
||||
| _ => panic! "resolveId? returned an unexpected expression"
|
||||
|
||||
/--
|
||||
Rebuild a type application with fresh synthetic metavariables for instance-implicit arguments.
|
||||
Non-instance-implicit arguments are assigned from the original application's arguments.
|
||||
If the function is over-applied, extra arguments are preserved.
|
||||
-/
|
||||
private def resynthInstImplicitArgs (type : Expr) : TermElabM Expr := do
|
||||
let fn := type.getAppFn
|
||||
let args := type.getAppArgs
|
||||
let (mvars, bis, _) ← forallMetaTelescope (← inferType fn)
|
||||
for i in [:mvars.size] do
|
||||
if bis[i]!.isInstImplicit then
|
||||
mvars[i]!.mvarId!.assign (← mkInstMVar (← inferType mvars[i]!))
|
||||
else
|
||||
mvars[i]!.mvarId!.assign args[i]!
|
||||
let args := mvars ++ args.drop mvars.size
|
||||
instantiateMVars (mkAppN fn args)
|
||||
|
||||
@[builtin_term_elab Lean.Parser.Term.inferInstanceAs] def elabInferInstanceAs : TermElab := fun stx expectedType? => do
|
||||
-- The type argument is the last child (works for both `inferInstanceAs T` and `inferInstanceAs <| T`)
|
||||
let typeStx := stx[stx.getNumArgs - 1]!
|
||||
@@ -325,19 +342,21 @@ private def mkSilentAnnotationIfHole (e : Expr) : TermElabM Expr := do
|
||||
.note "`inferInstanceAs` requires full knowledge of the expected (\"target\") type to do its \
|
||||
instance translation. If you do not intend to transport instances between two types, \
|
||||
consider using `inferInstance` or `(inferInstance : expectedType)` instead.")
|
||||
let type ← withSynthesize (postpone := .yes) <| elabType typeStx
|
||||
-- Unify with expected type to resolve metavariables (e.g., `_` placeholders)
|
||||
discard <| isDefEq type expectedType
|
||||
let type ← withSynthesize do
|
||||
let type ← elabType typeStx
|
||||
-- Unify with expected type to resolve metavariables (e.g., `_` placeholders)
|
||||
discard <| isDefEq type expectedType
|
||||
return type
|
||||
-- Re-infer instance-implicit args, so that synthesis is not influenced by the expected type's
|
||||
-- instance choices.
|
||||
let type ← withSynthesize <| resynthInstImplicitArgs type
|
||||
let type ← instantiateMVars type
|
||||
-- Rebuild type with fresh synthetic mvars for instance-implicit args, so that
|
||||
-- synthesis is not influenced by the expected type's instance choices.
|
||||
let type ← abstractInstImplicitArgs type
|
||||
let inst ← synthInstance type
|
||||
let inst ← if backward.inferInstanceAs.wrap.get (← getOptions) then
|
||||
-- Wrap instance so its type matches the expected type exactly.
|
||||
let logCompileErrors := !(← read).isNoncomputableSection && !(← read).declName?.any (Lean.isNoncomputable (← getEnv))
|
||||
let isMeta := (← read).declName?.any (isMarkedMeta (← getEnv))
|
||||
withNewMCtxDepth <| wrapInstance inst expectedType (logCompileErrors := logCompileErrors) (isMeta := isMeta)
|
||||
wrapInstance inst expectedType (logCompileErrors := logCompileErrors) (isMeta := isMeta)
|
||||
else
|
||||
pure inst
|
||||
ensureHasType expectedType? inst
|
||||
|
||||
@@ -7,11 +7,19 @@ module
|
||||
|
||||
prelude
|
||||
public import Lean.DocString.Add
|
||||
public import Lean.Linter.Basic
|
||||
meta import Lean.Parser.Command
|
||||
|
||||
public section
|
||||
|
||||
namespace Lean.Elab
|
||||
namespace Lean
|
||||
|
||||
register_builtin_option linter.redundantVisibility : Bool := {
|
||||
defValue := false
|
||||
descr := "warn on redundant `private`/`public` visibility modifiers"
|
||||
}
|
||||
|
||||
namespace Elab
|
||||
|
||||
/--
|
||||
Ensure the environment does not contain a declaration with name `declName`.
|
||||
@@ -77,14 +85,30 @@ def Visibility.isInferredPublic (env : Environment) (v : Visibility) : Bool :=
|
||||
if env.isExporting || !env.header.isModule then !v.isPrivate else v.isPublic
|
||||
|
||||
/-- Converts optional visibility syntax to a `Visibility` value. -/
|
||||
def elabVisibility [Monad m] [MonadError m] (vis? : Option (TSyntax ``Parser.Command.visibility)) :
|
||||
m Visibility :=
|
||||
def elabVisibility [Monad m] [MonadError m] [MonadEnv m] [MonadOptions m] [MonadLog m]
|
||||
[AddMessageContext m]
|
||||
(vis? : Option (TSyntax ``Parser.Command.visibility)) :
|
||||
m Visibility := do
|
||||
let env ← getEnv
|
||||
match vis? with
|
||||
| none => pure .regular
|
||||
| some v =>
|
||||
match v with
|
||||
| `(Parser.Command.visibility| private) => pure .private
|
||||
| `(Parser.Command.visibility| public) => pure .public
|
||||
| `(Parser.Command.visibility| private) =>
|
||||
if v.raw.getHeadInfo matches .original .. then -- skip macro output
|
||||
if env.header.isModule && !env.isExporting then
|
||||
Linter.logLintIf linter.redundantVisibility v
|
||||
m!"`private` has no effect in a `module` file outside `public section`; \
|
||||
declarations are already `private` by default"
|
||||
pure .private
|
||||
| `(Parser.Command.visibility| public) =>
|
||||
if v.raw.getHeadInfo matches .original .. then -- skip macro output
|
||||
if env.isExporting || !env.header.isModule then
|
||||
Linter.logLintIf linter.redundantVisibility v
|
||||
m!"`public` is the default visibility{
|
||||
if env.header.isModule then " inside a `public section`" else ""
|
||||
}; the modifier has no effect"
|
||||
pure .public
|
||||
| _ => throwErrorAt v "unexpected visibility modifier"
|
||||
|
||||
/-- Whether a declaration is default, partial or nonrec. -/
|
||||
|
||||
@@ -152,8 +152,9 @@ def expandNamespacedDeclaration : Macro := fun stx => do
|
||||
| some (ns, newStx) => do
|
||||
-- Limit ref variability for incrementality; see Note [Incremental Macros]
|
||||
let declTk := stx[1][0]
|
||||
let depth := ns.getNumParts
|
||||
let ns := mkIdentFrom declTk ns
|
||||
withRef declTk `(namespace $ns $endLocalScopeSyntax:command $(⟨newStx⟩) end $ns)
|
||||
withRef declTk `(namespace $ns $(endLocalScopeSyntax depth):command $(⟨newStx⟩) end $ns)
|
||||
| none => Macro.throwUnsupported
|
||||
|
||||
@[builtin_command_elab declaration, builtin_incremental]
|
||||
|
||||
@@ -172,7 +172,7 @@ def mkMatchNew (header : Header) (indVal : InductiveVal) (auxFunName : Name) : T
|
||||
if indVal.numCtors == 1 then
|
||||
`( $(mkCIdent casesOnSameCtorName) $x1:term $x2:term rfl $alts:term* )
|
||||
else
|
||||
`( match decEq ($(mkCIdent ctorIdxName) $x1:ident) ($(mkCIdent ctorIdxName) $x2:ident) with
|
||||
`( match Nat.decEq ($(mkCIdent ctorIdxName) $x1:ident) ($(mkCIdent ctorIdxName) $x2:ident) with
|
||||
| .isTrue h => $(mkCIdent casesOnSameCtorName) $x1:term $x2:term h $alts:term*
|
||||
| .isFalse _ => false)
|
||||
|
||||
|
||||
@@ -1038,19 +1038,19 @@ builtin_initialize registerBuiltinAttribute {
|
||||
}
|
||||
end
|
||||
|
||||
private unsafe def codeSuggestionsUnsafe : TermElabM (Array (StrLit → DocM (Array CodeSuggestion))) := do
|
||||
unsafe def codeSuggestionsUnsafe : TermElabM (Array (StrLit → DocM (Array CodeSuggestion))) := do
|
||||
let names := (codeSuggestionExt.getState (← getEnv)) |>.toArray
|
||||
return (← names.mapM (evalConst _)) ++ (← builtinCodeSuggestions.get).map (·.2)
|
||||
|
||||
@[implemented_by codeSuggestionsUnsafe]
|
||||
private opaque codeSuggestions : TermElabM (Array (StrLit → DocM (Array CodeSuggestion)))
|
||||
opaque codeSuggestions : TermElabM (Array (StrLit → DocM (Array CodeSuggestion)))
|
||||
|
||||
private unsafe def codeBlockSuggestionsUnsafe : TermElabM (Array (StrLit → DocM (Array CodeBlockSuggestion))) := do
|
||||
unsafe def codeBlockSuggestionsUnsafe : TermElabM (Array (StrLit → DocM (Array CodeBlockSuggestion))) := do
|
||||
let names := (codeBlockSuggestionExt.getState (← getEnv)) |>.toArray
|
||||
return (← names.mapM (evalConst _)) ++ (← builtinCodeBlockSuggestions.get).map (·.2)
|
||||
|
||||
@[implemented_by codeBlockSuggestionsUnsafe]
|
||||
private opaque codeBlockSuggestions : TermElabM (Array (StrLit → DocM (Array CodeBlockSuggestion)))
|
||||
opaque codeBlockSuggestions : TermElabM (Array (StrLit → DocM (Array CodeBlockSuggestion)))
|
||||
|
||||
/--
|
||||
Resolves a name against `NameMap` that contains a list of builtin expanders, taking into account
|
||||
@@ -1060,7 +1060,7 @@ resolution (`realizeGlobalConstNoOverload`) won't find them.
|
||||
|
||||
This is called as a fallback when the identifier can't be resolved.
|
||||
-/
|
||||
private def resolveBuiltinDocName {α : Type} (builtins : NameMap α) (x : Name) : TermElabM (Option α) := do
|
||||
def resolveBuiltinDocName {α : Type} (builtins : NameMap α) (x : Name) : TermElabM (Option α) := do
|
||||
if let some v := builtins.get? x then return some v
|
||||
|
||||
-- Builtins shouldn't require a prefix, as they're part of the language.
|
||||
@@ -1089,7 +1089,7 @@ private def resolveBuiltinDocName {α : Type} (builtins : NameMap α) (x : Name)
|
||||
|
||||
return none
|
||||
|
||||
private unsafe def roleExpandersForUnsafe (roleName : Ident) :
|
||||
unsafe def roleExpandersForUnsafe (roleName : Ident) :
|
||||
TermElabM (Array (Name × (TSyntaxArray `inline → StateT (Array (TSyntax `doc_arg)) DocM (Inline ElabInline)))) := do
|
||||
let x? ←
|
||||
try some <$> realizeGlobalConstNoOverload roleName
|
||||
@@ -1105,10 +1105,10 @@ private unsafe def roleExpandersForUnsafe (roleName : Ident) :
|
||||
|
||||
|
||||
@[implemented_by roleExpandersForUnsafe]
|
||||
private opaque roleExpandersFor (roleName : Ident) :
|
||||
opaque roleExpandersFor (roleName : Ident) :
|
||||
TermElabM (Array (Name × (TSyntaxArray `inline → StateT (Array (TSyntax `doc_arg)) DocM (Inline ElabInline))))
|
||||
|
||||
private unsafe def codeBlockExpandersForUnsafe (codeBlockName : Ident) :
|
||||
unsafe def codeBlockExpandersForUnsafe (codeBlockName : Ident) :
|
||||
TermElabM (Array (Name × (StrLit → StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock)))) := do
|
||||
let x? ←
|
||||
try some <$> realizeGlobalConstNoOverload codeBlockName
|
||||
@@ -1124,10 +1124,10 @@ private unsafe def codeBlockExpandersForUnsafe (codeBlockName : Ident) :
|
||||
|
||||
|
||||
@[implemented_by codeBlockExpandersForUnsafe]
|
||||
private opaque codeBlockExpandersFor (codeBlockName : Ident) :
|
||||
opaque codeBlockExpandersFor (codeBlockName : Ident) :
|
||||
TermElabM (Array (Name × (StrLit → StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock))))
|
||||
|
||||
private unsafe def directiveExpandersForUnsafe (directiveName : Ident) :
|
||||
unsafe def directiveExpandersForUnsafe (directiveName : Ident) :
|
||||
TermElabM (Array (Name × (TSyntaxArray `block → StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock)))) := do
|
||||
let x? ←
|
||||
try some <$> realizeGlobalConstNoOverload directiveName
|
||||
@@ -1142,10 +1142,10 @@ private unsafe def directiveExpandersForUnsafe (directiveName : Ident) :
|
||||
return hasBuiltin.toArray.flatten
|
||||
|
||||
@[implemented_by directiveExpandersForUnsafe]
|
||||
private opaque directiveExpandersFor (directiveName : Ident) :
|
||||
opaque directiveExpandersFor (directiveName : Ident) :
|
||||
TermElabM (Array (Name × (TSyntaxArray `block → StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock))))
|
||||
|
||||
private unsafe def commandExpandersForUnsafe (commandName : Ident) :
|
||||
unsafe def commandExpandersForUnsafe (commandName : Ident) :
|
||||
TermElabM (Array (Name × StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock))) := do
|
||||
let x? ←
|
||||
try some <$> realizeGlobalConstNoOverload commandName
|
||||
@@ -1161,18 +1161,18 @@ private unsafe def commandExpandersForUnsafe (commandName : Ident) :
|
||||
return hasBuiltin.toArray.flatten
|
||||
|
||||
@[implemented_by commandExpandersForUnsafe]
|
||||
private opaque commandExpandersFor (commandName : Ident) :
|
||||
opaque commandExpandersFor (commandName : Ident) :
|
||||
TermElabM (Array (Name × StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock)))
|
||||
|
||||
|
||||
private def mkArgVal (arg : TSyntax `arg_val) : DocM Term :=
|
||||
def mkArgVal (arg : TSyntax `arg_val) : DocM Term :=
|
||||
match arg with
|
||||
| `(arg_val|$n:ident) => pure n
|
||||
| `(arg_val|$n:num) => pure n
|
||||
| `(arg_val|$s:str) => pure s
|
||||
| _ => throwErrorAt arg "Didn't understand as argument value"
|
||||
|
||||
private def mkArg (arg : TSyntax `doc_arg) : DocM (TSyntax ``Parser.Term.argument) := do
|
||||
def mkArg (arg : TSyntax `doc_arg) : DocM (TSyntax ``Parser.Term.argument) := do
|
||||
match arg with
|
||||
| `(doc_arg|$x:arg_val) =>
|
||||
let x ← mkArgVal x
|
||||
@@ -1190,7 +1190,7 @@ private def mkArg (arg : TSyntax `doc_arg) : DocM (TSyntax ``Parser.Term.argumen
|
||||
`(Parser.Term.argument| ($x := $v))
|
||||
| _ => throwErrorAt arg "Didn't understand as argument"
|
||||
|
||||
private def mkAppStx (name : Ident) (args : TSyntaxArray `doc_arg) : DocM Term := do
|
||||
def mkAppStx (name : Ident) (args : TSyntaxArray `doc_arg) : DocM Term := do
|
||||
return ⟨mkNode ``Parser.Term.app #[name, mkNullNode (← args.mapM mkArg)]⟩
|
||||
|
||||
/--
|
||||
@@ -1204,7 +1204,7 @@ register_builtin_option doc.verso.suggestions : Bool := {
|
||||
-- Normally, name suggestions should be provided relative to the current scope. But
|
||||
-- during bootstrapping, the names in question may not yet be defined, so builtin
|
||||
-- names need special handling.
|
||||
private def suggestionName (name : Name) : TermElabM Name := do
|
||||
def suggestionName (name : Name) : TermElabM Name := do
|
||||
let name' ←
|
||||
-- Builtin expander names never need namespacing
|
||||
if (← builtinDocRoles.get).contains name then pure (some name)
|
||||
@@ -1241,7 +1241,7 @@ private def suggestionName (name : Name) : TermElabM Name := do
|
||||
-- Fall back to doing nothing
|
||||
pure name
|
||||
|
||||
private def sortSuggestions (ss : Array Meta.Hint.Suggestion) : Array Meta.Hint.Suggestion :=
|
||||
def sortSuggestions (ss : Array Meta.Hint.Suggestion) : Array Meta.Hint.Suggestion :=
|
||||
let cmp : (x y : Meta.Tactic.TryThis.SuggestionText) → Bool
|
||||
| .string s1, .string s2 => s1 < s2
|
||||
| .string _, _ => true
|
||||
@@ -1250,7 +1250,7 @@ private def sortSuggestions (ss : Array Meta.Hint.Suggestion) : Array Meta.Hint.
|
||||
ss.qsort (cmp ·.suggestion ·.suggestion)
|
||||
|
||||
open Diff in
|
||||
private def mkSuggestion
|
||||
def mkSuggestion
|
||||
(ref : Syntax) (hintTitle : MessageData)
|
||||
(newStrings : Array (String × Option String × Option String)) :
|
||||
DocM MessageData := do
|
||||
@@ -1281,7 +1281,7 @@ def nameOrBuiltinName [Monad m] [MonadEnv m] (x : Name) : m Name := do
|
||||
Finds registered expander names that `x` is a suffix of, for use in error message hints when the
|
||||
name is shadowed. Returns display names suitable for `mkSuggestion`.
|
||||
-/
|
||||
private def findShadowedNames {α : Type}
|
||||
def findShadowedNames {α : Type}
|
||||
(nonBuiltIns : NameMap (Array Name)) (builtins : NameMap α) (x : Name) :
|
||||
TermElabM (Array Name) := do
|
||||
if x.isAnonymous then return #[]
|
||||
@@ -1303,7 +1303,7 @@ private def findShadowedNames {α : Type}
|
||||
/--
|
||||
Builds a hint for an "Unknown role/directive/..." error when the name might be shadowed.
|
||||
-/
|
||||
private def shadowedHint {α : Type}
|
||||
def shadowedHint {α : Type}
|
||||
(envEntries : NameMap (Array Name)) (builtins : NameMap α)
|
||||
(name : Ident) (kind : String) : DocM MessageData := do
|
||||
let candidates ← findShadowedNames envEntries builtins name.getId
|
||||
@@ -1316,7 +1316,7 @@ Throws an appropriate error for an unknown doc element (role/directive/code bloc
|
||||
Distinguishes "name resolves but isn't registered" from "name doesn't resolve at all",
|
||||
and includes shadowed-name suggestions when applicable.
|
||||
-/
|
||||
private def throwUnknownDocElem {α β : Type}
|
||||
def throwUnknownDocElem {α β : Type}
|
||||
(envEntries : NameMap (Array Name)) (builtins : NameMap α)
|
||||
(name : Ident) (kind : String) : DocM β := do
|
||||
let hint ← shadowedHint envEntries builtins name kind
|
||||
@@ -1545,12 +1545,12 @@ where
|
||||
withSpace (s : String) : String :=
|
||||
if s.endsWith " " then s else s ++ " "
|
||||
|
||||
private def takeFirst? (xs : Array α) : Option (α × Array α) :=
|
||||
def takeFirst? (xs : Array α) : Option (α × Array α) :=
|
||||
if h : xs.size > 0 then
|
||||
some (xs[0], xs.extract 1)
|
||||
else none
|
||||
|
||||
private partial def elabBlocks' (level : Nat) :
|
||||
partial def elabBlocks' (level : Nat) :
|
||||
StateT (TSyntaxArray `block) DocM (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty)) := do
|
||||
let mut pre := #[]
|
||||
let mut sub := #[]
|
||||
@@ -1586,7 +1586,7 @@ private partial def elabBlocks' (level : Nat) :
|
||||
break
|
||||
return (pre, sub)
|
||||
|
||||
private def elabModSnippet'
|
||||
def elabModSnippet'
|
||||
(range : DeclarationRange) (level : Nat) (blocks : TSyntaxArray `block) :
|
||||
DocM VersoModuleDocs.Snippet := do
|
||||
let mut snippet : VersoModuleDocs.Snippet := {
|
||||
@@ -1616,7 +1616,7 @@ private def elabModSnippet'
|
||||
snippet := snippet.addBlock (← elabBlock b)
|
||||
return snippet
|
||||
|
||||
private partial def fixupInline (inl : Inline ElabInline) : DocM (Inline ElabInline) := do
|
||||
partial def fixupInline (inl : Inline ElabInline) : DocM (Inline ElabInline) := do
|
||||
match inl with
|
||||
| .concat xs => .concat <$> xs.mapM fixupInline
|
||||
| .emph xs => .emph <$> xs.mapM fixupInline
|
||||
@@ -1663,7 +1663,7 @@ private partial def fixupInline (inl : Inline ElabInline) : DocM (Inline ElabInl
|
||||
return .empty
|
||||
| _ => .other i <$> xs.mapM fixupInline
|
||||
|
||||
private partial def fixupBlock (block : Block ElabInline ElabBlock) : DocM (Block ElabInline ElabBlock) := do
|
||||
partial def fixupBlock (block : Block ElabInline ElabBlock) : DocM (Block ElabInline ElabBlock) := do
|
||||
match block with
|
||||
| .para xs => .para <$> xs.mapM fixupInline
|
||||
| .concat xs => .concat <$> xs.mapM fixupBlock
|
||||
@@ -1677,7 +1677,7 @@ private partial def fixupBlock (block : Block ElabInline ElabBlock) : DocM (Bloc
|
||||
| .code s => pure (.code s)
|
||||
| .other i xs => .other i <$> xs.mapM fixupBlock
|
||||
|
||||
private partial def fixupPart (part : Part ElabInline ElabBlock Empty) : DocM (Part ElabInline ElabBlock Empty) := do
|
||||
partial def fixupPart (part : Part ElabInline ElabBlock Empty) : DocM (Part ElabInline ElabBlock Empty) := do
|
||||
return { part with
|
||||
title := ← part.title.mapM fixupInline
|
||||
content := ← part.content.mapM fixupBlock,
|
||||
@@ -1685,13 +1685,13 @@ private partial def fixupPart (part : Part ElabInline ElabBlock Empty) : DocM (P
|
||||
}
|
||||
|
||||
|
||||
private partial def fixupBlocks : (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty)) → DocM (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty))
|
||||
partial def fixupBlocks : (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty)) → DocM (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty))
|
||||
| (bs, ps) => do
|
||||
let bs ← bs.mapM fixupBlock
|
||||
let ps ← ps.mapM fixupPart
|
||||
return (bs, ps)
|
||||
|
||||
private partial def fixupSnippet (snippet : VersoModuleDocs.Snippet) : DocM VersoModuleDocs.Snippet := do
|
||||
partial def fixupSnippet (snippet : VersoModuleDocs.Snippet) : DocM VersoModuleDocs.Snippet := do
|
||||
return {snippet with
|
||||
text := ← snippet.text.mapM fixupBlock,
|
||||
sections := ← snippet.sections.mapM fun (level, range, content) => do
|
||||
@@ -1700,7 +1700,7 @@ private partial def fixupSnippet (snippet : VersoModuleDocs.Snippet) : DocM Vers
|
||||
/--
|
||||
After fixing up the references, check to see which were not used and emit a suitable warning.
|
||||
-/
|
||||
private def warnUnusedRefs : DocM Unit := do
|
||||
def warnUnusedRefs : DocM Unit := do
|
||||
for (_, {location, seen, ..}) in (← getThe InternalState).urls do
|
||||
unless seen do
|
||||
logWarningAt location "Unused URL"
|
||||
|
||||
@@ -31,7 +31,7 @@ structure Data.Atom where
|
||||
deriving TypeName
|
||||
|
||||
|
||||
private def onlyCode [Monad m] [MonadError m] (xs : TSyntaxArray `inline) : m StrLit := do
|
||||
def onlyCode [Monad m] [MonadError m] (xs : TSyntaxArray `inline) : m StrLit := do
|
||||
if h : xs.size = 1 then
|
||||
match xs[0] with
|
||||
| `(inline|code($s)) => return s
|
||||
@@ -43,7 +43,7 @@ private def onlyCode [Monad m] [MonadError m] (xs : TSyntaxArray `inline) : m St
|
||||
/--
|
||||
Checks whether a syntax descriptor's value contains the given atom.
|
||||
-/
|
||||
private partial def containsAtom (e : Expr) (atom : String) : MetaM Bool := do
|
||||
partial def containsAtom (e : Expr) (atom : String) : MetaM Bool := do
|
||||
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM Bool := do
|
||||
match p.getAppFnArgs with
|
||||
| (``ParserDescr.node, #[_, _, p]) => containsAtom p atom
|
||||
@@ -67,7 +67,7 @@ private partial def containsAtom (e : Expr) (atom : String) : MetaM Bool := do
|
||||
Checks whether a syntax descriptor's value contains the given atom. If so, the residual value after
|
||||
the atom is returned.
|
||||
-/
|
||||
private partial def containsAtom' (e : Expr) (atom : String) : MetaM (Option Expr) := do
|
||||
partial def containsAtom' (e : Expr) (atom : String) : MetaM (Option Expr) := do
|
||||
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM (Option Expr) := do
|
||||
match p.getAppFnArgs with
|
||||
| (``ParserDescr.node, #[_, _, p]) => containsAtom' p atom
|
||||
@@ -92,7 +92,7 @@ private partial def containsAtom' (e : Expr) (atom : String) : MetaM (Option Exp
|
||||
| _ => if tryWhnf then attempt (← Meta.whnf p) false else pure none
|
||||
attempt e true
|
||||
|
||||
private partial def canEpsilon (e : Expr) : MetaM Bool := do
|
||||
partial def canEpsilon (e : Expr) : MetaM Bool := do
|
||||
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM Bool := do
|
||||
match p.getAppFnArgs with
|
||||
| (``ParserDescr.node, #[_, _, p]) => canEpsilon p
|
||||
@@ -118,7 +118,7 @@ private partial def canEpsilon (e : Expr) : MetaM Bool := do
|
||||
Checks whether a syntax descriptor's value begins with the given atom. If so, the residual value
|
||||
after the atom is returned.
|
||||
-/
|
||||
private partial def startsWithAtom? (e : Expr) (atom : String) : MetaM (Option Expr) := do
|
||||
partial def startsWithAtom? (e : Expr) (atom : String) : MetaM (Option Expr) := do
|
||||
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM (Option Expr) := do
|
||||
match p.getAppFnArgs with
|
||||
| (``ParserDescr.node, #[_, _, p]) => startsWithAtom? p atom
|
||||
@@ -149,7 +149,7 @@ private partial def startsWithAtom? (e : Expr) (atom : String) : MetaM (Option E
|
||||
Checks whether a syntax descriptor's value begins with the given atoms. If so, the residual value
|
||||
after the atoms is returned.
|
||||
-/
|
||||
private partial def startsWithAtoms? (e : Expr) (atoms : List String) : MetaM (Option Expr) := do
|
||||
partial def startsWithAtoms? (e : Expr) (atoms : List String) : MetaM (Option Expr) := do
|
||||
match atoms with
|
||||
| [] => pure e
|
||||
| a :: as =>
|
||||
@@ -157,7 +157,7 @@ private partial def startsWithAtoms? (e : Expr) (atoms : List String) : MetaM (O
|
||||
startsWithAtoms? e' as
|
||||
else pure none
|
||||
|
||||
private partial def exprContainsAtoms (e : Expr) (atoms : List String) : MetaM Bool := do
|
||||
partial def exprContainsAtoms (e : Expr) (atoms : List String) : MetaM Bool := do
|
||||
match atoms with
|
||||
| [] => pure true
|
||||
| a :: as =>
|
||||
@@ -165,7 +165,7 @@ private partial def exprContainsAtoms (e : Expr) (atoms : List String) : MetaM B
|
||||
(startsWithAtoms? e' as <&> Option.isSome) <||> exprContainsAtoms e' (a :: as)
|
||||
else pure false
|
||||
|
||||
private def withAtom (cat : Name) (atom : String) : DocM (Array Name) := do
|
||||
def withAtom (cat : Name) (atom : String) : DocM (Array Name) := do
|
||||
let env ← getEnv
|
||||
let some catContents := (Lean.Parser.parserExtension.getState env).categories.find? cat
|
||||
| return #[]
|
||||
@@ -177,7 +177,7 @@ private def withAtom (cat : Name) (atom : String) : DocM (Array Name) := do
|
||||
found := found.push k
|
||||
return found
|
||||
|
||||
private partial def isAtoms (atoms : List String) (stx : Syntax) : Bool :=
|
||||
partial def isAtoms (atoms : List String) (stx : Syntax) : Bool :=
|
||||
StateT.run (go [stx]) atoms |>.fst
|
||||
where
|
||||
go (stxs : List Syntax) : StateM (List String) Bool := do
|
||||
@@ -196,7 +196,7 @@ where
|
||||
| .node _ _ args :: ss =>
|
||||
go (args.toList ++ ss)
|
||||
|
||||
private def parserHasAtomPrefix (atoms : List String) (p : Parser) : TermElabM Bool := do
|
||||
def parserHasAtomPrefix (atoms : List String) (p : Parser) : TermElabM Bool := do
|
||||
let str := " ".intercalate atoms
|
||||
let env ← getEnv
|
||||
let options ← getOptions
|
||||
@@ -206,16 +206,16 @@ private def parserHasAtomPrefix (atoms : List String) (p : Parser) : TermElabM B
|
||||
let s := p.fn.run {inputString := str, fileName := "", fileMap := FileMap.ofString str} {env, options} (getTokenTable env) s
|
||||
return isAtoms atoms (mkNullNode (s.stxStack.extract 1 s.stxStack.size))
|
||||
|
||||
private unsafe def namedParserHasAtomPrefixUnsafe (atoms : List String) (parserName : Name) : TermElabM Bool := do
|
||||
unsafe def namedParserHasAtomPrefixUnsafe (atoms : List String) (parserName : Name) : TermElabM Bool := do
|
||||
try
|
||||
let p ← evalConstCheck Parser ``Parser parserName
|
||||
parserHasAtomPrefix atoms p
|
||||
catch | _ => pure false
|
||||
|
||||
@[implemented_by namedParserHasAtomPrefixUnsafe]
|
||||
private opaque namedParserHasAtomPrefix (atoms : List String) (parserName : Name) : TermElabM Bool
|
||||
opaque namedParserHasAtomPrefix (atoms : List String) (parserName : Name) : TermElabM Bool
|
||||
|
||||
private def parserDescrCanEps : ParserDescr → Bool
|
||||
def parserDescrCanEps : ParserDescr → Bool
|
||||
| .node _ _ p | .trailingNode _ _ _ p => parserDescrCanEps p
|
||||
| .binary ``Parser.andthen p1 p2 => parserDescrCanEps p1 && parserDescrCanEps p2
|
||||
| .binary ``Parser.orelse p1 p2 => parserDescrCanEps p1 || parserDescrCanEps p2
|
||||
@@ -227,7 +227,7 @@ private def parserDescrCanEps : ParserDescr → Bool
|
||||
| .const ``Parser.ppHardSpace => true
|
||||
| _ => false
|
||||
|
||||
private def parserDescrHasAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
|
||||
def parserDescrHasAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
|
||||
match p with
|
||||
| .node _ _ p | .trailingNode _ _ _ p | .unary _ p =>
|
||||
parserDescrHasAtom atom p
|
||||
@@ -249,7 +249,7 @@ private def parserDescrHasAtom (atom : String) (p : ParserDescr) : TermElabM (Op
|
||||
| none, none => pure none
|
||||
| _ => pure none
|
||||
|
||||
private def parserDescrStartsWithAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
|
||||
def parserDescrStartsWithAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
|
||||
match p with
|
||||
| .node _ _ p | .trailingNode _ _ _ p | .unary _ p =>
|
||||
parserDescrStartsWithAtom atom p
|
||||
@@ -272,7 +272,7 @@ private def parserDescrStartsWithAtom (atom : String) (p : ParserDescr) : TermEl
|
||||
| none, none => pure none
|
||||
| _ => pure none
|
||||
|
||||
private def parserDescrStartsWithAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
|
||||
def parserDescrStartsWithAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
|
||||
match atoms with
|
||||
| [] => pure true
|
||||
| a :: as =>
|
||||
@@ -280,7 +280,7 @@ private def parserDescrStartsWithAtoms (atoms : List String) (p : ParserDescr) :
|
||||
parserDescrStartsWithAtoms as p'
|
||||
else pure false
|
||||
|
||||
private partial def parserDescrHasAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
|
||||
partial def parserDescrHasAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
|
||||
match atoms with
|
||||
| [] => pure true
|
||||
| a :: as =>
|
||||
@@ -289,16 +289,16 @@ private partial def parserDescrHasAtoms (atoms : List String) (p : ParserDescr)
|
||||
else parserDescrHasAtoms (a :: as) p'
|
||||
else pure false
|
||||
|
||||
private unsafe def parserDescrNameHasAtomsUnsafe (atoms : List String) (p : Name) : TermElabM Bool := do
|
||||
unsafe def parserDescrNameHasAtomsUnsafe (atoms : List String) (p : Name) : TermElabM Bool := do
|
||||
try
|
||||
let p ← evalConstCheck ParserDescr ``ParserDescr p
|
||||
parserDescrHasAtoms atoms p
|
||||
catch | _ => pure false
|
||||
|
||||
@[implemented_by parserDescrNameHasAtomsUnsafe]
|
||||
private opaque parserDescrNameHasAtoms (atoms : List String) (p : Name) : TermElabM Bool
|
||||
opaque parserDescrNameHasAtoms (atoms : List String) (p : Name) : TermElabM Bool
|
||||
|
||||
private def kindHasAtoms (k : Name) (atoms : List String) : TermElabM Bool := do
|
||||
def kindHasAtoms (k : Name) (atoms : List String) : TermElabM Bool := do
|
||||
let env ← getEnv
|
||||
if let some ci := env.find? k then
|
||||
if let some d := ci.value? then
|
||||
@@ -312,7 +312,7 @@ private def kindHasAtoms (k : Name) (atoms : List String) : TermElabM Bool := do
|
||||
return true
|
||||
return false
|
||||
|
||||
private def withAtoms (cat : Name) (atoms : List String) : TermElabM (Array Name) := do
|
||||
def withAtoms (cat : Name) (atoms : List String) : TermElabM (Array Name) := do
|
||||
let env ← getEnv
|
||||
let some catContents := (Lean.Parser.parserExtension.getState env).categories.find? cat
|
||||
| return #[]
|
||||
@@ -323,7 +323,7 @@ private def withAtoms (cat : Name) (atoms : List String) : TermElabM (Array Name
|
||||
found := found.push k
|
||||
return found
|
||||
|
||||
private def kwImpl (cat : Ident := mkIdent .anonymous) (of : Ident := mkIdent .anonymous)
|
||||
def kwImpl (cat : Ident := mkIdent .anonymous) (of : Ident := mkIdent .anonymous)
|
||||
(suggest : Bool)
|
||||
(s : StrLit) : TermElabM (Inline ElabInline) := do
|
||||
let atoms := s.getString |>.split Char.isWhitespace |>.toStringList
|
||||
|
||||
@@ -55,7 +55,7 @@ def unfoldLHS (declName : Name) (mvarId : MVarId) : MetaM MVarId := mvarId.withC
|
||||
-- Else use delta reduction
|
||||
deltaLHS mvarId
|
||||
|
||||
private partial def mkEqnProof (declName : Name) (type : Expr) : MetaM Expr := do
|
||||
partial def mkEqnProof (declName : Name) (type : Expr) : MetaM Expr := do
|
||||
withTraceNode `Elab.definition.eqns (fun _ => return m!"proving:{indentExpr type}") do
|
||||
withNewMCtxDepth do
|
||||
let main ← mkFreshExprSyntheticOpaqueMVar type
|
||||
@@ -102,7 +102,7 @@ private partial def mkEqnProof (declName : Name) (type : Expr) : MetaM Expr := d
|
||||
else
|
||||
throwError "failed to generate equational theorem for `{.ofConstName declName}`\n{MessageData.ofGoal mvarId}"
|
||||
|
||||
private def lhsDependsOn (type : Expr) (fvarId : FVarId) : MetaM Bool :=
|
||||
def lhsDependsOn (type : Expr) (fvarId : FVarId) : MetaM Bool :=
|
||||
forallTelescope type fun _ type => do
|
||||
if let some (_, lhs, _) ← matchEq? type then
|
||||
dependsOn lhs fvarId
|
||||
|
||||
@@ -73,19 +73,19 @@ def hasErrorExplanation [Monad m] [MonadEnv m] (name : Name) : m Bool :=
|
||||
return errorExplanationExt.getState (← getEnv) |>.contains name
|
||||
|
||||
/-- Returns all error explanations with their names, sorted by name. -/
|
||||
public def getErrorExplanations [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
|
||||
def getErrorExplanations [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
|
||||
return errorExplanationExt.getState (← getEnv)
|
||||
|>.toArray
|
||||
|>.qsort fun e e' => e.1.toString < e'.1.toString
|
||||
|
||||
@[deprecated getErrorExplanations (since := "2026-12-20")]
|
||||
public def getErrorExplanationsRaw (env : Environment) : Array (Name × ErrorExplanation) :=
|
||||
def getErrorExplanationsRaw (env : Environment) : Array (Name × ErrorExplanation) :=
|
||||
errorExplanationExt.getState env
|
||||
|>.toArray
|
||||
|>.qsort fun e e' => e.1.toString < e'.1.toString
|
||||
|
||||
@[deprecated getErrorExplanations (since := "2026-12-20")]
|
||||
public def getErrorExplanationsSorted [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
|
||||
def getErrorExplanationsSorted [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
|
||||
getErrorExplanations
|
||||
|
||||
end Lean
|
||||
|
||||
@@ -19,3 +19,4 @@ public import Lean.Linter.Sets
|
||||
public import Lean.Linter.UnusedSimpArgs
|
||||
public import Lean.Linter.Coe
|
||||
public import Lean.Linter.GlobalAttributeIn
|
||||
public import Lean.Linter.EnvLinter
|
||||
|
||||
10
src/Lean/Linter/EnvLinter.lean
Normal file
10
src/Lean/Linter/EnvLinter.lean
Normal file
@@ -0,0 +1,10 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Wojciech Różowski
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Lean.Linter.EnvLinter.Basic
|
||||
public import Lean.Linter.EnvLinter.Frontend
|
||||
163
src/Lean/Linter/EnvLinter/Basic.lean
Normal file
163
src/Lean/Linter/EnvLinter/Basic.lean
Normal file
@@ -0,0 +1,163 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Wojciech Różowski
|
||||
|
||||
Copyright (c) 2020 Floris van Doorn. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Floris van Doorn, Robert Y. Lewis, Gabriel Ebner
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Lean.Structure
|
||||
public import Lean.Elab.InfoTree.Main
|
||||
import Lean.ExtraModUses
|
||||
|
||||
public section
|
||||
|
||||
open Lean Meta
|
||||
|
||||
namespace Lean.Linter.EnvLinter
|
||||
|
||||
/-!
|
||||
# Basic environment linter types and attributes
|
||||
|
||||
This file defines the basic types and attributes used by the environment
|
||||
linting framework. An environment linter consists of a function
|
||||
`(declaration : Name) → MetaM (Option MessageData)`, this function together with some
|
||||
metadata is stored in the `EnvLinter` structure. We define two attributes:
|
||||
|
||||
* `@[builtin_env_linter]` applies to a declaration of type `EnvLinter`
|
||||
and adds it to the default linter set.
|
||||
|
||||
* `@[builtin_nolint linterName]` omits the tagged declaration from being checked by
|
||||
the linter with name `linterName`.
|
||||
-/
|
||||
|
||||
/--
|
||||
Returns true if `decl` is an automatically generated declaration.
|
||||
|
||||
Also returns true if `decl` is an internal name or created during macro
|
||||
expansion.
|
||||
-/
|
||||
def isAutoDecl (decl : Name) : CoreM Bool := do
|
||||
if decl.hasMacroScopes then return true
|
||||
if decl.isInternal then return true
|
||||
let env ← getEnv
|
||||
if isReservedName env decl then return true
|
||||
if let Name.str n s := decl then
|
||||
if (← isAutoDecl n) then return true
|
||||
if s.startsWith "proof_"
|
||||
|| s.startsWith "match_"
|
||||
|| s.startsWith "unsafe_"
|
||||
|| s.startsWith "grind_"
|
||||
then return true
|
||||
if env.isConstructor n && s ∈ ["injEq", "inj", "sizeOf_spec", "elim", "noConfusion"] then
|
||||
return true
|
||||
if let ConstantInfo.inductInfo _ := env.find? n then
|
||||
if s.startsWith "brecOn_" || s.startsWith "below_" then return true
|
||||
if s ∈ [casesOnSuffix, recOnSuffix, brecOnSuffix, belowSuffix,
|
||||
"ndrec", "ndrecOn", "noConfusionType", "noConfusion", "ofNat", "toCtorIdx", "ctorIdx",
|
||||
"ctorElim", "ctorElimType"] then
|
||||
return true
|
||||
if let some _ := isSubobjectField? env n (.mkSimple s) then
|
||||
return true
|
||||
-- Coinductive/inductive lattice-theoretic predicates:
|
||||
if let ConstantInfo.inductInfo _ := env.find? (Name.str n "_functor") then
|
||||
if s == "functor_unfold" || s == casesOnSuffix || s == "mutual" then return true
|
||||
if env.isConstructor (Name.str (Name.str n "_functor") s) then return true
|
||||
pure false
|
||||
|
||||
/-- An environment linting test for the `lake builtin-lint` command. -/
|
||||
structure EnvLinter where
|
||||
/-- `test` defines a test to perform on every declaration. It should never fail. Returning `none`
|
||||
signifies a passing test. Returning `some msg` reports a failing test with error `msg`. -/
|
||||
test : Name → MetaM (Option MessageData)
|
||||
/-- `noErrorsFound` is the message printed when all tests are negative -/
|
||||
noErrorsFound : MessageData
|
||||
/-- `errorsFound` is printed when at least one test is positive -/
|
||||
errorsFound : MessageData
|
||||
/-- If `isDefault` is false, this linter is only run when `--clippy` is passed. -/
|
||||
isDefault := true
|
||||
|
||||
/-- A `NamedEnvLinter` is an environment linter associated to a particular declaration. -/
|
||||
structure NamedEnvLinter extends EnvLinter where
|
||||
/-- The name of the named linter. This is just the declaration name without the namespace. -/
|
||||
name : Name
|
||||
/-- The linter declaration name -/
|
||||
declName : Name
|
||||
|
||||
/-- Gets an environment linter by declaration name. -/
|
||||
def getEnvLinter (name declName : Name) : CoreM NamedEnvLinter := unsafe
|
||||
return { ← evalConstCheck EnvLinter ``EnvLinter declName with name, declName }
|
||||
|
||||
/-- Defines the `envLinterExt` extension for adding an environment linter to the default set. -/
|
||||
builtin_initialize envLinterExt :
|
||||
SimplePersistentEnvExtension (Name × Bool) (NameMap (Name × Bool)) ←
|
||||
let addEntryFn := fun m (n, b) => m.insert (n.updatePrefix .anonymous) (n, b)
|
||||
registerSimplePersistentEnvExtension {
|
||||
addImportedFn := fun nss =>
|
||||
nss.foldl (init := {}) fun m ns => ns.foldl (init := m) addEntryFn
|
||||
addEntryFn
|
||||
}
|
||||
|
||||
/--
|
||||
Defines the `@[builtin_env_linter]` attribute for adding a linter to the default set.
|
||||
The form `@[builtin_env_linter clippy]` will not add the linter to the default set,
|
||||
but it can be selected by `lake builtin-lint --clippy`.
|
||||
|
||||
Linters are named using their declaration names, without the namespace. These must be distinct.
|
||||
-/
|
||||
syntax (name := builtin_env_linter) "builtin_env_linter" &" clippy"? : attr
|
||||
|
||||
builtin_initialize registerBuiltinAttribute {
|
||||
name := `builtin_env_linter
|
||||
descr := "Use this declaration as a linting test in `lake builtin-lint`"
|
||||
add := fun decl stx kind => do
|
||||
let dflt := stx[1].isNone
|
||||
unless kind == .global do throwError "invalid attribute `builtin_env_linter`, must be global"
|
||||
let shortName := decl.updatePrefix .anonymous
|
||||
if let some (declName, _) := (envLinterExt.getState (← getEnv)).find? shortName then
|
||||
Elab.addConstInfo stx declName
|
||||
throwError
|
||||
"invalid attribute `builtin_env_linter`, linter `{shortName}` has already been declared"
|
||||
let isPublic := !isPrivateName decl; let isMeta := isMarkedMeta (← getEnv) decl
|
||||
unless isPublic && isMeta do
|
||||
throwError "invalid attribute `builtin_env_linter`, \
|
||||
declaration `{.ofConstName decl}` must be marked as `public` and `meta`\
|
||||
{if isPublic then " but is only marked `public`" else ""}\
|
||||
{if isMeta then " but is only marked `meta`" else ""}"
|
||||
let constInfo ← getConstInfo decl
|
||||
unless ← (isDefEq constInfo.type (mkConst ``EnvLinter)).run' do
|
||||
throwError "`{.ofConstName decl}` must have type `{.ofConstName ``EnvLinter}`, got \
|
||||
`{constInfo.type}`"
|
||||
modifyEnv fun env => envLinterExt.addEntry env (decl, dflt)
|
||||
}
|
||||
|
||||
/-- `@[builtin_nolint linterName]` omits the tagged declaration from being checked by
|
||||
the linter with name `linterName`. -/
|
||||
syntax (name := builtin_nolint) "builtin_nolint" (ppSpace ident)+ : attr
|
||||
|
||||
/-- Defines the user attribute `builtin_nolint` for skipping environment linters. -/
|
||||
builtin_initialize builtinNolintAttr : ParametricAttribute (Array Name) ←
|
||||
registerParametricAttribute {
|
||||
name := `builtin_nolint
|
||||
descr := "Do not report this declaration in any of the tests of `lake builtin-lint`"
|
||||
getParam := fun _ => fun
|
||||
| `(attr| builtin_nolint $[$ids]*) => ids.mapM fun id => withRef id <| do
|
||||
let shortName := id.getId.eraseMacroScopes
|
||||
let some (declName, _) := (envLinterExt.getState (← getEnv)).find? shortName
|
||||
| throwError "linter '{shortName}' not found"
|
||||
Elab.addConstInfo id declName
|
||||
recordExtraModUseFromDecl (isMeta := false) declName
|
||||
pure shortName
|
||||
| _ => Elab.throwUnsupportedSyntax
|
||||
}
|
||||
|
||||
/-- Returns true if `decl` should be checked
|
||||
using `linter`, i.e., if there is no `builtin_nolint` attribute. -/
|
||||
def shouldBeLinted [Monad m] [MonadEnv m] (linter : Name) (decl : Name) : m Bool :=
|
||||
return !((builtinNolintAttr.getParam? (← getEnv) decl).getD #[]).contains linter
|
||||
|
||||
end Lean.Linter.EnvLinter
|
||||
180
src/Lean/Linter/EnvLinter/Frontend.lean
Normal file
180
src/Lean/Linter/EnvLinter/Frontend.lean
Normal file
@@ -0,0 +1,180 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Wojciech Różowski
|
||||
|
||||
Copyright (c) 2020 Floris van Doorn. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Floris van Doorn, Robert Y. Lewis, Gabriel Ebner
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Lean.Linter.EnvLinter.Basic
|
||||
import Lean.DeclarationRange
|
||||
import Lean.Util.Path
|
||||
import Lean.CoreM
|
||||
import Lean.Elab.Command
|
||||
|
||||
public section
|
||||
|
||||
open Lean Meta
|
||||
|
||||
namespace Lean.Linter.EnvLinter
|
||||
|
||||
/-- Verbosity for the linter output. -/
|
||||
inductive LintVerbosity
|
||||
/-- `low`: only print failing checks, print nothing on success. -/
|
||||
| low
|
||||
/-- `medium`: only print failing checks, print confirmation on success. -/
|
||||
| medium
|
||||
/-- `high`: print output of every check. -/
|
||||
| high
|
||||
deriving Inhabited, DecidableEq, Repr
|
||||
|
||||
/-- `getChecks clippy runOnly` produces a list of linters.
|
||||
`runOnly` is an optional list of names that should resolve to declarations with type
|
||||
`NamedEnvLinter`. If populated, only these linters are run (regardless of the default
|
||||
configuration). Otherwise, it uses all enabled linters in the environment tagged with
|
||||
`@[builtin_env_linter]`. If `clippy` is false, it only uses linters with `isDefault = true`. -/
|
||||
def getChecks (clippy : Bool) (runOnly : Option (List Name)) :
|
||||
CoreM (Array NamedEnvLinter) := do
|
||||
let mut result := #[]
|
||||
for (name, declName, isDefault) in envLinterExt.getState (← getEnv) do
|
||||
let shouldRun := match runOnly with
|
||||
| some only => only.contains name
|
||||
| none => clippy || isDefault
|
||||
if shouldRun then
|
||||
let linter ← getEnvLinter name declName
|
||||
result := result.binInsert (·.name.lt ·.name) linter
|
||||
pure result
|
||||
|
||||
|
||||
/--
|
||||
Runs all the specified linters on all the specified declarations in parallel,
|
||||
producing a list of results.
|
||||
-/
|
||||
def lintCore (decls : Array Name) (linters : Array NamedEnvLinter) :
|
||||
CoreM (Array (NamedEnvLinter × Std.HashMap Name MessageData)) := do
|
||||
let tasks : Array (NamedEnvLinter × Array (Name × Task (Except Exception <| Option MessageData))) ←
|
||||
linters.mapM fun linter => do
|
||||
let decls ← decls.filterM (shouldBeLinted linter.name)
|
||||
(linter, ·) <$> decls.mapM fun decl => (decl, ·) <$> do
|
||||
let act : MetaM (Option MessageData) := do
|
||||
linter.test decl
|
||||
EIO.asTask <| (← Core.wrapAsync (fun _ =>
|
||||
act |>.run' Elab.Command.mkMetaContext
|
||||
) (cancelTk? := none)) ()
|
||||
|
||||
let result ← tasks.mapM fun (linter, decls) => do
|
||||
let mut msgs : Std.HashMap Name MessageData := {}
|
||||
for (declName, msgTask) in decls do
|
||||
let msg? ← match msgTask.get with
|
||||
| Except.ok msg? => pure msg?
|
||||
| Except.error err => pure m!"LINTER FAILED:\n{err.toMessageData}"
|
||||
if let .some msg := msg? then
|
||||
msgs := msgs.insert declName msg
|
||||
pure (linter, msgs)
|
||||
return result
|
||||
|
||||
/-- Sorts a map with declaration keys as names by line number. -/
|
||||
def sortResults (results : Std.HashMap Name α) : CoreM <| Array (Name × α) := do
|
||||
let mut key : Std.HashMap Name Nat := {}
|
||||
for (n, _) in results.toArray do
|
||||
if let some range ← findDeclarationRanges? n then
|
||||
key := key.insert n <| range.range.pos.line
|
||||
pure $ results.toArray.qsort fun (a, _) (b, _) => key.getD a 0 < key.getD b 0
|
||||
|
||||
/-- Formats a linter warning as `#check` command with comment. -/
|
||||
def printWarning (declName : Name) (warning : MessageData) (useErrorFormat : Bool := false)
|
||||
(filePath : System.FilePath := default) : CoreM MessageData := do
|
||||
if useErrorFormat then
|
||||
if let some range ← findDeclarationRanges? declName then
|
||||
let msg ← addMessageContextPartial
|
||||
m!"{filePath}:{range.range.pos.line}:{range.range.pos.column + 1}: error: {
|
||||
← mkConstWithLevelParams declName} {warning}"
|
||||
return msg
|
||||
addMessageContextPartial m!"#check {← mkConstWithLevelParams declName} /- {warning} -/"
|
||||
|
||||
/-- Formats a map of linter warnings using `printWarning`, sorted by line number. -/
|
||||
def printWarnings (results : Std.HashMap Name MessageData) (filePath : System.FilePath := default)
|
||||
(useErrorFormat : Bool := false) : CoreM MessageData := do
|
||||
(MessageData.joinSep ·.toList Format.line) <$>
|
||||
(← sortResults results).mapM fun (declName, warning) =>
|
||||
printWarning declName warning (useErrorFormat := useErrorFormat) (filePath := filePath)
|
||||
|
||||
/--
|
||||
Formats a map of linter warnings grouped by filename with `-- filename` comments.
|
||||
-/
|
||||
def groupedByFilename (results : Std.HashMap Name MessageData) (useErrorFormat : Bool := false) :
|
||||
CoreM MessageData := do
|
||||
let sp ← if useErrorFormat then getSrcSearchPath else pure {}
|
||||
let grouped : Std.HashMap Name (System.FilePath × Std.HashMap Name MessageData) ←
|
||||
results.foldM (init := {}) fun grouped declName msg => do
|
||||
let mod ← findModuleOf? declName
|
||||
let mod := mod.getD (← getEnv).mainModule
|
||||
grouped.insert mod <$>
|
||||
match grouped[mod]? with
|
||||
| some (fp, msgs) => pure (fp, msgs.insert declName msg)
|
||||
| none => do
|
||||
let fp ← if useErrorFormat then
|
||||
pure <| (← sp.findWithExt "lean" mod).getD (modToFilePath "." mod "lean")
|
||||
else pure default
|
||||
pure (fp, .insert {} declName msg)
|
||||
let grouped' := grouped.toArray.qsort fun (a, _) (b, _) => toString a < toString b
|
||||
(MessageData.joinSep · (Format.line ++ Format.line)) <$>
|
||||
grouped'.toList.mapM fun (mod, fp, msgs) => do
|
||||
pure m!"-- {mod}\n{← printWarnings msgs (filePath := fp) (useErrorFormat := useErrorFormat)}"
|
||||
|
||||
/--
|
||||
Formats the linter results as Lean code with comments and `#check` commands.
|
||||
-/
|
||||
def formatLinterResults
|
||||
(results : Array (NamedEnvLinter × Std.HashMap Name MessageData))
|
||||
(decls : Array Name)
|
||||
(groupByFilename : Bool)
|
||||
(whereDesc : String) (runClippyLinters : Bool)
|
||||
(verbose : LintVerbosity) (numLinters : Nat) (useErrorFormat : Bool := false) :
|
||||
CoreM MessageData := do
|
||||
let formattedResults ← results.filterMapM fun (linter, results) => do
|
||||
if !results.isEmpty then
|
||||
let warnings ←
|
||||
if groupByFilename || useErrorFormat then
|
||||
groupedByFilename results (useErrorFormat := useErrorFormat)
|
||||
else
|
||||
printWarnings results
|
||||
pure $ some m!"/- The `{linter.name}` linter reports:\n{linter.errorsFound} -/\n{warnings}\n"
|
||||
else if verbose = LintVerbosity.high then
|
||||
pure $ some m!"/- OK: {linter.noErrorsFound} -/"
|
||||
else
|
||||
pure none
|
||||
let mut s := MessageData.joinSep formattedResults.toList Format.line
|
||||
let numAutoDecls := (← decls.filterM isAutoDecl).size
|
||||
let failed := results.map (·.2.size) |>.foldl (·+·) 0
|
||||
unless verbose matches LintVerbosity.low do
|
||||
s := m!"-- Found {failed} error{if failed == 1 then "" else "s"
|
||||
} in {decls.size - numAutoDecls} declarations (plus {
|
||||
numAutoDecls} automatically generated ones) {whereDesc
|
||||
} with {numLinters} linters\n\n{s}"
|
||||
unless runClippyLinters do s := m!"{s}-- (non-clippy linters skipped)\n"
|
||||
pure s
|
||||
|
||||
/-- Get the list of declarations in the current module. -/
|
||||
def getDeclsInCurrModule : CoreM (Array Name) := do
|
||||
pure $ (← getEnv).constants.map₂.foldl (init := #[]) fun r k _ => r.push k
|
||||
|
||||
/-- Get the list of all declarations in the environment. -/
|
||||
def getAllDecls : CoreM (Array Name) := do
|
||||
pure $ (← getEnv).constants.map₁.fold (init := ← getDeclsInCurrModule) fun r k _ => r.push k
|
||||
|
||||
/-- Get the list of all declarations in the specified package. -/
|
||||
def getDeclsInPackage (pkg : Name) : CoreM (Array Name) := do
|
||||
let env ← getEnv
|
||||
let mut decls ← getDeclsInCurrModule
|
||||
let modules := env.header.moduleNames.map (pkg.isPrefixOf ·)
|
||||
return env.constants.map₁.fold (init := decls) fun decls declName _ =>
|
||||
if modules[env.const2ModIdx[declName]?.get!]! then
|
||||
decls.push declName
|
||||
else decls
|
||||
|
||||
end Lean.Linter.EnvLinter
|
||||
@@ -30,7 +30,7 @@ of type
|
||||
α → List α → Sort (max 1 u_1) → Sort (max 1 u_1)
|
||||
```
|
||||
-/
|
||||
private def buildBelowMinorPremise (rlvl : Level) (motives : Array Expr) (minorType : Expr) : MetaM Expr :=
|
||||
def buildBelowMinorPremise (rlvl : Level) (motives : Array Expr) (minorType : Expr) : MetaM Expr :=
|
||||
forallTelescope minorType fun minor_args _ => do go #[] minor_args.toList
|
||||
where
|
||||
go (prods : Array Expr) : List Expr → MetaM Expr
|
||||
@@ -56,7 +56,7 @@ For example for the `List` type, it constructs,
|
||||
fun {a} {motive} t => List.rec PUnit (fun a_1 a a_ih => motive a ×' a_ih) t
|
||||
```
|
||||
-/
|
||||
private def mkBelowFromRec (recName : Name) (nParams : Nat)
|
||||
def mkBelowFromRec (recName : Name) (nParams : Nat)
|
||||
(belowName : Name) : MetaM Unit := do
|
||||
-- The construction follows the type of `ind.rec`
|
||||
let .recInfo recVal ← getConstInfo recName
|
||||
@@ -146,7 +146,7 @@ of type
|
||||
PProd (motive (head :: tail)) (PProd (PProd (motive tail) (List.below tail)) PUnit)
|
||||
```
|
||||
-/
|
||||
private def buildBRecOnMinorPremise (rlvl : Level) (motives : Array Expr)
|
||||
def buildBRecOnMinorPremise (rlvl : Level) (motives : Array Expr)
|
||||
(belows : Array Expr) (fs : Array Expr) (minorType : Expr) : MetaM Expr :=
|
||||
forallTelescope minorType fun minor_args minor_type => do
|
||||
let rec go (prods : Array Expr) : List Expr → MetaM Expr
|
||||
@@ -188,7 +188,7 @@ protected theorem List.brecOn.eq.{u} : ∀ {a : Type} {motive : List a → Sort
|
||||
(F_1 : (t : List a) → List.below t → motive t), List.brecOn t F_1 = F_1 t (List.brecOn.go t F_1).2
|
||||
```
|
||||
-/
|
||||
private def mkBRecOnFromRec (recName : Name) (nParams : Nat)
|
||||
def mkBRecOnFromRec (recName : Name) (nParams : Nat)
|
||||
(all : Array Name) (brecOnName : Name) : MetaM Unit := do
|
||||
let brecOnGoName := brecOnName.str "go"
|
||||
let brecOnEqName := brecOnName.str "eq"
|
||||
|
||||
@@ -15,7 +15,7 @@ import Lean.Meta.Transform
|
||||
namespace Lean.Meta
|
||||
|
||||
|
||||
private structure SparseCasesOnKey where
|
||||
structure SparseCasesOnKey where
|
||||
indName : Name
|
||||
ctors : Array Name
|
||||
-- When this is created in a private context and thus may contain private references, we must
|
||||
@@ -23,7 +23,7 @@ private structure SparseCasesOnKey where
|
||||
isPrivate : Bool
|
||||
deriving BEq, Hashable
|
||||
|
||||
private builtin_initialize sparseCasesOnCacheExt : EnvExtension (PHashMap SparseCasesOnKey Name) ←
|
||||
builtin_initialize sparseCasesOnCacheExt : EnvExtension (PHashMap SparseCasesOnKey Name) ←
|
||||
registerEnvExtension (pure {}) (asyncMode := .local) -- mere cache, keep it local
|
||||
|
||||
/-- Information necessary to recognize and split on sparse casesOn (in particular in MatchEqs) -/
|
||||
@@ -34,7 +34,7 @@ public structure SparseCasesOnInfo where
|
||||
insterestingCtors : Array Name
|
||||
deriving Inhabited
|
||||
|
||||
private builtin_initialize sparseCasesOnInfoExt : MapDeclarationExtension SparseCasesOnInfo ←
|
||||
builtin_initialize sparseCasesOnInfoExt : MapDeclarationExtension SparseCasesOnInfo ←
|
||||
mkMapDeclarationExtension (exportEntriesFn := fun env s =>
|
||||
let all := s.toArray
|
||||
-- Do not export for non-exposed defs at exported/server levels
|
||||
|
||||
@@ -83,7 +83,7 @@ where
|
||||
value := val
|
||||
})
|
||||
|
||||
private def isName (env : Environment) (n : Name) : Bool :=
|
||||
def isName (env : Environment) (n : Name) : Bool :=
|
||||
if let .str p "else_eq" := n then
|
||||
(getSparseCasesOnInfoCore env p).isSome
|
||||
else
|
||||
|
||||
@@ -16,7 +16,7 @@ def hinjSuffix := "hinj"
|
||||
public def mkCtorIdxHInjTheoremNameFor (indName : Name) : Name :=
|
||||
(mkCtorIdxName indName).str hinjSuffix
|
||||
|
||||
private partial def mkHInjectiveTheorem? (thmName : Name) (indVal : InductiveVal) : MetaM TheoremVal := do
|
||||
partial def mkHInjectiveTheorem? (thmName : Name) (indVal : InductiveVal) : MetaM TheoremVal := do
|
||||
let us := indVal.levelParams.map mkLevelParam
|
||||
let thmType ←
|
||||
forallBoundedTelescope indVal.type (indVal.numParams + indVal.numIndices) fun xs1 _ => do
|
||||
|
||||
@@ -40,7 +40,7 @@ namespace Lean.Meta.Match
|
||||
|
||||
This can be used to use the alternative of a match expression in its splitter.
|
||||
-/
|
||||
public partial def forallAltVarsTelescope (altType : Expr) (altInfo : AltParamInfo)
|
||||
partial def forallAltVarsTelescope (altType : Expr) (altInfo : AltParamInfo)
|
||||
(k : (patVars : Array Expr) → (args : Array Expr) → (mask : Array Bool) → (type : Expr) → MetaM α) : MetaM α := do
|
||||
assert! altInfo.numOverlaps = 0
|
||||
if altInfo.hasUnitThunk then
|
||||
@@ -104,7 +104,7 @@ where
|
||||
|
||||
This can be used to use the alternative of a match expression in its splitter.
|
||||
-/
|
||||
public partial def forallAltTelescope (altType : Expr) (altInfo : AltParamInfo) (numDiscrEqs : Nat)
|
||||
partial def forallAltTelescope (altType : Expr) (altInfo : AltParamInfo) (numDiscrEqs : Nat)
|
||||
(k : (ys : Array Expr) → (eqs : Array Expr) → (args : Array Expr) → (mask : Array Bool) → (type : Expr) → MetaM α)
|
||||
: MetaM α := do
|
||||
forallAltVarsTelescope altType altInfo fun ys args mask altType => do
|
||||
|
||||
@@ -21,7 +21,7 @@ We will eventually have to write more efficient proof automation for this module
|
||||
The new proof automation should exploit the structure of the generated goals and avoid general purpose tactics
|
||||
such as `contradiction`.
|
||||
-/
|
||||
private def _root_.Lean.MVarId.contradictionQuick (mvarId : MVarId) : MetaM Bool := do
|
||||
def _root_.Lean.MVarId.contradictionQuick (mvarId : MVarId) : MetaM Bool := do
|
||||
mvarId.withContext do
|
||||
let mut posMap : Std.HashMap Expr FVarId := {}
|
||||
let mut negMap : Std.HashMap Expr FVarId := {}
|
||||
|
||||
@@ -16,7 +16,7 @@ import Lean.Meta.Tactic.Replace
|
||||
|
||||
namespace Lean.Meta
|
||||
|
||||
private def rewriteGoalUsingEq (goal : MVarId) (eq : Expr) (symm : Bool := false) : MetaM MVarId := do
|
||||
def rewriteGoalUsingEq (goal : MVarId) (eq : Expr) (symm : Bool := false) : MetaM MVarId := do
|
||||
let rewriteResult ← goal.rewrite (←goal.getType) eq symm
|
||||
goal.replaceTargetEq rewriteResult.eNew rewriteResult.eqProof
|
||||
|
||||
|
||||
@@ -383,7 +383,7 @@ When equal, uses `eq_self` (no kernel evaluation needed). When different, uses
|
||||
`mkStringLitNeProof` which finds the first differing character position and proves
|
||||
inequality via `congrArg (List.get?Internal · i)`.
|
||||
-/
|
||||
private def evalStringEq (a b : Expr) : SimpM Result := do
|
||||
def evalStringEq (a b : Expr) : SimpM Result := do
|
||||
let some va := getStringValue? a | return .rfl
|
||||
let some vb := getStringValue? b | return .rfl
|
||||
if va = vb then
|
||||
|
||||
@@ -172,7 +172,7 @@ inductive Result where
|
||||
Pre-computed `.rfl` results to avoid dynamic memory allocation.
|
||||
Each combination of `done` and `contextDependent` maps to a compile-time constant.
|
||||
-/
|
||||
public def mkRflResult (done : Bool := false) (contextDependent : Bool := false) : Result :=
|
||||
def mkRflResult (done : Bool := false) (contextDependent : Bool := false) : Result :=
|
||||
match done, contextDependent with
|
||||
| false, false => .rfl
|
||||
| false, true => .rfl false true
|
||||
@@ -180,15 +180,15 @@ public def mkRflResult (done : Bool := false) (contextDependent : Bool := false)
|
||||
| true, true => .rfl true true
|
||||
|
||||
/-- Like `mkRflResult` with `done := false`. -/
|
||||
public def mkRflResultCD (contextDependent : Bool) : Result :=
|
||||
def mkRflResultCD (contextDependent : Bool) : Result :=
|
||||
if contextDependent then .rfl false true else .rfl
|
||||
|
||||
/-- Returns `true` if this result depends on the local context (e.g., hypotheses). -/
|
||||
public abbrev Result.isContextDependent : Result → Bool
|
||||
abbrev Result.isContextDependent : Result → Bool
|
||||
| .rfl _ cd | .step _ _ _ cd => cd
|
||||
|
||||
/-- Marks a result as context-dependent. -/
|
||||
public def Result.withContextDependent : Result → Result
|
||||
def Result.withContextDependent : Result → Result
|
||||
| .rfl done _ => .rfl done true
|
||||
| .step e h done _ => .step e h done true
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ This is used during pattern matching and structural definitional equality tests
|
||||
to identify arguments that can be skipped or handled specially
|
||||
(e.g., instance arguments can be synthesized, proof arguments can be inferred).
|
||||
-/
|
||||
public structure ProofInstArgInfo where
|
||||
structure ProofInstArgInfo where
|
||||
/-- `true` if this argument is a proof (its type is a `Prop`). -/
|
||||
isProof : Bool
|
||||
/-- `true` if this argument is a type class instance. -/
|
||||
@@ -78,7 +78,7 @@ Information about a function symbol. It stores which argument positions are proo
|
||||
enabling optimizations during pattern matching and structural definitional equality tests
|
||||
such as skipping proof arguments or deferring instance synthesis.
|
||||
-/
|
||||
public structure ProofInstInfo where
|
||||
structure ProofInstInfo where
|
||||
/-- Information about each argument position. -/
|
||||
argsInfo : Array ProofInstArgInfo
|
||||
deriving Inhabited
|
||||
|
||||
@@ -17,7 +17,7 @@ import Init.GetElem
|
||||
namespace Lean.Meta.Tactic.Cbv
|
||||
|
||||
/-- Extract elements from an array literal (`Array.mk` applied to a list literal). -/
|
||||
private def getArrayLitElems? (e : Expr) : Option <| Array Expr :=
|
||||
def getArrayLitElems? (e : Expr) : Option <| Array Expr :=
|
||||
match_expr e with
|
||||
| Array.mk _ as => getListLitElems as
|
||||
| _ => none
|
||||
|
||||
@@ -12,7 +12,7 @@ import Lean.Meta.Tactic.Util
|
||||
|
||||
namespace Lean.Meta
|
||||
|
||||
private partial def cleanupCore (mvarId : MVarId) (toPreserve : Array FVarId) (indirectProps : Bool) : MetaM MVarId := do
|
||||
partial def cleanupCore (mvarId : MVarId) (toPreserve : Array FVarId) (indirectProps : Bool) : MetaM MVarId := do
|
||||
mvarId.withContext do
|
||||
mvarId.checkNotAssigned `cleanup
|
||||
let used ← collectUsed |>.run' (false, {})
|
||||
|
||||
@@ -50,6 +50,18 @@ register_builtin_option debug.tactic.simp.checkDefEqAttr : Bool := {
|
||||
of the `defeq` attribute, and warn if it was. Note that this is a costly check."
|
||||
}
|
||||
|
||||
register_builtin_option warning.simp.varHead : Bool := {
|
||||
defValue := false
|
||||
descr := "If true, warns when the head symbol of the left-hand side of a `@[simp]` theorem \
|
||||
is a variable. Such lemmas are tried on every simp step, which can be slow."
|
||||
}
|
||||
|
||||
register_builtin_option warning.simp.otherHead : Bool := {
|
||||
defValue := true
|
||||
descr := "If true, warns when the left-hand side of a `@[simp]` theorem is headed by a \
|
||||
`.other` key in the discrimination tree (e.g. a lambda expression). Such lemmas can cause slowdowns."
|
||||
}
|
||||
|
||||
/--
|
||||
An `Origin` is an identifier for simp theorems which indicates roughly
|
||||
what action the user took which lead to this theorem existing in the simp set.
|
||||
@@ -359,12 +371,27 @@ private def checkTypeIsProp (type : Expr) : MetaM Unit :=
|
||||
unless (← isProp type) do
|
||||
throwError "Invalid simp theorem: Expected a proposition, but found{indentExpr type}"
|
||||
|
||||
private def mkSimpTheoremKeys (type : Expr) (noIndexAtArgs : Bool) : MetaM (Array SimpTheoremKey × Bool) := do
|
||||
private def mkSimpTheoremKeys (type : Expr) (noIndexAtArgs : Bool) (checkLhs : Bool := false) : MetaM (Array SimpTheoremKey × Bool) := do
|
||||
withNewMCtxDepth do
|
||||
let (_, _, type) ← forallMetaTelescopeReducing type
|
||||
let type ← whnfR type
|
||||
match type.eq? with
|
||||
| some (_, lhs, rhs) => pure (← DiscrTree.mkPath lhs noIndexAtArgs, ← isPerm lhs rhs)
|
||||
| some (_, lhs, rhs) =>
|
||||
let keys ← DiscrTree.mkPath lhs noIndexAtArgs
|
||||
if checkLhs then
|
||||
if warning.simp.varHead.get (← getOptions) && keys[0]? == some .star then
|
||||
logWarning m!"Left-hand side of simp theorem has a variable as head symbol. \
|
||||
This means the theorem will be tried on every simp step, which can be expensive. \
|
||||
This may be acceptable for `local` or `scoped` simp lemmas.\n\
|
||||
Use `set_option warning.simp.varHead false` to disable this warning."
|
||||
if warning.simp.otherHead.get (← getOptions) && keys[0]? == some .other then
|
||||
logWarning m!"Left-hand side of simp theorem is headed by a `.other` key in the \
|
||||
discrimination tree (e.g. because it is a lambda expression). \
|
||||
This theorem will be tried against all expressions that also have a `.other` key as head, \
|
||||
which can cause slowdowns. \
|
||||
This may be acceptable for `local` or `scoped` simp lemmas.\n\
|
||||
Use `set_option warning.simp.otherHead false` to disable this warning."
|
||||
pure (keys, ← isPerm lhs rhs)
|
||||
| none => throwError "Unexpected kind of simp theorem{indentExpr type}"
|
||||
|
||||
register_builtin_option simp.rfl.checkTransparency: Bool := {
|
||||
@@ -373,10 +400,10 @@ register_builtin_option simp.rfl.checkTransparency: Bool := {
|
||||
}
|
||||
|
||||
private def mkSimpTheoremCore (origin : Origin) (e : Expr) (levelParams : Array Name) (proof : Expr)
|
||||
(post : Bool) (prio : Nat) (noIndexAtArgs : Bool) : MetaM SimpTheorem := do
|
||||
(post : Bool) (prio : Nat) (noIndexAtArgs : Bool) (checkLhs : Bool := false): MetaM SimpTheorem := do
|
||||
assert! origin != .fvar ⟨.anonymous⟩
|
||||
let type ← instantiateMVars (← inferType e)
|
||||
let (keys, perm) ← mkSimpTheoremKeys type noIndexAtArgs
|
||||
let (keys, perm) ← mkSimpTheoremKeys type noIndexAtArgs checkLhs
|
||||
let rfl ← isRflProof proof
|
||||
if rfl && simp.rfl.checkTransparency.get (← getOptions) then
|
||||
forallTelescopeReducing type fun _ type => do
|
||||
@@ -399,7 +426,7 @@ Creates a `SimpTheorem` from a global theorem.
|
||||
Because some theorems lead to multiple `SimpTheorems` (in particular conjunctions), returns an array.
|
||||
-/
|
||||
def mkSimpTheoremFromConst (declName : Name) (post := true) (inv := false)
|
||||
(prio : Nat := eval_prio default) : MetaM (Array SimpTheorem) := do
|
||||
(prio : Nat := eval_prio default) (checkLhs : Bool := false) : MetaM (Array SimpTheorem) := do
|
||||
let cinfo ← getConstVal declName
|
||||
let us := cinfo.levelParams.map mkLevelParam
|
||||
let origin := .decl declName post inv
|
||||
@@ -413,10 +440,10 @@ def mkSimpTheoremFromConst (declName : Name) (post := true) (inv := false)
|
||||
let auxName ← mkAuxLemma (kind? := `_simp) cinfo.levelParams type val (inferRfl := true)
|
||||
(forceExpose := true) -- These kinds of theorems are small and `to_additive` may need to
|
||||
-- unfold them.
|
||||
r := r.push <| (← do mkSimpTheoremCore origin (mkConst auxName us) #[] (mkConst auxName) post prio (noIndexAtArgs := false))
|
||||
r := r.push <| (← do mkSimpTheoremCore origin (mkConst auxName us) #[] (mkConst auxName) post prio (noIndexAtArgs := false) (checkLhs := checkLhs))
|
||||
return r
|
||||
else
|
||||
return #[← withoutExporting do mkSimpTheoremCore origin (mkConst declName us) #[] (mkConst declName) post prio (noIndexAtArgs := false)]
|
||||
return #[← withoutExporting do mkSimpTheoremCore origin (mkConst declName us) #[] (mkConst declName) post prio (noIndexAtArgs := false) (checkLhs := checkLhs)]
|
||||
|
||||
def SimpTheorem.getValue (simpThm : SimpTheorem) : MetaM Expr := do
|
||||
if simpThm.proof.isConst && simpThm.levelParams.isEmpty then
|
||||
@@ -670,7 +697,7 @@ def SimpExtension.getTheorems (ext : SimpExtension) : CoreM SimpTheorems :=
|
||||
Adds a simp theorem to a simp extension
|
||||
-/
|
||||
def addSimpTheorem (ext : SimpExtension) (declName : Name) (post : Bool) (inv : Bool) (attrKind : AttributeKind) (prio : Nat) : MetaM Unit := do
|
||||
let simpThms ← withExporting (isExporting := attrKind != .local && !isPrivateName declName) do mkSimpTheoremFromConst declName post inv prio
|
||||
let simpThms ← withExporting (isExporting := attrKind != .local && !isPrivateName declName) do mkSimpTheoremFromConst declName post inv prio (checkLhs := true)
|
||||
for simpThm in simpThms do
|
||||
ext.add (SimpEntry.thm simpThm) attrKind
|
||||
|
||||
|
||||
@@ -88,21 +88,6 @@ builtin_initialize registerTraceClass `Meta.wrapInstance
|
||||
|
||||
open Meta
|
||||
|
||||
/--
|
||||
Rebuild a type application with fresh synthetic metavariables for instance-implicit arguments.
|
||||
Non-instance-implicit arguments are assigned from the original application's arguments.
|
||||
If the function is over-applied, extra arguments are preserved.
|
||||
-/
|
||||
public def abstractInstImplicitArgs (type : Expr) : MetaM Expr := do
|
||||
let fn := type.getAppFn
|
||||
let args := type.getAppArgs
|
||||
let (mvars, bis, _) ← forallMetaTelescope (← inferType fn)
|
||||
for i in [:mvars.size] do
|
||||
unless bis[i]!.isInstImplicit do
|
||||
mvars[i]!.mvarId!.assign args[i]!
|
||||
let args := mvars ++ args.drop mvars.size
|
||||
instantiateMVars (mkAppN fn args)
|
||||
|
||||
partial def getFieldOrigin (structName field : Name) : MetaM (Name × StructureFieldInfo) := do
|
||||
let env ← getEnv
|
||||
for parent in getStructureParentInfo env structName do
|
||||
|
||||
@@ -342,10 +342,11 @@ namespace InternalSyntax
|
||||
This command is for internal use only. It is intended for macros that implicitly introduce new
|
||||
scopes, such as `expandInCmd` and `expandNamespacedDeclaration`. It allows local attributes to remain
|
||||
accessible beyond those implicit scopes, even though they would normally be hidden from the user.
|
||||
The numeric argument specifies how many scope levels to mark as non-delimiting.
|
||||
-/
|
||||
scoped syntax (name := end_local_scope) "end_local_scope" : command
|
||||
scoped syntax (name := end_local_scope) "end_local_scope" num : command
|
||||
|
||||
def endLocalScopeSyntax : Command := Unhygienic.run `(end_local_scope)
|
||||
def endLocalScopeSyntax (depth : Nat) : Command := Unhygienic.run `(end_local_scope $(Syntax.mkNumLit (toString depth)))
|
||||
end InternalSyntax
|
||||
|
||||
/-- Declares one or more typed variables, or modifies whether already-declared variables are
|
||||
|
||||
@@ -50,17 +50,17 @@ def versoCommentBodyFn : ParserFn := fun c s =>
|
||||
rawFn (Doc.Parser.ignoreFn <| chFn '-' >> chFn '/') (trailingWs := true) c s
|
||||
else s
|
||||
|
||||
public def versoCommentBody : Parser where
|
||||
def versoCommentBody : Parser where
|
||||
fn := fun c s => nodeFn `Lean.Parser.Command.versoCommentBody versoCommentBodyFn c s
|
||||
|
||||
|
||||
@[combinator_parenthesizer versoCommentBody, expose]
|
||||
public def versoCommentBody.parenthesizer := PrettyPrinter.Parenthesizer.visitToken
|
||||
def versoCommentBody.parenthesizer := PrettyPrinter.Parenthesizer.visitToken
|
||||
|
||||
open PrettyPrinter Formatter in
|
||||
open Syntax.MonadTraverser in
|
||||
@[combinator_formatter versoCommentBody, expose]
|
||||
public def versoCommentBody.formatter : PrettyPrinter.Formatter := do
|
||||
def versoCommentBody.formatter : PrettyPrinter.Formatter := do
|
||||
visitArgs $ do
|
||||
visitAtom `«-/»
|
||||
goLeft
|
||||
|
||||
@@ -144,13 +144,18 @@ def ScopedEnvExtension.popScope (ext : ScopedEnvExtension α β σ) (env : Envir
|
||||
| _ :: state₂ :: stack => { s with stateStack := state₂ :: stack }
|
||||
| _ => s
|
||||
|
||||
/-- Modifies `delimitsLocal` flag to `false` to turn off delimiting of local entries.
|
||||
/-- Modifies `delimitsLocal` flag to `false` on the top `depth` entries of the state stack,
|
||||
to turn off delimiting of local entries across multiple implicit scope levels
|
||||
(e.g. those introduced by compound `namespace A.B.C` expansions).
|
||||
-/
|
||||
def ScopedEnvExtension.setDelimitsLocal (ext : ScopedEnvExtension α β σ) (env : Environment) : Environment :=
|
||||
def ScopedEnvExtension.setDelimitsLocal (ext : ScopedEnvExtension α β σ) (env : Environment) (depth : Nat) : Environment :=
|
||||
ext.ext.modifyState (asyncMode := .local) env fun s =>
|
||||
match s.stateStack with
|
||||
| [] => s
|
||||
| state :: stack => {s with stateStack := {state with delimitsLocal := false} :: stack}
|
||||
{s with stateStack := go depth s.stateStack}
|
||||
where
|
||||
go : Nat → List (State σ) → List (State σ)
|
||||
| 0, stack => stack
|
||||
| _, [] => []
|
||||
| n + 1, state :: stack => {state with delimitsLocal := false} :: go n stack
|
||||
|
||||
def ScopedEnvExtension.addEntry (ext : ScopedEnvExtension α β σ) (env : Environment) (b : β) : Environment :=
|
||||
ext.ext.addEntry env (Entry.global b)
|
||||
@@ -226,11 +231,12 @@ def popScope [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] : m Unit :=
|
||||
for ext in (← scopedEnvExtensionsRef.get) do
|
||||
modifyEnv ext.popScope
|
||||
|
||||
/-- Used to implement `end_local_scope` command, that disables delimiting local entries of ScopedEnvExtension in a current scope.
|
||||
/-- Used to implement `end_local_scope` command, that disables delimiting local entries of ScopedEnvExtension
|
||||
across `depth` scope levels.
|
||||
-/
|
||||
def setDelimitsLocal [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] : m Unit := do
|
||||
def setDelimitsLocal [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] (depth : Nat) : m Unit := do
|
||||
for ext in (← scopedEnvExtensionsRef.get) do
|
||||
modifyEnv (ext.setDelimitsLocal ·)
|
||||
modifyEnv (ext.setDelimitsLocal · depth)
|
||||
|
||||
def activateScoped [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] (namespaceName : Name) : m Unit := do
|
||||
for ext in (← scopedEnvExtensionsRef.get) do
|
||||
|
||||
@@ -35,7 +35,7 @@ structure Import where
|
||||
|
||||
-- TODO: move further up into `Init` by using simpler representation for `imports`
|
||||
@[extern "lean_idbg_client_loop"]
|
||||
public opaque Idbg.idbgClientLoop {α : Type} [Nonempty α]
|
||||
opaque Idbg.idbgClientLoop {α : Type} [Nonempty α]
|
||||
(siteId : String) (imports : Array Import) (apply : α → String) : IO Unit
|
||||
|
||||
instance : Coe Name Import := ⟨({module := ·})⟩
|
||||
@@ -130,7 +130,7 @@ The type of module package identifiers.
|
||||
This is a {name}`String` that is used to disambiguate native symbol prefixes between
|
||||
different packages (and different versions of the same package).
|
||||
-/
|
||||
public abbrev PkgId := String
|
||||
abbrev PkgId := String
|
||||
|
||||
/-- A module's setup information as described by a JSON file. -/
|
||||
structure ModuleSetup where
|
||||
|
||||
@@ -22,7 +22,7 @@ open Server Std Lean SubExpr
|
||||
|
||||
NOTE: in the future we may add other tags.
|
||||
-/
|
||||
private inductive ExprDiffTag where
|
||||
inductive ExprDiffTag where
|
||||
| change
|
||||
| delete
|
||||
| insert
|
||||
|
||||
@@ -7,3 +7,4 @@ module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Protocol.H1
|
||||
|
||||
@@ -6,6 +6,7 @@ Authors: Sofia Rodrigues
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data.URI
|
||||
public import Std.Internal.Http.Data.Headers.Name
|
||||
public import Std.Internal.Http.Data.Headers.Value
|
||||
public import Std.Internal.Parsec.Basic
|
||||
@@ -215,4 +216,89 @@ def serialize (connection : Connection) : Header.Name × Header.Value :=
|
||||
|
||||
instance : Header Connection := ⟨parse, serialize⟩
|
||||
|
||||
end Std.Http.Header.Connection
|
||||
end Connection
|
||||
|
||||
/--
|
||||
The `Host` header.
|
||||
|
||||
Represents the authority component of a URI:
|
||||
host [ ":" port ]
|
||||
|
||||
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-host-and-authority
|
||||
-/
|
||||
structure Host where
|
||||
/--
|
||||
Host name (reg-name, IPv4, or IPv6 literal).
|
||||
-/
|
||||
host : URI.Host
|
||||
|
||||
/--
|
||||
Optional port.
|
||||
-/
|
||||
port : URI.Port
|
||||
deriving Repr, BEq
|
||||
|
||||
namespace Host
|
||||
|
||||
/--
|
||||
Parses a `Host` header value.
|
||||
-/
|
||||
def parse (v : Value) : Option Host :=
|
||||
let parsed := (Std.Http.URI.Parser.parseHostHeader <* Std.Internal.Parsec.eof).run v.value.toUTF8
|
||||
match parsed with
|
||||
| .ok ⟨host, port⟩ => some ⟨host, port⟩
|
||||
| .error _ => none
|
||||
|
||||
/--
|
||||
Serializes a `Host` header back to a name and a value.
|
||||
-/
|
||||
def serialize (host : Host) : Header.Name × Header.Value :=
|
||||
let value := match host.port with
|
||||
| .value port => Header.Value.ofString! s!"{host.host}:{port}"
|
||||
| .empty => Header.Value.ofString! s!"{host.host}:"
|
||||
| .omitted => Header.Value.ofString! <| toString host.host
|
||||
|
||||
(.mk "host", value)
|
||||
|
||||
instance : Header Host := ⟨parse, serialize⟩
|
||||
|
||||
end Host
|
||||
|
||||
/--
|
||||
The `Expect` header.
|
||||
|
||||
Represents an expectation token.
|
||||
The only standardized expectation is `100-continue`.
|
||||
|
||||
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-expect
|
||||
-/
|
||||
structure Expect where
|
||||
deriving Repr, BEq
|
||||
|
||||
namespace Expect
|
||||
|
||||
/--
|
||||
Parses an `Expect` header.
|
||||
|
||||
Succeeds only if the value is exactly `100-continue`
|
||||
(case-insensitive, trimmed).
|
||||
-/
|
||||
def parse (v : Value) : Option Expect :=
|
||||
let normalized := v.value.trimAscii.toString.toLower
|
||||
|
||||
if normalized == "100-continue" then
|
||||
some ⟨⟩
|
||||
else
|
||||
none
|
||||
|
||||
/--
|
||||
Serializes an `Expect` header.
|
||||
-/
|
||||
def serialize (_ : Expect) : Header.Name × Header.Value :=
|
||||
(Header.Name.expect, Value.ofString! "100-continue")
|
||||
|
||||
instance : Header Expect := ⟨parse, serialize⟩
|
||||
|
||||
end Expect
|
||||
|
||||
end Std.Http.Header
|
||||
|
||||
@@ -52,13 +52,13 @@ private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
|
||||
if config.maxSchemeLength = 0 then
|
||||
fail "scheme length limit is 0 (no scheme allowed)"
|
||||
|
||||
let first ← takeWhileUpTo1 isAlphaByte 1
|
||||
let rest ← takeWhileUpTo
|
||||
let first : UInt8 ← satisfy isAlphaByte
|
||||
let rest ← takeWhileAtMost
|
||||
(fun c =>
|
||||
isAlphaNum c ∨
|
||||
c = '+'.toUInt8 ∨ c = '-'.toUInt8 ∨ c = '.'.toUInt8)
|
||||
(config.maxSchemeLength - 1)
|
||||
let schemeBytes := first.toByteArray ++ rest.toByteArray
|
||||
let schemeBytes := ByteArray.empty.push first ++ rest.toByteArray
|
||||
let str := String.fromUTF8! schemeBytes |>.toLower
|
||||
|
||||
if h : URI.IsValidScheme str then
|
||||
@@ -68,7 +68,7 @@ private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
|
||||
|
||||
-- port = 1*DIGIT
|
||||
private def parsePortNumber : Parser UInt16 := do
|
||||
let portBytes ← takeWhileUpTo1 isDigitByte 5
|
||||
let portBytes ← takeWhileAtMost isDigitByte 5
|
||||
|
||||
let portStr := String.fromUTF8! portBytes.toByteArray
|
||||
|
||||
@@ -82,7 +82,7 @@ private def parsePortNumber : Parser UInt16 := do
|
||||
|
||||
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
|
||||
private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
|
||||
let userBytesName ← takeWhileUpTo
|
||||
let userBytesName ← takeWhileAtMost
|
||||
(fun x =>
|
||||
x ≠ ':'.toUInt8 ∧
|
||||
(isUserInfoChar x ∨ x = '%'.toUInt8))
|
||||
@@ -94,7 +94,7 @@ private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
|
||||
let userPassEncoded ← if ← peekIs (· == ':'.toUInt8) then
|
||||
skip
|
||||
|
||||
let userBytesPass ← takeWhileUpTo
|
||||
let userBytesPass ← takeWhileAtMost
|
||||
(fun x => isUserInfoChar x ∨ x = '%'.toUInt8)
|
||||
config.maxUserInfoLength
|
||||
|
||||
@@ -113,7 +113,7 @@ private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
|
||||
private def parseIPv6 : Parser Net.IPv6Addr := do
|
||||
skipByte '['.toUInt8
|
||||
|
||||
let result ← takeWhileUpTo1
|
||||
let result ← takeWhile1AtMost
|
||||
(fun x => x = ':'.toUInt8 ∨ x = '.'.toUInt8 ∨ isHexDigitByte x)
|
||||
256
|
||||
|
||||
@@ -127,7 +127,7 @@ private def parseIPv6 : Parser Net.IPv6Addr := do
|
||||
|
||||
-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
|
||||
private def parseIPv4 : Parser Net.IPv4Addr := do
|
||||
let result ← takeWhileUpTo1
|
||||
let result ← takeWhile1AtMost
|
||||
(fun x => x = '.'.toUInt8 ∨ isDigitByte x)
|
||||
256
|
||||
|
||||
@@ -148,8 +148,8 @@ private def parseHost (config : URI.Config) : Parser URI.Host := do
|
||||
if let some ipv4 ← tryOpt parseIPv4 then
|
||||
return .ipv4 ipv4
|
||||
|
||||
-- We intentionally parse DNS names here (not full RFC 3986 reg-name).
|
||||
let some str := String.fromUTF8? (← takeWhileUpTo1
|
||||
-- It needs to be a legal DNS label, so it differs from reg-name.
|
||||
let some str := String.fromUTF8? (← takeWhile1AtMost
|
||||
(fun x => isAlphaNum x ∨ x = '-'.toUInt8 ∨ x = '.'.toUInt8)
|
||||
config.maxHostLength).toByteArray
|
||||
| fail s!"invalid host"
|
||||
@@ -187,7 +187,7 @@ private def parseAuthority (config : URI.Config) : Parser URI.Authority := do
|
||||
|
||||
-- segment = *pchar
|
||||
private def parseSegment (config : URI.Config) : Parser ByteSlice := do
|
||||
takeWhileUpTo (fun c => isPChar c ∨ c = '%'.toUInt8) config.maxSegmentLength
|
||||
takeWhileAtMost (fun c => isPChar c ∨ c = '%'.toUInt8) config.maxSegmentLength
|
||||
|
||||
/-
|
||||
path = path-abempty ; begins with "/" or is empty
|
||||
@@ -272,7 +272,7 @@ def parsePath (config : URI.Config) (forceAbsolute : Bool) (allowEmpty : Bool) :
|
||||
-- query = *( pchar / "/" / "?" )
|
||||
private def parseQuery (config : URI.Config) : Parser URI.Query := do
|
||||
let queryBytes ←
|
||||
takeWhileUpTo (fun c => isQueryChar c ∨ c = '%'.toUInt8) config.maxQueryLength
|
||||
takeWhileAtMost (fun c => isQueryChar c ∨ c = '%'.toUInt8) config.maxQueryLength
|
||||
|
||||
let some queryStr := String.fromUTF8? queryBytes.toByteArray
|
||||
| fail "invalid query string"
|
||||
@@ -304,7 +304,7 @@ private def parseQuery (config : URI.Config) : Parser URI.Query := do
|
||||
-- fragment = *( pchar / "/" / "?" )
|
||||
private def parseFragment (config : URI.Config) : Parser URI.EncodedFragment := do
|
||||
let fragmentBytes ←
|
||||
takeWhileUpTo (fun c => isFragmentChar c ∨ c = '%'.toUInt8) config.maxFragmentLength
|
||||
takeWhileAtMost (fun c => isFragmentChar c ∨ c = '%'.toUInt8) config.maxFragmentLength
|
||||
|
||||
let some fragmentStr := URI.EncodedFragment.ofByteArray? fragmentBytes.toByteArray
|
||||
| fail "invalid percent encoding in fragment"
|
||||
@@ -328,7 +328,7 @@ Parses a URI (Uniform Resource Identifier).
|
||||
URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
|
||||
hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty
|
||||
-/
|
||||
public def parseURI (config : URI.Config := {}) : Parser URI := do
|
||||
def parseURI (config : URI.Config := {}) : Parser URI := do
|
||||
let scheme ← parseScheme config
|
||||
skipByte ':'.toUInt8
|
||||
|
||||
@@ -347,7 +347,7 @@ public def parseURI (config : URI.Config := {}) : Parser URI := do
|
||||
/--
|
||||
Parses a request target with combined parsing and validation.
|
||||
-/
|
||||
public def parseRequestTarget (config : URI.Config := {}) : Parser RequestTarget :=
|
||||
def parseRequestTarget (config : URI.Config := {}) : Parser RequestTarget :=
|
||||
asterisk <|> origin <|> absoluteHttp <|> authority <|> absolute
|
||||
where
|
||||
-- The asterisk form
|
||||
@@ -406,7 +406,7 @@ where
|
||||
/--
|
||||
Parses an HTTP `Host` header value.
|
||||
-/
|
||||
public def parseHostHeader (config : URI.Config := {}) : Parser (URI.Host × URI.Port) := do
|
||||
def parseHostHeader (config : URI.Config := {}) : Parser (URI.Host × URI.Port) := do
|
||||
let host ← parseHost config
|
||||
|
||||
let port : URI.Port ←
|
||||
|
||||
1622
src/Std/Internal/Http/Protocol/H1.lean
Normal file
1622
src/Std/Internal/Http/Protocol/H1.lean
Normal file
File diff suppressed because it is too large
Load Diff
134
src/Std/Internal/Http/Protocol/H1/Config.lean
Normal file
134
src/Std/Internal/Http/Protocol/H1/Config.lean
Normal file
@@ -0,0 +1,134 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Configuration
|
||||
|
||||
This module defines the configuration options for HTTP/1.1 protocol processing,
|
||||
including connection limits, header constraints, and various size limits.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
open Std Internal Parsec ByteArray
|
||||
open Internal
|
||||
|
||||
/--
|
||||
Connection limits and parser bounds configuration.
|
||||
-/
|
||||
structure Config where
|
||||
/--
|
||||
Maximum number of requests (server) or responses (client) per connection.
|
||||
-/
|
||||
maxMessages : Nat := 100
|
||||
|
||||
/--
|
||||
Maximum number of headers allowed per message.
|
||||
-/
|
||||
maxHeaders : Nat := 100
|
||||
|
||||
/--
|
||||
Maximum aggregate byte size of all header field lines in a single message
|
||||
(name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
|
||||
-/
|
||||
maxHeaderBytes : Nat := 65536
|
||||
|
||||
/--
|
||||
Whether to enable keep-alive connections by default.
|
||||
-/
|
||||
enableKeepAlive : Bool := true
|
||||
|
||||
/--
|
||||
The `Server` header value injected into outgoing responses (receiving mode) or the
|
||||
`User-Agent` header value injected into outgoing requests (sending mode).
|
||||
`none` suppresses the header entirely.
|
||||
-/
|
||||
agentName : Option Header.Value := none
|
||||
|
||||
/--
|
||||
Maximum length of request URI (default: 8192 bytes).
|
||||
-/
|
||||
maxUriLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum number of bytes consumed while parsing request/status start-lines (default: 8192 bytes).
|
||||
-/
|
||||
maxStartLineLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum length of header field name (default: 256 bytes).
|
||||
-/
|
||||
maxHeaderNameLength : Nat := 256
|
||||
|
||||
/--
|
||||
Maximum length of header field value (default: 8192 bytes).
|
||||
-/
|
||||
maxHeaderValueLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum number of spaces in delimiter sequences (default: 16).
|
||||
-/
|
||||
maxSpaceSequence : Nat := 16
|
||||
|
||||
/--
|
||||
Maximum number of leading empty lines (bare CRLF) to skip before a request-line
|
||||
(RFC 9112 §2.2 robustness). Default: 8.
|
||||
-/
|
||||
maxLeadingEmptyLines : Nat := 8
|
||||
|
||||
/--
|
||||
Maximum number of extensions on a single chunk-size line (default: 16).
|
||||
-/
|
||||
maxChunkExtensions : Nat := 16
|
||||
|
||||
/--
|
||||
Maximum length of chunk extension name (default: 256 bytes).
|
||||
-/
|
||||
maxChunkExtNameLength : Nat := 256
|
||||
|
||||
/--
|
||||
Maximum length of chunk extension value (default: 256 bytes).
|
||||
-/
|
||||
maxChunkExtValueLength : Nat := 256
|
||||
|
||||
/--
|
||||
Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
|
||||
-/
|
||||
maxChunkLineLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum allowed chunk payload size in bytes (default: 8 MiB).
|
||||
-/
|
||||
maxChunkSize : Nat := 8 * 1024 * 1024
|
||||
|
||||
/--
|
||||
Maximum allowed total body size per message in bytes (default: 64 MiB).
|
||||
This limit applies across all body framing modes. For chunked transfer encoding,
|
||||
chunk-size lines (including extensions) and the trailer section also count toward
|
||||
this limit, so the total wire bytes consumed by the body cannot exceed this value.
|
||||
-/
|
||||
maxBodySize : Nat := 64 * 1024 * 1024
|
||||
|
||||
/--
|
||||
Maximum length of reason phrase (default: 512 bytes).
|
||||
-/
|
||||
maxReasonPhraseLength : Nat := 512
|
||||
|
||||
/--
|
||||
Maximum number of trailer headers (default: 20).
|
||||
-/
|
||||
maxTrailerHeaders : Nat := 20
|
||||
|
||||
end Std.Http.Protocol.H1
|
||||
110
src/Std/Internal/Http/Protocol/H1/Error.lean
Normal file
110
src/Std/Internal/Http/Protocol/H1/Error.lean
Normal file
@@ -0,0 +1,110 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Errors
|
||||
|
||||
This module defines the error types for HTTP/1.1 protocol processing,
|
||||
including parsing errors, timeout errors, and connection errors.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Specific HTTP processing errors with detailed information.
|
||||
-/
|
||||
inductive Error
|
||||
/--
|
||||
Malformed start line (request-line or status-line).
|
||||
-/
|
||||
| invalidStatusLine
|
||||
|
||||
/--
|
||||
Invalid or malformed header.
|
||||
-/
|
||||
| invalidHeader
|
||||
|
||||
/--
|
||||
Request timeout occurred.
|
||||
-/
|
||||
| timeout
|
||||
|
||||
/--
|
||||
Request entity too large.
|
||||
-/
|
||||
| entityTooLarge
|
||||
|
||||
/--
|
||||
Request URI is too long.
|
||||
-/
|
||||
| uriTooLong
|
||||
|
||||
/--
|
||||
Unsupported HTTP version.
|
||||
-/
|
||||
| unsupportedVersion
|
||||
|
||||
/--
|
||||
Invalid chunk encoding.
|
||||
-/
|
||||
| invalidChunk
|
||||
|
||||
/--
|
||||
Connection closed.
|
||||
-/
|
||||
| connectionClosed
|
||||
|
||||
/--
|
||||
Bad request or response message.
|
||||
-/
|
||||
| badMessage
|
||||
|
||||
/--
|
||||
The number of header fields in the message exceeds the configured limit.
|
||||
Maps to HTTP 431 Request Header Fields Too Large.
|
||||
-/
|
||||
| tooManyHeaders
|
||||
|
||||
/--
|
||||
The aggregate byte size of all header fields exceeds the configured limit.
|
||||
Maps to HTTP 431 Request Header Fields Too Large.
|
||||
-/
|
||||
| headersTooLarge
|
||||
|
||||
/--
|
||||
Generic error with message.
|
||||
-/
|
||||
| other (message : String)
|
||||
deriving Repr, BEq
|
||||
|
||||
instance : ToString Error where
|
||||
toString
|
||||
| .invalidStatusLine => "Invalid status line"
|
||||
| .invalidHeader => "Invalid header"
|
||||
| .timeout => "Timeout"
|
||||
| .entityTooLarge => "Entity too large"
|
||||
| .uriTooLong => "URI too long"
|
||||
| .unsupportedVersion => "Unsupported version"
|
||||
| .invalidChunk => "Invalid chunk"
|
||||
| .connectionClosed => "Connection closed"
|
||||
| .badMessage => "Bad message"
|
||||
| .tooManyHeaders => "Too many headers"
|
||||
| .headersTooLarge => "Headers too large"
|
||||
| .other msg => s!"Other error: {msg}"
|
||||
|
||||
73
src/Std/Internal/Http/Protocol/H1/Event.lean
Normal file
73
src/Std/Internal/Http/Protocol/H1/Event.lean
Normal file
@@ -0,0 +1,73 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
public import Std.Internal.Http.Protocol.H1.Error
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Events
|
||||
|
||||
This module defines the events that can occur during HTTP/1.1 message processing,
|
||||
including header completion and control/error signals.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Events emitted during HTTP message processing.
|
||||
-/
|
||||
inductive Event (dir : Direction)
|
||||
/--
|
||||
Indicates that all headers have been successfully parsed.
|
||||
-/
|
||||
| endHeaders (head : Message.Head dir)
|
||||
|
||||
/--
|
||||
Signals that additional input data is required to continue processing.
|
||||
-/
|
||||
| needMoreData (size : Option Nat)
|
||||
|
||||
/--
|
||||
Indicates a failure during parsing or processing.
|
||||
-/
|
||||
| failed (err : Error)
|
||||
|
||||
/--
|
||||
Requests that the connection be closed.
|
||||
-/
|
||||
| close
|
||||
|
||||
/--
|
||||
The body should be closed.
|
||||
-/
|
||||
| closeBody
|
||||
|
||||
/--
|
||||
Indicates that a response is required.
|
||||
-/
|
||||
| needAnswer
|
||||
|
||||
/--
|
||||
Indicates readiness to process the next message.
|
||||
-/
|
||||
| next
|
||||
|
||||
/--
|
||||
Signals that an `Expect: 100-continue` decision is pending.
|
||||
-/
|
||||
| «continue»
|
||||
deriving Inhabited, Repr
|
||||
139
src/Std/Internal/Http/Protocol/H1/Message.lean
Normal file
139
src/Std/Internal/Http/Protocol/H1/Message.lean
Normal file
@@ -0,0 +1,139 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
import Init.Data.Array
|
||||
public import Std.Internal.Http.Data
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Message
|
||||
|
||||
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
|
||||
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
|
||||
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
|
||||
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
|
||||
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
|
||||
while exposing common operations such as headers, version, and `shouldKeepAlive`
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Direction of message flow from the server's perspective.
|
||||
-/
|
||||
inductive Direction
|
||||
/--
|
||||
Receiving and parsing incoming requests from clients.
|
||||
-/
|
||||
| receiving
|
||||
|
||||
/--
|
||||
Client perspective: writing outgoing requests and reading incoming responses.
|
||||
-/
|
||||
| sending
|
||||
deriving BEq
|
||||
|
||||
/--
|
||||
Inverts the message direction.
|
||||
-/
|
||||
@[expose]
|
||||
abbrev Direction.swap : Direction → Direction
|
||||
| .receiving => .sending
|
||||
| .sending => .receiving
|
||||
|
||||
/--
|
||||
Gets the message head type based on direction.
|
||||
-/
|
||||
@[expose]
|
||||
def Message.Head : Direction → Type
|
||||
| .receiving => Request.Head
|
||||
| .sending => Response.Head
|
||||
|
||||
/--
|
||||
Gets the headers of a `Message`.
|
||||
-/
|
||||
def Message.Head.headers (m : Message.Head dir) : Headers :=
|
||||
match dir with
|
||||
| .receiving => Request.Head.headers m
|
||||
| .sending => Response.Head.headers m
|
||||
|
||||
/--
|
||||
Gets the version of a `Message`.
|
||||
-/
|
||||
def Message.Head.version (m : Message.Head dir) : Version :=
|
||||
match dir with
|
||||
| .receiving => Request.Head.version m
|
||||
| .sending => Response.Head.version m
|
||||
|
||||
/--
|
||||
Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag.
|
||||
-/
|
||||
def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length :=
|
||||
let contentLength := message.headers.getAll? .contentLength
|
||||
|
||||
match message.headers.getAll? .transferEncoding with
|
||||
| none =>
|
||||
match contentLength with
|
||||
| some #[cl] => .fixed <$> cl.value.toNat?
|
||||
| some _ => none -- To avoid request smuggling with malformed/multiple content-length headers.
|
||||
| none => if allowEOFBody then some (.fixed 0) else none
|
||||
|
||||
-- Single transfer-encoding header.
|
||||
| some #[header] =>
|
||||
let te := Header.TransferEncoding.parse header
|
||||
|
||||
match Header.TransferEncoding.isChunked <$> te, contentLength with
|
||||
| some true, none =>
|
||||
-- HTTP/1.0 does not define chunked transfer encoding (RFC 2068 §19.4.6).
|
||||
-- A server MUST NOT use chunked with an HTTP/1.0 peer; likewise, an
|
||||
-- HTTP/1.0 request carrying Transfer-Encoding: chunked is malformed.
|
||||
if message.version == .v10 then none else some .chunked
|
||||
| _, _ => none -- To avoid request smuggling when TE and CL are mixed.
|
||||
|
||||
-- We disallow multiple transfer-encoding headers.
|
||||
| some _ => none
|
||||
/--
|
||||
Checks whether the message indicates that the connection should be kept alive.
|
||||
-/
|
||||
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
|
||||
let tokens? : Option (Array String) :=
|
||||
match message.headers.getAll? .connection with
|
||||
| none => some #[]
|
||||
| some values =>
|
||||
values.foldl (fun acc raw => do
|
||||
let acc ← acc
|
||||
let parsed ← Header.Connection.parse raw
|
||||
pure (acc ++ parsed.tokens)
|
||||
) (some #[])
|
||||
|
||||
match tokens? with
|
||||
| none => false
|
||||
| some tokens =>
|
||||
if message.version == .v11 then
|
||||
!tokens.any (· == "close")
|
||||
else
|
||||
tokens.any (· == "keep-alive")
|
||||
|
||||
instance : Repr (Message.Head dir) :=
|
||||
match dir with
|
||||
| .receiving => inferInstanceAs (Repr Request.Head)
|
||||
| .sending => inferInstanceAs (Repr Response.Head)
|
||||
|
||||
instance : Internal.Encode .v11 (Message.Head dir) :=
|
||||
match dir with
|
||||
| .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head)
|
||||
| .sending => inferInstanceAs (Internal.Encode .v11 Response.Head)
|
||||
|
||||
instance : EmptyCollection (Message.Head dir) where
|
||||
emptyCollection :=
|
||||
match dir with
|
||||
| .receiving => { method := .get, version := .v11 }
|
||||
| .sending => {}
|
||||
548
src/Std/Internal/Http/Protocol/H1/Parser.lean
Normal file
548
src/Std/Internal/Http/Protocol/H1/Parser.lean
Normal file
@@ -0,0 +1,548 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Parsec
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Parsec.ByteArray
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
|
||||
/-!
|
||||
This module defines parsers for HTTP/1.1 request and response lines, headers, and body framing. The
|
||||
reference used is https://httpwg.org/specs/rfc9112.html.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
open Std Internal Parsec ByteArray Internal Internal.Char
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Checks if a byte may appear inside a field value.
|
||||
|
||||
This parser enforces strict ASCII-only field values and allows only `field-content`
|
||||
(`HTAB / SP / VCHAR`).
|
||||
-/
|
||||
@[inline]
|
||||
def isFieldVChar (c : UInt8) : Bool :=
|
||||
fieldContent (Char.ofUInt8 c)
|
||||
|
||||
/--
|
||||
Checks if a byte may appear unescaped inside a quoted-string value.
|
||||
|
||||
Allows `HTAB / SP / %x21 / %x23-5B / %x5D-7E` (strict ASCII-only; no obs-text).
|
||||
-/
|
||||
@[inline]
|
||||
def isQdText (c : UInt8) : Bool :=
|
||||
qdtext (Char.ofUInt8 c)
|
||||
|
||||
/--
|
||||
Checks if a byte is optional whitespace (`OWS = SP / HTAB`, RFC 9110 §5.6.3).
|
||||
-/
|
||||
@[inline]
|
||||
def isOwsByte (c : UInt8) : Bool :=
|
||||
ows (Char.ofUInt8 c)
|
||||
|
||||
-- Parser blocks
|
||||
|
||||
/--
|
||||
Repeatedly applies `parser` until it returns `none` or the `maxCount` limit is
|
||||
exceeded. Returns the collected results as an array.
|
||||
-/
|
||||
partial def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
|
||||
let rec go (acc : Array α) : Parser (Array α) := do
|
||||
let step ← optional <| attempt do
|
||||
match ← parser with
|
||||
| none => fail "end of items"
|
||||
| some x => return x
|
||||
|
||||
match step with
|
||||
| none =>
|
||||
return acc
|
||||
| some x =>
|
||||
let acc := acc.push x
|
||||
|
||||
if acc.size > maxCount then
|
||||
fail s!"too many items: {acc.size} > {maxCount}"
|
||||
|
||||
go acc
|
||||
go #[]
|
||||
|
||||
|
||||
/--
|
||||
Lifts an `Option` into the parser monad, failing with a generic message if the value is `none`.
|
||||
-/
|
||||
def liftOption (x : Option α) : Parser α :=
|
||||
if let some res := x then
|
||||
return res
|
||||
else
|
||||
fail "expected value but got none"
|
||||
|
||||
/--
|
||||
Parses an HTTP token (RFC 9110 §5.6.2): one or more token characters, up to `limit` bytes.
|
||||
Fails if the input starts with a non-token character or is empty.
|
||||
-/
|
||||
@[inline]
|
||||
def parseToken (limit : Nat) : Parser ByteSlice :=
|
||||
takeWhileUpTo1 (fun c => tchar (Char.ofUInt8 c)) limit
|
||||
|
||||
/--
|
||||
Parses a line terminator.
|
||||
-/
|
||||
@[inline]
|
||||
def crlf : Parser Unit := do
|
||||
skipBytes "\r\n".toUTF8
|
||||
|
||||
/--
|
||||
Consumes and ignores empty lines (`CRLF`) that appear before a request-line.
|
||||
|
||||
https://httpwg.org/specs/rfc9112.html#rfc.section.2.2:
|
||||
|
||||
"In the interest of robustness, a server that is expecting to receive and parse a request-line SHOULD
|
||||
ignore at least one empty line (CRLF) received prior to the request-line."
|
||||
-/
|
||||
def skipLeadingRequestEmptyLines (limits : H1.Config) : Parser Unit := do
|
||||
let mut count := 0
|
||||
while (← peekWhen? (· == '\r'.toUInt8)).isSome do
|
||||
if count >= limits.maxLeadingEmptyLines then
|
||||
fail "too many leading empty lines"
|
||||
crlf
|
||||
count := count + 1
|
||||
|
||||
/--
|
||||
Parses a single space (SP, 0x20).
|
||||
-/
|
||||
@[inline]
|
||||
def sp : Parser Unit :=
|
||||
skipByte ' '.toUInt8
|
||||
|
||||
/--
|
||||
Parses optional whitespace (OWS = *(SP / HTAB), RFC 9110 §5.6.3), bounded by
|
||||
`limits.maxSpaceSequence`. Fails if more whitespace follows the limit, so oversized
|
||||
padding is rejected rather than silently truncated.
|
||||
-/
|
||||
@[inline]
|
||||
def ows (limits : H1.Config) : Parser Unit := do
|
||||
discard <| takeWhileUpTo isOwsByte limits.maxSpaceSequence
|
||||
|
||||
if (← peekWhen? isOwsByte) |>.isSome then
|
||||
fail "invalid space sequence"
|
||||
else
|
||||
pure ()
|
||||
|
||||
/--
|
||||
Parses a single ASCII hex digit and returns its numeric value (`0`–`15`).
|
||||
-/
|
||||
def hexDigit : Parser UInt8 := do
|
||||
let b ← any
|
||||
if isHexDigitByte b then
|
||||
if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8
|
||||
else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
|
||||
else return b - 'a'.toUInt8 + 10
|
||||
else fail s!"invalid hex digit {Char.ofUInt8 b |>.quote}"
|
||||
|
||||
/--
|
||||
Parses a hexadecimal integer (one or more hex digits, up to 16 digits).
|
||||
Used for chunk-size lines in chunked transfer encoding.
|
||||
-/
|
||||
partial def hex : Parser Nat := do
|
||||
let rec go (acc : Nat) (count : Nat) : Parser Nat := do
|
||||
match ← optional (attempt hexDigit) with
|
||||
| some d =>
|
||||
if count + 1 > 16 then
|
||||
fail "chunk size too large"
|
||||
else
|
||||
go (acc * 16 + d.toNat) (count + 1)
|
||||
| none =>
|
||||
if count = 0 then
|
||||
-- Preserve EOF as incremental chunk-size parsing can request more data.
|
||||
-- For non-EOF invalid bytes, keep the specific parse failure.
|
||||
let _ ← peek!
|
||||
fail "expected hex digit"
|
||||
else
|
||||
return acc
|
||||
go 0 0
|
||||
|
||||
-- Actual parsers
|
||||
|
||||
/--
|
||||
Parses `HTTP-version = HTTP-name "/" DIGIT "." DIGIT` and returns the major and
|
||||
minor version numbers as a pair.
|
||||
-/
|
||||
def parseHttpVersionNumber : Parser (Nat × Nat) := do
|
||||
skipBytes "HTTP/".toUTF8
|
||||
let major ← digit
|
||||
skipByte '.'.toUInt8
|
||||
let minor ← digit
|
||||
pure ((major.toNat - 48), (minor.toNat - 48))
|
||||
|
||||
/--
|
||||
Parses an HTTP version string and returns the corresponding `Version` value.
|
||||
Fails if the version is not recognized by `Version.ofNumber?`.
|
||||
-/
|
||||
def parseHttpVersion : Parser Version := do
|
||||
let (major, minor) ← parseHttpVersionNumber
|
||||
liftOption <| Version.ofNumber? major minor
|
||||
|
||||
/-
|
||||
method = token
|
||||
|
||||
Every branch is wrapped in `attempt` so that `<|>` always backtracks on
|
||||
failure, even after consuming bytes. This is strictly necessary only for the
|
||||
P-group (POST / PUT / PATCH) which share a common first byte, but wrapping
|
||||
all alternatives keeps the parser defensively correct if new methods are
|
||||
added in the future.
|
||||
-/
|
||||
def parseMethod : Parser Method :=
|
||||
(attempt <| skipBytes "GET".toUTF8 <&> fun _ => Method.get)
|
||||
<|> (attempt <| skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
|
||||
<|> (attempt <| skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
|
||||
<|> (attempt <| skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
|
||||
<|> (attempt <| skipBytes "ACL".toUTF8 <&> fun _ => Method.acl)
|
||||
<|> (attempt <| skipBytes "QUERY".toUTF8 <&> fun _ => Method.query)
|
||||
<|> (attempt <| skipBytes "SEARCH".toUTF8 <&> fun _ => Method.search)
|
||||
<|> (attempt <| skipBytes "BASELINE-CONTROL".toUTF8 <&> fun _ => Method.baselineControl)
|
||||
<|> (attempt <| skipBytes "BIND".toUTF8 <&> fun _ => Method.bind)
|
||||
<|> (attempt <| skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
|
||||
<|> (attempt <| skipBytes "CHECKIN".toUTF8 <&> fun _ => Method.checkin)
|
||||
<|> (attempt <| skipBytes "CHECKOUT".toUTF8 <&> fun _ => Method.checkout)
|
||||
<|> (attempt <| skipBytes "COPY".toUTF8 <&> fun _ => Method.copy)
|
||||
<|> (attempt <| skipBytes "LABEL".toUTF8 <&> fun _ => Method.label)
|
||||
<|> (attempt <| skipBytes "LINK".toUTF8 <&> fun _ => Method.link)
|
||||
<|> (attempt <| skipBytes "LOCK".toUTF8 <&> fun _ => Method.lock)
|
||||
<|> (attempt <| skipBytes "MERGE".toUTF8 <&> fun _ => Method.merge)
|
||||
<|> (attempt <| skipBytes "MKACTIVITY".toUTF8 <&> fun _ => Method.mkactivity)
|
||||
<|> (attempt <| skipBytes "MKCALENDAR".toUTF8 <&> fun _ => Method.mkcalendar)
|
||||
<|> (attempt <| skipBytes "MKCOL".toUTF8 <&> fun _ => Method.mkcol)
|
||||
<|> (attempt <| skipBytes "MKREDIRECTREF".toUTF8 <&> fun _ => Method.mkredirectref)
|
||||
<|> (attempt <| skipBytes "MKWORKSPACE".toUTF8 <&> fun _ => Method.mkworkspace)
|
||||
<|> (attempt <| skipBytes "MOVE".toUTF8 <&> fun _ => Method.move)
|
||||
<|> (attempt <| skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
|
||||
<|> (attempt <| skipBytes "ORDERPATCH".toUTF8 <&> fun _ => Method.orderpatch)
|
||||
<|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
|
||||
<|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
|
||||
<|> (attempt <| skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
|
||||
<|> (attempt <| skipBytes "PRI".toUTF8 <&> fun _ => Method.pri)
|
||||
<|> (attempt <| skipBytes "PROPFIND".toUTF8 <&> fun _ => Method.propfind)
|
||||
<|> (attempt <| skipBytes "PROPPATCH".toUTF8 <&> fun _ => Method.proppatch)
|
||||
<|> (attempt <| skipBytes "REBIND".toUTF8 <&> fun _ => Method.rebind)
|
||||
<|> (attempt <| skipBytes "REPORT".toUTF8 <&> fun _ => Method.report)
|
||||
<|> (attempt <| skipBytes "UNBIND".toUTF8 <&> fun _ => Method.unbind)
|
||||
<|> (attempt <| skipBytes "UNCHECKOUT".toUTF8 <&> fun _ => Method.uncheckout)
|
||||
<|> (attempt <| skipBytes "UNLINK".toUTF8 <&> fun _ => Method.unlink)
|
||||
<|> (attempt <| skipBytes "UNLOCK".toUTF8 <&> fun _ => Method.unlock)
|
||||
<|> (attempt <| skipBytes "UPDATEREDIRECTREF".toUTF8 <&> fun _ => Method.updateredirectref)
|
||||
<|> (attempt <| skipBytes "UPDATE".toUTF8 <&> fun _ => Method.update)
|
||||
<|> (attempt <| skipBytes "VERSION-CONTROL".toUTF8 <&> fun _ => Method.versionControl)
|
||||
<|> (parseToken 64 *> fail "unrecognized method")
|
||||
|
||||
/--
Parses a request-target URI, up to `limits.maxUriLength` bytes.

Fails with `"uri too long"` if the target exceeds the configured limit.
-/
def parseURI (limits : H1.Config) : Parser ByteArray := do
  let target ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
  -- Hitting the byte limit exactly is only an error when the URI keeps going:
  -- peek the next byte and fail unless it is the terminating space.
  if target.size == limits.maxUriLength then
    if (← peekWhen? (· != ' '.toUInt8)).isSome then
      fail "uri too long"

  return target.toByteArray
|
||||
|
||||
/--
Shared core for request-line parsing: parses `request-target SP HTTP-version CRLF`
and returns the `RequestTarget` together with the raw major/minor version numbers.

Both `parseRequestLine` and `parseRequestLineRawVersion` call this after consuming
the method token, keeping URI validation and version parsing in one place.
-/
def parseRequestLineBody (limits : H1.Config) : Parser (RequestTarget × Nat × Nat) := do
  let rawTarget ← parseURI limits <* sp
  -- Re-run the dedicated URI grammar over the raw bytes; `eof` guarantees the
  -- whole slice was consumed.
  let target ←
    match (Std.Http.URI.Parser.parseRequestTarget <* eof).run rawTarget with
    | .ok parsed => pure parsed
    | .error msg => fail msg
  let version ← parseHttpVersionNumber <* crlf
  return (target, version)
|
||||
|
||||
/--
Parses a request line and returns a fully-typed `Request.Head`.
`request-line = method SP request-target SP HTTP-version`
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (target, version) ← parseRequestLineBody limits
  -- Only HTTP/1.1 and HTTP/1.0 are representable as typed versions here.
  match version with
  | (1, 1) => return ⟨method, .v11, target, .empty⟩
  | (1, 0) => return ⟨method, .v10, target, .empty⟩
  | _ => fail "unsupported HTTP version"
|
||||
|
||||
/--
Parses a request line and returns the recognized HTTP method and version when available.

request-line = method SP request-target SP HTTP-version
-/
public def parseRequestLineRawVersion (limits : H1.Config) : Parser (Method × RequestTarget × Option Version) := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  -- Unlike `parseRequestLine`, an unrecognized version is reported as `none`
  -- instead of a parse failure.
  let (target, major, minor) ← parseRequestLineBody limits
  return (method, target, Version.ofNumber? major minor)
|
||||
|
||||
/--
Parses a single header field line.

`field-line = field-name ":" OWS field-value OWS`
-/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
  let rawName ← parseToken limits.maxHeaderNameLength
  skipByte ':'.toUInt8
  ows limits
  -- The value is optional: an empty header value is legal.
  let rawValue ← optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength)
  ows limits

  let name ← liftOption <| String.fromUTF8? rawName.toByteArray
  let value ← liftOption <| String.fromUTF8? <| (rawValue.map (·.toByteArray)).getD .empty

  -- Trailing ASCII whitespace around the value is never part of the field value.
  return (name, value.trimAsciiEnd.toString)
|
||||
|
||||
/--
Parses a single header field line, or returns `none` when it sees the blank line that
terminates the header section.

```
field-line = field-name ":" OWS field-value OWS CRLF
```
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  -- A CR or LF at the start of the line marks the end of the header section.
  let next ← peek?
  if next == some '\r'.toUInt8 || next == some '\n'.toUInt8 then
    crlf
    return none
  else
    return some (← parseFieldLine limits <* crlf)
|
||||
|
||||
/--
Parses a backslash-escaped character inside a quoted-string.

`quoted-pair = "\" ( HTAB / SP / VCHAR )` — strict ASCII-only (no obs-text).
-/
def parseQuotedPair : Parser UInt8 := do
  skipByte '\\'.toUInt8
  let escaped ← any
  if quotedPairChar (Char.ofUInt8 escaped) then
    return escaped
  fail s!"invalid quoted-pair byte: {Char.ofUInt8 escaped |>.quote}"
|
||||
|
||||
/--
Parses a quoted-string value, unescaping quoted-pairs.

`quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE`
-/
partial def parseQuotedString (maxLength : Nat) : Parser String := do
  skipByte '"'.toUInt8

  -- Accumulate unescaped bytes; `len` counts decoded characters so the
  -- length cap applies to the logical value, not the escaped wire form.
  let rec go (acc : ByteArray) (len : Nat) : Parser ByteArray := do
    let byte ← any

    if byte == '"'.toUInt8 then
      return acc
    else if byte == '\\'.toUInt8 then
      let escaped ← any
      if quotedPairChar (Char.ofUInt8 escaped) then
        if len + 1 > maxLength then
          fail "quoted-string too long"
        else
          go (acc.push escaped) (len + 1)
      else
        fail s!"invalid quoted-pair byte: {Char.ofUInt8 escaped |>.quote}"
    else if isQdText byte then
      if len + 1 > maxLength then
        fail "quoted-string too long"
      else
        go (acc.push byte) (len + 1)
    else
      fail s!"invalid qdtext byte: {Char.ofUInt8 byte |>.quote}"

  liftOption <| String.fromUTF8? (← go .empty 0)
|
||||
|
||||
-- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] )
/--
Parses one chunk extension: a `;`-prefixed name with an optional `=`-separated
value. The value may be a token or a quoted-string.
-/
def parseChunkExt (limits : H1.Config) : Parser (Chunk.ExtensionName × Option Chunk.ExtensionValue) := do
  ows limits *> skipByte ';'.toUInt8 *> ows limits
  let name ← (liftOption =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtNameLength) <* ows limits

  -- Validate against the typed extension-name representation.
  let some name := Chunk.ExtensionName.ofString? name
    | fail "invalid extension name"

  if (← peekWhen? (· == '='.toUInt8)) |>.isSome then
    -- RFC 9112 §7.1.1: BWS is allowed around "=".
    -- The `<* ows limits` after the name already consumed any trailing whitespace,
    -- so these ows calls are no-ops in practice, but kept for explicit grammar correspondence.
    ows limits *> skipByte '='.toUInt8 *> ows limits
    -- Try quoted-string first, then fall back to a plain token value.
    let value ← ows limits *> (parseQuotedString limits.maxChunkExtValueLength <|> liftOption =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtValueLength))

    let some value := Chunk.ExtensionValue.ofString? value
      | fail "invalid extension value"

    return (name, some value)

  -- No "=" follows the name: an extension without a value.
  return (name, none)
|
||||
|
||||
/--
Parses the size and extensions of a chunk.
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) := do
  -- chunk-size is hexadecimal per RFC 9112 §7.1.
  let chunkSize ← hex
  let extensions ← manyItems (optional (attempt (parseChunkExt limits))) limits.maxChunkExtensions
  crlf
  return (chunkSize, extensions)
|
||||
|
||||
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
  /-- All requested bytes were available; `data` is the complete slice. -/
  | complete (data : ByteSlice)
  /-- Input ran out early: `data` holds what was read and `remaining` counts the
  bytes still owed. -/
  | incomplete (data : ByteSlice) (remaining : Nat)
|
||||
|
||||
/--
Parses a single chunk in chunked transfer encoding.
-/
public def parseChunkPartial (limits : H1.Config) : Parser (Option (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue) × ByteSlice)) := do
  let (size, extensions) ← parseChunkSize limits
  -- A zero-size chunk is the last-chunk marker; report `none` so the caller
  -- can move on to the trailer section.
  if size != 0 then
    let payload ← take size
    return some ⟨size, extensions, payload⟩
  return none
|
||||
|
||||
/--
Parses fixed-size data that can be incomplete.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
  if it.remainingBytes = 0 then
    -- Nothing buffered at all: signal EOF so the caller can wait for more input.
    .error it .eof
  else if it.remainingBytes < size then
    -- Hand back everything available and record how many bytes are still owed.
    .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
  else
    -- Enough data buffered: slice off exactly `size` bytes.
    .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
|
||||
|
||||
/--
Parses fixed-size chunk data that can be incomplete.
-/
public def parseChunkSizedData (size : Nat) : Parser TakeResult := do
  let result ← parseFixedSizeData size
  match result with
  | .complete data =>
    -- A complete chunk is followed by its terminating CRLF on the wire.
    crlf
    return .complete data
  | .incomplete data remaining =>
    return .incomplete data remaining
|
||||
|
||||
/--
Returns `true` if `name` (compared case-insensitively) is a field that MUST NOT appear in HTTP/1.1
trailer sections per RFC 9112 §6.5. Forbidden fields are those required for message framing
(`content-length`, `transfer-encoding`), routing (`host`), or connection management (`connection`).
-/
def isForbiddenTrailerField (name : String) : Bool :=
  #["content-length", "transfer-encoding", "host",
    "connection", "expect", "te",
    "authorization", "max-forwards", "cache-control",
    "content-encoding", "upgrade", "trailer"].contains name.toLower
|
||||
|
||||
/--
Parses a trailer header (used after a chunked body), rejecting forbidden field names per RFC 9112
§6.5. Fields used for message framing (`content-length`, `transfer-encoding`), routing (`host`),
or connection management (`connection`, `te`, `upgrade`) are rejected to prevent trailer injection
attacks where a downstream proxy might re-interpret them.
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  match ← parseSingleHeader limits with
  | none => return none
  | some header =>
    if isForbiddenTrailerField header.1 then
      fail s!"forbidden trailer field: {header.1}"
    return some header
|
||||
|
||||
/--
Parses trailer headers after a chunked body and returns them as an array of name-value pairs.

This is exposed for callers that need the trailer values directly (e.g. clients). The
internal protocol machine uses `parseLastChunkBody` instead, which discards trailer values.
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do
  let fields ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  -- The trailer section ends with the final blank line of the message.
  crlf
  return fields
|
||||
|
||||
/--
Returns `true` if `c` is a valid reason-phrase byte (`HTAB / SP / VCHAR`, strict ASCII-only).
-/
@[inline]
def isReasonPhraseByte (c : UInt8) : Bool :=
  -- Reason phrases share the field-content character set.
  fieldContent (Char.ofUInt8 c)
|
||||
|
||||
/--
Parses a reason phrase (text after status code).

Allows only `HTAB / SP / VCHAR` bytes (strict ASCII-only).
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
  let raw ← takeWhileUpTo isReasonPhraseByte limits.maxReasonPhraseLength
  liftOption (String.fromUTF8? raw.toByteArray)
|
||||
|
||||
/--
Parses a status-code (3 decimal digits), the following reason phrase, and the
terminating CRLF; returns a typed `Status`.
-/
def parseStatusCode (limits : H1.Config) : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- ASCII digits: subtract '0' (48) to recover each digit's numeric value.
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  sp
  let phrase ← parseReasonPhrase limits <* crlf

  if h : IsValidReasonPhrase phrase then
    if let some status := Status.ofCode (some ⟨phrase, h⟩) code.toUInt16 then
      return status

  -- NOTE(review): this message also covers an invalid reason phrase, not only
  -- an out-of-range code — consider a more specific error.
  fail "invalid status code"
|
||||
|
||||
/--
Parses a status line and returns a fully-typed `Response.Head`.
`status-line = HTTP-version SP status-code SP [ reason-phrase ]`
Accepts only HTTP/1.1. For parsing where the version may be unrecognized and must be
mapped to an error event, use `parseStatusLineRawVersion`.
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  -- Only 1.1 and 1.0 map to typed versions; everything else is rejected.
  match major, minor with
  | 1, 1 => return { status, version := .v11, headers := .empty }
  | 1, 0 => return { status, version := .v10, headers := .empty }
  | _, _ => fail "unsupported HTTP version"
|
||||
|
||||
/--
Parses a status line and returns the status code plus recognized HTTP version when available.
Consumes and discards the reason phrase.

status-line = HTTP-version SP status-code SP [ reason-phrase ] CRLF
-/
public def parseStatusLineRawVersion (limits : H1.Config) : Parser (Status × Option Version) := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let code ← parseStatusCode limits
  -- An unrecognized version becomes `none` instead of a parse failure.
  return (code, Version.ofNumber? major minor)
|
||||
|
||||
/--
Parses the trailer section that follows the last chunk size line (`0\r\n`).
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do
  -- Trailer values are validated but thrown away here; use `parseTrailers`
  -- when the values are needed.
  let _ ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
|
||||
|
||||
end Std.Http.Protocol.H1
|
||||
319
src/Std/Internal/Http/Protocol/H1/Reader.lean
Normal file
319
src/Std/Internal/Http/Protocol/H1/Reader.lean
Normal file
@@ -0,0 +1,319 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
public import Std.Internal.Http.Protocol.H1.Error
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Reader
|
||||
|
||||
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
|
||||
It tracks the parsing state including start line, headers, and body handling for
|
||||
both fixed-length and chunked transfer encodings.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
The body-framing sub-state of the `Reader` state machine.
-/
inductive Reader.BodyState where
  /--
  Parse fixed-length body bytes, tracking the number of bytes remaining.
  -/
  | fixed (remaining : Nat)

  /--
  Parse the next chunk-size line in chunked transfer encoding.
  -/
  | chunkedSize

  /--
  Parse chunk data for the current chunk, carrying its extensions and the
  number of payload bytes still to read.
  -/
  | chunkedBody (ext : Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) (remaining : Nat)

  /--
  Parse body bytes until EOF (connection close).
  -/
  | closeDelimited
  deriving Inhabited, Repr, BEq
|
||||
|
||||
/--
The state of the `Reader` state machine.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for HTTP start line.
  -/
  | needStartLine : State dir

  /--
  State waiting for HTTP headers, tracking number of headers parsed.
  -/
  | needHeader : Nat → State dir

  /--
  Unified body-reading state.
  -/
  | readBody : Reader.BodyState → State dir

  /--
  Paused waiting for a `canContinue` decision, carrying the next state.
  -/
  | continue : State dir → State dir

  /--
  State waiting to be able to read new data.
  -/
  | pending : State dir

  /--
  Completed a single request or response; the machine can go on to the next one.
  -/
  | complete

  /--
  Completed and cannot process more data.
  -/
  | closed

  /--
  The input is malformed.
  -/
  | failed (error : Error) : State dir
  deriving Inhabited, Repr, BEq
|
||||
|
||||
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
  /--
  The current state of the machine. A receiving machine begins by expecting a
  start line; a sending machine begins idle (`pending`).
  -/
  state : Reader.State dir := match dir with | .receiving => .needStartLine | .sending => .pending

  /--
  The input byte array.
  -/
  input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter

  /--
  The incoming message head.
  -/
  messageHead : Message.Head dir := {}

  /--
  Count of messages that this connection has already parsed.
  -/
  messageCount : Nat := 0

  /--
  Number of body bytes read for the current message.
  -/
  bodyBytesRead : Nat := 0

  /--
  Number of header bytes accumulated for the current message.
  Counts name + value bytes plus 4 bytes per line for `: ` and `\r\n`.
  -/
  headerBytesRead : Nat := 0

  /--
  Set when no further input bytes will arrive (the remote end has closed the connection).
  -/
  noMoreInput : Bool := false
|
||||
|
||||
namespace Reader
|
||||
|
||||
/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
  if let .closed := reader.state then true else false

/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
  if let .complete := reader.state then true else false

/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
  if let .failed _ := reader.state then true else false
|
||||
|
||||
/--
Feeds new data into the reader's input buffer.
If the current input is exhausted, replaces it; otherwise compacts the buffer
by discarding already-parsed bytes before appending.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
  let buffer :=
    if reader.input.atEnd then
      -- Nothing unconsumed: the fresh bytes become the whole buffer.
      data.iter
    else
      -- Drop the already-consumed prefix, then append the fresh bytes.
      (reader.input.array.extract reader.input.pos reader.input.array.size ++ data).iter
  { reader with input := buffer }
|
||||
|
||||
/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
  { reader with input }

/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
  { reader with messageHead }
|
||||
|
||||
/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
  -- Both arms are identical; presumably the case split on `dir` is needed so
  -- the `Message.Head dir` record update elaborates — TODO confirm before
  -- simplifying this to a plain record update.
  match dir with
  | .sending | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
|
||||
|
||||
/--
Closes the reader, transitioning to the closed state.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
  -- A closed reader also accepts no further input.
  { reader with state := .closed, noMoreInput := true }

/--
Marks the current message as complete and prepares for the next message.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
  { reader with
      state := .complete
      messageCount := reader.messageCount + 1 }

/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
  { reader with state := .failed error }

/--
Resets the reader to parse a new message on the same connection.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
  -- `messageCount`, `input`, and `noMoreInput` deliberately survive the reset:
  -- they are connection-level, not message-level.
  { reader with
      state := .needStartLine
      bodyBytesRead := 0
      headerBytesRead := 0
      messageHead := {} }
|
||||
|
||||
/--
Checks if more input is needed to continue parsing.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
  -- Terminal and paused states never request more bytes.
  let parsing :=
    match reader.state with
    | .complete | .closed | .failed _ | .«continue» _ => false
    | _ => true
  reader.input.atEnd && !reader.noMoreInput && parsing

/--
Returns the current parse error if the reader has failed.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
  if let .failed err := reader.state then some err else none
|
||||
|
||||
/--
Gets the number of bytes remaining in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
  reader.input.array.size - reader.input.pos

/--
Advances the input iterator by n bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with input := reader.input.forward n }

/--
Transitions to the state for reading headers.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
  -- Per-message counters restart with the header section.
  { reader with state := .needHeader 0, bodyBytesRead := 0, headerBytesRead := 0 }

/--
Adds body bytes parsed for the current message.
-/
@[inline]
def addBodyBytes (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with bodyBytesRead := reader.bodyBytesRead + n }

/--
Adds header bytes accumulated for the current message.
-/
@[inline]
def addHeaderBytes (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with headerBytesRead := reader.headerBytesRead + n }

/--
Transitions to the state for reading a fixed-length body.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
  { reader with state := .readBody (.fixed size) }

/--
Transitions to the state for reading chunked transfer encoding.
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
  { reader with state := .readBody .chunkedSize }

/--
Marks that no more input will be provided (connection closed).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true }

/--
Checks if the connection should be kept alive for the next message.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
  -- Delegates to the parsed message head's keep-alive logic.
  reader.messageHead.shouldKeepAlive
|
||||
|
||||
end Reader
|
||||
280
src/Std/Internal/Http/Protocol/H1/Writer.lean
Normal file
280
src/Std/Internal/Http/Protocol/H1/Writer.lean
Normal file
@@ -0,0 +1,280 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
public import Std.Internal.Http.Protocol.H1.Error
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Writer
|
||||
|
||||
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
|
||||
It handles encoding headers and body content for both fixed-length and chunked
|
||||
transfer encodings.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
open Internal
|
||||
|
||||
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
  /--
  Initial state before any outgoing message has been prepared.
  -/
  | pending

  /--
  Waiting for the application to provide the outgoing message head via `send`.
  -/
  | waitingHeaders

  /--
  The message head has been provided; waiting for `shouldFlush` to become true before
  serializing headers to output.
  -/
  | waitingForFlush

  /--
  Writing the body output (either fixed-length or chunked).
  -/
  | writingBody (mode : Body.Length)

  /--
  Completed writing a single message and ready to begin the next one.
  -/
  | complete

  /--
  Closed; no further data can be written.
  -/
  | closed
  deriving Inhabited, Repr, BEq
|
||||
|
||||
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
  /--
  Body chunks supplied by the user, accumulated before being flushed to output.
  -/
  userData : Array Chunk := .empty

  /--
  All the data produced by the writer, ready to be sent to the socket.
  -/
  outputData : ChunkedBuffer := .empty

  /--
  The state of the writer machine. A receiving connection's writer starts idle;
  a sending one starts by waiting for headers.
  -/
  state : Writer.State := match dir with | .receiving => .pending | .sending => .waitingHeaders

  /--
  When the user specifies the exact body size upfront, `Content-Length` framing is
  used instead of chunked transfer encoding.
  -/
  knownSize : Option Body.Length := none

  /--
  The outgoing message that will be written to the output.
  Note the direction swap: a receiving machine writes responses, and vice versa.
  -/
  messageHead : Message.Head dir.swap := {}

  /--
  Whether the user has called `send` to provide the outgoing message head.
  -/
  sentMessage : Bool := false

  /--
  Set when the user has finished sending body data, allowing fixed-size framing
  to be determined upfront.
  -/
  userClosedBody : Bool := false

  /--
  When `true`, body bytes are intentionally omitted from the wire for this message
  (e.g. HEAD responses), while headers/framing metadata may still describe the
  hypothetical representation.
  -/
  omitBody : Bool := false

  /--
  Running total of bytes across all `userData` chunks, maintained incrementally
  to avoid an O(n) fold on every fixed-length write step.
  -/
  userDataBytes : Nat := 0
|
||||
|
||||
namespace Writer
|
||||
|
||||
/--
Returns `true` when no more user body data will arrive: either the user called
`closeBody`, or the writer has already transitioned to `complete` or `closed`.

Note: this does **not** mean the wire is ready to accept new bytes — a `closed`
writer cannot send anything. Use this to decide whether to flush pending body
data rather than to check writability.
-/
@[inline]
def noMoreUserData {dir} (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed => true
  | .complete => true
  | _ => writer.userClosedBody

/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
  if let .closed := writer.state then true else false

/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
  if let .complete := writer.state then true else false

/--
Checks if the writer can accept more data from the user.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
  match writer.state with
  | .waitingHeaders | .waitingForFlush => true
  | .writingBody _ => !writer.userClosedBody
  | _ => false
|
||||
|
||||
/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
  { writer with userClosedBody := true }

/--
Determines the transfer encoding mode based on explicit setting, body closure state, or defaults to chunked.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
  match writer.knownSize with
  | some mode => mode
  | none =>
    -- With the body closed the total size is known, so fixed framing applies;
    -- otherwise fall back to chunked transfer encoding.
    if writer.userClosedBody then
      .fixed writer.userDataBytes
    else
      .chunked
|
||||
|
||||
/--
Adds user data chunks to the writer's buffer if the writer can accept data.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
  if !writer.canAcceptData then
    -- Silently drop data in states that cannot take it.
    writer
  else
    -- Keep the running byte total in sync with the appended chunks.
    let added := data.foldl (init := 0) fun acc c => acc + c.data.size
    { writer with
        userData := writer.userData ++ data
        userDataBytes := writer.userDataBytes + added }
|
||||
|
||||
/--
Writes accumulated user data to output using fixed-size encoding.

Moves up to `limitSize` bytes from `userData` into the output buffer; chunks (or
chunk suffixes) past the budget stay queued. Returns the updated writer together
with the unused portion of the byte budget.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
  if writer.userData.size = 0 then
    (writer, limitSize)
  else
    -- Fold state: (byte arrays to emit, chunks still pending, bytes consumed so far).
    let (chunks, pending, totalSize) := writer.userData.foldl (fun (state : Array ByteArray × Array Chunk × Nat) chunk =>
      let (acc, pending, size) := state
      if size >= limitSize then
        -- Budget exhausted: everything else stays pending.
        (acc, pending.push chunk, size)
      else
        let remaining := limitSize - size
        let takeSize := min chunk.data.size remaining
        let dataPart := chunk.data.extract 0 takeSize
        let acc := if takeSize = 0 then acc else acc.push dataPart
        let size := size + takeSize
        if takeSize < chunk.data.size then
          -- The chunk straddles the budget: requeue its unsent suffix.
          let pendingChunk : Chunk := { chunk with data := chunk.data.extract takeSize chunk.data.size }
          (acc, pending.push pendingChunk, size)
        else
          (acc, pending, size)
    ) (#[], #[], 0)
    let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks)
    let remaining := limitSize - totalSize
    ({ writer with userData := pending, outputData, userDataBytes := writer.userDataBytes - totalSize }, remaining)
|
||||
|
||||
/--
Writes accumulated user data to output using chunked transfer encoding.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
  if writer.userData.isEmpty then
    writer
  else
    -- Each queued chunk is wire-encoded (size line + payload + CRLF) in order.
    let queued := writer.userData
    { writer with
        userData := #[]
        userDataBytes := 0
        outputData := queued.foldl (Encode.encode .v11) writer.outputData }

/--
Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
  -- Flush any still-queued chunks before the terminator.
  let flushed := writer.writeChunkedBody
  { flushed with
      outputData := flushed.outputData.write "0\r\n\r\n".toUTF8
      state := .complete
  }
|
||||
|
||||
/--
Extracts all accumulated output data and returns it with a cleared output buffer.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
  -- NOTE(review): this always returns `some` (even for an empty buffer) — the
  -- `Option` wrapper looks vestigial; confirm callers before simplifying.
  let output := writer.outputData.toByteArray
  some ({ writer with outputData := ChunkedBuffer.empty }, output)

/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
  { writer with state }

/--
Writes the message headers to the output buffer.
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
  -- Serialization is delegated to the shared HTTP/1.1 encoder.
  { writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead }

/--
Checks if the connection should be kept alive based on the Connection header.
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
  -- Absent Connection header defaults to keep-alive.
  -- NOTE(review): this does not special-case HTTP/1.0 (where the default is
  -- close) — confirm the version is handled upstream.
  writer.messageHead.headers.get? .connection
    |>.map (fun v => v.value.toLower != "close")
    |>.getD true

/--
Closes the writer, transitioning to the closed state.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
  { writer with state := .closed }
|
||||
|
||||
end Writer
|
||||
@@ -44,8 +44,15 @@ protected def Parser.run (p : Parser α) (arr : ByteArray) : Except String α :=
|
||||
Parse a single byte equal to `b`, fails if different.
|
||||
-/
|
||||
@[inline]
|
||||
def pbyte (b : UInt8) : Parser UInt8 := attempt do
|
||||
if (← any) = b then pure b else fail s!"expected: '{b}'"
|
||||
def pbyte (b : UInt8) : Parser UInt8 := fun it =>
|
||||
if h : it.hasNext then
|
||||
let got := it.curr' h
|
||||
if got = b then
|
||||
.success (it.next' h) got
|
||||
else
|
||||
.error it (.other s!"expected: '{b}'")
|
||||
else
|
||||
.error it .eof
|
||||
|
||||
/--
|
||||
Skip a single byte equal to `b`, fails if different.
|
||||
@@ -57,16 +64,29 @@ def skipByte (b : UInt8) : Parser Unit :=
|
||||
/--
|
||||
Skip a sequence of bytes equal to the given `ByteArray`.
|
||||
-/
|
||||
def skipBytes (arr : ByteArray) : Parser Unit := do
|
||||
for b in arr do
|
||||
skipByte b
|
||||
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
|
||||
let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
|
||||
if h : idx < arr.size then
|
||||
if hnext : it.hasNext then
|
||||
let got := it.curr' hnext
|
||||
let want := arr[idx]
|
||||
if got = want then
|
||||
go (idx + 1) (it.next' hnext)
|
||||
else
|
||||
.error it (.other s!"expected byte {want}, got {got}")
|
||||
else
|
||||
.error it .eof
|
||||
else
|
||||
.success it ()
|
||||
go 0 it
|
||||
|
||||
/--
|
||||
Parse a string by matching its UTF-8 bytes, returns the string on success.
|
||||
-/
|
||||
@[inline]
|
||||
def pstring (s : String) : Parser String := do
|
||||
skipBytes s.toUTF8
|
||||
let utf8 := s.toUTF8
|
||||
skipBytes utf8
|
||||
return s
|
||||
|
||||
/--
|
||||
@@ -193,19 +213,47 @@ def take (n : Nat) : Parser ByteSlice := fun it =>
|
||||
else
|
||||
.success (it.forward n) (it.array[it.idx...(it.idx+n)])
|
||||
|
||||
/--
|
||||
Scans while `pred` is satisfied. Returns `(count, iterator, hitEof)`.
|
||||
-/
|
||||
private partial def scanWhile (pred : UInt8 → Bool) (count : Nat) (iter : ByteArray.Iterator) :
|
||||
Nat × ByteArray.Iterator × Bool :=
|
||||
if h : iter.hasNext then
|
||||
if pred (iter.curr' h) then
|
||||
scanWhile pred (count + 1) (iter.next' h)
|
||||
else
|
||||
(count, iter, false)
|
||||
else
|
||||
(count, iter, true)
|
||||
|
||||
/--
|
||||
Scans while `pred` is satisfied, bounded by `limit`.
|
||||
Returns `(count, iterator, hitEof)`.
|
||||
-/
|
||||
private partial def scanWhileUpTo (pred : UInt8 → Bool) (limit : Nat) (count : Nat)
|
||||
(iter : ByteArray.Iterator) : Nat × ByteArray.Iterator × Bool :=
|
||||
if count ≥ limit then
|
||||
(count, iter, false)
|
||||
else if h : iter.hasNext then
|
||||
if pred (iter.curr' h) then
|
||||
scanWhileUpTo pred limit (count + 1) (iter.next' h)
|
||||
else
|
||||
(count, iter, false)
|
||||
else
|
||||
(count, iter, true)
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied.
|
||||
Fails with `.eof` if input ends while the predicate still holds.
|
||||
-/
|
||||
@[inline]
|
||||
partial def takeWhile (pred : UInt8 → Bool) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
|
||||
if ¬iter.hasNext then (count, iter)
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else (count, iter)
|
||||
|
||||
let (length, newIt) := findEnd 0 it
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
let (length, newIt, hitEof) := scanWhile pred 0 it
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Parses until a predicate is satisfied (exclusive).
|
||||
@@ -216,16 +264,16 @@ def takeUntil (pred : UInt8 → Bool) : Parser ByteSlice :=
|
||||
|
||||
/--
|
||||
Skips while a predicate is satisfied.
|
||||
Fails with `.eof` if input ends while the predicate still holds.
|
||||
-/
|
||||
@[inline]
|
||||
partial def skipWhile (pred : UInt8 → Bool) : Parser Unit :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
|
||||
if ¬iter.hasNext then iter
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else iter
|
||||
|
||||
.success (findEnd 0 it) ()
|
||||
let (_, newIt, hitEof) := scanWhile pred 0 it
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt ()
|
||||
|
||||
/--
|
||||
Skips until a predicate is satisfied.
|
||||
@@ -236,34 +284,31 @@ def skipUntil (pred : UInt8 → Bool) : Parser Unit :=
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, up to a given limit.
|
||||
Fails with `.eof` if input ends before stopping or reaching the limit.
|
||||
-/
|
||||
@[inline]
|
||||
partial def takeWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
|
||||
if count ≥ limit then (count, iter)
|
||||
else if ¬iter.hasNext then (count, iter)
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else (count, iter)
|
||||
let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
|
||||
|
||||
let (length, newIt) := findEnd 0 it
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, up to a given limit, requiring at least one byte.
|
||||
Fails with `.eof` if input ends before stopping or reaching the limit.
|
||||
-/
|
||||
@[inline]
|
||||
def takeWhileUpTo1 (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
|
||||
if count ≥ limit then (count, iter)
|
||||
else if ¬iter.hasNext then (count, iter)
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else (count, iter)
|
||||
let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
|
||||
|
||||
let (length, newIt) := findEnd 0 it
|
||||
if length = 0 then
|
||||
.error it (if newIt.atEnd then .eof else .other "expected at least one char")
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else if length = 0 then
|
||||
.error it (.other "expected at least one char")
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
@@ -274,19 +319,42 @@ Parses until a predicate is satisfied (exclusive), up to a given limit.
|
||||
def takeUntilUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
takeWhileUpTo (fun b => ¬pred b) limit
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, consuming at most `limit` bytes.
|
||||
Unlike `takeWhileUpTo`, succeeds even if input ends before the predicate stops holding.
|
||||
-/
|
||||
@[inline]
|
||||
def takeWhileAtMost (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, consuming at most `limit` bytes, requiring at least one.
|
||||
Unlike `takeWhileUpTo1`, succeeds even if input ends before the predicate stops holding.
|
||||
-/
|
||||
@[inline]
|
||||
def takeWhile1AtMost (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
|
||||
if length = 0 then
|
||||
.error it (.other "expected at least one char")
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Skips while a predicate is satisfied, up to a given limit.
|
||||
Fails with `.eof` if input ends before stopping or reaching the limit.
|
||||
-/
|
||||
@[inline]
|
||||
partial def skipWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser Unit :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
|
||||
if count ≥ limit then iter
|
||||
else if ¬iter.hasNext then iter
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else iter
|
||||
let (_, newIt, hitEof) := scanWhileUpTo pred limit 0 it
|
||||
|
||||
.success (findEnd 0 it) ()
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt ()
|
||||
|
||||
/--
|
||||
Skips until a predicate is satisfied, up to a given limit.
|
||||
|
||||
@@ -325,6 +325,55 @@ LEAN_EXPORT LEAN_NORETURN void lean_internal_panic(char const * msg);
|
||||
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_out_of_memory(void);
|
||||
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_unreachable(void);
|
||||
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_rc_overflow(void);
|
||||
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_overflow(void);
|
||||
|
||||
static inline bool lean_usize_mul_would_overflow(size_t a, size_t b) {
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
size_t r;
|
||||
return __builtin_mul_overflow(a, b, &r);
|
||||
#else
|
||||
return a != 0 && b > SIZE_MAX / a;
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline bool lean_usize_add_would_overflow(size_t a, size_t b) {
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
size_t r;
|
||||
return __builtin_add_overflow(a, b, &r);
|
||||
#else
|
||||
return a > SIZE_MAX - b;
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline size_t lean_usize_mul_checked(size_t a, size_t b) {
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
size_t r;
|
||||
if (LEAN_UNLIKELY(__builtin_mul_overflow(a, b, &r))) {
|
||||
lean_internal_panic_overflow();
|
||||
}
|
||||
return r;
|
||||
#else
|
||||
if (a != 0 && b > SIZE_MAX / a) {
|
||||
lean_internal_panic_overflow();
|
||||
}
|
||||
return a * b;
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline size_t lean_usize_add_checked(size_t a, size_t b) {
|
||||
#if defined(__GNUC__) || defined(__clang__)
|
||||
size_t r;
|
||||
if (LEAN_UNLIKELY(__builtin_add_overflow(a, b, &r))) {
|
||||
lean_internal_panic_overflow();
|
||||
}
|
||||
return r;
|
||||
#else
|
||||
if (a > SIZE_MAX - b) {
|
||||
lean_internal_panic_overflow();
|
||||
}
|
||||
return a + b;
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline size_t lean_align(size_t v, size_t a) {
|
||||
return (v / a)*a + a * (v % a != 0);
|
||||
@@ -617,7 +666,7 @@ static inline uint8_t * lean_ctor_scalar_cptr(lean_object * o) {
|
||||
|
||||
static inline lean_object * lean_alloc_ctor(unsigned tag, unsigned num_objs, unsigned scalar_sz) {
|
||||
assert(tag <= LeanMaxCtorTag && num_objs < LEAN_MAX_CTOR_FIELDS && scalar_sz < LEAN_MAX_CTOR_SCALARS_SIZE);
|
||||
lean_object * o = lean_alloc_ctor_memory(sizeof(lean_ctor_object) + sizeof(void*)*num_objs + scalar_sz);
|
||||
lean_object * o = lean_alloc_ctor_memory(lean_usize_add_checked(lean_usize_add_checked(sizeof(lean_ctor_object), lean_usize_mul_checked(sizeof(void*), num_objs)), scalar_sz));
|
||||
lean_set_st_header(o, tag, num_objs);
|
||||
return o;
|
||||
}
|
||||
@@ -727,7 +776,7 @@ static inline lean_object ** lean_closure_arg_cptr(lean_object * o) { return lea
|
||||
static inline lean_obj_res lean_alloc_closure(void * fun, unsigned arity, unsigned num_fixed) {
|
||||
assert(arity > 0);
|
||||
assert(num_fixed < arity);
|
||||
lean_closure_object * o = (lean_closure_object*)lean_alloc_object(sizeof(lean_closure_object) + sizeof(void*)*num_fixed);
|
||||
lean_closure_object * o = (lean_closure_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_closure_object), lean_usize_mul_checked(sizeof(void*), num_fixed)));
|
||||
lean_set_st_header((lean_object*)o, LeanClosure, 0);
|
||||
o->m_fun = fun;
|
||||
o->m_arity = arity;
|
||||
@@ -773,7 +822,7 @@ LEAN_EXPORT lean_object* lean_apply_m(lean_object* f, unsigned n, lean_object**
|
||||
|
||||
/* Arrays of objects (low level API) */
|
||||
static inline lean_obj_res lean_alloc_array(size_t size, size_t capacity) {
|
||||
lean_array_object * o = (lean_array_object*)lean_alloc_object(sizeof(lean_array_object) + sizeof(void*)*capacity);
|
||||
lean_array_object * o = (lean_array_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_array_object), lean_usize_mul_checked(sizeof(void*), capacity)));
|
||||
lean_set_st_header((lean_object*)o, LeanArray, 0);
|
||||
o->m_size = size;
|
||||
o->m_capacity = capacity;
|
||||
@@ -950,8 +999,18 @@ LEAN_EXPORT lean_object * lean_mk_array(lean_obj_arg n, lean_obj_arg v);
|
||||
|
||||
/* Array of scalars */
|
||||
|
||||
static inline bool lean_alloc_sarray_would_overflow(unsigned elem_size, size_t capacity) {
|
||||
if (lean_usize_mul_would_overflow(elem_size, capacity)) {
|
||||
return true;
|
||||
}
|
||||
if (lean_usize_add_would_overflow(sizeof(lean_sarray_object), elem_size * capacity)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static inline lean_obj_res lean_alloc_sarray(unsigned elem_size, size_t size, size_t capacity) {
|
||||
lean_sarray_object * o = (lean_sarray_object*)lean_alloc_object(sizeof(lean_sarray_object) + elem_size*capacity);
|
||||
lean_sarray_object * o = (lean_sarray_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_sarray_object), lean_usize_mul_checked(elem_size, capacity)));
|
||||
lean_set_st_header((lean_object*)o, LeanScalarArray, elem_size);
|
||||
o->m_size = size;
|
||||
o->m_capacity = capacity;
|
||||
@@ -1113,7 +1172,7 @@ static inline lean_obj_res lean_float_array_set(lean_obj_arg a, b_lean_obj_arg i
|
||||
/* Strings */
|
||||
|
||||
static inline lean_obj_res lean_alloc_string(size_t size, size_t capacity, size_t len) {
|
||||
lean_string_object * o = (lean_string_object*)lean_alloc_object(sizeof(lean_string_object) + capacity);
|
||||
lean_string_object * o = (lean_string_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_string_object), capacity));
|
||||
lean_set_st_header((lean_object*)o, LeanString, 0);
|
||||
o->m_size = size;
|
||||
o->m_capacity = capacity;
|
||||
|
||||
@@ -136,7 +136,7 @@ public def compileStaticLib
|
||||
let args := args.push libFile.toString ++ (← mkArgs libFile <| oFiles.map toString)
|
||||
proc {cmd := ar.toString, args}
|
||||
|
||||
private def getMacOSXDeploymentEnv : BaseIO (Array (String × Option String)) := do
|
||||
def getMacOSXDeploymentEnv : BaseIO (Array (String × Option String)) := do
|
||||
-- It is difficult to identify the correct minor version here, leading to linking warnings like:
|
||||
-- `ld64.lld: warning: /usr/lib/system/libsystem_kernel.dylib has version 13.5.0, which is newer than target minimum of 13.0.0`
|
||||
-- In order to suppress these we set the MACOSX_DEPLOYMENT_TARGET variable into the far future.
|
||||
|
||||
@@ -126,7 +126,7 @@ public def BuildMetadata.parse (contents : String) : Except String BuildMetadata
|
||||
public def BuildMetadata.ofFetch (inputHash : Hash) (outputs : Json) : BuildMetadata :=
|
||||
{depHash := inputHash, outputs? := outputs, synthetic := true, inputs := #[], log := {}}
|
||||
|
||||
private partial def serializeInputs (inputs : Array BuildTrace) : Array (String × Json) :=
|
||||
partial def serializeInputs (inputs : Array BuildTrace) : Array (String × Json) :=
|
||||
inputs.foldl (init := {}) fun r trace =>
|
||||
let val :=
|
||||
if trace.inputs.isEmpty then
|
||||
@@ -135,7 +135,7 @@ private partial def serializeInputs (inputs : Array BuildTrace) : Array (String
|
||||
toJson (serializeInputs trace.inputs)
|
||||
r.push (trace.caption, val)
|
||||
|
||||
private def BuildMetadata.ofBuildCore
|
||||
def BuildMetadata.ofBuildCore
|
||||
(depTrace : BuildTrace) (outputs : Json) (log : Log)
|
||||
: BuildMetadata where
|
||||
inputs := serializeInputs depTrace.inputs
|
||||
@@ -210,7 +210,7 @@ deriving DecidableEq
|
||||
@[inline] public def OutputStatus.isCacheable (status : OutputStatus) : Bool :=
|
||||
status != .mtimeUpToDate
|
||||
|
||||
@[specialize] private def checkHashUpToDate'
|
||||
@[specialize] def checkHashUpToDate'
|
||||
[CheckExists ι] [GetMTime ι]
|
||||
(info : ι) (depTrace : BuildTrace) (depHash : Option Hash)
|
||||
(oldTrace := depTrace.mtime)
|
||||
@@ -514,7 +514,7 @@ stored in the cached input-to-content mapping.
|
||||
|
||||
**For internal use only.**
|
||||
-/
|
||||
@[specialize] private def getArtifactsUsingCache?
|
||||
@[specialize] def getArtifactsUsingCache?
|
||||
[ResolveOutputs α] (inputHash : Hash) (pkg : Package)
|
||||
: JobM (Option α) := do
|
||||
if let some out ← (← getLakeCache).readOutputs? pkg.cacheScope inputHash then
|
||||
@@ -868,7 +868,7 @@ public def buildStaticLib
|
||||
compileStaticLib libFile oFiles (← getLeanAr) thin
|
||||
return art.path
|
||||
|
||||
private def mkLinkObjArgs
|
||||
def mkLinkObjArgs
|
||||
(objs : Array FilePath) (libs : Array Dynlib) : Array String
|
||||
:= Id.run do
|
||||
let mut args := #[]
|
||||
@@ -884,7 +884,7 @@ private def mkLinkObjArgs
|
||||
Topologically sorts the library dependency tree by name.
|
||||
Libraries come *before* their dependencies.
|
||||
-/
|
||||
private partial def mkLinkOrder (libs : Array Dynlib) : JobM (Array Dynlib) := do
|
||||
partial def mkLinkOrder (libs : Array Dynlib) : JobM (Array Dynlib) := do
|
||||
let r := libs.foldlM (m := Except (Cycle String)) (init := ({}, #[])) fun (v, o) lib =>
|
||||
go lib [] v o
|
||||
match r with
|
||||
|
||||
@@ -19,7 +19,7 @@ open System (FilePath)
|
||||
The build function definition for a Lean executable.
|
||||
-/
|
||||
|
||||
private def LeanExe.recBuildExe (self : LeanExe) : FetchM (Job FilePath) :=
|
||||
def LeanExe.recBuildExe (self : LeanExe) : FetchM (Job FilePath) :=
|
||||
withRegisterJob s!"{self.name}:exe" <| withCurrPackage self.pkg do
|
||||
/-
|
||||
Remark: We must build the root before we fetch the transitive imports
|
||||
@@ -56,7 +56,7 @@ private def LeanExe.recBuildExe (self : LeanExe) : FetchM (Job FilePath) :=
|
||||
public def LeanExe.exeFacetConfig : LeanExeFacetConfig exeFacet :=
|
||||
mkFacetJobConfig recBuildExe
|
||||
|
||||
private def LeanExe.recBuildDefault (lib : LeanExe) : FetchM (Job FilePath) :=
|
||||
def LeanExe.recBuildDefault (lib : LeanExe) : FetchM (Job FilePath) :=
|
||||
lib.exe.fetch
|
||||
|
||||
/-- The facet configuration for the builtin `ExternLib.dynlibFacet`. -/
|
||||
|
||||
@@ -20,7 +20,7 @@ open System
|
||||
|
||||
namespace Lake
|
||||
|
||||
private def ExternLib.recBuildStatic (lib : ExternLib) : FetchM (Job FilePath) :=
|
||||
def ExternLib.recBuildStatic (lib : ExternLib) : FetchM (Job FilePath) :=
|
||||
withRegisterJob s!"{lib.staticTargetName.toString}:static" do
|
||||
lib.config.getPath <$> fetch (lib.pkg.target lib.staticTargetName)
|
||||
|
||||
@@ -53,7 +53,7 @@ public def buildLeanSharedLibOfStatic
|
||||
compileSharedLib dynlib args lean.cc
|
||||
return dynlib
|
||||
|
||||
private def ExternLib.recBuildShared (lib : ExternLib) : FetchM (Job FilePath) :=
|
||||
def ExternLib.recBuildShared (lib : ExternLib) : FetchM (Job FilePath) :=
|
||||
withRegisterJob s!"{lib.staticTargetName.toString}:shared" do
|
||||
buildLeanSharedLibOfStatic (← lib.static.fetch) lib.linkArgs
|
||||
|
||||
@@ -74,7 +74,7 @@ def computeDynlibOfShared (sharedLibTarget : Job FilePath) : SpawnM (Job Dynlib)
|
||||
else
|
||||
error s!"shared library `{sharedLib}` has no file name"
|
||||
|
||||
private def ExternLib.recComputeDynlib (lib : ExternLib) : FetchM (Job Dynlib) := do
|
||||
def ExternLib.recComputeDynlib (lib : ExternLib) : FetchM (Job Dynlib) := do
|
||||
withRegisterJob s!"{lib.staticTargetName.toString}:dynlib" do
|
||||
computeDynlibOfShared (← lib.shared.fetch)
|
||||
|
||||
@@ -82,7 +82,7 @@ private def ExternLib.recComputeDynlib (lib : ExternLib) : FetchM (Job Dynlib) :
|
||||
public def ExternLib.dynlibFacetConfig : ExternLibFacetConfig dynlibFacet :=
|
||||
mkFacetJobConfig recComputeDynlib
|
||||
|
||||
private def ExternLib.recBuildDefault (lib : ExternLib) : FetchM (Job FilePath) :=
|
||||
def ExternLib.recBuildDefault (lib : ExternLib) : FetchM (Job FilePath) :=
|
||||
lib.static.fetch
|
||||
|
||||
/-- The facet configuration for the builtin `ExternLib.dynlibFacet`. -/
|
||||
|
||||
@@ -26,7 +26,7 @@ open System (FilePath)
|
||||
namespace Lake
|
||||
|
||||
/-- Recursive build function for anything in the Lake build index. -/
|
||||
private def recBuildWithIndex (info : BuildInfo) : FetchM (Job (BuildData info.key)) :=
|
||||
def recBuildWithIndex (info : BuildInfo) : FetchM (Job (BuildData info.key)) :=
|
||||
match info with
|
||||
| .target pkg target => do
|
||||
if let some decl := pkg.findTargetDecl? target then
|
||||
@@ -55,7 +55,7 @@ private def recBuildWithIndex (info : BuildInfo) : FetchM (Job (BuildData info.k
|
||||
error s!"invalid target '{info}': unknown facet '{facet}'"
|
||||
|
||||
/-- Recursive build function with memoization. -/
|
||||
private def recFetchWithIndex : (info : BuildInfo) → RecBuildM (Job (BuildData info.key)) :=
|
||||
def recFetchWithIndex : (info : BuildInfo) → RecBuildM (Job (BuildData info.key)) :=
|
||||
inline <| recFetchAcyclic (β := (Job <| BuildData ·.key)) BuildInfo.key recBuildWithIndex
|
||||
|
||||
/--
|
||||
|
||||
@@ -21,7 +21,7 @@ namespace Lake
|
||||
|
||||
/-! ## Input File -/
|
||||
|
||||
private def InputFile.recFetch (t : InputFile) : FetchM (Job FilePath) :=
|
||||
def InputFile.recFetch (t : InputFile) : FetchM (Job FilePath) :=
|
||||
withRegisterJob s!"{t.name}" do
|
||||
inputFile t.path t.text
|
||||
|
||||
@@ -39,7 +39,7 @@ public def InputFile.initFacetConfigs : DNameMap (KFacetConfig InputFile.facetKi
|
||||
|
||||
/-! ## Input Directory -/
|
||||
|
||||
private def InputDir.recFetch (t : InputDir) : FetchM (Job (Array FilePath)) :=
|
||||
def InputDir.recFetch (t : InputDir) : FetchM (Job (Array FilePath)) :=
|
||||
withRegisterJob s!"{t.name}" do
|
||||
inputDir t.path t.text t.filter
|
||||
|
||||
|
||||
@@ -191,7 +191,7 @@ end Job
|
||||
/-- A Lake job task with an opaque value in `Type`. -/
|
||||
public abbrev OpaqueJobTask := JobTask Opaque
|
||||
|
||||
@[inline] private unsafe def JobTask.toOpaqueImpl (self : JobTask α) : OpaqueJobTask :=
|
||||
@[inline] unsafe def JobTask.toOpaqueImpl (self : JobTask α) : OpaqueJobTask :=
|
||||
unsafeCast self
|
||||
|
||||
/-- Forget the value of a job task. Implemented as a no-op cast. -/
|
||||
|
||||
@@ -275,7 +275,7 @@ public def collectList (jobs : List (Job α)) (traceCaption := "<collection>") :
|
||||
public def collectArray (jobs : Array (Job α)) (traceCaption := "<collection>") : Job (Array α) :=
|
||||
jobs.foldl (zipWith (sync := true) Array.push) (traceRoot (Array.mkEmpty jobs.size) traceCaption)
|
||||
|
||||
private instance : Nonempty ({α : Type u} → [Nonempty α] → α) := ⟨Classical.ofNonempty⟩
|
||||
instance : Nonempty ({α : Type u} → [Nonempty α] → α) := ⟨Classical.ofNonempty⟩
|
||||
|
||||
/-- Merge an `Vector` of jobs into one, collecting their outputs into an `Array`. -/
|
||||
public def collectVector {α : Type u} [Nonempty α] (jobs : Vector (Job α) n) (traceCaption := "<collection>") : Job (Vector α n) :=
|
||||
|
||||
@@ -33,7 +33,7 @@ private structure ModuleCollection where
|
||||
Collect the local modules of a library.
|
||||
That is, the modules from `getModuleArray` plus their local transitive imports.
|
||||
-/
|
||||
private partial def LeanLib.recCollectLocalModules
|
||||
partial def LeanLib.recCollectLocalModules
|
||||
(self : LeanLib) : FetchM (Job (Array Module))
|
||||
:= ensureJob do
|
||||
let mut col : ModuleCollection := {}
|
||||
@@ -59,10 +59,10 @@ where
|
||||
return col
|
||||
|
||||
/-- The `LibraryFacetConfig` for the builtin `modulesFacet`. -/
|
||||
private def LeanLib.modulesFacetConfig : LibraryFacetConfig modulesFacet :=
|
||||
def LeanLib.modulesFacetConfig : LibraryFacetConfig modulesFacet :=
|
||||
mkFacetJobConfig LeanLib.recCollectLocalModules (buildable := false)
|
||||
|
||||
private def LeanLib.recBuildLean
|
||||
def LeanLib.recBuildLean
|
||||
(self : LeanLib) : FetchM (Job Unit)
|
||||
:= do
|
||||
let mods ← (← self.modules.fetch).await
|
||||
@@ -73,7 +73,7 @@ private def LeanLib.recBuildLean
|
||||
public def LeanLib.leanArtsFacetConfig : LibraryFacetConfig leanArtsFacet :=
|
||||
mkFacetJobConfig LeanLib.recBuildLean
|
||||
|
||||
@[specialize] private def LeanLib.recBuildStatic
|
||||
@[specialize] def LeanLib.recBuildStatic
|
||||
(self : LeanLib) (shouldExport : Bool) : FetchM (Job FilePath)
|
||||
:= do
|
||||
let suffix :=
|
||||
@@ -123,7 +123,7 @@ public def LeanLib.staticExportFacetConfig : LibraryFacetConfig staticExportFace
|
||||
|
||||
/-! ## Build Shared Lib -/
|
||||
|
||||
private def LeanLib.recBuildShared (self : LeanLib) : FetchM (Job Dynlib) := do
|
||||
def LeanLib.recBuildShared (self : LeanLib) : FetchM (Job Dynlib) := do
|
||||
withRegisterJob s!"{self.name}:shared" <| withCurrPackage self.pkg do
|
||||
let mods ← (← self.modules.fetch).await
|
||||
let objJobs ← mods.flatMapM fun mod =>
|
||||
@@ -158,7 +158,7 @@ public def LeanLib.sharedFacetConfig : LibraryFacetConfig sharedFacet :=
|
||||
|
||||
/--
|
||||
Build extra target dependencies of the library (e.g., `extraDepTargets`, `needs`). -/
|
||||
private def LeanLib.recBuildExtraDepTargets (self : LeanLib) : FetchM (Job Unit) := do
|
||||
def LeanLib.recBuildExtraDepTargets (self : LeanLib) : FetchM (Job Unit) := do
|
||||
let mut job := Job.nil s!"{self.pkg.baseName}/{self.name}:extraDep"
|
||||
job := job.mix (← self.pkg.extraDep.fetch)
|
||||
for target in self.extraDepTargets do
|
||||
@@ -172,7 +172,7 @@ public def LeanLib.extraDepFacetConfig : LibraryFacetConfig extraDepFacet :=
|
||||
mkFacetJobConfig LeanLib.recBuildExtraDepTargets
|
||||
|
||||
/-- Build the default facets for the library. -/
|
||||
private def LeanLib.recBuildDefaultFacets (self : LeanLib) : FetchM (Job Unit) := do
|
||||
def LeanLib.recBuildDefaultFacets (self : LeanLib) : FetchM (Job Unit) := do
|
||||
Job.mixArray <$> self.defaultFacets.mapM fun facet => do
|
||||
let job ← (self.facetCore facet).fetch
|
||||
return job.toOpaque
|
||||
|
||||
@@ -27,7 +27,7 @@ Build function definitions for a module's builtin facets.
|
||||
-/
|
||||
|
||||
/-- Parse the header of a Lean file from its source. -/
|
||||
private def Module.recFetchInput (mod : Module) : FetchM (Job ModuleInput) := Job.async do
|
||||
def Module.recFetchInput (mod : Module) : FetchM (Job ModuleInput) := Job.async do
|
||||
let path := mod.leanFile
|
||||
let contents ← IO.FS.readFile path
|
||||
let trace := {caption := path.toString, mtime := ← getMTime path, hash := .ofText contents}
|
||||
@@ -52,7 +52,7 @@ public def Module.headerFacetConfig : ModuleFacetConfig headerFacet :=
|
||||
return (← mod.input.fetch).map (sync := true) (·.header)
|
||||
|
||||
/-- Compute an `Array` of a module's direct local imports from its header. -/
|
||||
private def Module.recParseImports (mod : Module) : FetchM (Job (Array Module)) := do
|
||||
def Module.recParseImports (mod : Module) : FetchM (Job (Array Module)) := do
|
||||
(← mod.input.fetch).mapM (sync := true) fun input => do
|
||||
let mods := input.imports.foldl (init := OrdModuleSet.empty) fun set imp =>
|
||||
match imp.module? with | some mod => set.insert mod | none => set
|
||||
@@ -62,12 +62,12 @@ private def Module.recParseImports (mod : Module) : FetchM (Job (Array Module))
|
||||
public def Module.importsFacetConfig : ModuleFacetConfig importsFacet :=
|
||||
mkFacetJobConfig recParseImports (buildable := false)
|
||||
|
||||
private structure ModuleImportData where
|
||||
structure ModuleImportData where
|
||||
module : Module
|
||||
transImports : Job (Array Module)
|
||||
includeSelf : Bool
|
||||
|
||||
@[inline] private def collectImportsAux
|
||||
@[inline] def collectImportsAux
|
||||
(fileName : String) (imports : Array Module)
|
||||
(f : Module → FetchM (Bool × Job (Array Module)))
|
||||
: FetchM (Job (Array Module)) := do
|
||||
@@ -93,21 +93,21 @@ private structure ModuleImportData where
|
||||
| .ok impSet s => .ok impSet.toArray s
|
||||
| .error e s => .error e s
|
||||
|
||||
private def computeTransImportsAux
|
||||
def computeTransImportsAux
|
||||
(fileName : String) (imports : Array Module)
|
||||
: FetchM (Job (Array Module)) := do
|
||||
collectImportsAux fileName imports fun imp =>
|
||||
(true, ·) <$> imp.transImports.fetch
|
||||
|
||||
/-- Recursively compute a module's transitive imports. -/
|
||||
private def Module.recComputeTransImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
|
||||
def Module.recComputeTransImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
|
||||
inline <| computeTransImportsAux mod.relLeanFile.toString (← (← mod.imports.fetch).await)
|
||||
|
||||
/-- The `ModuleFacetConfig` for the builtin `transImportsFacet`. -/
|
||||
public def Module.transImportsFacetConfig : ModuleFacetConfig transImportsFacet :=
|
||||
mkFacetJobConfig recComputeTransImports (buildable := false)
|
||||
|
||||
private def computePrecompileImportsAux
|
||||
def computePrecompileImportsAux
|
||||
(fileName : String) (imports : Array Module)
|
||||
: FetchM (Job (Array Module)) := do
|
||||
collectImportsAux fileName imports fun imp =>
|
||||
@@ -117,7 +117,7 @@ private def computePrecompileImportsAux
|
||||
(false, ·) <$> imp.precompileImports.fetch
|
||||
|
||||
/-- Recursively compute a module's precompiled imports. -/
|
||||
private def Module.recComputePrecompileImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
|
||||
def Module.recComputePrecompileImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
|
||||
inline <| computePrecompileImportsAux mod.relLeanFile.toString (← (← mod.imports.fetch).await)
|
||||
|
||||
/-- The `ModuleFacetConfig` for the builtin `precompileImportsFacet`. -/
|
||||
@@ -129,7 +129,7 @@ Computes the transitive dynamic libraries of a module's imports.
|
||||
Modules from the same library are loaded individually, while modules
|
||||
from other libraries are loaded as part of the whole library.
|
||||
-/
|
||||
private def Module.fetchImportLibs
|
||||
def Module.fetchImportLibs
|
||||
(self : Module) (imps : Array Module) (compileSelf : Bool)
|
||||
: FetchM (Array (Job Dynlib)) := do
|
||||
let (_, jobs) ← imps.foldlM (init := (({} : NameSet), #[])) fun (libs, jobs) imp => do
|
||||
@@ -149,7 +149,7 @@ private def Module.fetchImportLibs
|
||||
Fetches the library dynlibs of a list of non-local imports.
|
||||
Modules are loaded as part of their whole library.
|
||||
-/
|
||||
private def fetchImportLibs
|
||||
def fetchImportLibs
|
||||
(mods : Array Module) : FetchM (Job (Array Dynlib))
|
||||
:= do
|
||||
let (_, jobs) ← mods.foldlM (init := (({} : NameSet), #[])) fun (libs, jobs) imp => do
|
||||
@@ -166,7 +166,7 @@ private def fetchImportLibs
|
||||
Topologically sorts the library dependency tree by name.
|
||||
Libraries come *after* their dependencies.
|
||||
-/
|
||||
private partial def mkLoadOrder (libs : Array Dynlib) : FetchM (Array Dynlib) := do
|
||||
partial def mkLoadOrder (libs : Array Dynlib) : FetchM (Array Dynlib) := do
|
||||
let r := libs.foldlM (m := Except (Cycle String)) (init := ({}, #[])) fun (v, o) lib =>
|
||||
go lib [] v o
|
||||
match r with
|
||||
@@ -185,12 +185,12 @@ where
|
||||
let o := o.push lib
|
||||
return (v, o)
|
||||
|
||||
private structure ModuleDeps where
|
||||
structure ModuleDeps where
|
||||
dynlibs : Array Dynlib := #[]
|
||||
plugins : Array Dynlib := #[]
|
||||
deriving Inhabited, Repr
|
||||
|
||||
private def computeModuleDeps
|
||||
def computeModuleDeps
|
||||
(impLibs : Array Dynlib) (externLibs : Array Dynlib)
|
||||
(dynlibs : Array Dynlib) (plugins : Array Dynlib)
|
||||
: FetchM ModuleDeps := do
|
||||
@@ -219,7 +219,7 @@ private def computeModuleDeps
|
||||
plugins := plugins.push (← getLakeInstall).sharedDynlib
|
||||
return {dynlibs, plugins}
|
||||
|
||||
private partial def fetchTransImportArts
|
||||
partial def fetchTransImportArts
|
||||
(directImports : Array ModuleImport) (directArts : NameMap ImportArtifacts) (nonModule : Bool)
|
||||
: FetchM (NameMap ImportArtifacts) := do
|
||||
let q ← directImports.foldlM (init := #[]) fun q imp => do
|
||||
@@ -254,7 +254,7 @@ where
|
||||
else q
|
||||
else q
|
||||
|
||||
private def ModuleImportInfo.nil (modName : Name) : ModuleImportInfo where
|
||||
def ModuleImportInfo.nil (modName : Name) : ModuleImportInfo where
|
||||
directArts := {}
|
||||
trace := .nil s!"imports"
|
||||
transTrace := .nil s!"{modName} transitive imports (public)"
|
||||
@@ -262,7 +262,7 @@ private def ModuleImportInfo.nil (modName : Name) : ModuleImportInfo where
|
||||
allTransTrace := .nil s!"{modName} transitive imports (all)"
|
||||
legacyTransTrace := .nil s!"{modName} transitive imports (legacy)"
|
||||
|
||||
private def ModuleExportInfo.disambiguationHash
|
||||
def ModuleExportInfo.disambiguationHash
|
||||
(self : ModuleExportInfo) (nonModule : Bool) (imp : Import)
|
||||
: Hash :=
|
||||
if nonModule then
|
||||
@@ -274,7 +274,7 @@ private def ModuleExportInfo.disambiguationHash
|
||||
else
|
||||
self.transTrace.hash.mix self.artsTrace.hash
|
||||
|
||||
private def ModuleImportInfo.addImport
|
||||
def ModuleImportInfo.addImport
|
||||
(info : ModuleImportInfo) (nonModule : Bool)
|
||||
(imp : Import) (expInfo : ModuleExportInfo)
|
||||
: ModuleImportInfo :=
|
||||
@@ -351,14 +351,14 @@ private def ModuleImportInfo.addImport
|
||||
else
|
||||
info
|
||||
|
||||
private def Package.discriminant (self : Package) :=
|
||||
def Package.discriminant (self : Package) :=
|
||||
if self.version == {} then
|
||||
self.prettyName
|
||||
else
|
||||
s!"{self.prettyName}@{self.version}"
|
||||
|
||||
set_option linter.unusedVariables.funArgs false in
|
||||
private def fetchImportInfo
|
||||
def fetchImportInfo
|
||||
(fileName : String) (pkgName modName : Name) (header : ModuleHeader)
|
||||
: FetchM (Job ModuleImportInfo) := do
|
||||
let nonModule := !header.isModule
|
||||
@@ -422,17 +422,17 @@ public def Module.importInfoFacetConfig : ModuleFacetConfig importInfoFacet :=
|
||||
let header ← (← mod.header.fetch).await
|
||||
fetchImportInfo mod.relLeanFile.toString mod.pkg.keyName mod.name header
|
||||
|
||||
private def noServerOLeanError :=
|
||||
def noServerOLeanError :=
|
||||
"No server olean generated. Ensure the module system is enabled."
|
||||
|
||||
private def noPrivateOLeanError :=
|
||||
def noPrivateOLeanError :=
|
||||
"No private olean generated. Ensure the module system is enabled."
|
||||
|
||||
private def noIRError :=
|
||||
def noIRError :=
|
||||
"No `.ir` generated. Ensure the module system is enabled."
|
||||
|
||||
/-- Computes the import artifacts and transitive import trace of a module's imports. -/
|
||||
private def Module.computeExportInfo (mod : Module) : FetchM (Job ModuleExportInfo) := do
|
||||
def Module.computeExportInfo (mod : Module) : FetchM (Job ModuleExportInfo) := do
|
||||
(← mod.leanArts.fetch).mapM (sync := true) fun arts => do
|
||||
let input ← (← mod.input.fetch).await
|
||||
let importInfo ← (← mod.importInfo.fetch).await
|
||||
@@ -496,7 +496,7 @@ Recursively build a module's dependencies, including:
|
||||
* Shared libraries (e.g., `extern_lib` targets or precompiled modules)
|
||||
* `extraDepTargets` of its library
|
||||
-/
|
||||
private def Module.recFetchSetup (mod : Module) : FetchM (Job ModuleSetup) := ensureJob do
|
||||
def Module.recFetchSetup (mod : Module) : FetchM (Job ModuleSetup) := ensureJob do
|
||||
let extraDepJob ← mod.lib.extraDep.fetch
|
||||
let headerJob ← mod.header.fetch
|
||||
|
||||
@@ -660,7 +660,7 @@ def resolveModuleOutputs (out : CacheOutput) : JobM ModuleOutputs := do
|
||||
instance : ResolveOutputs ModuleOutputs := ⟨resolveModuleOutputs⟩
|
||||
|
||||
/-- Save module build artifacts to the local Lake cache. -/
|
||||
private def Module.cacheOutputArtifacts
|
||||
def Module.cacheOutputArtifacts
|
||||
(mod : Module) (isModule : Bool) (useLocalFile : Bool)
|
||||
: JobM ModuleOutputArtifacts := do
|
||||
return {
|
||||
@@ -686,12 +686,12 @@ Some module build artifacts must be located in the build directory (e.g., ILeans
|
||||
This copies the required artifacts from the local Lake cache to the build directory and
|
||||
updates the data structure with the new paths.
|
||||
-/
|
||||
private def Module.restoreNeededArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
|
||||
def Module.restoreNeededArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
|
||||
return {cached with
|
||||
ilean := ← restoreArtifact mod.ileanFile cached.ilean
|
||||
}
|
||||
|
||||
private def Module.restoreAllArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
|
||||
def Module.restoreAllArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
|
||||
return {cached with
|
||||
olean := ← restoreArtifact mod.oleanFile cached.olean
|
||||
oleanServer? := ← restoreSome mod.oleanServerFile cached.oleanServer?
|
||||
@@ -745,7 +745,7 @@ public protected def Module.getMTime (self : Module) (isModule : Bool) : IO MTim
|
||||
@[deprecated Module.getMTime (since := "2025-03-04")]
|
||||
public instance : GetMTime Module := ⟨Module.getMTime (isModule := false)⟩
|
||||
|
||||
private def ModuleOutputArtifacts.setMTime (self : ModuleOutputArtifacts) (mtime : MTime) : ModuleOutputArtifacts :=
|
||||
def ModuleOutputArtifacts.setMTime (self : ModuleOutputArtifacts) (mtime : MTime) : ModuleOutputArtifacts :=
|
||||
{self with
|
||||
olean := {self.olean with mtime}
|
||||
oleanServer? := self.oleanServer?.map ({· with mtime})
|
||||
@@ -756,7 +756,7 @@ private def ModuleOutputArtifacts.setMTime (self : ModuleOutputArtifacts) (mtime
|
||||
bc? := self.bc?.map ({· with mtime})
|
||||
}
|
||||
|
||||
private def Module.mkArtifacts (mod : Module) (srcFile : FilePath) (isModule : Bool) : ModuleArtifacts where
|
||||
def Module.mkArtifacts (mod : Module) (srcFile : FilePath) (isModule : Bool) : ModuleArtifacts where
|
||||
lean? := srcFile
|
||||
olean? := mod.oleanFile
|
||||
oleanServer? := if isModule then some mod.oleanServerFile else none
|
||||
@@ -766,7 +766,7 @@ private def Module.mkArtifacts (mod : Module) (srcFile : FilePath) (isModule : B
|
||||
c? := mod.cFile
|
||||
bc? := if Lean.Internal.hasLLVMBackend () then some mod.bcFile else none
|
||||
|
||||
private def Module.computeArtifacts (mod : Module) (isModule : Bool) : FetchM ModuleOutputArtifacts :=
|
||||
def Module.computeArtifacts (mod : Module) (isModule : Bool) : FetchM ModuleOutputArtifacts :=
|
||||
return {
|
||||
isModule
|
||||
olean := ← compute mod.oleanFile "olean"
|
||||
@@ -829,7 +829,7 @@ def Module.unpackLtar (self : Module) (ltar : FilePath) : JobM Unit := do
|
||||
]
|
||||
proc (quiet := true) {cmd := (← getLeantar).toString, args}
|
||||
|
||||
private def Module.recBuildLtar (self : Module) : FetchM (Job FilePath) := do
|
||||
def Module.recBuildLtar (self : Module) : FetchM (Job FilePath) := do
|
||||
withRegisterJob s!"{self.name}:ltar" <| withCurrPackage self.pkg do
|
||||
(← self.leanArts.fetch).mapM fun arts => do
|
||||
let art ← arts.ltar?.getDM do
|
||||
@@ -846,7 +846,7 @@ private def Module.recBuildLtar (self : Module) : FetchM (Job FilePath) := do
|
||||
public def Module.ltarFacetConfig : ModuleFacetConfig ltarFacet :=
|
||||
mkFacetJobConfig recBuildLtar
|
||||
|
||||
private def Module.buildLean
|
||||
def Module.buildLean
|
||||
(mod : Module) (depTrace : BuildTrace) (srcFile : FilePath) (setup : ModuleSetup)
|
||||
: JobM ModuleOutputArtifacts := buildAction depTrace mod.traceFile do
|
||||
let args := mod.weakLeanArgs ++ mod.leanArgs
|
||||
@@ -861,7 +861,7 @@ private def Module.buildLean
|
||||
mod.clearOutputHashes
|
||||
mod.computeArtifacts setup.isModule
|
||||
|
||||
private def traceOptions (opts : LeanOptions) (caption := "opts") : BuildTrace :=
|
||||
def traceOptions (opts : LeanOptions) (caption := "opts") : BuildTrace :=
|
||||
opts.values.foldl (init := .nil caption) fun t n v =>
|
||||
let opt := s!"-D{n}={v.asCliFlagValue}"
|
||||
t.mix <| .ofHash (pureHash opt) opt
|
||||
@@ -871,7 +871,7 @@ Recursively build a Lean module.
|
||||
Fetch its dependencies and then elaborate the Lean source file, producing
|
||||
all possible artifacts (e.g., `.olean`, `.ilean`, `.c`, `.bc`).
|
||||
-/
|
||||
private def Module.recBuildLean (mod : Module) : FetchM (Job ModuleOutputArtifacts) := do
|
||||
def Module.recBuildLean (mod : Module) : FetchM (Job ModuleOutputArtifacts) := do
|
||||
/-
|
||||
Remark: `withRegisterJob` must register `setupJob` to display module builds
|
||||
in the job monitor. However, it must also include the fetching of both jobs to
|
||||
@@ -997,7 +997,7 @@ where
|
||||
public def Module.leanArtsFacetConfig : ModuleFacetConfig leanArtsFacet :=
|
||||
mkFacetJobConfig recBuildLean
|
||||
|
||||
@[inline] private def Module.fetchOLeanCore
|
||||
@[inline] def Module.fetchOLeanCore
|
||||
(facet : String) (f : ModuleOutputArtifacts → Option Artifact) (errMsg : String) (mod : Module)
|
||||
: FetchM (Job FilePath) := do
|
||||
(← mod.leanArts.fetch).mapM (sync := true) fun arts => do
|
||||
@@ -1080,7 +1080,7 @@ public def Module.bcFacetConfig : ModuleFacetConfig bcFacet :=
|
||||
Recursively build the module's object file from its C file produced by `lean`
|
||||
with `-DLEAN_EXPORTING` set, which exports Lean symbols defined within the C files.
|
||||
-/
|
||||
private def Module.recBuildLeanCToOExport (self : Module) : FetchM (Job FilePath) := do
|
||||
def Module.recBuildLeanCToOExport (self : Module) : FetchM (Job FilePath) := do
|
||||
let suffix := if (← getIsVerbose) then " (with exports)" else ""
|
||||
withRegisterJob s!"{self.name}:c.o{suffix}" <| withCurrPackage self.pkg do
|
||||
-- TODO: add option to pass a target triplet for cross compilation
|
||||
@@ -1095,7 +1095,7 @@ public def Module.coExportFacetConfig : ModuleFacetConfig coExportFacet :=
|
||||
Recursively build the module's object file from its C file produced by `lean`.
|
||||
This version does not export any Lean symbols.
|
||||
-/
|
||||
private def Module.recBuildLeanCToONoExport (self : Module) : FetchM (Job FilePath) := do
|
||||
def Module.recBuildLeanCToONoExport (self : Module) : FetchM (Job FilePath) := do
|
||||
let suffix := if (← getIsVerbose) then " (without exports)" else ""
|
||||
withRegisterJob s!"{self.name}:c.o{suffix}" <| withCurrPackage self.pkg do
|
||||
-- TODO: add option to pass a target triplet for cross compilation
|
||||
@@ -1111,7 +1111,7 @@ public def Module.coFacetConfig : ModuleFacetConfig coFacet :=
|
||||
if Platform.isWindows then mod.coNoExport.fetch else mod.coExport.fetch
|
||||
|
||||
/-- Recursively build the module's object file from its bitcode file produced by `lean`. -/
|
||||
private def Module.recBuildLeanBcToO (self : Module) : FetchM (Job FilePath) := do
|
||||
def Module.recBuildLeanBcToO (self : Module) : FetchM (Job FilePath) := do
|
||||
withRegisterJob s!"{self.name}:bc.o" <| withCurrPackage self.pkg do
|
||||
-- TODO: add option to pass a target triplet for cross compilation
|
||||
buildLeanO self.bcoFile (← self.bc.fetch) self.weakLeancArgs self.leancArgs
|
||||
@@ -1145,7 +1145,7 @@ public def Module.oFacetConfig : ModuleFacetConfig oFacet :=
|
||||
Recursively build the shared library of a module
|
||||
(e.g., for `--load-dynlib` or `--plugin`).
|
||||
-/
|
||||
private def Module.recBuildDynlib (mod : Module) : FetchM (Job Dynlib) :=
|
||||
def Module.recBuildDynlib (mod : Module) : FetchM (Job Dynlib) :=
|
||||
withRegisterJob s!"{mod.name}:dynlib" <| withCurrPackage mod.pkg do
|
||||
/-
|
||||
Fetch the module's object files.
|
||||
@@ -1221,7 +1221,7 @@ building its imports and other dependencies. Used by `lake setup-file`.
|
||||
|
||||
Due to its exclusive use as a top-level build, it does not construct a proper trace state.
|
||||
-/
|
||||
private def setupEditedModule
|
||||
def setupEditedModule
|
||||
(mod : Module) (header : ModuleHeader)
|
||||
: FetchM (Job ModuleSetup) := do
|
||||
let extraDepJob ← mod.lib.extraDep.fetch
|
||||
@@ -1269,7 +1269,7 @@ to build the dependencies of the file and generate the data for `lean --setup`.
|
||||
|
||||
Due to its exclusive use as a top-level build, it does not construct a proper trace state.
|
||||
-/
|
||||
private def setupExternalModule
|
||||
def setupExternalModule
|
||||
(fileName : String) (header : ModuleHeader) (leanOpts : LeanOptions)
|
||||
: FetchM (Job ModuleSetup) := do
|
||||
let root ← getRootPackage
|
||||
|
||||
@@ -25,7 +25,7 @@ namespace Lake
|
||||
open Lean (Name)
|
||||
|
||||
/-- Fetch the package's direct dependencies. -/
|
||||
private def Package.recFetchDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
|
||||
def Package.recFetchDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
|
||||
(pure ·) <$> self.depConfigs.mapM fun cfg => do
|
||||
let some dep ← findPackageByName? cfg.name
|
||||
| error s!"{self.prettyName}: package not found for dependency '{cfg.name}' \
|
||||
@@ -37,7 +37,7 @@ public def Package.depsFacetConfig : PackageFacetConfig depsFacet :=
|
||||
mkFacetJobConfig recFetchDeps (buildable := false)
|
||||
|
||||
/-- Compute a topological ordering of the package's transitive dependencies. -/
|
||||
private def Package.recComputeTransDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
|
||||
def Package.recComputeTransDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
|
||||
(pure ·.toArray) <$> self.depConfigs.foldlM (init := OrdPackageSet.empty) fun deps cfg => do
|
||||
let some dep ← findPackageByName? cfg.name
|
||||
| error s!"{self.prettyName}: package not found for dependency '{cfg.name}' \
|
||||
@@ -53,7 +53,7 @@ public def Package.transDepsFacetConfig : PackageFacetConfig transDepsFacet :=
|
||||
Tries to download and unpack the package's cached build archive
|
||||
(e.g., from Reservoir or GitHub).
|
||||
-/
|
||||
private def Package.fetchOptBuildCacheCore (self : Package) : FetchM (Job Bool) := do
|
||||
def Package.fetchOptBuildCacheCore (self : Package) : FetchM (Job Bool) := do
|
||||
if self.preferReleaseBuild then
|
||||
self.optGitHubRelease.fetch
|
||||
else
|
||||
@@ -64,7 +64,7 @@ public def Package.optBuildCacheFacetConfig : PackageFacetConfig optBuildCacheFa
|
||||
mkFacetJobConfig (·.fetchOptBuildCacheCore)
|
||||
|
||||
/-- Tries to download the package's build cache (if configured). -/
|
||||
private def Package.maybeFetchBuildCache (self : Package) : FetchM (Job Bool) := do
|
||||
def Package.maybeFetchBuildCache (self : Package) : FetchM (Job Bool) := do
|
||||
let shouldFetch :=
|
||||
(← getTryCache) &&
|
||||
!(← self.buildDir.pathExists) && -- do not automatically clobber prebuilt artifacts
|
||||
@@ -77,7 +77,7 @@ private def Package.maybeFetchBuildCache (self : Package) : FetchM (Job Bool) :=
|
||||
return pure true
|
||||
|
||||
@[inline]
|
||||
private def Package.optFacetDetails (self : Package) (facet : Name) : JobM String := do
|
||||
def Package.optFacetDetails (self : Package) (facet : Name) : JobM String := do
|
||||
if (← getIsVerbose) then
|
||||
return s!" (see '{self.baseName}:{Name.eraseHead facet}' for details)"
|
||||
else
|
||||
@@ -87,7 +87,7 @@ private def Package.optFacetDetails (self : Package) (facet : Name) : JobM Strin
|
||||
Tries to download and unpack the package's cached build archive
|
||||
(e.g., from Reservoir or GitHub). Prints a warning on failure.
|
||||
-/
|
||||
private def Package.maybeFetchBuildCacheWithWarning (self : Package) := do
|
||||
def Package.maybeFetchBuildCacheWithWarning (self : Package) := do
|
||||
let job ← self.maybeFetchBuildCache
|
||||
job.mapM fun success => do
|
||||
unless success do
|
||||
@@ -102,7 +102,7 @@ private def Package.maybeFetchBuildCacheWithWarning (self : Package) := do
|
||||
Build the `extraDepTargets` for the package.
|
||||
Also, if the package is a dependency, maybe fetch its build cache.
|
||||
-/
|
||||
private def Package.recBuildExtraDepTargets (self : Package) : FetchM (Job Unit) :=
|
||||
def Package.recBuildExtraDepTargets (self : Package) : FetchM (Job Unit) :=
|
||||
withRegisterJob s!"{self.baseName}:extraDep" do
|
||||
let mut job := Job.nil s!"@{self.baseName}:extraDep"
|
||||
-- Fetch build cache if this package is a dependency
|
||||
@@ -118,7 +118,7 @@ public def Package.extraDepFacetConfig : PackageFacetConfig extraDepFacet :=
|
||||
mkFacetJobConfig Package.recBuildExtraDepTargets
|
||||
|
||||
/-- Compute the package's Reservoir barrel URL. -/
|
||||
private def Package.getBarrelUrl (self : Package) : JobM String := do
|
||||
def Package.getBarrelUrl (self : Package) : JobM String := do
|
||||
if self.scope.isEmpty then
|
||||
error "package has no Reservoir scope"
|
||||
let repo := GitRepo.mk self.dir
|
||||
@@ -132,7 +132,7 @@ private def Package.getBarrelUrl (self : Package) : JobM String := do
|
||||
return url
|
||||
|
||||
/-- Compute the package's GitHub release URL. -/
|
||||
private def Package.getReleaseUrl (self : Package) : JobM String := do
|
||||
def Package.getReleaseUrl (self : Package) : JobM String := do
|
||||
let repo := GitRepo.mk self.dir
|
||||
let repoUrl? := self.releaseRepo? <|> self.remoteUrl?
|
||||
let some repoUrl := repoUrl? <|> (← repo.getFilteredRemoteUrl?)
|
||||
@@ -144,7 +144,7 @@ private def Package.getReleaseUrl (self : Package) : JobM String := do
|
||||
return s!"{repoUrl}/releases/download/{tag}/{self.buildArchive}"
|
||||
|
||||
/-- Tries to download and unpack a build archive for the package from a URL. -/
|
||||
private def Package.fetchBuildArchive
|
||||
def Package.fetchBuildArchive
|
||||
(self : Package) (url : String) (archiveFile : FilePath)
|
||||
(headers : Array String := #[])
|
||||
: JobM PUnit := do
|
||||
@@ -157,7 +157,7 @@ private def Package.fetchBuildArchive
|
||||
untar archiveFile self.buildDir
|
||||
|
||||
@[inline]
|
||||
private def Package.mkOptBuildArchiveFacetConfig
|
||||
def Package.mkOptBuildArchiveFacetConfig
|
||||
{facet : Name} (archiveFile : Package → FilePath)
|
||||
(getUrl : Package → JobM String) (headers : Array String := #[])
|
||||
[FamilyDef FacetOut facet Bool]
|
||||
@@ -172,7 +172,7 @@ private def Package.mkOptBuildArchiveFacetConfig
|
||||
return false
|
||||
|
||||
@[inline]
|
||||
private def Package.mkBuildArchiveFacetConfig
|
||||
def Package.mkBuildArchiveFacetConfig
|
||||
{facet : Name} (optFacet : Name) (what : String)
|
||||
[FamilyDef FacetOut facet Unit]
|
||||
[FamilyDef FacetOut optFacet Bool]
|
||||
|
||||
@@ -34,11 +34,11 @@ public def mkBuildContext (ws : Workspace) (config : BuildConfig) : BaseIO Build
|
||||
}
|
||||
|
||||
/-- Unicode icons that make up the spinner in animation order. -/
|
||||
private def Monitor.spinnerFrames :=
|
||||
def Monitor.spinnerFrames :=
|
||||
#['⣾','⣷','⣯','⣟','⡿','⢿','⣻','⣽']
|
||||
|
||||
/-- Context of the Lake build monitor. -/
|
||||
private structure MonitorContext where
|
||||
structure MonitorContext where
|
||||
jobs : JobQueue
|
||||
out : IO.FS.Stream
|
||||
outLv : LogLevel
|
||||
@@ -55,7 +55,7 @@ private structure MonitorContext where
|
||||
.stream ctx.out ctx.outLv ctx.useAnsi
|
||||
|
||||
/-- State of the Lake build monitor. -/
|
||||
private structure MonitorState where
|
||||
structure MonitorState where
|
||||
jobNo : Nat := 0
|
||||
totalJobs : Nat := 0
|
||||
wantsRebuild : Bool := false
|
||||
@@ -65,9 +65,9 @@ private structure MonitorState where
|
||||
spinnerIdx : Fin Monitor.spinnerFrames.size := ⟨0, by decide⟩
|
||||
|
||||
/-- Monad of the Lake build monitor. -/
|
||||
private abbrev MonitorM := ReaderT MonitorContext <| StateT MonitorState BaseIO
|
||||
abbrev MonitorM := ReaderT MonitorContext <| StateT MonitorState BaseIO
|
||||
|
||||
@[inline] private def MonitorM.run
|
||||
@[inline] def MonitorM.run
|
||||
(ctx : MonitorContext) (s : MonitorState) (self : MonitorM α)
|
||||
: BaseIO (α × MonitorState) :=
|
||||
StateT.run (ReaderT.run self ctx) s
|
||||
@@ -80,23 +80,23 @@ def Ansi.resetLine : String :=
|
||||
"\x1B[2K\r"
|
||||
|
||||
/-- Like `IO.FS.Stream.flush`, but ignores errors. -/
|
||||
@[inline] private def flush (out : IO.FS.Stream) : BaseIO PUnit :=
|
||||
@[inline] def flush (out : IO.FS.Stream) : BaseIO PUnit :=
|
||||
out.flush |>.catchExceptions fun _ => pure ()
|
||||
|
||||
/-- Like `IO.FS.Stream.putStr`, but panics on errors. -/
|
||||
@[inline] private def print! (out : IO.FS.Stream) (s : String) : BaseIO PUnit :=
|
||||
@[inline] def print! (out : IO.FS.Stream) (s : String) : BaseIO PUnit :=
|
||||
out.putStr s |>.catchExceptions fun e =>
|
||||
panic! s!"[{decl_name%} failed: {e}] {repr s}"
|
||||
|
||||
namespace Monitor
|
||||
|
||||
@[inline] private def print (s : String) : MonitorM PUnit := do
|
||||
@[inline] def print (s : String) : MonitorM PUnit := do
|
||||
print! (← read).out s
|
||||
|
||||
@[inline] private nonrec def flush : MonitorM PUnit := do
|
||||
@[inline] nonrec def flush : MonitorM PUnit := do
|
||||
flush (← read).out
|
||||
|
||||
private def renderProgress (running unfinished : Array OpaqueJob) (h : 0 < unfinished.size) : MonitorM PUnit := do
|
||||
def renderProgress (running unfinished : Array OpaqueJob) (h : 0 < unfinished.size) : MonitorM PUnit := do
|
||||
let {jobNo, totalJobs, ..} ← get
|
||||
let {useAnsi, showProgress, ..} ← read
|
||||
if showProgress ∧ useAnsi then
|
||||
@@ -114,7 +114,7 @@ private def renderProgress (running unfinished : Array OpaqueJob) (h : 0 < unfin
|
||||
print s!"{resetCtrl}{spinnerIcon} [{jobNo}/{totalJobs}] {caption}"
|
||||
flush
|
||||
|
||||
private def reportJob (job : OpaqueJob) : MonitorM PUnit := do
|
||||
def reportJob (job : OpaqueJob) : MonitorM PUnit := do
|
||||
let {jobNo, totalJobs, ..} ← get
|
||||
let {failLv, outLv, showOptional, out, useAnsi, showProgress, minAction, showTime, ..} ← read
|
||||
let {task, caption, optional, ..} := job
|
||||
@@ -153,7 +153,7 @@ where
|
||||
else if ms > 1000 then s!"{(ms) / 1000}.{(ms+50) / 100 % 10}s"
|
||||
else s!"{ms}ms"
|
||||
|
||||
private def poll (unfinished : Array OpaqueJob) : MonitorM (Array OpaqueJob × Array OpaqueJob) := do
|
||||
def poll (unfinished : Array OpaqueJob) : MonitorM (Array OpaqueJob × Array OpaqueJob) := do
|
||||
let newJobs ← (← read).jobs.modifyGet ((·, #[]))
|
||||
modify fun s => {s with totalJobs := s.totalJobs + newJobs.size}
|
||||
let pollJobs := fun (running, unfinished) job => do
|
||||
@@ -169,7 +169,7 @@ private def poll (unfinished : Array OpaqueJob) : MonitorM (Array OpaqueJob × A
|
||||
let r ← unfinished.foldlM pollJobs (#[], #[])
|
||||
newJobs.foldlM pollJobs r
|
||||
|
||||
private def sleep : MonitorM PUnit := do
|
||||
def sleep : MonitorM PUnit := do
|
||||
let now ← IO.monoMsNow
|
||||
let lastUpdate := (← get).lastUpdate
|
||||
let sleepTime : Nat := (← read).updateFrequency - (now - lastUpdate)
|
||||
@@ -178,14 +178,14 @@ private def sleep : MonitorM PUnit := do
|
||||
let now ← IO.monoMsNow
|
||||
modify fun s => {s with lastUpdate := now}
|
||||
|
||||
private partial def loop (unfinished : Array OpaqueJob) : MonitorM PUnit := do
|
||||
partial def loop (unfinished : Array OpaqueJob) : MonitorM PUnit := do
|
||||
let (running, unfinished) ← poll unfinished
|
||||
if h : 0 < unfinished.size then
|
||||
renderProgress running unfinished h
|
||||
sleep
|
||||
loop unfinished
|
||||
|
||||
private def main (init : Array OpaqueJob) : MonitorM PUnit := do
|
||||
def main (init : Array OpaqueJob) : MonitorM PUnit := do
|
||||
loop init
|
||||
let resetCtrl ← modifyGet fun s => (s.resetCtrl, {s with resetCtrl := ""})
|
||||
unless resetCtrl.isEmpty do
|
||||
|
||||
@@ -32,7 +32,7 @@ public abbrev BuildStore :=
|
||||
namespace BuildStore
|
||||
|
||||
set_option linter.deprecated false in
|
||||
private def getModuleFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
|
||||
def getModuleFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
|
||||
(k : BuildKey) (v : Job (BuildData k)) : Option (Name × Job α) :=
|
||||
match k with
|
||||
| .moduleFacet m f
|
||||
@@ -67,7 +67,7 @@ public def collectModuleFacetMap
|
||||
res := res.insert m job
|
||||
return res
|
||||
|
||||
private def getPackageFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
|
||||
def getPackageFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
|
||||
(k : BuildKey) (v : Job (BuildData k)) : Option (Job α) :=
|
||||
match k with
|
||||
| .packageFacet p f =>
|
||||
@@ -88,7 +88,7 @@ public def collectPackageFacetArray
|
||||
res := res.push job
|
||||
return res
|
||||
|
||||
private def getTargetFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
|
||||
def getTargetFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
|
||||
(k : BuildKey) (v : Job (BuildData k)) : Option (Job α) :=
|
||||
match k with
|
||||
| .targetFacet _ _ f =>
|
||||
|
||||
@@ -16,7 +16,7 @@ open Lean
|
||||
namespace Lake
|
||||
|
||||
variable (defaultPkg : Package) (root : PartialBuildKey) in
|
||||
private def PartialBuildKey.fetchInCoreAux
|
||||
def PartialBuildKey.fetchInCoreAux
|
||||
(self : PartialBuildKey) (facetless : Bool := false)
|
||||
: FetchM ((key : BuildKey) × Job (BuildData key)) := do
|
||||
match self with
|
||||
@@ -98,7 +98,7 @@ Fetches the target specified by this key, resolving gaps as needed.
|
||||
(·.2.toOpaque) <$> fetchInCore defaultPkg self
|
||||
|
||||
variable (root : BuildKey) in
|
||||
private def BuildKey.fetchCore
|
||||
def BuildKey.fetchCore
|
||||
(self : BuildKey)
|
||||
: FetchM (Job (BuildData self)) :=
|
||||
match self with
|
||||
|
||||
@@ -140,7 +140,7 @@ def resolveTargetInWorkspace
|
||||
else
|
||||
throw <| CliError.unknownTarget target
|
||||
|
||||
private def resolveTargetLikeSpec
|
||||
def resolveTargetLikeSpec
|
||||
(ws : Workspace) (spec : String) (facet : Name) (isMaybePath explicit := false)
|
||||
: Except CliError (Array BuildSpec) := do
|
||||
match spec.split '/' |>.toList with
|
||||
@@ -169,7 +169,7 @@ private def resolveTargetLikeSpec
|
||||
else
|
||||
throw <| CliError.invalidTargetSpec spec '/'
|
||||
|
||||
private def resolveTargetBaseSpec
|
||||
def resolveTargetBaseSpec
|
||||
(ws : Workspace) (spec : String) (facet : Name)
|
||||
: EIO CliError (Array BuildSpec) := do
|
||||
if spec.startsWith "@" then
|
||||
|
||||
@@ -391,14 +391,14 @@ def serviceNotFound (service : String) (configuredServices : Array CacheServiceC
|
||||
let msg := s!"{msg}; configured services:\n"
|
||||
configuredServices.foldl (· ++ s!" {·.name}") msg
|
||||
|
||||
@[inline] private def cacheToolchain (pkg : Package) (toolchain : CacheToolchain) : CacheToolchain :=
|
||||
@[inline] def cacheToolchain (pkg : Package) (toolchain : CacheToolchain) : CacheToolchain :=
|
||||
if pkg.fixedToolchain || pkg.bootstrap then .none else toolchain
|
||||
|
||||
@[inline] private def cachePlatform (pkg : Package) (platform : CachePlatform) : CachePlatform :=
|
||||
@[inline] def cachePlatform (pkg : Package) (platform : CachePlatform) : CachePlatform :=
|
||||
if pkg.isPlatformIndependent then .none else platform
|
||||
|
||||
-- since 2026-02-19
|
||||
private def endpointDeprecation : String :=
|
||||
def endpointDeprecation : String :=
|
||||
"configuring the cache service via environment variables is deprecated; use --service instead"
|
||||
|
||||
protected def get : CliM PUnit := do
|
||||
|
||||
@@ -16,13 +16,13 @@ import Lake.Load.Lean.Elab
|
||||
namespace Lake
|
||||
open Toml Lean System PrettyPrinter
|
||||
|
||||
private partial def descopeSyntax : Syntax → Syntax
|
||||
partial def descopeSyntax : Syntax → Syntax
|
||||
| .ident info rawVal val preresolved =>
|
||||
.ident info rawVal val.eraseMacroScopes preresolved
|
||||
| .node info k args => .node info k <| args.map descopeSyntax
|
||||
| stx => stx
|
||||
|
||||
private def descopeTSyntax (stx : TSyntax k) : TSyntax k :=
|
||||
def descopeTSyntax (stx : TSyntax k) : TSyntax k :=
|
||||
⟨descopeSyntax stx.raw⟩
|
||||
|
||||
public def Package.mkConfigString (pkg : Package) (lang : ConfigLang) : LogIO String := do
|
||||
|
||||
@@ -23,7 +23,7 @@ open Lean System Toml
|
||||
|
||||
/-! ## General Helpers -/
|
||||
|
||||
private local instance : BEq FilePath where
|
||||
local instance : BEq FilePath where
|
||||
beq a b := a.normalize == b.normalize
|
||||
|
||||
class EncodeField (σ : Type u) (name : Name) (α : Type u) where
|
||||
@@ -69,7 +69,7 @@ public def Toml.encodeLeanOptions (opts : Array LeanOption) : Table :=
|
||||
public instance : ToToml (Array LeanOption) where
|
||||
toToml opts := .table .missing <| encodeLeanOptions opts
|
||||
|
||||
@[inline] private def encodeSingleton? [ToToml? α] (name : Name) (a : α) : Option Value :=
|
||||
@[inline] def encodeSingleton? [ToToml? α] (name : Name) (a : α) : Option Value :=
|
||||
toToml? a |>.map fun v => toToml <| Table.empty.insert name v
|
||||
|
||||
mutual
|
||||
@@ -149,7 +149,7 @@ public instance : ToToml Dependency := ⟨(toToml ·.toToml)⟩
|
||||
|
||||
/-! ## Package & Target Configuration Encoders -/
|
||||
|
||||
private meta def genToToml
|
||||
meta def genToToml
|
||||
(cmds : Array Command)
|
||||
(tyName : Name) [info : ConfigInfo tyName]
|
||||
(exclude : Array Name := #[])
|
||||
|
||||
@@ -97,7 +97,7 @@ public partial def parse
|
||||
else
|
||||
loop 2 {} (lfPos.next h)
|
||||
|
||||
@[inline] private partial def loadCore
|
||||
@[inline] partial def loadCore
|
||||
(h : IO.FS.Handle) (fileName : String) (platformIndependent : Bool)
|
||||
: LogIO CacheMap := do
|
||||
let rec loop (i : Nat) (cache : CacheMap) := do
|
||||
@@ -575,7 +575,7 @@ def uploadS3
|
||||
| .error e =>
|
||||
error s!"curl produced invalid JSON output: {e}; received:\n{out.stderr}"
|
||||
|
||||
private structure CacheServiceImpl where
|
||||
structure CacheServiceImpl where
|
||||
mk ::
|
||||
name? : Option CacheServiceName := none
|
||||
/- S3 Bucket -/
|
||||
@@ -641,11 +641,11 @@ namespace CacheService
|
||||
/-- The MIME type of a Lake/Reservoir artifact. -/
|
||||
public def artifactContentType : String := "application/vnd.reservoir.artifact"
|
||||
|
||||
private def appendScope (endpoint : String) (scope : String) : String :=
|
||||
def appendScope (endpoint : String) (scope : String) : String :=
|
||||
scope.split '/' |>.fold (init := endpoint) fun s component =>
|
||||
uriEncode component.copy (s.push '/')
|
||||
|
||||
private def s3ArtifactUrl (contentHash : Hash) (service : CacheService) (scope : CacheServiceScope) : String :=
|
||||
def s3ArtifactUrl (contentHash : Hash) (service : CacheService) (scope : CacheServiceScope) : String :=
|
||||
let endpoint :=
|
||||
match scope.impl with
|
||||
| .repo scope => appendScope service.impl.artifactEndpoint scope
|
||||
@@ -687,27 +687,27 @@ public def uploadArtifact
|
||||
|
||||
/-! ## Multi-Artifact Transfer -/
|
||||
|
||||
private inductive TransferKind
|
||||
inductive TransferKind
|
||||
| get
|
||||
| put
|
||||
deriving DecidableEq
|
||||
|
||||
private structure TransferInfo where
|
||||
structure TransferInfo where
|
||||
url : String
|
||||
path : FilePath
|
||||
descr : ArtifactDescr
|
||||
|
||||
private structure TransferConfig where
|
||||
structure TransferConfig where
|
||||
kind : TransferKind
|
||||
scope : CacheServiceScope
|
||||
infos : Array TransferInfo
|
||||
key : String := ""
|
||||
|
||||
private structure TransferState where
|
||||
structure TransferState where
|
||||
didError : Bool := false
|
||||
numSuccesses : Nat := 0
|
||||
|
||||
private partial def monitorTransfer
|
||||
partial def monitorTransfer
|
||||
(cfg : TransferConfig) (h hOut : IO.FS.Handle) (s : TransferState)
|
||||
: LoggerIO TransferState := do
|
||||
let line ← h.getLine
|
||||
@@ -781,7 +781,7 @@ where
|
||||
logError msg
|
||||
logVerbose s!"curl JSON: {line.trimAsciiEnd}"
|
||||
|
||||
private def transferArtifacts
|
||||
def transferArtifacts
|
||||
(cfg : TransferConfig)
|
||||
: LoggerIO Unit := IO.FS.withTempFile fun h path => do
|
||||
let args ← id do
|
||||
@@ -941,7 +941,7 @@ public def uploadArtifacts
|
||||
/-- The MIME type of a Lake/Reservoir input-to-output mappings for a Git revision. -/
|
||||
public def mapContentType : String := "application/vnd.reservoir.outputs+json-lines"
|
||||
|
||||
private def s3RevisionUrl
|
||||
def s3RevisionUrl
|
||||
(rev : String) (service : CacheService) (scope : CacheServiceScope)
|
||||
(platform := CachePlatform.none) (toolchain := CacheToolchain.none)
|
||||
: String :=
|
||||
|
||||
@@ -78,7 +78,7 @@ as the facet kind for modules.
|
||||
Returns the facet kind for an Lake target namespace.
|
||||
Used by the `builtin_facet` macro.
|
||||
-/
|
||||
private def facetKindForNamespace (ns : Name) : Name :=
|
||||
def facetKindForNamespace (ns : Name) : Name :=
|
||||
match ns with
|
||||
| `Lake.Package => Package.facetKind
|
||||
| `Lake.Module => Module.facetKind
|
||||
|
||||
@@ -38,7 +38,7 @@ scoped syntax (name := configDecl)
|
||||
instance : Coe Ident (TSyntax ``Term.structInstLVal) where
|
||||
coe stx := Unhygienic.run `(Term.structInstLVal| $stx:ident)
|
||||
|
||||
private structure FieldView where
|
||||
structure FieldView where
|
||||
ref : Syntax
|
||||
mods : TSyntax ``Command.declModifiers := Unhygienic.run `(declModifiers|)
|
||||
id : Ident
|
||||
@@ -48,7 +48,7 @@ private structure FieldView where
|
||||
decl? : Option (TSyntax ``structSimpleBinder) := none
|
||||
parent : Bool := false
|
||||
|
||||
private structure FieldMetadata where
|
||||
structure FieldMetadata where
|
||||
cmds : Array Command := #[]
|
||||
fields : Term := Unhygienic.run `(Array.empty)
|
||||
|
||||
@@ -56,7 +56,7 @@ private structure FieldMetadata where
|
||||
-- quotations and are called only by `macro`s, so we disable the option for them manually.
|
||||
set_option internal.parseQuotWithCurrentStage false
|
||||
|
||||
private meta def mkConfigAuxDecls
|
||||
meta def mkConfigAuxDecls
|
||||
(vis? : Option (TSyntax ``visibility))
|
||||
(structId : Ident) (structArity : Nat) (structTy : Term) (views : Array FieldView)
|
||||
: MacroM (Array Command) := do
|
||||
@@ -123,7 +123,7 @@ private meta def mkConfigAuxDecls
|
||||
let emptyInst ← `( $[$vis?:visibility]? instance $instId:ident : EmptyCollection $structTy := ⟨{}⟩)
|
||||
return data.cmds.push fieldsDef |>.push fieldsInst |>.push infoInst |>.push emptyInst
|
||||
|
||||
private meta def mkFieldView (stx : TSyntax ``configField) : MacroM FieldView := withRef stx do
|
||||
meta def mkFieldView (stx : TSyntax ``configField) : MacroM FieldView := withRef stx do
|
||||
let `(configField|$mods:declModifiers $[$id? @]? $ids,* $bs* : $rty $[:= $val?]?) := stx
|
||||
| Macro.throwError "ill-formed configuration field declaration"
|
||||
let bvs ← expandBinders bs
|
||||
@@ -137,7 +137,7 @@ private meta def mkFieldView (stx : TSyntax ``configField) : MacroM FieldView :=
|
||||
let decl ← `(structSimpleBinder|$mods:declModifiers $id : $type := $defVal)
|
||||
return {ref := stx, mods, id, ids, type, defVal, decl? := decl}
|
||||
|
||||
private meta def mkParentFieldView (stx : TSyntax ``structParent) : MacroM FieldView := withRef stx do
|
||||
meta def mkParentFieldView (stx : TSyntax ``structParent) : MacroM FieldView := withRef stx do
|
||||
let `(structParent|$[$id? :]? $type) := stx
|
||||
| Macro.throwError "ill-formed parent"
|
||||
let id ← do
|
||||
|
||||
@@ -36,7 +36,7 @@ mutual
|
||||
A pattern. Matches some subset of the values of a type.
|
||||
May also include a declarative description.
|
||||
-/
|
||||
public structure Pattern (α : Type u) (β : Type v) where
|
||||
structure Pattern (α : Type u) (β : Type v) where
|
||||
/-- Returns whether the value matches the pattern. -/
|
||||
filter : α → Bool
|
||||
/-- An optional name for the filter. -/
|
||||
@@ -49,7 +49,7 @@ public structure Pattern (α : Type u) (β : Type v) where
|
||||
An abstract declarative pattern.
|
||||
Augments another pattern description `β` with logical connectives.
|
||||
-/
|
||||
public inductive PatternDescr (α : Type u) (β : Type v)
|
||||
inductive PatternDescr (α : Type u) (β : Type v)
|
||||
/-- Matches a value that does not satisfy the pattern. -/
|
||||
| not (p : Pattern α β)
|
||||
/-- Matches a value that satisfies every pattern. Short-circuits. -/
|
||||
|
||||
@@ -22,7 +22,7 @@ open System Lean
|
||||
namespace Lake
|
||||
|
||||
/-- Unsafe implementation of `evalConstCheck`. -/
|
||||
private unsafe def unsafeEvalConstCheck
|
||||
unsafe def unsafeEvalConstCheck
|
||||
(env : Environment) (opts : Options) (α) [TypeName α] (const : Name)
|
||||
: Except String α :=
|
||||
match env.find? const with
|
||||
@@ -44,12 +44,12 @@ Like `Lean.Environment.evalConstCheck`,
|
||||
but with plain universe-polymorphic `Except`.
|
||||
-/
|
||||
@[implemented_by unsafeEvalConstCheck]
|
||||
private opaque evalConstCheck
|
||||
opaque evalConstCheck
|
||||
(env : Environment) (opts : Options) (α : Type) [TypeName α] (const : Name)
|
||||
: Except String α
|
||||
|
||||
/-- Construct a `DNameMap` from the declarations tagged with `attr`. -/
|
||||
private def mkDTagMap
|
||||
def mkDTagMap
|
||||
(env : Environment) (attr : OrderedTagAttribute)
|
||||
[Monad m] (f : (n : Name) → m (β n))
|
||||
: m (DNameMap β) :=
|
||||
@@ -58,7 +58,7 @@ private def mkDTagMap
|
||||
return map.insert declName <| ← f declName
|
||||
|
||||
/-- Construct a `NameMap` from the declarations tagged with `attr`. -/
|
||||
private def mkTagMap
|
||||
def mkTagMap
|
||||
(env : Environment) (attr : OrderedTagAttribute)
|
||||
[Monad m] (f : (n : Name) → m β)
|
||||
: m (NameMap β) :=
|
||||
@@ -67,7 +67,7 @@ private def mkTagMap
|
||||
return map.insert declName <| ← f declName
|
||||
|
||||
/-- Construct a `OrdNameMap` from the declarations tagged with `attr`. -/
|
||||
private def mkOrdTagMap
|
||||
def mkOrdTagMap
|
||||
(env : Environment) (attr : OrderedTagAttribute)
|
||||
[Monad m] (f : (n : Name) → m β)
|
||||
: m (OrdNameMap β) :=
|
||||
|
||||
@@ -55,7 +55,7 @@ That is, Lake ignores the `-` suffix.
|
||||
@[inline] public def Manifest.version : StdVer := {major := 1, minor := 2}
|
||||
|
||||
/-- Manifest version `0.6.0` package entry. For backwards compatibility. -/
|
||||
private inductive PackageEntryV6
|
||||
inductive PackageEntryV6
|
||||
| path (name : Name) (opts : NameMap String) (inherited : Bool) (dir : FilePath)
|
||||
| git (name : Name) (opts : NameMap String) (inherited : Bool) (url : String) (rev : String)
|
||||
(inputRev? : Option String) (subDir? : Option FilePath)
|
||||
@@ -166,7 +166,7 @@ public instance : FromJson PackageEntry := ⟨PackageEntry.fromJson?⟩
|
||||
@[inline] public def inDirectory (pkgDir : FilePath) (entry : PackageEntry) : PackageEntry :=
|
||||
{entry with src := match entry.src with | .path dir => .path (pkgDir / dir) | s => s}
|
||||
|
||||
private def ofV6 : PackageEntryV6 → PackageEntry
|
||||
def ofV6 : PackageEntryV6 → PackageEntry
|
||||
| .path name _opts inherited dir =>
|
||||
{name, inherited, src := .path dir}
|
||||
| .git name _opts inherited url rev inputRev? subDir? =>
|
||||
@@ -200,7 +200,7 @@ public protected def toJson (self : Manifest) : Json :=
|
||||
|
||||
public instance : ToJson Manifest := ⟨Manifest.toJson⟩
|
||||
|
||||
private def getVersion (obj : JsonObject) : Except String SemVerCore := do
|
||||
def getVersion (obj : JsonObject) : Except String SemVerCore := do
|
||||
let ver : Json ← obj.get "version" <|> obj.get "schemaVersion"
|
||||
let ver : SemVerCore ←
|
||||
match ver with
|
||||
@@ -216,7 +216,7 @@ private def getVersion (obj : JsonObject) : Except String SemVerCore := do
|
||||
else
|
||||
return ver
|
||||
|
||||
private def getPackages (ver : StdVer) (obj : JsonObject) : Except String (Array PackageEntry) := do
|
||||
def getPackages (ver : StdVer) (obj : JsonObject) : Except String (Array PackageEntry) := do
|
||||
if ver < {minor := 7} then
|
||||
(·.map PackageEntry.ofV6) <$> obj.getD "packages" #[]
|
||||
else
|
||||
|
||||
@@ -94,7 +94,7 @@ abbrev ResolveT m := DepStackT <| StateT Workspace m
|
||||
: m (α × Workspace) := x.run stack |>.run ws
|
||||
|
||||
/-- Recursively run a `ResolveT` monad starting from the workspace's root. -/
|
||||
@[specialize] private def Workspace.runResolveT
|
||||
@[specialize] def Workspace.runResolveT
|
||||
[Monad m] [MonadError m] (ws : Workspace)
|
||||
(go : RecFetchFn Package PUnit (ResolveT m))
|
||||
(root := ws.root) (stack : DepStack := {})
|
||||
@@ -113,7 +113,7 @@ resolved in reverse order before recursing to the dependencies' dependencies.
|
||||
|
||||
See `Workspace.updateAndMaterializeCore` for more details.
|
||||
-/
|
||||
@[inline] private def Workspace.resolveDepsCore
|
||||
@[inline] def Workspace.resolveDepsCore
|
||||
[Monad m] [MonadError m] [MonadLiftT LogIO m] (ws : Workspace)
|
||||
(resolve : Package → Dependency → Workspace → m MaterializedDep)
|
||||
(root : Package := ws.root) (stack : DepStack := {})
|
||||
@@ -148,7 +148,7 @@ abbrev UpdateT := StateT (NameMap PackageEntry)
|
||||
Reuse manifest versions of root packages that should not be updated.
|
||||
Also, move the packages directory if its location has changed.
|
||||
-/
|
||||
private def reuseManifest
|
||||
def reuseManifest
|
||||
(ws : Workspace) (toUpdate : NameSet)
|
||||
: UpdateT LoggerIO PUnit := do
|
||||
let rootName := ws.root.prettyName
|
||||
@@ -178,7 +178,7 @@ private def reuseManifest
|
||||
logWarning s!"{rootName}: ignoring previous manifest because it failed to load: {e}"
|
||||
|
||||
/-- Add a package dependency's manifest entries to the update state. -/
|
||||
private def addDependencyEntries (dep : MaterializedDep) : UpdateT LoggerIO PUnit := do
|
||||
def addDependencyEntries (dep : MaterializedDep) : UpdateT LoggerIO PUnit := do
|
||||
match (← Manifest.load dep.manifestFile |>.toBaseIO) with
|
||||
| .ok manifest =>
|
||||
manifest.packages.forM fun entry => do
|
||||
@@ -191,7 +191,7 @@ private def addDependencyEntries (dep : MaterializedDep) : UpdateT LoggerIO PUni
|
||||
logWarning s!"{dep.prettyName}: ignoring manifest because it failed to load: {e}"
|
||||
|
||||
/-- Materialize a single dependency, updating it if desired. -/
|
||||
private def updateAndMaterializeDep
|
||||
def updateAndMaterializeDep
|
||||
(ws : Workspace) (pkg : Package) (dep : Dependency)
|
||||
: UpdateT LoggerIO MaterializedDep := do
|
||||
if let some entry ← fetch? dep.name then
|
||||
@@ -218,7 +218,7 @@ Used, for instance, if the toolchain is updated and no Elan is detected.
|
||||
def restartCode : ExitCode := 4
|
||||
|
||||
/-- The toolchain information of a package. -/
|
||||
private structure ToolchainCandidate where
|
||||
structure ToolchainCandidate where
|
||||
/-- The name of the package which provided the toolchain candidate. -/
|
||||
src : Name
|
||||
/-- The version of the toolchain candidate. -/
|
||||
@@ -227,7 +227,7 @@ private structure ToolchainCandidate where
|
||||
fixed : Bool := false
|
||||
|
||||
private structure ToolchainState where
|
||||
/-- The name of dependency which provided the current candidate toolchain. -/
|
||||
/-- The name of depedency which provided the current candidate toolchain. -/
|
||||
src : Name
|
||||
/-- The current candidate toolchain version (if any). -/
|
||||
tc? : Option ToolchainVer
|
||||
|
||||
@@ -342,20 +342,20 @@ public instance : DecodeToml CacheServiceKind := ⟨CacheServiceKind.decodeToml
|
||||
public structure TomlFieldInfo (σ : Type) where
|
||||
decodeAndSet : Table → Value → σ → DecodeM σ
|
||||
|
||||
private abbrev TomlFieldInfos (σ : Type) :=
|
||||
abbrev TomlFieldInfos (σ : Type) :=
|
||||
NameMap (TomlFieldInfo σ)
|
||||
|
||||
private def TomlFieldInfos.empty : TomlFieldInfos σ := {}
|
||||
def TomlFieldInfos.empty : TomlFieldInfos σ := {}
|
||||
|
||||
@[inline] private def TomlFieldInfos.insert
|
||||
@[inline] def TomlFieldInfos.insert
|
||||
(name : Name) [DecodeField σ name] (infos : TomlFieldInfos σ)
|
||||
: TomlFieldInfos σ :=
|
||||
NameMap.insert infos name ⟨decodeField name⟩
|
||||
|
||||
private class ConfigTomlInfo (α : Type) where
|
||||
class ConfigTomlInfo (α : Type) where
|
||||
fieldInfos : TomlFieldInfos α
|
||||
|
||||
private def decodeTomlConfig
|
||||
def decodeTomlConfig
|
||||
[EmptyCollection α] [ConfigTomlInfo α] (t : Table)
|
||||
: Toml.DecodeM α :=
|
||||
t.foldM (init := ∅) fun cfg key val => do
|
||||
@@ -373,7 +373,7 @@ section
|
||||
-- we can't use `in` as it is parsed as a single command and so the option would not influence the
|
||||
-- parser.
|
||||
set_option internal.parseQuotWithCurrentStage false
|
||||
private meta def genDecodeToml
|
||||
meta def genDecodeToml
|
||||
(cmds : Array Command)
|
||||
(tyName : Name) [info : ConfigInfo tyName]
|
||||
(exclude : Array Name := {})
|
||||
@@ -417,12 +417,12 @@ local macro "gen_toml_decoders%" : command => do
|
||||
|
||||
gen_toml_decoders%
|
||||
|
||||
private structure DecodeTargetState (pkg : Name) where
|
||||
structure DecodeTargetState (pkg : Name) where
|
||||
decls : Array (PConfigDecl pkg) := #[]
|
||||
map : DNameMap (NConfigDecl pkg) := {}
|
||||
exeRoots : Lean.NameMap Name := {}
|
||||
|
||||
private def decodeTargetDecls
|
||||
def decodeTargetDecls
|
||||
(pkg : Name) (prettyName : String) (t : Table)
|
||||
: DecodeM (Array (PConfigDecl pkg) × DNameMap (NConfigDecl pkg)) := do
|
||||
let r : DecodeTargetState pkg := {}
|
||||
@@ -505,7 +505,7 @@ public def loadTomlConfig (cfg : LoadConfig) : LogIO LakefileConfig := do
|
||||
/-! ## System Configuration Loader -/
|
||||
|
||||
/-- Load the system Lake configuration from a TOML file. -/
|
||||
private def loadLakeConfigCore (path : FilePath) (lakeEnv : Lake.Env) : LogIO LoadedLakeConfig := do
|
||||
def loadLakeConfigCore (path : FilePath) (lakeEnv : Lake.Env) : LogIO LoadedLakeConfig := do
|
||||
let input ← IO.FS.readFile path
|
||||
let ictx := mkInputContext input path.toString
|
||||
match (← loadToml ictx |>.toBaseIO) with
|
||||
@@ -564,7 +564,7 @@ private def loadLakeConfigCore (path : FilePath) (lakeEnv : Lake.Env) : LogIO Lo
|
||||
| .error log =>
|
||||
errorWithLog <| log.forM fun msg => do logError (← msg.toString)
|
||||
|
||||
private def LoadedLakeConfig.mkDefault (lakeEnv : Lake.Env) : LoadedLakeConfig :=
|
||||
def LoadedLakeConfig.mkDefault (lakeEnv : Lake.Env) : LoadedLakeConfig :=
|
||||
let defaultService := .reservoirService lakeEnv.reservoirApiUrl
|
||||
let defaultServiceConfig := {name := "reservoir", kind := .reservoir, apiEndpoint := lakeEnv.reservoirApiUrl}
|
||||
{
|
||||
|
||||
@@ -9,7 +9,7 @@ prelude
|
||||
public import Init.Prelude
|
||||
import Init.Tactics
|
||||
|
||||
private opaque POpaque.nonemptyType.{u} : NonemptyType.{u}
|
||||
opaque POpaque.nonemptyType.{u} : NonemptyType.{u}
|
||||
|
||||
/-- An value of unknown type in a specific universe. -/
|
||||
public def POpaque : Type u := POpaque.nonemptyType.type
|
||||
|
||||
@@ -53,7 +53,7 @@ where
|
||||
termination_by p
|
||||
|
||||
/-- Returns whether a version component is a wildcard. -/
|
||||
private def isWildVer (s : String.Slice) : Bool :=
|
||||
def isWildVer (s : String.Slice) : Bool :=
|
||||
let p := s.startPos
|
||||
if h : p ≠ s.endPos then
|
||||
if p.next h = s.endPos then
|
||||
@@ -62,7 +62,7 @@ private def isWildVer (s : String.Slice) : Bool :=
|
||||
else false
|
||||
else false
|
||||
|
||||
@[inline] private def parseVerNat (what : String) (s : String.Slice) : EStateM String σ Nat := do
|
||||
@[inline] def parseVerNat (what : String) (s : String.Slice) : EStateM String σ Nat := do
|
||||
let some v := s.toNat?
|
||||
| throw s!"invalid {what} version: expected numeral, got '{s.copy}'"
|
||||
return v
|
||||
@@ -70,7 +70,7 @@ private def isWildVer (s : String.Slice) : Bool :=
|
||||
inductive VerComponent
|
||||
| none | wild | nat (n : Nat)
|
||||
|
||||
private def parseVerComponent {σ} (what : String) (s? : Option String.Slice) : EStateM String σ VerComponent := do
|
||||
def parseVerComponent {σ} (what : String) (s? : Option String.Slice) : EStateM String σ VerComponent := do
|
||||
let some s := s?
|
||||
| return .none
|
||||
if isWildVer s then
|
||||
@@ -103,14 +103,14 @@ where
|
||||
nextUntilWhitespace (p.next h)
|
||||
termination_by p
|
||||
|
||||
private def parseSpecialDescr (s : String) : EStateM String s.Pos String := do
|
||||
def parseSpecialDescr (s : String) : EStateM String s.Pos String := do
|
||||
let some specialDescr ← parseSpecialDescr? s
|
||||
| return ""
|
||||
if specialDescr.isEmpty then
|
||||
throw "invalid version: '-' suffix cannot be empty"
|
||||
return specialDescr
|
||||
|
||||
private def runVerParse
|
||||
def runVerParse
|
||||
(s : String) (x : (s : String) → EStateM String s.Pos α)
|
||||
(startPos := s.startPos) (endPos := s.endPos)
|
||||
: Except String α :=
|
||||
|
||||
@@ -143,7 +143,7 @@ object * object_compactor::copy_object(object * o) {
|
||||
void object_compactor::insert_sarray(object * o) {
|
||||
size_t sz = lean_sarray_size(o);
|
||||
unsigned elem_sz = lean_sarray_elem_size(o);
|
||||
size_t obj_sz = sizeof(lean_sarray_object) + elem_sz*sz;
|
||||
size_t obj_sz = lean_usize_add_checked(sizeof(lean_sarray_object), lean_usize_mul_checked(elem_sz, sz));
|
||||
lean_sarray_object * new_o = (lean_sarray_object*)alloc(obj_sz);
|
||||
lean_set_non_heap_header_for_big((lean_object*)new_o, LeanScalarArray, elem_sz);
|
||||
new_o->m_size = sz;
|
||||
@@ -155,7 +155,7 @@ void object_compactor::insert_sarray(object * o) {
|
||||
void object_compactor::insert_string(object * o) {
|
||||
size_t sz = lean_string_size(o);
|
||||
size_t len = lean_string_len(o);
|
||||
size_t obj_sz = sizeof(lean_string_object) + sz;
|
||||
size_t obj_sz = lean_usize_add_checked(sizeof(lean_string_object), sz);
|
||||
lean_string_object * new_o = (lean_string_object*)alloc(obj_sz);
|
||||
lean_set_non_heap_header_for_big((lean_object*)new_o, LeanString, 0);
|
||||
new_o->m_size = sz;
|
||||
@@ -214,7 +214,7 @@ bool object_compactor::insert_array(object * o) {
|
||||
}
|
||||
if (missing_children)
|
||||
return false;
|
||||
size_t obj_sz = sizeof(lean_array_object) + sizeof(void*)*sz;
|
||||
size_t obj_sz = lean_usize_add_checked(sizeof(lean_array_object), lean_usize_mul_checked(sizeof(void*), sz));
|
||||
lean_array_object * new_o = (lean_array_object*)alloc(obj_sz);
|
||||
lean_set_non_heap_header_for_big((lean_object*)new_o, LeanArray, 0);
|
||||
new_o->m_size = sz;
|
||||
@@ -274,8 +274,8 @@ bool object_compactor::insert_promise(object * o) {
|
||||
void object_compactor::insert_mpz(object * o) {
|
||||
#ifdef LEAN_USE_GMP
|
||||
size_t nlimbs = mpz_size(to_mpz(o)->m_value.m_val);
|
||||
size_t data_sz = sizeof(mp_limb_t) * nlimbs;
|
||||
size_t sz = sizeof(mpz_object) + data_sz;
|
||||
size_t data_sz = lean_usize_mul_checked(sizeof(mp_limb_t), nlimbs);
|
||||
size_t sz = lean_usize_add_checked(sizeof(mpz_object), data_sz);
|
||||
mpz_object * new_o = (mpz_object *)alloc(sz);
|
||||
memcpy(new_o, to_mpz(o), sizeof(mpz_object));
|
||||
lean_set_non_heap_header((lean_object*)new_o, sz, LeanMPZ, 0);
|
||||
@@ -287,8 +287,8 @@ void object_compactor::insert_mpz(object * o) {
|
||||
m._mp_alloc = nlimbs;
|
||||
save(o, (lean_object*)new_o);
|
||||
#else
|
||||
size_t data_sz = sizeof(mpn_digit) * to_mpz(o)->m_value.m_size;
|
||||
size_t sz = sizeof(mpz_object) + data_sz;
|
||||
size_t data_sz = lean_usize_mul_checked(sizeof(mpn_digit), to_mpz(o)->m_value.m_size);
|
||||
size_t sz = lean_usize_add_checked(sizeof(mpz_object), data_sz);
|
||||
mpz_object * new_o = (mpz_object *)alloc(sz);
|
||||
// Manually copy the `mpz_object` to ensure `mpz` struct padding is left as
|
||||
// zero as prepared by `object_compactor::alloc`. `memcpy` would copy the
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user