Compare commits

..

1 Commits

Author SHA1 Message Date
Joachim Breitner
016110a38f fix: generate SizeOf spec theorems for inductives with private constructors
This PR fixes `SizeOf` instance generation for public inductive types that have
private constructors. The spec theorem proof construction needs to unfold
`_sizeOf` helper functions which may not be exposed in the public view, so
we use `withoutExporting` for the proof construction and type check.

Closes #13373

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-12 11:36:56 +00:00
767 changed files with 919 additions and 6555 deletions

View File

@@ -7,6 +7,11 @@ To build Lean you should use `make -j$(nproc) -C build/release`.
The build uses `ccache`, and in a sandbox `ccache` may complain about read-only file systems.
Use `CCACHE_READONLY` and `CCACHE_TEMPDIR` instead of disabling ccache completely.
To rebuild individual modules without a full build, use Lake directly:
```
cd src && lake build Init.Prelude
```
## Running Tests
See `tests/README.md` for full documentation. Quick reference:

View File

@@ -220,9 +220,7 @@ add_custom_target(
DEPENDS stage2
)
add_custom_target(clean-stdlib COMMAND $(MAKE) -C stage1 clean-stdlib DEPENDS stage1-configure)
add_custom_target(cache-get COMMAND $(MAKE) -C stage1 cache-get DEPENDS stage1-configure)
add_custom_target(clean-stdlib COMMAND $(MAKE) -C stage1 clean-stdlib DEPENDS stage1)
install(CODE "execute_process(COMMAND make -C stage1 install)")

View File

@@ -992,13 +992,6 @@ add_custom_target(
add_custom_target(clean-olean DEPENDS clean-stdlib)
if(USE_LAKE_CACHE)
add_custom_target(
cache-get
COMMAND ${PREV_STAGE}/bin/lake${CMAKE_EXECUTABLE_SUFFIX} cache get --repo=leanprover/lean4
)
endif()
install(
DIRECTORY "${CMAKE_BINARY_DIR}/lib/"
DESTINATION lib

View File

@@ -3096,13 +3096,13 @@ theorem foldl_eq_foldlM {f : β → α → β} {b} {xs : Array α} {start stop :
theorem foldr_eq_foldrM {f : α β β} {b} {xs : Array α} {start stop : Nat} :
xs.foldr f b start stop = (xs.foldrM (m := Id) (pure <| f · ·) b start stop).run := rfl
theorem foldl_eq_foldl_extract {xs : Array α} {f : β α β} {init : β} :
public theorem foldl_eq_foldl_extract {xs : Array α} {f : β α β} {init : β} :
xs.foldl (init := init) (start := start) (stop := stop) f =
(xs.extract start stop).foldl (init := init) f := by
simp only [foldl_eq_foldlM]
rw [foldlM_start_stop]
theorem foldr_eq_foldr_extract {xs : Array α} {f : α β β} {init : β} :
public theorem foldr_eq_foldr_extract {xs : Array α} {f : α β β} {init : β} :
xs.foldr (init := init) (start := start) (stop := stop) f =
(xs.extract stop start).foldr (init := init) f := by
simp only [foldr_eq_foldrM]

View File

@@ -80,7 +80,7 @@ instance : SliceSize (Internal.SubarrayData α) where
size s := s.internalRepresentation.stop - s.internalRepresentation.start
@[grind =, suggest_for Subarray.size]
theorem size_eq {xs : Subarray α} :
public theorem size_eq {xs : Subarray α} :
xs.size = xs.stop - xs.start := by
simp [Std.Slice.size, SliceSize.size, start, stop]

View File

@@ -74,7 +74,7 @@ protected theorem isGE_compare {a b : Int} :
rw [ Int.compare_swap, Ordering.isGE_swap]
exact Int.isLE_compare
instance : Std.LawfulOrderOrd Int where
public instance : Std.LawfulOrderOrd Int where
isLE_compare _ _ := Int.isLE_compare
isGE_compare _ _ := Int.isGE_compare

View File

@@ -42,29 +42,29 @@ The conversion functions {name (scope := "Init.Data.Iterators.Basic")}`Shrink.de
{name (scope := "Init.Data.Iterators.Basic")}`Shrink.inflate` form an equivalence between
{name}`α` and {lean}`Shrink α`, but this equivalence is intentionally not definitional.
-/
def Shrink (α : Type u) : Type u := Internal.idOpaque.1 α
public def Shrink (α : Type u) : Type u := Internal.idOpaque.1 α
/-- Converts elements of {name}`α` into elements of {lean}`Shrink α`. -/
@[always_inline]
def Shrink.deflate {α} (x : α) : Shrink α :=
public def Shrink.deflate {α} (x : α) : Shrink α :=
cast (by simp [Shrink, Internal.idOpaque.property]) x
/-- Converts elements of {lean}`Shrink α` into elements of {name}`α`. -/
@[always_inline]
def Shrink.inflate {α} (x : Shrink α) : α :=
public def Shrink.inflate {α} (x : Shrink α) : α :=
cast (by simp [Shrink, Internal.idOpaque.property]) x
@[simp, grind =]
theorem Shrink.deflate_inflate {α} {x : Shrink α} :
public theorem Shrink.deflate_inflate {α} {x : Shrink α} :
Shrink.deflate x.inflate = x := by
simp [deflate, inflate]
@[simp, grind =]
theorem Shrink.inflate_deflate {α} {x : α} :
public theorem Shrink.inflate_deflate {α} {x : α} :
(Shrink.deflate x).inflate = x := by
simp [deflate, inflate]
theorem Shrink.inflate_inj {α} {x y : Shrink α} :
public theorem Shrink.inflate_inj {α} {x y : Shrink α} :
x.inflate = y.inflate x = y := by
apply Iff.intro
· intro h
@@ -72,7 +72,7 @@ theorem Shrink.inflate_inj {α} {x y : Shrink α} :
· rintro rfl
rfl
theorem Shrink.deflate_inj {α} {x y : α} :
public theorem Shrink.deflate_inj {α} {x y : α} :
Shrink.deflate x = Shrink.deflate y x = y := by
apply Iff.intro
· intro h

View File

@@ -190,7 +190,7 @@ def Append.instFinitenessRelation [Monad m] [Iterator α₁ m β] [Iterator α
exact IterM.TerminationMeasures.Finite.rel_of_skip _
@[no_expose]
instance Append.instFinite [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
public instance Append.instFinite [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
[Finite α₁ m] [Finite α₂ m] : Finite (Append α₁ α₂ m β) m :=
.of_finitenessRelation instFinitenessRelation

View File

@@ -37,7 +37,7 @@ open Nat
@[simp, grind =] theorem min?_nil [Min α] : ([] : List α).min? = none := rfl
@[simp, grind =]
theorem min?_singleton [Min α] {x : α} : [x].min? = some x :=
public theorem min?_singleton [Min α] {x : α} : [x].min? = some x :=
(rfl)
-- We don't put `@[simp]` on `min?_cons'`,
@@ -52,7 +52,7 @@ theorem min?_cons' [Min α] {xs : List α} : (x :: xs).min? = some (foldl min x
cases xs <;> simp [min?]
@[simp, grind =]
theorem isSome_min?_iff [Min α] {xs : List α} : xs.min?.isSome xs [] := by
public theorem isSome_min?_iff [Min α] {xs : List α} : xs.min?.isSome xs [] := by
cases xs <;> simp [min?]
@[grind .]
@@ -247,7 +247,7 @@ theorem foldl_min_eq_min [Min α] [Std.IdempotentOp (min : ααα)] [S
@[simp, grind =] theorem max?_nil [Max α] : ([] : List α).max? = none := rfl
@[simp, grind =]
theorem max?_singleton [Max α] {x : α} : [x].max? = some x :=
public theorem max?_singleton [Max α] {x : α} : [x].max? = some x :=
(rfl)
-- We don't put `@[simp]` on `max?_cons'`,
@@ -262,7 +262,7 @@ theorem max?_cons' [Max α] {xs : List α} : (x :: xs).max? = some (foldl max x
cases xs <;> simp [max?]
@[simp, grind =]
theorem isSome_max?_iff [Max α] {xs : List α} : xs.max?.isSome xs [] := by
public theorem isSome_max?_iff [Max α] {xs : List α} : xs.max?.isSome xs [] := by
cases xs <;> simp [max?]
@[grind .]

View File

@@ -70,7 +70,7 @@ protected theorem isGE_compare {a b : Nat} :
rw [ Nat.compare_swap, Ordering.isGE_swap]
exact Nat.isLE_compare
instance : Std.LawfulOrderOrd Nat where
public instance : Std.LawfulOrderOrd Nat where
isLE_compare _ _ := Nat.isLE_compare
isGE_compare _ _ := Nat.isGE_compare

View File

@@ -444,7 +444,7 @@ instance : MonadAttach Option where
CanReturn x a := x = some a
attach x := x.attach
instance : LawfulMonadAttach Option where
public instance : LawfulMonadAttach Option where
map_attach {α} x := by simp [MonadAttach.attach]
canReturn_map_imp {α P x a} := by
cases x
@@ -455,7 +455,7 @@ end Option
namespace OptionT
instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] :
public instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] :
WeaklyLawfulMonadAttach (OptionT m) where
map_attach {α} x := by
apply OptionT.ext
@@ -466,7 +466,7 @@ instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] :
| some a, _ => simp [OptionT.pure, OptionT.mk]
| none, _ => simp
instance [Monad m] [MonadAttach m] [LawfulMonad m] [LawfulMonadAttach m] :
public instance [Monad m] [MonadAttach m] [LawfulMonad m] [LawfulMonadAttach m] :
LawfulMonadAttach (OptionT m) where
canReturn_map_imp {α P x a} h := by
simp only [MonadAttach.CanReturn, OptionT.run_map] at h

View File

@@ -152,7 +152,7 @@ public theorem max_eq_if_isGE_compare {α : Type u} [Ord α] [LE α] {_ : Max α
{a b : α} : max a b = if (compare a b).isGE then a else b := by
open Classical in simp [max_eq_if, isGE_compare]
theorem min_le_min [LE α] [Min α] [Std.LawfulOrderLeftLeaningMin α] [IsLinearOrder α] (a b : α) : min a b min b a := by
private theorem min_le_min [LE α] [Min α] [Std.LawfulOrderLeftLeaningMin α] [IsLinearOrder α] (a b : α) : min a b min b a := by
apply (LawfulOrderInf.le_min_iff (min a b) b a).2
rw [And.comm]
by_cases h : a b

File diff suppressed because it is too large Load Diff

View File

@@ -248,11 +248,11 @@ instance : HasModel Int8 (BitVec 8) where
le_iff_encode_le := by simp [LE.le, Int8.le]
lt_iff_encode_lt := by simp [LT.lt, Int8.lt]
theorem succ?_eq_minValueSealed {x : Int8} :
private theorem succ?_eq_minValueSealed {x : Int8} :
UpwardEnumerable.succ? x = if x + 1 = minValueSealed then none else some (x + 1) :=
(rfl)
theorem succMany?_eq_maxValueSealed {i : Int8} :
private theorem succMany?_eq_maxValueSealed {i : Int8} :
UpwardEnumerable.succMany? n i =
have := i.minValue_le_toInt
if h : i.toInt + n maxValueSealed.toInt then some (.ofIntLE _ (by omega) (maxValueSealed_def h)) else none :=
@@ -605,12 +605,12 @@ theorem minValueSealed_def : minValueSealed = ISize.minValue := (rfl)
theorem maxValueSealed_def : maxValueSealed = ISize.maxValue := (rfl)
seal minValueSealed maxValueSealed
theorem toBitVec_minValueSealed_eq_intMinSealed :
private theorem toBitVec_minValueSealed_eq_intMinSealed :
minValueSealed.toBitVec = BitVec.Signed.intMinSealed System.Platform.numBits := by
rw [minValueSealed_def, BitVec.Signed.intMinSealed_def, toBitVec_minValue]
have := System.Platform.numBits_eq; generalize System.Platform.numBits = a at this
rcases this with rfl | rfl <;> rfl
theorem toBitVec_maxValueSealed_eq_intMaxSealed :
private theorem toBitVec_maxValueSealed_eq_intMaxSealed :
maxValueSealed.toBitVec = BitVec.Signed.intMaxSealed System.Platform.numBits := by
rw [maxValueSealed_def, BitVec.Signed.intMaxSealed_def, toBitVec_maxValue]
have := System.Platform.numBits_eq; generalize System.Platform.numBits = a at this

View File

@@ -233,7 +233,7 @@ public theorem Subarray.toList_eq {xs : Subarray α} :
simp [this, ListSlice.toList_eq, lslice]
-- TODO: The current `List.extract_eq_drop_take` should be called `List.extract_eq_take_drop`
theorem Std.Internal.List.extract_eq_drop_take' {l : List α} {start stop : Nat} :
private theorem Std.Internal.List.extract_eq_drop_take' {l : List α} {start stop : Nat} :
l.extract start stop = (l.take stop).drop start := by
simp [List.take_drop]
by_cases start stop

View File

@@ -94,7 +94,7 @@ public def String.utf8EncodeCharFast (c : Char) : List UInt8 :=
(v >>> 6).toUInt8 &&& 0x3f ||| 0x80,
v.toUInt8 &&& 0x3f ||| 0x80]
theorem Nat.add_two_pow_eq_or_of_lt {b : Nat} (i : Nat) (b_lt : b < 2 ^ i) (a : Nat) :
private theorem Nat.add_two_pow_eq_or_of_lt {b : Nat} (i : Nat) (b_lt : b < 2 ^ i) (a : Nat) :
b + 2 ^ i * a = b ||| 2 ^ i * a := by
rw [Nat.add_comm, Nat.or_comm, Nat.two_pow_add_eq_or_of_lt b_lt]

View File

@@ -909,7 +909,7 @@ theorem Slice.Pos.skipWhile_copy {ρ : Type} {pat : ρ} [ForwardPattern pat] [Pa
simp
@[simp]
theorem Pos.le_skipWhile {ρ : Type} {pat : ρ} [ForwardPattern pat] [PatternModel pat]
theorem Pos.skipWhile_le {ρ : Type} {pat : ρ} [ForwardPattern pat] [PatternModel pat]
[LawfulForwardPatternModel pat] {s : String} {pos : s.Pos} : pos pos.skipWhile pat := by
simp [skipWhile_eq_skipWhile_toSlice, Pos.le_ofToSlice_iff]

View File

@@ -56,11 +56,11 @@ def markSparseCasesOn (env : Environment) (declName : Name) : Environment :=
sparseCasesOnExt.tag env declName
/-- Is this a constructor elimination or a sparse casesOn? -/
def isSparseCasesOn (env : Environment) (declName : Name) : Bool :=
public def isSparseCasesOn (env : Environment) (declName : Name) : Bool :=
sparseCasesOnExt.isTagged env declName
/-- Is this a `.casesOn`, a constructor elimination or a sparse casesOn? -/
def isCasesOnLike (env : Environment) (declName : Name) : Bool :=
public def isCasesOnLike (env : Environment) (declName : Name) : Bool :=
isCasesOnRecursor env declName || isSparseCasesOn env declName
/--

View File

@@ -774,7 +774,7 @@ where
mutual
partial def emitBasicBlock (code : Code .impure) : EmitM Unit := do
private partial def emitBasicBlock (code : Code .impure) : EmitM Unit := do
match code with
| .jp (k := k) .. => emitBasicBlock k
| .let decl k =>
@@ -896,7 +896,7 @@ where
emitUnreach : EmitM Unit := do
emitLn "lean_internal_panic_unreachable();"
partial def emitJoinPoints (code : Code .impure) : EmitM Unit := do
private partial def emitJoinPoints (code : Code .impure) : EmitM Unit := do
match code with
| .jp decl k =>
emit decl.binderName; emitLn ":"
@@ -906,7 +906,7 @@ partial def emitJoinPoints (code : Code .impure) : EmitM Unit := do
| .sset (k := k) .. | .uset (k := k) .. | .oset (k := k) .. => emitJoinPoints k
| .cases .. | .return .. | .jmp .. | .unreach .. => return ()
partial def emitCode (code : Code .impure) : EmitM Unit := do
private partial def emitCode (code : Code .impure) : EmitM Unit := do
withEmitBlock do
let declared declareVars code
if declared then emitLn ""

View File

@@ -12,7 +12,7 @@ import Lean.Compiler.InitAttr
namespace Lean.Compiler.LCNF
structure CollectUsedDeclsState where
private structure CollectUsedDeclsState where
visited : NameSet := {}
localDecls : Array (Decl .impure) := #[]
extSigs : Array (Signature .impure) := #[]

View File

@@ -29,7 +29,7 @@ public def mkOrderedDeclSetExt : IO (EnvExtension (List Name × NameSet)) :=
/--
Set of declarations to be exported to other modules; visibility shared by base/mono/IR phases.
-/
builtin_initialize publicDeclsExt : EnvExtension (List Name × NameSet) mkOrderedDeclSetExt
private builtin_initialize publicDeclsExt : EnvExtension (List Name × NameSet) mkOrderedDeclSetExt
public def isDeclPublic (env : Environment) (declName : Name) : Bool := Id.run do
if !env.header.isModule then

View File

@@ -142,10 +142,10 @@ partial def Code.toExprM (code : Code pu) : ToExprM Expr := do
return .letE `dummy (mkConst ``Unit) value body true
end
def Code.toExpr (code : Code pu) (xs : Array FVarId := #[]) : Expr :=
public def Code.toExpr (code : Code pu) (xs : Array FVarId := #[]) : Expr :=
run' code.toExprM xs
def FunDecl.toExpr (decl : FunDecl pu) (xs : Array FVarId := #[]) : Expr :=
public def FunDecl.toExpr (decl : FunDecl pu) (xs : Array FVarId := #[]) : Expr :=
run' decl.toExprM xs
end Lean.Compiler.LCNF

View File

@@ -213,22 +213,11 @@ structure Context where
-/
expectedType : Option Expr
/--
Key for the LCNF translation cache. `ignoreNoncomputable` is part of the key
because entries cached in irrelevant positions skip the `checkComputable`
check and must not be reused in relevant positions.
-/
structure CacheKey where
expr : Expr
expectedType? : Option Expr
ignoreNoncomputable : Bool
deriving BEq, Hashable
structure State where
/-- Local context containing the original Lean types (not LCNF ones). -/
lctx : LocalContext := {}
/-- Cache from Lean regular expression to LCNF argument. -/
cache : PHashMap CacheKey (Arg .pure) := {}
cache : PHashMap (Expr × Option Expr) (Arg .pure) := {}
/--
Determines whether caching has been disabled due to finding a use of
a constant marked with `never_extract`.
@@ -484,9 +473,7 @@ partial def toLCNF (e : Expr) (eType : Expr) : CompilerM (Code .pure) := do
where
visitCore (e : Expr) : M (Arg .pure) := withIncRecDepth do
let eType? := ( read).expectedType
let ignoreNoncomputable := ( read).ignoreNoncomputable
let key : CacheKey := { expr := e, expectedType? := eType?, ignoreNoncomputable }
if let some arg := ( get).cache.find? key then
if let some arg := ( get).cache.find? (e, eType?) then
return arg
let r : Arg .pure match e with
| .app .. => visitApp e
@@ -498,7 +485,7 @@ where
| .lit lit => visitLit lit
| .fvar fvarId => if ( get).toAny.contains fvarId then pure .erased else pure (.fvar fvarId)
| .forallE .. | .mvar .. | .bvar .. | .sort .. => unreachable!
modify fun s => if s.shouldCache then { s with cache := s.cache.insert key r } else s
modify fun s => if s.shouldCache then { s with cache := s.cache.insert (e, eType?) r } else s
return r
visit (e : Expr) : M (Arg .pure) := withIncRecDepth do

View File

@@ -37,7 +37,7 @@ public def getStateByIdx? [Inhabited σ] (ext : ModuleEnvExtension σ) (env : En
end ModuleEnvExtension
initialize modPkgExt : ModuleEnvExtension (Option PkgId)
private initialize modPkgExt : ModuleEnvExtension (Option PkgId)
registerModuleEnvExtension (pure none)
/-- Returns the package (if any) of an imported module (by its index). -/

View File

@@ -20,13 +20,13 @@ line parsing. Called from the C runtime via `@[export]` for backtrace display. -
namespace Lean.Name.Demangle
/-- `String.dropPrefix?` returning a `String` instead of a `Slice`. -/
def dropPrefix? (s : String) (pre : String) : Option String :=
private def dropPrefix? (s : String) (pre : String) : Option String :=
(s.dropPrefix? pre).map (·.toString)
def isAllDigits (s : String) : Bool :=
private def isAllDigits (s : String) : Bool :=
!s.isEmpty && s.all (·.isDigit)
def nameToNameParts (n : Name) : Array NamePart :=
private def nameToNameParts (n : Name) : Array NamePart :=
go n [] |>.toArray
where
go : Name List NamePart List NamePart
@@ -34,17 +34,17 @@ where
| .str pre s, acc => go pre (NamePart.str s :: acc)
| .num pre n, acc => go pre (NamePart.num n :: acc)
def namePartsToName (parts : Array NamePart) : Name :=
private def namePartsToName (parts : Array NamePart) : Name :=
parts.foldl (fun acc p =>
match p with
| .str s => acc.mkStr s
| .num n => acc.mkNum n) .anonymous
/-- Format name parts using `Name.toString` for correct escaping. -/
def formatNameParts (comps : Array NamePart) : String :=
private def formatNameParts (comps : Array NamePart) : String :=
if comps.isEmpty then "" else (namePartsToName comps).toString
def matchSuffix (c : NamePart) : Option String :=
private def matchSuffix (c : NamePart) : Option String :=
match c with
| NamePart.str s =>
if s == "_redArg" then some "arity↓"
@@ -58,12 +58,12 @@ def matchSuffix (c : NamePart) : Option String :=
else none
| _ => none
def isSpecIndex (c : NamePart) : Bool :=
private def isSpecIndex (c : NamePart) : Bool :=
match c with
| NamePart.str s => (dropPrefix? s "spec_").any isAllDigits
| _ => false
def stripPrivate (comps : Array NamePart) (start stop : Nat) :
private def stripPrivate (comps : Array NamePart) (start stop : Nat) :
Nat × Bool :=
if stop - start >= 3 && comps[start]? == some (NamePart.str "_private") then
Id.run do
@@ -75,11 +75,11 @@ def stripPrivate (comps : Array NamePart) (start stop : Nat) :
else
(start, false)
structure SpecEntry where
private structure SpecEntry where
name : String
flags : Array String
def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
private def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
let mut begin_ := 0
if comps.size >= 3 && comps[0]? == some (NamePart.str "_private") then
for i in [1:comps.size] do
@@ -99,7 +99,7 @@ def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
else parts := parts.push c
{ name := formatNameParts parts, flags }
def postprocessNameParts (components : Array NamePart) : String := Id.run do
private def postprocessNameParts (components : Array NamePart) : String := Id.run do
if components.isEmpty then return ""
let (privStart, isPrivate) := stripPrivate components 0 components.size
@@ -206,14 +206,14 @@ def postprocessNameParts (components : Array NamePart) : String := Id.run do
return result
def demangleBody (body : String) : String :=
private def demangleBody (body : String) : String :=
let name := Name.demangle body
postprocessNameParts (nameToNameParts name)
/-- Split a `lp_`-prefixed symbol into (demangled body, package name).
Tries each underscore as a split point; the first valid split (shortest single-component
package where the remainder is a valid mangled name) is correct. -/
def demangleWithPkg (s : String) : Option (String × String) := do
private def demangleWithPkg (s : String) : Option (String × String) := do
for pos, h in s.positions do
if pos.get h == '_' && pos s.startPos then
let nextPos := pos.next h
@@ -230,12 +230,12 @@ def demangleWithPkg (s : String) : Option (String × String) := do
return (demangleBody body, pkgName)
none
def stripColdSuffix (s : String) : String × String :=
private def stripColdSuffix (s : String) : String × String :=
match s.find? ".cold" with
| some pos => (s.extract s.startPos pos, s.extract pos s.endPos)
| none => (s, "")
def demangleCore (s : String) : Option String := do
private def demangleCore (s : String) : Option String := do
-- _init_l_
if let some body := dropPrefix? s "_init_l_" then
if !body.isEmpty then return s!"[init] {demangleBody body}"
@@ -291,17 +291,17 @@ public def demangleSymbol (symbol : String) : Option String := do
if coldSuffix.isEmpty then return result
else return s!"{result} {coldSuffix}"
def skipWhile (s : String) (pos : s.Pos) (pred : Char Bool) : s.Pos :=
private def skipWhile (s : String) (pos : s.Pos) (pred : Char Bool) : s.Pos :=
if h : pos = s.endPos then pos
else if pred (pos.get h) then skipWhile s (pos.next h) pred
else pos
termination_by pos
def splitAt₂ (s : String) (p₁ p₂ : s.Pos) : String × String × String :=
private def splitAt₂ (s : String) (p₁ p₂ : s.Pos) : String × String × String :=
(s.extract s.startPos p₁, s.extract p₁ p₂, s.extract p₂ s.endPos)
/-- Extract the symbol from a backtrace line (Linux glibc or macOS format). -/
def extractSymbol (line : String) :
private def extractSymbol (line : String) :
Option (String × String × String) :=
tryLinux line |>.orElse (fun _ => tryMacOS line)
where

View File

@@ -400,7 +400,7 @@ Namely:
def parseMessageMetaData (input : String) : Except String MessageMetaData :=
messageMetaDataParser input |>.run input
inductive MessageDirection where
public inductive MessageDirection where
| clientToServer
| serverToClient
deriving Inhabited, FromJson, ToJson

View File

@@ -43,14 +43,14 @@ public structure State where
/-- Footnotes -/
footnotes : Array (String × String) := #[]
def combineBlocks (prior current : String) :=
private def combineBlocks (prior current : String) :=
if prior.isEmpty then current
else if current.isEmpty then prior
else if prior.endsWith "\n\n" then prior ++ current
else if prior.endsWith "\n" then prior ++ "\n" ++ current
else prior ++ "\n\n" ++ current
def State.endBlock (state : State) : State :=
private def State.endBlock (state : State) : State :=
{ state with
priorBlocks :=
combineBlocks state.priorBlocks state.currentBlock ++
@@ -60,13 +60,13 @@ def State.endBlock (state : State) : State :=
footnotes := #[]
}
def State.render (state : State) : String :=
private def State.render (state : State) : String :=
state.endBlock.priorBlocks
def State.push (state : State) (txt : String) : State :=
private def State.push (state : State) (txt : String) : State :=
{ state with currentBlock := state.currentBlock ++ txt }
def State.endsWith (state : State) (txt : String) : Bool :=
private def State.endsWith (state : State) (txt : String) : Bool :=
state.currentBlock.endsWith txt || (state.currentBlock.isEmpty && state.priorBlocks.endsWith txt)
end MarkdownM
@@ -150,7 +150,7 @@ public class MarkdownBlock (i : Type u) (b : Type v) where
public instance : MarkdownBlock i Empty where
toMarkdown := nofun
def escape (s : String) : String := Id.run do
private def escape (s : String) : String := Id.run do
let mut s' := ""
let mut iter := s.startPos
while h : ¬iter.IsAtEnd do
@@ -163,7 +163,7 @@ def escape (s : String) : String := Id.run do
where
isSpecial c := "*_`-+.!<>[]{}()#".any (· == c)
def quoteCode (str : String) : String := Id.run do
private def quoteCode (str : String) : String := Id.run do
let mut longest := 0
let mut current := 0
let mut iter := str.startPos
@@ -179,7 +179,7 @@ def quoteCode (str : String) : String := Id.run do
let str := if str.startsWith "`" || str.endsWith "`" then " " ++ str ++ " " else str
backticks ++ str ++ backticks
partial def trimLeft (inline : Inline i) : (String × Inline i) := go [inline]
private partial def trimLeft (inline : Inline i) : (String × Inline i) := go [inline]
where
go : List (Inline i) String × Inline i
| [] => ("", .empty)
@@ -194,7 +194,7 @@ where
| .concat xs :: more => go (xs.toList ++ more)
| here :: more => ("", here ++ .concat more.toArray)
partial def trimRight (inline : Inline i) : (Inline i × String) := go [inline]
private partial def trimRight (inline : Inline i) : (Inline i × String) := go [inline]
where
go : List (Inline i) Inline i × String
| [] => (.empty, "")
@@ -209,13 +209,13 @@ where
| .concat xs :: more => go (xs.reverse.toList ++ more)
| here :: more => (.concat more.toArray.reverse ++ here, "")
def trim (inline : Inline i) : (String × Inline i × String) :=
private def trim (inline : Inline i) : (String × Inline i × String) :=
let (pre, more) := trimLeft inline
let (mid, post) := trimRight more
(pre, mid, post)
open MarkdownM in
partial def inlineMarkdown [MarkdownInline i] : Inline i MarkdownM Unit
private partial def inlineMarkdown [MarkdownInline i] : Inline i MarkdownM Unit
| .text s =>
push (escape s)
| .linebreak s => do
@@ -271,7 +271,7 @@ partial def inlineMarkdown [MarkdownInline i] : Inline i → MarkdownM Unit
public instance [MarkdownInline i] : ToMarkdown (Inline i) where
toMarkdown inline := private inlineMarkdown inline
def quoteCodeBlock (indent : Nat) (str : String) : String := Id.run do
private def quoteCodeBlock (indent : Nat) (str : String) : String := Id.run do
let mut longest := 2
let mut current := 0
let mut iter := str.startPos
@@ -292,7 +292,7 @@ def quoteCodeBlock (indent : Nat) (str : String) : String := Id.run do
backticks ++ "\n" ++ out ++ backticks ++ "\n"
open MarkdownM in
partial def blockMarkdown [MarkdownInline i] [MarkdownBlock i b] : Block i b MarkdownM Unit
private partial def blockMarkdown [MarkdownInline i] [MarkdownBlock i b] : Block i b MarkdownM Unit
| .para xs => do
for i in xs do
ToMarkdown.toMarkdown i
@@ -345,7 +345,7 @@ public instance [MarkdownInline i] [MarkdownBlock i b] : ToMarkdown (Block i b)
open MarkdownM in
open ToMarkdown in
partial def partMarkdown [MarkdownInline i] [MarkdownBlock i b] (level : Nat) (part : Part i b p) : MarkdownM Unit := do
private partial def partMarkdown [MarkdownInline i] [MarkdownBlock i b] (level : Nat) (part : Part i b p) : MarkdownM Unit := do
push ("".pushn '#' (level + 1))
push " "
for i in part.title do

View File

@@ -18,7 +18,7 @@ open Lean.Doc.Syntax
local instance : Coe Char ParserFn where
coe := chFn
partial def atLeastAux (n : Nat) (p : ParserFn) : ParserFn := fun c s => Id.run do
private partial def atLeastAux (n : Nat) (p : ParserFn) : ParserFn := fun c s => Id.run do
let iniSz := s.stackSize
let iniPos := s.pos
let mut s := p c s
@@ -30,7 +30,7 @@ partial def atLeastAux (n : Nat) (p : ParserFn) : ParserFn := fun c s => Id.run
s := s.mkNode nullKind iniSz
atLeastAux (n - 1) p c s
def atLeastFn (n : Nat) (p : ParserFn) : ParserFn := fun c s =>
private def atLeastFn (n : Nat) (p : ParserFn) : ParserFn := fun c s =>
let iniSz := s.stackSize
let s := atLeastAux n p c s
s.mkNode nullKind iniSz
@@ -40,9 +40,9 @@ A parser that does nothing.
-/
public def skipFn : ParserFn := fun _ s => s
def eatSpaces := takeWhileFn (· == ' ')
private def eatSpaces := takeWhileFn (· == ' ')
def repFn : Nat ParserFn ParserFn
private def repFn : Nat ParserFn ParserFn
| 0, _ => skipFn
| n+1, p => p >> repFn n p
@@ -55,7 +55,7 @@ partial def satisfyFn' (p : Char → Bool)
else if p (c.get' i h) then s.next' c i h
else s.mkUnexpectedError errorMsg
partial def atMostAux (n : Nat) (p : ParserFn) (msg : String) : ParserFn :=
private partial def atMostAux (n : Nat) (p : ParserFn) (msg : String) : ParserFn :=
fun c s => Id.run do
let iniSz := s.stackSize
let iniPos := s.pos
@@ -70,13 +70,13 @@ partial def atMostAux (n : Nat) (p : ParserFn) (msg : String) : ParserFn :=
s := s.mkNode nullKind iniSz
atMostAux (n - 1) p msg c s
def atMostFn (n : Nat) (p : ParserFn) (msg : String) : ParserFn := fun c s =>
private def atMostFn (n : Nat) (p : ParserFn) (msg : String) : ParserFn := fun c s =>
let iniSz := s.stackSize
let s := atMostAux n p msg c s
s.mkNode nullKind iniSz
/-- Like `satisfyFn`, but allows any escape sequence through -/
partial def satisfyEscFn (p : Char Bool)
private partial def satisfyEscFn (p : Char Bool)
(errorMsg : String := "unexpected character") :
ParserFn := fun c s =>
let i := s.pos
@@ -89,7 +89,7 @@ partial def satisfyEscFn (p : Char → Bool)
else if p (c.get' i h) then s.next' c i h
else s.mkUnexpectedError errorMsg
partial def takeUntilEscFn (p : Char Bool) : ParserFn := fun c s =>
private partial def takeUntilEscFn (p : Char Bool) : ParserFn := fun c s =>
let i := s.pos
if h : c.atEnd i then s
else if c.get' i h == '\\' then
@@ -100,7 +100,7 @@ partial def takeUntilEscFn (p : Char → Bool) : ParserFn := fun c s =>
else if p (c.get' i h) then s
else takeUntilEscFn p c (s.next' c i h)
partial def takeWhileEscFn (p : Char Bool) : ParserFn := takeUntilEscFn (not p)
private partial def takeWhileEscFn (p : Char Bool) : ParserFn := takeUntilEscFn (not p)
/--
Parses as `p`, but discards the result.
@@ -111,7 +111,7 @@ public def ignoreFn (p : ParserFn) : ParserFn := fun c s =>
s'.shrinkStack iniSz
def withInfoSyntaxFn (p : ParserFn) (infoP : SourceInfo ParserFn) : ParserFn := fun c s =>
private def withInfoSyntaxFn (p : ParserFn) (infoP : SourceInfo ParserFn) : ParserFn := fun c s =>
let iniSz := s.stxStack.size
let startPos := s.pos
let s := p c s
@@ -121,7 +121,7 @@ def withInfoSyntaxFn (p : ParserFn) (infoP : SourceInfo → ParserFn) : ParserFn
let info := SourceInfo.original leading startPos trailing stopPos
infoP info c (s.shrinkStack iniSz)
def unescapeStr (str : String) : String := Id.run do
private def unescapeStr (str : String) : String := Id.run do
let mut out := ""
let mut iter := str.startPos
while h : ¬iter.IsAtEnd do
@@ -135,7 +135,7 @@ def unescapeStr (str : String) : String := Id.run do
out := out.push c
out
def asStringAux (quoted : Bool) (startPos : String.Pos.Raw) (transform : String String) :
private def asStringAux (quoted : Bool) (startPos : String.Pos.Raw) (transform : String String) :
ParserFn := fun c s =>
let stopPos := s.pos
let leading := c.mkEmptySubstringAt startPos
@@ -156,26 +156,26 @@ public def asStringFn (p : ParserFn) (quoted := false) (transform : String → S
if s.hasError then s
else asStringAux quoted startPos transform c (s.shrinkStack iniSz)
def checkCol0Fn (errorMsg : String) : ParserFn := fun c s =>
private def checkCol0Fn (errorMsg : String) : ParserFn := fun c s =>
let pos := c.fileMap.toPosition s.pos
if pos.column = 1 then s
else s.mkError errorMsg
def _root_.Lean.Parser.ParserContext.currentColumn
private def _root_.Lean.Parser.ParserContext.currentColumn
(c : ParserContext) (s : ParserState) : Nat :=
c.fileMap.toPosition s.pos |>.column
def pushColumn : ParserFn := fun c s =>
private def pushColumn : ParserFn := fun c s =>
let col := c.fileMap.toPosition s.pos |>.column
s.pushSyntax <| Syntax.mkLit `column (toString col) (SourceInfo.synthetic s.pos s.pos)
def guardColumn (p : Nat Bool) (message : String) : ParserFn := fun c s =>
private def guardColumn (p : Nat Bool) (message : String) : ParserFn := fun c s =>
if p (c.currentColumn s) then s else s.mkErrorAt message s.pos
def guardMinColumn (min : Nat) (description : String := s!"expected column at least {min}") : ParserFn :=
private def guardMinColumn (min : Nat) (description : String := s!"expected column at least {min}") : ParserFn :=
guardColumn (· min) description
def withCurrentColumn (p : Nat ParserFn) : ParserFn := fun c s =>
private def withCurrentColumn (p : Nat ParserFn) : ParserFn := fun c s =>
p (c.currentColumn s) c s
@@ -183,7 +183,7 @@ def withCurrentColumn (p : Nat → ParserFn) : ParserFn := fun c s =>
We can only start a nestable block if we're immediately after a newline followed by a sequence of
nestable block openers
-/
def onlyBlockOpeners : ParserFn := fun c s =>
private def onlyBlockOpeners : ParserFn := fun c s =>
let position := c.fileMap.toPosition s.pos
let lineStart := c.fileMap.lineStart position.line
let ok : Bool := Id.run do
@@ -206,7 +206,7 @@ def onlyBlockOpeners : ParserFn := fun c s =>
if ok then s
else s.mkErrorAt "beginning of line or sequence of nestable block openers" s.pos
def nl := satisfyFn (· == '\n') "newline"
private def nl := satisfyFn (· == '\n') "newline"
/--
Construct a “fake” atom with the given string content and source information.
@@ -225,13 +225,13 @@ current position.
Normally, atoms are always substrings of the original input; however, Verso's concrete syntax is
different enough from Lean's that this isn't always a good match.
-/
def fakeAtomHere (str : String) : ParserFn :=
private def fakeAtomHere (str : String) : ParserFn :=
withInfoSyntaxFn skip.fn (fun info => fakeAtom str (info := info))
def pushMissing : ParserFn := fun _c s =>
private def pushMissing : ParserFn := fun _c s =>
s.pushSyntax .missing
def strFn (str : String) : ParserFn := asStringFn <| fun c s =>
private def strFn (str : String) : ParserFn := asStringFn <| fun c s =>
let rec go (iter : str.Pos) (s : ParserState) :=
if h : iter.IsAtEnd then s
else
@@ -260,10 +260,10 @@ public instance : Ord OrderedListType where
| .parenAfter, .numDot => .gt
| .parenAfter, .parenAfter => .eq
def OrderedListType.all : List OrderedListType :=
private def OrderedListType.all : List OrderedListType :=
[.numDot, .parenAfter]
theorem OrderedListType.all_complete : x : OrderedListType, x all := by
private theorem OrderedListType.all_complete : x : OrderedListType, x all := by
unfold all; intro x; cases x <;> repeat constructor
/--
@@ -288,40 +288,40 @@ public instance : Ord UnorderedListType where
| .plus, .plus => .eq
| .plus, _ => .gt
def UnorderedListType.all : List UnorderedListType :=
private def UnorderedListType.all : List UnorderedListType :=
[.asterisk, .dash, .plus]
theorem UnorderedListType.all_complete : x : UnorderedListType, x all := by
private theorem UnorderedListType.all_complete : x : UnorderedListType, x all := by
unfold all; intro x; cases x <;> repeat constructor
def unorderedListIndicator (type : UnorderedListType) : ParserFn :=
private def unorderedListIndicator (type : UnorderedListType) : ParserFn :=
asStringFn <|
match type with
| .asterisk => chFn '*'
| .dash => chFn '-'
| .plus => chFn '+'
def orderedListIndicator (type : OrderedListType) : ParserFn :=
private def orderedListIndicator (type : OrderedListType) : ParserFn :=
asStringFn <|
takeWhile1Fn (·.isDigit) "digits" >>
match type with
| .numDot => chFn '.'
| .parenAfter => chFn ')'
def blankLine : ParserFn :=
private def blankLine : ParserFn :=
nodeFn `blankLine <| atomicFn <| asStringFn <| takeWhileFn (· == ' ') >> nl
def endLine : ParserFn :=
private def endLine : ParserFn :=
ignoreFn <| atomicFn <| asStringFn <| takeWhileFn (· == ' ') >> eoiFn
def bullet := atomicFn (go UnorderedListType.all)
private def bullet := atomicFn (go UnorderedListType.all)
where
go
| [] => fun _ s => s.mkError "no list type"
| [x] => atomicFn (unorderedListIndicator x)
| x :: xs => atomicFn (unorderedListIndicator x) <|> go xs
def numbering := atomicFn (go OrderedListType.all)
private def numbering := atomicFn (go OrderedListType.all)
where
go
| [] => fun _ s => s.mkError "no list type"
@@ -374,7 +374,7 @@ public def inlineTextChar : ParserFn := fun c s =>
/-- Return some inline text up to the next inline opener or the end of
the line, whichever is first. Always consumes at least one
logical character on success, taking escaping into account. -/
def inlineText : ParserFn :=
private def inlineText : ParserFn :=
asStringFn (transform := unescapeStr) <| atomicFn inlineTextChar >> manyFn inlineTextChar
/--
@@ -410,23 +410,23 @@ public def val : ParserFn := fun c s =>
else
s.mkError "expected identifier, string, or number"
def withCurrentStackSize (p : Nat → ParserFn) : ParserFn := fun c s =>
private def withCurrentStackSize (p : Nat → ParserFn) : ParserFn := fun c s =>
p s.stxStack.size c s
/-- Match the character indicated, pushing nothing to the stack in case of success -/
def skipChFn (c : Char) : ParserFn :=
private def skipChFn (c : Char) : ParserFn :=
satisfyFn (· == c) c.toString
def skipToNewline : ParserFn :=
private def skipToNewline : ParserFn :=
takeUntilFn (· == '\n')
def skipToSpace : ParserFn :=
private def skipToSpace : ParserFn :=
takeUntilFn (· == ' ')
def skipRestOfLine : ParserFn :=
private def skipRestOfLine : ParserFn :=
skipToNewline >> (eoiFn <|> nl)
def skipBlock : ParserFn :=
private def skipBlock : ParserFn :=
skipToNewline >> manyFn nonEmptyLine >> takeWhileFn (· == '\n')
where
nonEmptyLine : ParserFn :=
@@ -444,7 +444,7 @@ public def recoverBlock (p : ParserFn) (final : ParserFn := skipFn) : ParserFn :
ignoreFn skipBlock >> final
-- Like `recoverBlock` but stores recovered errors at the original error position.
def recoverBlockAtErrPos (p : ParserFn) : ParserFn := fun c s =>
private def recoverBlockAtErrPos (p : ParserFn) : ParserFn := fun c s =>
let s := p c s
if let some msg := s.errorMsg then
let errPos := s.pos
@@ -457,36 +457,36 @@ def recoverBlockAtErrPos (p : ParserFn) : ParserFn := fun c s =>
recoveredErrors := s.recoveredErrors.push (errPos, s'.stxStack, msg)}
else s
def recoverBlockWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
private def recoverBlockWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
recoverFn p fun rctx =>
ignoreFn skipBlock >>
show ParserFn from
fun _ s => stxs.foldl (init := s.shrinkStack rctx.initialSize) (·.pushSyntax ·)
def recoverLine (p : ParserFn) : ParserFn :=
private def recoverLine (p : ParserFn) : ParserFn :=
recoverFn p fun _ =>
ignoreFn skipRestOfLine
def recoverWs (p : ParserFn) : ParserFn :=
private def recoverWs (p : ParserFn) : ParserFn :=
recoverFn p fun _ =>
ignoreFn <| takeUntilFn (fun c => c == ' ' || c == '\n')
def recoverNonSpace (p : ParserFn) : ParserFn :=
private def recoverNonSpace (p : ParserFn) : ParserFn :=
recoverFn p fun rctx =>
ignoreFn (takeUntilFn (fun c => c != ' ')) >>
show ParserFn from
fun _ s => s.shrinkStack rctx.initialSize
def recoverWsWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
private def recoverWsWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
recoverFn p fun rctx =>
ignoreFn <| takeUntilFn (fun c => c == ' ' || c == '\n') >>
show ParserFn from
fun _ s => stxs.foldl (init := s.shrinkStack rctx.initialSize) (·.pushSyntax ·)
def recoverEol (p : ParserFn) : ParserFn :=
private def recoverEol (p : ParserFn) : ParserFn :=
recoverFn p fun _ => ignoreFn <| skipToNewline
def recoverEolWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
private def recoverEolWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
recoverFn p fun rctx =>
ignoreFn skipToNewline >>
show ParserFn from
@@ -494,7 +494,7 @@ def recoverEolWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
-- Like `recoverEol` but stores recovered errors at the original error position
-- rather than the post-recovery position.
def recoverEolAtErrPos (p : ParserFn) : ParserFn := fun c s =>
private def recoverEolAtErrPos (p : ParserFn) : ParserFn := fun c s =>
let s := p c s
if let some msg := s.errorMsg then
let errPos := s.pos
@@ -509,7 +509,7 @@ def recoverEolAtErrPos (p : ParserFn) : ParserFn := fun c s =>
-- Like `recoverEolWith` but stores recovered errors at the original error position
-- rather than the post-recovery position.
def recoverEolWithAtErrPos (stxs : Array Syntax) (p : ParserFn) : ParserFn := fun c s =>
private def recoverEolWithAtErrPos (stxs : Array Syntax) (p : ParserFn) : ParserFn := fun c s =>
let iniSz := s.stxStack.size
let s := p c s
if let some msg := s.errorMsg then
@@ -521,10 +521,10 @@ def recoverEolWithAtErrPos (stxs : Array Syntax) (p : ParserFn) : ParserFn := fu
{s' with recoveredErrors := s.recoveredErrors.push (errPos, s'.stxStack, msg)}
else s
def recoverSkip (p : ParserFn) : ParserFn :=
private def recoverSkip (p : ParserFn) : ParserFn :=
recoverFn p fun _ => skipFn
def recoverSkipWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
private def recoverSkipWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
recoverFn p fun rctx =>
show ParserFn from
fun _ s => stxs.foldl (init := s.shrinkStack rctx.initialSize) (·.pushSyntax ·)
@@ -535,7 +535,7 @@ def recoverHereWith (stxs : Array Syntax) (p : ParserFn) : ParserFn :=
show ParserFn from
fun _ s => stxs.foldl (init := s.restore rctx.initialSize rctx.initialPos) (·.pushSyntax ·)
def recoverHereWithKeeping (stxs : Array Syntax) (keep : Nat) (p : ParserFn) : ParserFn :=
private def recoverHereWithKeeping (stxs : Array Syntax) (keep : Nat) (p : ParserFn) : ParserFn :=
recoverFn p fun rctx =>
show ParserFn from
fun _ s => stxs.foldl (init := s.restore (rctx.initialSize + keep) rctx.initialPos) (·.pushSyntax ·)
@@ -584,7 +584,7 @@ it's in a single-line context and whitespace may only be the space
character. If it's `some N`, then newlines are allowed, but `N` is the
minimum indentation column.
-/
def nameArgWhitespace : (multiline : Option Nat) → ParserFn
private def nameArgWhitespace : (multiline : Option Nat) → ParserFn
| none => eatSpaces
| some n => takeWhileFn (fun c => c == ' ' || c == '\n') >> guardMinColumn n
@@ -598,7 +598,7 @@ each sub-parser of `delimitedInline` contributes a clear expected-token name, an
unhelpful generic "unexpected" messages from inner parsers so that the more informative message
from `inlineTextChar` survives error merging via `<|>`.
-/
def expectedFn (msg : String) (p : ParserFn) : ParserFn := fun c s =>
private def expectedFn (msg : String) (p : ParserFn) : ParserFn := fun c s =>
let iniPos := s.pos
let s := p c s
if s.hasError && s.pos == iniPos then
@@ -649,18 +649,18 @@ def linebreak (ctxt : InlineCtxt) : ParserFn :=
else
errorFn "Newlines not allowed here"
partial def notInLink (ctxt : InlineCtxt) : ParserFn := fun _ s =>
private partial def notInLink (ctxt : InlineCtxt) : ParserFn := fun _ s =>
if ctxt.inLink then s.mkError "Already in a link" else s
-- Like `satisfyFn (· == '\n')` but with a better error message that mentions what was expected.
def newlineOrUnexpected (msg : String) : ParserFn := fun c s =>
private def newlineOrUnexpected (msg : String) : ParserFn := fun c s =>
let i := s.pos
if h : c.atEnd i then s.mkEOIError
else if c.get' i h == '\n' then s.next' c i h
else s.mkUnexpectedError s!"unexpected '{c.get' i h}'" [msg]
mutual
partial def emphLike
private partial def emphLike
(name : SyntaxNodeKind) (char : Char) (what plural : String)
(getter : InlineCtxt → Option Nat) (setter : InlineCtxt → Option Nat → InlineCtxt)
(ctxt : InlineCtxt) : ParserFn :=
@@ -799,7 +799,7 @@ mutual
nodeFn `str (asStringFn (quoted := true) (many1Fn (satisfyEscFn (fun c => c != ']' && c != '\n') "other than ']' or newline"))) >>
strFn "]")
partial def linkTarget : ParserFn := fun c s =>
private partial def linkTarget : ParserFn := fun c s =>
let s := (ref <|> url) c s
if s.hasError then
match s.errorMsg with
@@ -922,7 +922,7 @@ deriving Inhabited, Repr
Finds the minimum column of the first non-whitespace character on each non-empty content line
between `startPos` and `endPos`, returning `init` if no such line exists.
-/
def minContentIndent (text : FileMap) (startPos endPos : String.Pos.Raw)
private def minContentIndent (text : FileMap) (startPos endPos : String.Pos.Raw)
(init : Nat) : Nat := Id.run do
let mut result := init
let mut thisLineCol := 0
@@ -980,13 +980,13 @@ public def BlockCtxt.forDocString (text : FileMap)
else text.source.rawEndPos
{ docStartPosition := text.toPosition pos, baseColumn }
def bol (ctxt : BlockCtxt) : ParserFn := fun c s =>
private def bol (ctxt : BlockCtxt) : ParserFn := fun c s =>
let pos := c.fileMap.toPosition s.pos
if pos.column ctxt.baseColumn then s
else if pos.line == ctxt.docStartPosition.line && pos.column ctxt.docStartPosition.column then s
else s.mkErrorAt s!"beginning of line at {pos}" s.pos
def bolThen (ctxt : BlockCtxt) (p : ParserFn) (description : String) : ParserFn := fun c s =>
private def bolThen (ctxt : BlockCtxt) (p : ParserFn) (description : String) : ParserFn := fun c s =>
let position := c.fileMap.toPosition s.pos
let positionOk :=
position.column ctxt.baseColumn ||
@@ -1075,16 +1075,16 @@ public def lookaheadUnorderedListIndicator (ctxt : BlockCtxt) (p : UnorderedList
if s.hasError then s.setPos iniPos
else p type c (s.shrinkStack iniSz |>.setPos bulletPos)
def skipUntilDedent (indent : Nat) : ParserFn :=
private def skipUntilDedent (indent : Nat) : ParserFn :=
skipRestOfLine >>
manyFn (chFn ' ' >> takeWhileFn (· == ' ') >> guardColumn (· indent) s!"indentation at {indent}" >> skipRestOfLine)
def recoverUnindent (indent : Nat) (p : ParserFn) (finish : ParserFn := skipFn) :
private def recoverUnindent (indent : Nat) (p : ParserFn) (finish : ParserFn := skipFn) :
ParserFn :=
recoverFn p (fun _ => ignoreFn (skipUntilDedent indent) >> finish)
def blockSep := ignoreFn (manyFn blankLine >> optionalFn endLine)
private def blockSep := ignoreFn (manyFn blankLine >> optionalFn endLine)
mutual
/-- Parses a list item according to the current nesting context. -/

View File

@@ -74,7 +74,7 @@ def isValidAutoBoundLevelName (n : Name) (relaxed : Bool) : Bool :=
/--
Tracks extra context needed within the scope of `Lean.Elab.Term.withAutoBoundImplicit`
-/
structure AutoBoundImplicitContext where
public structure AutoBoundImplicitContext where
/--
This always matches the `autoImplicit` option; it is duplicated here in
order to support the behavior of the deprecated `Lean.Elab.Term.Context.autoImplicit`
@@ -95,7 +95,7 @@ instance : EmptyCollection AutoBoundImplicitContext where
Pushes a new variable onto the autoImplicit context, indicating that it needs
to be bound as an implicit parameter.
-/
def AutoBoundImplicitContext.push (ctx : AutoBoundImplicitContext) (x : Expr) :=
public def AutoBoundImplicitContext.push (ctx : AutoBoundImplicitContext) (x : Expr) :=
{ ctx with boundVariables := ctx.boundVariables.push x }
end Lean.Elab

View File

@@ -116,9 +116,8 @@ private def checkEndHeader : Name → List Scope → Option Name
addScope (isNewNamespace := false) (isNoncomputable := ncTk.isSome) (isPublic := publicTk.isSome) (isMeta := metaTk.isSome) (attrs := attrs) "" ( getCurrNamespace)
| _ => throwUnsupportedSyntax
@[builtin_command_elab InternalSyntax.end_local_scope] def elabEndLocalScope : CommandElab := fun stx => do
let depth := stx[1].toNat
setDelimitsLocal depth
@[builtin_command_elab InternalSyntax.end_local_scope] def elabEndLocalScope : CommandElab := fun _ => do
setDelimitsLocal
/--
Produces a `Name` composed of the names of at most the innermost `n` scopes in `ss`, truncating if an
@@ -529,7 +528,7 @@ open Lean.Parser.Command.InternalSyntax in
@[builtin_macro Lean.Parser.Command.«in»] def expandInCmd : Macro
| `($cmd₁ in%$tk $cmd₂) =>
-- Limit ref variability for incrementality; see Note [Incremental Macros]
withRef tk `(section $cmd₁:command $(endLocalScopeSyntax 1):command $cmd₂ end)
withRef tk `(section $cmd₁:command $endLocalScopeSyntax:command $cmd₂ end)
| _ => Macro.throwUnsupported
@[builtin_command_elab Parser.Command.addDocString] def elabAddDeclDoc : CommandElab := fun stx => do

View File

@@ -153,9 +153,6 @@ open Lean.Meta
let body
withLocalDeclsD xh fun xh => do
Term.addLocalVarInfo x xh[0]!
if let some h := h? then
Term.addLocalVarInfo h xh[1]!
withLocalDecl s .default σ (kind := .implDetail) fun loopS => do
mkLambdaFVars (xh.push loopS) <| do
bindMutVarsFromTuple loopMutVarNames loopS.fvarId! do

View File

@@ -314,23 +314,6 @@ private def mkSilentAnnotationIfHole (e : Expr) : TermElabM Expr := do
return val
| _ => panic! "resolveId? returned an unexpected expression"
/--
Rebuild a type application with fresh synthetic metavariables for instance-implicit arguments.
Non-instance-implicit arguments are assigned from the original application's arguments.
If the function is over-applied, extra arguments are preserved.
-/
private def resynthInstImplicitArgs (type : Expr) : TermElabM Expr := do
let fn := type.getAppFn
let args := type.getAppArgs
let (mvars, bis, _) forallMetaTelescope ( inferType fn)
for i in [:mvars.size] do
if bis[i]!.isInstImplicit then
mvars[i]!.mvarId!.assign ( mkInstMVar ( inferType mvars[i]!))
else
mvars[i]!.mvarId!.assign args[i]!
let args := mvars ++ args.drop mvars.size
instantiateMVars (mkAppN fn args)
@[builtin_term_elab Lean.Parser.Term.inferInstanceAs] def elabInferInstanceAs : TermElab := fun stx expectedType? => do
-- The type argument is the last child (works for both `inferInstanceAs T` and `inferInstanceAs <| T`)
let typeStx := stx[stx.getNumArgs - 1]!
@@ -342,21 +325,19 @@ private def resynthInstImplicitArgs (type : Expr) : TermElabM Expr := do
.note "`inferInstanceAs` requires full knowledge of the expected (\"target\") type to do its \
instance translation. If you do not intend to transport instances between two types, \
consider using `inferInstance` or `(inferInstance : expectedType)` instead.")
let type withSynthesize do
let type elabType typeStx
-- Unify with expected type to resolve metavariables (e.g., `_` placeholders)
discard <| isDefEq type expectedType
return type
-- Re-infer instance-implicit args, so that synthesis is not influenced by the expected type's
-- instance choices.
let type withSynthesize <| resynthInstImplicitArgs type
let type withSynthesize (postpone := .yes) <| elabType typeStx
-- Unify with expected type to resolve metavariables (e.g., `_` placeholders)
discard <| isDefEq type expectedType
let type instantiateMVars type
-- Rebuild type with fresh synthetic mvars for instance-implicit args, so that
-- synthesis is not influenced by the expected type's instance choices.
let type abstractInstImplicitArgs type
let inst synthInstance type
let inst if backward.inferInstanceAs.wrap.get ( getOptions) then
-- Wrap instance so its type matches the expected type exactly.
let logCompileErrors := !( read).isNoncomputableSection && !( read).declName?.any (Lean.isNoncomputable ( getEnv))
let isMeta := ( read).declName?.any (isMarkedMeta ( getEnv))
wrapInstance inst expectedType (logCompileErrors := logCompileErrors) (isMeta := isMeta)
withNewMCtxDepth <| wrapInstance inst expectedType (logCompileErrors := logCompileErrors) (isMeta := isMeta)
else
pure inst
ensureHasType expectedType? inst

View File

@@ -7,19 +7,11 @@ module
prelude
public import Lean.DocString.Add
public import Lean.Linter.Basic
meta import Lean.Parser.Command
public section
namespace Lean
register_builtin_option linter.redundantVisibility : Bool := {
defValue := false
descr := "warn on redundant `private`/`public` visibility modifiers"
}
namespace Elab
namespace Lean.Elab
/--
Ensure the environment does not contain a declaration with name `declName`.
@@ -85,30 +77,14 @@ def Visibility.isInferredPublic (env : Environment) (v : Visibility) : Bool :=
if env.isExporting || !env.header.isModule then !v.isPrivate else v.isPublic
/-- Converts optional visibility syntax to a `Visibility` value. -/
def elabVisibility [Monad m] [MonadError m] [MonadEnv m] [MonadOptions m] [MonadLog m]
[AddMessageContext m]
(vis? : Option (TSyntax ``Parser.Command.visibility)) :
m Visibility := do
let env getEnv
def elabVisibility [Monad m] [MonadError m] (vis? : Option (TSyntax ``Parser.Command.visibility)) :
m Visibility :=
match vis? with
| none => pure .regular
| some v =>
match v with
| `(Parser.Command.visibility| private) =>
if v.raw.getHeadInfo matches .original .. then -- skip macro output
if env.header.isModule && !env.isExporting then
Linter.logLintIf linter.redundantVisibility v
m!"`private` has no effect in a `module` file outside `public section`; \
declarations are already `private` by default"
pure .private
| `(Parser.Command.visibility| public) =>
if v.raw.getHeadInfo matches .original .. then -- skip macro output
if env.isExporting || !env.header.isModule then
Linter.logLintIf linter.redundantVisibility v
m!"`public` is the default visibility{
if env.header.isModule then " inside a `public section`" else ""
}; the modifier has no effect"
pure .public
| `(Parser.Command.visibility| private) => pure .private
| `(Parser.Command.visibility| public) => pure .public
| _ => throwErrorAt v "unexpected visibility modifier"
/-- Whether a declaration is default, partial or nonrec. -/

View File

@@ -152,9 +152,8 @@ def expandNamespacedDeclaration : Macro := fun stx => do
| some (ns, newStx) => do
-- Limit ref variability for incrementality; see Note [Incremental Macros]
let declTk := stx[1][0]
let depth := ns.getNumParts
let ns := mkIdentFrom declTk ns
withRef declTk `(namespace $ns $(endLocalScopeSyntax depth):command $(newStx) end $ns)
withRef declTk `(namespace $ns $endLocalScopeSyntax:command $(newStx) end $ns)
| none => Macro.throwUnsupported
@[builtin_command_elab declaration, builtin_incremental]

View File

@@ -172,7 +172,7 @@ def mkMatchNew (header : Header) (indVal : InductiveVal) (auxFunName : Name) : T
if indVal.numCtors == 1 then
`( $(mkCIdent casesOnSameCtorName) $x1:term $x2:term rfl $alts:term* )
else
`( match Nat.decEq ($(mkCIdent ctorIdxName) $x1:ident) ($(mkCIdent ctorIdxName) $x2:ident) with
`( match decEq ($(mkCIdent ctorIdxName) $x1:ident) ($(mkCIdent ctorIdxName) $x2:ident) with
| .isTrue h => $(mkCIdent casesOnSameCtorName) $x1:term $x2:term h $alts:term*
| .isFalse _ => false)

View File

@@ -89,7 +89,7 @@ where
let val
if isStructure ( getEnv) inductiveTypeName then
withTraceNode `Elab.Deriving.inhabited (fun _ => return m!"using structure instance elaborator") do
let stx `(structInstDefault| struct_inst_default%)
let stx `(structInst| {..})
withoutErrToSorry <| elabTermAndSynthesize stx type
else
withTraceNode `Elab.Deriving.inhabited (fun _ => return m!"using constructor `{.ofConstName ctorName}`") do

View File

@@ -384,7 +384,6 @@ def DoElemCont.mkBindUnlessPure (dec : DoElemCont) (e : Expr) : DoElabM Expr :=
let k := dec.k
-- The .ofBinderName below is mainly to interpret `__do_lift` binders as implementation details.
let declKind := .ofBinderName x
let kResultTy mkFreshResultType `kResultTy
withLocalDecl x .default eResultTy (kind := declKind) fun xFVar => do
let body k
let body' := body.consumeMData
@@ -412,6 +411,7 @@ def DoElemCont.mkBindUnlessPure (dec : DoElemCont) (e : Expr) : DoElabM Expr :=
-- else -- would be too aggressive
-- return ← mapLetDecl (nondep := true) (kind := declKind) x eResultTy eRes fun _ => k ref
let kResultTy mkFreshResultType `kResultTy
let body Term.ensureHasType ( mkMonadicType kResultTy) body
let k mkLambdaFVars #[xFVar] body
mkBindApp eResultTy kResultTy e k
@@ -545,10 +545,7 @@ def DoElemCont.withDuplicableCont (nondupDec : DoElemCont) (callerInfo : Control
withDeadCode (if callerInfo.numRegularExits > 0 then .alive else .deadSemantically) do
let e nondupDec.k
mkLambdaFVars (#[r] ++ muts) e
unless joinRhsMVar.mvarId!.checkedAssign joinRhs do
joinRhsMVar.mvarId!.withContext do
throwError "Bug in a `do` elaborator. Failed to assign join point RHS{indentExpr joinRhs}\n\
to metavariable\n{joinRhsMVar.mvarId!}"
discard <| joinRhsMVar.mvarId!.checkedAssign joinRhs
let body body?.getDM do
-- Here we unconditionally add a pending MVar.

View File

@@ -1038,19 +1038,19 @@ builtin_initialize registerBuiltinAttribute {
}
end
unsafe def codeSuggestionsUnsafe : TermElabM (Array (StrLit DocM (Array CodeSuggestion))) := do
private unsafe def codeSuggestionsUnsafe : TermElabM (Array (StrLit DocM (Array CodeSuggestion))) := do
let names := (codeSuggestionExt.getState ( getEnv)) |>.toArray
return ( names.mapM (evalConst _)) ++ ( builtinCodeSuggestions.get).map (·.2)
@[implemented_by codeSuggestionsUnsafe]
opaque codeSuggestions : TermElabM (Array (StrLit DocM (Array CodeSuggestion)))
private opaque codeSuggestions : TermElabM (Array (StrLit DocM (Array CodeSuggestion)))
unsafe def codeBlockSuggestionsUnsafe : TermElabM (Array (StrLit DocM (Array CodeBlockSuggestion))) := do
private unsafe def codeBlockSuggestionsUnsafe : TermElabM (Array (StrLit DocM (Array CodeBlockSuggestion))) := do
let names := (codeBlockSuggestionExt.getState ( getEnv)) |>.toArray
return ( names.mapM (evalConst _)) ++ ( builtinCodeBlockSuggestions.get).map (·.2)
@[implemented_by codeBlockSuggestionsUnsafe]
opaque codeBlockSuggestions : TermElabM (Array (StrLit DocM (Array CodeBlockSuggestion)))
private opaque codeBlockSuggestions : TermElabM (Array (StrLit DocM (Array CodeBlockSuggestion)))
/--
Resolves a name against `NameMap` that contains a list of builtin expanders, taking into account
@@ -1060,7 +1060,7 @@ resolution (`realizeGlobalConstNoOverload`) won't find them.
This is called as a fallback when the identifier can't be resolved.
-/
def resolveBuiltinDocName {α : Type} (builtins : NameMap α) (x : Name) : TermElabM (Option α) := do
private def resolveBuiltinDocName {α : Type} (builtins : NameMap α) (x : Name) : TermElabM (Option α) := do
if let some v := builtins.get? x then return some v
-- Builtins shouldn't require a prefix, as they're part of the language.
@@ -1089,7 +1089,7 @@ def resolveBuiltinDocName {α : Type} (builtins : NameMap α) (x : Name) : TermE
return none
unsafe def roleExpandersForUnsafe (roleName : Ident) :
private unsafe def roleExpandersForUnsafe (roleName : Ident) :
TermElabM (Array (Name × (TSyntaxArray `inline StateT (Array (TSyntax `doc_arg)) DocM (Inline ElabInline)))) := do
let x?
try some <$> realizeGlobalConstNoOverload roleName
@@ -1105,10 +1105,10 @@ unsafe def roleExpandersForUnsafe (roleName : Ident) :
@[implemented_by roleExpandersForUnsafe]
opaque roleExpandersFor (roleName : Ident) :
private opaque roleExpandersFor (roleName : Ident) :
TermElabM (Array (Name × (TSyntaxArray `inline StateT (Array (TSyntax `doc_arg)) DocM (Inline ElabInline))))
unsafe def codeBlockExpandersForUnsafe (codeBlockName : Ident) :
private unsafe def codeBlockExpandersForUnsafe (codeBlockName : Ident) :
TermElabM (Array (Name × (StrLit StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock)))) := do
let x?
try some <$> realizeGlobalConstNoOverload codeBlockName
@@ -1124,10 +1124,10 @@ unsafe def codeBlockExpandersForUnsafe (codeBlockName : Ident) :
@[implemented_by codeBlockExpandersForUnsafe]
opaque codeBlockExpandersFor (codeBlockName : Ident) :
private opaque codeBlockExpandersFor (codeBlockName : Ident) :
TermElabM (Array (Name × (StrLit StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock))))
unsafe def directiveExpandersForUnsafe (directiveName : Ident) :
private unsafe def directiveExpandersForUnsafe (directiveName : Ident) :
TermElabM (Array (Name × (TSyntaxArray `block StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock)))) := do
let x?
try some <$> realizeGlobalConstNoOverload directiveName
@@ -1142,10 +1142,10 @@ unsafe def directiveExpandersForUnsafe (directiveName : Ident) :
return hasBuiltin.toArray.flatten
@[implemented_by directiveExpandersForUnsafe]
opaque directiveExpandersFor (directiveName : Ident) :
private opaque directiveExpandersFor (directiveName : Ident) :
TermElabM (Array (Name × (TSyntaxArray `block StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock))))
unsafe def commandExpandersForUnsafe (commandName : Ident) :
private unsafe def commandExpandersForUnsafe (commandName : Ident) :
TermElabM (Array (Name × StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock))) := do
let x?
try some <$> realizeGlobalConstNoOverload commandName
@@ -1161,18 +1161,18 @@ unsafe def commandExpandersForUnsafe (commandName : Ident) :
return hasBuiltin.toArray.flatten
@[implemented_by commandExpandersForUnsafe]
opaque commandExpandersFor (commandName : Ident) :
private opaque commandExpandersFor (commandName : Ident) :
TermElabM (Array (Name × StateT (Array (TSyntax `doc_arg)) DocM (Block ElabInline ElabBlock)))
def mkArgVal (arg : TSyntax `arg_val) : DocM Term :=
private def mkArgVal (arg : TSyntax `arg_val) : DocM Term :=
match arg with
| `(arg_val|$n:ident) => pure n
| `(arg_val|$n:num) => pure n
| `(arg_val|$s:str) => pure s
| _ => throwErrorAt arg "Didn't understand as argument value"
def mkArg (arg : TSyntax `doc_arg) : DocM (TSyntax ``Parser.Term.argument) := do
private def mkArg (arg : TSyntax `doc_arg) : DocM (TSyntax ``Parser.Term.argument) := do
match arg with
| `(doc_arg|$x:arg_val) =>
let x mkArgVal x
@@ -1190,7 +1190,7 @@ def mkArg (arg : TSyntax `doc_arg) : DocM (TSyntax ``Parser.Term.argument) := do
`(Parser.Term.argument| ($x := $v))
| _ => throwErrorAt arg "Didn't understand as argument"
def mkAppStx (name : Ident) (args : TSyntaxArray `doc_arg) : DocM Term := do
private def mkAppStx (name : Ident) (args : TSyntaxArray `doc_arg) : DocM Term := do
return mkNode ``Parser.Term.app #[name, mkNullNode ( args.mapM mkArg)]
/--
@@ -1204,7 +1204,7 @@ register_builtin_option doc.verso.suggestions : Bool := {
-- Normally, name suggestions should be provided relative to the current scope. But
-- during bootstrapping, the names in question may not yet be defined, so builtin
-- names need special handling.
def suggestionName (name : Name) : TermElabM Name := do
private def suggestionName (name : Name) : TermElabM Name := do
let name'
-- Builtin expander names never need namespacing
if ( builtinDocRoles.get).contains name then pure (some name)
@@ -1241,7 +1241,7 @@ def suggestionName (name : Name) : TermElabM Name := do
-- Fall back to doing nothing
pure name
def sortSuggestions (ss : Array Meta.Hint.Suggestion) : Array Meta.Hint.Suggestion :=
private def sortSuggestions (ss : Array Meta.Hint.Suggestion) : Array Meta.Hint.Suggestion :=
let cmp : (x y : Meta.Tactic.TryThis.SuggestionText) Bool
| .string s1, .string s2 => s1 < s2
| .string _, _ => true
@@ -1250,7 +1250,7 @@ def sortSuggestions (ss : Array Meta.Hint.Suggestion) : Array Meta.Hint.Suggesti
ss.qsort (cmp ·.suggestion ·.suggestion)
open Diff in
def mkSuggestion
private def mkSuggestion
(ref : Syntax) (hintTitle : MessageData)
(newStrings : Array (String × Option String × Option String)) :
DocM MessageData := do
@@ -1281,7 +1281,7 @@ def nameOrBuiltinName [Monad m] [MonadEnv m] (x : Name) : m Name := do
Finds registered expander names that `x` is a suffix of, for use in error message hints when the
name is shadowed. Returns display names suitable for `mkSuggestion`.
-/
def findShadowedNames {α : Type}
private def findShadowedNames {α : Type}
(nonBuiltIns : NameMap (Array Name)) (builtins : NameMap α) (x : Name) :
TermElabM (Array Name) := do
if x.isAnonymous then return #[]
@@ -1303,7 +1303,7 @@ def findShadowedNames {α : Type}
/--
Builds a hint for an "Unknown role/directive/..." error when the name might be shadowed.
-/
def shadowedHint {α : Type}
private def shadowedHint {α : Type}
(envEntries : NameMap (Array Name)) (builtins : NameMap α)
(name : Ident) (kind : String) : DocM MessageData := do
let candidates findShadowedNames envEntries builtins name.getId
@@ -1316,7 +1316,7 @@ Throws an appropriate error for an unknown doc element (role/directive/code bloc
Distinguishes "name resolves but isn't registered" from "name doesn't resolve at all",
and includes shadowed-name suggestions when applicable.
-/
def throwUnknownDocElem {α β : Type}
private def throwUnknownDocElem {α β : Type}
(envEntries : NameMap (Array Name)) (builtins : NameMap α)
(name : Ident) (kind : String) : DocM β := do
let hint shadowedHint envEntries builtins name kind
@@ -1545,12 +1545,12 @@ where
withSpace (s : String) : String :=
if s.endsWith " " then s else s ++ " "
def takeFirst? (xs : Array α) : Option (α × Array α) :=
private def takeFirst? (xs : Array α) : Option (α × Array α) :=
if h : xs.size > 0 then
some (xs[0], xs.extract 1)
else none
partial def elabBlocks' (level : Nat) :
private partial def elabBlocks' (level : Nat) :
StateT (TSyntaxArray `block) DocM (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty)) := do
let mut pre := #[]
let mut sub := #[]
@@ -1586,7 +1586,7 @@ partial def elabBlocks' (level : Nat) :
break
return (pre, sub)
def elabModSnippet'
private def elabModSnippet'
(range : DeclarationRange) (level : Nat) (blocks : TSyntaxArray `block) :
DocM VersoModuleDocs.Snippet := do
let mut snippet : VersoModuleDocs.Snippet := {
@@ -1616,7 +1616,7 @@ def elabModSnippet'
snippet := snippet.addBlock ( elabBlock b)
return snippet
partial def fixupInline (inl : Inline ElabInline) : DocM (Inline ElabInline) := do
private partial def fixupInline (inl : Inline ElabInline) : DocM (Inline ElabInline) := do
match inl with
| .concat xs => .concat <$> xs.mapM fixupInline
| .emph xs => .emph <$> xs.mapM fixupInline
@@ -1663,7 +1663,7 @@ partial def fixupInline (inl : Inline ElabInline) : DocM (Inline ElabInline) :=
return .empty
| _ => .other i <$> xs.mapM fixupInline
partial def fixupBlock (block : Block ElabInline ElabBlock) : DocM (Block ElabInline ElabBlock) := do
private partial def fixupBlock (block : Block ElabInline ElabBlock) : DocM (Block ElabInline ElabBlock) := do
match block with
| .para xs => .para <$> xs.mapM fixupInline
| .concat xs => .concat <$> xs.mapM fixupBlock
@@ -1677,7 +1677,7 @@ partial def fixupBlock (block : Block ElabInline ElabBlock) : DocM (Block ElabIn
| .code s => pure (.code s)
| .other i xs => .other i <$> xs.mapM fixupBlock
partial def fixupPart (part : Part ElabInline ElabBlock Empty) : DocM (Part ElabInline ElabBlock Empty) := do
private partial def fixupPart (part : Part ElabInline ElabBlock Empty) : DocM (Part ElabInline ElabBlock Empty) := do
return { part with
title := part.title.mapM fixupInline
content := part.content.mapM fixupBlock,
@@ -1685,13 +1685,13 @@ partial def fixupPart (part : Part ElabInline ElabBlock Empty) : DocM (Part Elab
}
partial def fixupBlocks : (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty)) DocM (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty))
private partial def fixupBlocks : (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty)) DocM (Array (Block ElabInline ElabBlock) × Array (Part ElabInline ElabBlock Empty))
| (bs, ps) => do
let bs bs.mapM fixupBlock
let ps ps.mapM fixupPart
return (bs, ps)
partial def fixupSnippet (snippet : VersoModuleDocs.Snippet) : DocM VersoModuleDocs.Snippet := do
private partial def fixupSnippet (snippet : VersoModuleDocs.Snippet) : DocM VersoModuleDocs.Snippet := do
return {snippet with
text := snippet.text.mapM fixupBlock,
sections := snippet.sections.mapM fun (level, range, content) => do
@@ -1700,7 +1700,7 @@ partial def fixupSnippet (snippet : VersoModuleDocs.Snippet) : DocM VersoModuleD
/--
After fixing up the references, check to see which were not used and emit a suitable warning.
-/
def warnUnusedRefs : DocM Unit := do
private def warnUnusedRefs : DocM Unit := do
for (_, {location, seen, ..}) in ( getThe InternalState).urls do
unless seen do
logWarningAt location "Unused URL"

View File

@@ -31,7 +31,7 @@ structure Data.Atom where
deriving TypeName
def onlyCode [Monad m] [MonadError m] (xs : TSyntaxArray `inline) : m StrLit := do
private def onlyCode [Monad m] [MonadError m] (xs : TSyntaxArray `inline) : m StrLit := do
if h : xs.size = 1 then
match xs[0] with
| `(inline|code($s)) => return s
@@ -43,7 +43,7 @@ def onlyCode [Monad m] [MonadError m] (xs : TSyntaxArray `inline) : m StrLit :=
/--
Checks whether a syntax descriptor's value contains the given atom.
-/
partial def containsAtom (e : Expr) (atom : String) : MetaM Bool := do
private partial def containsAtom (e : Expr) (atom : String) : MetaM Bool := do
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM Bool := do
match p.getAppFnArgs with
| (``ParserDescr.node, #[_, _, p]) => containsAtom p atom
@@ -67,7 +67,7 @@ partial def containsAtom (e : Expr) (atom : String) : MetaM Bool := do
Checks whether a syntax descriptor's value contains the given atom. If so, the residual value after
the atom is returned.
-/
partial def containsAtom' (e : Expr) (atom : String) : MetaM (Option Expr) := do
private partial def containsAtom' (e : Expr) (atom : String) : MetaM (Option Expr) := do
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM (Option Expr) := do
match p.getAppFnArgs with
| (``ParserDescr.node, #[_, _, p]) => containsAtom' p atom
@@ -92,7 +92,7 @@ partial def containsAtom' (e : Expr) (atom : String) : MetaM (Option Expr) := do
| _ => if tryWhnf then attempt ( Meta.whnf p) false else pure none
attempt e true
partial def canEpsilon (e : Expr) : MetaM Bool := do
private partial def canEpsilon (e : Expr) : MetaM Bool := do
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM Bool := do
match p.getAppFnArgs with
| (``ParserDescr.node, #[_, _, p]) => canEpsilon p
@@ -118,7 +118,7 @@ partial def canEpsilon (e : Expr) : MetaM Bool := do
Checks whether a syntax descriptor's value begins with the given atom. If so, the residual value
after the atom is returned.
-/
partial def startsWithAtom? (e : Expr) (atom : String) : MetaM (Option Expr) := do
private partial def startsWithAtom? (e : Expr) (atom : String) : MetaM (Option Expr) := do
let rec attempt (p : Expr) (tryWhnf : Bool) : MetaM (Option Expr) := do
match p.getAppFnArgs with
| (``ParserDescr.node, #[_, _, p]) => startsWithAtom? p atom
@@ -149,7 +149,7 @@ partial def startsWithAtom? (e : Expr) (atom : String) : MetaM (Option Expr) :=
Checks whether a syntax descriptor's value begins with the given atoms. If so, the residual value
after the atoms is returned.
-/
partial def startsWithAtoms? (e : Expr) (atoms : List String) : MetaM (Option Expr) := do
private partial def startsWithAtoms? (e : Expr) (atoms : List String) : MetaM (Option Expr) := do
match atoms with
| [] => pure e
| a :: as =>
@@ -157,7 +157,7 @@ partial def startsWithAtoms? (e : Expr) (atoms : List String) : MetaM (Option Ex
startsWithAtoms? e' as
else pure none
partial def exprContainsAtoms (e : Expr) (atoms : List String) : MetaM Bool := do
private partial def exprContainsAtoms (e : Expr) (atoms : List String) : MetaM Bool := do
match atoms with
| [] => pure true
| a :: as =>
@@ -165,7 +165,7 @@ partial def exprContainsAtoms (e : Expr) (atoms : List String) : MetaM Bool := d
(startsWithAtoms? e' as <&> Option.isSome) <||> exprContainsAtoms e' (a :: as)
else pure false
def withAtom (cat : Name) (atom : String) : DocM (Array Name) := do
private def withAtom (cat : Name) (atom : String) : DocM (Array Name) := do
let env getEnv
let some catContents := (Lean.Parser.parserExtension.getState env).categories.find? cat
| return #[]
@@ -177,7 +177,7 @@ def withAtom (cat : Name) (atom : String) : DocM (Array Name) := do
found := found.push k
return found
partial def isAtoms (atoms : List String) (stx : Syntax) : Bool :=
private partial def isAtoms (atoms : List String) (stx : Syntax) : Bool :=
StateT.run (go [stx]) atoms |>.fst
where
go (stxs : List Syntax) : StateM (List String) Bool := do
@@ -196,7 +196,7 @@ where
| .node _ _ args :: ss =>
go (args.toList ++ ss)
def parserHasAtomPrefix (atoms : List String) (p : Parser) : TermElabM Bool := do
private def parserHasAtomPrefix (atoms : List String) (p : Parser) : TermElabM Bool := do
let str := " ".intercalate atoms
let env getEnv
let options getOptions
@@ -206,16 +206,16 @@ def parserHasAtomPrefix (atoms : List String) (p : Parser) : TermElabM Bool := d
let s := p.fn.run {inputString := str, fileName := "", fileMap := FileMap.ofString str} {env, options} (getTokenTable env) s
return isAtoms atoms (mkNullNode (s.stxStack.extract 1 s.stxStack.size))
unsafe def namedParserHasAtomPrefixUnsafe (atoms : List String) (parserName : Name) : TermElabM Bool := do
private unsafe def namedParserHasAtomPrefixUnsafe (atoms : List String) (parserName : Name) : TermElabM Bool := do
try
let p evalConstCheck Parser ``Parser parserName
parserHasAtomPrefix atoms p
catch | _ => pure false
@[implemented_by namedParserHasAtomPrefixUnsafe]
opaque namedParserHasAtomPrefix (atoms : List String) (parserName : Name) : TermElabM Bool
private opaque namedParserHasAtomPrefix (atoms : List String) (parserName : Name) : TermElabM Bool
def parserDescrCanEps : ParserDescr Bool
private def parserDescrCanEps : ParserDescr Bool
| .node _ _ p | .trailingNode _ _ _ p => parserDescrCanEps p
| .binary ``Parser.andthen p1 p2 => parserDescrCanEps p1 && parserDescrCanEps p2
| .binary ``Parser.orelse p1 p2 => parserDescrCanEps p1 || parserDescrCanEps p2
@@ -227,7 +227,7 @@ def parserDescrCanEps : ParserDescr → Bool
| .const ``Parser.ppHardSpace => true
| _ => false
def parserDescrHasAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
private def parserDescrHasAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
match p with
| .node _ _ p | .trailingNode _ _ _ p | .unary _ p =>
parserDescrHasAtom atom p
@@ -249,7 +249,7 @@ def parserDescrHasAtom (atom : String) (p : ParserDescr) : TermElabM (Option Par
| none, none => pure none
| _ => pure none
def parserDescrStartsWithAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
private def parserDescrStartsWithAtom (atom : String) (p : ParserDescr) : TermElabM (Option ParserDescr) := do
match p with
| .node _ _ p | .trailingNode _ _ _ p | .unary _ p =>
parserDescrStartsWithAtom atom p
@@ -272,7 +272,7 @@ def parserDescrStartsWithAtom (atom : String) (p : ParserDescr) : TermElabM (Opt
| none, none => pure none
| _ => pure none
def parserDescrStartsWithAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
private def parserDescrStartsWithAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
match atoms with
| [] => pure true
| a :: as =>
@@ -280,7 +280,7 @@ def parserDescrStartsWithAtoms (atoms : List String) (p : ParserDescr) : TermEla
parserDescrStartsWithAtoms as p'
else pure false
partial def parserDescrHasAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
private partial def parserDescrHasAtoms (atoms : List String) (p : ParserDescr) : TermElabM Bool := do
match atoms with
| [] => pure true
| a :: as =>
@@ -289,16 +289,16 @@ partial def parserDescrHasAtoms (atoms : List String) (p : ParserDescr) : TermEl
else parserDescrHasAtoms (a :: as) p'
else pure false
unsafe def parserDescrNameHasAtomsUnsafe (atoms : List String) (p : Name) : TermElabM Bool := do
private unsafe def parserDescrNameHasAtomsUnsafe (atoms : List String) (p : Name) : TermElabM Bool := do
try
let p evalConstCheck ParserDescr ``ParserDescr p
parserDescrHasAtoms atoms p
catch | _ => pure false
@[implemented_by parserDescrNameHasAtomsUnsafe]
opaque parserDescrNameHasAtoms (atoms : List String) (p : Name) : TermElabM Bool
private opaque parserDescrNameHasAtoms (atoms : List String) (p : Name) : TermElabM Bool
def kindHasAtoms (k : Name) (atoms : List String) : TermElabM Bool := do
private def kindHasAtoms (k : Name) (atoms : List String) : TermElabM Bool := do
let env getEnv
if let some ci := env.find? k then
if let some d := ci.value? then
@@ -312,7 +312,7 @@ def kindHasAtoms (k : Name) (atoms : List String) : TermElabM Bool := do
return true
return false
def withAtoms (cat : Name) (atoms : List String) : TermElabM (Array Name) := do
private def withAtoms (cat : Name) (atoms : List String) : TermElabM (Array Name) := do
let env getEnv
let some catContents := (Lean.Parser.parserExtension.getState env).categories.find? cat
| return #[]
@@ -323,7 +323,7 @@ def withAtoms (cat : Name) (atoms : List String) : TermElabM (Array Name) := do
found := found.push k
return found
def kwImpl (cat : Ident := mkIdent .anonymous) (of : Ident := mkIdent .anonymous)
private def kwImpl (cat : Ident := mkIdent .anonymous) (of : Ident := mkIdent .anonymous)
(suggest : Bool)
(s : StrLit) : TermElabM (Inline ElabInline) := do
let atoms := s.getString |>.split Char.isWhitespace |>.toStringList

View File

@@ -55,7 +55,7 @@ def unfoldLHS (declName : Name) (mvarId : MVarId) : MetaM MVarId := mvarId.withC
-- Else use delta reduction
deltaLHS mvarId
partial def mkEqnProof (declName : Name) (type : Expr) : MetaM Expr := do
private partial def mkEqnProof (declName : Name) (type : Expr) : MetaM Expr := do
withTraceNode `Elab.definition.eqns (fun _ => return m!"proving:{indentExpr type}") do
withNewMCtxDepth do
let main mkFreshExprSyntheticOpaqueMVar type
@@ -102,7 +102,7 @@ partial def mkEqnProof (declName : Name) (type : Expr) : MetaM Expr := do
else
throwError "failed to generate equational theorem for `{.ofConstName declName}`\n{MessageData.ofGoal mvarId}"
def lhsDependsOn (type : Expr) (fvarId : FVarId) : MetaM Bool :=
private def lhsDependsOn (type : Expr) (fvarId : FVarId) : MetaM Bool :=
forallTelescope type fun _ type => do
if let some (_, lhs, _) matchEq? type then
dependsOn lhs fvarId

View File

@@ -150,8 +150,6 @@ structure SourcesView where
explicit : Array ExplicitSourceView
/-- The syntax for a trailing `..`. This is "ellipsis mode" for missing fields, similar to ellipsis mode for applications. -/
implicit : Option Syntax
/-- Use `Inhabited` instances inherited from parent structures, and use `default` instances for missing fields. -/
useInhabited : Bool
deriving Inhabited
/--
@@ -181,7 +179,7 @@ private def getStructSources (structStx : Syntax) : TermElabM SourcesView :=
let structName getStructureName srcType
return { stx, fvar := src, structName }
let implicit := if implicitSource[0].isNone then none else implicitSource
return { explicit, implicit, useInhabited := false }
return { explicit, implicit }
/--
We say a structure instance notation is a "modifyOp" if it contains only a single array update.
@@ -581,9 +579,6 @@ private structure StructInstContext where
/-- If true, then try using default values or autoParams for missing fields.
(Considered after `useParentInstanceFields`.) -/
useDefaults : Bool
/-- If true, then tries `Inhabited` instances as an alternative to parent instances,
and when default values are missing. -/
useInhabited : Bool
/-- If true, then missing fields after default value synthesis remain as metavariables rather than yielding an error.
Only applies if `useDefaults` is true. -/
unsynthesizedAsMVars : Bool
@@ -740,27 +735,14 @@ The arguments for the `_default` auxiliary function are provided by `fieldMap`.
After default values are resolved, then the one that is added to the environment
as an `_inherited_default` auxiliary function is normalized; we don't do those normalizations here.
-/
private def getFieldDefaultValue? (fieldName : Name) : StructInstM (Option (NameSet × Expr)) := do
private partial def getFieldDefaultValue? (fieldName : Name) : StructInstM (NameSet × Option Expr) := do
let some defFn := getEffectiveDefaultFnForField? ( getEnv) ( read).structName fieldName
| return none
| return ({}, none)
let fieldMap := ( get).fieldMap
let some (fields, val) instantiateStructDefaultValueFn? defFn ( read).levels ( read).params (pure fieldMap.find?)
| logError m!"default value for field `{fieldName}` of structure `{.ofConstName (← read).structName}` could not be instantiated, ignoring"
return none
return some (fields, val)
/--
If `useInhabited` is enabled, tries synthesizing an `Inhabited` instance for the field.
-/
private def getFieldDefaultValueUsingInhabited (fieldType : Expr) : StructInstM (Option (NameSet × Expr)) := do
if ( read).useInhabited then
try
let val mkDefault fieldType
return some ({}, val)
catch _ =>
return none
else
return none
return ({}, none)
return (fields, val)
/--
Auxiliary type for `synthDefaultFields`
@@ -769,16 +751,8 @@ private structure PendingField where
fieldName : Name
fieldType : Expr
required : Bool
/-- If present, field dependencies and the default value. -/
val? : Option (NameSet × Expr)
private def PendingField.isReady (pendingField : PendingField) (hasDep : Name Bool) : Bool :=
pendingField.val?.any fun (deps, _) => deps.all hasDep
private def PendingField.val! (pendingField : PendingField) : Expr :=
match pendingField.val? with
| some (_, val) => val
| none => panic! "PendingField has no value"
deps : NameSet
val? : Option Expr
private def registerFieldMVarError (e : Expr) (ref : Syntax) (fieldName : Name) : StructInstM Unit :=
registerCustomErrorIfMVar e ref m!"Cannot synthesize placeholder for field `{fieldName}`"
@@ -812,15 +786,12 @@ private def synthOptParamFields : StructInstM Unit := do
-- Process default values for pending optParam fields.
let mut pendingFields : Array PendingField optParamFields.filterMapM fun (fieldName, fieldType, required) => do
if required || ( isFieldNotSolved? fieldName).isSome then
let val? if ( read).useDefaults then getFieldDefaultValue? fieldName else pure none
let val? pure val? <||> if ( read).useInhabited then getFieldDefaultValueUsingInhabited fieldType else pure none
trace[Elab.struct]
if let some (deps, val) := val? then
m!"default value for {fieldName}:{inlineExpr val}" ++
if deps.isEmpty then m!"" else m!"(depends on fields {deps.toArray})"
else
m!"no default value for {fieldName}"
pure <| some { fieldName, fieldType, required, val? }
let (deps, val?) if ( read).useDefaults then getFieldDefaultValue? fieldName else pure ({}, none)
if let some val := val? then
trace[Elab.struct] "default value for {fieldName}:{indentExpr val}"
else
trace[Elab.struct] "no default value for {fieldName}"
pure <| some { fieldName, fieldType, required, deps, val? }
else
pure none
-- We then iteratively look for pending fields that do not depend on unsolved-for fields.
@@ -828,11 +799,12 @@ private def synthOptParamFields : StructInstM Unit := do
-- so we need to keep trying until no more progress is made.
let mut pendingSet : NameSet := pendingFields.foldl (init := {}) fun set pending => set.insert pending.fieldName
while !pendingSet.isEmpty do
let selectedFields := pendingFields.filter (·.isReady (!pendingSet.contains ·))
let selectedFields := pendingFields.filter fun pendingField =>
pendingField.val?.isSome && pendingField.deps.all (fun dep => !pendingSet.contains dep)
let mut toRemove : Array Name := #[]
let mut assignErrors : Array MessageData := #[]
for selected in selectedFields do
let selectedVal := selected.val!
let some selectedVal := selected.val? | unreachable!
if let some mvarId isFieldNotSolved? selected.fieldName then
let fieldType := selected.fieldType
let selectedType inferType selectedVal
@@ -1112,7 +1084,6 @@ private def processNoField (loop : StructInstM α) (fieldName : Name) (binfo : B
addStructFieldAux fieldName mvar
return loop
-- Default case: natural metavariable, register it for optParams
let fieldType := fieldType.consumeTypeAnnotations
discard <| addStructFieldMVar fieldName fieldType
modify fun s => { s with optParamFields := s.optParamFields.push (fieldName, fieldType, binfo.isExplicit) }
loop
@@ -1140,44 +1111,29 @@ private partial def loop : StructInstM Expr := withViewRef do
For each parent class, see if it can be used to synthesize the fields that haven't been provided.
-/
private partial def addParentInstanceFields : StructInstM Unit := do
let env getEnv
let structName := ( read).structName
let fieldNames := getStructureFieldsFlattened ( getEnv) structName (includeSubobjectFields := false)
let fieldNames := getStructureFieldsFlattened env structName (includeSubobjectFields := false)
let fieldViews := ( read).fieldViews
if fieldNames.all fieldViews.contains then
-- Every field is accounted for already
return
-- We look at parents in resolution order
-- We look at class parents in resolution order
let parents getAllParentStructures structName
let env getEnv
let parentsToVisit := if ( read).useInhabited then parents else parents.filter (isClass env)
if parentsToVisit.isEmpty then return
let classParents := parents.filter (isClass env)
if classParents.isEmpty then return
let allowedFields := fieldNames.filter (!fieldViews.contains ·)
let mut remainingFields := allowedFields
-- Worklist of parent/fields pairs. If fields is empty, then it will be computed later.
let mut worklist : List (Name × Array Name) := parentsToVisit |>.map (·, #[]) |>.toList
let mut worklist : List (Name × Array Name) := classParents |>.map (·, #[]) |>.toList
let mut deferred : List (Name × Array Name) := []
let trySynthParent (parentName : Name) (parentTy : Expr) : StructInstM (LOption Expr) := do
if isClass ( getEnv) parentName then
match trySynthInstance parentTy with
| .none => pure ()
| r => return r
if ( read).useInhabited then
let u getLevel parentTy
let cls := Expr.app (Expr.const ``Inhabited [u]) parentTy
match trySynthInstance cls with
| .none => pure ()
| .undef => return .undef
| .some inst => return .some <| mkApp2 (Expr.const ``Inhabited.default [u]) parentTy inst
return .none
while !worklist.isEmpty do
let (parentName, parentFields) :: worklist' := worklist | unreachable!
worklist := worklist'
let env getEnv
let parentFields := if parentFields.isEmpty then getStructureFieldsFlattened env parentName (includeSubobjectFields := false) else parentFields
-- We only try synthesizing if the parent contains one of the remaining fields
-- and if every parent field is an allowed field.
@@ -1189,7 +1145,7 @@ private partial def addParentInstanceFields : StructInstM Unit := do
trace[Elab.struct] "could not calculate type for parent `{.ofConstName parentName}`"
deferred := (parentName, parentFields) :: deferred
| some (parentTy, _) =>
match trySynthParent parentName parentTy with
match trySynthInstance parentTy with
| .none => trace[Elab.struct] "failed to synthesize instance for parent {parentTy}"
| .undef =>
trace[Elab.struct] "instance synthesis stuck for parent {parentTy}"
@@ -1243,19 +1199,17 @@ private def elabStructInstView (s : StructInstView) (structName : Name) (structT
let fields addSourceFields structName s.sources.explicit fields
trace[Elab.struct] "expanded fields:\n{MessageData.joinSep (fields.toList.map (fun (_, field) => m!"- {MessageData.nestD (toMessageData field)}")) "\n"}"
let ellipsis := s.sources.implicit.isSome
let useInhabited := s.sources.useInhabited
let (val, _) main
|>.run { view := s, structName, structType, levels, params, fieldViews := fields, val := ctorFn
-- See the note in `ElabAppArgs.processExplicitArg`
-- For structure instances though, there's a sense in which app-style ellipsis mode is always enabled,
-- so we do not specifically check for it to disable defaults.
-- An effect of this is that if a user forgets `..` they'll be reminded with a "Fields missing" error.
useDefaults := !( readThe Term.Context).inPattern || useInhabited
useDefaults := !( readThe Term.Context).inPattern
-- Similarly, for patterns we disable using parent instances to fill in fields
useParentInstanceFields := !( readThe Term.Context).inPattern || useInhabited
useParentInstanceFields := !( readThe Term.Context).inPattern
-- In ellipsis mode, unsynthesized missing fields become metavariables, rather than being an error
unsynthesizedAsMVars := ellipsis
useInhabited := useInhabited
}
|>.run { type := ctorFnType }
return val
@@ -1379,15 +1333,6 @@ where
trace[Elab.struct] "result:{indentExpr r}"
return r
@[builtin_term_elab structInstDefault] def elabStructInstDefault : TermElab := fun stx expectedType? => do
let sourcesView : SourcesView := { explicit := #[], implicit := none, useInhabited := true }
let (structName, structType?) getStructName expectedType? sourcesView
withTraceNode `Elab.struct (fun _ => return m!"elaborating default value for `{structName}`") do
let struct : StructInstView := { ref := stx, fields := #[], sources := sourcesView }
let r withSynthesize (postpone := .yes) <| elabStructInstView struct structName structType?
trace[Elab.struct] "result:{indentExpr r}"
return r
builtin_initialize
registerTraceClass `Elab.struct
registerTraceClass `Elab.struct.modifyOp

View File

@@ -73,19 +73,19 @@ def hasErrorExplanation [Monad m] [MonadEnv m] (name : Name) : m Bool :=
return errorExplanationExt.getState ( getEnv) |>.contains name
/-- Returns all error explanations with their names, sorted by name. -/
def getErrorExplanations [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
public def getErrorExplanations [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
return errorExplanationExt.getState ( getEnv)
|>.toArray
|>.qsort fun e e' => e.1.toString < e'.1.toString
@[deprecated getErrorExplanations (since := "2026-12-20")]
def getErrorExplanationsRaw (env : Environment) : Array (Name × ErrorExplanation) :=
public def getErrorExplanationsRaw (env : Environment) : Array (Name × ErrorExplanation) :=
errorExplanationExt.getState env
|>.toArray
|>.qsort fun e e' => e.1.toString < e'.1.toString
@[deprecated getErrorExplanations (since := "2026-12-20")]
def getErrorExplanationsSorted [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
public def getErrorExplanationsSorted [Monad m] [MonadEnv m] : m (Array (Name × ErrorExplanation)) := do
getErrorExplanations
end Lean

View File

@@ -19,4 +19,3 @@ public import Lean.Linter.Sets
public import Lean.Linter.UnusedSimpArgs
public import Lean.Linter.Coe
public import Lean.Linter.GlobalAttributeIn
public import Lean.Linter.EnvLinter

View File

@@ -1,10 +0,0 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Wojciech Różowski
-/
module
prelude
public import Lean.Linter.EnvLinter.Basic
public import Lean.Linter.EnvLinter.Frontend

View File

@@ -1,163 +0,0 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Wojciech Różowski
Copyright (c) 2020 Floris van Doorn. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Floris van Doorn, Robert Y. Lewis, Gabriel Ebner
-/
module
prelude
public import Lean.Structure
public import Lean.Elab.InfoTree.Main
import Lean.ExtraModUses
public section
open Lean Meta
namespace Lean.Linter.EnvLinter
/-!
# Basic environment linter types and attributes
This file defines the basic types and attributes used by the environment
linting framework. An environment linter consists of a function
`(declaration : Name) → MetaM (Option MessageData)`, this function together with some
metadata is stored in the `EnvLinter` structure. We define two attributes:
* `@[builtin_env_linter]` applies to a declaration of type `EnvLinter`
and adds it to the default linter set.
* `@[builtin_nolint linterName]` omits the tagged declaration from being checked by
the linter with name `linterName`.
-/
/--
Returns true if `decl` is an automatically generated declaration.
Also returns true if `decl` is an internal name or created during macro
expansion.
-/
def isAutoDecl (decl : Name) : CoreM Bool := do
if decl.hasMacroScopes then return true
if decl.isInternal then return true
let env getEnv
if isReservedName env decl then return true
if let Name.str n s := decl then
if ( isAutoDecl n) then return true
if s.startsWith "proof_"
|| s.startsWith "match_"
|| s.startsWith "unsafe_"
|| s.startsWith "grind_"
then return true
if env.isConstructor n && s ["injEq", "inj", "sizeOf_spec", "elim", "noConfusion"] then
return true
if let ConstantInfo.inductInfo _ := env.find? n then
if s.startsWith "brecOn_" || s.startsWith "below_" then return true
if s [casesOnSuffix, recOnSuffix, brecOnSuffix, belowSuffix,
"ndrec", "ndrecOn", "noConfusionType", "noConfusion", "ofNat", "toCtorIdx", "ctorIdx",
"ctorElim", "ctorElimType"] then
return true
if let some _ := isSubobjectField? env n (.mkSimple s) then
return true
-- Coinductive/inductive lattice-theoretic predicates:
if let ConstantInfo.inductInfo _ := env.find? (Name.str n "_functor") then
if s == "functor_unfold" || s == casesOnSuffix || s == "mutual" then return true
if env.isConstructor (Name.str (Name.str n "_functor") s) then return true
pure false
/-- An environment linting test for the `lake builtin-lint` command. -/
structure EnvLinter where
/-- `test` defines a test to perform on every declaration. It should never fail. Returning `none`
signifies a passing test. Returning `some msg` reports a failing test with error `msg`. -/
test : Name MetaM (Option MessageData)
/-- `noErrorsFound` is the message printed when all tests are negative -/
noErrorsFound : MessageData
/-- `errorsFound` is printed when at least one test is positive -/
errorsFound : MessageData
/-- If `isDefault` is false, this linter is only run when `--clippy` is passed. -/
isDefault := true
/-- A `NamedEnvLinter` is an environment linter associated to a particular declaration. -/
structure NamedEnvLinter extends EnvLinter where
/-- The name of the named linter. This is just the declaration name without the namespace. -/
name : Name
/-- The linter declaration name -/
declName : Name
/-- Gets an environment linter by declaration name. -/
def getEnvLinter (name declName : Name) : CoreM NamedEnvLinter := unsafe
return { evalConstCheck EnvLinter ``EnvLinter declName with name, declName }
/-- Defines the `envLinterExt` extension for adding an environment linter to the default set. -/
builtin_initialize envLinterExt :
SimplePersistentEnvExtension (Name × Bool) (NameMap (Name × Bool))
let addEntryFn := fun m (n, b) => m.insert (n.updatePrefix .anonymous) (n, b)
registerSimplePersistentEnvExtension {
addImportedFn := fun nss =>
nss.foldl (init := {}) fun m ns => ns.foldl (init := m) addEntryFn
addEntryFn
}
/--
Defines the `@[builtin_env_linter]` attribute for adding a linter to the default set.
The form `@[builtin_env_linter clippy]` will not add the linter to the default set,
but it can be selected by `lake builtin-lint --clippy`.
Linters are named using their declaration names, without the namespace. These must be distinct.
-/
syntax (name := builtin_env_linter) "builtin_env_linter" &" clippy"? : attr
builtin_initialize registerBuiltinAttribute {
name := `builtin_env_linter
descr := "Use this declaration as a linting test in `lake builtin-lint`"
add := fun decl stx kind => do
let dflt := stx[1].isNone
unless kind == .global do throwError "invalid attribute `builtin_env_linter`, must be global"
let shortName := decl.updatePrefix .anonymous
if let some (declName, _) := (envLinterExt.getState ( getEnv)).find? shortName then
Elab.addConstInfo stx declName
throwError
"invalid attribute `builtin_env_linter`, linter `{shortName}` has already been declared"
let isPublic := !isPrivateName decl; let isMeta := isMarkedMeta ( getEnv) decl
unless isPublic && isMeta do
throwError "invalid attribute `builtin_env_linter`, \
declaration `{.ofConstName decl}` must be marked as `public` and `meta`\
{if isPublic then " but is only marked `public`" else ""}\
{if isMeta then " but is only marked `meta`" else ""}"
let constInfo getConstInfo decl
unless (isDefEq constInfo.type (mkConst ``EnvLinter)).run' do
throwError "`{.ofConstName decl}` must have type `{.ofConstName ``EnvLinter}`, got \
`{constInfo.type}`"
modifyEnv fun env => envLinterExt.addEntry env (decl, dflt)
}
/-- `@[builtin_nolint linterName]` omits the tagged declaration from being checked by
the linter with name `linterName`. -/
syntax (name := builtin_nolint) "builtin_nolint" (ppSpace ident)+ : attr
/-- Defines the user attribute `builtin_nolint` for skipping environment linters. -/
builtin_initialize builtinNolintAttr : ParametricAttribute (Array Name)
registerParametricAttribute {
name := `builtin_nolint
descr := "Do not report this declaration in any of the tests of `lake builtin-lint`"
getParam := fun _ => fun
| `(attr| builtin_nolint $[$ids]*) => ids.mapM fun id => withRef id <| do
let shortName := id.getId.eraseMacroScopes
let some (declName, _) := (envLinterExt.getState ( getEnv)).find? shortName
| throwError "linter '{shortName}' not found"
Elab.addConstInfo id declName
recordExtraModUseFromDecl (isMeta := false) declName
pure shortName
| _ => Elab.throwUnsupportedSyntax
}
/-- Returns true if `decl` should be checked
using `linter`, i.e., if there is no `builtin_nolint` attribute. -/
def shouldBeLinted [Monad m] [MonadEnv m] (linter : Name) (decl : Name) : m Bool :=
return !((builtinNolintAttr.getParam? ( getEnv) decl).getD #[]).contains linter
end Lean.Linter.EnvLinter

View File

@@ -1,180 +0,0 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Wojciech Różowski
Copyright (c) 2020 Floris van Doorn. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Floris van Doorn, Robert Y. Lewis, Gabriel Ebner
-/
module
prelude
public import Lean.Linter.EnvLinter.Basic
import Lean.DeclarationRange
import Lean.Util.Path
import Lean.CoreM
import Lean.Elab.Command
public section
open Lean Meta
namespace Lean.Linter.EnvLinter
/-- Verbosity for the linter output. -/
inductive LintVerbosity
/-- `low`: only print failing checks, print nothing on success. -/
| low
/-- `medium`: only print failing checks, print confirmation on success. -/
| medium
/-- `high`: print output of every check. -/
| high
deriving Inhabited, DecidableEq, Repr
/-- `getChecks clippy runOnly` produces a list of linters.
`runOnly` is an optional list of names that should resolve to declarations with type
`NamedEnvLinter`. If populated, only these linters are run (regardless of the default
configuration). Otherwise, it uses all enabled linters in the environment tagged with
`@[builtin_env_linter]`. If `clippy` is false, it only uses linters with `isDefault = true`. -/
def getChecks (clippy : Bool) (runOnly : Option (List Name)) :
CoreM (Array NamedEnvLinter) := do
let mut result := #[]
for (name, declName, isDefault) in envLinterExt.getState ( getEnv) do
let shouldRun := match runOnly with
| some only => only.contains name
| none => clippy || isDefault
if shouldRun then
let linter getEnvLinter name declName
result := result.binInsert (·.name.lt ·.name) linter
pure result
/--
Runs all the specified linters on all the specified declarations in parallel,
producing a list of results.
-/
def lintCore (decls : Array Name) (linters : Array NamedEnvLinter) :
CoreM (Array (NamedEnvLinter × Std.HashMap Name MessageData)) := do
let tasks : Array (NamedEnvLinter × Array (Name × Task (Except Exception <| Option MessageData)))
linters.mapM fun linter => do
let decls decls.filterM (shouldBeLinted linter.name)
(linter, ·) <$> decls.mapM fun decl => (decl, ·) <$> do
let act : MetaM (Option MessageData) := do
linter.test decl
EIO.asTask <| ( Core.wrapAsync (fun _ =>
act |>.run' Elab.Command.mkMetaContext
) (cancelTk? := none)) ()
let result tasks.mapM fun (linter, decls) => do
let mut msgs : Std.HashMap Name MessageData := {}
for (declName, msgTask) in decls do
let msg? match msgTask.get with
| Except.ok msg? => pure msg?
| Except.error err => pure m!"LINTER FAILED:\n{err.toMessageData}"
if let .some msg := msg? then
msgs := msgs.insert declName msg
pure (linter, msgs)
return result
/-- Sorts a map with declaration keys as names by line number. -/
def sortResults (results : Std.HashMap Name α) : CoreM <| Array (Name × α) := do
let mut key : Std.HashMap Name Nat := {}
for (n, _) in results.toArray do
if let some range findDeclarationRanges? n then
key := key.insert n <| range.range.pos.line
pure $ results.toArray.qsort fun (a, _) (b, _) => key.getD a 0 < key.getD b 0
/-- Formats a linter warning as `#check` command with comment. -/
def printWarning (declName : Name) (warning : MessageData) (useErrorFormat : Bool := false)
(filePath : System.FilePath := default) : CoreM MessageData := do
if useErrorFormat then
if let some range findDeclarationRanges? declName then
let msg addMessageContextPartial
m!"{filePath}:{range.range.pos.line}:{range.range.pos.column + 1}: error: {
← mkConstWithLevelParams declName} {warning}"
return msg
addMessageContextPartial m!"#check {← mkConstWithLevelParams declName} /- {warning} -/"
/-- Formats a map of linter warnings using `printWarning`, sorted by line number. -/
def printWarnings (results : Std.HashMap Name MessageData) (filePath : System.FilePath := default)
(useErrorFormat : Bool := false) : CoreM MessageData := do
(MessageData.joinSep ·.toList Format.line) <$>
( sortResults results).mapM fun (declName, warning) =>
printWarning declName warning (useErrorFormat := useErrorFormat) (filePath := filePath)
/--
Formats a map of linter warnings grouped by filename with `-- filename` comments.
-/
def groupedByFilename (results : Std.HashMap Name MessageData) (useErrorFormat : Bool := false) :
CoreM MessageData := do
let sp if useErrorFormat then getSrcSearchPath else pure {}
let grouped : Std.HashMap Name (System.FilePath × Std.HashMap Name MessageData)
results.foldM (init := {}) fun grouped declName msg => do
let mod findModuleOf? declName
let mod := mod.getD ( getEnv).mainModule
grouped.insert mod <$>
match grouped[mod]? with
| some (fp, msgs) => pure (fp, msgs.insert declName msg)
| none => do
let fp if useErrorFormat then
pure <| ( sp.findWithExt "lean" mod).getD (modToFilePath "." mod "lean")
else pure default
pure (fp, .insert {} declName msg)
let grouped' := grouped.toArray.qsort fun (a, _) (b, _) => toString a < toString b
(MessageData.joinSep · (Format.line ++ Format.line)) <$>
grouped'.toList.mapM fun (mod, fp, msgs) => do
pure m!"-- {mod}\n{← printWarnings msgs (filePath := fp) (useErrorFormat := useErrorFormat)}"
/--
Formats the linter results as Lean code with comments and `#check` commands.
-/
def formatLinterResults
(results : Array (NamedEnvLinter × Std.HashMap Name MessageData))
(decls : Array Name)
(groupByFilename : Bool)
(whereDesc : String) (runClippyLinters : Bool)
(verbose : LintVerbosity) (numLinters : Nat) (useErrorFormat : Bool := false) :
CoreM MessageData := do
let formattedResults results.filterMapM fun (linter, results) => do
if !results.isEmpty then
let warnings
if groupByFilename || useErrorFormat then
groupedByFilename results (useErrorFormat := useErrorFormat)
else
printWarnings results
pure $ some m!"/- The `{linter.name}` linter reports:\n{linter.errorsFound} -/\n{warnings}\n"
else if verbose = LintVerbosity.high then
pure $ some m!"/- OK: {linter.noErrorsFound} -/"
else
pure none
let mut s := MessageData.joinSep formattedResults.toList Format.line
let numAutoDecls := ( decls.filterM isAutoDecl).size
let failed := results.map (·.2.size) |>.foldl (·+·) 0
unless verbose matches LintVerbosity.low do
s := m!"-- Found {failed} error{if failed == 1 then "" else "s"
} in {decls.size - numAutoDecls} declarations (plus {
numAutoDecls} automatically generated ones) {whereDesc
} with {numLinters} linters\n\n{s}"
unless runClippyLinters do s := m!"{s}-- (non-clippy linters skipped)\n"
pure s
/-- Get the list of declarations in the current module. -/
def getDeclsInCurrModule : CoreM (Array Name) := do
pure $ ( getEnv).constants.map₂.foldl (init := #[]) fun r k _ => r.push k
/-- Get the list of all declarations in the environment. -/
def getAllDecls : CoreM (Array Name) := do
pure $ ( getEnv).constants.map₁.fold (init := getDeclsInCurrModule) fun r k _ => r.push k
/-- Get the list of all declarations in the specified package. -/
def getDeclsInPackage (pkg : Name) : CoreM (Array Name) := do
let env getEnv
let mut decls getDeclsInCurrModule
let modules := env.header.moduleNames.map (pkg.isPrefixOf ·)
return env.constants.map₁.fold (init := decls) fun decls declName _ =>
if modules[env.const2ModIdx[declName]?.get!]! then
decls.push declName
else decls
end Lean.Linter.EnvLinter

View File

@@ -30,7 +30,7 @@ of type
α → List α → Sort (max 1 u_1) → Sort (max 1 u_1)
```
-/
def buildBelowMinorPremise (rlvl : Level) (motives : Array Expr) (minorType : Expr) : MetaM Expr :=
private def buildBelowMinorPremise (rlvl : Level) (motives : Array Expr) (minorType : Expr) : MetaM Expr :=
forallTelescope minorType fun minor_args _ => do go #[] minor_args.toList
where
go (prods : Array Expr) : List Expr MetaM Expr
@@ -56,7 +56,7 @@ For example for the `List` type, it constructs,
fun {a} {motive} t => List.rec PUnit (fun a_1 a a_ih => motive a ×' a_ih) t
```
-/
def mkBelowFromRec (recName : Name) (nParams : Nat)
private def mkBelowFromRec (recName : Name) (nParams : Nat)
(belowName : Name) : MetaM Unit := do
-- The construction follows the type of `ind.rec`
let .recInfo recVal getConstInfo recName
@@ -146,7 +146,7 @@ of type
PProd (motive (head :: tail)) (PProd (PProd (motive tail) (List.below tail)) PUnit)
```
-/
def buildBRecOnMinorPremise (rlvl : Level) (motives : Array Expr)
private def buildBRecOnMinorPremise (rlvl : Level) (motives : Array Expr)
(belows : Array Expr) (fs : Array Expr) (minorType : Expr) : MetaM Expr :=
forallTelescope minorType fun minor_args minor_type => do
let rec go (prods : Array Expr) : List Expr MetaM Expr
@@ -188,7 +188,7 @@ protected theorem List.brecOn.eq.{u} : ∀ {a : Type} {motive : List a → Sort
(F_1 : (t : List a) → List.below t → motive t), List.brecOn t F_1 = F_1 t (List.brecOn.go t F_1).2
```
-/
def mkBRecOnFromRec (recName : Name) (nParams : Nat)
private def mkBRecOnFromRec (recName : Name) (nParams : Nat)
(all : Array Name) (brecOnName : Name) : MetaM Unit := do
let brecOnGoName := brecOnName.str "go"
let brecOnEqName := brecOnName.str "eq"

View File

@@ -15,7 +15,7 @@ import Lean.Meta.Transform
namespace Lean.Meta
structure SparseCasesOnKey where
private structure SparseCasesOnKey where
indName : Name
ctors : Array Name
-- When this is created in a private context and thus may contain private references, we must
@@ -23,7 +23,7 @@ structure SparseCasesOnKey where
isPrivate : Bool
deriving BEq, Hashable
builtin_initialize sparseCasesOnCacheExt : EnvExtension (PHashMap SparseCasesOnKey Name)
private builtin_initialize sparseCasesOnCacheExt : EnvExtension (PHashMap SparseCasesOnKey Name)
registerEnvExtension (pure {}) (asyncMode := .local) -- mere cache, keep it local
/-- Information necessary to recognize and split on sparse casesOn (in particular in MatchEqs) -/
@@ -34,7 +34,7 @@ public structure SparseCasesOnInfo where
insterestingCtors : Array Name
deriving Inhabited
builtin_initialize sparseCasesOnInfoExt : MapDeclarationExtension SparseCasesOnInfo
private builtin_initialize sparseCasesOnInfoExt : MapDeclarationExtension SparseCasesOnInfo
mkMapDeclarationExtension (exportEntriesFn := fun env s =>
let all := s.toArray
-- Do not export for non-exposed defs at exported/server levels

View File

@@ -83,7 +83,7 @@ where
value := val
})
def isName (env : Environment) (n : Name) : Bool :=
private def isName (env : Environment) (n : Name) : Bool :=
if let .str p "else_eq" := n then
(getSparseCasesOnInfoCore env p).isSome
else

View File

@@ -16,7 +16,7 @@ def hinjSuffix := "hinj"
public def mkCtorIdxHInjTheoremNameFor (indName : Name) : Name :=
(mkCtorIdxName indName).str hinjSuffix
partial def mkHInjectiveTheorem? (thmName : Name) (indVal : InductiveVal) : MetaM TheoremVal := do
private partial def mkHInjectiveTheorem? (thmName : Name) (indVal : InductiveVal) : MetaM TheoremVal := do
let us := indVal.levelParams.map mkLevelParam
let thmType
forallBoundedTelescope indVal.type (indVal.numParams + indVal.numIndices) fun xs1 _ => do

View File

@@ -40,7 +40,7 @@ namespace Lean.Meta.Match
This can be used to use the alternative of a match expression in its splitter.
-/
partial def forallAltVarsTelescope (altType : Expr) (altInfo : AltParamInfo)
public partial def forallAltVarsTelescope (altType : Expr) (altInfo : AltParamInfo)
(k : (patVars : Array Expr) (args : Array Expr) (mask : Array Bool) (type : Expr) MetaM α) : MetaM α := do
assert! altInfo.numOverlaps = 0
if altInfo.hasUnitThunk then
@@ -104,7 +104,7 @@ where
This can be used to use the alternative of a match expression in its splitter.
-/
partial def forallAltTelescope (altType : Expr) (altInfo : AltParamInfo) (numDiscrEqs : Nat)
public partial def forallAltTelescope (altType : Expr) (altInfo : AltParamInfo) (numDiscrEqs : Nat)
(k : (ys : Array Expr) (eqs : Array Expr) (args : Array Expr) (mask : Array Bool) (type : Expr) MetaM α)
: MetaM α := do
forallAltVarsTelescope altType altInfo fun ys args mask altType => do

View File

@@ -21,7 +21,7 @@ We will eventually have to write more efficient proof automation for this module
The new proof automation should exploit the structure of the generated goals and avoid general purpose tactics
such as `contradiction`.
-/
def _root_.Lean.MVarId.contradictionQuick (mvarId : MVarId) : MetaM Bool := do
private def _root_.Lean.MVarId.contradictionQuick (mvarId : MVarId) : MetaM Bool := do
mvarId.withContext do
let mut posMap : Std.HashMap Expr FVarId := {}
let mut negMap : Std.HashMap Expr FVarId := {}

View File

@@ -16,7 +16,7 @@ import Lean.Meta.Tactic.Replace
namespace Lean.Meta
def rewriteGoalUsingEq (goal : MVarId) (eq : Expr) (symm : Bool := false) : MetaM MVarId := do
private def rewriteGoalUsingEq (goal : MVarId) (eq : Expr) (symm : Bool := false) : MetaM MVarId := do
let rewriteResult goal.rewrite (goal.getType) eq symm
goal.replaceTargetEq rewriteResult.eNew rewriteResult.eqProof

View File

@@ -383,7 +383,7 @@ When equal, uses `eq_self` (no kernel evaluation needed). When different, uses
`mkStringLitNeProof` which finds the first differing character position and proves
inequality via `congrArg (List.get?Internal · i)`.
-/
def evalStringEq (a b : Expr) : SimpM Result := do
private def evalStringEq (a b : Expr) : SimpM Result := do
let some va := getStringValue? a | return .rfl
let some vb := getStringValue? b | return .rfl
if va = vb then

View File

@@ -172,7 +172,7 @@ inductive Result where
Pre-computed `.rfl` results to avoid dynamic memory allocation.
Each combination of `done` and `contextDependent` maps to a compile-time constant.
-/
def mkRflResult (done : Bool := false) (contextDependent : Bool := false) : Result :=
public def mkRflResult (done : Bool := false) (contextDependent : Bool := false) : Result :=
match done, contextDependent with
| false, false => .rfl
| false, true => .rfl false true
@@ -180,15 +180,15 @@ def mkRflResult (done : Bool := false) (contextDependent : Bool := false) : Resu
| true, true => .rfl true true
/-- Like `mkRflResult` with `done := false`. -/
def mkRflResultCD (contextDependent : Bool) : Result :=
public def mkRflResultCD (contextDependent : Bool) : Result :=
if contextDependent then .rfl false true else .rfl
/-- Returns `true` if this result depends on the local context (e.g., hypotheses). -/
abbrev Result.isContextDependent : Result Bool
public abbrev Result.isContextDependent : Result Bool
| .rfl _ cd | .step _ _ _ cd => cd
/-- Marks a result as context-dependent. -/
def Result.withContextDependent : Result Result
public def Result.withContextDependent : Result Result
| .rfl done _ => .rfl done true
| .step e h done _ => .step e h done true

View File

@@ -66,7 +66,7 @@ This is used during pattern matching and structural definitional equality tests
to identify arguments that can be skipped or handled specially
(e.g., instance arguments can be synthesized, proof arguments can be inferred).
-/
structure ProofInstArgInfo where
public structure ProofInstArgInfo where
/-- `true` if this argument is a proof (its type is a `Prop`). -/
isProof : Bool
/-- `true` if this argument is a type class instance. -/
@@ -78,7 +78,7 @@ Information about a function symbol. It stores which argument positions are proo
enabling optimizations during pattern matching and structural definitional equality tests
such as skipping proof arguments or deferring instance synthesis.
-/
structure ProofInstInfo where
public structure ProofInstInfo where
/-- Information about each argument position. -/
argsInfo : Array ProofInstArgInfo
deriving Inhabited

View File

@@ -17,7 +17,7 @@ import Init.GetElem
namespace Lean.Meta.Tactic.Cbv
/-- Extract elements from an array literal (`Array.mk` applied to a list literal). -/
def getArrayLitElems? (e : Expr) : Option <| Array Expr :=
private def getArrayLitElems? (e : Expr) : Option <| Array Expr :=
match_expr e with
| Array.mk _ as => getListLitElems as
| _ => none

View File

@@ -12,7 +12,7 @@ import Lean.Meta.Tactic.Util
namespace Lean.Meta
partial def cleanupCore (mvarId : MVarId) (toPreserve : Array FVarId) (indirectProps : Bool) : MetaM MVarId := do
private partial def cleanupCore (mvarId : MVarId) (toPreserve : Array FVarId) (indirectProps : Bool) : MetaM MVarId := do
mvarId.withContext do
mvarId.checkNotAssigned `cleanup
let used collectUsed |>.run' (false, {})

View File

@@ -50,18 +50,6 @@ register_builtin_option debug.tactic.simp.checkDefEqAttr : Bool := {
of the `defeq` attribute, and warn if it was. Note that this is a costly check."
}
register_builtin_option warning.simp.varHead : Bool := {
defValue := false
descr := "If true, warns when the head symbol of the left-hand side of a `@[simp]` theorem \
is a variable. Such lemmas are tried on every simp step, which can be slow."
}
register_builtin_option warning.simp.otherHead : Bool := {
defValue := true
descr := "If true, warns when the left-hand side of a `@[simp]` theorem is headed by a \
`.other` key in the discrimination tree (e.g. a lambda expression). Such lemmas can cause slowdowns."
}
/--
An `Origin` is an identifier for simp theorems which indicates roughly
what action the user took which lead to this theorem existing in the simp set.
@@ -371,27 +359,12 @@ private def checkTypeIsProp (type : Expr) : MetaM Unit :=
unless ( isProp type) do
throwError "Invalid simp theorem: Expected a proposition, but found{indentExpr type}"
private def mkSimpTheoremKeys (type : Expr) (noIndexAtArgs : Bool) (checkLhs : Bool := false) : MetaM (Array SimpTheoremKey × Bool) := do
private def mkSimpTheoremKeys (type : Expr) (noIndexAtArgs : Bool) : MetaM (Array SimpTheoremKey × Bool) := do
withNewMCtxDepth do
let (_, _, type) forallMetaTelescopeReducing type
let type whnfR type
match type.eq? with
| some (_, lhs, rhs) =>
let keys DiscrTree.mkPath lhs noIndexAtArgs
if checkLhs then
if warning.simp.varHead.get ( getOptions) && keys[0]? == some .star then
logWarning m!"Left-hand side of simp theorem has a variable as head symbol. \
This means the theorem will be tried on every simp step, which can be expensive. \
This may be acceptable for `local` or `scoped` simp lemmas.\n\
Use `set_option warning.simp.varHead false` to disable this warning."
if warning.simp.otherHead.get ( getOptions) && keys[0]? == some .other then
logWarning m!"Left-hand side of simp theorem is headed by a `.other` key in the \
discrimination tree (e.g. because it is a lambda expression). \
This theorem will be tried against all expressions that also have a `.other` key as head, \
which can cause slowdowns. \
This may be acceptable for `local` or `scoped` simp lemmas.\n\
Use `set_option warning.simp.otherHead false` to disable this warning."
pure (keys, isPerm lhs rhs)
| some (_, lhs, rhs) => pure ( DiscrTree.mkPath lhs noIndexAtArgs, isPerm lhs rhs)
| none => throwError "Unexpected kind of simp theorem{indentExpr type}"
register_builtin_option simp.rfl.checkTransparency: Bool := {
@@ -400,10 +373,10 @@ register_builtin_option simp.rfl.checkTransparency: Bool := {
}
private def mkSimpTheoremCore (origin : Origin) (e : Expr) (levelParams : Array Name) (proof : Expr)
(post : Bool) (prio : Nat) (noIndexAtArgs : Bool) (checkLhs : Bool := false): MetaM SimpTheorem := do
(post : Bool) (prio : Nat) (noIndexAtArgs : Bool) : MetaM SimpTheorem := do
assert! origin != .fvar .anonymous
let type instantiateMVars ( inferType e)
let (keys, perm) mkSimpTheoremKeys type noIndexAtArgs checkLhs
let (keys, perm) mkSimpTheoremKeys type noIndexAtArgs
let rfl isRflProof proof
if rfl && simp.rfl.checkTransparency.get ( getOptions) then
forallTelescopeReducing type fun _ type => do
@@ -426,7 +399,7 @@ Creates a `SimpTheorem` from a global theorem.
Because some theorems lead to multiple `SimpTheorems` (in particular conjunctions), returns an array.
-/
def mkSimpTheoremFromConst (declName : Name) (post := true) (inv := false)
(prio : Nat := eval_prio default) (checkLhs : Bool := false) : MetaM (Array SimpTheorem) := do
(prio : Nat := eval_prio default) : MetaM (Array SimpTheorem) := do
let cinfo getConstVal declName
let us := cinfo.levelParams.map mkLevelParam
let origin := .decl declName post inv
@@ -440,10 +413,10 @@ def mkSimpTheoremFromConst (declName : Name) (post := true) (inv := false)
let auxName mkAuxLemma (kind? := `_simp) cinfo.levelParams type val (inferRfl := true)
(forceExpose := true) -- These kinds of theorems are small and `to_additive` may need to
-- unfold them.
r := r.push <| ( do mkSimpTheoremCore origin (mkConst auxName us) #[] (mkConst auxName) post prio (noIndexAtArgs := false) (checkLhs := checkLhs))
r := r.push <| ( do mkSimpTheoremCore origin (mkConst auxName us) #[] (mkConst auxName) post prio (noIndexAtArgs := false))
return r
else
return #[ withoutExporting do mkSimpTheoremCore origin (mkConst declName us) #[] (mkConst declName) post prio (noIndexAtArgs := false) (checkLhs := checkLhs)]
return #[ withoutExporting do mkSimpTheoremCore origin (mkConst declName us) #[] (mkConst declName) post prio (noIndexAtArgs := false)]
def SimpTheorem.getValue (simpThm : SimpTheorem) : MetaM Expr := do
if simpThm.proof.isConst && simpThm.levelParams.isEmpty then
@@ -697,7 +670,7 @@ def SimpExtension.getTheorems (ext : SimpExtension) : CoreM SimpTheorems :=
Adds a simp theorem to a simp extension
-/
def addSimpTheorem (ext : SimpExtension) (declName : Name) (post : Bool) (inv : Bool) (attrKind : AttributeKind) (prio : Nat) : MetaM Unit := do
let simpThms withExporting (isExporting := attrKind != .local && !isPrivateName declName) do mkSimpTheoremFromConst declName post inv prio (checkLhs := true)
let simpThms withExporting (isExporting := attrKind != .local && !isPrivateName declName) do mkSimpTheoremFromConst declName post inv prio
for simpThm in simpThms do
ext.add (SimpEntry.thm simpThm) attrKind

View File

@@ -88,6 +88,21 @@ builtin_initialize registerTraceClass `Meta.wrapInstance
open Meta
/--
Rebuild a type application with fresh synthetic metavariables for instance-implicit arguments.
Non-instance-implicit arguments are assigned from the original application's arguments.
If the function is over-applied, extra arguments are preserved.
-/
public def abstractInstImplicitArgs (type : Expr) : MetaM Expr := do
let fn := type.getAppFn
let args := type.getAppArgs
let (mvars, bis, _) forallMetaTelescope ( inferType fn)
for i in [:mvars.size] do
unless bis[i]!.isInstImplicit do
mvars[i]!.mvarId!.assign args[i]!
let args := mvars ++ args.drop mvars.size
instantiateMVars (mkAppN fn args)
partial def getFieldOrigin (structName field : Name) : MetaM (Name × StructureFieldInfo) := do
let env getEnv
for parent in getStructureParentInfo env structName do

View File

@@ -342,11 +342,10 @@ namespace InternalSyntax
This command is for internal use only. It is intended for macros that implicitly introduce new
scopes, such as `expandInCmd` and `expandNamespacedDeclaration`. It allows local attributes to remain
accessible beyond those implicit scopes, even though they would normally be hidden from the user.
The numeric argument specifies how many scope levels to mark as non-delimiting.
-/
scoped syntax (name := end_local_scope) "end_local_scope" num : command
scoped syntax (name := end_local_scope) "end_local_scope" : command
def endLocalScopeSyntax (depth : Nat) : Command := Unhygienic.run `(end_local_scope $(Syntax.mkNumLit (toString depth)))
def endLocalScopeSyntax : Command := Unhygienic.run `(end_local_scope)
end InternalSyntax
/-- Declares one or more typed variables, or modifies whether already-declared variables are

View File

@@ -50,17 +50,17 @@ def versoCommentBodyFn : ParserFn := fun c s =>
rawFn (Doc.Parser.ignoreFn <| chFn '-' >> chFn '/') (trailingWs := true) c s
else s
def versoCommentBody : Parser where
public def versoCommentBody : Parser where
fn := fun c s => nodeFn `Lean.Parser.Command.versoCommentBody versoCommentBodyFn c s
@[combinator_parenthesizer versoCommentBody, expose]
def versoCommentBody.parenthesizer := PrettyPrinter.Parenthesizer.visitToken
public def versoCommentBody.parenthesizer := PrettyPrinter.Parenthesizer.visitToken
open PrettyPrinter Formatter in
open Syntax.MonadTraverser in
@[combinator_formatter versoCommentBody, expose]
def versoCommentBody.formatter : PrettyPrinter.Formatter := do
public def versoCommentBody.formatter : PrettyPrinter.Formatter := do
visitArgs $ do
visitAtom `«-/»
goLeft
@@ -354,13 +354,6 @@ def structInstFieldDef := leading_parser
def structInstFieldEqns := leading_parser
optional "private" >> matchAlts
/--
Synthesizes a default value for a structure, making use of `Inhabited` instances for
missing fields, as well as `Inhabited` instances for parent structures.
-/
@[builtin_term_parser] def structInstDefault := leading_parser
"struct_inst_default%"
def funImplicitBinder := withAntiquot (mkAntiquot "implicitBinder" ``implicitBinder) <|
atomic (lookahead ("{" >> many1 binderIdent >> (symbol " : " <|> "}"))) >> implicitBinder
def funStrictImplicitBinder :=

View File

@@ -144,18 +144,13 @@ def ScopedEnvExtension.popScope (ext : ScopedEnvExtension α β σ) (env : Envir
| _ :: state₂ :: stack => { s with stateStack := state₂ :: stack }
| _ => s
/-- Modifies `delimitsLocal` flag to `false` on the top `depth` entries of the state stack,
to turn off delimiting of local entries across multiple implicit scope levels
(e.g. those introduced by compound `namespace A.B.C` expansions).
/-- Modifies `delimitsLocal` flag to `false` to turn off delimiting of local entries.
-/
def ScopedEnvExtension.setDelimitsLocal (ext : ScopedEnvExtension α β σ) (env : Environment) (depth : Nat) : Environment :=
def ScopedEnvExtension.setDelimitsLocal (ext : ScopedEnvExtension α β σ) (env : Environment) : Environment :=
ext.ext.modifyState (asyncMode := .local) env fun s =>
{s with stateStack := go depth s.stateStack}
where
go : Nat List (State σ) List (State σ)
| 0, stack => stack
| _, [] => []
| n + 1, state :: stack => {state with delimitsLocal := false} :: go n stack
match s.stateStack with
| [] => s
| state :: stack => {s with stateStack := {state with delimitsLocal := false} :: stack}
def ScopedEnvExtension.addEntry (ext : ScopedEnvExtension α β σ) (env : Environment) (b : β) : Environment :=
ext.ext.addEntry env (Entry.global b)
@@ -231,12 +226,11 @@ def popScope [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] : m Unit :=
for ext in ( scopedEnvExtensionsRef.get) do
modifyEnv ext.popScope
/-- Used to implement `end_local_scope` command, that disables delimiting local entries of ScopedEnvExtension
across `depth` scope levels.
/-- Used to implement `end_local_scope` command, that disables delimiting local entries of ScopedEnvExtension in a current scope.
-/
def setDelimitsLocal [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] (depth : Nat) : m Unit := do
def setDelimitsLocal [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] : m Unit := do
for ext in ( scopedEnvExtensionsRef.get) do
modifyEnv (ext.setDelimitsLocal · depth)
modifyEnv (ext.setDelimitsLocal ·)
def activateScoped [Monad m] [MonadEnv m] [MonadLiftT (ST IO.RealWorld) m] (namespaceName : Name) : m Unit := do
for ext in ( scopedEnvExtensionsRef.get) do

View File

@@ -35,7 +35,7 @@ structure Import where
-- TODO: move further up into `Init` by using simpler representation for `imports`
@[extern "lean_idbg_client_loop"]
opaque Idbg.idbgClientLoop {α : Type} [Nonempty α]
public opaque Idbg.idbgClientLoop {α : Type} [Nonempty α]
(siteId : String) (imports : Array Import) (apply : α String) : IO Unit
instance : Coe Name Import := ({module := ·})
@@ -130,7 +130,7 @@ The type of module package identifiers.
This is a {name}`String` that is used to disambiguate native symbol prefixes between
different packages (and different versions of the same package).
-/
abbrev PkgId := String
public abbrev PkgId := String
/-- A module's setup information as described by a JSON file. -/
structure ModuleSetup where

View File

@@ -22,7 +22,7 @@ open Server Std Lean SubExpr
NOTE: in the future we may add other tags.
-/
inductive ExprDiffTag where
private inductive ExprDiffTag where
| change
| delete
| insert

View File

@@ -7,4 +7,3 @@ module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Protocol.H1

View File

@@ -6,7 +6,6 @@ Authors: Sofia Rodrigues
module
prelude
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Headers.Name
public import Std.Internal.Http.Data.Headers.Value
public import Std.Internal.Parsec.Basic
@@ -216,89 +215,4 @@ def serialize (connection : Connection) : Header.Name × Header.Value :=
instance : Header Connection := parse, serialize
end Connection
/--
The `Host` header.
Represents the authority component of a URI:
host [ ":" port ]
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-host-and-authority
-/
structure Host where
/--
Host name (reg-name, IPv4, or IPv6 literal).
-/
host : URI.Host
/--
Optional port.
-/
port : URI.Port
deriving Repr, BEq
namespace Host
/--
Parses a `Host` header value.
-/
def parse (v : Value) : Option Host :=
let parsed := (Std.Http.URI.Parser.parseHostHeader <* Std.Internal.Parsec.eof).run v.value.toUTF8
match parsed with
| .ok host, port => some host, port
| .error _ => none
/--
Serializes a `Host` header back to a name and a value.
-/
def serialize (host : Host) : Header.Name × Header.Value :=
let value := match host.port with
| .value port => Header.Value.ofString! s!"{host.host}:{port}"
| .empty => Header.Value.ofString! s!"{host.host}:"
| .omitted => Header.Value.ofString! <| toString host.host
(.mk "host", value)
instance : Header Host := parse, serialize
end Host
/--
The `Expect` header.
Represents an expectation token.
The only standardized expectation is `100-continue`.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-expect
-/
structure Expect where
deriving Repr, BEq
namespace Expect
/--
Parses an `Expect` header.
Succeeds only if the value is exactly `100-continue`
(case-insensitive, trimmed).
-/
def parse (v : Value) : Option Expect :=
let normalized := v.value.trimAscii.toString.toLower
if normalized == "100-continue" then
some
else
none
/--
Serializes an `Expect` header.
-/
def serialize (_ : Expect) : Header.Name × Header.Value :=
(Header.Name.expect, Value.ofString! "100-continue")
instance : Header Expect := parse, serialize
end Expect
end Std.Http.Header
end Std.Http.Header.Connection

View File

@@ -52,13 +52,13 @@ private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
if config.maxSchemeLength = 0 then
fail "scheme length limit is 0 (no scheme allowed)"
let first : UInt8 satisfy isAlphaByte
let rest takeWhileAtMost
let first takeWhileUpTo1 isAlphaByte 1
let rest takeWhileUpTo
(fun c =>
isAlphaNum c
c = '+'.toUInt8 c = '-'.toUInt8 c = '.'.toUInt8)
(config.maxSchemeLength - 1)
let schemeBytes := ByteArray.empty.push first ++ rest.toByteArray
let schemeBytes := first.toByteArray ++ rest.toByteArray
let str := String.fromUTF8! schemeBytes |>.toLower
if h : URI.IsValidScheme str then
@@ -68,7 +68,7 @@ private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
-- port = 1*DIGIT
private def parsePortNumber : Parser UInt16 := do
let portBytes takeWhileAtMost isDigitByte 5
let portBytes takeWhileUpTo1 isDigitByte 5
let portStr := String.fromUTF8! portBytes.toByteArray
@@ -82,7 +82,7 @@ private def parsePortNumber : Parser UInt16 := do
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
let userBytesName takeWhileAtMost
let userBytesName takeWhileUpTo
(fun x =>
x ':'.toUInt8
(isUserInfoChar x x = '%'.toUInt8))
@@ -94,7 +94,7 @@ private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
let userPassEncoded if peekIs (· == ':'.toUInt8) then
skip
let userBytesPass takeWhileAtMost
let userBytesPass takeWhileUpTo
(fun x => isUserInfoChar x x = '%'.toUInt8)
config.maxUserInfoLength
@@ -113,7 +113,7 @@ private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
private def parseIPv6 : Parser Net.IPv6Addr := do
skipByte '['.toUInt8
let result takeWhile1AtMost
let result takeWhileUpTo1
(fun x => x = ':'.toUInt8 x = '.'.toUInt8 isHexDigitByte x)
256
@@ -127,7 +127,7 @@ private def parseIPv6 : Parser Net.IPv6Addr := do
-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
private def parseIPv4 : Parser Net.IPv4Addr := do
let result takeWhile1AtMost
let result takeWhileUpTo1
(fun x => x = '.'.toUInt8 isDigitByte x)
256
@@ -148,8 +148,8 @@ private def parseHost (config : URI.Config) : Parser URI.Host := do
if let some ipv4 tryOpt parseIPv4 then
return .ipv4 ipv4
-- It needs to be a legal DNS label, so it differs from reg-name.
let some str := String.fromUTF8? ( takeWhile1AtMost
-- We intentionally parse DNS names here (not full RFC 3986 reg-name).
let some str := String.fromUTF8? ( takeWhileUpTo1
(fun x => isAlphaNum x x = '-'.toUInt8 x = '.'.toUInt8)
config.maxHostLength).toByteArray
| fail s!"invalid host"
@@ -187,7 +187,7 @@ private def parseAuthority (config : URI.Config) : Parser URI.Authority := do
-- segment = *pchar
private def parseSegment (config : URI.Config) : Parser ByteSlice := do
takeWhileAtMost (fun c => isPChar c c = '%'.toUInt8) config.maxSegmentLength
takeWhileUpTo (fun c => isPChar c c = '%'.toUInt8) config.maxSegmentLength
/-
path = path-abempty ; begins with "/" or is empty
@@ -272,7 +272,7 @@ def parsePath (config : URI.Config) (forceAbsolute : Bool) (allowEmpty : Bool) :
-- query = *( pchar / "/" / "?" )
private def parseQuery (config : URI.Config) : Parser URI.Query := do
let queryBytes
takeWhileAtMost (fun c => isQueryChar c c = '%'.toUInt8) config.maxQueryLength
takeWhileUpTo (fun c => isQueryChar c c = '%'.toUInt8) config.maxQueryLength
let some queryStr := String.fromUTF8? queryBytes.toByteArray
| fail "invalid query string"
@@ -304,7 +304,7 @@ private def parseQuery (config : URI.Config) : Parser URI.Query := do
-- fragment = *( pchar / "/" / "?" )
private def parseFragment (config : URI.Config) : Parser URI.EncodedFragment := do
let fragmentBytes
takeWhileAtMost (fun c => isFragmentChar c c = '%'.toUInt8) config.maxFragmentLength
takeWhileUpTo (fun c => isFragmentChar c c = '%'.toUInt8) config.maxFragmentLength
let some fragmentStr := URI.EncodedFragment.ofByteArray? fragmentBytes.toByteArray
| fail "invalid percent encoding in fragment"
@@ -328,7 +328,7 @@ Parses a URI (Uniform Resource Identifier).
URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty
-/
def parseURI (config : URI.Config := {}) : Parser URI := do
public def parseURI (config : URI.Config := {}) : Parser URI := do
let scheme parseScheme config
skipByte ':'.toUInt8
@@ -347,7 +347,7 @@ def parseURI (config : URI.Config := {}) : Parser URI := do
/--
Parses a request target with combined parsing and validation.
-/
def parseRequestTarget (config : URI.Config := {}) : Parser RequestTarget :=
public def parseRequestTarget (config : URI.Config := {}) : Parser RequestTarget :=
asterisk <|> origin <|> absoluteHttp <|> authority <|> absolute
where
-- The asterisk form
@@ -406,7 +406,7 @@ where
/--
Parses an HTTP `Host` header value.
-/
def parseHostHeader (config : URI.Config := {}) : Parser (URI.Host × URI.Port) := do
public def parseHostHeader (config : URI.Config := {}) : Parser (URI.Host × URI.Port) := do
let host parseHost config
let port : URI.Port

File diff suppressed because it is too large Load Diff

View File

@@ -1,134 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public section
/-!
# HTTP/1.1 Configuration
This module defines the configuration options for HTTP/1.1 protocol processing,
including connection limits, header constraints, and various size limits.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Connection limits and parser bounds configuration.
-/
structure Config where
/--
Maximum number of requests (server) or responses (client) per connection.
-/
maxMessages : Nat := 100
/--
Maximum number of headers allowed per message.
-/
maxHeaders : Nat := 100
/--
Maximum aggregate byte size of all header field lines in a single message
(name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
-/
maxHeaderBytes : Nat := 65536
/--
Whether to enable keep-alive connections by default.
-/
enableKeepAlive : Bool := true
/--
The `Server` header value injected into outgoing responses (receiving mode) or the
`User-Agent` header value injected into outgoing requests (sending mode).
`none` suppresses the header entirely.
-/
agentName : Option Header.Value := none
/--
Maximum length of request URI (default: 8192 bytes).
-/
maxUriLength : Nat := 8192
/--
Maximum number of bytes consumed while parsing request/status start-lines (default: 8192 bytes).
-/
maxStartLineLength : Nat := 8192
/--
Maximum length of header field name (default: 256 bytes).
-/
maxHeaderNameLength : Nat := 256
/--
Maximum length of header field value (default: 8192 bytes).
-/
maxHeaderValueLength : Nat := 8192
/--
Maximum number of spaces in delimiter sequences (default: 16).
-/
maxSpaceSequence : Nat := 16
/--
Maximum number of leading empty lines (bare CRLF) to skip before a request-line
(RFC 9112 §2.2 robustness). Default: 8.
-/
maxLeadingEmptyLines : Nat := 8
/--
Maximum number of extensions on a single chunk-size line (default: 16).
-/
maxChunkExtensions : Nat := 16
/--
Maximum length of chunk extension name (default: 256 bytes).
-/
maxChunkExtNameLength : Nat := 256
/--
Maximum length of chunk extension value (default: 256 bytes).
-/
maxChunkExtValueLength : Nat := 256
/--
Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
-/
maxChunkLineLength : Nat := 8192
/--
Maximum allowed chunk payload size in bytes (default: 8 MiB).
-/
maxChunkSize : Nat := 8 * 1024 * 1024
/--
Maximum allowed total body size per message in bytes (default: 64 MiB).
This limit applies across all body framing modes. For chunked transfer encoding,
chunk-size lines (including extensions) and the trailer section also count toward
this limit, so the total wire bytes consumed by the body cannot exceed this value.
-/
maxBodySize : Nat := 64 * 1024 * 1024
/--
Maximum length of reason phrase (default: 512 bytes).
-/
maxReasonPhraseLength : Nat := 512
/--
Maximum number of trailer headers (default: 20).
-/
maxTrailerHeaders : Nat := 20
end Std.Http.Protocol.H1

View File

@@ -1,110 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public section
/-!
# HTTP/1.1 Errors
This module defines the error types for HTTP/1.1 protocol processing,
including parsing errors, timeout errors, and connection errors.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Specific HTTP processing errors with detailed information.
-/
inductive Error
/--
Malformed start line (request-line or status-line).
-/
| invalidStatusLine
/--
Invalid or malformed header.
-/
| invalidHeader
/--
Request timeout occurred.
-/
| timeout
/--
Request entity too large.
-/
| entityTooLarge
/--
Request URI is too long.
-/
| uriTooLong
/--
Unsupported HTTP version.
-/
| unsupportedVersion
/--
Invalid chunk encoding.
-/
| invalidChunk
/--
Connection closed.
-/
| connectionClosed
/--
Bad request or response message.
-/
| badMessage
/--
The number of header fields in the message exceeds the configured limit.
Maps to HTTP 431 Request Header Fields Too Large.
-/
| tooManyHeaders
/--
The aggregate byte size of all header fields exceeds the configured limit.
Maps to HTTP 431 Request Header Fields Too Large.
-/
| headersTooLarge
/--
Generic error with message.
-/
| other (message : String)
deriving Repr, BEq
instance : ToString Error where
toString
| .invalidStatusLine => "Invalid status line"
| .invalidHeader => "Invalid header"
| .timeout => "Timeout"
| .entityTooLarge => "Entity too large"
| .uriTooLong => "URI too long"
| .unsupportedVersion => "Unsupported version"
| .invalidChunk => "Invalid chunk"
| .connectionClosed => "Connection closed"
| .badMessage => "Bad message"
| .tooManyHeaders => "Too many headers"
| .headersTooLarge => "Headers too large"
| .other msg => s!"Other error: {msg}"

View File

@@ -1,73 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Events
This module defines the events that can occur during HTTP/1.1 message processing,
including header completion and control/error signals.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Events emitted during HTTP message processing.
-/
inductive Event (dir : Direction)
/--
Indicates that all headers have been successfully parsed.
-/
| endHeaders (head : Message.Head dir)
/--
Signals that additional input data is required to continue processing.
-/
| needMoreData (size : Option Nat)
/--
Indicates a failure during parsing or processing.
-/
| failed (err : Error)
/--
Requests that the connection be closed.
-/
| close
/--
The body should be closed.
-/
| closeBody
/--
Indicates that a response is required.
-/
| needAnswer
/--
Indicates readiness to process the next message.
-/
| next
/--
Signals that an `Expect: 100-continue` decision is pending.
-/
| «continue»
deriving Inhabited, Repr

View File

@@ -1,139 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.Array
public import Std.Internal.Http.Data
public section
/-!
# Message
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
while exposing common operations such as headers, version, and `shouldKeepAlive`
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Direction of message flow from the server's perspective.
-/
inductive Direction
/--
Receiving and parsing incoming requests from clients.
-/
| receiving
/--
Client perspective: writing outgoing requests and reading incoming responses.
-/
| sending
deriving BEq
/--
Inverts the message direction.
-/
@[expose]
abbrev Direction.swap : Direction Direction
| .receiving => .sending
| .sending => .receiving
/--
Gets the message head type based on direction.
-/
@[expose]
def Message.Head : Direction Type
| .receiving => Request.Head
| .sending => Response.Head
/--
Gets the headers of a `Message`.
-/
def Message.Head.headers (m : Message.Head dir) : Headers :=
match dir with
| .receiving => Request.Head.headers m
| .sending => Response.Head.headers m
/--
Gets the version of a `Message`.
-/
def Message.Head.version (m : Message.Head dir) : Version :=
match dir with
| .receiving => Request.Head.version m
| .sending => Response.Head.version m
/--
Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag.
-/
def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length :=
let contentLength := message.headers.getAll? .contentLength
match message.headers.getAll? .transferEncoding with
| none =>
match contentLength with
| some #[cl] => .fixed <$> cl.value.toNat?
| some _ => none -- To avoid request smuggling with malformed/multiple content-length headers.
| none => if allowEOFBody then some (.fixed 0) else none
-- Single transfer-encoding header.
| some #[header] =>
let te := Header.TransferEncoding.parse header
match Header.TransferEncoding.isChunked <$> te, contentLength with
| some true, none =>
-- HTTP/1.0 does not define chunked transfer encoding (RFC 2068 §19.4.6).
-- A server MUST NOT use chunked with an HTTP/1.0 peer; likewise, an
-- HTTP/1.0 request carrying Transfer-Encoding: chunked is malformed.
if message.version == .v10 then none else some .chunked
| _, _ => none -- To avoid request smuggling when TE and CL are mixed.
-- We disallow multiple transfer-encoding headers.
| some _ => none
/--
Checks whether the message indicates that the connection should be kept alive.
-/
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
let tokens? : Option (Array String) :=
match message.headers.getAll? .connection with
| none => some #[]
| some values =>
values.foldl (fun acc raw => do
let acc acc
let parsed Header.Connection.parse raw
pure (acc ++ parsed.tokens)
) (some #[])
match tokens? with
| none => false
| some tokens =>
if message.version == .v11 then
!tokens.any (· == "close")
else
tokens.any (· == "keep-alive")
instance : Repr (Message.Head dir) :=
match dir with
| .receiving => inferInstanceAs (Repr Request.Head)
| .sending => inferInstanceAs (Repr Response.Head)
instance : Internal.Encode .v11 (Message.Head dir) :=
match dir with
| .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head)
| .sending => inferInstanceAs (Internal.Encode .v11 Response.Head)
instance : EmptyCollection (Message.Head dir) where
emptyCollection :=
match dir with
| .receiving => { method := .get, version := .v11 }
| .sending => {}

View File

@@ -1,548 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Parsec
public import Std.Internal.Http.Data
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Protocol.H1.Config
/-!
This module defines parsers for HTTP/1.1 request and response lines, headers, and body framing. The
reference used is https://httpwg.org/specs/rfc9112.html.
-/
namespace Std.Http.Protocol.H1
open Std Internal Parsec ByteArray Internal Internal.Char
set_option linter.all true
/--
Checks if a byte may appear inside a field value.
This parser enforces strict ASCII-only field values and allows only `field-content`
(`HTAB / SP / VCHAR`).
-/
@[inline]
def isFieldVChar (c : UInt8) : Bool :=
fieldContent (Char.ofUInt8 c)
/--
Checks if a byte may appear unescaped inside a quoted-string value.
Allows `HTAB / SP / %x21 / %x23-5B / %x5D-7E` (strict ASCII-only; no obs-text).
-/
@[inline]
def isQdText (c : UInt8) : Bool :=
qdtext (Char.ofUInt8 c)
/--
Checks if a byte is optional whitespace (`OWS = SP / HTAB`, RFC 9110 §5.6.3).
-/
@[inline]
def isOwsByte (c : UInt8) : Bool :=
ows (Char.ofUInt8 c)
-- Parser blocks
/--
Repeatedly applies `parser` until it returns `none` or the `maxCount` limit is
exceeded. Returns the collected results as an array.
-/
partial def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
let rec go (acc : Array α) : Parser (Array α) := do
let step optional <| attempt do
match parser with
| none => fail "end of items"
| some x => return x
match step with
| none =>
return acc
| some x =>
let acc := acc.push x
if acc.size > maxCount then
fail s!"too many items: {acc.size} > {maxCount}"
go acc
go #[]
/--
Lifts an `Option` into the parser monad, failing with a generic message if the value is `none`.
-/
def liftOption (x : Option α) : Parser α :=
if let some res := x then
return res
else
fail "expected value but got none"
/--
Parses an HTTP token (RFC 9110 §5.6.2): one or more token characters, up to `limit` bytes.
Fails if the input starts with a non-token character or is empty.
-/
@[inline]
def parseToken (limit : Nat) : Parser ByteSlice :=
takeWhileUpTo1 (fun c => tchar (Char.ofUInt8 c)) limit
/--
Parses a line terminator.
-/
@[inline]
def crlf : Parser Unit := do
skipBytes "\r\n".toUTF8
/--
Consumes and ignores empty lines (`CRLF`) that appear before a request-line.
https://httpwg.org/specs/rfc9112.html#rfc.section.2.2:
"In the interest of robustness, a server that is expecting to receive and parse a request-line SHOULD
ignore at least one empty line (CRLF) received prior to the request-line."
-/
def skipLeadingRequestEmptyLines (limits : H1.Config) : Parser Unit := do
let mut count := 0
while ( peekWhen? (· == '\r'.toUInt8)).isSome do
if count >= limits.maxLeadingEmptyLines then
fail "too many leading empty lines"
crlf
count := count + 1
/--
Parses a single space (SP, 0x20).
-/
@[inline]
def sp : Parser Unit :=
skipByte ' '.toUInt8
/--
Parses optional whitespace (OWS = *(SP / HTAB), RFC 9110 §5.6.3), bounded by
`limits.maxSpaceSequence`. Fails if more whitespace follows the limit, so oversized
padding is rejected rather than silently truncated.
-/
@[inline]
def ows (limits : H1.Config) : Parser Unit := do
discard <| takeWhileUpTo isOwsByte limits.maxSpaceSequence
if ( peekWhen? isOwsByte) |>.isSome then
fail "invalid space sequence"
else
pure ()
/--
Parses a single ASCII hex digit and returns its numeric value (`0``15`).
-/
def hexDigit : Parser UInt8 := do
let b any
if isHexDigitByte b then
if b '0'.toUInt8 && b '9'.toUInt8 then return b - '0'.toUInt8
else if b 'A'.toUInt8 && b 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
else return b - 'a'.toUInt8 + 10
else fail s!"invalid hex digit {Char.ofUInt8 b |>.quote}"
/--
Parses a hexadecimal integer (one or more hex digits, up to 16 digits).
Used for chunk-size lines in chunked transfer encoding.
-/
partial def hex : Parser Nat := do
let rec go (acc : Nat) (count : Nat) : Parser Nat := do
match optional (attempt hexDigit) with
| some d =>
if count + 1 > 16 then
fail "chunk size too large"
else
go (acc * 16 + d.toNat) (count + 1)
| none =>
if count = 0 then
-- Preserve EOF as incremental chunk-size parsing can request more data.
-- For non-EOF invalid bytes, keep the specific parse failure.
let _ peek!
fail "expected hex digit"
else
return acc
go 0 0
-- Actual parsers
/--
Parses `HTTP-version = HTTP-name "/" DIGIT "." DIGIT` and returns the major and
minor version numbers as a pair.
-/
def parseHttpVersionNumber : Parser (Nat × Nat) := do
skipBytes "HTTP/".toUTF8
let major digit
skipByte '.'.toUInt8
let minor digit
pure ((major.toNat - 48), (minor.toNat - 48))
/--
Parses an HTTP version string and returns the corresponding `Version` value.
Fails if the version is not recognized by `Version.ofNumber?`.
-/
def parseHttpVersion : Parser Version := do
let (major, minor) parseHttpVersionNumber
liftOption <| Version.ofNumber? major minor
/-
method = token
Every branch is wrapped in `attempt` so that `<|>` always backtracks on
failure, even after consuming bytes. This is strictly necessary only for the
P-group (POST / PUT / PATCH) which share a common first byte, but wrapping
all alternatives keeps the parser defensively correct if new methods are
added in the future.
-/
def parseMethod : Parser Method :=
(attempt <| skipBytes "GET".toUTF8 <&> fun _ => Method.get)
<|> (attempt <| skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
<|> (attempt <| skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
<|> (attempt <| skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
<|> (attempt <| skipBytes "ACL".toUTF8 <&> fun _ => Method.acl)
<|> (attempt <| skipBytes "QUERY".toUTF8 <&> fun _ => Method.query)
<|> (attempt <| skipBytes "SEARCH".toUTF8 <&> fun _ => Method.search)
<|> (attempt <| skipBytes "BASELINE-CONTROL".toUTF8 <&> fun _ => Method.baselineControl)
<|> (attempt <| skipBytes "BIND".toUTF8 <&> fun _ => Method.bind)
<|> (attempt <| skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
<|> (attempt <| skipBytes "CHECKIN".toUTF8 <&> fun _ => Method.checkin)
<|> (attempt <| skipBytes "CHECKOUT".toUTF8 <&> fun _ => Method.checkout)
<|> (attempt <| skipBytes "COPY".toUTF8 <&> fun _ => Method.copy)
<|> (attempt <| skipBytes "LABEL".toUTF8 <&> fun _ => Method.label)
<|> (attempt <| skipBytes "LINK".toUTF8 <&> fun _ => Method.link)
<|> (attempt <| skipBytes "LOCK".toUTF8 <&> fun _ => Method.lock)
<|> (attempt <| skipBytes "MERGE".toUTF8 <&> fun _ => Method.merge)
<|> (attempt <| skipBytes "MKACTIVITY".toUTF8 <&> fun _ => Method.mkactivity)
<|> (attempt <| skipBytes "MKCALENDAR".toUTF8 <&> fun _ => Method.mkcalendar)
<|> (attempt <| skipBytes "MKCOL".toUTF8 <&> fun _ => Method.mkcol)
<|> (attempt <| skipBytes "MKREDIRECTREF".toUTF8 <&> fun _ => Method.mkredirectref)
<|> (attempt <| skipBytes "MKWORKSPACE".toUTF8 <&> fun _ => Method.mkworkspace)
<|> (attempt <| skipBytes "MOVE".toUTF8 <&> fun _ => Method.move)
<|> (attempt <| skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
<|> (attempt <| skipBytes "ORDERPATCH".toUTF8 <&> fun _ => Method.orderpatch)
<|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
<|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
<|> (attempt <| skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
<|> (attempt <| skipBytes "PRI".toUTF8 <&> fun _ => Method.pri)
<|> (attempt <| skipBytes "PROPFIND".toUTF8 <&> fun _ => Method.propfind)
<|> (attempt <| skipBytes "PROPPATCH".toUTF8 <&> fun _ => Method.proppatch)
<|> (attempt <| skipBytes "REBIND".toUTF8 <&> fun _ => Method.rebind)
<|> (attempt <| skipBytes "REPORT".toUTF8 <&> fun _ => Method.report)
<|> (attempt <| skipBytes "UNBIND".toUTF8 <&> fun _ => Method.unbind)
<|> (attempt <| skipBytes "UNCHECKOUT".toUTF8 <&> fun _ => Method.uncheckout)
<|> (attempt <| skipBytes "UNLINK".toUTF8 <&> fun _ => Method.unlink)
<|> (attempt <| skipBytes "UNLOCK".toUTF8 <&> fun _ => Method.unlock)
<|> (attempt <| skipBytes "UPDATEREDIRECTREF".toUTF8 <&> fun _ => Method.updateredirectref)
<|> (attempt <| skipBytes "UPDATE".toUTF8 <&> fun _ => Method.update)
<|> (attempt <| skipBytes "VERSION-CONTROL".toUTF8 <&> fun _ => Method.versionControl)
<|> (parseToken 64 *> fail "unrecognized method")
/--
Parses a request-target URI, up to `limits.maxUriLength` bytes.
Fails with `"uri too long"` if the target exceeds the configured limit.
-/
def parseURI (limits : H1.Config) : Parser ByteArray := do
let uri takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
if uri.size == limits.maxUriLength then
if ( peekWhen? (· != ' '.toUInt8)) |>.isSome then
fail "uri too long"
return uri.toByteArray
/--
Shared core for request-line parsing: parses `request-target SP HTTP-version CRLF`
and returns the `RequestTarget` together with the raw major/minor version numbers.
Both `parseRequestLine` and `parseRequestLineRawVersion` call this after consuming
the method token, keeping URI validation and version parsing in one place.
-/
def parseRequestLineBody (limits : H1.Config) : Parser (RequestTarget × Nat × Nat) := do
let rawUri parseURI limits <* sp
let uri match (Std.Http.URI.Parser.parseRequestTarget <* eof).run rawUri with
| .ok res => pure res
| .error res => fail res
let versionPair parseHttpVersionNumber <* crlf
return (uri, versionPair)
/--
Parses a request line and returns a fully-typed `Request.Head`.
`request-line = method SP request-target SP HTTP-version`
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
skipLeadingRequestEmptyLines limits
let method parseMethod <* sp
let (uri, (major, minor)) parseRequestLineBody limits
if major == 1 minor == 1 then
return method, .v11, uri, .empty
else if major == 1 minor == 0 then
return method, .v10, uri, .empty
else
fail "unsupported HTTP version"
/--
Parses a request line and returns the recognized HTTP method and version when available.
request-line = method SP request-target SP HTTP-version
-/
public def parseRequestLineRawVersion (limits : H1.Config) : Parser (Method × RequestTarget × Option Version) := do
skipLeadingRequestEmptyLines limits
let method parseMethod <* sp
let (uri, (major, minor)) parseRequestLineBody limits
return (method, uri, Version.ofNumber? major minor)
/--
Parses a single header field line.
`field-line = field-name ":" OWS field-value OWS`
-/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
let name parseToken limits.maxHeaderNameLength
let value skipByte ':'.toUInt8 *> ows limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* ows limits
let name liftOption <| String.fromUTF8? name.toByteArray
let value liftOption <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty
let value := value.trimAsciiEnd.toString
return (name, value)
/--
Parses a single header field line, or returns `none` when it sees the blank line that
terminates the header section.
```
field-line = field-name ":" OWS field-value OWS CRLF
```
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
let next peek?
if next == some '\r'.toUInt8 next == some '\n'.toUInt8 then
crlf
pure none
else
some <$> (parseFieldLine limits <* crlf)
/--
Parses a backslash-escaped character inside a quoted-string.
`quoted-pair = "\" ( HTAB / SP / VCHAR )` — strict ASCII-only (no obs-text).
-/
def parseQuotedPair : Parser UInt8 := do
skipByte '\\'.toUInt8
let b any
if quotedPairChar (Char.ofUInt8 b) then
return b
else
fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}"
/--
Parses a quoted-string value, unescaping quoted-pairs.
`quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE`
-/
partial def parseQuotedString (maxLength : Nat) : Parser String := do
skipByte '"'.toUInt8
let rec loop (buf : ByteArray) (length : Nat) : Parser ByteArray := do
let b ← any
if b == '"'.toUInt8 then
return buf
else if b == '\\'.toUInt8 then
let next any
if quotedPairChar (Char.ofUInt8 next)
then
let length := length + 1
if length > maxLength then
fail "quoted-string too long"
else
loop (buf.push next) length
else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}"
else if isQdText b then
let length := length + 1
if length > maxLength then
fail "quoted-string too long"
else
loop (buf.push b) length
else
fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}"
liftOption <| String.fromUTF8? ( loop .empty 0)
-- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] )
def parseChunkExt (limits : H1.Config) : Parser (Chunk.ExtensionName × Option Chunk.ExtensionValue) := do
ows limits *> skipByte ';'.toUInt8 *> ows limits
let name (liftOption =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtNameLength) <* ows limits
let some name := Chunk.ExtensionName.ofString? name
| fail "invalid extension name"
if ( peekWhen? (· == '='.toUInt8)) |>.isSome then
-- RFC 9112 §7.1.1: BWS is allowed around "=".
-- The `<* ows limits` after the name already consumed any trailing whitespace,
-- so these ows calls are no-ops in practice, but kept for explicit grammar correspondence.
ows limits *> skipByte '='.toUInt8 *> ows limits
let value ows limits *> (parseQuotedString limits.maxChunkExtValueLength <|> liftOption =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtValueLength))
let some value := Chunk.ExtensionValue.ofString? value
| fail "invalid extension value"
return (name, some value)
return (name, none)
/--
Parses the size and extensions of a chunk.
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) := do
let size hex
let ext manyItems (optional (attempt (parseChunkExt limits))) limits.maxChunkExtensions
crlf
return (size, ext)
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
| complete (data : ByteSlice)
| incomplete (data : ByteSlice) (remaining : Nat)
/--
Parses a single chunk in chunked transfer encoding.
-/
public def parseChunkPartial (limits : H1.Config) : Parser (Option (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue) × ByteSlice)) := do
let (size, ext) parseChunkSize limits
if size == 0 then
return none
else
let data take size
return some size, ext, data
/--
Parses fixed-size data that can be incomplete.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
if it.remainingBytes = 0 then
.error it .eof
else if it.remainingBytes < size then
.success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
else
.success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
/--
Parses fixed-size chunk data that can be incomplete.
-/
public def parseChunkSizedData (size : Nat) : Parser TakeResult := do
match parseFixedSizeData size with
| .complete data => crlf *> return .complete data
| .incomplete data res => return .incomplete data res
/--
Returns `true` if `name` (compared case-insensitively) is a field that MUST NOT appear in HTTP/1.1
trailer sections per RFC 9112 §6.5. Forbidden fields are those required for message framing
(`content-length`, `transfer-encoding`), routing (`host`), or connection management (`connection`).
-/
def isForbiddenTrailerField (name : String) : Bool :=
let n := name.toLower
n == "content-length" || n == "transfer-encoding" || n == "host" ||
n == "connection" || n == "expect" || n == "te" ||
n == "authorization" || n == "max-forwards" || n == "cache-control" ||
n == "content-encoding" || n == "upgrade" || n == "trailer"
/--
Parses a trailer header (used after a chunked body), rejecting forbidden field names per RFC 9112
§6.5. Fields used for message framing (`content-length`, `transfer-encoding`), routing (`host`),
or connection management (`connection`, `te`, `upgrade`) are rejected to prevent trailer injection
attacks where a downstream proxy might re-interpret them.
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := do
let result parseSingleHeader limits
if let some (name, _) := result then
if isForbiddenTrailerField name then
fail s!"forbidden trailer field: {name}"
return result
/--
Parses trailer headers after a chunked body and returns them as an array of name-value pairs.
This is exposed for callers that need the trailer values directly (e.g. clients). The
internal protocol machine uses `parseLastChunkBody` instead, which discards trailer values.
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do
let trailers manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
crlf
return trailers
/--
Returns `true` if `c` is a valid reason-phrase byte (`HTAB / SP / VCHAR`, strict ASCII-only).
-/
@[inline]
def isReasonPhraseByte (c : UInt8) : Bool :=
fieldContent (Char.ofUInt8 c)
/--
Parses a reason phrase (text after status code).
Allows only `HTAB / SP / VCHAR` bytes (strict ASCII-only).
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
let bytes takeWhileUpTo isReasonPhraseByte limits.maxReasonPhraseLength
liftOption <| String.fromUTF8? bytes.toByteArray
/--
Parses a status-code (3 decimal digits), the following reason phrase, and the
terminating CRLF; returns a typed `Status`.

Fails when the reason phrase is invalid or `Status.ofCode` rejects the numeric code.
-/
def parseStatusCode (limits : H1.Config) : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- Digits arrive as ASCII bytes; subtracting 48 ('0') yields their numeric values.
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  sp
  let phrase ← parseReasonPhrase limits <* crlf
  if h : IsValidReasonPhrase phrase then
    if let some status := Status.ofCode (some phrase, h) code.toUInt16 then
      return status
  fail "invalid status code"
/--
Parses a status line and returns a fully-typed `Response.Head`.

`status-line = HTTP-version SP status-code SP [ reason-phrase ]`

Accepts only HTTP/1.0 and HTTP/1.1. For parsing where the version may be unrecognized and
must be mapped to an error event, use `parseStatusLineRawVersion`.
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  if major == 1 && minor == 1 then
    return { status, version := .v11, headers := .empty }
  else if major == 1 && minor == 0 then
    return { status, version := .v10, headers := .empty }
  else
    fail "unsupported HTTP version"
/--
Parses a status line and returns the status code plus recognized HTTP version when available.
Consumes and discards the reason phrase.

`status-line = HTTP-version SP status-code SP [ reason-phrase ] CRLF`
-/
public def parseStatusLineRawVersion (limits : H1.Config) : Parser (Status × Option Version) := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  -- `Version.ofNumber?` yields `none` for unrecognized version numbers.
  return (status, Version.ofNumber? major minor)
/--
Parses the trailer section that follows the last chunk size line (`0\r\n`),
discarding any trailer values.
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do
  let _ ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
end Std.Http.Protocol.H1

View File

@@ -1,319 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Reader
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
It tracks the parsing state including start line, headers, and body handling for
both fixed-length and chunked transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
The body-framing sub-state of the `Reader` state machine.
-/
inductive Reader.BodyState where
  /--
  Parse fixed-length body bytes, tracking the number of bytes remaining.
  -/
  | fixed (remaining : Nat)
  /--
  Parse the next chunk-size line in chunked transfer encoding.
  -/
  | chunkedSize
  /--
  Parse chunk data for the current chunk, carrying the chunk extensions from the
  size line and the number of data bytes still expected.
  -/
  | chunkedBody (ext : Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) (remaining : Nat)
  /--
  Parse body bytes until EOF (connection close).
  -/
  | closeDelimited
deriving Inhabited, Repr, BEq
/--
The state of the `Reader` state machine.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for HTTP start line.
  -/
  | needStartLine : State dir
  /--
  State waiting for HTTP headers, tracking the number of headers parsed so far.
  -/
  | needHeader : Nat → State dir
  /--
  Unified body-reading state.
  -/
  | readBody : Reader.BodyState → State dir
  /--
  Paused waiting for a `canContinue` decision, carrying the next state.
  -/
  | «continue» : State dir → State dir
  /--
  State waiting to be able to read new data.
  -/
  | pending : State dir
  /--
  State that it completed a single request or response and can go to the next one.
  -/
  | complete
  /--
  State that it has completed and cannot process more data.
  -/
  | closed
  /--
  The input is malformed.
  -/
  | failed (error : Error) : State dir
deriving Inhabited, Repr, BEq
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
  /--
  The current state of the machine. Defaults per direction: `.needStartLine` when
  receiving, `.pending` when sending.
  -/
  state : Reader.State dir := match dir with | .receiving => .needStartLine | .sending => .pending
  /--
  The input byte array.
  -/
  input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter
  /--
  The incoming message head.
  -/
  messageHead : Message.Head dir := {}
  /--
  Count of messages that this connection has already parsed.
  -/
  messageCount : Nat := 0
  /--
  Number of body bytes read for the current message.
  -/
  bodyBytesRead : Nat := 0
  /--
  Number of header bytes accumulated for the current message.
  Counts name + value bytes plus 4 bytes per line for `: ` and `\r\n`.
  -/
  headerBytesRead : Nat := 0
  /--
  Set when no further input bytes will arrive (the remote end has closed the connection).
  -/
  noMoreInput : Bool := false
namespace Reader
/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
  reader.state matches .closed

/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
  reader.state matches .complete

/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
  reader.state matches .failed _
/--
Feeds new data into the reader's input buffer.

If the current input is exhausted, replaces it outright; otherwise compacts the buffer
by discarding already-parsed bytes before appending the new data.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
  let input :=
    if reader.input.atEnd then
      data.iter
    else
      -- Keep only the unread suffix, then append the fresh bytes.
      let unread := reader.input.array.extract reader.input.pos reader.input.array.size
      (unread ++ data).iter
  { reader with input }
/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
  { reader with input }
/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
  { reader with messageHead }
/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
  -- Both arms are identical; the match forces `dir` to be scrutinized so the
  -- direction-indexed `Message.Head` update elaborates in each case.
  match dir with
  | .sending | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
/--
Closes the reader, transitioning to the closed state and marking the input as finished.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true, state := .closed }

/--
Marks the current message as complete and prepares for the next message.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
  { reader with
      messageCount := reader.messageCount + 1
      state := .complete }

/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
  { reader with state := .failed error }

/--
Resets the reader to parse a new message on the same connection, clearing the
per-message counters and accumulated head.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
  { reader with
      messageHead := {}
      headerBytesRead := 0
      bodyBytesRead := 0
      state := .needStartLine }
/--
Checks if more input is needed to continue parsing.

More input is only useful when the buffer is drained, the peer may still send
bytes, and the machine is in a state that consumes input.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
  reader.input.atEnd && !reader.noMoreInput &&
  match reader.state with
  -- Terminal or paused states never request more input.
  | .complete | .closed | .failed _ | .«continue» _ => false
  | _ => true
/--
Returns the current parse error if the reader has failed, `none` otherwise.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
  if let .failed err := reader.state then some err else none

/--
Gets the number of bytes remaining in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
  let it := reader.input
  it.array.size - it.pos
/--
Advances the input iterator by `n` bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
  reader.setInput (reader.input.forward n)

/--
Transitions to the state for reading headers, resetting the per-message byte counters.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
  { reader with
      bodyBytesRead := 0
      headerBytesRead := 0
      state := .needHeader 0 }

/--
Adds `n` body bytes parsed for the current message.
-/
@[inline]
def addBodyBytes (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with bodyBytesRead := reader.bodyBytesRead + n }

/--
Adds `n` header bytes accumulated for the current message.
-/
@[inline]
def addHeaderBytes (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with headerBytesRead := reader.headerBytesRead + n }
/--
Transitions to the state for reading a fixed-length body of `size` bytes.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
  { reader with state := .readBody <| .fixed size }

/--
Transitions to the state for reading chunked transfer encoding.
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
  { reader with state := .readBody <| .chunkedSize }

/--
Marks that no more input will be provided (connection closed).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true }

/--
Checks if the connection should be kept alive for the next message, delegating
to the parsed message head.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
  reader.messageHead.shouldKeepAlive
end Reader

View File

@@ -1,280 +0,0 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Writer
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
It handles encoding headers and body content for both fixed-length and chunked
transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Internal
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
  /--
  Initial state before any outgoing message has been prepared.
  -/
  | pending
  /--
  Waiting for the application to provide the outgoing message head via `send`.
  -/
  | waitingHeaders
  /--
  The message head has been provided; waiting for `shouldFlush` to become true before
  serializing headers to output.
  -/
  | waitingForFlush
  /--
  Writing the body output (either fixed-length or chunked), carrying the framing mode.
  -/
  | writingBody (mode : Body.Length)
  /--
  Completed writing a single message and ready to begin the next one.
  -/
  | complete
  /--
  Closed; no further data can be written.
  -/
  | closed
deriving Inhabited, Repr, BEq
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
  /--
  Body chunks supplied by the user, accumulated before being flushed to output.
  -/
  userData : Array Chunk := .empty
  /--
  All the data produced by the writer, ready to be sent to the socket.
  -/
  outputData : ChunkedBuffer := .empty
  /--
  The state of the writer machine. Defaults per direction: `.pending` when
  receiving, `.waitingHeaders` when sending.
  -/
  state : Writer.State := match dir with | .receiving => .pending | .sending => .waitingHeaders
  /--
  When the user specifies the exact body size upfront, `Content-Length` framing is
  used instead of chunked transfer encoding.
  -/
  knownSize : Option Body.Length := none
  /--
  The outgoing message that will be written to the output.
  -/
  messageHead : Message.Head dir.swap := {}
  /--
  Whether the user has called `send` to provide the outgoing message head.
  -/
  sentMessage : Bool := false
  /--
  Set when the user has finished sending body data, allowing fixed-size framing
  to be determined upfront.
  -/
  userClosedBody : Bool := false
  /--
  When `true`, body bytes are intentionally omitted from the wire for this message
  (e.g. HEAD responses), while headers/framing metadata may still describe the
  hypothetical representation.
  -/
  omitBody : Bool := false
  /--
  Running total of bytes across all `userData` chunks, maintained incrementally
  to avoid an O(n) fold on every fixed-length write step.
  -/
  userDataBytes : Nat := 0
namespace Writer
/--
Returns `true` when no more user body data will arrive: either the user called
`closeBody`, or the writer has already transitioned to `complete` or `closed`.

Note: this does **not** mean the wire is ready to accept new bytes — a `closed`
writer cannot send anything. Use this to decide whether to flush pending body
data rather than to check writability.
-/
@[inline]
def noMoreUserData {dir} (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed => true
  | .complete => true
  | _ => writer.userClosedBody
/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
  writer.state matches .closed

/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
  writer.state matches .complete

/--
Checks if the writer can accept more data from the user.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
  match writer.state with
  | .waitingHeaders | .waitingForFlush => true
  -- While writing the body, new data is accepted until the user closes it.
  | .writingBody _ => !writer.userClosedBody
  | _ => false
/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
  { writer with userClosedBody := true }

/--
Determines the transfer encoding mode: an explicitly known size wins, then a
closed body allows exact fixed-length framing, otherwise chunked is used.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
  match writer.knownSize with
  | some mode => mode
  | none =>
    if writer.userClosedBody then
      .fixed writer.userDataBytes
    else
      .chunked
/--
Adds user data chunks to the writer's buffer if the writer can accept data;
otherwise the chunks are dropped and the writer is returned unchanged.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
  if !writer.canAcceptData then
    writer
  else
    -- Keep the running byte total in sync with the appended chunks.
    let extraBytes := data.foldl (init := 0) fun acc c => acc + c.data.size
    { writer with
        userDataBytes := writer.userDataBytes + extraBytes
        userData := writer.userData ++ data }
/--
Writes accumulated user data to output using fixed-size encoding, emitting at most
`limitSize` bytes. Returns the updated writer and the unused byte budget.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
  if writer.userData.size = 0 then
    (writer, limitSize)
  else
    -- Fold state: (bytes emitted so far, chunks still pending, total bytes taken).
    let (chunks, pending, totalSize) := writer.userData.foldl (fun (state : Array ByteArray × Array Chunk × Nat) chunk =>
      let (acc, pending, size) := state
      if size >= limitSize then
        -- Budget exhausted: keep the whole chunk for a later write.
        (acc, pending.push chunk, size)
      else
        let remaining := limitSize - size
        let takeSize := min chunk.data.size remaining
        let dataPart := chunk.data.extract 0 takeSize
        let acc := if takeSize = 0 then acc else acc.push dataPart
        let size := size + takeSize
        if takeSize < chunk.data.size then
          -- Chunk was split: the untaken tail stays pending.
          let pendingChunk : Chunk := { chunk with data := chunk.data.extract takeSize chunk.data.size }
          (acc, pending.push pendingChunk, size)
        else
          (acc, pending, size)
    ) (#[], #[], 0)
    let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks)
    let remaining := limitSize - totalSize
    ({ writer with userData := pending, outputData, userDataBytes := writer.userDataBytes - totalSize }, remaining)
/--
Writes accumulated user data to output using chunked transfer encoding,
draining the user buffer and its byte counter.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
  if writer.userData.size = 0 then
    writer
  else
    let data := writer.userData
    -- Each chunk is encoded (size line + data + CRLF) onto the output buffer.
    { writer with userData := #[], userDataBytes := 0, outputData := data.foldl (Encode.encode .v11) writer.outputData }
/--
Flushes any remaining user data, writes the final chunk terminator (0\r\n\r\n),
and transitions to the complete state.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
  let flushed := writer.writeChunkedBody
  { flushed with
      state := .complete
      outputData := flushed.outputData.write "0\r\n\r\n".toUTF8 }
/--
Extracts all accumulated output data and returns it alongside a writer whose
output buffer has been cleared.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
  let bytes := writer.outputData.toByteArray
  let drained := { writer with outputData := ChunkedBuffer.empty }
  some (drained, bytes)

/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
  { writer with state := state }

/--
Writes the message headers to the output buffer (HTTP/1.1 wire encoding).
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
  let outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead
  { writer with outputData }
/--
Checks if the connection should be kept alive based on the Connection header.
A missing Connection header defaults to keep-alive.
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
  match writer.messageHead.headers.get? .connection with
  | some v => v.value.toLower != "close"
  | none => true

/--
Closes the writer, transitioning to the closed state.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
  { writer with state := .closed }
end Writer

View File

@@ -44,15 +44,8 @@ protected def Parser.run (p : Parser α) (arr : ByteArray) : Except String α :=
Parse a single byte equal to `b`, fails if different.
-/
@[inline]
def pbyte (b : UInt8) : Parser UInt8 := fun it =>
if h : it.hasNext then
let got := it.curr' h
if got = b then
.success (it.next' h) got
else
.error it (.other s!"expected: '{b}'")
else
.error it .eof
def pbyte (b : UInt8) : Parser UInt8 := attempt do
if ( any) = b then pure b else fail s!"expected: '{b}'"
/--
Skip a single byte equal to `b`, fails if different.
@@ -64,29 +57,16 @@ def skipByte (b : UInt8) : Parser Unit :=
/--
Skip a sequence of bytes equal to the given `ByteArray`.
-/
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
if h : idx < arr.size then
if hnext : it.hasNext then
let got := it.curr' hnext
let want := arr[idx]
if got = want then
go (idx + 1) (it.next' hnext)
else
.error it (.other s!"expected byte {want}, got {got}")
else
.error it .eof
else
.success it ()
go 0 it
def skipBytes (arr : ByteArray) : Parser Unit := do
for b in arr do
skipByte b
/--
Parse a string by matching its UTF-8 bytes, returns the string on success.
-/
@[inline]
def pstring (s : String) : Parser String := do
let utf8 := s.toUTF8
skipBytes utf8
skipBytes s.toUTF8
return s
/--
@@ -213,47 +193,19 @@ def take (n : Nat) : Parser ByteSlice := fun it =>
else
.success (it.forward n) (it.array[it.idx...(it.idx+n)])
/--
Scans while `pred` is satisfied. Returns `(count, iterator, hitEof)`.
-/
private partial def scanWhile (pred : UInt8 Bool) (count : Nat) (iter : ByteArray.Iterator) :
Nat × ByteArray.Iterator × Bool :=
if h : iter.hasNext then
if pred (iter.curr' h) then
scanWhile pred (count + 1) (iter.next' h)
else
(count, iter, false)
else
(count, iter, true)
/--
Scans while `pred` is satisfied, bounded by `limit`.
Returns `(count, iterator, hitEof)`.
-/
private partial def scanWhileUpTo (pred : UInt8 Bool) (limit : Nat) (count : Nat)
(iter : ByteArray.Iterator) : Nat × ByteArray.Iterator × Bool :=
if count limit then
(count, iter, false)
else if h : iter.hasNext then
if pred (iter.curr' h) then
scanWhileUpTo pred limit (count + 1) (iter.next' h)
else
(count, iter, false)
else
(count, iter, true)
/--
Parses while a predicate is satisfied.
Fails with `.eof` if input ends while the predicate still holds.
-/
@[inline]
partial def takeWhile (pred : UInt8 Bool) : Parser ByteSlice :=
fun it =>
let (length, newIt, hitEof) := scanWhile pred 0 it
if hitEof then
.error newIt .eof
else
.success newIt (it.array[it.idx...(it.idx + length)])
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
if ¬iter.hasNext then (count, iter)
else if pred iter.curr then findEnd (count + 1) iter.next
else (count, iter)
let (length, newIt) := findEnd 0 it
.success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses until a predicate is satisfied (exclusive).
@@ -264,16 +216,16 @@ def takeUntil (pred : UInt8 → Bool) : Parser ByteSlice :=
/--
Skips while a predicate is satisfied.
Fails with `.eof` if input ends while the predicate still holds.
-/
@[inline]
partial def skipWhile (pred : UInt8 Bool) : Parser Unit :=
fun it =>
let (_, newIt, hitEof) := scanWhile pred 0 it
if hitEof then
.error newIt .eof
else
.success newIt ()
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
if ¬iter.hasNext then iter
else if pred iter.curr then findEnd (count + 1) iter.next
else iter
.success (findEnd 0 it) ()
/--
Skips until a predicate is satisfied.
@@ -284,31 +236,34 @@ def skipUntil (pred : UInt8 → Bool) : Parser Unit :=
/--
Parses while a predicate is satisfied, up to a given limit.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
partial def takeWhileUpTo (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
fun it =>
let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
if count limit then (count, iter)
else if ¬iter.hasNext then (count, iter)
else if pred iter.curr then findEnd (count + 1) iter.next
else (count, iter)
if hitEof then
.error newIt .eof
else
.success newIt (it.array[it.idx...(it.idx + length)])
let (length, newIt) := findEnd 0 it
.success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses while a predicate is satisfied, up to a given limit, requiring at least one byte.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
def takeWhileUpTo1 (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
fun it =>
let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
if count limit then (count, iter)
else if ¬iter.hasNext then (count, iter)
else if pred iter.curr then findEnd (count + 1) iter.next
else (count, iter)
if hitEof then
.error newIt .eof
else if length = 0 then
.error it (.other "expected at least one char")
let (length, newIt) := findEnd 0 it
if length = 0 then
.error it (if newIt.atEnd then .eof else .other "expected at least one char")
else
.success newIt (it.array[it.idx...(it.idx + length)])
@@ -319,42 +274,19 @@ Parses until a predicate is satisfied (exclusive), up to a given limit.
def takeUntilUpTo (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
takeWhileUpTo (fun b => ¬pred b) limit
/--
Parses while a predicate is satisfied, consuming at most `limit` bytes.
Unlike `takeWhileUpTo`, succeeds even if input ends before the predicate stops holding.
-/
@[inline]
def takeWhileAtMost (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
fun it =>
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
.success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses while a predicate is satisfied, consuming at most `limit` bytes, requiring at least one.
Unlike `takeWhileUpTo1`, succeeds even if input ends before the predicate stops holding.
-/
@[inline]
def takeWhile1AtMost (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
fun it =>
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
if length = 0 then
.error it (.other "expected at least one char")
else
.success newIt (it.array[it.idx...(it.idx + length)])
/--
Skips while a predicate is satisfied, up to a given limit.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
partial def skipWhileUpTo (pred : UInt8 Bool) (limit : Nat) : Parser Unit :=
fun it =>
let (_, newIt, hitEof) := scanWhileUpTo pred limit 0 it
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
if count limit then iter
else if ¬iter.hasNext then iter
else if pred iter.curr then findEnd (count + 1) iter.next
else iter
if hitEof then
.error newIt .eof
else
.success newIt ()
.success (findEnd 0 it) ()
/--
Skips until a predicate is satisfied, up to a given limit.

View File

@@ -325,55 +325,6 @@ LEAN_EXPORT LEAN_NORETURN void lean_internal_panic(char const * msg);
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_out_of_memory(void);
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_unreachable(void);
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_rc_overflow(void);
LEAN_EXPORT LEAN_NORETURN void lean_internal_panic_overflow(void);
static inline bool lean_usize_mul_would_overflow(size_t a, size_t b) {
#if defined(__GNUC__) || defined(__clang__)
size_t r;
return __builtin_mul_overflow(a, b, &r);
#else
return a != 0 && b > SIZE_MAX / a;
#endif
}
static inline bool lean_usize_add_would_overflow(size_t a, size_t b) {
#if defined(__GNUC__) || defined(__clang__)
size_t r;
return __builtin_add_overflow(a, b, &r);
#else
return a > SIZE_MAX - b;
#endif
}
static inline size_t lean_usize_mul_checked(size_t a, size_t b) {
#if defined(__GNUC__) || defined(__clang__)
size_t r;
if (LEAN_UNLIKELY(__builtin_mul_overflow(a, b, &r))) {
lean_internal_panic_overflow();
}
return r;
#else
if (a != 0 && b > SIZE_MAX / a) {
lean_internal_panic_overflow();
}
return a * b;
#endif
}
static inline size_t lean_usize_add_checked(size_t a, size_t b) {
#if defined(__GNUC__) || defined(__clang__)
size_t r;
if (LEAN_UNLIKELY(__builtin_add_overflow(a, b, &r))) {
lean_internal_panic_overflow();
}
return r;
#else
if (a > SIZE_MAX - b) {
lean_internal_panic_overflow();
}
return a + b;
#endif
}
static inline size_t lean_align(size_t v, size_t a) {
return (v / a)*a + a * (v % a != 0);
@@ -666,7 +617,7 @@ static inline uint8_t * lean_ctor_scalar_cptr(lean_object * o) {
static inline lean_object * lean_alloc_ctor(unsigned tag, unsigned num_objs, unsigned scalar_sz) {
assert(tag <= LeanMaxCtorTag && num_objs < LEAN_MAX_CTOR_FIELDS && scalar_sz < LEAN_MAX_CTOR_SCALARS_SIZE);
lean_object * o = lean_alloc_ctor_memory(lean_usize_add_checked(lean_usize_add_checked(sizeof(lean_ctor_object), lean_usize_mul_checked(sizeof(void*), num_objs)), scalar_sz));
lean_object * o = lean_alloc_ctor_memory(sizeof(lean_ctor_object) + sizeof(void*)*num_objs + scalar_sz);
lean_set_st_header(o, tag, num_objs);
return o;
}
@@ -776,7 +727,7 @@ static inline lean_object ** lean_closure_arg_cptr(lean_object * o) { return lea
static inline lean_obj_res lean_alloc_closure(void * fun, unsigned arity, unsigned num_fixed) {
assert(arity > 0);
assert(num_fixed < arity);
lean_closure_object * o = (lean_closure_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_closure_object), lean_usize_mul_checked(sizeof(void*), num_fixed)));
lean_closure_object * o = (lean_closure_object*)lean_alloc_object(sizeof(lean_closure_object) + sizeof(void*)*num_fixed);
lean_set_st_header((lean_object*)o, LeanClosure, 0);
o->m_fun = fun;
o->m_arity = arity;
@@ -822,7 +773,7 @@ LEAN_EXPORT lean_object* lean_apply_m(lean_object* f, unsigned n, lean_object**
/* Arrays of objects (low level API) */
static inline lean_obj_res lean_alloc_array(size_t size, size_t capacity) {
lean_array_object * o = (lean_array_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_array_object), lean_usize_mul_checked(sizeof(void*), capacity)));
lean_array_object * o = (lean_array_object*)lean_alloc_object(sizeof(lean_array_object) + sizeof(void*)*capacity);
lean_set_st_header((lean_object*)o, LeanArray, 0);
o->m_size = size;
o->m_capacity = capacity;
@@ -999,18 +950,8 @@ LEAN_EXPORT lean_object * lean_mk_array(lean_obj_arg n, lean_obj_arg v);
/* Array of scalars */
static inline bool lean_alloc_sarray_would_overflow(unsigned elem_size, size_t capacity) {
if (lean_usize_mul_would_overflow(elem_size, capacity)) {
return true;
}
if (lean_usize_add_would_overflow(sizeof(lean_sarray_object), elem_size * capacity)) {
return true;
}
return false;
}
static inline lean_obj_res lean_alloc_sarray(unsigned elem_size, size_t size, size_t capacity) {
lean_sarray_object * o = (lean_sarray_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_sarray_object), lean_usize_mul_checked(elem_size, capacity)));
lean_sarray_object * o = (lean_sarray_object*)lean_alloc_object(sizeof(lean_sarray_object) + elem_size*capacity);
lean_set_st_header((lean_object*)o, LeanScalarArray, elem_size);
o->m_size = size;
o->m_capacity = capacity;
@@ -1172,7 +1113,7 @@ static inline lean_obj_res lean_float_array_set(lean_obj_arg a, b_lean_obj_arg i
/* Strings */
static inline lean_obj_res lean_alloc_string(size_t size, size_t capacity, size_t len) {
lean_string_object * o = (lean_string_object*)lean_alloc_object(lean_usize_add_checked(sizeof(lean_string_object), capacity));
lean_string_object * o = (lean_string_object*)lean_alloc_object(sizeof(lean_string_object) + capacity);
lean_set_st_header((lean_object*)o, LeanString, 0);
o->m_size = size;
o->m_capacity = capacity;

View File

@@ -136,7 +136,7 @@ public def compileStaticLib
let args := args.push libFile.toString ++ ( mkArgs libFile <| oFiles.map toString)
proc {cmd := ar.toString, args}
def getMacOSXDeploymentEnv : BaseIO (Array (String × Option String)) := do
private def getMacOSXDeploymentEnv : BaseIO (Array (String × Option String)) := do
-- It is difficult to identify the correct minor version here, leading to linking warnings like:
-- `ld64.lld: warning: /usr/lib/system/libsystem_kernel.dylib has version 13.5.0, which is newer than target minimum of 13.0.0`
-- In order to suppress these we set the MACOSX_DEPLOYMENT_TARGET variable into the far future.

View File

@@ -126,7 +126,7 @@ public def BuildMetadata.parse (contents : String) : Except String BuildMetadata
public def BuildMetadata.ofFetch (inputHash : Hash) (outputs : Json) : BuildMetadata :=
{depHash := inputHash, outputs? := outputs, synthetic := true, inputs := #[], log := {}}
partial def serializeInputs (inputs : Array BuildTrace) : Array (String × Json) :=
private partial def serializeInputs (inputs : Array BuildTrace) : Array (String × Json) :=
inputs.foldl (init := {}) fun r trace =>
let val :=
if trace.inputs.isEmpty then
@@ -135,7 +135,7 @@ partial def serializeInputs (inputs : Array BuildTrace) : Array (String × Json)
toJson (serializeInputs trace.inputs)
r.push (trace.caption, val)
def BuildMetadata.ofBuildCore
private def BuildMetadata.ofBuildCore
(depTrace : BuildTrace) (outputs : Json) (log : Log)
: BuildMetadata where
inputs := serializeInputs depTrace.inputs
@@ -210,7 +210,7 @@ deriving DecidableEq
@[inline] public def OutputStatus.isCacheable (status : OutputStatus) : Bool :=
status != .mtimeUpToDate
@[specialize] def checkHashUpToDate'
@[specialize] private def checkHashUpToDate'
[CheckExists ι] [GetMTime ι]
(info : ι) (depTrace : BuildTrace) (depHash : Option Hash)
(oldTrace := depTrace.mtime)
@@ -514,7 +514,7 @@ stored in the cached input-to-content mapping.
**For internal use only.**
-/
@[specialize] def getArtifactsUsingCache?
@[specialize] private def getArtifactsUsingCache?
[ResolveOutputs α] (inputHash : Hash) (pkg : Package)
: JobM (Option α) := do
if let some out ( getLakeCache).readOutputs? pkg.cacheScope inputHash then
@@ -868,7 +868,7 @@ public def buildStaticLib
compileStaticLib libFile oFiles ( getLeanAr) thin
return art.path
def mkLinkObjArgs
private def mkLinkObjArgs
(objs : Array FilePath) (libs : Array Dynlib) : Array String
:= Id.run do
let mut args := #[]
@@ -884,7 +884,7 @@ def mkLinkObjArgs
Topologically sorts the library dependency tree by name.
Libraries come *before* their dependencies.
-/
partial def mkLinkOrder (libs : Array Dynlib) : JobM (Array Dynlib) := do
private partial def mkLinkOrder (libs : Array Dynlib) : JobM (Array Dynlib) := do
let r := libs.foldlM (m := Except (Cycle String)) (init := ({}, #[])) fun (v, o) lib =>
go lib [] v o
match r with

View File

@@ -19,7 +19,7 @@ open System (FilePath)
The build function definition for a Lean executable.
-/
def LeanExe.recBuildExe (self : LeanExe) : FetchM (Job FilePath) :=
private def LeanExe.recBuildExe (self : LeanExe) : FetchM (Job FilePath) :=
withRegisterJob s!"{self.name}:exe" <| withCurrPackage self.pkg do
/-
Remark: We must build the root before we fetch the transitive imports
@@ -56,7 +56,7 @@ def LeanExe.recBuildExe (self : LeanExe) : FetchM (Job FilePath) :=
public def LeanExe.exeFacetConfig : LeanExeFacetConfig exeFacet :=
mkFacetJobConfig recBuildExe
def LeanExe.recBuildDefault (lib : LeanExe) : FetchM (Job FilePath) :=
private def LeanExe.recBuildDefault (lib : LeanExe) : FetchM (Job FilePath) :=
lib.exe.fetch
/-- The facet configuration for the builtin `ExternLib.dynlibFacet`. -/

View File

@@ -20,7 +20,7 @@ open System
namespace Lake
def ExternLib.recBuildStatic (lib : ExternLib) : FetchM (Job FilePath) :=
private def ExternLib.recBuildStatic (lib : ExternLib) : FetchM (Job FilePath) :=
withRegisterJob s!"{lib.staticTargetName.toString}:static" do
lib.config.getPath <$> fetch (lib.pkg.target lib.staticTargetName)
@@ -53,7 +53,7 @@ public def buildLeanSharedLibOfStatic
compileSharedLib dynlib args lean.cc
return dynlib
def ExternLib.recBuildShared (lib : ExternLib) : FetchM (Job FilePath) :=
private def ExternLib.recBuildShared (lib : ExternLib) : FetchM (Job FilePath) :=
withRegisterJob s!"{lib.staticTargetName.toString}:shared" do
buildLeanSharedLibOfStatic ( lib.static.fetch) lib.linkArgs
@@ -74,7 +74,7 @@ def computeDynlibOfShared (sharedLibTarget : Job FilePath) : SpawnM (Job Dynlib)
else
error s!"shared library `{sharedLib}` has no file name"
def ExternLib.recComputeDynlib (lib : ExternLib) : FetchM (Job Dynlib) := do
private def ExternLib.recComputeDynlib (lib : ExternLib) : FetchM (Job Dynlib) := do
withRegisterJob s!"{lib.staticTargetName.toString}:dynlib" do
computeDynlibOfShared ( lib.shared.fetch)
@@ -82,7 +82,7 @@ def ExternLib.recComputeDynlib (lib : ExternLib) : FetchM (Job Dynlib) := do
public def ExternLib.dynlibFacetConfig : ExternLibFacetConfig dynlibFacet :=
mkFacetJobConfig recComputeDynlib
def ExternLib.recBuildDefault (lib : ExternLib) : FetchM (Job FilePath) :=
private def ExternLib.recBuildDefault (lib : ExternLib) : FetchM (Job FilePath) :=
lib.static.fetch
/-- The facet configuration for the builtin `ExternLib.dynlibFacet`. -/

View File

@@ -26,7 +26,7 @@ open System (FilePath)
namespace Lake
/-- Recursive build function for anything in the Lake build index. -/
def recBuildWithIndex (info : BuildInfo) : FetchM (Job (BuildData info.key)) :=
private def recBuildWithIndex (info : BuildInfo) : FetchM (Job (BuildData info.key)) :=
match info with
| .target pkg target => do
if let some decl := pkg.findTargetDecl? target then
@@ -55,7 +55,7 @@ def recBuildWithIndex (info : BuildInfo) : FetchM (Job (BuildData info.key)) :=
error s!"invalid target '{info}': unknown facet '{facet}'"
/-- Recursive build function with memoization. -/
def recFetchWithIndex : (info : BuildInfo) RecBuildM (Job (BuildData info.key)) :=
private def recFetchWithIndex : (info : BuildInfo) RecBuildM (Job (BuildData info.key)) :=
inline <| recFetchAcyclic (β := (Job <| BuildData ·.key)) BuildInfo.key recBuildWithIndex
/--

View File

@@ -21,7 +21,7 @@ namespace Lake
/-! ## Input File -/
def InputFile.recFetch (t : InputFile) : FetchM (Job FilePath) :=
private def InputFile.recFetch (t : InputFile) : FetchM (Job FilePath) :=
withRegisterJob s!"{t.name}" do
inputFile t.path t.text
@@ -39,7 +39,7 @@ public def InputFile.initFacetConfigs : DNameMap (KFacetConfig InputFile.facetKi
/-! ## Input Directory -/
def InputDir.recFetch (t : InputDir) : FetchM (Job (Array FilePath)) :=
private def InputDir.recFetch (t : InputDir) : FetchM (Job (Array FilePath)) :=
withRegisterJob s!"{t.name}" do
inputDir t.path t.text t.filter

View File

@@ -191,7 +191,7 @@ end Job
/-- A Lake job task with an opaque value in `Type`. -/
public abbrev OpaqueJobTask := JobTask Opaque
@[inline] unsafe def JobTask.toOpaqueImpl (self : JobTask α) : OpaqueJobTask :=
@[inline] private unsafe def JobTask.toOpaqueImpl (self : JobTask α) : OpaqueJobTask :=
unsafeCast self
/-- Forget the value of a job task. Implemented as a no-op cast. -/

View File

@@ -275,7 +275,7 @@ public def collectList (jobs : List (Job α)) (traceCaption := "<collection>") :
public def collectArray (jobs : Array (Job α)) (traceCaption := "<collection>") : Job (Array α) :=
jobs.foldl (zipWith (sync := true) Array.push) (traceRoot (Array.mkEmpty jobs.size) traceCaption)
instance : Nonempty ({α : Type u} [Nonempty α] α) := Classical.ofNonempty
private instance : Nonempty ({α : Type u} [Nonempty α] α) := Classical.ofNonempty
/-- Merge an `Vector` of jobs into one, collecting their outputs into an `Array`. -/
public def collectVector {α : Type u} [Nonempty α] (jobs : Vector (Job α) n) (traceCaption := "<collection>") : Job (Vector α n) :=

View File

@@ -33,7 +33,7 @@ private structure ModuleCollection where
Collect the local modules of a library.
That is, the modules from `getModuleArray` plus their local transitive imports.
-/
partial def LeanLib.recCollectLocalModules
private partial def LeanLib.recCollectLocalModules
(self : LeanLib) : FetchM (Job (Array Module))
:= ensureJob do
let mut col : ModuleCollection := {}
@@ -59,10 +59,10 @@ where
return col
/-- The `LibraryFacetConfig` for the builtin `modulesFacet`. -/
def LeanLib.modulesFacetConfig : LibraryFacetConfig modulesFacet :=
private def LeanLib.modulesFacetConfig : LibraryFacetConfig modulesFacet :=
mkFacetJobConfig LeanLib.recCollectLocalModules (buildable := false)
def LeanLib.recBuildLean
private def LeanLib.recBuildLean
(self : LeanLib) : FetchM (Job Unit)
:= do
let mods ( self.modules.fetch).await
@@ -73,7 +73,7 @@ def LeanLib.recBuildLean
public def LeanLib.leanArtsFacetConfig : LibraryFacetConfig leanArtsFacet :=
mkFacetJobConfig LeanLib.recBuildLean
@[specialize] def LeanLib.recBuildStatic
@[specialize] private def LeanLib.recBuildStatic
(self : LeanLib) (shouldExport : Bool) : FetchM (Job FilePath)
:= do
let suffix :=
@@ -123,7 +123,7 @@ public def LeanLib.staticExportFacetConfig : LibraryFacetConfig staticExportFace
/-! ## Build Shared Lib -/
def LeanLib.recBuildShared (self : LeanLib) : FetchM (Job Dynlib) := do
private def LeanLib.recBuildShared (self : LeanLib) : FetchM (Job Dynlib) := do
withRegisterJob s!"{self.name}:shared" <| withCurrPackage self.pkg do
let mods ( self.modules.fetch).await
let objJobs mods.flatMapM fun mod =>
@@ -158,7 +158,7 @@ public def LeanLib.sharedFacetConfig : LibraryFacetConfig sharedFacet :=
/--
Build extra target dependencies of the library (e.g., `extraDepTargets`, `needs`). -/
def LeanLib.recBuildExtraDepTargets (self : LeanLib) : FetchM (Job Unit) := do
private def LeanLib.recBuildExtraDepTargets (self : LeanLib) : FetchM (Job Unit) := do
let mut job := Job.nil s!"{self.pkg.baseName}/{self.name}:extraDep"
job := job.mix ( self.pkg.extraDep.fetch)
for target in self.extraDepTargets do
@@ -172,7 +172,7 @@ public def LeanLib.extraDepFacetConfig : LibraryFacetConfig extraDepFacet :=
mkFacetJobConfig LeanLib.recBuildExtraDepTargets
/-- Build the default facets for the library. -/
def LeanLib.recBuildDefaultFacets (self : LeanLib) : FetchM (Job Unit) := do
private def LeanLib.recBuildDefaultFacets (self : LeanLib) : FetchM (Job Unit) := do
Job.mixArray <$> self.defaultFacets.mapM fun facet => do
let job (self.facetCore facet).fetch
return job.toOpaque

View File

@@ -27,7 +27,7 @@ Build function definitions for a module's builtin facets.
-/
/-- Parse the header of a Lean file from its source. -/
def Module.recFetchInput (mod : Module) : FetchM (Job ModuleInput) := Job.async do
private def Module.recFetchInput (mod : Module) : FetchM (Job ModuleInput) := Job.async do
let path := mod.leanFile
let contents IO.FS.readFile path
let trace := {caption := path.toString, mtime := getMTime path, hash := .ofText contents}
@@ -52,7 +52,7 @@ public def Module.headerFacetConfig : ModuleFacetConfig headerFacet :=
return ( mod.input.fetch).map (sync := true) (·.header)
/-- Compute an `Array` of a module's direct local imports from its header. -/
def Module.recParseImports (mod : Module) : FetchM (Job (Array Module)) := do
private def Module.recParseImports (mod : Module) : FetchM (Job (Array Module)) := do
( mod.input.fetch).mapM (sync := true) fun input => do
let mods := input.imports.foldl (init := OrdModuleSet.empty) fun set imp =>
match imp.module? with | some mod => set.insert mod | none => set
@@ -62,12 +62,12 @@ def Module.recParseImports (mod : Module) : FetchM (Job (Array Module)) := do
public def Module.importsFacetConfig : ModuleFacetConfig importsFacet :=
mkFacetJobConfig recParseImports (buildable := false)
structure ModuleImportData where
private structure ModuleImportData where
module : Module
transImports : Job (Array Module)
includeSelf : Bool
@[inline] def collectImportsAux
@[inline] private def collectImportsAux
(fileName : String) (imports : Array Module)
(f : Module FetchM (Bool × Job (Array Module)))
: FetchM (Job (Array Module)) := do
@@ -93,21 +93,21 @@ structure ModuleImportData where
| .ok impSet s => .ok impSet.toArray s
| .error e s => .error e s
def computeTransImportsAux
private def computeTransImportsAux
(fileName : String) (imports : Array Module)
: FetchM (Job (Array Module)) := do
collectImportsAux fileName imports fun imp =>
(true, ·) <$> imp.transImports.fetch
/-- Recursively compute a module's transitive imports. -/
def Module.recComputeTransImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
private def Module.recComputeTransImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
inline <| computeTransImportsAux mod.relLeanFile.toString ( ( mod.imports.fetch).await)
/-- The `ModuleFacetConfig` for the builtin `transImportsFacet`. -/
public def Module.transImportsFacetConfig : ModuleFacetConfig transImportsFacet :=
mkFacetJobConfig recComputeTransImports (buildable := false)
def computePrecompileImportsAux
private def computePrecompileImportsAux
(fileName : String) (imports : Array Module)
: FetchM (Job (Array Module)) := do
collectImportsAux fileName imports fun imp =>
@@ -117,7 +117,7 @@ def computePrecompileImportsAux
(false, ·) <$> imp.precompileImports.fetch
/-- Recursively compute a module's precompiled imports. -/
def Module.recComputePrecompileImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
private def Module.recComputePrecompileImports (mod : Module) : FetchM (Job (Array Module)) := ensureJob do
inline <| computePrecompileImportsAux mod.relLeanFile.toString ( ( mod.imports.fetch).await)
/-- The `ModuleFacetConfig` for the builtin `precompileImportsFacet`. -/
@@ -129,7 +129,7 @@ Computes the transitive dynamic libraries of a module's imports.
Modules from the same library are loaded individually, while modules
from other libraries are loaded as part of the whole library.
-/
def Module.fetchImportLibs
private def Module.fetchImportLibs
(self : Module) (imps : Array Module) (compileSelf : Bool)
: FetchM (Array (Job Dynlib)) := do
let (_, jobs) imps.foldlM (init := (({} : NameSet), #[])) fun (libs, jobs) imp => do
@@ -149,7 +149,7 @@ def Module.fetchImportLibs
Fetches the library dynlibs of a list of non-local imports.
Modules are loaded as part of their whole library.
-/
def fetchImportLibs
private def fetchImportLibs
(mods : Array Module) : FetchM (Job (Array Dynlib))
:= do
let (_, jobs) mods.foldlM (init := (({} : NameSet), #[])) fun (libs, jobs) imp => do
@@ -166,7 +166,7 @@ def fetchImportLibs
Topologically sorts the library dependency tree by name.
Libraries come *after* their dependencies.
-/
partial def mkLoadOrder (libs : Array Dynlib) : FetchM (Array Dynlib) := do
private partial def mkLoadOrder (libs : Array Dynlib) : FetchM (Array Dynlib) := do
let r := libs.foldlM (m := Except (Cycle String)) (init := ({}, #[])) fun (v, o) lib =>
go lib [] v o
match r with
@@ -185,12 +185,12 @@ where
let o := o.push lib
return (v, o)
structure ModuleDeps where
private structure ModuleDeps where
dynlibs : Array Dynlib := #[]
plugins : Array Dynlib := #[]
deriving Inhabited, Repr
def computeModuleDeps
private def computeModuleDeps
(impLibs : Array Dynlib) (externLibs : Array Dynlib)
(dynlibs : Array Dynlib) (plugins : Array Dynlib)
: FetchM ModuleDeps := do
@@ -219,7 +219,7 @@ def computeModuleDeps
plugins := plugins.push ( getLakeInstall).sharedDynlib
return {dynlibs, plugins}
partial def fetchTransImportArts
private partial def fetchTransImportArts
(directImports : Array ModuleImport) (directArts : NameMap ImportArtifacts) (nonModule : Bool)
: FetchM (NameMap ImportArtifacts) := do
let q directImports.foldlM (init := #[]) fun q imp => do
@@ -254,7 +254,7 @@ where
else q
else q
def ModuleImportInfo.nil (modName : Name) : ModuleImportInfo where
private def ModuleImportInfo.nil (modName : Name) : ModuleImportInfo where
directArts := {}
trace := .nil s!"imports"
transTrace := .nil s!"{modName} transitive imports (public)"
@@ -262,7 +262,7 @@ def ModuleImportInfo.nil (modName : Name) : ModuleImportInfo where
allTransTrace := .nil s!"{modName} transitive imports (all)"
legacyTransTrace := .nil s!"{modName} transitive imports (legacy)"
def ModuleExportInfo.disambiguationHash
private def ModuleExportInfo.disambiguationHash
(self : ModuleExportInfo) (nonModule : Bool) (imp : Import)
: Hash :=
if nonModule then
@@ -274,7 +274,7 @@ def ModuleExportInfo.disambiguationHash
else
self.transTrace.hash.mix self.artsTrace.hash
def ModuleImportInfo.addImport
private def ModuleImportInfo.addImport
(info : ModuleImportInfo) (nonModule : Bool)
(imp : Import) (expInfo : ModuleExportInfo)
: ModuleImportInfo :=
@@ -351,14 +351,14 @@ def ModuleImportInfo.addImport
else
info
def Package.discriminant (self : Package) :=
private def Package.discriminant (self : Package) :=
if self.version == {} then
self.prettyName
else
s!"{self.prettyName}@{self.version}"
set_option linter.unusedVariables.funArgs false in
def fetchImportInfo
private def fetchImportInfo
(fileName : String) (pkgName modName : Name) (header : ModuleHeader)
: FetchM (Job ModuleImportInfo) := do
let nonModule := !header.isModule
@@ -422,17 +422,17 @@ public def Module.importInfoFacetConfig : ModuleFacetConfig importInfoFacet :=
let header ( mod.header.fetch).await
fetchImportInfo mod.relLeanFile.toString mod.pkg.keyName mod.name header
def noServerOLeanError :=
private def noServerOLeanError :=
"No server olean generated. Ensure the module system is enabled."
def noPrivateOLeanError :=
private def noPrivateOLeanError :=
"No private olean generated. Ensure the module system is enabled."
def noIRError :=
private def noIRError :=
"No `.ir` generated. Ensure the module system is enabled."
/-- Computes the import artifacts and transitive import trace of a module's imports. -/
def Module.computeExportInfo (mod : Module) : FetchM (Job ModuleExportInfo) := do
private def Module.computeExportInfo (mod : Module) : FetchM (Job ModuleExportInfo) := do
( mod.leanArts.fetch).mapM (sync := true) fun arts => do
let input ( mod.input.fetch).await
let importInfo ( mod.importInfo.fetch).await
@@ -496,7 +496,7 @@ Recursively build a module's dependencies, including:
* Shared libraries (e.g., `extern_lib` targets or precompiled modules)
* `extraDepTargets` of its library
-/
def Module.recFetchSetup (mod : Module) : FetchM (Job ModuleSetup) := ensureJob do
private def Module.recFetchSetup (mod : Module) : FetchM (Job ModuleSetup) := ensureJob do
let extraDepJob mod.lib.extraDep.fetch
let headerJob mod.header.fetch
@@ -660,7 +660,7 @@ def resolveModuleOutputs (out : CacheOutput) : JobM ModuleOutputs := do
instance : ResolveOutputs ModuleOutputs := resolveModuleOutputs
/-- Save module build artifacts to the local Lake cache. -/
def Module.cacheOutputArtifacts
private def Module.cacheOutputArtifacts
(mod : Module) (isModule : Bool) (useLocalFile : Bool)
: JobM ModuleOutputArtifacts := do
return {
@@ -686,12 +686,12 @@ Some module build artifacts must be located in the build directory (e.g., ILeans
This copies the required artifacts from the local Lake cache to the build directory and
updates the data structure with the new paths.
-/
def Module.restoreNeededArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
private def Module.restoreNeededArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
return {cached with
ilean := restoreArtifact mod.ileanFile cached.ilean
}
def Module.restoreAllArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
private def Module.restoreAllArtifacts (mod : Module) (cached : ModuleOutputArtifacts) : JobM ModuleOutputArtifacts := do
return {cached with
olean := restoreArtifact mod.oleanFile cached.olean
oleanServer? := restoreSome mod.oleanServerFile cached.oleanServer?
@@ -745,7 +745,7 @@ public protected def Module.getMTime (self : Module) (isModule : Bool) : IO MTim
@[deprecated Module.getMTime (since := "2025-03-04")]
public instance : GetMTime Module := Module.getMTime (isModule := false)
def ModuleOutputArtifacts.setMTime (self : ModuleOutputArtifacts) (mtime : MTime) : ModuleOutputArtifacts :=
private def ModuleOutputArtifacts.setMTime (self : ModuleOutputArtifacts) (mtime : MTime) : ModuleOutputArtifacts :=
{self with
olean := {self.olean with mtime}
oleanServer? := self.oleanServer?.map ({· with mtime})
@@ -756,7 +756,7 @@ def ModuleOutputArtifacts.setMTime (self : ModuleOutputArtifacts) (mtime : MTime
bc? := self.bc?.map ({· with mtime})
}
def Module.mkArtifacts (mod : Module) (srcFile : FilePath) (isModule : Bool) : ModuleArtifacts where
private def Module.mkArtifacts (mod : Module) (srcFile : FilePath) (isModule : Bool) : ModuleArtifacts where
lean? := srcFile
olean? := mod.oleanFile
oleanServer? := if isModule then some mod.oleanServerFile else none
@@ -766,7 +766,7 @@ def Module.mkArtifacts (mod : Module) (srcFile : FilePath) (isModule : Bool) : M
c? := mod.cFile
bc? := if Lean.Internal.hasLLVMBackend () then some mod.bcFile else none
def Module.computeArtifacts (mod : Module) (isModule : Bool) : FetchM ModuleOutputArtifacts :=
private def Module.computeArtifacts (mod : Module) (isModule : Bool) : FetchM ModuleOutputArtifacts :=
return {
isModule
olean := compute mod.oleanFile "olean"
@@ -829,7 +829,7 @@ def Module.unpackLtar (self : Module) (ltar : FilePath) : JobM Unit := do
]
proc (quiet := true) {cmd := ( getLeantar).toString, args}
def Module.recBuildLtar (self : Module) : FetchM (Job FilePath) := do
private def Module.recBuildLtar (self : Module) : FetchM (Job FilePath) := do
withRegisterJob s!"{self.name}:ltar" <| withCurrPackage self.pkg do
( self.leanArts.fetch).mapM fun arts => do
let art arts.ltar?.getDM do
@@ -846,7 +846,7 @@ def Module.recBuildLtar (self : Module) : FetchM (Job FilePath) := do
public def Module.ltarFacetConfig : ModuleFacetConfig ltarFacet :=
mkFacetJobConfig recBuildLtar
def Module.buildLean
private def Module.buildLean
(mod : Module) (depTrace : BuildTrace) (srcFile : FilePath) (setup : ModuleSetup)
: JobM ModuleOutputArtifacts := buildAction depTrace mod.traceFile do
let args := mod.weakLeanArgs ++ mod.leanArgs
@@ -861,7 +861,7 @@ def Module.buildLean
mod.clearOutputHashes
mod.computeArtifacts setup.isModule
def traceOptions (opts : LeanOptions) (caption := "opts") : BuildTrace :=
private def traceOptions (opts : LeanOptions) (caption := "opts") : BuildTrace :=
opts.values.foldl (init := .nil caption) fun t n v =>
let opt := s!"-D{n}={v.asCliFlagValue}"
t.mix <| .ofHash (pureHash opt) opt
@@ -871,7 +871,7 @@ Recursively build a Lean module.
Fetch its dependencies and then elaborate the Lean source file, producing
all possible artifacts (e.g., `.olean`, `.ilean`, `.c`, `.bc`).
-/
def Module.recBuildLean (mod : Module) : FetchM (Job ModuleOutputArtifacts) := do
private def Module.recBuildLean (mod : Module) : FetchM (Job ModuleOutputArtifacts) := do
/-
Remark: `withRegisterJob` must register `setupJob` to display module builds
in the job monitor. However, it must also include the fetching of both jobs to
@@ -997,7 +997,7 @@ where
public def Module.leanArtsFacetConfig : ModuleFacetConfig leanArtsFacet :=
mkFacetJobConfig recBuildLean
@[inline] def Module.fetchOLeanCore
@[inline] private def Module.fetchOLeanCore
(facet : String) (f : ModuleOutputArtifacts Option Artifact) (errMsg : String) (mod : Module)
: FetchM (Job FilePath) := do
( mod.leanArts.fetch).mapM (sync := true) fun arts => do
@@ -1080,7 +1080,7 @@ public def Module.bcFacetConfig : ModuleFacetConfig bcFacet :=
Recursively build the module's object file from its C file produced by `lean`
with `-DLEAN_EXPORTING` set, which exports Lean symbols defined within the C files.
-/
def Module.recBuildLeanCToOExport (self : Module) : FetchM (Job FilePath) := do
private def Module.recBuildLeanCToOExport (self : Module) : FetchM (Job FilePath) := do
let suffix := if ( getIsVerbose) then " (with exports)" else ""
withRegisterJob s!"{self.name}:c.o{suffix}" <| withCurrPackage self.pkg do
-- TODO: add option to pass a target triplet for cross compilation
@@ -1095,7 +1095,7 @@ public def Module.coExportFacetConfig : ModuleFacetConfig coExportFacet :=
Recursively build the module's object file from its C file produced by `lean`.
This version does not export any Lean symbols.
-/
def Module.recBuildLeanCToONoExport (self : Module) : FetchM (Job FilePath) := do
private def Module.recBuildLeanCToONoExport (self : Module) : FetchM (Job FilePath) := do
let suffix := if ( getIsVerbose) then " (without exports)" else ""
withRegisterJob s!"{self.name}:c.o{suffix}" <| withCurrPackage self.pkg do
-- TODO: add option to pass a target triplet for cross compilation
@@ -1111,7 +1111,7 @@ public def Module.coFacetConfig : ModuleFacetConfig coFacet :=
if Platform.isWindows then mod.coNoExport.fetch else mod.coExport.fetch
/-- Recursively build the module's object file from its bitcode file produced by `lean`. -/
def Module.recBuildLeanBcToO (self : Module) : FetchM (Job FilePath) := do
private def Module.recBuildLeanBcToO (self : Module) : FetchM (Job FilePath) := do
withRegisterJob s!"{self.name}:bc.o" <| withCurrPackage self.pkg do
-- TODO: add option to pass a target triplet for cross compilation
buildLeanO self.bcoFile ( self.bc.fetch) self.weakLeancArgs self.leancArgs
@@ -1145,7 +1145,7 @@ public def Module.oFacetConfig : ModuleFacetConfig oFacet :=
Recursively build the shared library of a module
(e.g., for `--load-dynlib` or `--plugin`).
-/
def Module.recBuildDynlib (mod : Module) : FetchM (Job Dynlib) :=
private def Module.recBuildDynlib (mod : Module) : FetchM (Job Dynlib) :=
withRegisterJob s!"{mod.name}:dynlib" <| withCurrPackage mod.pkg do
/-
Fetch the module's object files.
@@ -1221,7 +1221,7 @@ building its imports and other dependencies. Used by `lake setup-file`.
Due to its exclusive use as a top-level build, it does not construct a proper trace state.
-/
def setupEditedModule
private def setupEditedModule
(mod : Module) (header : ModuleHeader)
: FetchM (Job ModuleSetup) := do
let extraDepJob mod.lib.extraDep.fetch
@@ -1269,7 +1269,7 @@ to build the dependencies of the file and generate the data for `lean --setup`.
Due to its exclusive use as a top-level build, it does not construct a proper trace state.
-/
def setupExternalModule
private def setupExternalModule
(fileName : String) (header : ModuleHeader) (leanOpts : LeanOptions)
: FetchM (Job ModuleSetup) := do
let root getRootPackage

View File

@@ -25,7 +25,7 @@ namespace Lake
open Lean (Name)
/-- Fetch the package's direct dependencies. -/
def Package.recFetchDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
private def Package.recFetchDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
(pure ·) <$> self.depConfigs.mapM fun cfg => do
let some dep findPackageByName? cfg.name
| error s!"{self.prettyName}: package not found for dependency '{cfg.name}' \
@@ -37,7 +37,7 @@ public def Package.depsFacetConfig : PackageFacetConfig depsFacet :=
mkFacetJobConfig recFetchDeps (buildable := false)
/-- Compute a topological ordering of the package's transitive dependencies. -/
def Package.recComputeTransDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
private def Package.recComputeTransDeps (self : Package) : FetchM (Job (Array Package)) := ensureJob do
(pure ·.toArray) <$> self.depConfigs.foldlM (init := OrdPackageSet.empty) fun deps cfg => do
let some dep findPackageByName? cfg.name
| error s!"{self.prettyName}: package not found for dependency '{cfg.name}' \
@@ -53,7 +53,7 @@ public def Package.transDepsFacetConfig : PackageFacetConfig transDepsFacet :=
Tries to download and unpack the package's cached build archive
(e.g., from Reservoir or GitHub).
-/
def Package.fetchOptBuildCacheCore (self : Package) : FetchM (Job Bool) := do
private def Package.fetchOptBuildCacheCore (self : Package) : FetchM (Job Bool) := do
if self.preferReleaseBuild then
self.optGitHubRelease.fetch
else
@@ -64,7 +64,7 @@ public def Package.optBuildCacheFacetConfig : PackageFacetConfig optBuildCacheFa
mkFacetJobConfig (·.fetchOptBuildCacheCore)
/-- Tries to download the package's build cache (if configured). -/
def Package.maybeFetchBuildCache (self : Package) : FetchM (Job Bool) := do
private def Package.maybeFetchBuildCache (self : Package) : FetchM (Job Bool) := do
let shouldFetch :=
( getTryCache) &&
!( self.buildDir.pathExists) && -- do not automatically clobber prebuilt artifacts
@@ -77,7 +77,7 @@ def Package.maybeFetchBuildCache (self : Package) : FetchM (Job Bool) := do
return pure true
@[inline]
def Package.optFacetDetails (self : Package) (facet : Name) : JobM String := do
private def Package.optFacetDetails (self : Package) (facet : Name) : JobM String := do
if ( getIsVerbose) then
return s!" (see '{self.baseName}:{Name.eraseHead facet}' for details)"
else
@@ -87,7 +87,7 @@ def Package.optFacetDetails (self : Package) (facet : Name) : JobM String := do
Tries to download and unpack the package's cached build archive
(e.g., from Reservoir or GitHub). Prints a warning on failure.
-/
def Package.maybeFetchBuildCacheWithWarning (self : Package) := do
private def Package.maybeFetchBuildCacheWithWarning (self : Package) := do
let job self.maybeFetchBuildCache
job.mapM fun success => do
unless success do
@@ -102,7 +102,7 @@ def Package.maybeFetchBuildCacheWithWarning (self : Package) := do
Build the `extraDepTargets` for the package.
Also, if the package is a dependency, maybe fetch its build cache.
-/
def Package.recBuildExtraDepTargets (self : Package) : FetchM (Job Unit) :=
private def Package.recBuildExtraDepTargets (self : Package) : FetchM (Job Unit) :=
withRegisterJob s!"{self.baseName}:extraDep" do
let mut job := Job.nil s!"@{self.baseName}:extraDep"
-- Fetch build cache if this package is a dependency
@@ -118,7 +118,7 @@ public def Package.extraDepFacetConfig : PackageFacetConfig extraDepFacet :=
mkFacetJobConfig Package.recBuildExtraDepTargets
/-- Compute the package's Reservoir barrel URL. -/
def Package.getBarrelUrl (self : Package) : JobM String := do
private def Package.getBarrelUrl (self : Package) : JobM String := do
if self.scope.isEmpty then
error "package has no Reservoir scope"
let repo := GitRepo.mk self.dir
@@ -132,7 +132,7 @@ def Package.getBarrelUrl (self : Package) : JobM String := do
return url
/-- Compute the package's GitHub release URL. -/
def Package.getReleaseUrl (self : Package) : JobM String := do
private def Package.getReleaseUrl (self : Package) : JobM String := do
let repo := GitRepo.mk self.dir
let repoUrl? := self.releaseRepo? <|> self.remoteUrl?
let some repoUrl := repoUrl? <|> ( repo.getFilteredRemoteUrl?)
@@ -144,7 +144,7 @@ def Package.getReleaseUrl (self : Package) : JobM String := do
return s!"{repoUrl}/releases/download/{tag}/{self.buildArchive}"
/-- Tries to download and unpack a build archive for the package from a URL. -/
def Package.fetchBuildArchive
private def Package.fetchBuildArchive
(self : Package) (url : String) (archiveFile : FilePath)
(headers : Array String := #[])
: JobM PUnit := do
@@ -157,7 +157,7 @@ def Package.fetchBuildArchive
untar archiveFile self.buildDir
@[inline]
def Package.mkOptBuildArchiveFacetConfig
private def Package.mkOptBuildArchiveFacetConfig
{facet : Name} (archiveFile : Package FilePath)
(getUrl : Package JobM String) (headers : Array String := #[])
[FamilyDef FacetOut facet Bool]
@@ -172,7 +172,7 @@ def Package.mkOptBuildArchiveFacetConfig
return false
@[inline]
def Package.mkBuildArchiveFacetConfig
private def Package.mkBuildArchiveFacetConfig
{facet : Name} (optFacet : Name) (what : String)
[FamilyDef FacetOut facet Unit]
[FamilyDef FacetOut optFacet Bool]

View File

@@ -34,11 +34,11 @@ public def mkBuildContext (ws : Workspace) (config : BuildConfig) : BaseIO Build
}
/-- Unicode icons that make up the spinner in animation order. -/
def Monitor.spinnerFrames :=
private def Monitor.spinnerFrames :=
#['','','','','','','','']
/-- Context of the Lake build monitor. -/
structure MonitorContext where
private structure MonitorContext where
jobs : JobQueue
out : IO.FS.Stream
outLv : LogLevel
@@ -55,7 +55,7 @@ structure MonitorContext where
.stream ctx.out ctx.outLv ctx.useAnsi
/-- State of the Lake build monitor. -/
structure MonitorState where
private structure MonitorState where
jobNo : Nat := 0
totalJobs : Nat := 0
wantsRebuild : Bool := false
@@ -65,9 +65,9 @@ structure MonitorState where
spinnerIdx : Fin Monitor.spinnerFrames.size := 0, by decide
/-- Monad of the Lake build monitor. -/
abbrev MonitorM := ReaderT MonitorContext <| StateT MonitorState BaseIO
private abbrev MonitorM := ReaderT MonitorContext <| StateT MonitorState BaseIO
@[inline] def MonitorM.run
@[inline] private def MonitorM.run
(ctx : MonitorContext) (s : MonitorState) (self : MonitorM α)
: BaseIO (α × MonitorState) :=
StateT.run (ReaderT.run self ctx) s
@@ -80,23 +80,23 @@ def Ansi.resetLine : String :=
"\x1B[2K\r"
/-- Like `IO.FS.Stream.flush`, but ignores errors. -/
@[inline] def flush (out : IO.FS.Stream) : BaseIO PUnit :=
@[inline] private def flush (out : IO.FS.Stream) : BaseIO PUnit :=
out.flush |>.catchExceptions fun _ => pure ()
/-- Like `IO.FS.Stream.putStr`, but panics on errors. -/
@[inline] def print! (out : IO.FS.Stream) (s : String) : BaseIO PUnit :=
@[inline] private def print! (out : IO.FS.Stream) (s : String) : BaseIO PUnit :=
out.putStr s |>.catchExceptions fun e =>
panic! s!"[{decl_name%} failed: {e}] {repr s}"
namespace Monitor
@[inline] def print (s : String) : MonitorM PUnit := do
@[inline] private def print (s : String) : MonitorM PUnit := do
print! ( read).out s
@[inline] nonrec def flush : MonitorM PUnit := do
@[inline] private nonrec def flush : MonitorM PUnit := do
flush ( read).out
def renderProgress (running unfinished : Array OpaqueJob) (h : 0 < unfinished.size) : MonitorM PUnit := do
private def renderProgress (running unfinished : Array OpaqueJob) (h : 0 < unfinished.size) : MonitorM PUnit := do
let {jobNo, totalJobs, ..} get
let {useAnsi, showProgress, ..} read
if showProgress useAnsi then
@@ -114,7 +114,7 @@ def renderProgress (running unfinished : Array OpaqueJob) (h : 0 < unfinished.si
print s!"{resetCtrl}{spinnerIcon} [{jobNo}/{totalJobs}] {caption}"
flush
def reportJob (job : OpaqueJob) : MonitorM PUnit := do
private def reportJob (job : OpaqueJob) : MonitorM PUnit := do
let {jobNo, totalJobs, ..} get
let {failLv, outLv, showOptional, out, useAnsi, showProgress, minAction, showTime, ..} read
let {task, caption, optional, ..} := job
@@ -153,7 +153,7 @@ where
else if ms > 1000 then s!"{(ms) / 1000}.{(ms+50) / 100 % 10}s"
else s!"{ms}ms"
def poll (unfinished : Array OpaqueJob) : MonitorM (Array OpaqueJob × Array OpaqueJob) := do
private def poll (unfinished : Array OpaqueJob) : MonitorM (Array OpaqueJob × Array OpaqueJob) := do
let newJobs ( read).jobs.modifyGet ((·, #[]))
modify fun s => {s with totalJobs := s.totalJobs + newJobs.size}
let pollJobs := fun (running, unfinished) job => do
@@ -169,7 +169,7 @@ def poll (unfinished : Array OpaqueJob) : MonitorM (Array OpaqueJob × Array Opa
let r unfinished.foldlM pollJobs (#[], #[])
newJobs.foldlM pollJobs r
def sleep : MonitorM PUnit := do
private def sleep : MonitorM PUnit := do
let now IO.monoMsNow
let lastUpdate := ( get).lastUpdate
let sleepTime : Nat := ( read).updateFrequency - (now - lastUpdate)
@@ -178,14 +178,14 @@ def sleep : MonitorM PUnit := do
let now IO.monoMsNow
modify fun s => {s with lastUpdate := now}
partial def loop (unfinished : Array OpaqueJob) : MonitorM PUnit := do
private partial def loop (unfinished : Array OpaqueJob) : MonitorM PUnit := do
let (running, unfinished) poll unfinished
if h : 0 < unfinished.size then
renderProgress running unfinished h
sleep
loop unfinished
def main (init : Array OpaqueJob) : MonitorM PUnit := do
private def main (init : Array OpaqueJob) : MonitorM PUnit := do
loop init
let resetCtrl modifyGet fun s => (s.resetCtrl, {s with resetCtrl := ""})
unless resetCtrl.isEmpty do

View File

@@ -32,7 +32,7 @@ public abbrev BuildStore :=
namespace BuildStore
set_option linter.deprecated false in
def getModuleFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
private def getModuleFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
(k : BuildKey) (v : Job (BuildData k)) : Option (Name × Job α) :=
match k with
| .moduleFacet m f
@@ -67,7 +67,7 @@ public def collectModuleFacetMap
res := res.insert m job
return res
def getPackageFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
private def getPackageFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
(k : BuildKey) (v : Job (BuildData k)) : Option (Job α) :=
match k with
| .packageFacet p f =>
@@ -88,7 +88,7 @@ public def collectPackageFacetArray
res := res.push job
return res
def getTargetFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
private def getTargetFacetJob? (facet : Name) [FamilyOut FacetOut facet α]
(k : BuildKey) (v : Job (BuildData k)) : Option (Job α) :=
match k with
| .targetFacet _ _ f =>

View File

@@ -16,7 +16,7 @@ open Lean
namespace Lake
variable (defaultPkg : Package) (root : PartialBuildKey) in
def PartialBuildKey.fetchInCoreAux
private def PartialBuildKey.fetchInCoreAux
(self : PartialBuildKey) (facetless : Bool := false)
: FetchM ((key : BuildKey) × Job (BuildData key)) := do
match self with
@@ -98,7 +98,7 @@ Fetches the target specified by this key, resolving gaps as needed.
(·.2.toOpaque) <$> fetchInCore defaultPkg self
variable (root : BuildKey) in
def BuildKey.fetchCore
private def BuildKey.fetchCore
(self : BuildKey)
: FetchM (Job (BuildData self)) :=
match self with

View File

@@ -140,7 +140,7 @@ def resolveTargetInWorkspace
else
throw <| CliError.unknownTarget target
def resolveTargetLikeSpec
private def resolveTargetLikeSpec
(ws : Workspace) (spec : String) (facet : Name) (isMaybePath explicit := false)
: Except CliError (Array BuildSpec) := do
match spec.split '/' |>.toList with
@@ -169,7 +169,7 @@ def resolveTargetLikeSpec
else
throw <| CliError.invalidTargetSpec spec '/'
def resolveTargetBaseSpec
private def resolveTargetBaseSpec
(ws : Workspace) (spec : String) (facet : Name)
: EIO CliError (Array BuildSpec) := do
if spec.startsWith "@" then

View File

@@ -391,14 +391,14 @@ def serviceNotFound (service : String) (configuredServices : Array CacheServiceC
let msg := s!"{msg}; configured services:\n"
configuredServices.foldl (· ++ s!" {·.name}") msg
@[inline] def cacheToolchain (pkg : Package) (toolchain : CacheToolchain) : CacheToolchain :=
@[inline] private def cacheToolchain (pkg : Package) (toolchain : CacheToolchain) : CacheToolchain :=
if pkg.fixedToolchain || pkg.bootstrap then .none else toolchain
@[inline] def cachePlatform (pkg : Package) (platform : CachePlatform) : CachePlatform :=
@[inline] private def cachePlatform (pkg : Package) (platform : CachePlatform) : CachePlatform :=
if pkg.isPlatformIndependent then .none else platform
-- since 2026-02-19
def endpointDeprecation : String :=
private def endpointDeprecation : String :=
"configuring the cache service via environment variables is deprecated; use --service instead"
protected def get : CliM PUnit := do

View File

@@ -16,13 +16,13 @@ import Lake.Load.Lean.Elab
namespace Lake
open Toml Lean System PrettyPrinter
partial def descopeSyntax : Syntax Syntax
private partial def descopeSyntax : Syntax Syntax
| .ident info rawVal val preresolved =>
.ident info rawVal val.eraseMacroScopes preresolved
| .node info k args => .node info k <| args.map descopeSyntax
| stx => stx
def descopeTSyntax (stx : TSyntax k) : TSyntax k :=
private def descopeTSyntax (stx : TSyntax k) : TSyntax k :=
descopeSyntax stx.raw
public def Package.mkConfigString (pkg : Package) (lang : ConfigLang) : LogIO String := do

View File

@@ -23,7 +23,7 @@ open Lean System Toml
/-! ## General Helpers -/
local instance : BEq FilePath where
private local instance : BEq FilePath where
beq a b := a.normalize == b.normalize
class EncodeField (σ : Type u) (name : Name) (α : Type u) where
@@ -69,7 +69,7 @@ public def Toml.encodeLeanOptions (opts : Array LeanOption) : Table :=
public instance : ToToml (Array LeanOption) where
toToml opts := .table .missing <| encodeLeanOptions opts
@[inline] def encodeSingleton? [ToToml? α] (name : Name) (a : α) : Option Value :=
@[inline] private def encodeSingleton? [ToToml? α] (name : Name) (a : α) : Option Value :=
toToml? a |>.map fun v => toToml <| Table.empty.insert name v
mutual
@@ -149,7 +149,7 @@ public instance : ToToml Dependency := ⟨(toToml ·.toToml)⟩
/-! ## Package & Target Configuration Encoders -/
meta def genToToml
private meta def genToToml
(cmds : Array Command)
(tyName : Name) [info : ConfigInfo tyName]
(exclude : Array Name := #[])

View File

@@ -97,7 +97,7 @@ public partial def parse
else
loop 2 {} (lfPos.next h)
@[inline] partial def loadCore
@[inline] private partial def loadCore
(h : IO.FS.Handle) (fileName : String) (platformIndependent : Bool)
: LogIO CacheMap := do
let rec loop (i : Nat) (cache : CacheMap) := do
@@ -575,7 +575,7 @@ def uploadS3
| .error e =>
error s!"curl produced invalid JSON output: {e}; received:\n{out.stderr}"
structure CacheServiceImpl where
private structure CacheServiceImpl where
mk ::
name? : Option CacheServiceName := none
/- S3 Bucket -/
@@ -641,11 +641,11 @@ namespace CacheService
/-- The MIME type of a Lake/Reservoir artifact. -/
public def artifactContentType : String := "application/vnd.reservoir.artifact"
def appendScope (endpoint : String) (scope : String) : String :=
private def appendScope (endpoint : String) (scope : String) : String :=
scope.split '/' |>.fold (init := endpoint) fun s component =>
uriEncode component.copy (s.push '/')
def s3ArtifactUrl (contentHash : Hash) (service : CacheService) (scope : CacheServiceScope) : String :=
private def s3ArtifactUrl (contentHash : Hash) (service : CacheService) (scope : CacheServiceScope) : String :=
let endpoint :=
match scope.impl with
| .repo scope => appendScope service.impl.artifactEndpoint scope
@@ -687,27 +687,27 @@ public def uploadArtifact
/-! ## Multi-Artifact Transfer -/
inductive TransferKind
private inductive TransferKind
| get
| put
deriving DecidableEq
structure TransferInfo where
private structure TransferInfo where
url : String
path : FilePath
descr : ArtifactDescr
structure TransferConfig where
private structure TransferConfig where
kind : TransferKind
scope : CacheServiceScope
infos : Array TransferInfo
key : String := ""
structure TransferState where
private structure TransferState where
didError : Bool := false
numSuccesses : Nat := 0
partial def monitorTransfer
private partial def monitorTransfer
(cfg : TransferConfig) (h hOut : IO.FS.Handle) (s : TransferState)
: LoggerIO TransferState := do
let line h.getLine
@@ -781,7 +781,7 @@ where
logError msg
logVerbose s!"curl JSON: {line.trimAsciiEnd}"
def transferArtifacts
private def transferArtifacts
(cfg : TransferConfig)
: LoggerIO Unit := IO.FS.withTempFile fun h path => do
let args id do
@@ -941,7 +941,7 @@ public def uploadArtifacts
/-- The MIME type of a Lake/Reservoir input-to-output mappings for a Git revision. -/
public def mapContentType : String := "application/vnd.reservoir.outputs+json-lines"
def s3RevisionUrl
private def s3RevisionUrl
(rev : String) (service : CacheService) (scope : CacheServiceScope)
(platform := CachePlatform.none) (toolchain := CacheToolchain.none)
: String :=

View File

@@ -78,7 +78,7 @@ as the facet kind for modules.
Returns the facet kind for an Lake target namespace.
Used by the `builtin_facet` macro.
-/
def facetKindForNamespace (ns : Name) : Name :=
private def facetKindForNamespace (ns : Name) : Name :=
match ns with
| `Lake.Package => Package.facetKind
| `Lake.Module => Module.facetKind

View File

@@ -38,7 +38,7 @@ scoped syntax (name := configDecl)
instance : Coe Ident (TSyntax ``Term.structInstLVal) where
coe stx := Unhygienic.run `(Term.structInstLVal| $stx:ident)
structure FieldView where
private structure FieldView where
ref : Syntax
mods : TSyntax ``Command.declModifiers := Unhygienic.run `(declModifiers|)
id : Ident
@@ -48,7 +48,7 @@ structure FieldView where
decl? : Option (TSyntax ``structSimpleBinder) := none
parent : Bool := false
structure FieldMetadata where
private structure FieldMetadata where
cmds : Array Command := #[]
fields : Term := Unhygienic.run `(Array.empty)
@@ -56,7 +56,7 @@ structure FieldMetadata where
-- quotations and are called only by `macro`s, so we disable the option for them manually.
set_option internal.parseQuotWithCurrentStage false
meta def mkConfigAuxDecls
private meta def mkConfigAuxDecls
(vis? : Option (TSyntax ``visibility))
(structId : Ident) (structArity : Nat) (structTy : Term) (views : Array FieldView)
: MacroM (Array Command) := do
@@ -123,7 +123,7 @@ meta def mkConfigAuxDecls
let emptyInst `( $[$vis?:visibility]? instance $instId:ident : EmptyCollection $structTy := {})
return data.cmds.push fieldsDef |>.push fieldsInst |>.push infoInst |>.push emptyInst
meta def mkFieldView (stx : TSyntax ``configField) : MacroM FieldView := withRef stx do
private meta def mkFieldView (stx : TSyntax ``configField) : MacroM FieldView := withRef stx do
let `(configField|$mods:declModifiers $[$id? @]? $ids,* $bs* : $rty $[:= $val?]?) := stx
| Macro.throwError "ill-formed configuration field declaration"
let bvs expandBinders bs
@@ -137,7 +137,7 @@ meta def mkFieldView (stx : TSyntax ``configField) : MacroM FieldView := withRef
let decl `(structSimpleBinder|$mods:declModifiers $id : $type := $defVal)
return {ref := stx, mods, id, ids, type, defVal, decl? := decl}
meta def mkParentFieldView (stx : TSyntax ``structParent) : MacroM FieldView := withRef stx do
private meta def mkParentFieldView (stx : TSyntax ``structParent) : MacroM FieldView := withRef stx do
let `(structParent|$[$id? :]? $type) := stx
| Macro.throwError "ill-formed parent"
let id do

View File

@@ -36,7 +36,7 @@ mutual
A pattern. Matches some subset of the values of a type.
May also include a declarative description.
-/
structure Pattern (α : Type u) (β : Type v) where
public structure Pattern (α : Type u) (β : Type v) where
/-- Returns whether the value matches the pattern. -/
filter : α Bool
/-- An optional name for the filter. -/
@@ -49,7 +49,7 @@ structure Pattern (α : Type u) (β : Type v) where
An abstract declarative pattern.
Augments another pattern description `β` with logical connectives.
-/
inductive PatternDescr (α : Type u) (β : Type v)
public inductive PatternDescr (α : Type u) (β : Type v)
/-- Matches a value that does not satisfy the pattern. -/
| not (p : Pattern α β)
/-- Matches a value that satisfies every pattern. Short-circuits. -/

Some files were not shown because too many files have changed in this diff Show More