chore: fix spelling errors (#10042)

Typos were found with
```
pip install codespell --upgrade
codespell --summary --ignore-words-list enew,forin,fro,happend,hge,ihs,iterm,spred --skip stage0 --check-filenames
codespell --summary --ignore-words-list enew,forin,fro,happend,hge,ihs,iterm,spred --skip stage0 --check-filenames --regex '[A-Z][a-z]*'
codespell --summary --ignore-words-list enew,forin,fro,happend,hge,ihs,iterm,spred --skip stage0 --check-filenames --regex "\b[a-z']*"
```
This commit is contained in:
Jason Yuen
2025-08-22 00:23:12 -07:00
committed by GitHub
parent c4b3f303bb
commit facc356a0a
43 changed files with 51 additions and 51 deletions

View File

@@ -75,7 +75,7 @@ The github repository will automatically update stage0 on `master` once
NOTE: A full rebuild of stage 1 will only be triggered when the *committed* contents of `stage0/` are changed.
Thus if you change files in it manually instead of through `update-stage0-commit` (see below) or fetching updates from git, you either need to commit those changes first or run `make -C build/release clean-stdlib`.
The same is true for further stages except that a rebuild of them is retriggered on any commited change, not just to a specific directory.
The same is true for further stages except that a rebuild of them is retriggered on any committed change, not just to a specific directory.
Thus when debugging e.g. stage 2 failures, you can resume the build from these failures on but may want to explicitly call `clean-stdlib` to either observe changes from `.olean` files of modules that built successfully or to check that you did not break modules that built successfully at some prior point.
If you have write access to the lean4 repository, you can also manually

View File

@@ -734,7 +734,7 @@ public structure Packages.LinearOrderOfOrdArgs (α : Type u) extends
| exact fun a b => Std.min_eq_if_isLE_compare (a := a) (b := b)
| fail "Failed to automatically prove that `min` is left-leaning. \
Please ensure that a `LawfulOrderLeftLeaningMin` instance can be synthesized or \
manuelly provide the field `min_eq`."
manually provide the field `min_eq`."
max_eq :
let := ord; let := le; let := max; have := lawfulOrderOrd
∀ a b : α, Max.max a b = if (compare a b).isGE then a else b := by

View File

@@ -146,7 +146,7 @@ variable (sep : String) (escape : Bool) in
Uses the separator `sep` (usually `"."`) to combine the components of the `Name` into a string.
See the documentation for `Name.toStringWithToken` for an explanation of `escape` and `isToken`.
-/
@[specialize isToken] -- explicit annotation because isToken is overriden in recursive call
@[specialize isToken] -- explicit annotation because isToken is overridden in recursive call
def toStringWithSep (n : Name) (isToken : String → Bool := fun _ => false) : String :=
match n with
| anonymous => "[anonymous]"

View File

@@ -158,7 +158,7 @@ def findEnvDecl (env : Environment) (declName : Name) : Option Decl :=
match env.getModuleIdxFor? declName with
| some modIdx =>
-- `meta import/import all` and server `#eval`
-- This case is important even for codegen because it needs to see IR via `import all` (beause
-- This case is important even for codegen because it needs to see IR via `import all` (because
-- it can also see the LCNF)
findAtSorted? (declMapExt.getModuleIREntries env modIdx) declName <|>
-- (closure of) `meta def`; will report `.extern`s for other `def`s so needs to come second

View File

@@ -39,7 +39,7 @@ def getIRPhases (env : Environment) (declName : Name) : IRPhases := Id.run do
.comptime
else
env.header.modules[idx.toNat]?.map (·.irPhases) |>.get!
-- Allow `meta`->non-`meta` acesses in the same module
-- Allow `meta`->non-`meta` accesses in the same module
| none => if isMeta env declName then .comptime else .all
end Lean

View File

@@ -177,7 +177,7 @@ def deriveInduction (name : Name) (isMutual : Bool) : MetaM Unit := do
let motives ← forallTelescope eTyp fun args _ => do
let motives ← unfoldPredRelMutual eqnInfo (← inferType args[1]!) (reduceConclusion := true)
motives.mapM (fun x => mkForallFVars #[args[0]!] x)
-- For each predicate in the mutual group we generate an approprate candidate predicate
-- For each predicate in the mutual group we generate an appropriate candidate predicate
let predicates := (numberNames infos.size "pred").zip <| PProdN.unpack α infos.size
-- Then we make the induction principle more readable, by currying the hypotheses and projecting the conclusion
withLocalDeclsDND predicates fun predVars => do

View File

@@ -40,7 +40,7 @@ Preprocesses the expressions to improve the effectiveness of `elimRecursion`.
| i+1 => (f x) i
```
* Unfold auxillary definitions abstracting over the function call
* Unfold auxiliary definitions abstracting over the function call
(typically abstracted) proofs.
-/

View File

@@ -810,9 +810,9 @@ def guessLex (preDefs : Array PreDefinition) (unaryPreDef : PreDefinition)
-- For every function, the measures we want to use
-- (One for each non-forbiddend arg)
let basicMeassures₁ ← simpleMeasures preDefs fixedParamPerms userVarNamess
let basicMeassures₂ ← complexMeasures preDefs fixedParamPerms userVarNamess recCalls
let basicMeasures := Array.zipWith (· ++ ·) basicMeassures₁ basicMeassures₂
let basicMeasures₁ ← simpleMeasures preDefs fixedParamPerms userVarNamess
let basicMeasures₂ ← complexMeasures preDefs fixedParamPerms userVarNamess recCalls
let basicMeasures := Array.zipWith (· ++ ·) basicMeasures₁ basicMeasures₂
-- The list of measures, including the measures that order functions.
-- The function ordering measures come last

View File

@@ -182,7 +182,7 @@ builtin_simproc_decl matcherPushArg (_) := fun e => do
mkLambdaFVars xs (.lam motiveBodyArg.bindingName! motiveBodyArg.bindingDomain! motiveBodyArg.bindingBody!.bindingDomain! .default)
let argPusher ← mkMatchArgPusher matcherApp.matcherName matcherApp.toMatcherInfo
-- Let's infer the level paramters:
-- Let's infer the level parameters:
let proof ← withTransparency .all <| mkAppOptM
argPusher ((matcherApp.params ++ #[motive', alpha, beta, fExpr, rel] ++ matcherApp.discrs ++ matcherApp.alts).map some)
let some (_, _, rhs) := (← inferType proof).eq? | throwError "matcherPushArg: expected equality:{indentExpr (← inferType proof)}"

View File

@@ -28,7 +28,7 @@ them to substitute occurrences of `x` within other hypotheses. Additionally this
redundant top level hypotheses.
-/
def embeddedConstraintPass : Pass where
name := `embeddedConstraintSubsitution
name := `embeddedConstraintSubstitution
run' goal := do
goal.withContext do
let hyps ← getPropHyps

View File

@@ -179,7 +179,7 @@ private def elabDeclToUnfoldOrTheorem (config : Meta.ConfigWithKey) (id : Origin
else
if inv then
throwError m!"Invalid `←` modifier: `{declName}` is a declaration name to be unfolded"
++ .hint' m!"The simplifier cannot \"refold\" definitions by name. Use `rw` for this intead,
++ .hint' m!"The simplifier cannot \"refold\" definitions by name. Use `rw` for this instead,
or use the `←` simp modifier with an equational lemma for `{declName}`."
if kind == .dsimp then
return .addEntries #[.toUnfold declName]

View File

@@ -2182,7 +2182,7 @@ def finalizeImport (s : ImportState) (imports : Array Import) (opts : Options) (
serverBaseExts := (← setImportedEntries privateBase.extensions serverData)
}
if leakEnv then
/- Mark persistent a first time before `finalizePersistenExtensions`, which
/- Mark persistent a first time before `finalizePersistentExtensions`, which
avoids costly MT markings when e.g. an interpreter closure (which
contains the environment) is put in an `IO.Ref`. This can happen in e.g.
initializers of user environment extensions and is wasteful because the

View File

@@ -60,7 +60,7 @@ In this module, we produce
```lean
def aux := fun (y : Nat) => h (g y)
```
Note that in this particular case, it is safe to lambda abstract the let-varible `y`.
Note that in this particular case, it is safe to lambda abstract the let-variable `y`.
This module uses the following approach to decide whether it is safe or not to lambda
abstract a let-variable.
1) We enable zetaDelta-expansion tracking in `MetaM`. That is, whenever we perform type checking

View File

@@ -45,7 +45,7 @@ Optimizations, present and future:
however checking for `let`s is O(n), so we only try this for expressions with a small `approxDepth`.
(We can consider precomputing this somehow.)
- The cache is currently responsible for the check.
- We also do it before entering telescopes, to avoid unnecesasry fvar overhead.
- We also do it before entering telescopes, to avoid unnecessary fvar overhead.
- If we are not currently inside a `let`, then we do not need to do full typechecking.
- We try to reuse Exprs to promote subexpression sharing.
- We might consider not transforming lets to haves if we are in a proof that is not inside a `let`.

View File

@@ -12,7 +12,7 @@ import Lean.Meta.InferType
open Lean Meta
/-!
This module provies builder for efficient `Nat → …` functions based on binary decision trees.
This module provides builder for efficient `Nat → …` functions based on binary decision trees.
-/
/--

View File

@@ -303,7 +303,7 @@ partial def foldAndCollect (oldIH newIH : FVarId) (isRecCall : Expr → Option E
-- abstraction.
-- To collect the IHs, we collect them in each branch, and combine
-- them to a type-leve match
-- them to a type-level match
let ihMatcherApp' ← liftM <| matcherApp.transform
(onParams := fun e => M.eval <| foldAndCollect oldIH newIH isRecCall e)
(onMotive := fun xs _body => do

View File

@@ -93,7 +93,7 @@ You can use `TSyntax.getId` to extract the name from the resulting syntax object
withAntiquot (mkAntiquot "ident" identKind) rawIdentNoAntiquot
/--
The parser `hygieneInfo` parses no text, but creates a `hygineInfoKind` node
The parser `hygieneInfo` parses no text, but creates a `hygieneInfoKind` node
containing an anonymous identifier as if it were parsed at the current position.
This identifier is modified by syntax quotations to add macro scopes like a regular identifier.

View File

@@ -238,7 +238,7 @@ def handleUnknownIdentifierCodeAction
| none => { line := 0, character := 0 }
let importInsertionRange : Lsp.Range := ⟨importInsertionPos, importInsertionPos⟩
let mut unknownIdentifierCodeActions := #[]
let mut hasUnambigiousImportCodeAction := false
let mut hasUnambiguousImportCodeAction := false
let some result := response.queryResults[0]?
| return #[]
for query in queries, result in response.queryResults do
@@ -264,7 +264,7 @@ def handleUnknownIdentifierCodeAction
}
}
if isExactMatch then
hasUnambigiousImportCodeAction := true
hasUnambiguousImportCodeAction := true
else
unknownIdentifierCodeActions := unknownIdentifierCodeActions.push {
title := s!"Change to {insertion.fullName}"
@@ -274,7 +274,7 @@ def handleUnknownIdentifierCodeAction
edits := #[insertion.edit]
}
}
if hasUnambigiousImportCodeAction then
if hasUnambiguousImportCodeAction then
unknownIdentifierCodeActions := unknownIdentifierCodeActions.push <|
importAllUnknownIdentifiersCodeAction params "quickfix"
return unknownIdentifierCodeActions

View File

@@ -88,7 +88,7 @@ where
/--
Gets type names for resolving `id` in `.id x₁ ... xₙ` notation.
The process mimics the dotted indentifier notation elaboration procedure at `Lean.Elab.App`.
The process mimics the dotted identifier notation elaboration procedure at `Lean.Elab.App`.
Catches and ignores all errors, so no need to run this within `try`/`catch`.
-/
partial def getDotIdCompletionTypeNames (type : Expr) : MetaM (Array Name) :=

View File

@@ -73,7 +73,7 @@ macro_rules
| `(spred( $xs:explicitBinders, $P)) => do expandExplicitBinders ``SPred.exists xs ( `(spred($P)))
| `( $P) => ``(SPred.entails True spred($P))
| `($P $Q) => ``(SPred.bientails spred($P) spred($Q))
-- Sadly, ∀ does not resently use expandExplicitBinders...
-- Sadly, ∀ does not presently use expandExplicitBinders...
| `(spred(∀ _%$tk, $P)) => ``(SPred.forall (fun _%$tk => spred($P)))
| `(spred(∀ _%$tk : $ty, $P)) => ``(SPred.forall (fun _%$tk : $ty => spred($P)))
| `(spred(∀ (_%$tk $xs* : $ty), $P)) => ``(SPred.forall (fun _%$tk : $ty => spred(∀ ($xs* : $ty), $P)))

View File

@@ -32,7 +32,7 @@ structure Config where
-/
elimLets : Bool := true
/--
If `false` (the default), then we aggresively split `if` and `match` statements and inline join
If `false` (the default), then we aggressively split `if` and `match` statements and inline join
points unconditionally. For some programs this causes exponential blowup of VCs.
Set this flag to choose a more conservative (but slightly lossy) encoding that traverses
every join point only once and yields a formula the size of which is linear in the number of

View File

@@ -61,7 +61,7 @@ get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH)
# function returns an empty string via _git_dir_var.
#
# Example: Given a path C:/bla/foo/bar and assuming C:/bla/.git exists and
# neither foo nor bar contain a file/directory .git. This wil return
# neither foo nor bar contain a file/directory .git. This will return
# C:/bla/.git
#
function(_git_find_closest_git_dir _start_dir _git_dir_var)

View File

@@ -178,15 +178,15 @@ protected def Pattern.toLean? [ToLean? (PatternDescr α β)] (p : Pattern α β)
instance [ToLean? (PatternDescr α β)] : ToLean? (Pattern α β) := Pattern.toLean?
protected partial def PattternDescr.toLean? [ToLean? β] (p : PatternDescr α β) : Option Term :=
have : ToLean? (PatternDescr α β) := PattternDescr.toLean?
protected partial def PatternDescr.toLean? [ToLean? β] (p : PatternDescr α β) : Option Term :=
have : ToLean? (PatternDescr α β) := PatternDescr.toLean?
match p with
| .not p => quoteSingleton? `not p
| .any p => quoteSingleton? `any p
| .all p => quoteSingleton? `all p
| .coe p => toLean? p
instance [ToLean? β] : ToLean? (PatternDescr α β) := PattternDescr.toLean?
instance [ToLean? β] : ToLean? (PatternDescr α β) := PatternDescr.toLean?
protected def StrPatDescr.toLean (pat : StrPatDescr) : Term :=
match pat with

View File

@@ -69,7 +69,7 @@ instance : ToToml (Array LeanOption) where
mutual
partial def Pattern.toToml? [ToToml? β] (p : Pattern α β) : Option Value :=
have : ToToml? (PatternDescr α β) := PattternDescr.toToml?
have : ToToml? (PatternDescr α β) := PatternDescr.toToml?
match p.name with
| .anonymous =>
p.descr?.bind toToml?
@@ -80,7 +80,7 @@ partial def Pattern.toToml? [ToToml? β] (p : Pattern α β) : Option Value :=
| n =>
toToml <| Table.empty.insert `preset n
partial def PattternDescr.toToml?
partial def PatternDescr.toToml?
[ToToml? β] (p : PatternDescr α β) : Option Value
:=
have : ToToml? (Pattern α β) := Pattern.toToml?
@@ -93,7 +93,7 @@ partial def PattternDescr.toToml?
end
instance [ToToml? β] : ToToml? (Pattern α β) := Pattern.toToml?
instance [ToToml? β] : ToToml? (PatternDescr α β) := PattternDescr.toToml?
instance [ToToml? β] : ToToml? (PatternDescr α β) := PatternDescr.toToml?
protected def StrPatDescr.toToml (p : StrPatDescr) : Value :=
match p with

View File

@@ -19,7 +19,7 @@ test-tests: $(addsuffix .test, $(TESTS))
test-examples: $(addsuffix .test, $(EXAMPLES))
test-bootstrapped: test-boostrapped-hello
test-bootstrapped: test-bootstrapped-hello
clean: clean-tests clean-examples clean-bootstrap
@@ -32,7 +32,7 @@ clean-examples: $(addsuffix .clean, $(EXAMPLES))
.PHONY:
all test test-ci test-tests test-examples test-bootstrap\
clean clean-all clean-tests clean-examples clean-build clean-bootstrap\
time-bootstrap check-bootstrap test-bootstrapped test-boostrapped-hello\
time-bootstrap check-bootstrap test-bootstrapped test-bootstrapped-hello\
$(addsuffix .clean, $(TESTS) $(EXAMPLES)) $(addsuffix .test, $(TESTS) $(EXAMPLES))
#-------------------------------------------------------------------------------
@@ -82,5 +82,5 @@ time-bootstrap:
check-bootstrap:
cd examples/bootstrap && ./check.sh
test-boostrapped-hello:
test-bootstrapped-hello:
cd examples/hello && ./bootstrapped-test.sh

View File

@@ -15,7 +15,7 @@ $LAKE script run greet --me | tee -a produced.out
$LAKE script doc greet | tee -a produced.out
$LAKE script run hello | tee -a produced.out
$LAKE script run dep/hello | tee -a produced.out
# Test that non-indentifier names work
# Test that non-identifier names work
# https://leanprover.zulipchat.com/#narrow/stream/270676-lean4/topic/Running.20.60lake.60.20scripts.20from.20the.20command.20line/near/446944450
$LAKE script run say-goodbye | tee -a produced.out
($LAKE script run nonexistent 2>&1 | tee -a produced.out) && exit 1 || true

View File

@@ -326,7 +326,7 @@
"object"
],
"default": "*",
"description": "Includes only the files from the directory whose paths satisify this pattern.\n\nFormat: Either `\"*\"`, or a recursive object with one of the following properties at every layer:\n- `path`: String pattern matching the path.\n- `extension`: String pattern matching the extension of the file in the path.\n- `fileName`: String pattern matching the name of the file in the path.\n- `not`: Negate a path pattern.\n- `any`: Match any of an array of path patterns.\n- `all`: Match all of an array of path patterns.\n\nString patterns have the following format: Either `\"*\"`, an array of all strings that are a match, or a recursive object with one of the following properties at every layer:\n- `startsWith`: Denotes a prefix string of all strings that are a match.\n- `endsWith`: Denotes a suffix string of all strings that are a match.\n- `not`: Negate a string pattern.\n- `any`: Match any of an array of string patterns.\n- `all`: Match all of an array of string patterns."
"description": "Includes only the files from the directory whose paths satisfy this pattern.\n\nFormat: Either `\"*\"`, or a recursive object with one of the following properties at every layer:\n- `path`: String pattern matching the path.\n- `extension`: String pattern matching the extension of the file in the path.\n- `fileName`: String pattern matching the name of the file in the path.\n- `not`: Negate a path pattern.\n- `any`: Match any of an array of path patterns.\n- `all`: Match all of an array of path patterns.\n\nString patterns have the following format: Either `\"*\"`, an array of all strings that are a match, or a recursive object with one of the following properties at every layer:\n- `startsWith`: Denotes a prefix string of all strings that are a match.\n- `endsWith`: Denotes a suffix string of all strings that are a match.\n- `not`: Negate a string pattern.\n- `any`: Match any of an array of string patterns.\n- `all`: Match all of an array of string patterns."
}
},
"required": [

View File

@@ -1,4 +1,4 @@
{"textDocument": {"uri": "file:///hoverBinderUndescore.lean"},
{"textDocument": {"uri": "file:///hoverBinderUnderscore.lean"},
"position": {"line": 1, "character": 5}}
{"range":
{"start": {"line": 1, "character": 5}, "end": {"line": 1, "character": 6}},
@@ -6,7 +6,7 @@
{"value":
"```lean\nNat\n```\n***\nA *hole* (or *placeholder term*), which stands for an unknown term that is expected to be inferred based on context.\nFor example, in `@id _ Nat.zero`, the `_` must be the type of `Nat.zero`, which is `Nat`.\n\nThe way this works is that holes create fresh metavariables.\nThe elaborator is allowed to assign terms to metavariables while it is checking definitional equalities.\nThis is often known as *unification*.\n\nNormally, all holes must be solved for. However, there are a few contexts where this is not necessary:\n* In `match` patterns, holes are catch-all patterns.\n* In some tactics, such as `refine'` and `apply`, unsolved-for placeholders become new goals.\n\nRelated concept: implicit parameters are automatically filled in with holes during the elaboration process.\n\nSee also `?m` syntax (synthetic holes).\n",
"kind": "markdown"}}
{"textDocument": {"uri": "file:///hoverBinderUndescore.lean"},
{"textDocument": {"uri": "file:///hoverBinderUnderscore.lean"},
"position": {"line": 1, "character": 7}}
{"range":
{"start": {"line": 1, "character": 7}, "end": {"line": 1, "character": 8}},
@@ -14,7 +14,7 @@
{"value":
"```lean\nBool\n```\n***\nA *hole* (or *placeholder term*), which stands for an unknown term that is expected to be inferred based on context.\nFor example, in `@id _ Nat.zero`, the `_` must be the type of `Nat.zero`, which is `Nat`.\n\nThe way this works is that holes create fresh metavariables.\nThe elaborator is allowed to assign terms to metavariables while it is checking definitional equalities.\nThis is often known as *unification*.\n\nNormally, all holes must be solved for. However, there are a few contexts where this is not necessary:\n* In `match` patterns, holes are catch-all patterns.\n* In some tactics, such as `refine'` and `apply`, unsolved-for placeholders become new goals.\n\nRelated concept: implicit parameters are automatically filled in with holes during the elaboration process.\n\nSee also `?m` syntax (synthetic holes).\n",
"kind": "markdown"}}
{"textDocument": {"uri": "file:///hoverBinderUndescore.lean"},
{"textDocument": {"uri": "file:///hoverBinderUnderscore.lean"},
"position": {"line": 6, "character": 6}}
{"range":
{"start": {"line": 6, "character": 6}, "end": {"line": 6, "character": 7}},
@@ -22,7 +22,7 @@
{"value":
"```lean\nNat\n```\n***\nA *hole* (or *placeholder term*), which stands for an unknown term that is expected to be inferred based on context.\nFor example, in `@id _ Nat.zero`, the `_` must be the type of `Nat.zero`, which is `Nat`.\n\nThe way this works is that holes create fresh metavariables.\nThe elaborator is allowed to assign terms to metavariables while it is checking definitional equalities.\nThis is often known as *unification*.\n\nNormally, all holes must be solved for. However, there are a few contexts where this is not necessary:\n* In `match` patterns, holes are catch-all patterns.\n* In some tactics, such as `refine'` and `apply`, unsolved-for placeholders become new goals.\n\nRelated concept: implicit parameters are automatically filled in with holes during the elaboration process.\n\nSee also `?m` syntax (synthetic holes).\n",
"kind": "markdown"}}
{"textDocument": {"uri": "file:///hoverBinderUndescore.lean"},
{"textDocument": {"uri": "file:///hoverBinderUnderscore.lean"},
"position": {"line": 6, "character": 8}}
{"range":
{"start": {"line": 6, "character": 8}, "end": {"line": 6, "character": 9}},
@@ -30,7 +30,7 @@
{"value":
"```lean\nBool\n```\n***\nA *hole* (or *placeholder term*), which stands for an unknown term that is expected to be inferred based on context.\nFor example, in `@id _ Nat.zero`, the `_` must be the type of `Nat.zero`, which is `Nat`.\n\nThe way this works is that holes create fresh metavariables.\nThe elaborator is allowed to assign terms to metavariables while it is checking definitional equalities.\nThis is often known as *unification*.\n\nNormally, all holes must be solved for. However, there are a few contexts where this is not necessary:\n* In `match` patterns, holes are catch-all patterns.\n* In some tactics, such as `refine'` and `apply`, unsolved-for placeholders become new goals.\n\nRelated concept: implicit parameters are automatically filled in with holes during the elaboration process.\n\nSee also `?m` syntax (synthetic holes).\n",
"kind": "markdown"}}
{"textDocument": {"uri": "file:///hoverBinderUndescore.lean"},
{"textDocument": {"uri": "file:///hoverBinderUnderscore.lean"},
"position": {"line": 11, "character": 6}}
{"range":
{"start": {"line": 11, "character": 6}, "end": {"line": 11, "character": 7}},
@@ -38,7 +38,7 @@
{"value":
"```lean\nNat\n```\n***\nA *hole* (or *placeholder term*), which stands for an unknown term that is expected to be inferred based on context.\nFor example, in `@id _ Nat.zero`, the `_` must be the type of `Nat.zero`, which is `Nat`.\n\nThe way this works is that holes create fresh metavariables.\nThe elaborator is allowed to assign terms to metavariables while it is checking definitional equalities.\nThis is often known as *unification*.\n\nNormally, all holes must be solved for. However, there are a few contexts where this is not necessary:\n* In `match` patterns, holes are catch-all patterns.\n* In some tactics, such as `refine'` and `apply`, unsolved-for placeholders become new goals.\n\nRelated concept: implicit parameters are automatically filled in with holes during the elaboration process.\n\nSee also `?m` syntax (synthetic holes).\n",
"kind": "markdown"}}
{"textDocument": {"uri": "file:///hoverBinderUndescore.lean"},
{"textDocument": {"uri": "file:///hoverBinderUnderscore.lean"},
"position": {"line": 11, "character": 8}}
{"range":
{"start": {"line": 11, "character": 8}, "end": {"line": 11, "character": 9}},

View File

@@ -1,4 +1,4 @@
motiveNotTypeCorect.lean:7:6-7:7: error: Tactic `rewrite` failed: motive is not type correct:
motiveNotTypeCorrect.lean:7:6-7:7: error: Tactic `rewrite` failed: motive is not type correct:
fun _a => P _a d
Error: Application type mismatch: The argument
d
@@ -19,7 +19,7 @@ h : f t = t
d : D (f t)
P : (t : Nat) → D t → Prop
⊢ P (f t) d
motiveNotTypeCorect.lean:18:8-18:9: error: Tactic `rewrite` failed: Motive is dependent:
motiveNotTypeCorrect.lean:18:8-18:9: error: Tactic `rewrite` failed: Motive is dependent:
fun _a => A _a
Note: The rewrite tactic cannot substitute terms on which the type of the target expression depends. The type of the expression

View File

@@ -34,7 +34,7 @@ example : f 0 > 0 := by simp!
-- NB: simp! disables all warnings, not just for declarations to unfold
-- Mild bug, but not a regresion.
-- Mild bug, but not a regression.
/--
error: unsolved goals

View File

@@ -90,7 +90,7 @@ local macro "repeat_mul" n:num "with" x:term : term =>
This test showcases that the runtime of `bv_ac_nf` is not a bottleneck:
* Testing with 100 as the repetition amount runs in about 200ms with `skipKernelTC` set,
or ~3.3 seconds without (c.q. 2.3s for `ac_rfl`), and
* Putting in 125 for the repetition amount wil give a `maximum recursion depth has been reached`
* Putting in 125 for the repetition amount will give a `maximum recursion depth has been reached`
error thrown by simp anyway, so the runtime is not a limiting factor to begin with.
-/
set_option debug.skipKernelTC true in

View File

@@ -61,7 +61,7 @@ info: zip.induct.{u_1, u_2} {α : Type u_1} {β : Type u_2} (motive : List α
#guard_msgs in
#check zip.induct
/-- Lets try ot use it! -/
/-- Let's try to use it! -/
theorem zip_length {α β} (xs : List α) (ys : List β) :
(zip xs ys).length = xs.length.min ys.length := by
induction xs, ys using zip.induct

View File

@@ -1,6 +1,6 @@
module
-- In the following test, the first 8 case-splits are irrelevant,
-- and non-choronological backtracking is used to avoid searching
-- and non-chronological backtracking is used to avoid searching
-- (2^8 - 1) irrelevant branches
/--
trace: [grind.split] p8 q8, generation: 0

View File

@@ -43,8 +43,8 @@ end Test3
namespace Test4
-- This checks that when unfolding abstraced proofs, we do not unfold function calls
-- that were actuallly there, like the one to `Function.cons` below
-- This checks that when unfolding abstracted proofs, we do not unfold function calls
-- that were actually there, like the one to `Function.cons` below
/--
error: failed to infer structural recursion: