Compare commits

...

10 Commits

Author SHA1 Message Date
Mac Malone
6caaee842e chore: lake: backport dynlib fixes (#8187)
This PR backports fixes to dynamic library linking and loading in Lake
to `v4.19.0`.

Includes changes from #7809, #8026, #8152, and #8190.
2025-04-30 23:13:10 -04:00
Sebastian Ullrich
502c342004 fix: linter should have access to all messages, really (#8117)
Continuation of #8101

(cherry picked from commit 82723489c9)
2025-04-27 16:55:56 +02:00
Sebastian Ullrich
2300842a64 fix: linter should have access to complete command message log (#8101)
This PR fixes a parallelism regression where linters that e.g. check for
errors in the command would no longer find such messages.

---------

Co-authored-by: damiano <adomani@gmail.com>
(cherry picked from commit 4323507b91)
2025-04-27 16:10:15 +02:00
Sebastian Ullrich
3825defc45 chore: fix cmake install exclude patterns (#7941)
(cherry picked from commit 973f521c46)
2025-04-14 11:26:37 +02:00
Sebastian Ullrich
045d07d234 fix: cancellation of synchronous part of previous elaboration (#7882)
This PR fixes a regression where elaboration of a previous document
version is not cancelled on changes to the document.

Done by removing the default from `SnapshotTask.cancelTk?` and
consistently passing the current thread's token for synchronous
elaboration steps.

(cherry picked from commit 1421b6145e)
2025-04-10 14:08:31 +02:00
Sebastian Ullrich
aa6e135acf fix: avoid blocking wait in sync task (#7852)
This PR fixes an issue where editing a Lean file may lead to a server
deadlock from threadpool starvation, especially on machines with a low
number of cores.

(cherry picked from commit acd6b13d76)
2025-04-08 14:07:18 +02:00
Kim Morrison
fafd381c90 Merge tag 'nightly-2025-04-02' into releases/v4.19.0 2025-04-03 11:01:05 +11:00
Sebastian Ullrich
3cb82da23e feat: importModules without loading environment extensions (#6325)
This PR ensures that environments can be loaded repeatedly, without
executing arbitrary code.

(cherry picked from commit 5df4e48dc9)
2025-04-02 22:34:06 +11:00
Scott Morrison
359abc8726 chore: normalize URLs to the language reference in test results
update tests

syntax

sigh

fix regex

allow rcs
2025-04-02 16:21:11 +11:00
Scott Morrison
d59b911462 set LEAN_VERSION_IS_RELEASE 2025-04-02 10:18:06 +11:00
52 changed files with 750 additions and 341 deletions

View File

@@ -0,0 +1,6 @@
**Breaking Changes**
* The functions `Lean.Environment.importModules` and `Lean.Environment.finalizeImport` have been extended with a new parameter `loadExts : Bool := false` that enables environment extension state loading.
Their previous behavior corresponds to setting the flag to `true` but is only safe to do in combination with `enableInitializersExecution`; see also the `importModules` docstring.
The new default value `false` ensures the functions can be used correctly multiple times within the same process when environment extension access is not needed.
The wrapper function `Lean.Environment.withImportModules` now always calls `importModules` with `loadExts := false` as it is incompatible with extension loading.

View File

@@ -12,7 +12,7 @@ project(LEAN CXX C)
set(LEAN_VERSION_MAJOR 4)
set(LEAN_VERSION_MINOR 19)
set(LEAN_VERSION_PATCH 0)
set(LEAN_VERSION_IS_RELEASE 0) # This number is 1 in the release revision, and 0 otherwise.
set(LEAN_VERSION_IS_RELEASE 1) # This number is 1 in the release revision, and 0 otherwise.
set(LEAN_SPECIAL_VERSION_DESC "" CACHE STRING "Additional version description like 'nightly-2018-03-11'")
set(LEAN_VERSION_STRING "${LEAN_VERSION_MAJOR}.${LEAN_VERSION_MINOR}.${LEAN_VERSION_PATCH}")
if (LEAN_SPECIAL_VERSION_DESC)
@@ -780,12 +780,11 @@ add_custom_target(clean-olean
DEPENDS clean-stdlib)
install(DIRECTORY "${CMAKE_BINARY_DIR}/lib/" DESTINATION lib
PATTERN temp
PATTERN "*.export"
PATTERN "*.hash"
PATTERN "*.trace"
PATTERN "*.rsp"
EXCLUDE)
PATTERN temp EXCLUDE
PATTERN "*.export" EXCLUDE
PATTERN "*.hash" EXCLUDE
PATTERN "*.trace" EXCLUDE
PATTERN "*.rsp" EXCLUDE)
# symlink source into expected installation location for go-to-definition, if file system allows it
file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/src)

View File

@@ -363,15 +363,27 @@ def runLintersAsync (stx : Syntax) : CommandElabM Unit := do
runLinters stx
return
-- linters should have access to the complete info tree and message log
let mut snaps := ( get).snapshotTasks
if let some elabSnap := ( read).snap? then
snaps := snaps.push { stx? := none, cancelTk? := none, task := elabSnap.new.result!.map (sync := true) toSnapshotTree }
let tree := SnapshotTree.mk { diagnostics := .empty } snaps
let treeTask tree.waitAll
-- We only start one task for all linters for now as most linters are fast and we simply want
-- to unblock elaboration of the next command
let cancelTk IO.CancelToken.new
let lintAct wrapAsyncAsSnapshot (cancelTk? := cancelTk) fun infoSt => do
let messages := tree.getAll.map (·.diagnostics.msgLog) |>.foldl (· ++ ·) .empty
-- do not double-report
let messages := messages.markAllReported
modify fun st => { st with messages := st.messages ++ messages }
modifyInfoState fun _ => infoSt
runLinters stx
-- linters should have access to the complete info tree
let task BaseIO.mapTask (t := ( getInfoState).substituteLazy) lintAct
let task BaseIO.bindTask (sync := true) (t := ( getInfoState).substituteLazy) fun infoSt =>
BaseIO.mapTask (t := treeTask) fun _ =>
lintAct infoSt
logSnapshotTask { stx? := none, task, cancelTk? := cancelTk }
protected def getCurrMacroScope : CommandElabM Nat := do pure ( read).currMacroScope
@@ -510,6 +522,7 @@ where go := do
let oldCmds? := oldSnap?.map fun old =>
if old.newStx.isOfKind nullKind then old.newStx.getArgs else #[old.newStx]
let cmdPromises cmds.mapM fun _ => IO.Promise.new
let cancelTk? := ( read).cancelTk?
snap.new.resolve <| .ofTyped {
diagnostics := .empty
macroDecl := decl
@@ -517,7 +530,7 @@ where go := do
newNextMacroScope := nextMacroScope
hasTraces
next := Array.zipWith (fun cmdPromise cmd =>
{ stx? := some cmd, task := cmdPromise.resultD default }) cmdPromises cmds
{ stx? := some cmd, task := cmdPromise.resultD default, cancelTk? }) cmdPromises cmds
: MacroExpandedSnapshot
}
-- After the first command whose syntax tree changed, we must disable
@@ -577,10 +590,17 @@ def elabCommandTopLevel (stx : Syntax) : CommandElabM Unit := withRef stx do pro
let initMsgs modifyGet fun st => (st.messages, { st with messages := {} })
let initInfoTrees getResetInfoTrees
try
-- We should *not* factor out `elabCommand`'s `withLogging` to here since it would make its error
-- recovery more coarse. In particular, If `c` in `set_option ... in $c` fails, the remaining
-- `end` command of the `in` macro would be skipped and the option would be leaked to the outside!
elabCommand stx
try
-- We should *not* factor out `elabCommand`'s `withLogging` to here since it would make its error
-- recovery more coarse. In particular, if `c` in `set_option ... in $c` fails, the remaining
-- `end` command of the `in` macro would be skipped and the option would be leaked to the outside!
elabCommand stx
finally
-- Make sure `snap?` is definitely resolved; we do not use it for reporting as `#guard_msgs` may
-- be the caller of this function and add new messages and info trees
if let some snap := ( read).snap? then
snap.new.resolve default
-- Run the linters, unless `#guard_msgs` is present, which is special and runs `elabCommandTopLevel` itself,
-- so it is a "super-top-level" command. This is the only command that does this, so we just special case it here
-- rather than engineer a general solution.
@@ -588,11 +608,6 @@ def elabCommandTopLevel (stx : Syntax) : CommandElabM Unit := withRef stx do pro
withLogging do
runLintersAsync stx
finally
-- Make sure `snap?` is definitely resolved; we do not use it for reporting as `#guard_msgs` may
-- be the caller of this function and add new messages and info trees
if let some snap := ( read).snap? then
snap.new.resolve default
let msgs := ( get).messages
modify fun st => { st with
messages := initMsgs ++ msgs

View File

@@ -22,7 +22,8 @@ def processHeader (header : Syntax) (opts : Options) (messages : MessageLog)
(plugins : Array System.FilePath := #[]) (leakEnv := false)
: IO (Environment × MessageLog) := do
try
let env importModules (leakEnv := leakEnv) (headerToImports header) opts trustLevel plugins
let env
importModules (leakEnv := leakEnv) (loadExts := true) (headerToImports header) opts trustLevel plugins
pure (env, messages)
catch e =>
let env mkEmptyEnvironment

View File

@@ -165,6 +165,8 @@ private def elabHeaders (views : Array DefView) (expandedDeclIds : Array ExpandD
-- no syntax guard to store, we already did the necessary checks
oldBodySnap? := guard reuseBody *> pure .missing, old.bodySnap
if oldBodySnap?.isNone then
-- NOTE: this will eagerly cancel async tasks not associated with an inner snapshot, most
-- importantly kernel checking and compilation of the top-level declaration
old.bodySnap.cancelRec
oldTacSnap? := do
guard reuseTac
@@ -217,6 +219,7 @@ private def elabHeaders (views : Array DefView) (expandedDeclIds : Array ExpandD
return newHeader
if let some snap := view.headerSnap? then
let (tacStx?, newTacTask?) mkTacTask view.value tacPromise
let cancelTk? := ( readThe Core.Context).cancelTk?
let bodySnap := {
stx? := view.value
reportingRange? :=
@@ -227,6 +230,8 @@ private def elabHeaders (views : Array DefView) (expandedDeclIds : Array ExpandD
else
getBodyTerm? view.value |>.getD view.value |>.getRange?
task := bodyPromise.resultD default
-- We should not cancel the entire body early if we have tactics
cancelTk? := guard newTacTask?.isNone *> cancelTk?
}
snap.new.resolve <| some {
diagnostics :=
@@ -269,7 +274,8 @@ where
:= do
if let some e := getBodyTerm? body then
if let `(by $tacs*) := e then
return (e, some { stx? := mkNullNode tacs, task := tacPromise.resultD default })
let cancelTk? := ( readThe Core.Context).cancelTk?
return (e, some { stx? := mkNullNode tacs, task := tacPromise.resultD default, cancelTk? })
tacPromise.resolve default
return (none, none)
@@ -432,8 +438,7 @@ private def elabFunValues (headers : Array DefViewElabHeader) (vars : Array Expr
snap.new.resolve <| some old
reusableResult? := some (old.value, old.state)
else
-- NOTE: this will eagerly cancel async tasks not associated with an inner snapshot, most
-- importantly kernel checking and compilation of the top-level declaration
-- make sure to cancel any async tasks that may still be running (e.g. kernel and codegen)
old.val.cancelRec
let (val, state) withRestoreOrSaveFull reusableResult? header.tacSnap? do
@@ -1197,6 +1202,7 @@ private def logGoalsAccomplishedSnapshotTask (views : Array DefView)
-- Use first line of the mutual block to avoid covering the progress of the whole mutual block
reportingRange? := ( getRef).getPos?.map fun pos => pos, pos
task := logGoalsAccomplishedTask
cancelTk? := none
}
end Term
@@ -1235,9 +1241,10 @@ def elabMutualDef (ds : Array Syntax) : CommandElabM Unit := do
} }
if snap.old?.isSome && (view.headerSnap?.bind (·.old?)).isNone then
snap.old?.forM (·.val.cancelRec)
let cancelTk? := ( read).cancelTk?
defs := defs.push {
fullHeaderRef
headerProcessedSnap := { stx? := d, task := headerPromise.resultD default }
headerProcessedSnap := { stx? := d, task := headerPromise.resultD default, cancelTk? }
}
reusedAllHeaders := reusedAllHeaders && view.headerSnap?.any (·.old?.isSome)
views := views.push view

View File

@@ -165,6 +165,7 @@ structure EvalTacticFailure where
state : SavedState
partial def evalTactic (stx : Syntax) : TacticM Unit := do
checkSystem "tactic execution"
profileitM Exception "tactic execution" (decl := stx.getKind) ( getOptions) <|
withRef stx <| withIncRecDepth <| withFreshMacroScope <| match stx with
| .node _ k _ =>
@@ -240,6 +241,7 @@ where
snap.old?.forM (·.val.cancelRec)
let promise IO.Promise.new
-- Store new unfolding in the snapshot tree
let cancelTk? := ( readThe Core.Context).cancelTk?
snap.new.resolve {
stx := stx'
diagnostics := .empty
@@ -249,7 +251,7 @@ where
state? := ( Tactic.saveState)
moreSnaps := #[]
}
next := #[{ stx? := stx', task := promise.resultD default }]
next := #[{ stx? := stx', task := promise.resultD default, cancelTk? }]
}
-- Update `tacSnap?` to old unfolding
withTheReader Term.Context ({ · with tacSnap? := some {

View File

@@ -78,13 +78,14 @@ where
let next IO.Promise.new
let finished IO.Promise.new
let inner IO.Promise.new
let cancelTk? := ( readThe Core.Context).cancelTk?
snap.new.resolve {
desc := tac.getKind.toString
diagnostics := .empty
stx := tac
inner? := some { stx? := tac, task := inner.resultD default }
finished := { stx? := tac, task := finished.resultD default }
next := #[{ stx? := stxs, task := next.resultD default }]
inner? := some { stx? := tac, task := inner.resultD default, cancelTk? }
finished := { stx? := tac, task := finished.resultD default, cancelTk? }
next := #[{ stx? := stxs, task := next.resultD default, cancelTk? }]
}
-- Run `tac` in a fresh info tree state and store resulting state in snapshot for
-- incremental reporting, then add back saved trees. Here we rely on `evalTactic`

View File

@@ -286,14 +286,15 @@ where
-- them, eventually put each of them back in `Context.tacSnap?` in `applyAltStx`
let finished IO.Promise.new
let altPromises altStxs.mapM fun _ => IO.Promise.new
let cancelTk? := ( readThe Core.Context).cancelTk?
tacSnap.new.resolve {
-- save all relevant syntax here for comparison with next document version
stx := mkNullNode altStxs
diagnostics := .empty
inner? := none
finished := { stx? := mkNullNode altStxs, reportingRange? := none, task := finished.resultD default }
finished := { stx? := mkNullNode altStxs, reportingRange? := none, task := finished.resultD default, cancelTk? }
next := Array.zipWith
(fun stx prom => { stx? := some stx, task := prom.resultD default })
(fun stx prom => { stx? := some stx, task := prom.resultD default, cancelTk? })
altStxs altPromises
}
goWithIncremental <| altPromises.mapIdx fun i prom => {

View File

@@ -905,7 +905,8 @@ def addConstAsync (env : Environment) (constName : Name) (kind : ConstantKind) (
}
exts? := guard reportExts *> some (constPromise.result?.map (sync := true) fun
| some (_, exts, _) => exts
| none => env.toKernelEnv.extensions)
-- any value should work here, `base` does not block
| none => env.base.extensions)
consts := constPromise.result?.map (sync := true) fun
| some (_, _, consts) => .mk consts
| none => .mk (α := AsyncConsts) default
@@ -1801,16 +1802,12 @@ private def equivInfo (cinfo₁ cinfo₂ : ConstantInfo) : Bool := Id.run do
&& tval₁.all == tval₂.all
/--
Construct environment from `importModulesCore` results.
Constructs environment from `importModulesCore` results.
If `leakEnv` is true, we mark the environment as persistent, which means it
will not be freed. We set this when the object would survive until the end of
the process anyway. In exchange, RC updates are avoided, which is especially
important when they would be atomic because the environment is shared across
threads (potentially, storing it in an `IO.Ref` is sufficient for marking it
as such). -/
See also `importModules` for parameter documentation.
-/
def finalizeImport (s : ImportState) (imports : Array Import) (opts : Options) (trustLevel : UInt32 := 0)
(leakEnv := false) : IO Environment := do
(leakEnv loadExts : Bool) : IO Environment := do
let numConsts := s.moduleData.foldl (init := 0) fun numConsts mod =>
numConsts + mod.constants.size + mod.extraConstNames.size
let mut const2ModIdx : Std.HashMap Name ModuleIdx := Std.HashMap.emptyWithCapacity (capacity := numConsts)
@@ -1860,14 +1857,15 @@ def finalizeImport (s : ImportState) (imports : Array Import) (opts : Options) (
Safety: There are no concurrent accesses to `env` at this point. -/
env unsafe Runtime.markPersistent env
env finalizePersistentExtensions env s.moduleData opts
if leakEnv then
/- Ensure the final environment including environment extension states is
marked persistent as documented.
if loadExts then
env finalizePersistentExtensions env s.moduleData opts
if leakEnv then
/- Ensure the final environment including environment extension states is
marked persistent as documented.
Safety: There are no concurrent accesses to `env` at this point, assuming
extensions' `addImportFn`s did not spawn any unbound tasks. -/
env unsafe Runtime.markPersistent env
Safety: There are no concurrent accesses to `env` at this point, assuming
extensions' `addImportFn`s did not spawn any unbound tasks. -/
env unsafe Runtime.markPersistent env
return { env with realizedImportedConsts? := some {
-- safety: `RealizationContext` is private
env := unsafe unsafeCast env
@@ -1875,9 +1873,22 @@ def finalizeImport (s : ImportState) (imports : Array Import) (opts : Options) (
constsRef := ( IO.mkRef {})
} }
@[export lean_import_modules]
/--
Creates environment object from given imports.
If `leakEnv` is true, we mark the environment as persistent, which means it will not be freed. We
set this when the object would survive until the end of the process anyway. In exchange, RC updates
are avoided, which is especially important when they would be atomic because the environment is
shared across threads (potentially, storing it in an `IO.Ref` is sufficient for marking it as such).
If `loadExts` is true, we initialize the environment extensions using the imported data. Doing so
may use the interpreter and thus is only safe to do after calling `enableInitializersExecution`; see
also caveats there. If not set, every extension will have its initial value as its state. While the
environment's constant map can be accessed without `loadExts`, many functions that take
`Environment` or are in a monad carrying it such as `CoreM` may not function properly without it.
-/
def importModules (imports : Array Import) (opts : Options) (trustLevel : UInt32 := 0)
(plugins : Array System.FilePath := #[]) (leakEnv := false)
(plugins : Array System.FilePath := #[]) (leakEnv := false) (loadExts := false)
: IO Environment := profileitIO "import" opts do
for imp in imports do
if imp.module matches .anonymous then
@@ -1885,13 +1896,17 @@ def importModules (imports : Array Import) (opts : Options) (trustLevel : UInt32
withImporting do
plugins.forM Lean.loadPlugin
let (_, s) importModulesCore imports |>.run
finalizeImport (leakEnv := leakEnv) s imports opts trustLevel
finalizeImport (leakEnv := leakEnv) (loadExts := loadExts) s imports opts trustLevel
/--
Create environment object from imports and free compacted regions after calling `act`. No live references to the
environment object or imported objects may exist after `act` finishes. -/
unsafe def withImportModules {α : Type} (imports : Array Import) (opts : Options) (trustLevel : UInt32 := 0) (act : Environment IO α) : IO α := do
let env importModules imports opts trustLevel
Creates environment object from imports and frees compacted regions after calling `act`. No live
references to the environment object or imported objects may exist after `act` finishes. As this
cannot be ruled out after loading environment extensions, `importModules`'s `loadExts` is always
unset using this function.
-/
unsafe def withImportModules {α : Type} (imports : Array Import) (opts : Options)
(act : Environment IO α) (trustLevel : UInt32 := 0) : IO α := do
let env importModules (loadExts := false) imports opts trustLevel
try act env finally env.freeRegions
@[inherit_doc Kernel.Environment.enableDiag]

View File

@@ -93,18 +93,17 @@ structure SnapshotTask (α : Type) where
Cancellation token that can be set by the server to cancel the task when it detects the results
are not needed anymore.
-/
cancelTk? : Option IO.CancelToken := none
cancelTk? : Option IO.CancelToken
/-- Underlying task producing the snapshot. -/
task : Task α
deriving Nonempty, Inhabited
/-- Creates a snapshot task from the syntax processed by the task and a `BaseIO` action. -/
def SnapshotTask.ofIO (stx? : Option Syntax)
def SnapshotTask.ofIO (stx? : Option Syntax) (cancelTk? : Option IO.CancelToken)
(reportingRange? : Option String.Range := defaultReportingRange? stx?) (act : BaseIO α) :
BaseIO (SnapshotTask α) := do
return {
stx?
reportingRange?
stx?, reportingRange?, cancelTk?
task := ( BaseIO.asTask act)
}
@@ -114,6 +113,7 @@ def SnapshotTask.finished (stx? : Option Syntax) (a : α) : SnapshotTask α wher
-- irrelevant when already finished
reportingRange? := none
task := .pure a
cancelTk? := none
/-- Transforms a task's output without changing the processed syntax. -/
def SnapshotTask.map (t : SnapshotTask α) (f : α β) (stx? : Option Syntax := t.stx?)

View File

@@ -397,7 +397,7 @@ where
diagnostics := oldProcessed.diagnostics
result? := some {
cmdState := oldProcSuccess.cmdState
firstCmdSnap := { stx? := none, task := prom.result! } } }
firstCmdSnap := { stx? := none, task := prom.result!, cancelTk? := cancelTk } } }
else
return .finished newStx oldProcessed) } }
else return old
@@ -450,7 +450,7 @@ where
processHeader (stx : Syntax) (parserState : Parser.ModuleParserState) :
LeanProcessingM (SnapshotTask HeaderProcessedSnapshot) := do
let ctx read
SnapshotTask.ofIO stx (some 0, ctx.input.endPos) <|
SnapshotTask.ofIO stx none (some 0, ctx.input.endPos) <|
ReaderT.run (r := ctx) <| -- re-enter reader in new task
withHeaderExceptions (α := HeaderProcessedSnapshot) ({ · with result? := none }) do
let setup match ( setupImports stx) with
@@ -507,7 +507,7 @@ where
infoTree? := cmdState.infoState.trees[0]!
result? := some {
cmdState
firstCmdSnap := { stx? := none, task := prom.result! }
firstCmdSnap := { stx? := none, task := prom.result!, cancelTk? := cancelTk }
}
}
@@ -523,17 +523,19 @@ where
-- from `old`
if let some oldNext := old.nextCmdSnap? then do
let newProm IO.Promise.new
let cancelTk IO.CancelToken.new
-- can reuse range, syntax unchanged
BaseIO.chainTask (sync := true) old.resultSnap.task fun oldResult =>
-- also wait on old command parse snapshot as parsing is cheap and may allow for
-- elaboration reuse
BaseIO.chainTask (sync := true) oldNext.task fun oldNext => do
let cancelTk IO.CancelToken.new
parseCmd oldNext newParserState oldResult.cmdState newProm sync cancelTk ctx
prom.resolve <| { old with nextCmdSnap? := some {
stx? := none
reportingRange? := some newParserState.pos, ctx.input.endPos
task := newProm.result! } }
task := newProm.result!
cancelTk? := cancelTk
} }
else prom.resolve old -- terminal command, we're done!
-- fast path, do not even start new task for this snapshot (see [Incremental Parsing])
@@ -615,15 +617,16 @@ where
})
let diagnostics Snapshot.Diagnostics.ofMessageLog msgLog
-- use per-command cancellation token for elaboration so that
-- use per-command cancellation token for elaboration so that cancellation of further commands
-- does not affect current command
let elabCmdCancelTk IO.CancelToken.new
prom.resolve {
diagnostics, nextCmdSnap?
stx := stx', parserState := parserState'
elabSnap := { stx? := stx', task := elabPromise.result!, cancelTk? := some elabCmdCancelTk }
resultSnap := { stx? := stx', reportingRange? := initRange?, task := resultPromise.result! }
infoTreeSnap := { stx? := stx', reportingRange? := initRange?, task := finishedPromise.result! }
reportSnap := { stx? := none, reportingRange? := initRange?, task := reportPromise.result! }
resultSnap := { stx? := stx', reportingRange? := initRange?, task := resultPromise.result!, cancelTk? := none }
infoTreeSnap := { stx? := stx', reportingRange? := initRange?, task := finishedPromise.result!, cancelTk? := none }
reportSnap := { stx? := none, reportingRange? := initRange?, task := reportPromise.result!, cancelTk? := none }
}
let cmdState doElab stx cmdState beginPos
{ old? := old?.map fun old => old.stx, old.elabSnap, new := elabPromise }
@@ -665,8 +668,8 @@ where
-- We want to trace all of `CommandParsedSnapshot` but `traceTask` is part of it, so let's
-- create a temporary snapshot tree containing all tasks but it
let snaps := #[
{ stx? := stx', task := elabPromise.result!.map (sync := true) toSnapshotTree },
{ stx? := stx', task := resultPromise.result!.map (sync := true) toSnapshotTree }] ++
{ stx? := stx', task := elabPromise.result!.map (sync := true) toSnapshotTree, cancelTk? := none },
{ stx? := stx', task := resultPromise.result!.map (sync := true) toSnapshotTree, cancelTk? := none }] ++
cmdState.snapshotTasks
let tree := SnapshotTree.mk { diagnostics := .empty } snaps
BaseIO.bindTask ( tree.waitAll) fun _ => do
@@ -690,6 +693,7 @@ where
stx? := none
reportingRange? := initRange?
task := traceTask
cancelTk? := none
}
if let some next := next? then
-- We're definitely off the fast-forwarding path now

View File

@@ -2279,6 +2279,7 @@ def realizeConst (forConst : Name) (constName : Name) (realize : MetaM Unit) :
initHeartbeats := ( IO.getNumHeartbeats)
}
let (env, exTask, dyn) env.realizeConst forConst constName (realizeAndReport coreCtx)
-- Realizations cannot be cancelled as their result is shared across elaboration runs
let exAct Core.wrapAsyncAsSnapshot (cancelTk? := none) fun
| none => return
| some ex => do
@@ -2286,6 +2287,7 @@ def realizeConst (forConst : Name) (constName : Name) (realize : MetaM Unit) :
Core.logSnapshotTask {
stx? := none
task := ( BaseIO.mapTask (t := exTask) exAct)
cancelTk? := none
}
if let some res := dyn.get? RealizeConstantResult then
let mut snap := res.snap

View File

@@ -56,6 +56,36 @@ elab_rules : tactic
-- can't use a naked promise in `initialize` as marking it persistent would block
initialize unblockedCancelTk : IO.CancelToken IO.CancelToken.new
/--
Waits for `unblock` to be called, which is expected to happen in a subsequent document version that
does not invalidate this tactic. Complains if cancellation token was set before unblocking, i.e. if
the tactic was invalidated after all.
-/
scoped syntax "wait_for_unblock" : tactic
@[incremental]
elab_rules : tactic
| `(tactic| wait_for_unblock) => do
let ctx readThe Core.Context
let some cancelTk := ctx.cancelTk? | unreachable!
dbg_trace "blocked!"
log "blocked"
let ctx readThe Elab.Term.Context
let some tacSnap := ctx.tacSnap? | unreachable!
tacSnap.new.resolve {
diagnostics := ( Language.Snapshot.Diagnostics.ofMessageLog ( Core.getMessageLog))
stx := default
finished := default
}
while true do
if ( unblockedCancelTk.isSet) then
break
IO.sleep 30
if ( cancelTk.isSet) then
IO.eprintln "cancelled!"
log "cancelled (should never be visible)"
/--
Spawns a `logSnapshotTask` that waits for `unblock` to be called, which is expected to happen in a
subsequent document version that does not invalidate this tactic. Complains if cancellation token
@@ -83,6 +113,10 @@ scoped elab "unblock" : tactic => do
dbg_trace "unblocking!"
unblockedCancelTk.set
/--
Like `wait_for_cancel_once` but does the waiting in a separate task and waits for its
cancellation.
-/
scoped syntax "wait_for_cancel_once_async" : tactic
@[incremental]
elab_rules : tactic
@@ -110,3 +144,35 @@ elab_rules : tactic
dbg_trace "blocked!"
log "blocked"
/--
Like `wait_for_cancel_once_async` but waits for the main thread's cancellation token. This is useful
to test main thread cancellation in non-incremental contexts because we otherwise wouldn't be able
to send out the "blocked" message from there.
-/
scoped syntax "wait_for_main_cancel_once_async" : tactic
@[incremental]
elab_rules : tactic
| `(tactic| wait_for_main_cancel_once_async) => do
let prom IO.Promise.new
if let some t := ( onceRef.modifyGet (fun old => (old, old.getD prom.result!))) then
IO.wait t
return
let some cancelTk := ( readThe Core.Context).cancelTk? | unreachable!
let act Elab.Term.wrapAsyncAsSnapshot (cancelTk? := none) fun _ => do
let ctx readThe Core.Context
-- TODO: `CancelToken` should probably use `Promise`
while true do
if ( cancelTk.isSet) then
break
IO.sleep 30
IO.eprintln "cancelled!"
log "cancelled (should never be visible)"
prom.resolve ()
Core.checkInterrupted
let t BaseIO.asTask (act ())
Core.logSnapshotTask { stx? := none, task := t, cancelTk? := cancelTk }
dbg_trace "blocked!"
log "blocked"

View File

@@ -209,14 +209,14 @@ def fetchFileTrace (file : FilePath) (text := false) : JobM BuildTrace := do
/--
Builds `file` using `build` unless it already exists and the current job's
trace matches the trace stored in the `.trace` file. If built, save the new
trace and cache `file`'s hash in a `.hash` file. Otherwise, try to fetch the
trace matches the trace stored in the `.trace` file. If built, saves the new
trace and caches `file`'s hash in a `.hash` file. Otherwise, tries to fetch the
hash from the `.hash` file using `fetchFileTrace`. Build logs (if any) are
saved to the trace file and replayed from there if the build is skipped.
For example, given `file := "foo.c"`, compares `getTrace` with that in
`foo.c.trace` with the hash cached in `foo.c.hash` and the log cached in
`foo.c.trace`.
`foo.c.trace`. If built, the hash is cached in `foo.c.hash` and the new
trace is saved to `foo.c.trace` (including the build log).
If `text := true`, `file` is hashed as a text file rather than a binary file.
-/
@@ -395,6 +395,29 @@ private def mkLinkObjArgs
args := args.push s!"-l{lib.name}"
return args
/--
Topologically sorts the library dependency tree by name.
Libraries come *before* their dependencies.
-/
private partial def mkLinkOrder (libs : Array Dynlib) : JobM (Array Dynlib) := do
let r := libs.foldlM (m := Except (Cycle String)) (init := ({}, #[])) fun (v, o) lib =>
go lib [] v o
match r with
| .ok (_, order) => pure order
| .error cycle => error s!"library dependency cycle:\n{formatCycle cycle}"
where
go lib (ps : List String) (v : RBMap String Unit compare) (o : Array Dynlib) := do
let o := o.push lib
if v.contains lib.name then
return (v, o)
if ps.contains lib.name then
throw (lib.name :: ps)
let ps := lib.name :: ps
let v := v.insert lib.name ()
let (v, o) lib.deps.foldlM (init := (v, o)) fun (v, o) lib =>
go lib ps v o
return (v, o)
/--
Build a shared library by linking the results of `linkJobs`
using the Lean toolchain's C compiler.
@@ -404,7 +427,7 @@ def buildSharedLib
(linkObjs : Array (Job FilePath)) (linkLibs : Array (Job Dynlib))
(weakArgs traceArgs : Array String := #[]) (linker := "c++")
(extraDepTrace : JobM _ := pure BuildTrace.nil)
(plugin := false)
(plugin := false) (linkDeps := Platform.isWindows)
: SpawnM (Job Dynlib) :=
(Job.collectArray linkObjs).bindM fun objs => do
(Job.collectArray linkLibs).mapM (sync := true) fun libs => do
@@ -412,9 +435,10 @@ def buildSharedLib
addPlatformTrace -- shared libraries are platform-dependent artifacts
addTrace ( extraDepTrace)
buildFileUnlessUpToDate' libFile do
let libs if linkDeps then mkLinkOrder libs else pure #[]
let args := mkLinkObjArgs objs libs ++ weakArgs ++ traceArgs
compileSharedLib libFile args linker
return {name := libName, path := libFile, plugin}
return {name := libName, path := libFile, deps := libs, plugin}
/--
Build a shared library by linking the results of `linkJobs`
@@ -424,6 +448,7 @@ def buildLeanSharedLib
(libName : String) (libFile : FilePath)
(linkObjs : Array (Job FilePath)) (linkLibs : Array (Job Dynlib))
(weakArgs traceArgs : Array String := #[]) (plugin := false)
(linkDeps := Platform.isWindows)
: SpawnM (Job Dynlib) :=
(Job.collectArray linkObjs).bindM fun objs => do
(Job.collectArray linkLibs).mapM (sync := true) fun libs => do
@@ -432,10 +457,11 @@ def buildLeanSharedLib
addPlatformTrace -- shared libraries are platform-dependent artifacts
buildFileUnlessUpToDate' libFile do
let lean getLeanInstall
let libs if linkDeps then mkLinkOrder libs else pure #[]
let args := mkLinkObjArgs objs libs ++
weakArgs ++ traceArgs ++ lean.ccLinkSharedFlags
compileSharedLib libFile args lean.cc
return {name := libName, path := libFile, plugin}
return {name := libName, path := libFile, deps := libs, plugin}
/--
Build an executable by linking the results of `linkJobs`
@@ -453,6 +479,7 @@ def buildLeanExe
addPlatformTrace -- executables are platform-dependent artifacts
buildFileUnlessUpToDate' exeFile do
let lean getLeanInstall
let libs mkLinkOrder libs
let args := mkLinkObjArgs objs libs ++
weakArgs ++ traceArgs ++ lean.ccLinkFlags sharedLean
compileExe exeFile args lean.cc

View File

@@ -36,7 +36,7 @@ abbrev RecBuildT (m : Type → Type) :=
/-- Log build cycle and error. -/
@[specialize] def buildCycleError [MonadError m] (cycle : Cycle BuildKey) : m α :=
error s!"build cycle detected:\n{"\n".intercalate <| cycle.map (s!" {·}")}"
error s!"build cycle detected:\n{formatCycle cycle}"
instance [Monad m] [MonadError m] : MonadCycleOf BuildKey (RecBuildT m) where
throwCycle := buildCycleError

View File

@@ -33,12 +33,12 @@ def buildImportsAndDeps
let precompileImports ( computePrecompileImportsAux leanFile imports).await
let pkgs := precompileImports.foldl (·.insert ·.pkg) OrdPackageSet.empty |>.toArray
let externLibsJob fetchExternLibs pkgs
let impLibsJob fetchImportLibs imports
let impLibsJob fetchImportLibs precompileImports
let dynlibsJob root.dynlibs.fetchIn root
let pluginsJob root.plugins.fetchIn root
modJob.bindM fun _ =>
impLibsJob.bindM fun impLibs =>
dynlibsJob.bindM fun dynlibs =>
pluginsJob.bindM fun plugins =>
externLibsJob.mapM fun externLibs => do
return computeModuleDeps impLibs externLibs dynlibs plugins
impLibsJob.bindM (sync := true) fun impLibs =>
dynlibsJob.bindM (sync := true) fun dynlibs =>
pluginsJob.bindM (sync := true) fun plugins =>
externLibsJob.mapM (sync := true) fun externLibs => do
computeModuleDeps impLibs externLibs dynlibs plugins

View File

@@ -96,27 +96,22 @@ protected def LeanLib.recBuildShared (self : LeanLib) : FetchM (Job Dynlib) := d
let objJobs self.moreLinkObjs.foldlM (init := objJobs)
(·.push <$> ·.fetchIn self.pkg)
let libJobs id do
-- Fetch dependnecy dynlibs
-- for platforms that must link to them (e.g., Windows)
if Platform.isWindows then
let imps mods.foldlM (init := OrdModuleSet.empty) fun imps mod => do
return imps.appendArray ( ( mod.transImports.fetch).await)
let s := (NameSet.empty.insert self.name, #[])
let (_, jobs) imps.foldlM (init := s) fun (libs, jobs) imp => do
if libs.contains imp.lib.name then
return (libs, jobs)
else
let job imp.lib.shared.fetch
let moreJobs imp.lib.moreLinkLibs.mapM (·.fetchIn self.pkg)
let jobs := jobs.push job ++ moreJobs
return (libs.insert imp.lib.name, jobs)
let jobs self.moreLinkLibs.foldlM
(·.push <$> ·.fetchIn self.pkg) jobs
let jobs self.pkg.externLibs.foldlM
(·.push <$> ·.dynlib.fetch) jobs
return jobs
else
return #[]
-- Fetch dependency dynlibs
-- for situations that need them (see `Dynlib.deps`)
let imps mods.foldlM (init := OrdModuleSet.empty) fun imps mod => do
return imps.appendArray ( ( mod.transImports.fetch).await)
let s := (NameSet.empty.insert self.name, #[])
let (_, jobs) imps.foldlM (init := s) fun (libs, jobs) imp => do
if libs.contains imp.lib.name then
return (libs, jobs)
else
let jobs jobs.push <$> imp.lib.shared.fetch
return (libs.insert imp.lib.name, jobs)
let jobs self.moreLinkLibs.foldlM
(·.push <$> ·.fetchIn self.pkg) jobs
let jobs self.pkg.externLibs.foldlM
(·.push <$> ·.dynlib.fetch) jobs
return jobs
buildLeanSharedLib self.libName self.sharedLibFile objJobs libJobs
self.weakLinkArgs self.linkArgs (plugin := self.roots.size == 1)

View File

@@ -106,54 +106,70 @@ def fetchExternLibs (pkgs : Array Package) : FetchM (Job (Array Dynlib)) :=
Job.collectArray <$> pkgs.flatMapM (·.externLibs.mapM (·.dynlib.fetch))
private def Module.fetchImportLibsCore
(self : Module) (imps : Array Module)
(init : NameSet × Array (Job Dynlib)): FetchM (NameSet × Array (Job Dynlib))
:= imps.foldlM (init := init) fun (libs, jobs) imp => do
if libs.contains imp.lib.name then
return (libs, jobs)
else if self.lib.name = imp.lib.name then
let job imp.dynlib.fetch
return (libs, jobs.push job)
else
let jobs imp.lib.moreLinkLibs.foldlM (init := jobs)
(·.push <$> ·.fetchIn imp.pkg)
-- Lean wants the external library symbols before module symbols.
let jobs jobs.push <$> imp.lib.shared.fetch
return (libs.insert imp.lib.name, jobs)
(self : Module) (imps : Array Module) (compileSelf : Bool)
(init : NameSet × Array (Job Dynlib))
: FetchM (NameSet × Array (Job Dynlib)) :=
imps.foldlM (init := init) fun (libs, jobs) imp => do
if libs.contains imp.lib.name then
return (libs, jobs)
else if compileSelf && self.lib.name = imp.lib.name then
let job imp.dynlib.fetch
return (libs, jobs.push job)
else if compileSelf || imp.shouldPrecompile then
let jobs jobs.push <$> imp.lib.shared.fetch
return (libs.insert imp.lib.name, jobs)
else
return (libs, jobs)
/--
Computes the transitive dynamic libraries of a module's imports.
Modules from the same library are loaded individually, while modules
from other libraries are loaded as part of the whole library.
-/
@[inline] private def Module.fetchImportLibs
(self : Module) (imps : Array Module) : FetchM (Job (Array Dynlib))
:= do
let s self.fetchImportLibsCore imps ({}, #[])
return Job.collectArray s.2
(self : Module) (imps : Array Module) (compileSelf : Bool)
: FetchM (Array (Job Dynlib)) :=
(·.2) <$> self.fetchImportLibsCore imps compileSelf ({}, #[])
/-- Fetch the dynlibs of a list of imports. **For internal use.** -/
@[inline] def fetchImportLibs
(mods : Array Module) : FetchM (Job (Array Dynlib))
:= do
let (_, jobs) mods.foldlM (init := ({}, #[])) fun s mod => do
let precompileImports
if mod.shouldPrecompile
then mod.transImports.fetch
else mod.precompileImports.fetch
let precompileImports precompileImports.await
mod.fetchImportLibsCore precompileImports s
let imports ( mod.imports.fetch).await
mod.fetchImportLibsCore imports mod.shouldPrecompile s
let jobs mods.foldlM (init := jobs) fun jobs mod => do
if mod.shouldPrecompile
then jobs.push <$> mod.dynlib.fetch
if mod.shouldPrecompile then
jobs.push <$> mod.dynlib.fetch
else return jobs
return Job.collectArray jobs
/--
Topologically sorts the library dependency tree by name.
Libraries come *after* their dependencies.
-/
private partial def mkLoadOrder (libs : Array Dynlib) : FetchM (Array Dynlib) := do
let r := libs.foldlM (m := Except (Cycle String)) (init := ({}, #[])) fun (v, o) lib =>
go lib [] v o
match r with
| .ok (_, order) => pure order
| .error cycle => error s!"library dependency cycle:\n{formatCycle cycle}"
where
go lib (ps : List String) (v : RBMap String Unit compare) (o : Array Dynlib) := do
if v.contains lib.name then
return (v, o)
if ps.contains lib.name then
throw (lib.name :: ps)
let ps := lib.name :: ps
let v := v.insert lib.name ()
let (v, o) lib.deps.foldlM (init := (v, o)) fun (v, o) lib =>
go lib ps v o
let o := o.push lib
return (v, o)
def computeModuleDeps
(impLibs : Array Dynlib) (externLibs : Array Dynlib)
(dynlibs : Array Dynlib) (plugins : Array Dynlib)
: ModuleDeps := Id.run do
: FetchM ModuleDeps := do
/-
Requirements:
* Lean wants the external library symbols before module symbols.
@@ -162,8 +178,9 @@ def computeModuleDeps
Everything else loads fine with just the augmented library path.
* Linux needs the augmented path to resolve nested dependencies in dynlibs.
-/
let mut plugins := plugins
let impLibs mkLoadOrder impLibs
let mut dynlibs := externLibs ++ dynlibs
let mut plugins := plugins
for impLib in impLibs do
if impLib.plugin then
plugins := plugins.push impLib
@@ -190,20 +207,24 @@ def Module.recBuildDeps (mod : Module) : FetchM (Job ModuleDeps) := ensureJob do
if imp.name = mod.name then
logError s!"{mod.leanFile}: module imports itself"
imp.olean.fetch
/-
Remark: It should be possible to avoid transitive imports here when the module
itself is precompiled, but they are currently kept to preserve the "bad import" errors.
-/
let precompileImports if mod.shouldPrecompile then
mod.transImports.fetch else mod.precompileImports.fetch
let precompileImports precompileImports.await
let importLibsJob mod.fetchImportLibs precompileImports
let pkgs := precompileImports.foldl (·.insert ·.pkg) OrdPackageSet.empty
let pkgs := if mod.shouldPrecompile then pkgs.insert mod.pkg else pkgs
let externLibsJob fetchExternLibs pkgs.toArray
let impLibsJob Job.collectArray <$>
mod.fetchImportLibs precompileImports mod.shouldPrecompile
let externLibsJob Job.collectArray <$>
if mod.shouldPrecompile then mod.pkg.externLibs.mapM (·.dynlib.fetch) else pure #[]
let dynlibsJob mod.dynlibs.fetchIn mod.pkg
let pluginsJob mod.plugins.fetchIn mod.pkg
extraDepJob.bindM fun _ => do
importJob.bindM (sync := true) fun _ => do
let depTrace takeTrace
importLibsJob.bindM (sync := true) fun impLibs => do
impLibsJob.bindM (sync := true) fun impLibs => do
externLibsJob.bindM (sync := true) fun externLibs => do
dynlibsJob.bindM (sync := true) fun dynlibs => do
pluginsJob.mapM (sync := true) fun plugins => do
@@ -211,7 +232,7 @@ def Module.recBuildDeps (mod : Module) : FetchM (Job ModuleDeps) := ensureJob do
| none => addTrace depTrace
| some false => addTrace depTrace; addPlatformTrace
| some true => setTrace depTrace
return computeModuleDeps impLibs externLibs dynlibs plugins
computeModuleDeps impLibs externLibs dynlibs plugins
/-- The `ModuleFacetConfig` for the builtin `depsFacet`. -/
def Module.depsFacetConfig : ModuleFacetConfig depsFacet :=
@@ -366,44 +387,21 @@ def Module.oFacetConfig : ModuleFacetConfig oFacet :=
Recursively build the shared library of a module
(e.g., for `--load-dynlib` or `--plugin`).
-/
-- TODO: Return `Job OrdModuleSet × OrdPackageSet` or `OrdRBSet Dynlib`?
def Module.recBuildDynlib (mod : Module) : FetchM (Job Dynlib) :=
withRegisterJob s!"{mod.name}:dynlib" do
-- Fetch object files
let modLinkJobs mod.nativeFacets true |>.mapM (·.fetch mod)
let modLinksJob := Job.collectArray modLinkJobs
-- Build dynlib
let buildDynlib (moreLibs : Array Dynlib) :=
modLinksJob.mapM fun modLinks => do
addLeanTrace
addPlatformTrace -- shared libraries are platform-dependent artifacts
addPureTrace mod.linkArgs
buildFileUnlessUpToDate' mod.dynlibFile do
let lean getLeanInstall
let args :=
modLinks.map toString ++
moreLibs.map (·.path.toString) ++
mod.weakLinkArgs ++ mod.linkArgs ++ lean.ccLinkSharedFlags
compileSharedLib mod.dynlibFile args lean.cc
return mod.dynlibFile, mod.dynlibName, true
let objJobs (mod.nativeFacets true).mapM (·.fetch mod)
-- Fetch dependencies' dynlibs
-- for platforms that must link to them (e.g., Windows)
if Platform.isWindows then
let imps ( mod.transImports.fetch).await
let impLibs mod.fetchImportLibs imps
let dynlibsJob (mod.dynlibs ++ mod.plugins).fetchIn mod.pkg
let pkgs := imps.foldl (·.insert ·.pkg) OrdPackageSet.empty
|>.insert mod.pkg |>.toArray
let externLibsJob fetchExternLibs pkgs
impLibs.bindM fun impLibs =>
dynlibsJob.bindM (sync := true) fun dynlibs =>
externLibsJob.bindM (sync := true) fun externLibs =>
buildDynlib (impLibs ++ externLibs ++ dynlibs)
else
buildDynlib #[]
let libJobs id do
let imps ( mod.imports.fetch).await
let libJobs mod.fetchImportLibs imps true
let libJobs mod.lib.moreLinkLibs.foldlM
(·.push <$> ·.fetchIn mod.pkg) libJobs
let libJobs mod.pkg.externLibs.foldlM
(·.push <$> ·.dynlib.fetch) libJobs
return libJobs
buildLeanSharedLib mod.dynlibName mod.dynlibFile objJobs libJobs
mod.weakLinkArgs mod.linkArgs (plugin := true)
/-- The `ModuleFacetConfig` for the builtin `dynlibFacet`. -/
def Module.dynlibFacetConfig : ModuleFacetConfig dynlibFacet :=

View File

@@ -4,7 +4,7 @@ Released under Apache 2.0 license as described in the file LICENSE.
Authors: Mac Malone
-/
prelude
import Lake.Build.Data
import Lake.Build.Key
/-!
# Lake Targets

View File

@@ -5,6 +5,7 @@ Authors: Mac Malone
-/
prelude
import Lake.Config.OutFormat
import Lake.Build.Target.Basic
open System Lean
@@ -18,6 +19,11 @@ structure Dynlib where
name : String
/-- Whether this library can be loaded as a plugin. -/
plugin := false
/--
Transitive dependencies of this library for situations that need them
(e.g., linking on Windows, loading via `lean`).
-/
deps : Array Dynlib := #[]
deriving Inhabited, Repr
/-- Optional library directory (for `-L`). -/

View File

@@ -30,7 +30,7 @@ initialize importEnvCache : IO.Ref (Std.HashMap (Array Import) Environment) ←
def importModulesUsingCache (imports : Array Import) (opts : Options) (trustLevel : UInt32) : IO Environment := do
if let some env := ( importEnvCache.get)[imports]? then
return env
let env importModules imports opts trustLevel
let env importModules (loadExts := true) imports opts trustLevel
importEnvCache.modify (·.insert imports env)
return env

View File

@@ -77,7 +77,7 @@ abbrev DepStackT m := CallStackT Name m
/-- Log dependency cycle and error. -/
@[specialize] def depCycleError [MonadError m] (cycle : Cycle Name) : m α :=
error s!"dependency cycle detected:\n{"\n".intercalate <| cycle.map (s!" {·}")}"
error s!"dependency cycle detected:\n{formatCycle cycle}"
instance [Monad m] [MonadError m] : MonadCycleOf Name (DepStackT m) where
throwCycle := depCycleError

View File

@@ -6,6 +6,7 @@ Authors: Mac Malone
prelude
import Init.Control.Except
import Init.Data.List.Basic
import Init.Data.ToString
namespace Lake
@@ -15,6 +16,9 @@ abbrev CallStack κ := List κ
/-- A `CallStack` ending in a cycle. -/
abbrev Cycle κ := CallStack κ
def formatCycle [ToString κ] (cycle : Cycle κ) : String :=
"\n".intercalate <| cycle.map (s!" {·}")
/-- A monad equipped with a call stack. -/
class MonadCallStackOf (κ : semiOutParam (Type u)) (m : Type u Type v) where
getCallStack : m (CallStack κ)

View File

@@ -18,7 +18,8 @@ A Lake configuration file defines the package's basic configuration. It also typ
+ [Build & Run](#build--run)
+ [Test & Lint](#test--lint)
+ [Cloud Releases](#cloud-releases)
* [Defining Build Targets](#defining-build-targets)
* [Specifying Targets](#specifying-targets)
* [Defining Targets](#defining-targets)
+ [Lean Libraries](#lean-libraries)
+ [Binary Executables](#binary-executables)
+ [External Libraries](#external-libraries)
@@ -202,13 +203,16 @@ These options configure how code is built and run in the package. Libraries, exe
* `moreGlobalServerArgs`: An `Array` of additional arguments to pass to `lean --server` which apply both to this package and anything else in the same server session (e.g. when browsing other packages from the same session via go-to-definition)
* `buildType`: The `BuildType` of targets in the package (see [`CMAKE_BUILD_TYPE`](https://stackoverflow.com/a/59314670)). One of `debug`, `relWithDebInfo`, `minSizeRel`, or `release`. Defaults to `release`.
* `leanOptions`: Additional options to pass to both the Lean language server (i.e., `lean --server`) launched by `lake serve` and to `lean` while compiling Lean source files.
* `dynlibs`: An `Array` of `Dynlib` shared library [targets](#specifying-targets) to load during elaboration (e.g., via `lean --load-dynlib`).
* `plugins`: An `Array` of `Dynlib` shared library [targets](#specifying-targets) to load as plugins during elaboration (e.g., via `lean --plugin`). Plugins, unlike `dynlibs`, have an initializer which is executed upon loading the shared library. The initialization function follows the naming convention of a Lean module initializer.
* `moreLeanArgs`: An `Array` of additional arguments to pass to `lean` while compiling Lean source files.
* `weakLeanArgs`: An `Array` of additional arguments to pass to `lean` while compiling Lean source files. Unlike `moreLeanArgs`, these arguments do not affect the trace of the build result, so they can be changed without triggering a rebuild. They come *before* `moreLeanArgs`.
* `moreLeancArgs`: An `Array` of additional arguments to pass to `leanc` while compiling the C source files generated by `lean`. Lake already passes some flags based on the `buildType`, but you can change this by, for example, adding `-O0` and `-UNDEBUG`.
* `weakLeancArgs`: An `Array` of additional arguments to pass to `leanc` while compiling the C source files generated by `lean`. Unlike `moreLeancArgs`, these arguments do not affect the trace of the build result, so they can be changed without triggering a rebuild. They come *before* `moreLeancArgs`.
* `moreLinkArgs`: An `Array` of additional arguments to pass to `leanc` when linking (e.g., binary executables or shared libraries). These will come *after* the paths of `extern_lib` targets.
* `weakLinkArgs`: An `Array` of additional arguments to pass to `leanc` when linking (e.g., binary executables or shared libraries) Unlike `moreLinkArgs`, these arguments do not affect the trace of the build result, so they can be changed without triggering a rebuild. They come *before* `moreLinkArgs`.
* `extraDepTargets`: An `Array` of [target](#custom-targets) names that the package should always build before anything else.
* `moreLinkObjs`: An `Array` of `FilePath` [targets](#specifying-targets) producing additional native objects (e.g., static libraries or `.o` object files) to statically link to the library.
* `moreLinkLibs`: An `Array` of `Dynlib` [targets](#specifying-targets) to dynamically link to the library.
### Test & Lint
@@ -230,9 +234,81 @@ These options define a cloud release for the package. See the section on [GitHub
Defaults to `{(pkg-)name}-{System.Platform.target}.tar.gz`.
* `preferReleaseBuild`: Whether to prefer downloading a prebuilt release (from GitHub) rather than building this package from the source when this package is used as a dependency.
## Defining Build Targets
## Specifying Targets
A Lake package can have many build targets, such as different Lean libraries and multiple binary executables. Any number of these declarations can be marked with the `@[default_target]` attribute to tell Lake to build them on a bare `lake build` of the package.
Some Lake CLI commands take targets (e.g., `lake build`, `lake query`) and certain Lake configuration options (e.g., `needs`, `plugins`, `moreLinkObjs`) accept targets. In general, targets can be specified with the following syntax.
```
[@[<package>]/][<target>|[+]<module>][:<facet>]
```
| Example | What It Specifies |
| ------------| --------------------------------------------------------------|
| `a` | The default facet(s) of target `a` |
| `@a` | the default target(s) of package `a` |
| `+A` | The default facets(s) of module `A` |
| `:foo` | The facet `foo` of the root package |
| `@/a` | The default facet(s) of target `a` of the root package |
| `@a/b` | The default facet(s) of target `b` of package `a` |
| `@a/+A` | The default facet(s) of module `A` of package `a` |
| `@a/+A:c` | The facet `c` of module `A` of package `a` (e.g., its C file) |
On the command line or in a TOML configuration file, targets are specified as strings.
**lakefile.toml**
```toml
name = "example"
[[lean_lib]]
name = "Plugin"
[[lean_exe]]
name = "exe"
plugins = ["Plugin:shared"] # i.e., @example/Plugin:shared
```
```
$ lake build @example/exe
```
In a Lean configuration file, target specifiers are literals that start with
either `` `@ `` or `` `+ ``.
**lakefile.lean**
```lean
package "example"
input_file foo where
path := "inputs/foo.txt"
text := true
@[default_target]
lean_exe exe where
needs := #[`@/foo] -- i.e., `@example/foo
```
The full literal syntax is:
```
`@[<package>]/[<target>|[+]<module>][:<facet>]
`+<module>[:<facet>]
```
In Lean, targets defined in the same configuration can also be specified by their identifier.
**lakefile.lean**
```lean
input_file foo where
path := "inputs/foo.txt"
text := true
lean_exe exe where
needs := #[foo]
```
## Defining Targets
A Lake package can have many targets, such as different Lean libraries and multiple binary executables. Any number of these declarations can be marked with the `@[default_target]` attribute to tell Lake to build them on a bare `lake build` of the package.
### Lean Libraries
@@ -253,14 +329,21 @@ name = "«target-name»"
**Configuration Options**
* `needs`: An `Array` of [targets](#specifying-targets) to build before the library's modules.
* `srcDir`: The subdirectory of the package's source directory containing the library's source files. Defaults to the package's `srcDir`. (This will be passed to `lean` as the `-R` option.)
* `roots`: An `Array` of root module `Name`(s) of the library. Submodules of these roots (e.g., `Lib.Foo` of `Lib`) are considered part of the library. Defaults to a single root of the target's name.
* `globs`: An `Array` of module `Glob`(s) to build for the library. The term `glob` comes from [file globs](https://en.wikipedia.org/wiki/Glob_(programming)) (e.g., `foo/*`) on Unix. A submodule glob builds every Lean source file within the module's directory (i.e., ``Glob.submodules `Foo`` is essentially equivalent to a theoretical `import Foo.*`). Local imports of glob'ed files (i.e., fellow modules of the workspace) are also recursively built. Defaults to a `Glob.one` of each of the library's `roots`.
* `libName`: The `String` name of the library. Used as a base for the file names of its static and dynamic binaries. Defaults to the name of the target.
* `extraDepTargets`: An `Array` of [target](#custom-targets) names to build before the library's modules.
* `defaultFacets`: An `Array` of library facets to build on a bare `lake build` of the library. For example, setting this to `#[LeanLib.sharedLib]` will build the shared library facet.
* `nativeFacets`: A function `(shouldExport : Bool) → Array` determining the [module facets](#defining-new-facets) to build and combine into the library's static and shared libraries. If `shouldExport` is true, the module facets should export any symbols a user may expect to lookup in the library. For example, the Lean interpreter will use exported symbols in linked libraries. Defaults to a singleton of `Module.oExportFacet` (if `shouldExport`) or `Module.oFacet`. That is, the object files compiled from the Lean sources, potentially with exported Lean symbols.
* `platformIndependent`, `precompileModules`, `buildType`, `leanOptions`, `<more|weak><Lean|Leanc|Link>Args`, `moreServerOptions`: Augments the package's corresponding configuration option. The library's arguments come after, modules are precompiled if either the library or package are, `platformIndependent` falls back to the package on `none`, and the build type is the minimum of the two (`debug` is the lowest, and `release` is the highest).
The following options augment the package's corresponding configuration option.
* `buildType`: Minimum of the two settings — lowest is `debug`, highest is `release`.
* `precompileModules`: `true` if either are `true`.
* `platformIndependent`: Falls back to the package's setting on `none`.
* `leanOptions`, `moreServerOptions`: Merges them and the library's takes precedence.
* `dynlibs`, `plugins`, `<more|weak><Lean|Leanc|Link><Args|Objs|Libs>`: Appends them after the package's.
### Binary Executables
@@ -281,16 +364,25 @@ name = "«target-name»"
**Configuration Options**
* `needs`: An `Array` of [targets](#specifying-targets) to build before the executable's module.
* `srcDir`: The subdirectory of the package's source directory containing the executable's source file. Defaults to the package's `srcDir`. (This will be passed to `lean` as the `-R` option.)
* `root`: The root module `Name` of the binary executable. Should include a `main` definition that will serve as the entry point of the program. The root is built by recursively building its local imports (i.e., fellow modules of the workspace). Defaults to the name of the target.
* `exeName`: The `String` name of the binary executable. Defaults to the target name with any `.` replaced with a `-`.
* `extraDepTargets`: An `Array` of [target](#custom-targets) names to build before the executable's modules.
* `nativeFacets`: A function `(shouldExport : Bool) → Array` determining the [module facets](#defining-new-facets) to build and link into the executable. If `shouldExport` is true, the module facets should export any symbols a user may expect to lookup in the library. For example, the Lean interpreter will use exported symbols in linked libraries. Defaults to a singleton of `Module.oExportFacet` (if `shouldExport`) or `Module.oFacet`. That is, the object file compiled from the Lean source, potentially with exported Lean symbols.
* `supportInterpreter`: Whether to expose symbols within the executable to the Lean interpreter. This allows the executable to interpret Lean files (e.g., via `Lean.Elab.runFrontend`). Implementation-wise, on Windows, the Lean shared libraries are linked to the executable and, on other systems, the executable is linked with `-rdynamic`. This increases the size of the binary on Linux and, on Windows, requires `libInit_shared.dll` and `libleanshared.dll` to be co-located with the executable or part of `PATH` (e.g., via `lake exe`). Thus, this feature should only be enabled when necessary. Defaults to `false`.
* `platformIndependent`, `precompileModules`, `buildType`, `leanOptions`, `<more|weak><Lean|Leanc|Link>Args`, `moreServerOptions`: Augments the package's corresponding configuration option. The library's arguments come after, modules are precompiled if either the library or package are, `platformIndependent` falls back to the package on `none`, and the build type is the minimum of the two (`debug` is the lowest, and `release` is the highest).
The following options augment the package's corresponding configuration option.
* `buildType`: Minimum of the two settings — lowest is `debug`, highest is `release`.
* `precompileModules`: `true` if either are `true`.
* `platformIndependent`: Falls back to the package's setting on `none`.
* `leanOptions`, `moreServerOptions`: Merges them and the executable's takes precedence.
* `dynlibs`, `plugins`, `<more|weak><Lean|Leanc|Link><Args|Objs|Libs>`: Appends them after the package's.
### External Libraries
**NOTE: `extern_lib` targets are deprecated. Use a [custom `target`](#custom-targets) in conjunction with `moreLinkObjs` / `moreLinkLibs` instead.**
A external library target is a non-Lean **static** library that will be linked to the binaries of the package and its dependents (e.g., their shared libraries and executables).
**Important:** For the external library to link properly when `precompileModules` is on, the static library produced by an `extern_lib` target must follow the platform's naming conventions for libraries (i.e., be named `foo.a` on Windows and `libfoo.a` on Unix). To make this easy, there is the `Lake.nameToStaticLib` utility function to convert a library name into its proper file name for the platform.

View File

@@ -1,9 +1,5 @@
#!/usr/bin/env bash
set -euo pipefail
exit 0 # TODO: flaky test disabled
LAKE=${LAKE:-../../.lake/build/bin/lake}
source ../common.sh
./clean.sh
@@ -14,36 +10,6 @@ LAKE=${LAKE:-../../.lake/build/bin/lake}
# https://github.com/leanprover/lean4/issues/3351
# https://github.com/leanprover/lean4/issues/3809
test_run() {
echo "[Command]"
echo "$> lake" "$@"
if $LAKE "$@" >produced.out 2>&1; then
rc=$?
else
rc=$?
fi
echo "Lake exited with code $rc"
echo "[Output]"
cat produced.out
return $rc
}
test_err() {
expected=$1; shift
if test_run "$@"; then rc=$?; else rc=$?; fi
echo "[Match \"$expected\"]"
if grep --color -F "$expected" produced.out; then
if [ $rc != 1 ]; then
echo "[Outcome]"
echo "Lake unexpectedly succeeded."
return 1
fi
else
echo "No match found."
return 1
fi
}
# Test a module with a bad import does not kill the whole build
test_err "Building Etc" build Lib.U Etc
# Test importing a missing module from outside the workspace

View File

@@ -1,41 +1,11 @@
#!/usr/bin/env bash
set -euo pipefail
exit 0 # TODO: flaky test disabled
LAKE=${LAKE:-../../.lake/build/bin/lake}
source ../common.sh
./clean.sh
# Test that changing `moreLean/Leanc/LinkArgs` triggers a rebuild
# Test that changing `weakLean/Leanc/LinkArgs` does not
test_run() {
echo "[Command]"
echo "$> lake" "$@"
if $LAKE "$@" >produced.out 2>&1; then
rc=$?
else
rc=$?
fi
echo "Lake exited with code $rc"
echo "[Output]"
cat produced.out
return $rc
}
test_out() {
expected=$1; shift
if test_run "$@"; then rc=$?; else rc=$?; fi
echo "[Match \"$expected\"]"
if grep --color -F "$expected" produced.out; then
return $rc
else
echo "No match found."
return 1
fi
}
# Test `leanArgs`
test_run build +Hello -R
# see https://github.com/leanprover/lake/issues/50

83
src/lake/tests/common.sh Normal file
View File

@@ -0,0 +1,83 @@
set -euo pipefail
LAKE=${LAKE:-lake}
if [ "${OS:-}" = Windows_NT ]; then
LIB_PREFIX=
SHARED_LIB_EXT=dll
elif [ "`uname`" = Darwin ]; then
LIB_PREFIX=lib
SHARED_LIB_EXT=dylib
else
LIB_PREFIX=lib
SHARED_LIB_EXT=so
fi
test_run() {
echo "[COMMAND]"
echo "$> lake" "$@"
if $LAKE "$@" >produced.out 2>&1; then
rc=$?
else
rc=$?
fi
echo "Lake exited with code $rc"
echo "[OUTPUT]"
cat produced.out
return $rc
}
match_out() {
expected=$1; shift
echo "[MATCH \"$expected\"]"
if grep --color -F -- "$expected" produced.out; then
return 0
else
echo "No match found."
return 1
fi
}
test_out() {
expected=$1; shift
if test_run "$@"; then rc=$?; else rc=$?; fi
match_out "$expected"
return $rc
}
no_match_out() {
expected=$1; shift
echo "[NO MATCH \"$expected\"]"
if grep --color -F -- "$expected" produced.out; then
return 1
else
echo "No match found."
return 0
fi
}
test_not_out() {
expected=$1; shift
if test_run "$@"; then rc=$?; else rc=$?; fi
no_match_out "$expected"
return $rc
}
test_err() {
expected=$1; shift
if test_run "$@"; then rc=$?; else rc=$?; fi
if match_out "$expected"; then
if [ $rc != 1 ]; then
echo "[OUTCOME]"
echo "Lake unexpectedly succeeded."
return 1
fi
fi
}
test_maybe_err() {
expected=$1; shift
test_run "$@" || true
match_out "$expected"
}

View File

@@ -1 +0,0 @@
rm -rf .lake lake-manifest.json

View File

@@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -exo pipefail
LAKE=${LAKE:-../../.lake/build/bin/lake}
if [ "$OS" = Windows_NT ]; then
LIB_PREFIX=
SHARED_LIB_EXT=dll
elif [ "`uname`" = Darwin ]; then
LIB_PREFIX=lib
SHARED_LIB_EXT=dylib
else
LIB_PREFIX=lib
SHARED_LIB_EXT=so
fi
./clean.sh
# Test that `moreLinkArgs` are included when linking precompiled modules
($LAKE build -KlinkArgs=-lBaz 2>&1 || true) | grep --color -- "-lBaz"
# Test that dynlibs are part of the module trace unless `platformIndependent` is set
$LAKE build -R
echo foo > .lake/build/lib/lean/Foo_Bar.$SHARED_LIB_EXT
($LAKE build 2>&1 --rehash && exit 1 || true) | grep --color "Building Foo"
rm .lake/build/lib/lean/Foo_Bar.$SHARED_LIB_EXT
$LAKE build -R -KplatformIndependent=true
echo foo > .lake/build/lib/lean/Foo_Bar.$SHARED_LIB_EXT
$LAKE build --rehash --no-build

View File

@@ -0,0 +1,3 @@
import Downstream.Import
#eval greetingRef.get

View File

@@ -0,0 +1 @@
import Foo

View File

@@ -1 +1,2 @@
import Foo.Bar
import Foo.Baz

View File

@@ -0,0 +1,3 @@
import FooDep
builtin_initialize greetingRef : IO.Ref String IO.mkRef greeting

View File

@@ -0,0 +1,3 @@
import FooDepDep
def greeting := hello

View File

@@ -0,0 +1 @@
def hello := "Hello"

View File

@@ -0,0 +1 @@
import Foo

View File

@@ -0,0 +1 @@
rm -rf .lake lake-manifest.json produced.out

View File

@@ -8,3 +8,9 @@ lean_lib Foo where
precompileModules := true
platformIndependent := if get_config? platformIndependent |>.isSome then true else none
moreLinkArgs := if let some cfg := get_config? linkArgs then cfg.splitOn " " |>.toArray else #[]
lean_lib FooDep
lean_lib FooDepDep
lean_exe orderTest
lean_lib Downstream

View File

@@ -0,0 +1,7 @@
-- import a module of lib `Foo` which does not import `FooDep`
import Foo.Bar
-- then, import a module which does
import Foo.Baz
def main : IO Unit := do
IO.println ( greetingRef.get)

View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
source ../common.sh
./clean.sh
# Test that the link & load order of precompiled libraries is correct
# https://github.com/leanprover/lean4/issues/7790
test_run -v exe orderTest
# Test that transitively importing a precompiled module
# from a non-precompiled module works
test_not_out '"pluginPaths":[]' -v setup-file bogus Downstream
test_run -v build Downstream
# Test that `moreLinkArgs` are included when linking precompiled modules
./clean.sh
test_maybe_err "-lBogus" build -KlinkArgs=-lBogus
./clean.sh
# Test that dynlibs are part of the module trace unless `platformIndependent` is set
test_run build -R
echo foo > .lake/build/lib/lean/Foo_Bar.$SHARED_LIB_EXT
test_err "Building Foo" build --rehash
rm .lake/build/lib/lean/Foo_Bar.$SHARED_LIB_EXT
test_run build -R -KplatformIndependent=true
echo foo > .lake/build/lib/lean/Foo_Bar.$SHARED_LIB_EXT
test_run build --rehash --no-build
# cleanup
rm -f produced.out

View File

@@ -185,11 +185,12 @@ ENDFOREACH(T)
# bootstrap: too slow
# toolchain: requires elan to download toolchain
# online: downloads remote repositories
# badImport/buildArgs: flaky for unknown reasons
file(GLOB_RECURSE LEANLAKETESTS
"${LEAN_SOURCE_DIR}/lake/tests/test.sh"
"${LEAN_SOURCE_DIR}/lake/examples/test.sh")
FOREACH(T ${LEANLAKETESTS})
if(NOT T MATCHES ".*(lake-packages|bootstrap|toolchain|online).*")
if(NOT T MATCHES ".*(lake-packages|bootstrap|toolchain|online|buildArgs|badImport).*")
GET_FILENAME_COMPONENT(T_DIR ${T} DIRECTORY)
GET_FILENAME_COMPONENT(DIR_NAME ${T_DIR} NAME)
add_test(NAME "leanlaketest_${DIR_NAME}"

View File

@@ -4,7 +4,7 @@ open Lean
open Lean.IR
unsafe def main : IO Unit :=
withImportModules #[{module := `Lean.Compiler.IR.Basic}] {} 0 fun env => do
withImportModules #[{module := `Lean.Compiler.IR.Basic}] {} fun env => do
let ctorLayout IO.ofExcept $ getCtorLayout env `Lean.IR.Expr.reuse;
ctorLayout.fieldInfo.forM $ fun finfo => IO.println (format finfo);
IO.println "---";

View File

@@ -25,6 +25,23 @@ theorem t : True := by
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in a declaration should not invalidate elaboration of previous declarations. -/
theorem t1 : True := by
wait_for_unblock
trivial
example : True := by
trivial
--^ waitFor: blocked
--^ insert: "; unblock"
--^ collectDiagnostics
-- (should print "blocked!" exactly once)
-- RESET
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in a declaration should not invalidate async tasks of previous declarations. -/
theorem t1 : True := by
@@ -36,7 +53,7 @@ example : True := by
--^ waitFor: blocked
--^ insert: "; unblock"
--^ collectDiagnostics
-- (should print "blocked" exactly once)
-- (should print "blocked!" exactly once)
-- RESET
import Lean.Server.Test.Cancel
@@ -50,7 +67,22 @@ theorem t1 : True := by
--^ waitFor: blocked
--^ insert: "; unblock"
--^ collectDiagnostics
-- (should print "blocked" exactly once)
-- (should print "blocked!" exactly once)
-- RESET
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in a tactic should not invalidate elaboration of previous tactics. -/
theorem t1 : True := by
wait_for_unblock
skip
--^ waitFor: blocked
--^ insert: "; trivial\nexample : True := by unblock; trivial"
--^ collectDiagnostics
-- (should print "blocked!" exactly once)
-- (must put `unblock` in a separate decl so it's not blocked by `wait_for_unblock`)
-- RESET
import Lean.Server.Test.Cancel
@@ -71,6 +103,21 @@ theorem t1 : True := by
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in a tactic *should* invalidate elaboration of later tactics. -/
theorem t1 : True := by
skip
--^ waitFor: blocked
--^ insert: "; skip"
--^ collectDiagnostics
-- (should never print "blocked")
wait_for_cancel_once
trivial
-- RESET
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in the body should not invalidate header async tasks. -/
theorem t1 : (by wait_for_unblock_async; exact True) := by
@@ -78,7 +125,7 @@ theorem t1 : (by wait_for_unblock_async; exact True) := by
--^ waitFor: blocked
--^ insert: "; unblock"
--^ collectDiagnostics
-- (should print "blocked" exactly once)
-- (should print "blocked!" exactly once)
trivial
-- RESET
@@ -98,6 +145,19 @@ theorem t1 : (by wait_for_cancel_once_async; exact True) := by
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in the header *should* invalidate header elaboration. -/
theorem t1 : (by wait_for_main_cancel_once_async; exact True) := by
--^ waitFor: blocked
--^ insert: "'"
--^ collectDiagnostics
-- (should never print "blocked")
trivial
-- RESET
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in the body (without incrementality) *should* invalidate body async tasks. -/
theorem t1 : True := (by
@@ -108,3 +168,18 @@ theorem t1 : True := (by
-- (should never print "blocked")
wait_for_cancel_once_async
trivial)
-- RESET
import Lean.Server.Test.Cancel
open Lean.Server.Test.Cancel
/-! Changes in the body (without incrementality) *should* invalidate body elaboration. -/
theorem t1 : True := (by
skip
--^ waitFor: blocked
--^ insert: "; skip"
--^ collectDiagnostics
-- (should never print "blocked")
wait_for_main_cancel_once_async
trivial)

View File

@@ -2,6 +2,26 @@ blocked!
cancelled!
rerun!
{"version": 2, "uri": "file:///cancellation.lean", "diagnostics": []}
blocked!
unblocking!
{"version": 2,
"uri": "file:///cancellation.lean",
"diagnostics":
[{"source": "Lean 4",
"severity": 1,
"range":
{"start": {"line": 7, "character": 2}, "end": {"line": 7, "character": 18}},
"message": "blocked",
"fullRange":
{"start": {"line": 7, "character": 2}, "end": {"line": 7, "character": 18}}},
{"source": "Lean 4",
"severity": 1,
"range":
{"start": {"line": 7, "character": 2}, "end": {"line": 7, "character": 18}},
"message": "blocked",
"fullRange":
{"start": {"line": 7, "character": 2},
"end": {"line": 7, "character": 18}}}]}
unblocking!
{"version": 2,
"uri": "file:///cancellation.lean",
@@ -27,6 +47,29 @@ unblocking!
{"start": {"line": 7, "character": 2},
"end": {"line": 7, "character": 24}}}]}
blocked!
unblocking!
{"version": 2,
"uri": "file:///cancellation.lean",
"diagnostics":
[{"source": "Lean 4",
"severity": 1,
"range":
{"start": {"line": 7, "character": 2}, "end": {"line": 7, "character": 18}},
"message": "blocked",
"fullRange":
{"start": {"line": 7, "character": 2}, "end": {"line": 7, "character": 18}}},
{"source": "Lean 4",
"severity": 1,
"range":
{"start": {"line": 7, "character": 2}, "end": {"line": 7, "character": 18}},
"message": "blocked",
"fullRange":
{"start": {"line": 7, "character": 2},
"end": {"line": 7, "character": 18}}}]}
blocked!
cancelled!
{"version": 2, "uri": "file:///cancellation.lean", "diagnostics": []}
blocked!
cancelled!
{"version": 2, "uri": "file:///cancellation.lean", "diagnostics": []}
unblocking!
@@ -47,3 +90,9 @@ cancelled!
blocked!
cancelled!
{"version": 2, "uri": "file:///cancellation.lean", "diagnostics": []}
blocked!
cancelled!
{"version": 2, "uri": "file:///cancellation.lean", "diagnostics": []}
blocked!
cancelled!
{"version": 2, "uri": "file:///cancellation.lean", "diagnostics": []}

View File

@@ -244,6 +244,7 @@ example (a : Nat) : Nat := _
example (a : Nat) : Nat := sorry
example (a : sorry) : Nat := 0
example (a : Nat) : Nat := by
theorem async (a : Nat) : Nat := by skip
theorem Fin.eqq_of_val_eq {n : Nat} : {x y : Fin n}, x.val = y.val x = y
| _, _, _, rfl => rfl

View File

@@ -58,9 +58,14 @@ a : Nat
⊢ Nat
linterUnusedVariables.lean:244:0-244:7: warning: declaration uses 'sorry'
linterUnusedVariables.lean:245:0-245:7: warning: declaration uses 'sorry'
linterUnusedVariables.lean:246:29-248:7: error: unexpected token 'theorem'; expected '{' or tactic
linterUnusedVariables.lean:246:29-247:7: error: unexpected token 'theorem'; expected '{' or tactic
linterUnusedVariables.lean:246:27-246:29: error: unsolved goals
a : Nat
⊢ Nat
linterUnusedVariables.lean:281:41-281:43: warning: unused variable `ha`
linterUnusedVariables.lean:247:33-247:40: error: unsolved goals
a : Nat
⊢ Nat
linterUnusedVariables.lean:247:0-247:40: error: type of theorem 'async' is not a proposition
Nat → Nat
linterUnusedVariables.lean:282:41-282:43: warning: unused variable `ha`
note: this linter can be disabled with `set_option linter.unusedVariables false`

View File

@@ -6,11 +6,12 @@ open Lean.Meta
instance : ToFormat InstanceEntry where
format e := format e.val
unsafe def tst1 : IO Unit :=
withImportModules #[{module := `Lean}] {} 0 fun env => do
let aux : MetaM Unit := do
let insts getGlobalInstancesIndex
IO.println (format insts)
discard <| aux.run |>.toIO { fileName := "", fileMap := default } { env := env }
unsafe def tst1 : IO Unit := do
let env importModules (loadExts := true) #[{module := `Lean}] {}
let aux : MetaM Unit := do
let insts getGlobalInstancesIndex
assert! insts.size > 0
IO.println (format insts)
discard <| aux.run |>.toIO { fileName := "", fileMap := default } { env := env }
#eval tst1

View File

@@ -3,7 +3,7 @@ import Lean
open Lean
unsafe def tst : IO Unit :=
withImportModules #[{module := `Init.Data.Array}] {} 0 fun env =>
withImportModules #[{module := `Init.Data.Array}] {} fun env =>
match env.find? `Array.foldl with
| some info => do
IO.println (info.instantiateTypeLevelParams [levelZero, levelZero])

View File

@@ -3,18 +3,18 @@ import Lean.Meta
open Lean
open Lean.Meta
unsafe def tstInferType (mods : Array Name) (e : Expr) : IO Unit :=
withImportModules (mods.map $ fun m => {module := m}) {} 0 fun env => do
let (type, _, _) (inferType e : MetaM _).toIO { fileName := "", fileMap := default } { env := env } {} {};
IO.println (toString e ++ " : " ++ toString type)
def tstInferType (e : Expr) : CoreM Unit := do
let env getEnv
let (type, _, _) (inferType e : MetaM _).toIO { fileName := "", fileMap := default } { env := env } {} {};
IO.println (toString e ++ " : " ++ toString type)
unsafe def tstWHNF (mods : Array Name) (e : Expr) (t := TransparencyMode.default) : IO Unit :=
withImportModules (mods.map $ fun m => {module := m}) {} 0 fun env => do
let (s, _, _) (whnf e : MetaM _).toIO { fileName := "", fileMap := default } { env := env };
IO.println (toString e ++ " ==> " ++ toString s)
def tstWHNF (e : Expr) : CoreM Unit := do
let env getEnv
let (s, _, _) (whnf e : MetaM _).toIO { fileName := "", fileMap := default } { env := env };
IO.println (toString e ++ " ==> " ++ toString s)
unsafe def tstIsProp (mods : Array Name) (e : Expr) : IO Unit :=
withImportModules (mods.map $ fun m => {module := m}) {} 0 fun env => do
unsafe def tstIsProp (e : Expr) : CoreM Unit := do
let env getEnv
let (b, _, _) (isProp e : MetaM _).toIO { fileName := "", fileMap := default } { env := env };
IO.println (toString e ++ ", isProp: " ++ toString b)
@@ -26,7 +26,7 @@ mkAppN map #[nat, bool]
/-- info: List.map.{1, 1} Nat Bool : (Nat -> Bool) -> (List.{1} Nat) -> (List.{1} Bool) -/
#guard_msgs in
#eval tstInferType #[`Init.Data.List] t1
#eval tstInferType t1
def t2 : Expr :=
let prop := mkSort levelZero;
@@ -34,7 +34,7 @@ mkForall `x BinderInfo.default prop prop
/-- info: Prop -> Prop : Type -/
#guard_msgs in
#eval tstInferType #[`Init.Core] t2
#eval tstInferType t2
def t3 : Expr :=
let nat := mkConst `Nat [];
@@ -45,7 +45,7 @@ mkForall `x BinderInfo.default nat p
/-- info: forall (x : Nat), Nat.le x 0 : Prop -/
#guard_msgs in
#eval tstInferType #[`Init.Data.Nat] t3
#eval tstInferType t3
def t4 : Expr :=
let nat := mkConst `Nat [];
@@ -54,7 +54,7 @@ mkLambda `x BinderInfo.default nat p
/-- info: fun (x : Nat) => Nat.succ x : Nat -> Nat -/
#guard_msgs in
#eval tstInferType #[`Init.Core] t4
#eval tstInferType t4
def t5 : Expr :=
let add := mkConst `Nat.add [];
@@ -62,11 +62,7 @@ mkAppN add #[mkLit (Literal.natVal 3), mkLit (Literal.natVal 5)]
/-- info: Nat.add 3 5 ==> 8 -/
#guard_msgs in
#eval tstWHNF #[`Init.Data.Nat] t5
/-- info: Nat.add 3 5 ==> 8 -/
#guard_msgs in
#eval tstWHNF #[`Init.Data.Nat] t5 TransparencyMode.reducible
#eval tstWHNF t5
set_option pp.all true
/-- info: @List.cons.{0} Nat : Nat → List.{0} Nat → List.{0} Nat -/
@@ -89,23 +85,23 @@ mkAppN map #[nat, nat, f, xs]
info: List.map.{1, 1} Nat Nat (fun (x : Nat) => Nat.add x 1) (List.cons.{0} Nat 1 (List.cons.{0} Nat 4 (List.nil.{0} Nat))) : List.{1} Nat
-/
#guard_msgs in
#eval tstInferType #[`Init.Data.List] t6
#eval tstInferType t6
/--
info: List.map.{1, 1} Nat Nat (fun (x : Nat) => Nat.add x 1) (List.cons.{0} Nat 1 (List.cons.{0} Nat 4 (List.nil.{0} Nat))) ==> List.cons.{1} Nat ((fun (x : Nat) => Nat.add x 1) 1) (List.map.{1, 1} Nat Nat (fun (x : Nat) => Nat.add x 1) (List.cons.{0} Nat 4 (List.nil.{0} Nat)))
-/
#guard_msgs in
#eval tstWHNF #[`Init.Data.List] t6
#eval tstWHNF t6
/-- info: Prop : Type -/
#guard_msgs in
#eval tstInferType #[] $ mkSort levelZero
#eval tstInferType $ mkSort levelZero
/--
info: fun {a : Type} (x : a) (xs : List.{0} a) => xs : forall {a : Type}, a -> (List.{0} a) -> (List.{0} a)
-/
#guard_msgs in
#eval tstInferType #[`Init.Data.List] $ mkLambda `a BinderInfo.implicit (mkSort levelOne) (mkLambda `x BinderInfo.default (mkBVar 0) (mkLambda `xs BinderInfo.default (mkApp (mkConst `List [levelZero]) (mkBVar 1)) (mkBVar 0)))
#eval tstInferType $ mkLambda `a BinderInfo.implicit (mkSort levelOne) (mkLambda `x BinderInfo.default (mkBVar 0) (mkLambda `xs BinderInfo.default (mkApp (mkConst `List [levelZero]) (mkBVar 1)) (mkBVar 0)))
def t7 : Expr :=
let nat := mkConst `Nat [];
@@ -114,11 +110,11 @@ mkLet `x nat one one
/-- info: let x : Nat := 1; 1 : Nat -/
#guard_msgs in
#eval tstInferType #[`Init.Core] $ t7
#eval tstInferType $ t7
/-- info: let x : Nat := 1; 1 ==> 1 -/
#guard_msgs in
#eval tstWHNF #[`Init.Core] $ t7
#eval tstWHNF $ t7
def t8 : Expr :=
let nat := mkConst `Nat [];
@@ -128,11 +124,11 @@ mkLet `x nat one (mkAppN add #[one, mkBVar 0])
/-- info: let x : Nat := 1; Nat.add 1 x : Nat -/
#guard_msgs in
#eval tstInferType #[`Init.Core] $ t8
#eval tstInferType $ t8
/-- info: let x : Nat := 1; Nat.add 1 x ==> 2 -/
#guard_msgs in
#eval tstWHNF #[`Init.Core] $ t8
#eval tstWHNF $ t8
def t9 : Expr :=
let nat := mkConst `Nat [];
@@ -140,23 +136,23 @@ mkLet `a (mkSort levelOne) nat (mkForall `x BinderInfo.default (mkBVar 0) (mkBVa
/-- info: let a : Type := Nat; a -> a : Type -/
#guard_msgs in
#eval tstInferType #[`Init.Core] $ t9
#eval tstInferType $ t9
/-- info: let a : Type := Nat; a -> a ==> Nat -> Nat -/
#guard_msgs in
#eval tstWHNF #[`Init.Core] $ t9
#eval tstWHNF $ t9
/-- info: 10 : Nat -/
#guard_msgs in
#eval tstInferType #[`Init.Core] $ mkLit (Literal.natVal 10)
#eval tstInferType $ mkLit (Literal.natVal 10)
/-- info: "hello" : String -/
#guard_msgs in
#eval tstInferType #[`Init.Core] $ mkLit (Literal.strVal "hello")
#eval tstInferType $ mkLit (Literal.strVal "hello")
/-- info: [mdata 10] : Nat -/
#guard_msgs in
#eval tstInferType #[`Init.Core] $ mkMData {} $ mkLit (Literal.natVal 10)
#eval tstInferType $ mkMData {} $ mkLit (Literal.natVal 10)
def t10 : Expr :=
let nat := mkConst `Nat [];
@@ -165,39 +161,39 @@ mkLambda `a BinderInfo.default nat (mkApp refl (mkBVar 0))
/-- info: fun (a : Nat) => Eq.refl.{1} Nat a : forall (a : Nat), Eq.{1} Nat a a -/
#guard_msgs in
#eval tstInferType #[`Init.Core] t10
#eval tstInferType t10
/-- info: fun (a : Nat) => Eq.refl.{1} Nat a, isProp: false -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] t10
#eval tstIsProp t10
/-- info: And True True, isProp: true -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] (mkAppN (mkConst `And []) #[mkConst `True [], mkConst `True []])
#eval tstIsProp (mkAppN (mkConst `And []) #[mkConst `True [], mkConst `True []])
/-- info: And, isProp: false -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] (mkConst `And [])
#eval tstIsProp (mkConst `And [])
-- Example where isPropQuick fails
/-- info: id.{0} Prop (And True True), isProp: true -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] (mkAppN (mkConst `id [levelZero]) #[mkSort levelZero, mkAppN (mkConst `And []) #[mkConst `True [], mkConst
#eval tstIsProp (mkAppN (mkConst `id [levelZero]) #[mkSort levelZero, mkAppN (mkConst `And []) #[mkConst `True [], mkConst
`True []]])
/-- info: Eq.{1} Nat 0 1, isProp: true -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] (mkAppN (mkConst `Eq [levelOne]) #[mkConst `Nat [], mkLit (Literal.natVal 0), mkLit (Literal.natVal 1)])
#eval tstIsProp (mkAppN (mkConst `Eq [levelOne]) #[mkConst `Nat [], mkLit (Literal.natVal 0), mkLit (Literal.natVal 1)])
/-- info: forall (x : Nat), Eq.{1} Nat x 1, isProp: true -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] $
#eval tstIsProp $
mkForall `x BinderInfo.default (mkConst `Nat [])
(mkAppN (mkConst `Eq [levelOne]) #[mkConst `Nat [], mkBVar 0, mkLit (Literal.natVal 1)])
/-- info: (fun (x : Nat) => Eq.{1} Nat x 1) 0, isProp: true -/
#guard_msgs in
#eval tstIsProp #[`Init.Core] $
#eval tstIsProp $
mkApp
(mkLambda `x BinderInfo.default (mkConst `Nat [])
(mkAppN (mkConst `Eq [levelOne]) #[mkConst `Nat [], mkBVar 0, mkLit (Literal.natVal 1)]))

View File

@@ -21,12 +21,6 @@ do let v? ← getExprMVarAssignment? m.mvarId!;
| some v => pure v
| none => throwError "metavariable is not assigned")
unsafe def run (mods : Array Name) (x : MetaM Unit) (opts : Options := dbgOpt) : IO Unit :=
withImportModules (mods.map $ fun m => {module := m}) {} 0 fun env => do
let x : MetaM Unit := do { x; printTraces };
discard $ x.toIO { options := opts, fileName := "", fileMap := default } { env := env };
pure ()
def nat := mkConst `Nat
def succ := mkConst `Nat.succ
def add := mkAppN (mkConst `Add.add [levelZero]) #[nat, mkConst `Nat.add]

View File

@@ -13,4 +13,4 @@ def tst : MetaM Unit := do
unsafe def main : IO Unit := do
initSearchPath ( Lean.findSysroot)
withImportModules #[{ module := `UserAttr.Tst : Import }] {} 0 fun env => pure ()
withImportModules #[{ module := `UserAttr.Tst : Import }] {} fun env => pure ()