Merge branch 'sofia/fix-native-decide' into sofia/async-http-headers

This commit is contained in:
Sofia Rodrigues
2026-03-12 14:17:13 -03:00
1964 changed files with 10626 additions and 5109 deletions

6
.gitattributes vendored
View File

@@ -5,9 +5,3 @@ stage0/** binary linguist-generated
# The following file is often manually edited, so do show it in diffs
stage0/src/stdlib_flags.h -binary -linguist-generated
doc/std/grove/GroveStdlib/Generated/** linguist-generated
# These files should not have line endings translated on Windows, because
# it throws off parser tests. Later lines override earlier ones, so the
# runner code is still treated as ordinary text.
tests/lean/docparse/* eol=lf
tests/lean/docparse/*.lean eol=auto
tests/lean/docparse/*.sh eol=auto

View File

@@ -41,7 +41,7 @@ if(NOT (DEFINED STAGE0_CMAKE_EXECUTABLE_SUFFIX))
set(STAGE0_CMAKE_EXECUTABLE_SUFFIX "${CMAKE_EXECUTABLE_SUFFIX}")
endif()
# Don't do anything with cadical on wasm
# Don't do anything with cadical/leantar on wasm
if(NOT CMAKE_SYSTEM_NAME MATCHES "Emscripten")
find_program(CADICAL cadical)
if(NOT CADICAL)
@@ -77,7 +77,44 @@ if(NOT CMAKE_SYSTEM_NAME MATCHES "Emscripten")
set(CADICAL ${CMAKE_BINARY_DIR}/cadical/cadical${CMAKE_EXECUTABLE_SUFFIX})
list(APPEND EXTRA_DEPENDS cadical)
endif()
list(APPEND CL_ARGS -DCADICAL=${CADICAL})
# Locate (or download) the leantar tool used to (un)pack Lean artifacts.
find_program(LEANTAR leantar)
if(NOT LEANTAR)
  set(LEANTAR_VERSION v0.1.18)
  if(CMAKE_SYSTEM_NAME MATCHES "Windows")
    set(LEANTAR_ARCHIVE_SUFFIX .zip)
    set(LEANTAR_TARGET x86_64-pc-windows-msvc)
  else()
    set(LEANTAR_ARCHIVE_SUFFIX .tar.gz)
    # CMAKE_SYSTEM_PROCESSOR reports "arm64" on macOS but "aarch64" on
    # Linux; match both spellings so ARM Linux hosts do not silently
    # fall through to the x86_64 download.
    if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
      set(LEANTAR_TARGET_ARCH aarch64)
    else()
      set(LEANTAR_TARGET_ARCH x86_64)
    endif()
    if(CMAKE_SYSTEM_NAME MATCHES "Darwin")
      set(LEANTAR_TARGET_OS apple-darwin)
    else()
      set(LEANTAR_TARGET_OS unknown-linux-musl)
    endif()
    set(LEANTAR_TARGET ${LEANTAR_TARGET_ARCH}-${LEANTAR_TARGET_OS})
  endif()
  set(
    LEANTAR
    ${CMAKE_BINARY_DIR}/leantar/leantar-${LEANTAR_VERSION}-${LEANTAR_TARGET}/leantar${CMAKE_EXECUTABLE_SUFFIX}
  )
  if(NOT EXISTS "${LEANTAR}")
    set(LEANTAR_URL https://github.com/digama0/leangz/releases/download/${LEANTAR_VERSION}/leantar-${LEANTAR_VERSION}-${LEANTAR_TARGET}${LEANTAR_ARCHIVE_SUFFIX})
    # file(DOWNLOAD) does not abort the configure step on failure by
    # itself; capture STATUS and check it explicitly so a failed or
    # truncated download is not silently handed to ARCHIVE_EXTRACT.
    file(
      DOWNLOAD
      ${LEANTAR_URL}
      ${CMAKE_BINARY_DIR}/leantar${LEANTAR_ARCHIVE_SUFFIX}
      STATUS LEANTAR_DOWNLOAD_STATUS
    )
    list(GET LEANTAR_DOWNLOAD_STATUS 0 LEANTAR_DOWNLOAD_CODE)
    if(NOT LEANTAR_DOWNLOAD_CODE EQUAL 0)
      list(GET LEANTAR_DOWNLOAD_STATUS 1 LEANTAR_DOWNLOAD_MSG)
      message(FATAL_ERROR "failed to download leantar from ${LEANTAR_URL}: ${LEANTAR_DOWNLOAD_MSG}")
    endif()
    file(
      ARCHIVE_EXTRACT
      INPUT ${CMAKE_BINARY_DIR}/leantar${LEANTAR_ARCHIVE_SUFFIX}
      DESTINATION ${CMAKE_BINARY_DIR}/leantar
    )
  endif()
endif()
list(APPEND CL_ARGS -DCADICAL=${CADICAL} -DLEANTAR=${LEANTAR})
endif()
if(USE_MIMALLOC)

206
LICENSES
View File

@@ -1370,4 +1370,208 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
SOFTWARE.
==============================================================================
leantar is by Mario Carneiro and distributed under the Apache 2.0 License:
==============================================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

1
doc/.gitignore vendored
View File

@@ -1 +0,0 @@
out

2
doc/examples/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
*.out.produced
*.exit.produced

View File

@@ -0,0 +1,2 @@
Tree.node (Tree.node (Tree.leaf) 1 "one" (Tree.leaf)) 2 "two" (Tree.node (Tree.leaf) 3 "three" (Tree.leaf))
[(1, "one"), (2, "two"), (3, "three")]

11
doc/examples/compiler/run_test Executable file
View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Build the compiler example as a native binary with leanmake, run it
# with sample arguments, and check exit code / captured output via the
# helpers sourced from util.sh.
source ../../../tests/env_test.sh
source "$TEST_DIR/util.sh"
# --always-make forces a rebuild so stale binaries never mask failures.
leanmake --always-make bin
exec_capture test.lean \
./build/bin/test hello world
check_exit test.lean
check_out test.lean

View File

@@ -0,0 +1 @@
[hello, world]

View File

@@ -0,0 +1,3 @@
30
interp.lean:146:4: warning: declaration uses `sorry`
3628800

View File

@@ -0,0 +1,2 @@
true
false

View File

@@ -0,0 +1,2 @@
"(((fun x_1 => (fun x_2 => (x_1 + x_2))) 1) 2)"
"((((fun x_1 => (fun x_2 => (x_1 + x_2))) 1) 2) + 5)"

9
doc/examples/run_test Executable file
View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Run a single doc example ($1) through lean with all linters disabled,
# then check its exit code and captured output via the util.sh helpers.
source ../../tests/env_test.sh
source "$TEST_DIR/util.sh"
exec_capture "$1" \
lean -Dlinter.all=false "$1"
check_exit "$1"
check_out "$1"

View File

@@ -1,4 +0,0 @@
#!/usr/bin/env bash
source ../../tests/common.sh
exec_check_raw lean -Dlinter.all=false "$f"

View File

@@ -1,9 +1,11 @@
#!/usr/bin/env python3
"""
Lean name demangler.
Lean name demangler — thin wrapper around the Lean CLI tool.
Demangles C symbol names produced by the Lean 4 compiler back into
readable Lean hierarchical names.
Spawns ``lean --run lean_demangle_cli.lean`` as a persistent subprocess
and communicates via stdin/stdout pipes. This ensures a single source
of truth for demangling logic (the Lean implementation in
``Lean.Compiler.NameDemangling``).
Usage as a filter (like c++filt):
echo "l_Lean_Meta_Sym_main" | python lean_demangle.py
@@ -13,767 +15,68 @@ Usage as a module:
print(demangle_lean_name("l_Lean_Meta_Sym_main"))
"""
import atexit
import os
import subprocess
import sys
# ---------------------------------------------------------------------------
# String.mangle / unmangle
# ---------------------------------------------------------------------------
def _is_ascii_alnum(ch):
"""Check if ch is an ASCII letter or digit (matching Lean's isAlpha/isDigit)."""
return ('a' <= ch <= 'z') or ('A' <= ch <= 'Z') or ('0' <= ch <= '9')
def mangle_string(s):
    """Port of Lean's String.mangle: escape a single string for C identifiers.

    ASCII letters/digits pass through, '_' doubles to '__', and every
    other character becomes a _xNN / _uNNNN / _UNNNNNNNN hex escape
    depending on its code point width.
    """
    def escape(ch):
        if _is_ascii_alnum(ch):
            return ch
        if ch == '_':
            return '__'
        code = ord(ch)
        if code < 0x100:
            return '_x%02x' % code
        if code < 0x10000:
            return '_u%04x' % code
        return '_U%08x' % code

    return ''.join(escape(ch) for ch in s)
def _parse_hex(s, pos, n):
"""Parse n lowercase hex digits at pos. Returns (new_pos, value) or None."""
if pos + n > len(s):
return None
val = 0
for i in range(n):
c = s[pos + i]
if '0' <= c <= '9':
val = (val << 4) | (ord(c) - ord('0'))
elif 'a' <= c <= 'f':
val = (val << 4) | (ord(c) - ord('a') + 10)
else:
return None
return (pos + n, val)
# ---------------------------------------------------------------------------
# Name mangling (for round-trip verification)
# ---------------------------------------------------------------------------
def _check_disambiguation(m):
    """Port of Lean's checkDisambiguation: does mangled string m need a '00' prefix?

    Scans past leading underscores; the first other character decides:
    a valid hex escape ('x'/'u'/'U' + digits) or a decimal digit means
    the string is ambiguous and needs disambiguation.
    """
    escape_widths = {'x': 2, 'u': 4, 'U': 8}
    for pos, ch in enumerate(m):
        if ch == '_':
            continue
        width = escape_widths.get(ch)
        if width is not None:
            return _parse_hex(m, pos + 1, width) is not None
        return '0' <= ch <= '9'
    # All underscores or empty: always ambiguous.
    return True
def _need_disambiguation(prev_component, mangled_next):
    """Port of Lean's needDisambiguation.

    Disambiguation is required when the previous string component ends
    in '_' or when the next mangled part is itself ambiguous.
    """
    if isinstance(prev_component, str) and prev_component.endswith('_'):
        return True
    return _check_disambiguation(mangled_next)
def mangle_name(components, prefix="l_"):
    """
    Mangle a list of name components (str or int) into a C symbol.
    Port of Lean's Name.mangle.
    """
    if not components:
        return prefix
    parts = []
    prev = None
    for i, comp in enumerate(components):
        if isinstance(comp, int):
            # Numeric components are encoded with a trailing '_'
            # (and a leading '_' separator when not first).
            if i == 0:
                parts.append(str(comp) + '_')
            else:
                parts.append('_' + str(comp) + '_')
        else:
            m = mangle_string(comp)
            if i == 0:
                # '00' prefix marks an ambiguous first component.
                if _check_disambiguation(m):
                    parts.append('00' + m)
                else:
                    parts.append(m)
            else:
                # '_00' vs plain '_' separator, depending on ambiguity.
                if _need_disambiguation(prev, m):
                    parts.append('_00' + m)
                else:
                    parts.append('_' + m)
        # Track the previous (unmangled) component for the next
        # disambiguation check.
        prev = comp
    return prefix + ''.join(parts)
# ---------------------------------------------------------------------------
# Name demangling
# ---------------------------------------------------------------------------
def demangle_body(s):
    """
    Demangle a string produced by Name.mangleAux (without prefix).
    Returns a list of components (str or int).
    This is a faithful port of Lean's Name.demangleAux from NameMangling.lean.
    """
    components = []
    length = len(s)

    def emit(comp):
        # Append a finished component (str or int) to the result.
        components.append(comp)

    def decode_num(pos, n):
        """Parse remaining digits, emit numeric component, continue."""
        while pos < length:
            ch = s[pos]
            if '0' <= ch <= '9':
                n = n * 10 + (ord(ch) - ord('0'))
                pos += 1
            else:
                # Expect '_' (trailing underscore of numeric encoding)
                pos += 1  # skip '_'
                emit(n)
                if pos >= length:
                    return pos
                # Skip separator '_' and go to name_start
                pos += 1
                return name_start(pos)
        # End of string
        emit(n)
        return pos

    def name_start(pos):
        """Start parsing a new name component."""
        if pos >= length:
            return pos
        ch = s[pos]
        pos += 1
        if '0' <= ch <= '9':
            # Check for '00' disambiguation
            if ch == '0' and pos < length and s[pos] == '0':
                pos += 1
                return demangle_main(pos, "", 0)
            else:
                return decode_num(pos, ord(ch) - ord('0'))
        elif ch == '_':
            # Component starting with an underscore: carry it as ucount.
            return demangle_main(pos, "", 1)
        else:
            return demangle_main(pos, ch, 0)

    def demangle_main(pos, acc, ucount):
        """Main demangling loop.

        acc is the current component's text; ucount counts a pending run
        of underscores (pairs are literal '_', an odd one is a separator
        or escape introducer).
        """
        while pos < length:
            ch = s[pos]
            pos += 1
            if ch == '_':
                ucount += 1
                continue
            if ucount % 2 == 0:
                # Even underscores: literal underscores in component name
                acc += '_' * (ucount // 2) + ch
                ucount = 0
                continue
            # Odd ucount: separator or escape
            if '0' <= ch <= '9':
                # End current str component, start number
                emit(acc + '_' * (ucount // 2))
                if ch == '0' and pos < length and s[pos] == '0':
                    pos += 1
                    return demangle_main(pos, "", 0)
                else:
                    return decode_num(pos, ord(ch) - ord('0'))
            # Try hex escapes
            if ch == 'x':
                result = _parse_hex(s, pos, 2)
                if result is not None:
                    new_pos, val = result
                    acc += '_' * (ucount // 2) + chr(val)
                    pos = new_pos
                    ucount = 0
                    continue
            if ch == 'u':
                result = _parse_hex(s, pos, 4)
                if result is not None:
                    new_pos, val = result
                    acc += '_' * (ucount // 2) + chr(val)
                    pos = new_pos
                    ucount = 0
                    continue
            if ch == 'U':
                result = _parse_hex(s, pos, 8)
                if result is not None:
                    new_pos, val = result
                    acc += '_' * (ucount // 2) + chr(val)
                    pos = new_pos
                    ucount = 0
                    continue
            # Name separator
            emit(acc)
            acc = '_' * (ucount // 2) + ch
            ucount = 0
        # End of string
        acc += '_' * (ucount // 2)
        if acc:
            emit(acc)
        return pos

    name_start(0)
    return components
# ---------------------------------------------------------------------------
# Prefix handling for lp_ (package prefix)
# ---------------------------------------------------------------------------
def _is_valid_string_mangle(s):
    """Check if s is a valid output of String.mangle (no trailing bare _).

    Valid tokens: an ASCII alphanumeric, '__' (escaped underscore), or a
    '_x'/'_u'/'_U' hex escape of the appropriate width. Anything else —
    including a bare trailing '_' — makes the string invalid.
    """
    widths = {'x': 2, 'u': 4, 'U': 8}
    pos, length = 0, len(s)
    while pos < length:
        ch = s[pos]
        if _is_ascii_alnum(ch):
            pos += 1
            continue
        if ch != '_':
            return False
        if pos + 1 >= length:
            return False  # trailing bare '_'
        nxt = s[pos + 1]
        if nxt == '_':
            pos += 2
            continue
        width = widths.get(nxt)
        if width is None:
            return False
        parsed = _parse_hex(s, pos + 2, width)
        if parsed is None:
            return False
        pos = parsed[0]
    return True
def _skip_string_mangle(s, pos):
    """
    Skip past a String.mangle output in s starting at pos.
    Returns the position after the mangled string (where we expect the separator '_').
    This is a greedy scan.
    """
    length = len(s)
    while pos < length:
        ch = s[pos]
        if _is_ascii_alnum(ch):
            pos += 1
        elif ch == '_':
            if pos + 1 < length:
                nch = s[pos + 1]
                if nch == '_':
                    # Escaped literal underscore.
                    pos += 2
                elif nch == 'x' and _parse_hex(s, pos + 2, 2) is not None:
                    pos = _parse_hex(s, pos + 2, 2)[0]
                elif nch == 'u' and _parse_hex(s, pos + 2, 4) is not None:
                    pos = _parse_hex(s, pos + 2, 4)[0]
                elif nch == 'U' and _parse_hex(s, pos + 2, 8) is not None:
                    pos = _parse_hex(s, pos + 2, 8)[0]
                else:
                    return pos  # bare '_': separator
            else:
                # '_' at end of string: treat as separator position.
                return pos
        else:
            # Any non-mangle character ends the scan.
            return pos
    return pos
def _find_lp_body(s):
    """
    Given s = everything after 'lp_' in a symbol, find where the declaration
    body (Name.mangleAux output) starts.
    Returns the start index of the body within s, or None.
    Strategy: try all candidate split points where the package part is a valid
    String.mangle output and the body round-trips. Prefer the longest valid
    package name (most specific match).
    """
    length = len(s)
    # Collect candidate split positions: every '_' that could be the separator
    candidates = []
    pos = 0
    while pos < length:
        if s[pos] == '_':
            candidates.append(pos)
        pos += 1
    # Try each candidate; collect all valid splits
    valid_splits = []
    for split_pos in candidates:
        pkg_part = s[:split_pos]
        if not pkg_part:
            continue
        if not _is_valid_string_mangle(pkg_part):
            continue
        body = s[split_pos + 1:]
        if not body:
            continue
        components = demangle_body(body)
        if not components:
            continue
        # Round-trip check: re-mangling the demangled body must
        # reproduce it exactly for the split to be plausible.
        remangled = mangle_name(components, prefix="")
        if remangled == body:
            first = components[0]
            # Score: prefer first component starting with uppercase
            has_upper = isinstance(first, str) and first and first[0].isupper()
            valid_splits.append((split_pos, has_upper))
    if valid_splits:
        # Among splits where first decl component starts uppercase, pick longest pkg.
        # Otherwise pick shortest pkg.
        # NOTE(review): the comprehension variable shadows the parameter
        # `s`; harmless in Python 3 (comprehensions have their own
        # scope), but worth renaming.
        upper_splits = [s for s in valid_splits if s[1]]
        if upper_splits:
            best = max(upper_splits, key=lambda x: x[0])
        else:
            best = min(valid_splits, key=lambda x: x[0])
        return best[0] + 1
    # Fallback: greedy String.mangle scan
    greedy_pos = _skip_string_mangle(s, 0)
    if greedy_pos < length and s[greedy_pos] == '_':
        return greedy_pos + 1
    return None
# ---------------------------------------------------------------------------
# Format name components for display
# ---------------------------------------------------------------------------
def format_name(components):
    """Format a list of name components as a dot-separated string."""
    return '.'.join(map(str, components))
# ---------------------------------------------------------------------------
# Human-friendly postprocessing
# ---------------------------------------------------------------------------
# Compiler-generated suffix components — exact match
_SUFFIX_FLAGS_EXACT = {
'_redArg': 'arity\u2193',
'_boxed': 'boxed',
'_impl': 'impl',
}
# Compiler-generated suffix prefixes — match with optional _N index
# e.g., _lam, _lam_0, _lam_3, _lambda_0, _closed_2
_SUFFIX_FLAGS_PREFIX = {
'_lam': '\u03bb',
'_lambda': '\u03bb',
'_elam': '\u03bb',
'_jp': 'jp',
'_closed': 'closed',
}
def _match_suffix(component):
    """
    Check if a string component is a compiler-generated suffix.
    Returns the flag label or None.
    Handles both exact matches (_redArg, _boxed) and indexed suffixes
    (_lam_0, _lambda_2, _closed_0) produced by appendIndexAfter.
    """
    if not isinstance(component, str):
        return None
    label = _SUFFIX_FLAGS_EXACT.get(component)
    if label is not None:
        return label
    if component in _SUFFIX_FLAGS_PREFIX:
        return _SUFFIX_FLAGS_PREFIX[component]
    # Indexed form: a known prefix followed by '_<digits>'.
    for prefix, tag in _SUFFIX_FLAGS_PREFIX.items():
        marker = prefix + '_'
        if component.startswith(marker) and component[len(marker):].isdigit():
            return tag
    return None
def _strip_private(components):
"""Strip _private.Module.0. prefix. Returns (stripped_parts, is_private)."""
if (len(components) >= 3 and isinstance(components[0], str) and
components[0] == '_private'):
for i in range(1, len(components)):
if components[i] == 0:
if i + 1 < len(components):
return components[i + 1:], True
break
return components, False
def _strip_spec_suffixes(components):
"""Strip trailing spec_N components (from appendIndexAfter)."""
parts = list(components)
while parts and isinstance(parts[-1], str) and parts[-1].startswith('spec_'):
rest = parts[-1][5:]
if rest.isdigit():
parts.pop()
else:
break
return parts
def _is_spec_index(component):
"""Check if a component is a spec_N index (from appendIndexAfter)."""
return (isinstance(component, str) and
component.startswith('spec_') and component[5:].isdigit())
def _parse_spec_entries(rest):
"""Parse _at_..._spec pairs into separate spec context entries.
Given components starting from the first _at_, returns:
- entries: list of component lists, one per _at_..._spec block
- remaining: components after the last _spec N (trailing suffixes)
"""
entries = []
current_ctx = None
remaining = []
skip_next = False
for p in rest:
if skip_next:
skip_next = False
continue
if isinstance(p, str) and p == '_at_':
if current_ctx is not None:
entries.append(current_ctx)
current_ctx = []
continue
if isinstance(p, str) and p == '_spec':
if current_ctx is not None:
entries.append(current_ctx)
current_ctx = None
skip_next = True
continue
if isinstance(p, str) and p.startswith('_spec'):
if current_ctx is not None:
entries.append(current_ctx)
current_ctx = None
continue
if current_ctx is not None:
current_ctx.append(p)
else:
remaining.append(p)
if current_ctx is not None:
entries.append(current_ctx)
return entries, remaining
def _process_spec_context(components):
    """Process a spec context into a clean name and its flags.
    Returns (name_parts, flags) where name_parts are the cleaned components
    and flags is a deduplicated list of flag labels from compiler suffixes.
    """
    stripped, _ = _strip_private(list(components))
    name_parts = []
    flags = []
    for part in stripped:
        label = _match_suffix(part)
        if label is not None:
            # Deduplicate while preserving first-seen order.
            if label not in flags:
                flags.append(label)
        elif not _is_spec_index(part):
            name_parts.append(part)
    return name_parts, flags
def postprocess_name(components):
    """
    Transform raw demangled components into a human-friendly display string.
    Applies:
    - Private name cleanup: _private.Module.0.Name.foo -> Name.foo [private]
    - Hygienic name cleanup: strips _@.module._hygCtx._hyg.N
    - Suffix folding: _redArg, _boxed, _lam_0, etc. -> [flags]
    - Specialization: f._at_.g._spec.N -> f spec at g
      Shown after base [flags], with context flags: spec at g[ctx_flags]
    """
    if not components:
        return ""
    parts = list(components)
    flags = []
    spec_entries = []
    # --- Strip _private prefix ---
    parts, is_private = _strip_private(parts)
    # --- Strip hygienic suffixes: everything from _@ onward ---
    at_idx = None
    for i, p in enumerate(parts):
        if isinstance(p, str) and p.startswith('_@'):
            at_idx = i
            break
    if at_idx is not None:
        parts = parts[:at_idx]
    # --- Handle specialization: _at_ ... _spec N ---
    at_positions = [i for i, p in enumerate(parts)
                    if isinstance(p, str) and p == '_at_']
    if at_positions:
        first_at = at_positions[0]
        base = parts[:first_at]
        rest = parts[first_at:]
        entries, remaining = _parse_spec_entries(rest)
        for ctx_components in entries:
            ctx_name, ctx_flags = _process_spec_context(ctx_components)
            if ctx_name or ctx_flags:
                spec_entries.append((ctx_name, ctx_flags))
        # Remaining trailing components rejoin the base name.
        parts = base + remaining
    # --- Collect suffix flags from the end ---
    while parts:
        last = parts[-1]
        flag = _match_suffix(last)
        if flag is not None:
            flags.append(flag)
            parts.pop()
        elif isinstance(last, int) and len(parts) >= 2:
            # A trailing number may be the index of a suffix just
            # before it (e.g. ..., '_lam', 0).
            prev_flag = _match_suffix(parts[-2])
            if prev_flag is not None:
                flags.append(prev_flag)
                parts.pop()  # remove the number
                parts.pop()  # remove the suffix
            else:
                break
        else:
            break
    if is_private:
        flags.append('private')
    # --- Format result ---
    name = '.'.join(str(c) for c in parts) if parts else '?'
    result = name
    if flags:
        flag_str = ', '.join(flags)
        result += f' [{flag_str}]'
    for ctx_name, ctx_flags in spec_entries:
        ctx_str = '.'.join(str(c) for c in ctx_name) if ctx_name else '?'
        if ctx_flags:
            ctx_flag_str = ', '.join(ctx_flags)
            result += f' spec at {ctx_str}[{ctx_flag_str}]'
        else:
            result += f' spec at {ctx_str}'
    return result
# ---------------------------------------------------------------------------
# Main demangling entry point
# ---------------------------------------------------------------------------
def demangle_lean_name_raw(mangled):
    """
    Demangle a Lean C symbol, preserving all internal name components.
    Returns the exact demangled name with all compiler-generated suffixes
    intact. Use demangle_lean_name() for human-friendly output.
    """
    try:
        return _demangle_lean_name_inner(mangled, human_friendly=False)
    except Exception:
        # Any parse failure means "not a Lean symbol": echo the input.
        return mangled
# Persistent `lean --run` demangler subprocess, created lazily by
# _get_process() and torn down by _cleanup().
_process = None
# Directory of this script, used to locate the Lean CLI helper next to it.
_script_dir = os.path.dirname(os.path.abspath(__file__))
_cli_script = os.path.join(_script_dir, "lean_demangle_cli.lean")
def _get_process():
    """Get or create the persistent Lean demangler subprocess."""
    global _process
    # Reuse the existing process if it is still alive.
    if _process is not None and _process.poll() is None:
        return _process
    # The lean executable can be overridden via the LEAN env variable.
    lean = os.environ.get("LEAN", "lean")
    _process = subprocess.Popen(
        [lean, "--run", _cli_script],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
        text=True,
        bufsize=1,  # line buffered
    )
    # NOTE(review): registered on every (re)spawn, so _cleanup can end up
    # registered multiple times; it resets _process to None so repeated
    # runs are harmless.
    atexit.register(_cleanup)
    return _process
def _cleanup():
    """Shut down the demangler subprocess (registered via atexit)."""
    global _process
    if _process is not None:
        try:
            # Closing stdin signals EOF; give the helper time to exit.
            _process.stdin.close()
            _process.wait(timeout=5)
        except Exception:
            # Unresponsive or already-broken process: force-kill it.
            _process.kill()
        _process = None
def demangle_lean_name(mangled):
    """
    Demangle a C symbol name produced by the Lean 4 compiler.

    Sends the symbol to the persistent `lean --run` helper (see
    _get_process) and reads back a single line, so the Lean
    implementation stays the single source of truth for demangling.
    Returns a human-friendly demangled name, or the original string
    if it is not a Lean symbol or the helper fails for any reason.
    Use demangle_lean_name_raw() to preserve all internal components.
    """
    try:
        proc = _get_process()
        proc.stdin.write(mangled + "\n")
        proc.stdin.flush()
        result = proc.stdout.readline().rstrip("\n")
        # An empty reply means the helper could not demangle the input.
        return result if result else mangled
    except Exception:
        # Helper unavailable or crashed: fall back to echoing the symbol.
        return mangled
def _demangle_lean_name_inner(mangled, human_friendly=True):
    """Inner demangle that may raise on malformed input."""
    # The C entry point has a fixed name.
    if mangled == "_lean_main":
        return "[lean] main"
    # Handle lean_ runtime functions
    if human_friendly and mangled.startswith("lean_apply_"):
        rest = mangled[11:]
        if rest.isdigit():
            return f"<apply/{rest}>"
    # Strip .cold.N suffix (LLVM linker cold function clones)
    cold_suffix = ""
    core = mangled
    dot_pos = core.find('.cold.')
    if dot_pos >= 0:
        cold_suffix = " " + core[dot_pos:]
        core = core[:dot_pos]
    elif core.endswith('.cold'):
        cold_suffix = " .cold"
        core = core[:-5]
    result = _demangle_core(core, human_friendly)
    if result is None:
        # Not a Lean symbol: return the input untouched.
        return mangled
    return result + cold_suffix
def _demangle_core(mangled, human_friendly=True):
    """Demangle a symbol without .cold suffix. Returns None if not a Lean name.

    Recognizes three shapes: `_init_<l_...>` initializers,
    `initialize_<...>` module initializers, and plain `l_`/`lp_` symbols.
    """
    render = postprocess_name if human_friendly else format_name

    def rendered(sym):
        # Shared tail: strip l_/lp_, demangle, render.
        # Yields (None, None) when `sym` is not Lean-shaped.
        body, pkg = _strip_lean_prefix(sym)
        if body is None:
            return None, None
        parts = demangle_body(body)
        if not parts:
            return None, None
        return render(parts), pkg

    # _init_ prefix (per-declaration initializers)
    if mangled.startswith("_init_"):
        name, pkg = rendered(mangled[len("_init_"):])
        if name is None:
            return None
        return f"[init] {name} ({pkg})" if pkg else f"[init] {name}"

    # initialize_ prefix (module init functions)
    if mangled.startswith("initialize_"):
        tail = mangled[len("initialize_"):]
        # With package: initialize_lp_{pkg}_{body} or initialize_l_{body}
        name, pkg = rendered(tail)
        if name is not None:
            return f"[module_init] {name} ({pkg})" if pkg else f"[module_init] {name}"
        # Without package: initialize_{Name.mangleAux(moduleName)}
        if tail:
            parts = demangle_body(tail)
            if parts:
                return f"[module_init] {render(parts)}"
        return None

    # Plain l_ / lp_ symbol
    name, pkg = rendered(mangled)
    if name is None:
        return None
    return f"{name} ({pkg})" if pkg else name
def _strip_lean_prefix(s):
"""
Strip the l_ or lp_ prefix from a mangled symbol.
Returns (body, pkg_display) where body is the Name.mangleAux output
and pkg_display is None or a string describing the package.
Returns (None, None) if the string doesn't have a recognized prefix.
"""
if s.startswith("l_"):
return (s[2:], None)
if s.startswith("lp_"):
after_lp = s[3:]
body_start = _find_lp_body(after_lp)
if body_start is not None:
pkg_mangled = after_lp[:body_start - 1]
# Unmangle the package name
pkg_components = demangle_body(pkg_mangled)
if pkg_components and len(pkg_components) == 1 and isinstance(pkg_components[0], str):
pkg_display = pkg_components[0]
else:
pkg_display = pkg_mangled
return (after_lp[body_start:], pkg_display)
# Fallback: treat everything after lp_ as body
return (after_lp, "?")
return (None, None)
# ---------------------------------------------------------------------------
# CLI
# ---------------------------------------------------------------------------
def main():
    """CLI entry point: demangle names given as arguments, else filter stdin.

    `--raw` selects demangle_lean_name_raw (exact components) instead of the
    human-friendly demangle_lean_name.
    """
    # NOTE(review): a merge left a second, older stdin-filtering loop after
    # this function body; it ran a *second* pass over stdin after the
    # argparse version finished. Removed.
    import argparse
    parser = argparse.ArgumentParser(
        description="Demangle Lean 4 C symbol names (like c++filt for Lean)")
    parser.add_argument('names', nargs='*',
                        help='Names to demangle (reads stdin if none given)')
    parser.add_argument('--raw', action='store_true',
                        help='Output exact demangled names without postprocessing')
    args = parser.parse_args()
    demangle = demangle_lean_name_raw if args.raw else demangle_lean_name
    if args.names:
        for name in args.names:
            print(demangle(name))
    else:
        for line in sys.stdin:
            print(demangle(line.rstrip('\n')))
# NOTE(review): the merge duplicated this guard ('__main__' twice, with
# different quote styles); collapsed to a single guard.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,32 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Kim Morrison
-/
module
import Lean.Compiler.NameDemangling
/-!
Lean name demangler CLI tool. Reads mangled symbol names from stdin (one per
line) and writes demangled names to stdout. Non-Lean symbols pass through
unchanged. Like `c++filt` but for Lean names.
Usage:
echo "l_Lean_Meta_foo" | lean --run lean_demangle_cli.lean
cat symbols.txt | lean --run lean_demangle_cli.lean
-/
open Lean.Name.Demangle
/-- Demangler REPL: read one mangled symbol per line, answer one line each.
NOTE(review): the monadic bind arrows (`←`) were lost in transit and have
been restored here — confirm against the original source. -/
def main : IO Unit := do
  let stdin ← IO.getStdin
  let stdout ← IO.getStdout
  repeat do
    let line ← stdin.getLine
    -- An empty read means EOF (a real line always contains its newline).
    if line.isEmpty then break
    let sym := line.trimRight
    match demangleSymbol sym with
    | some s => stdout.putStrLn s
    | none => stdout.putStrLn sym
    -- Flush after every answer so the Python client's readline() never blocks.
    stdout.flush

View File

@@ -1,670 +0,0 @@
#!/usr/bin/env python3
"""Tests for the Lean name demangler."""
import unittest
import json
import gzip
import tempfile
import os
from lean_demangle import (
mangle_string, mangle_name, demangle_body, format_name,
demangle_lean_name, demangle_lean_name_raw, postprocess_name,
_parse_hex, _check_disambiguation,
)
class TestStringMangle(unittest.TestCase):
    """Test String.mangle (character-level escaping).

    Per the cases below: alphanumerics pass through, `_` is doubled,
    other ASCII becomes `_x<hex>`, and non-ASCII becomes `_u<4 hex>` /
    `_U<8 hex>` escapes.
    """

    def test_alphanumeric(self):
        # Plain alphanumerics are left untouched.
        self.assertEqual(mangle_string("hello"), "hello")
        self.assertEqual(mangle_string("abc123"), "abc123")

    def test_underscore(self):
        # A literal underscore is escaped by doubling.
        self.assertEqual(mangle_string("a_b"), "a__b")
        self.assertEqual(mangle_string("_"), "__")
        self.assertEqual(mangle_string("__"), "____")

    def test_special_chars(self):
        # Other ASCII characters become _x<hex> escapes.
        self.assertEqual(mangle_string("."), "_x2e")
        self.assertEqual(mangle_string("a.b"), "a_x2eb")

    def test_unicode(self):
        # BMP characters use _u + 4 hex digits; astral-plane ones _U + 8.
        self.assertEqual(mangle_string("\u03bb"), "_u03bb")
        self.assertEqual(mangle_string("\U0001d55c"), "_U0001d55c")

    def test_empty(self):
        self.assertEqual(mangle_string(""), "")
class TestNameMangle(unittest.TestCase):
    """Test Name.mangle (hierarchical name mangling).

    Components (strings and ints) are escaped individually and joined
    with `_` under an `l_` prefix (or a caller-supplied one); `00`
    markers disambiguate components that could otherwise be misparsed.
    """

    def test_simple(self):
        self.assertEqual(mangle_name(["Lean", "Meta", "Sym", "main"]),
                         "l_Lean_Meta_Sym_main")

    def test_single_component(self):
        self.assertEqual(mangle_name(["main"]), "l_main")

    def test_numeric_component(self):
        # Numeric components are rendered with a trailing underscore.
        self.assertEqual(
            mangle_name(["_private", "Lean", "Meta", "Basic", 0,
                         "Lean", "Meta", "withMVarContextImp"]),
            "l___private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp")

    def test_component_with_underscore(self):
        self.assertEqual(mangle_name(["a_b"]), "l_a__b")
        self.assertEqual(mangle_name(["a_b", "c"]), "l_a__b_c")

    def test_disambiguation_digit_start(self):
        # A component starting with a digit needs a `00` marker.
        self.assertEqual(mangle_name(["0foo"]), "l_000foo")

    def test_disambiguation_escape_start(self):
        # A component that looks like a hex escape needs a `00` marker.
        self.assertEqual(mangle_name(["a", "x27"]), "l_a_00x27")

    def test_numeric_root(self):
        self.assertEqual(mangle_name([42]), "l_42_")
        self.assertEqual(mangle_name([42, "foo"]), "l_42__foo")

    def test_component_ending_with_underscore(self):
        self.assertEqual(mangle_name(["a_", "b"]), "l_a___00b")

    def test_custom_prefix(self):
        # `prefix` replaces the default `l_` (used for per-package lp_...).
        self.assertEqual(mangle_name(["foo"], prefix="lp_pkg_"),
                         "lp_pkg_foo")
class TestDemangleBody(unittest.TestCase):
    """Test demangle_body (the core Name.demangleAux algorithm).

    demangle_body inverts the component mangling (input has no l_/lp_
    prefix) and returns a list of str/int components.
    """

    def test_simple(self):
        self.assertEqual(demangle_body("Lean_Meta_Sym_main"),
                         ["Lean", "Meta", "Sym", "main"])

    def test_single(self):
        self.assertEqual(demangle_body("main"), ["main"])

    def test_empty(self):
        self.assertEqual(demangle_body(""), [])

    def test_underscore_in_component(self):
        # `__` decodes to a literal underscore inside a component.
        self.assertEqual(demangle_body("a__b"), ["a_b"])
        self.assertEqual(demangle_body("a__b_c"), ["a_b", "c"])

    def test_numeric_component(self):
        self.assertEqual(demangle_body("foo_42__bar"), ["foo", 42, "bar"])

    def test_numeric_root(self):
        self.assertEqual(demangle_body("42_"), [42])

    def test_numeric_at_end(self):
        self.assertEqual(demangle_body("foo_42_"), ["foo", 42])

    def test_disambiguation_00(self):
        # `00` marks "literal component follows", not a hex escape.
        self.assertEqual(demangle_body("a_00x27"), ["a", "x27"])

    def test_disambiguation_00_at_root(self):
        self.assertEqual(demangle_body("000foo"), ["0foo"])

    def test_hex_escape_x(self):
        self.assertEqual(demangle_body("a_x2eb"), ["a.b"])

    def test_hex_escape_u(self):
        self.assertEqual(demangle_body("_u03bb"), ["\u03bb"])

    def test_hex_escape_U(self):
        self.assertEqual(demangle_body("_U0001d55c"), ["\U0001d55c"])

    def test_private_name(self):
        body = "__private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp"
        self.assertEqual(demangle_body(body),
                         ["_private", "Lean", "Meta", "Basic", 0,
                          "Lean", "Meta", "withMVarContextImp"])

    def test_boxed_suffix(self):
        body = "foo___boxed"
        self.assertEqual(demangle_body(body), ["foo", "_boxed"])

    def test_redArg_suffix(self):
        body = "foo_bar___redArg"
        self.assertEqual(demangle_body(body), ["foo", "bar", "_redArg"])

    def test_component_ending_underscore_disambiguation(self):
        self.assertEqual(demangle_body("a___00b"), ["a_", "b"])
class TestRoundTrip(unittest.TestCase):
    """Test that mangle(demangle(x)) == x for various names."""

    def _check_roundtrip(self, components):
        # Round-trip with no prefix at all...
        mangled = mangle_name(components, prefix="")
        demangled = demangle_body(mangled)
        self.assertEqual(demangled, components,
                         f"Round-trip failed: {components} -> '{mangled}' -> {demangled}")
        # ...and again through the standard `l_` prefix.
        mangled_with_prefix = mangle_name(components, prefix="l_")
        self.assertTrue(mangled_with_prefix.startswith("l_"))
        body = mangled_with_prefix[2:]
        demangled2 = demangle_body(body)
        self.assertEqual(demangled2, components)

    def test_simple_names(self):
        self._check_roundtrip(["Lean", "Meta", "main"])
        self._check_roundtrip(["a"])
        self._check_roundtrip(["Foo", "Bar", "baz"])

    def test_numeric(self):
        self._check_roundtrip(["foo", 0, "bar"])
        self._check_roundtrip([42])
        self._check_roundtrip(["a", 1, "b", 2, "c"])

    def test_underscores(self):
        self._check_roundtrip(["_private"])
        self._check_roundtrip(["a_b", "c_d"])
        self._check_roundtrip(["_at_", "_spec"])

    def test_private_name(self):
        self._check_roundtrip(["_private", "Lean", "Meta", "Basic", 0,
                               "Lean", "Meta", "withMVarContextImp"])

    def test_boxed(self):
        self._check_roundtrip(["Lean", "Meta", "foo", "_boxed"])

    def test_redArg(self):
        self._check_roundtrip(["Lean", "Meta", "foo", "_redArg"])

    def test_specialization(self):
        self._check_roundtrip(["List", "map", "_at_", "Foo", "bar", "_spec", 3])

    def test_lambda(self):
        self._check_roundtrip(["Foo", "bar", "_lambda", 0])
        self._check_roundtrip(["Foo", "bar", "_lambda", 2])

    def test_closed(self):
        self._check_roundtrip(["myConst", "_closed", 0])

    def test_special_chars(self):
        # Non-identifier characters survive the hex-escape round trip.
        self._check_roundtrip(["a.b"])
        self._check_roundtrip(["\u03bb"])
        self._check_roundtrip(["a", "b\u2192c"])

    def test_disambiguation_cases(self):
        self._check_roundtrip(["a", "x27"])
        self._check_roundtrip(["0foo"])
        self._check_roundtrip(["a_", "b"])

    def test_complex_real_names(self):
        """Names modeled after real Lean compiler output."""
        self._check_roundtrip(
            ["Lean", "MVarId", "withContext", "_at_",
             "_private", "Lean", "Meta", "Sym", 0,
             "Lean", "Meta", "Sym", "BackwardRule", "apply",
             "_spec", 2, "_redArg", "_lambda", 0, "_boxed"])
class TestDemangleRaw(unittest.TestCase):
    """Test demangle_lean_name_raw (exact demangling, no postprocessing)."""

    def test_l_prefix(self):
        self.assertEqual(
            demangle_lean_name_raw("l_Lean_Meta_Sym_main"),
            "Lean.Meta.Sym.main")

    def test_l_prefix_private(self):
        # Raw mode keeps the _private wrapper and module/index components.
        result = demangle_lean_name_raw(
            "l___private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp")
        self.assertEqual(result,
                         "_private.Lean.Meta.Basic.0.Lean.Meta.withMVarContextImp")

    def test_l_prefix_boxed(self):
        result = demangle_lean_name_raw("l_foo___boxed")
        self.assertEqual(result, "foo._boxed")

    def test_l_prefix_redArg(self):
        result = demangle_lean_name_raw(
            "l___private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp___redArg")
        self.assertEqual(
            result,
            "_private.Lean.Meta.Basic.0.Lean.Meta.withMVarContextImp._redArg")

    def test_lean_main(self):
        self.assertEqual(demangle_lean_name_raw("_lean_main"), "[lean] main")

    def test_non_lean_names(self):
        # Symbols without a recognized prefix pass through unchanged.
        self.assertEqual(demangle_lean_name_raw("printf"), "printf")
        self.assertEqual(demangle_lean_name_raw("malloc"), "malloc")
        self.assertEqual(demangle_lean_name_raw("lean_apply_5"), "lean_apply_5")
        self.assertEqual(demangle_lean_name_raw(""), "")

    def test_init_prefix(self):
        result = demangle_lean_name_raw("_init_l_Lean_Meta_foo")
        self.assertEqual(result, "[init] Lean.Meta.foo")

    def test_lp_prefix_simple(self):
        # lp_<pkg>_ symbols report the package in trailing parentheses.
        mangled = mangle_name(["Lean", "Meta", "foo"], prefix="lp_std_")
        self.assertEqual(mangled, "lp_std_Lean_Meta_foo")
        result = demangle_lean_name_raw(mangled)
        self.assertEqual(result, "Lean.Meta.foo (std)")

    def test_lp_prefix_underscore_pkg(self):
        # Package names containing `_` are themselves string-mangled.
        pkg_mangled = mangle_string("my_pkg")
        self.assertEqual(pkg_mangled, "my__pkg")
        mangled = mangle_name(["Lean", "Meta", "foo"],
                              prefix=f"lp_{pkg_mangled}_")
        self.assertEqual(mangled, "lp_my__pkg_Lean_Meta_foo")
        result = demangle_lean_name_raw(mangled)
        self.assertEqual(result, "Lean.Meta.foo (my_pkg)")

    def test_lp_prefix_private_decl(self):
        mangled = mangle_name(
            ["_private", "X", 0, "Y", "foo"], prefix="lp_pkg_")
        self.assertEqual(mangled, "lp_pkg___private_X_0__Y_foo")
        result = demangle_lean_name_raw(mangled)
        self.assertEqual(result, "_private.X.0.Y.foo (pkg)")

    def test_complex_specialization(self):
        components = [
            "Lean", "MVarId", "withContext", "_at_",
            "_private", "Lean", "Meta", "Sym", 0,
            "Lean", "Meta", "Sym", "BackwardRule", "apply",
            "_spec", 2, "_redArg", "_lambda", 0, "_boxed"
        ]
        mangled = mangle_name(components)
        result = demangle_lean_name_raw(mangled)
        expected = format_name(components)
        self.assertEqual(result, expected)

    def test_cold_suffix(self):
        # LLVM `.cold.N` clone suffixes are kept, space-separated.
        result = demangle_lean_name_raw("l_Lean_Meta_foo___redArg.cold.1")
        self.assertEqual(result, "Lean.Meta.foo._redArg .cold.1")

    def test_cold_suffix_plain(self):
        result = demangle_lean_name_raw("l_Lean_Meta_foo.cold")
        self.assertEqual(result, "Lean.Meta.foo .cold")

    def test_initialize_no_pkg(self):
        result = demangle_lean_name_raw("initialize_Init_Control_Basic")
        self.assertEqual(result, "[module_init] Init.Control.Basic")

    def test_initialize_with_l_prefix(self):
        result = demangle_lean_name_raw("initialize_l_Lean_Meta_foo")
        self.assertEqual(result, "[module_init] Lean.Meta.foo")

    def test_never_crashes(self):
        """Demangling should never raise, just return the original."""
        weird_inputs = [
            "", "l_", "lp_", "lp_x", "_init_", "initialize_",
            "l_____", "lp____", "l_00", "l_0",
            "some random string", "l_ space",
        ]
        for inp in weird_inputs:
            result = demangle_lean_name_raw(inp)
            self.assertIsInstance(result, str)
class TestPostprocess(unittest.TestCase):
    """Test postprocess_name (human-friendly suffix folding, etc.).

    Per the cases below: compiler suffixes (_boxed, _redArg, _lam/_lambda,
    _closed) fold into bracketed flags, _private wrappers become a
    [private] flag, hygiene info is stripped, and _at_/_spec
    specializations render as "base spec at context".
    """

    def test_no_change(self):
        self.assertEqual(postprocess_name(["Lean", "Meta", "main"]),
                         "Lean.Meta.main")

    def test_boxed(self):
        self.assertEqual(postprocess_name(["foo", "_boxed"]),
                         "foo [boxed]")

    def test_redArg(self):
        # _redArg renders as an arity-reduction flag.
        self.assertEqual(postprocess_name(["foo", "bar", "_redArg"]),
                         "foo.bar [arity\u2193]")

    def test_lambda_separate(self):
        # _lam as separate component + numeric index
        self.assertEqual(postprocess_name(["foo", "_lam", 0]),
                         "foo [\u03bb]")

    def test_lambda_indexed(self):
        # _lam_0 as single string (appendIndexAfter)
        self.assertEqual(postprocess_name(["foo", "_lam_0"]),
                         "foo [\u03bb]")
        self.assertEqual(postprocess_name(["foo", "_lambda_2"]),
                         "foo [\u03bb]")

    def test_lambda_boxed(self):
        # _lam_0 followed by _boxed
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "Simp", "simpLambda",
                              "_lam_0", "_boxed"]),
            "Lean.Meta.Simp.simpLambda [boxed, \u03bb]")

    def test_closed(self):
        self.assertEqual(postprocess_name(["myConst", "_closed", 3]),
                         "myConst [closed]")

    def test_closed_indexed(self):
        self.assertEqual(postprocess_name(["myConst", "_closed_0"]),
                         "myConst [closed]")

    def test_multiple_suffixes(self):
        self.assertEqual(postprocess_name(["foo", "_redArg", "_boxed"]),
                         "foo [boxed, arity\u2193]")

    def test_redArg_lam(self):
        # _redArg followed by _lam_0 (issue #4)
        self.assertEqual(
            postprocess_name(["Lean", "profileitIOUnsafe",
                              "_redArg", "_lam_0"]),
            "Lean.profileitIOUnsafe [\u03bb, arity\u2193]")

    def test_private_name(self):
        self.assertEqual(
            postprocess_name(["_private", "Lean", "Meta", "Basic", 0,
                              "Lean", "Meta", "withMVarContextImp"]),
            "Lean.Meta.withMVarContextImp [private]")

    def test_private_with_suffix(self):
        self.assertEqual(
            postprocess_name(["_private", "Lean", "Meta", "Basic", 0,
                              "Lean", "Meta", "foo", "_redArg"]),
            "Lean.Meta.foo [arity\u2193, private]")

    def test_hygienic_strip(self):
        # Macro-hygiene components (`_@` ... `_hyg` N) are dropped.
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "foo", "_@", "Lean", "Meta",
                              "_hyg", 42]),
            "Lean.Meta.foo")

    def test_specialization(self):
        self.assertEqual(
            postprocess_name(["List", "map", "_at_", "Foo", "bar",
                              "_spec", 3]),
            "List.map spec at Foo.bar")

    def test_specialization_with_suffix(self):
        # Base suffix _boxed appears in [flags] before spec at
        self.assertEqual(
            postprocess_name(["Lean", "MVarId", "withContext", "_at_",
                              "Foo", "bar", "_spec", 2, "_boxed"]),
            "Lean.MVarId.withContext [boxed] spec at Foo.bar")

    def test_spec_context_with_flags(self):
        # Compiler suffixes in spec context become context flags
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "foo", "_at_",
                              "Lean", "Meta", "bar", "_elam_1", "_redArg",
                              "_spec", 2]),
            "Lean.Meta.foo spec at Lean.Meta.bar[\u03bb, arity\u2193]")

    def test_spec_context_flags_dedup(self):
        # Duplicate flag labels are deduplicated
        self.assertEqual(
            postprocess_name(["f", "_at_",
                              "g", "_lam_0", "_elam_1", "_redArg",
                              "_spec", 1]),
            "f spec at g[\u03bb, arity\u2193]")

    def test_multiple_at(self):
        # Multiple _at_ entries become separate spec at clauses
        self.assertEqual(
            postprocess_name(["f", "_at_", "g", "_spec", 1,
                              "_at_", "h", "_spec", 2]),
            "f spec at g spec at h")

    def test_multiple_at_with_flags(self):
        # Multiple spec at with flags on base and contexts
        self.assertEqual(
            postprocess_name(["f", "_at_", "g", "_redArg", "_spec", 1,
                              "_at_", "h", "_lam_0", "_spec", 2,
                              "_boxed"]),
            "f [boxed] spec at g[arity\u2193] spec at h[\u03bb]")

    def test_base_flags_before_spec(self):
        # Base trailing suffixes appear in [flags] before spec at
        self.assertEqual(
            postprocess_name(["f", "_at_", "g", "_spec", 1, "_lam_0"]),
            "f [\u03bb] spec at g")

    def test_spec_context_strip_spec_suffixes(self):
        # spec_0 in context should be stripped
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "transformWithCache", "visit",
                              "_at_",
                              "_private", "Lean", "Meta", "Transform", 0,
                              "Lean", "Meta", "transform",
                              "Lean", "Meta", "Sym", "unfoldReducible",
                              "spec_0", "spec_0",
                              "_spec", 1]),
            "Lean.Meta.transformWithCache.visit "
            "spec at Lean.Meta.transform.Lean.Meta.Sym.unfoldReducible")

    def test_spec_context_strip_private(self):
        # _private in spec context should be stripped
        self.assertEqual(
            postprocess_name(["Array", "mapMUnsafe", "map", "_at_",
                              "_private", "Lean", "Meta", "Transform", 0,
                              "Lean", "Meta", "transformWithCache", "visit",
                              "_spec", 1]),
            "Array.mapMUnsafe.map "
            "spec at Lean.Meta.transformWithCache.visit")

    def test_empty(self):
        self.assertEqual(postprocess_name([]), "")
class TestDemangleHumanFriendly(unittest.TestCase):
    """Test demangle_lean_name (human-friendly output).

    End-to-end: mangled C symbol in, postprocessed readable name out.
    """

    def test_simple(self):
        self.assertEqual(demangle_lean_name("l_Lean_Meta_main"),
                         "Lean.Meta.main")

    def test_boxed(self):
        self.assertEqual(demangle_lean_name("l_foo___boxed"),
                         "foo [boxed]")

    def test_redArg(self):
        self.assertEqual(demangle_lean_name("l_foo___redArg"),
                         "foo [arity\u2193]")

    def test_private(self):
        self.assertEqual(
            demangle_lean_name(
                "l___private_Lean_Meta_Basic_0__Lean_Meta_foo"),
            "Lean.Meta.foo [private]")

    def test_private_with_redArg(self):
        self.assertEqual(
            demangle_lean_name(
                "l___private_Lean_Meta_Basic_0__Lean_Meta_foo___redArg"),
            "Lean.Meta.foo [arity\u2193, private]")

    def test_cold_with_suffix(self):
        self.assertEqual(
            demangle_lean_name("l_Lean_Meta_foo___redArg.cold.1"),
            "Lean.Meta.foo [arity\u2193] .cold.1")

    def test_lean_apply(self):
        # Runtime apply trampolines get a compact <apply/N> rendering.
        self.assertEqual(demangle_lean_name("lean_apply_5"), "<apply/5>")
        self.assertEqual(demangle_lean_name("lean_apply_12"), "<apply/12>")

    def test_lean_apply_raw_unchanged(self):
        # Raw mode leaves runtime symbols alone.
        self.assertEqual(demangle_lean_name_raw("lean_apply_5"),
                         "lean_apply_5")

    def test_init_private(self):
        self.assertEqual(
            demangle_lean_name(
                "_init_l___private_X_0__Y_foo"),
            "[init] Y.foo [private]")

    def test_complex_specialization(self):
        components = [
            "Lean", "MVarId", "withContext", "_at_",
            "_private", "Lean", "Meta", "Sym", 0,
            "Lean", "Meta", "Sym", "BackwardRule", "apply",
            "_spec", 2, "_redArg", "_lambda", 0, "_boxed"
        ]
        mangled = mangle_name(components)
        result = demangle_lean_name(mangled)
        # Base: Lean.MVarId.withContext with trailing _redArg, _lambda 0, _boxed
        # Spec context: Lean.Meta.Sym.BackwardRule.apply (private stripped)
        self.assertEqual(
            result,
            "Lean.MVarId.withContext [boxed, \u03bb, arity\u2193] "
            "spec at Lean.Meta.Sym.BackwardRule.apply")

    def test_non_lean_unchanged(self):
        self.assertEqual(demangle_lean_name("printf"), "printf")
        self.assertEqual(demangle_lean_name("malloc"), "malloc")
        self.assertEqual(demangle_lean_name(""), "")
class TestDemangleProfile(unittest.TestCase):
    """Test the profile rewriter.

    Builds minimal profiles in both string-table layouts used by the
    profiler format (newer shared table vs. per-thread tables) and checks
    that Lean symbols in the string array are demangled in place.
    """

    def _make_profile_shared(self, strings):
        """Create a profile with shared.stringArray (newer format)."""
        return {
            "meta": {"version": 28},
            "libs": [],
            "shared": {
                "stringArray": list(strings),
            },
            "threads": [{
                "name": "main",
                "pid": "1",
                "tid": 1,
                # funcTable.name holds indices into the string array.
                "funcTable": {
                    "name": list(range(len(strings))),
                    "isJS": [False] * len(strings),
                    "relevantForJS": [False] * len(strings),
                    "resource": [-1] * len(strings),
                    "fileName": [None] * len(strings),
                    "lineNumber": [None] * len(strings),
                    "columnNumber": [None] * len(strings),
                    "length": len(strings),
                },
                "frameTable": {"length": 0},
                "stackTable": {"length": 0},
                "samples": {"length": 0},
                "markers": {"length": 0},
                "resourceTable": {"length": 0},
                "nativeSymbols": {"length": 0},
            }],
            "pages": [],
            "counters": [],
        }

    def _make_profile_per_thread(self, strings):
        """Create a profile with per-thread stringArray (samply format)."""
        return {
            "meta": {"version": 28},
            "libs": [],
            "threads": [{
                "name": "main",
                "pid": "1",
                "tid": 1,
                "stringArray": list(strings),
                "funcTable": {
                    "name": list(range(len(strings))),
                    "isJS": [False] * len(strings),
                    "relevantForJS": [False] * len(strings),
                    "resource": [-1] * len(strings),
                    "fileName": [None] * len(strings),
                    "lineNumber": [None] * len(strings),
                    "columnNumber": [None] * len(strings),
                    "length": len(strings),
                },
                "frameTable": {"length": 0},
                "stackTable": {"length": 0},
                "samples": {"length": 0},
                "markers": {"length": 0},
                "resourceTable": {"length": 0},
                "nativeSymbols": {"length": 0},
            }],
            "pages": [],
            "counters": [],
        }

    def test_profile_rewrite_shared(self):
        from lean_demangle_profile import rewrite_profile
        strings = [
            "l_Lean_Meta_Sym_main",
            "printf",
            "lean_apply_5",
            "l___private_Lean_Meta_Basic_0__Lean_Meta_foo",
        ]
        profile = self._make_profile_shared(strings)
        rewrite_profile(profile)
        sa = profile["shared"]["stringArray"]
        # Lean symbols are rewritten; non-Lean strings are untouched.
        self.assertEqual(sa[0], "Lean.Meta.Sym.main")
        self.assertEqual(sa[1], "printf")
        self.assertEqual(sa[2], "<apply/5>")
        self.assertEqual(sa[3], "Lean.Meta.foo [private]")

    def test_profile_rewrite_per_thread(self):
        from lean_demangle_profile import rewrite_profile
        strings = [
            "l_Lean_Meta_Sym_main",
            "printf",
            "lean_apply_5",
            "l___private_Lean_Meta_Basic_0__Lean_Meta_foo",
        ]
        profile = self._make_profile_per_thread(strings)
        count = rewrite_profile(profile)
        sa = profile["threads"][0]["stringArray"]
        self.assertEqual(sa[0], "Lean.Meta.Sym.main")
        self.assertEqual(sa[1], "printf")
        self.assertEqual(sa[2], "<apply/5>")
        self.assertEqual(sa[3], "Lean.Meta.foo [private]")
        # Three of the four strings changed ("printf" did not).
        self.assertEqual(count, 3)

    def test_profile_json_roundtrip(self):
        from lean_demangle_profile import process_profile_file
        strings = ["l_Lean_Meta_main", "malloc"]
        profile = self._make_profile_shared(strings)
        with tempfile.NamedTemporaryFile(mode='w', suffix='.json',
                                         delete=False) as f:
            json.dump(profile, f)
            inpath = f.name
        outpath = inpath.replace('.json', '-demangled.json')
        try:
            process_profile_file(inpath, outpath)
            with open(outpath) as f:
                result = json.load(f)
            self.assertEqual(result["shared"]["stringArray"][0],
                             "Lean.Meta.main")
            self.assertEqual(result["shared"]["stringArray"][1], "malloc")
        finally:
            os.unlink(inpath)
            if os.path.exists(outpath):
                os.unlink(outpath)

    def test_profile_gzip_roundtrip(self):
        from lean_demangle_profile import process_profile_file
        strings = ["l_Lean_Meta_main", "malloc"]
        profile = self._make_profile_shared(strings)
        with tempfile.NamedTemporaryFile(suffix='.json.gz',
                                         delete=False) as f:
            with gzip.open(f, 'wt') as gz:
                json.dump(profile, gz)
            inpath = f.name
        outpath = inpath.replace('.json.gz', '-demangled.json.gz')
        try:
            process_profile_file(inpath, outpath)
            with gzip.open(outpath, 'rt') as f:
                result = json.load(f)
            self.assertEqual(result["shared"]["stringArray"][0],
                             "Lean.Meta.main")
        finally:
            os.unlink(inpath)
            if os.path.exists(outpath):
                os.unlink(outpath)
# Allow running the test suite directly: `python <this file>`.
if __name__ == '__main__':
    unittest.main()

View File

@@ -1014,7 +1014,7 @@ def main():
version_minor_correct = False
is_release_correct = any(
l.strip().startswith("set(LEAN_VERSION_IS_RELEASE 0)")
re.match(r'set\(LEAN_VERSION_IS_RELEASE\s+0[\s)]', l.strip())
for l in cmake_lines
)

View File

@@ -87,6 +87,8 @@ option(USE_GITHASH "GIT_HASH" ON)
option(INSTALL_LICENSE "INSTALL_LICENSE" ON)
# When ON we install a copy of cadical
option(INSTALL_CADICAL "Install a copy of cadical" ON)
# When ON we install a copy of leantar
option(INSTALL_LEANTAR "Install a copy of leantar" ON)
# FLAGS for disabling optimizations and debugging
option(FREE_VAR_RANGE_OPT "FREE_VAR_RANGE_OPT" ON)
@@ -757,6 +759,14 @@ if(STAGE GREATER 0 AND CADICAL AND INSTALL_CADICAL)
add_dependencies(leancpp copy-cadical)
endif()
if(STAGE GREATER 0 AND LEANTAR AND INSTALL_LEANTAR)
  # Stage the leantar binary into the build's bin/ alongside the other tools.
  # Use ${CMAKE_COMMAND} instead of a bare `cmake` so the same CMake that is
  # configuring the build performs the copy; VERBATIM for portable escaping.
  add_custom_target(
    copy-leantar
    COMMAND "${CMAKE_COMMAND}" -E copy_if_different "${LEANTAR}" "${CMAKE_BINARY_DIR}/bin/leantar${CMAKE_EXECUTABLE_SUFFIX}"
    VERBATIM
  )
  add_dependencies(leancpp copy-leantar)
endif()
# MSYS2 bash usually handles Windows paths relatively well, but not when putting them in the PATH
string(REGEX REPLACE "^([a-zA-Z]):" "/\\1" LEAN_BIN "${CMAKE_BINARY_DIR}/bin")
@@ -913,6 +923,10 @@ if(STAGE GREATER 0 AND CADICAL AND INSTALL_CADICAL)
install(PROGRAMS "${CADICAL}" DESTINATION bin)
endif()
# Ship the leantar executable with the installed toolchain's bin/.
if(STAGE GREATER 0 AND LEANTAR AND INSTALL_LEANTAR)
  install(PROGRAMS "${LEANTAR}" DESTINATION bin)
endif()
add_custom_target(
clean-stdlib
COMMAND rm -rf "${CMAKE_BINARY_DIR}/lib" || true

View File

@@ -69,9 +69,11 @@ theorem em (p : Prop) : p ¬p :=
theorem exists_true_of_nonempty {α : Sort u} : Nonempty α _ : α, True
| x => x, trivial
@[implicit_reducible]
noncomputable def inhabited_of_nonempty {α : Sort u} (h : Nonempty α) : Inhabited α :=
choice h
@[implicit_reducible]
noncomputable def inhabited_of_exists {α : Sort u} {p : α Prop} (h : x, p x) : Inhabited α :=
inhabited_of_nonempty (Exists.elim h (fun w _ => w))
@@ -81,6 +83,7 @@ noncomputable scoped instance (priority := low) propDecidable (a : Prop) : Decid
| Or.inl h => isTrue h
| Or.inr h => isFalse h
@[implicit_reducible]
noncomputable def decidableInhabited (a : Prop) : Inhabited (Decidable a) where
default := inferInstance

View File

@@ -18,3 +18,4 @@ public import Init.Control.StateCps
public import Init.Control.ExceptCps
public import Init.Control.MonadAttach
public import Init.Control.EState
public import Init.Control.Do

View File

@@ -49,6 +49,7 @@ instance : Monad Id where
/--
The identity monad has a `bind` operator.
-/
@[implicit_reducible]
def hasBind : Bind Id :=
inferInstance
@@ -58,7 +59,7 @@ Runs a computation in the identity monad.
This function is the identity function. Because its parameter has type `Id α`, it causes
`do`-notation in its arguments to use the `Monad Id` instance.
-/
@[always_inline, inline, expose]
@[always_inline, inline, expose, implicit_reducible]
protected def run (x : Id α) : α := x
instance [OfNat α n] : OfNat (Id α) n :=

View File

@@ -280,7 +280,7 @@ resulting in `t'`, which becomes the new target subgoal. -/
syntax (name := convConvSeq) "conv" " => " convSeq : conv
/-- `· conv` focuses on the main conv goal and tries to solve it using `s`. -/
macro dot:patternIgnore("· " <|> ". ") s:convSeq : conv => `(conv| {%$dot ($s) })
macro dot:unicode("· ", ". ") s:convSeq : conv => `(conv| {%$dot ($s) })
/-- `fail_if_success t` fails if the tactic `t` succeeds. -/

View File

@@ -34,3 +34,4 @@ public import Init.Data.Array.MinMax
public import Init.Data.Array.Nat
public import Init.Data.Array.Int
public import Init.Data.Array.Count
public import Init.Data.Array.Sort

View File

@@ -148,6 +148,9 @@ end List
namespace Array
@[simp, grind =] theorem getElem!_toList [Inhabited α] {xs : Array α} {i : Nat} : xs.toList[i]! = xs[i]! := by
rw [List.getElem!_toArray]
theorem size_eq_length_toList {xs : Array α} : xs.size = xs.toList.length := rfl
/-! ### Externs -/

View File

@@ -78,7 +78,7 @@ private theorem cons_lex_cons [BEq α] {lt : αα → Bool} {a b : α} {xs
simp only [lex, size_append, List.size_toArray, List.length_cons, List.length_nil, Nat.zero_add,
Nat.add_min_add_left, Nat.add_lt_add_iff_left, Std.Rco.forIn'_eq_forIn'_toList]
rw [cons_lex_cons.forIn'_congr_aux (Nat.toList_rco_eq_cons (by omega)) rfl (fun _ _ _ => rfl)]
simp only [bind_pure_comp, map_pure, Nat.toList_rco_succ_succ, Nat.add_comm 1]
simp only [Nat.toList_rco_succ_succ, Nat.add_comm 1]
cases h : lt a b
· cases h' : a == b <;> simp [bne, *]
· simp [*]

View File

@@ -0,0 +1,10 @@
/-
Copyright (c) 2026 Lean FRO. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Array.Sort.Basic
public import Init.Data.Array.Sort.Lemmas

View File

@@ -0,0 +1,55 @@
/-
Copyright (c) 2026 Lean FRO. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Array.Subarray.Split
public import Init.Data.Slice.Array
import Init.Omega
public section
/-- Merges two arrays using `le`, appending leftovers once either side is
exhausted. NOTE(review): the `→`/`≤` glyphs in the signatures below were
lost in transit and have been restored — confirm against upstream. -/
private def Array.MergeSort.Internal.merge (xs ys : Array α) (le : α → α → Bool := by exact (· ≤ ·)) :
    Array α :=
  if hxs : 0 < xs.size then
    if hys : 0 < ys.size then
      go xs[*...*] ys[*...*] (by simp only [Array.size_mkSlice_rii]; omega) (by simp only [Array.size_mkSlice_rii]; omega) (Array.emptyWithCapacity (xs.size + ys.size))
    else
      xs
  else
    ys
where
  go (xs ys : Subarray α) (hxs : 0 < xs.size) (hys : 0 < ys.size) (acc : Array α) : Array α :=
    let x := xs[0]
    let y := ys[0]
    if le x y then
      if hi : 1 < xs.size then
        go (xs.drop 1) ys (by simp only [Subarray.size_drop]; omega) hys (acc.push x)
      else
        -- `xs` is exhausted after `x`; flush the rest of `ys`.
        ys.foldl (init := acc.push x) (fun acc y => acc.push y)
    else
      if hj : 1 < ys.size then
        go xs (ys.drop 1) hxs (by simp only [Subarray.size_drop]; omega) (acc.push y)
      else
        -- `ys` is exhausted after `y`; flush the rest of `xs`.
        xs.foldl (init := acc.push y) (fun acc x => acc.push x)
  termination_by xs.size + ys.size
/-- Sorts a subarray by recursive splitting and merging.
NOTE(review): restored `→`/`≤` glyphs in the signature — confirm upstream. -/
def Subarray.mergeSort (xs : Subarray α) (le : α → α → Bool := by exact (· ≤ ·)) : Array α :=
  if h : 1 < xs.size then
    let splitIdx := (xs.size + 1) / 2 -- We follow the same splitting convention as `List.mergeSort`
    let left := xs[*...splitIdx]
    let right := xs[splitIdx...*]
    Array.MergeSort.Internal.merge (mergeSort left le) (mergeSort right le) le
  else
    xs.toArray
termination_by xs.size
decreasing_by
  · simp only [Subarray.size_mkSlice_rio]; omega
  · simp only [Subarray.size_mkSlice_rci]; omega
/-- Sorts an array via `Subarray.mergeSort` over its full range.
NOTE(review): restored `→`/`≤` glyphs in the signature — confirm upstream. -/
@[inline]
def Array.mergeSort (xs : Array α) (le : α → α → Bool := by exact (· ≤ ·)) : Array α :=
  xs[*...*].mergeSort le

View File

@@ -0,0 +1,240 @@
/-
Copyright (c) 2026 Lean FRO. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Array.Sort.Basic
public import Init.Data.List.Sort.Basic
public import Init.Data.Array.Perm
import all Init.Data.Array.Sort.Basic
import all Init.Data.List.Sort.Basic
import Init.Data.List.Sort.Lemmas
import Init.Data.Slice.Array.Lemmas
import Init.Data.Slice.List.Lemmas
import Init.Data.Array.Bootstrap
import Init.Data.Array.Lemmas
import Init.Data.Array.MapIdx
import Init.ByCases
public section
/--
Unfolding lemma for the merge loop: the element list produced by
`Array.MergeSort.Internal.merge.go` is the accumulator's list followed by the
`List.merge` of the two remaining subarrays' lists.

Proved by functional induction on `merge.go`; the four top-level cases mirror
the four branches of the loop (recursive step on `xs`, flush of `ys`,
recursive step on `ys`, flush of `xs`). In each case the nonempty-size
hypotheses rule out the empty branches of `List.merge`.
-/
private theorem Array.MergeSort.merge.go_eq_listMerge {xs ys : Subarray α} {hxs hys le acc} :
    (Array.MergeSort.Internal.merge.go le xs ys hxs hys acc).toList = acc.toList ++ List.merge xs.toList ys.toList le := by
  fun_induction Array.MergeSort.Internal.merge.go le xs ys hxs hys acc
  -- Case: `le x y` holds and `xs` still has further elements (recursive step).
  · rename_i xs ys _ _ _ _ _ _ _ _
    rw [List.merge.eq_def]
    split
    · have : xs.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · have : ys.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · rename_i x' xs' y' ys' _ _
      simp +zetaDelta only at *
      have h₁ : x' = xs[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      have h₂ : y' = ys[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      cases h₁
      cases h₂
      simp [Subarray.toList_drop, *]
  -- Case: `le x y` holds and `xs` has exactly one element (flush of `ys`).
  · rename_i xs ys _ _ _ _ _ _ _
    rw [List.merge.eq_def]
    split
    · have : xs.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · have : ys.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · rename_i x' xs' y' ys' _ _
      simp +zetaDelta only at *
      have h₁ : x' = xs[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      have h₂ : y' = ys[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      cases h₁
      cases h₂
      simp [*]
      have : xs.size = xs'.length + 1 := by simp [ Subarray.length_toList, *]
      have : xs' = [] := List.eq_nil_of_length_eq_zero (by omega)
      simp only [this]
      rw [ Subarray.foldl_toList]
      simp [*]
  -- Case: `le x y` fails and `ys` still has further elements (recursive step).
  · rename_i xs ys _ _ _ _ _ _ _ _
    rw [List.merge.eq_def]
    split
    · have : xs.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · have : ys.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · rename_i x' xs' y' ys' _ _
      simp +zetaDelta only at *
      have h₁ : x' = xs[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      have h₂ : y' = ys[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      cases h₁
      cases h₂
      simp [Subarray.toList_drop, *]
  -- Case: `le x y` fails and `ys` has exactly one element (flush of `xs`).
  · rename_i xs ys _ _ _ _ _ _ _
    rw [List.merge.eq_def]
    split
    · have : xs.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · have : ys.size = 0 := by simp [ Subarray.length_toList, *]
      omega
    · rename_i x' xs' y' ys' _ _
      simp +zetaDelta only at *
      have h₁ : x' = xs[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      have h₂ : y' = ys[0] := by simp [Subarray.getElem_eq_getElem_toList, *]
      cases h₁
      cases h₂
      simp [*]
      have : ys.size = ys'.length + 1 := by simp [ Subarray.length_toList, *]
      have : ys' = [] := List.eq_nil_of_length_eq_zero (by omega)
      simp [this]
      rw [ Subarray.foldl_toList]
      simp [*]
/--
`Array.MergeSort.Internal.merge` agrees with `List.merge` on the underlying
lists. The main loop is handled by `merge.go_eq_listMerge`; the empty-input
branches are discharged directly.
-/
private theorem Array.MergeSort.merge_eq_listMerge {xs ys : Array α} {le} :
    (Array.MergeSort.Internal.merge xs ys le).toList = List.merge xs.toList ys.toList le := by
  rw [Array.MergeSort.Internal.merge]
  split <;> rename_i heq₁
  · split <;> rename_i heq₂
    · simp [Array.MergeSort.merge.go_eq_listMerge]
    · -- `ys` is empty, so `merge` returns `xs` and `List.merge` degenerates.
      have : ys.toList = [] := by simp_all
      simp [this]
  · -- `xs` is empty, so `merge` returns `ys`.
    have : xs.toList = [] := by simp_all
    simp [this]
/--
Re-expresses one unfolding step of `List.mergeSort` using the same split
point (`(length + 1) / 2`) and slice notation as `Subarray.mergeSort`, so
that the array- and list-level sorts can be related by induction.
-/
private theorem List.mergeSort_eq_merge_mkSlice {xs : List α} :
    xs.mergeSort le =
    if 1 < xs.length then
      merge (xs[*...((xs.length + 1) / 2)].toList.mergeSort le) (xs[((xs.length + 1) / 2)...*].toList.mergeSort le) le
    else
      xs := by
  fun_cases xs.mergeSort le
  · simp
  · simp
  · rename_i x y ys lr hl hr
    simp [lr]
/--
The array produced by `Subarray.mergeSort` carries exactly `List.mergeSort`
of the underlying list.
-/
theorem Subarray.toList_mergeSort {xs : Subarray α} {le : α α Bool} :
    (xs.mergeSort le).toList = xs.toList.mergeSort le := by
  fun_induction xs.mergeSort le
  · rw [List.mergeSort_eq_merge_mkSlice]
    simp +zetaDelta [Array.MergeSort.merge_eq_listMerge, *]
  · simp [List.mergeSort_eq_merge_mkSlice, *]
/-- Sorting a subarray is the same as sorting the array it converts to. -/
@[simp, grind =]
theorem Subarray.mergeSort_eq_mergeSort_toArray {xs : Subarray α} {le : α α Bool} :
    xs.mergeSort le = xs.toArray.mergeSort le := by
  simp [ Array.toList_inj, toList_mergeSort, Array.mergeSort]
/-- Symmetric companion of `mergeSort_eq_mergeSort_toArray`. -/
theorem Subarray.mergeSort_toArray {xs : Subarray α} {le : α α Bool} :
    xs.toArray.mergeSort le = xs.mergeSort le := by
  simp
/-- `Array.mergeSort` agrees with `List.mergeSort` on `toList`. -/
theorem Array.toList_mergeSort {xs : Array α} {le : α α Bool} :
    (xs.mergeSort le).toList = xs.toList.mergeSort le := by
  rw [Array.mergeSort, Subarray.toList_mergeSort, Array.toList_mkSlice_rii]
/--
Characterization of `Array.mergeSort` as the list-level sort transported back
to arrays. Used below to transfer `List.mergeSort` lemmas.
-/
theorem Array.mergeSort_eq_toArray_mergeSort_toList {xs : Array α} {le : α α Bool} :
    xs.mergeSort le = (xs.toList.mergeSort le).toArray := by
  simp [ toList_mergeSort]
/-!
# Basic properties of `Array.mergeSort`.
* `pairwise_mergeSort`: `mergeSort` produces a sorted array.
* `mergeSort_perm`: `mergeSort` is a permutation of the input array.
* `mergeSort_of_pairwise`: `mergeSort` does not change a sorted array.
* `sublist_mergeSort`: if `c` is a sorted sublist of `xs.toList`, then `c` is still a sublist of `(xs.mergeSort le).toList`.
-/
namespace Array
-- Enable this instance locally so we can write `Pairwise le` instead of `Pairwise (le · ·)` everywhere.
attribute [local instance] boolRelToRel
/-- Sorting the empty array yields the empty array. -/
@[simp] theorem mergeSort_empty : (#[] : Array α).mergeSort r = #[] := by
  simp [mergeSort_eq_toArray_mergeSort_toList]
/-- Sorting a one-element array is the identity. -/
@[simp] theorem mergeSort_singleton {a : α} : #[a].mergeSort r = #[a] := by
  simp [mergeSort_eq_toArray_mergeSort_toList]
/-- `mergeSort` is a permutation of the input array. -/
theorem mergeSort_perm {xs : Array α} {le} : (xs.mergeSort le).Perm xs := by
  simpa [mergeSort_eq_toArray_mergeSort_toList, Array.perm_iff_toList_perm] using List.mergeSort_perm _ _
/-- Sorting preserves the size of the array. -/
@[simp] theorem size_mergeSort {xs : Array α} : (mergeSort xs le).size = xs.size := by
  simp [mergeSort_eq_toArray_mergeSort_toList]
/-- Sorting preserves membership. -/
@[simp] theorem mem_mergeSort {a : α} {xs : Array α} : a mergeSort xs le a xs := by
  simp [mergeSort_eq_toArray_mergeSort_toList]
/--
The result of `Array.mergeSort` is sorted,
as long as the comparison function is transitive (`le a b → le b c → le a c`)
and total in the sense that `le a b || le b a`.
The comparison function need not be irreflexive, i.e. `le a b` and `le b a` is allowed even when `a ≠ b`.
-/
theorem pairwise_mergeSort
    (trans : (a b c : α), le a b le b c le a c)
    (total : (a b : α), le a b || le b a)
    {xs : Array α} :
    (mergeSort xs le).toList.Pairwise (le · ·) := by
  -- Transfer `List.pairwise_mergeSort` along the list-level characterization.
  simpa [mergeSort_eq_toArray_mergeSort_toList] using List.pairwise_mergeSort trans total _
/--
If the input array is already sorted, then `mergeSort` does not change the array.
-/
theorem mergeSort_of_pairwise {le : α α Bool} {xs : Array α} (_ : xs.toList.Pairwise (le · ·)) :
    mergeSort xs le = xs := by
  -- Follows from the corresponding list-level fact via `toList`/`toArray`.
  simpa [mergeSort_eq_toArray_mergeSort_toList, List.toArray_eq_iff] using List.mergeSort_of_pairwise _
/--
This merge sort algorithm is stable,
in the sense that breaking ties in the ordering function using the position in the array
has no effect on the output.
That is, elements which are equal with respect to the ordering function will remain
in the same order in the output array as they were in the input array.
See also:
* `sublist_mergeSort`: if `c <+ l` and `c.Pairwise le`, then `c <+ (mergeSort le l).toList`.
* `pair_sublist_mergeSort`: if `[a, b] <+ l` and `le a b`, then `[a, b] <+ (mergeSort le l).toList`.
-/
theorem mergeSort_zipIdx {xs : Array α} :
    (mergeSort (xs.zipIdx.map fun (a, i) => (a, i)) (List.zipIdxLE le)).map (·.1) = mergeSort xs le := by
  simpa [mergeSort_eq_toArray_mergeSort_toList, Array.toList_zipIdx] using List.mergeSort_zipIdx
/--
Another statement of stability of merge sort.
If `c` is a sorted sublist of `xs.toList`,
then `c` is still a sublist of `(mergeSort le xs).toList`.
-/
theorem sublist_mergeSort {le : α α Bool}
    (trans : (a b c : α), le a b le b c le a c)
    (total : (a b : α), le a b || le b a)
    {ys : List α} (_ : ys.Pairwise (le · ·)) (_ : List.Sublist ys xs.toList) :
    List.Sublist ys (mergeSort xs le).toList := by
  -- NOTE(review): `Array.toList_zipIdx` in this simp set looks copy-pasted
  -- from `mergeSort_zipIdx` and may be redundant here — confirm.
  simpa [mergeSort_eq_toArray_mergeSort_toList, Array.toList_zipIdx] using
    List.sublist_mergeSort trans total _ _
/--
Another statement of stability of merge sort.
If a pair `[a, b]` is a sublist of `xs.toList` and `le a b`,
then `[a, b]` is still a sublist of `(mergeSort le xs).toList`.
-/
theorem pair_sublist_mergeSort
    (trans : (a b c : α), le a b le b c le a c)
    (total : (a b : α), le a b || le b a)
    (hab : le a b) (h : List.Sublist [a, b] xs.toList) :
    List.Sublist [a, b] (mergeSort xs le).toList := by
  -- NOTE(review): `Array.toList_zipIdx` in this simp set looks copy-pasted
  -- from `mergeSort_zipIdx` and may be redundant here — confirm.
  simpa [mergeSort_eq_toArray_mergeSort_toList, Array.toList_zipIdx] using
    List.pair_sublist_mergeSort trans total _ _
/--
Mapping commutes with sorting, provided `f` translates the comparison `r` on
the source into the comparison `s` on the image for all pairs of elements.
-/
theorem map_mergeSort {r : α α Bool} {s : β β Bool} {f : α β}
    {xs : Array α} (hxs : a xs, b xs, r a b = s (f a) (f b)) :
    (xs.mergeSort r).map f = (xs.map f).mergeSort s := by
  simp only [mergeSort_eq_toArray_mergeSort_toList, List.map_toArray, toList_map, mk.injEq]
  apply List.map_mergeSort
  simpa
end Array

View File

@@ -629,6 +629,7 @@ export Bool (cond_eq_if cond_eq_ite xor and or not)
This should not be turned on globally as an instance because it degrades performance in Mathlib,
but may be used locally.
-/
@[implicit_reducible]
def boolPredToPred : Coe (α Bool) (α Prop) where
coe r := fun a => Eq (r a) true

View File

@@ -129,6 +129,14 @@ The ASCII digits are the following: `0123456789`.
@[inline] def isDigit (c : Char) : Bool :=
c.val '0'.val && c.val '9'.val
/--
Returns `true` if the character is an ASCII hexadecimal digit.
The ASCII hexadecimal digits are the following: `0123456789abcdefABCDEF`.
-/
@[inline] def isHexDigit (c : Char) : Bool :=
  -- A decimal digit, or in the range 'a'..'f', or in the range 'A'..'F'.
  c.isDigit || (c.val 'a'.val && c.val 'f'.val) || (c.val 'A'.val && c.val 'F'.val)
/--
Returns `true` if the character is an ASCII letter or digit.

View File

@@ -62,7 +62,7 @@ instance ltTrichotomous : Std.Trichotomous (· < · : Char → Char → Prop) wh
trichotomous _ _ h₁ h₂ := Char.le_antisymm (by simpa using h₂) (by simpa using h₁)
@[deprecated ltTrichotomous (since := "2025-10-27")]
def notLTAntisymm : Std.Antisymm (¬ · < · : Char Char Prop) where
theorem notLTAntisymm : Std.Antisymm (¬ · < · : Char Char Prop) where
antisymm := Char.ltTrichotomous.trichotomous
instance ltAsymm : Std.Asymm (· < · : Char Char Prop) where
@@ -73,7 +73,7 @@ instance leTotal : Std.Total (· ≤ · : Char → Char → Prop) where
-- This instance is useful while setting up instances for `String`.
@[deprecated ltAsymm (since := "2025-08-01")]
def notLTTotal : Std.Total (¬ · < · : Char Char Prop) where
theorem notLTTotal : Std.Total (¬ · < · : Char Char Prop) where
total := fun x y => by simpa using Char.le_total y x
@[simp] theorem ofNat_toNat (c : Char) : Char.ofNat c.toNat = c := by

View File

@@ -6,6 +6,7 @@ Authors: Paul Reichert
module
prelude
public import Init.Data.Iterators.Combinators.Append
public import Init.Data.Iterators.Combinators.Monadic
public import Init.Data.Iterators.Combinators.FilterMap
public import Init.Data.Iterators.Combinators.FlatMap

View File

@@ -0,0 +1,79 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Iterators.Combinators.Monadic.Append
public section
namespace Std
open Std.Iterators Std.Iterators.Types
/--
Given two iterators `it₁` and `it₂`, `it₁.append it₂` is an iterator that first outputs all values
of `it₁` in order and then all values of `it₂` in order.
**Marble diagram:**
```text
it₁                 ---a----b---c--
it₂                                  --d--e--
it₁.append it₂      ---a----b---c-----d--e--
```
**Termination properties:**
* `Finite` instance: only if `it₁` and `it₂` are finite
* `Productive` instance: only if `it₁` and `it₂` are productive
Note: If `it₁` is not finite, then `it₁.append it₂` can be productive while `it₂` is not.
The standard library does not provide a `Productive` instance for this case.
**Performance:**
This combinator incurs an additional O(1) cost with each output of `it₁` and `it₂`.
-/
@[inline, expose]
def Iter.append {α₁ : Type w} {α₂ : Type w} {β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β]
    (it₁ : Iter (α := α₁) β) (it₂ : Iter (α := α₂) β) :
    Iter (α := Append α₁ α₂ Id β) β :=
  -- Delegate to the monadic combinator over `Id`, then convert back.
  (it₁.toIterM.append it₂.toIterM).toIter
/--
This combinator is only useful for advanced use cases.
Given an iterator `it₂`, returns an iterator that behaves exactly like `it₂` but is of the same
type as `it₁.append it₂` (after `it₁` has been exhausted).
This is useful for constructing intermediate states of the append iterator.
**Marble diagram:**
```text
it₂                        --a--b--
Iter.appendSnd α₁ it₂      --a--b--
```
**Termination properties:**
* `Finite` instance: only if `it₂` and iterators of type `α₁` are finite
* `Productive` instance: only if `it₂` and iterators of type `α₁` are productive
Note: If iterators of type `α₁` are not finite, then `append α₁ it₂` can be productive while `it₂` is not.
The standard library does not provide a `Productive` instance for this case.
**Performance:**
This combinator incurs an additional O(1) cost with each output of `it₂`.
-/
@[inline, expose]
def Iter.Intermediate.appendSnd {α₂ : Type w} {β : Type w}
    [Iterator α₂ Id β] (α₁ : Type w) (it₂ : Iter (α := α₂) β) :
    Iter (α := Append α₁ α₂ Id β) β :=
  -- Wrap the monadic intermediate state and convert back to a pure iterator.
  (IterM.Intermediate.appendSnd α₁ it₂.toIterM).toIter
end Std

View File

@@ -6,6 +6,7 @@ Authors: Paul Reichert
module
prelude
public import Init.Data.Iterators.Combinators.Monadic.Append
public import Init.Data.Iterators.Combinators.Monadic.FilterMap
public import Init.Data.Iterators.Combinators.Monadic.FlatMap
public import Init.Data.Iterators.Combinators.Monadic.Take

View File

@@ -0,0 +1,261 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Iterators.Consumers.Monadic.Loop
public import Init.Classical
import Init.Data.Option.Lemmas
import Init.ByCases
import Init.Omega
public section
/-!
This module provides the iterator combinator `IterM.append`.
-/
namespace Std
variable {α : Type w} {m : Type w Type w'} {β : Type w}
/--
The internal state of the `IterM.append` iterator combinator.
-/
inductive Iterators.Types.Append (α₁ α₂ : Type w) (m : Type w Type w') (β : Type w) where
  -- Still draining the first iterator; the second is stored for later.
  | fst : IterM (α := α₁) m β IterM (α := α₂) m β Append α₁ α₂ m β
  -- The first iterator has been exhausted; only the second remains.
  | snd : IterM (α := α₂) m β Append α₁ α₂ m β
open Std.Iterators Std.Iterators.Types
/--
Given two iterators `it₁` and `it₂`, `it₁.append it₂` is an iterator that first outputs all values
of `it₁` in order and then all values of `it₂` in order.
**Marble diagram:**
```text
it₁                 ---a----b---c--
it₂                                  --d--e--
it₁.append it₂      ---a----b---c-----d--e--
```
**Termination properties:**
* `Finite` instance: only if `it₁` and `it₂` are finite
* `Productive` instance: only if `it₁` and `it₂` are productive
Note: If `it₁` is not finite, then `it₁.append it₂` can be productive while `it₂` is not.
The standard library does not provide a `Productive` instance for this case.
**Performance:**
This combinator incurs an additional O(1) cost with each output of `it₁` and `it₂`.
-/
@[inline, expose]
def IterM.append [Iterator α₁ m β] [Iterator α₂ m β]
    (it₁ : IterM (α := α₁) m β) (it₂ : IterM (α := α₂) m β) :=
  -- Start in the `fst` state, holding both iterators.
  (Iterators.Types.Append.fst it₁ it₂ : IterM m β)
/--
This combinator is only useful for advanced use cases.
Given an iterator `it₂`, `IterM.Intermediate.appendSnd α₁ it₂` returns an iterator that behaves
exactly like `it₂` but has the same type as `it₁.append it₂` (after `it₁` has been exhausted).
This is useful for constructing intermediate states of the append iterator.
**Marble diagram:**
```text
it₂                                  --a--b--
IterM.Intermediate.appendSnd α₁ it₂  --a--b--
```
**Termination properties:**
* `Finite` instance: only if `it₂` and iterators of type `α₁` are finite
* `Productive` instance: only if `it₂` and iterators of type `α₁` are productive
Note: If iterators of type `α₁` are not finite, then `appendSnd α₁ it₂` can be productive
while `it₂` is not. The standard library does not provide a `Productive` instance for this case.
**Performance:**
This combinator incurs an additional O(1) cost with each output of `it₂`.
-/
@[inline, expose]
def IterM.Intermediate.appendSnd [Iterator α₂ m β] (α₁ : Type w) (it₂ : IterM (α := α₂) m β) :=
  -- Start directly in the `snd` state, as if the first iterator had finished.
  (Iterators.Types.Append.snd (α₁ := α₁) it₂ : IterM m β)
namespace Iterators.Types
/--
The plausible steps of the `append` combinator: in the `fst` state the
combinator mirrors plausible steps of the first iterator, switching to the
`snd` state (via a `.skip`) when the first iterator finishes; in the `snd`
state it mirrors plausible steps of the second iterator and terminates when
that one finishes.
-/
inductive Append.PlausibleStep [Iterator α₁ m β] [Iterator α₂ m β] :
    IterM (α := Append α₁ α₂ m β) m β IterStep (IterM (α := Append α₁ α₂ m β) m β) β Prop where
  | fstYield {it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
      it₁.IsPlausibleStep (.yield it₁' out) PlausibleStep (it₁.append it₂) (.yield (it₁'.append it₂) out)
  | fstSkip {it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
      it₁.IsPlausibleStep (.skip it₁') PlausibleStep (it₁.append it₂) (.skip (it₁'.append it₂))
  | fstDone {it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
      it₁.IsPlausibleStep .done PlausibleStep (it₁.append it₂) (.skip (IterM.Intermediate.appendSnd α₁ it₂))
  | sndYield {it₂ : IterM (α := α₂) m β} :
      it₂.IsPlausibleStep (.yield it₂' out)
        PlausibleStep (IterM.Intermediate.appendSnd α₁ it₂) (.yield (IterM.Intermediate.appendSnd α₁ it₂') out)
  | sndSkip {it₂ : IterM (α := α₂) m β} :
      it₂.IsPlausibleStep (.skip it₂')
        PlausibleStep (IterM.Intermediate.appendSnd α₁ it₂) (.skip (IterM.Intermediate.appendSnd α₁ it₂'))
  | sndDone {it₂ : IterM (α := α₂) m β} :
      it₂.IsPlausibleStep .done PlausibleStep (IterM.Intermediate.appendSnd α₁ it₂) .done
-- Iterator implementation: forward steps of whichever component is active.
-- When the first iterator reports `.done`, emit a `.skip` into the `snd`
-- state rather than terminating.
@[inline]
instance Append.instIterator [Monad m] [Iterator α₁ m β] [Iterator α₂ m β] :
    Iterator (Append α₁ α₂ m β) m β where
  IsPlausibleStep := Append.PlausibleStep
  step
    | .fst it₁ it₂ => do
      match ( it₁.step).inflate with
      | .yield it₁' out h => return .deflate <| .yield (it₁'.append it₂) out (.fstYield h)
      | .skip it₁' h => return .deflate <| .skip (it₁'.append it₂) (.fstSkip h)
      | .done h => return .deflate <| .skip (IterM.Intermediate.appendSnd α₁ it₂) (.fstDone h)
    | .snd it₂ => do
      match ( it₂.step).inflate with
      | .yield it₂' out h => return .deflate <| .yield (IterM.Intermediate.appendSnd α₁ it₂') out (.sndYield h)
      | .skip it₂' h => return .deflate <| .skip (IterM.Intermediate.appendSnd α₁ it₂') (.sndSkip h)
      | .done h => return .deflate <| .done (.sndDone h)
-- No specialized loop implementation; step through via the default one.
instance Append.instIteratorLoop {n : Type x Type x'} [Monad m] [Monad n]
    [Iterator α₁ m β] [Iterator α₂ m β] :
    IteratorLoop (Append α₁ α₂ m β) m n :=
  .defaultImplementation
section Finite
variable {α₁ : Type w} {α₂ : Type w} {m : Type w Type w'} {β : Type w}
-- Well-founded measure for `append` when both component iterators are
-- finite: lexicographic order on (optional measure of the first iterator,
-- measure of the second). `none` in the first component represents the
-- `snd` state, i.e. the first iterator is already exhausted.
variable (α₁ α₂ m β) in
def Append.Rel [Monad m] [Iterator α₁ m β] [Iterator α₂ m β] [Finite α₁ m] [Finite α₂ m] :
    IterM (α := Append α₁ α₂ m β) m β IterM (α := Append α₁ α₂ m β) m β Prop :=
  InvImage
    (Prod.Lex
      (Option.lt (InvImage IterM.TerminationMeasures.Finite.Rel IterM.finitelyManySteps))
      (InvImage IterM.TerminationMeasures.Finite.Rel IterM.finitelyManySteps))
    (fun it => match it.internalState with
      | .fst it₁ it₂ => (some it₁, it₂)
      | .snd it₂ => (none, it₂))
-- A step of the first iterator decreases the measure (first lex component).
theorem Append.rel_of_fst [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Finite α₁ m] [Finite α₂ m] {it₁ it₁' : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β}
    (h : it₁'.finitelyManySteps.Rel it₁.finitelyManySteps) :
    Append.Rel α₁ α₂ m β (it₁'.append it₂) (it₁.append it₂) := by
  exact Prod.Lex.left _ _ h
-- Switching from the `fst` state to the `snd` state decreases the measure
-- (`none < some _` in the first lex component).
theorem Append.rel_fstDone [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Finite α₁ m] [Finite α₂ m] {it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
    Append.Rel α₁ α₂ m β (IterM.Intermediate.appendSnd α₁ it₂) (it₁.append it₂) := by
  exact Prod.Lex.left _ _ trivial
-- A step of the second iterator decreases the measure (second lex component).
theorem Append.rel_of_snd [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Finite α₁ m] [Finite α₂ m] {it₂ it₂' : IterM (α := α₂) m β}
    (h : it₂'.finitelyManySteps.Rel it₂.finitelyManySteps) :
    Append.Rel α₁ α₂ m β (IterM.Intermediate.appendSnd α₁ it₂') (IterM.Intermediate.appendSnd α₁ it₂) := by
  exact Prod.Lex.right _ h
/--
Packages `Append.Rel` as a `FinitenessRelation`: the relation is well-founded
(lexicographic product of well-founded measures) and every plausible step
decreases it, yielding the `Finite` instance below.
-/
def Append.instFinitenessRelation [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Finite α₁ m] [Finite α₂ m] :
    FinitenessRelation (Append α₁ α₂ m β) m where
  Rel := Append.Rel α₁ α₂ m β
  wf := by
    apply InvImage.wf
    refine fun (a, b) => Prod.lexAccessible (WellFounded.apply ?_ a) (WellFounded.apply ?_) b
    · exact Option.wellFounded_lt <| InvImage.wf _ WellFoundedRelation.wf
    · exact InvImage.wf _ WellFoundedRelation.wf
  subrelation {it it'} h := by
    obtain step, h, h' := h
    -- Each plausible-step case maps to one of the `rel_*` lemmas above.
    cases h' <;> cases h
    case fstYield =>
      apply Append.rel_of_fst
      exact IterM.TerminationMeasures.Finite.rel_of_yield _
    case fstSkip =>
      apply Append.rel_of_fst
      exact IterM.TerminationMeasures.Finite.rel_of_skip _
    case fstDone =>
      exact Append.rel_fstDone
    case sndYield =>
      apply Append.rel_of_snd
      exact IterM.TerminationMeasures.Finite.rel_of_yield _
    case sndSkip =>
      apply Append.rel_of_snd
      exact IterM.TerminationMeasures.Finite.rel_of_skip _
/-- `append` is finite whenever both component iterators are finite. -/
@[no_expose]
public instance Append.instFinite [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Finite α₁ m] [Finite α₂ m] : Finite (Append α₁ α₂ m β) m :=
  .of_finitenessRelation instFinitenessRelation
end Finite
section Productive
variable {α₁ : Type w} {α₂ : Type w} {m : Type w Type w'} {β : Type w}
-- Well-founded measure for productivity: analogous to `Append.Rel`, but
-- counting skips (`finitelyManySkips`) instead of total steps.
variable (α₁ α₂ m β) in
def Append.ProductiveRel [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Productive α₁ m] [Productive α₂ m] :
    IterM (α := Append α₁ α₂ m β) m β IterM (α := Append α₁ α₂ m β) m β Prop :=
  InvImage
    (Prod.Lex
      (Option.lt (InvImage IterM.TerminationMeasures.Productive.Rel IterM.finitelyManySkips))
      (InvImage IterM.TerminationMeasures.Productive.Rel IterM.finitelyManySkips))
    (fun it => match it.internalState with
      | .fst it₁ it₂ => (some it₁, it₂)
      | .snd it₂ => (none, it₂))
-- A skip of the first iterator decreases the measure (first lex component).
theorem Append.productiveRel_of_fst [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Productive α₁ m] [Productive α₂ m] {it₁ it₁' : IterM (α := α₁) m β}
    {it₂ : IterM (α := α₂) m β}
    (h : it₁'.finitelyManySkips.Rel it₁.finitelyManySkips) :
    Append.ProductiveRel α₁ α₂ m β (it₁'.append it₂) (it₁.append it₂) := by
  exact Prod.Lex.left _ _ h
-- The `fst`-to-`snd` transition decreases the measure (`none < some _`).
theorem Append.productiveRel_fstDone [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Productive α₁ m] [Productive α₂ m] {it₁ : IterM (α := α₁) m β}
    {it₂ : IterM (α := α₂) m β} :
    Append.ProductiveRel α₁ α₂ m β (IterM.Intermediate.appendSnd α₁ it₂) (it₁.append it₂) := by
  exact Prod.Lex.left _ _ trivial
-- A skip of the second iterator decreases the measure (second lex component).
theorem Append.productiveRel_of_snd [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Productive α₁ m] [Productive α₂ m] {it₂ it₂' : IterM (α := α₂) m β}
    (h : it₂'.finitelyManySkips.Rel it₂.finitelyManySkips) :
    Append.ProductiveRel α₁ α₂ m β
      (IterM.Intermediate.appendSnd α₁ it₂') (IterM.Intermediate.appendSnd α₁ it₂) := by
  exact Prod.Lex.right _ h
/--
Packages `Append.ProductiveRel` as a `ProductivenessRelation`: the relation is
well-founded and every plausible skip decreases it, yielding the `Productive`
instance below. Only the skip-producing cases of `PlausibleStep` appear.
-/
private def Append.instProductivenessRelation [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Productive α₁ m] [Productive α₂ m] :
    ProductivenessRelation (Append α₁ α₂ m β) m where
  Rel := Append.ProductiveRel α₁ α₂ m β
  wf := by
    apply InvImage.wf
    refine fun (a, b) => Prod.lexAccessible (WellFounded.apply ?_ a) (WellFounded.apply ?_) b
    · exact Option.wellFounded_lt <| InvImage.wf _ WellFoundedRelation.wf
    · exact InvImage.wf _ WellFoundedRelation.wf
  subrelation {it it'} h := by
    cases h
    case fstSkip =>
      apply Append.productiveRel_of_fst
      exact IterM.TerminationMeasures.Productive.rel_of_skip _
    case fstDone =>
      exact Append.productiveRel_fstDone
    case sndSkip =>
      apply Append.productiveRel_of_snd
      exact IterM.TerminationMeasures.Productive.rel_of_skip _
/-- `append` is productive whenever both component iterators are productive. -/
instance Append.instProductive [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
    [Productive α₁ m] [Productive α₂ m] : Productive (Append α₁ α₂ m β) m :=
  .of_productivenessRelation instProductivenessRelation
end Productive
end Std.Iterators.Types

View File

@@ -362,8 +362,7 @@ def Flatten.instProductivenessRelation [Monad m] [Iterator α m (IterM (α := α
case innerDone =>
apply Flatten.productiveRel_of_right₂
@[no_expose]
public def Flatten.instProductive [Monad m] [Iterator α m (IterM (α := α₂) m β)] [Iterator α₂ m β]
public theorem Flatten.instProductive [Monad m] [Iterator α m (IterM (α := α₂) m β)] [Iterator α₂ m β]
[Finite α m] [Productive α₂ m] : Productive (Flatten α α₂ β m) m :=
.of_productivenessRelation instProductivenessRelation

View File

@@ -35,7 +35,7 @@ A `ForIn'` instance for iterators. Its generic membership relation is not easy t
so this is not marked as `instance`. This way, more convenient instances can be built on top of it
or future library improvements will make it more comfortable.
-/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def Iter.instForIn' {α : Type w} {β : Type w} {n : Type x Type x'} [Monad n]
[Iterator α Id β] [IteratorLoop α Id n] :
ForIn' n (Iter (α := α) β) β fun it out => it.IsPlausibleIndirectOutput out where
@@ -53,7 +53,7 @@ instance (α : Type w) (β : Type w) (n : Type x → Type x') [Monad n]
/--
An implementation of `for h : ... in ... do ...` notation for partial iterators.
-/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def Iter.Partial.instForIn' {α : Type w} {β : Type w} {n : Type x Type x'} [Monad n]
[Iterator α Id β] [IteratorLoop α Id n] :
ForIn' n (Iter.Partial (α := α) β) β fun it out => it.it.IsPlausibleIndirectOutput out where
@@ -71,7 +71,7 @@ instance (α : Type w) (β : Type w) (n : Type x → Type x') [Monad n]
A `ForIn'` instance for iterators that is guaranteed to terminate after finitely many steps.
It is not marked as an instance because the membership predicate is difficult to work with.
-/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def Iter.Total.instForIn' {α : Type w} {β : Type w} {n : Type x Type x'} [Monad n]
[Iterator α Id β] [IteratorLoop α Id n] [Finite α Id] :
ForIn' n (Iter.Total (α := α) β) β fun it out => it.it.IsPlausibleIndirectOutput out where

View File

@@ -159,7 +159,7 @@ This is the default implementation of the `IteratorLoop` class.
It simply iterates through the iterator using `IterM.step`. For certain iterators, more efficient
implementations are possible and should be used instead.
-/
@[always_inline, inline, expose]
@[always_inline, inline, expose, implicit_reducible]
def IteratorLoop.defaultImplementation {α : Type w} {m : Type w Type w'} {n : Type x Type x'}
[Monad n] [Iterator α m β] :
IteratorLoop α m n where
@@ -211,7 +211,7 @@ theorem IteratorLoop.wellFounded_of_productive {α β : Type w} {m : Type w →
/--
This `ForIn'`-style loop construct traverses a finite iterator using an `IteratorLoop` instance.
-/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def IteratorLoop.finiteForIn' {m : Type w Type w'} {n : Type x Type x'}
{α : Type w} {β : Type w} [Iterator α m β] [IteratorLoop α m n] [Monad n]
(lift : γ δ, (γ n δ) m γ n δ) :
@@ -224,7 +224,7 @@ A `ForIn'` instance for iterators. Its generic membership relation is not easy t
so this is not marked as `instance`. This way, more convenient instances can be built on top of it
or future library improvements will make it more comfortable.
-/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def IterM.instForIn' {m : Type w Type w'} {n : Type w Type w''}
{α : Type w} {β : Type w} [Iterator α m β] [IteratorLoop α m n] [Monad n]
[MonadLiftT m n] :
@@ -239,7 +239,7 @@ instance IterM.instForInOfIteratorLoop {m : Type w → Type w'} {n : Type w →
instForInOfForIn'
/-- Internal implementation detail of the iterator library. -/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def IterM.Partial.instForIn' {m : Type w Type w'} {n : Type w Type w''}
{α : Type w} {β : Type w} [Iterator α m β] [IteratorLoop α m n] [MonadLiftT m n] [Monad n] :
ForIn' n (IterM.Partial (α := α) m β) β fun it out => it.it.IsPlausibleIndirectOutput out where
@@ -247,7 +247,7 @@ def IterM.Partial.instForIn' {m : Type w → Type w'} {n : Type w → Type w''}
haveI := @IterM.instForIn'; forIn' it.it init f
/-- Internal implementation detail of the iterator library. -/
@[always_inline, inline]
@[always_inline, inline, expose, implicit_reducible]
def IterM.Total.instForIn' {m : Type w Type w'} {n : Type w Type w''}
{α : Type w} {β : Type w} [Iterator α m β] [IteratorLoop α m n] [MonadLiftT m n] [Monad n]
[Finite α m] :

View File

@@ -70,7 +70,7 @@ theorem LawfulMonadLiftFunction.lift_seqRight [LawfulMonad m] [LawfulMonad n]
abbrev idToMonad [Monad m] α : Type u (x : Id α) : m α :=
pure x.run
def LawfulMonadLiftFunction.idToMonad [Monad m] [LawfulMonad m] :
theorem LawfulMonadLiftFunction.idToMonad [LawfulMonad m] :
LawfulMonadLiftFunction (m := Id) (n := m) idToMonad where
lift_pure := by simp [Internal.idToMonad]
lift_bind := by simp [Internal.idToMonad]
@@ -95,7 +95,7 @@ instance [LawfulMonadLiftBindFunction (n := n) (fun _ _ f x => lift x >>= f)] [L
simpa using LawfulMonadLiftBindFunction.liftBind_bind (n := n)
(liftBind := fun _ _ f x => lift x >>= f) (β := β) (γ := γ) (δ := γ) pure x g
def LawfulMonadLiftBindFunction.id [Monad m] [LawfulMonad m] :
theorem LawfulMonadLiftBindFunction.id [LawfulMonad m] :
LawfulMonadLiftBindFunction (m := Id) (n := m) (fun _ _ f x => f x.run) where
liftBind_pure := by simp
liftBind_bind := by simp

View File

@@ -6,6 +6,7 @@ Authors: Paul Reichert
module
prelude
public import Init.Data.Iterators.Lemmas.Combinators.Append
public import Init.Data.Iterators.Lemmas.Combinators.Attach
public import Init.Data.Iterators.Lemmas.Combinators.Monadic
public import Init.Data.Iterators.Lemmas.Combinators.FilterMap

View File

@@ -0,0 +1,193 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Iterators.Combinators.Append
public import Init.Data.Iterators.Lemmas.Combinators.Monadic.Append
public import Init.Data.Iterators.Consumers.Collect
public import Init.Data.Iterators.Consumers.Access
import Init.Data.Iterators.Lemmas.Consumers.Collect
import Init.Data.Iterators.Lemmas.Consumers.Access
import Init.Data.Iterators.Lemmas.Basic
import Init.Omega
public section
namespace Std
open Std.Iterators Std.Iterators.Types
/-- `Iter.append` unfolds to the monadic `IterM.append` by definition. -/
theorem Iter.append_eq_toIter_append_toIterM {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β]
    {it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} :
    it₁.append it₂ = (it₁.toIterM.append it₂.toIterM).toIter :=
  rfl
/-- `Iter.Intermediate.appendSnd` unfolds to its monadic counterpart by definition. -/
theorem Iter.Intermediate.appendSnd_eq_toIter_appendSnd_toIterM {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β]
    {it₂ : Iter (α := α₂) β} :
    Iter.Intermediate.appendSnd α₁ it₂ = (IterM.Intermediate.appendSnd α₁ it₂.toIterM).toIter :=
  rfl
/--
Step characterization of the pure `append` iterator while the first
component is active: it mirrors `it₁.step`, turning `.done` into a `.skip`
into the `appendSnd` state.
-/
theorem Iter.step_append {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β]
    {it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} :
    (it₁.append it₂).step =
      match it₁.step with
      | .yield it₁' out h => .yield (it₁'.append it₂) out (.fstYield h)
      | .skip it₁' h => .skip (it₁'.append it₂) (.fstSkip h)
      | .done h => .skip (Iter.Intermediate.appendSnd α₁ it₂) (.fstDone h) := by
  simp only [Iter.step, append_eq_toIter_append_toIterM, toIterM_toIter, IterM.step_append,
    Id.run_bind]
  cases it₁.toIterM.step.run.inflate using PlausibleIterStep.casesOn <;>
    simp [Intermediate.appendSnd_eq_toIter_appendSnd_toIterM]
/--
Step characterization of the `appendSnd` state: it mirrors `it₂.step`
exactly, including termination.
-/
theorem Iter.Intermediate.step_appendSnd {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β]
    {it₂ : Iter (α := α₂) β} :
    (Iter.Intermediate.appendSnd α₁ it₂).step =
      match it₂.step with
      | .yield it₂' out h => .yield (Iter.Intermediate.appendSnd α₁ it₂') out (.sndYield h)
      | .skip it₂' h => .skip (Iter.Intermediate.appendSnd α₁ it₂') (.sndSkip h)
      | .done h => .done (.sndDone h) := by
  simp only [Iter.step, appendSnd, toIterM_toIter, IterM.Intermediate.step_appendSnd, Id.run_bind]
  cases it₂.toIterM.step.run.inflate using PlausibleIterStep.casesOn <;> simp
@[simp]
/-- Collecting `it₁.append it₂` into a list concatenates the two lists. -/
@[simp]
theorem Iter.toList_append {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β] [Finite α₁ Id] [Finite α₂ Id]
    {it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} :
    (it₁.append it₂).toList = it₁.toList ++ it₂.toList := by
  simp [append_eq_toIter_append_toIterM, toList_eq_toList_toIterM]
/-- Reversed collection swaps the operands: `it₂`'s reversed list comes first. -/
@[simp]
theorem Iter.toListRev_append {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β] [Finite α₁ Id] [Finite α₂ Id]
    {it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} :
    (it₁.append it₂).toListRev = it₂.toListRev ++ it₁.toListRev := by
  simp [append_eq_toIter_append_toIterM, toListRev_eq_toListRev_toIterM]
/-- Collecting `it₁.append it₂` into an array concatenates the two arrays. -/
@[simp]
theorem Iter.toArray_append {α₁ α₂ β : Type w}
    [Iterator α₁ Id β] [Iterator α₂ Id β] [Finite α₁ Id] [Finite α₂ Id]
    {it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} :
    (it₁.append it₂).toArray = it₁.toArray ++ it₂.toArray := by
  simp [append_eq_toIter_append_toIterM, toArray_eq_toArray_toIterM]
/--
The intermediate `appendSnd` wrapper is transparent to indexed lookup: looking up
index `n` in `Iter.Intermediate.appendSnd α₁ it₂` is the same as looking it up
directly in `it₂`. `Productive` instances guarantee that `atIdxSlow?` is well-defined.
-/
@[simp]
theorem Iter.atIdxSlow?_appendSnd {α₁ α₂ β : Type w}
[Iterator α₁ Id β] [Iterator α₂ Id β] [Productive α₁ Id] [Productive α₂ Id]
{it₂ : Iter (α := α₂) β} {n : Nat} :
(Iter.Intermediate.appendSnd α₁ it₂).atIdxSlow? n = it₂.atIdxSlow? n := by
-- Induct along the unfolding of `atIdxSlow?` on `it₂`; in every case both sides
-- unfold by one step (via `step_appendSnd`) and the goal closes by the hypothesis.
induction n, it₂ using Iter.atIdxSlow?.induct_unfolding with
| yield_zero it it' out h h' =>
simp only [atIdxSlow?_eq_match (it := Iter.Intermediate.appendSnd α₁ it),
Intermediate.step_appendSnd, h']
| yield_succ it it' out h h' n ih =>
simp only [atIdxSlow?_eq_match (it := Iter.Intermediate.appendSnd α₁ it),
Intermediate.step_appendSnd, h', ih]
| skip_case n it it' h h' ih =>
simp only [atIdxSlow?_eq_match (it := Iter.Intermediate.appendSnd α₁ it),
Intermediate.step_appendSnd, h', ih]
| done_case n it h h' =>
simp only [atIdxSlow?_eq_match (it := Iter.Intermediate.appendSnd α₁ it),
Intermediate.step_appendSnd, h']
/--
If the first iterator already produces a value `b` at index `n`, then the appended
iterator `it₁.append it₂` produces the same value at the same index — appending a
second iterator never disturbs indices that fall inside the first one.
-/
theorem Iter.atIdxSlow?_append_of_eq_some {α₁ α₂ β : Type w}
[Iterator α₁ Id β] [Iterator α₂ Id β] [Productive α₁ Id] [Productive α₂ Id]
{it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} {n : Nat} {b : β}
(h : it₁.atIdxSlow? n = some b) :
(it₁.append it₂).atIdxSlow? n = some b := by
-- Induct along the unfolding of `atIdxSlow?` on `it₁`; the `done` case is
-- impossible because `h` asserts a successful lookup.
induction n, it₁ using Iter.atIdxSlow?.induct_unfolding generalizing it₂ with
| yield_zero it it' out hp h' =>
rw [atIdxSlow?_eq_match (it := it.append it₂)]
cases h
simp [step_append, h']
| yield_succ it it' out hp h' n ih =>
rw [atIdxSlow?_eq_match (it := it.append it₂)]
simp [step_append, h', ih h]
| skip_case n it it' hp h' ih =>
rw [atIdxSlow?_eq_match (it := it.append it₂)]
simp [step_append, h', ih h]
| done_case n it hp h' =>
-- `atIdxSlow?` is `none` for a finished iterator, contradicting `h`.
cases h
/--
Full characterization of indexed lookup in an appended iterator: index `n` hits the
first iterator while `n < it₁.toList.length`, and otherwise hits the second iterator
at the offset `n - it₁.toList.length`. Requires `Finite α₁ Id` so that
`it₁.toList.length` is defined.
-/
theorem Iter.atIdxSlow?_append {α₁ α₂ β : Type w}
[Iterator α₁ Id β] [Iterator α₂ Id β] [Finite α₁ Id] [Productive α₂ Id]
{it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} {n : Nat} :
(it₁.append it₂).atIdxSlow? n =
if n < it₁.toList.length then it₁.atIdxSlow? n
else it₂.atIdxSlow? (n - it₁.toList.length) := by
-- Induct along the unfolding of `atIdxSlow?` on `it₁`, unfolding `toList` by one
-- step in each case so the length comparison tracks the step taken.
induction n, it₁ using Iter.atIdxSlow?.induct_unfolding generalizing it₂ with
| yield_zero it it' out h h' =>
simp only [atIdxSlow?_eq_match (it := it.append it₂), step_append, h']
rw [toList_eq_match_step (it := it)]
simp [h']
| yield_succ it it' out h h' n ih =>
simp only [atIdxSlow?_eq_match (it := it.append it₂), step_append, h', ih]
rw [toList_eq_match_step (it := it)]
-- A yield shifts both the index and the length by one.
simp [h', Nat.succ_lt_succ_iff, Nat.succ_sub_succ]
| skip_case n it it' h h' ih =>
simp only [atIdxSlow?_eq_match (it := it.append it₂), step_append, h', ih]
rw [toList_eq_match_step (it := it)]
simp [h']
| done_case n it h h' =>
-- Once `it₁` is done, lookup passes through `appendSnd` straight to `it₂`.
simp [atIdxSlow?_eq_match (it := it.append it₂), step_append, h',
atIdxSlow?_appendSnd, toList_eq_match_step]
/--
Variant of `atIdxSlow?_append` that does not require `it₁` to be `Finite`: if `k` is
the first index at which `it₁` is exhausted (`it₁.atIdxSlow? k = none` while every
smaller index succeeds), then for any `n` at least `k`, lookup in `it₁.append it₂` at
`n` equals lookup in `it₂` at the offset `n - k`.
-/
theorem Iter.atIdxSlow?_append_of_productive {α₁ α₂ β : Type w}
[Iterator α₁ Id β] [Iterator α₂ Id β] [Productive α₁ Id] [Productive α₂ Id]
{it₁ : Iter (α := α₁) β} {it₂ : Iter (α := α₂) β} {n k : Nat}
(hk : it₁.atIdxSlow? k = none)
(hmin : j, j < k (it₁.atIdxSlow? j).isSome)
(hle : k n) :
(it₁.append it₂).atIdxSlow? n = it₂.atIdxSlow? (n - k) := by
-- Induct along the unfolding of `atIdxSlow?` on `it₁`, generalizing `k` (which
-- decreases with each yield) and `it₂`.
induction n, it₁ using Iter.atIdxSlow?.induct_unfolding generalizing k it₂ with
| yield_zero it it' out hp h' =>
-- Impossible: with n = 0 we get k = 0, yet the iterator yields at index 0,
-- contradicting hk.
exfalso
have : k = 0 := by omega
subst this
rw [atIdxSlow?_eq_match (it := it)] at hk
simp [h'] at hk
| yield_succ it it' out hp h' n ih =>
rw [atIdxSlow?_eq_match (it := it.append it₂)]
simp only [step_append, h']
match k with
| 0 =>
-- k = 0 would mean the iterator is already exhausted, contradicting the yield.
rw [atIdxSlow?_eq_match (it := it)] at hk
simp [h'] at hk
| k + 1 =>
-- Peel one yield: the exhaustion index for the successor state is k.
rw [atIdxSlow?_eq_match (it := it)] at hk
simp [h'] at hk
have hmin' : j, j < k (it'.atIdxSlow? j).isSome := by
intro j hj
have h := hmin (j + 1) (by omega)
rw [atIdxSlow?_eq_match (it := it)] at h
simpa [h'] using h
rw [ih hk hmin' (by omega)]
congr 1
omega
| skip_case n it it' hp h' ih =>
-- A skip changes neither the index nor the exhaustion point.
rw [atIdxSlow?_eq_match (it := it.append it₂)]
simp only [step_append, h']
rw [atIdxSlow?_eq_match (it := it)] at hk; simp [h'] at hk
have hmin' : j, j < k (it'.atIdxSlow? j).isSome := by
intro j hj
have h := hmin j hj
rw [atIdxSlow?_eq_match (it := it)] at h
simpa [h'] using h
exact ih hk hmin' hle
| done_case n it hp h' =>
-- The iterator is done, so the minimality of k forces k = 0 and control passes
-- to `appendSnd`/`it₂`.
rw [atIdxSlow?_eq_match (it := it.append it₂)]
simp only [step_append, h', atIdxSlow?_appendSnd]
have hk0 : k = 0 := by
false_or_by_contra
have h := hmin 0 (by omega)
rw [atIdxSlow?_eq_match (it := it)] at h
simp [h'] at h
simp [hk0]
end Std

View File

@@ -121,22 +121,22 @@ public theorem Iter.step_flatMapAfterM {α : Type w} {β : Type w} {α₂ : Type
[Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] [Iterator α Id β] [Iterator α₂ m γ]
{f : β m (IterM (α := α₂) m γ)} {it₁ : Iter (α := α) β} {it₂ : Option (IterM (α := α₂) m γ)} :
(it₁.flatMapAfterM f it₂).step = (do
match it₂ with
match hit : it₂ with
| none =>
match it₁.step with
| .yield it₁' b h =>
let fx MonadAttach.attach (f b)
return .deflate (.skip (it₁'.flatMapAfterM f (some fx.val)) (.outerYield_flatMapM_pure h fx.property))
| .skip it₁' h => return .deflate (.skip (it₁'.flatMapAfterM f none) (.outerSkip_flatMapM_pure h))
| .done h => return .deflate (.done (.outerDone_flatMapM_pure h))
return .deflate (.skip (it₁'.flatMapAfterM f (some fx.val)) (hit .outerYield_flatMapM_pure h fx.property))
| .skip it₁' h => return .deflate (.skip (it₁'.flatMapAfterM f it₂) (hit .outerSkip_flatMapM_pure h))
| .done h => return .deflate (.done (hit .outerDone_flatMapM_pure h))
| some it₂ =>
match ( it₂.step).inflate with
| .yield it₂' out h =>
return .deflate (.yield (it₁.flatMapAfterM f (some it₂')) out (.innerYield_flatMapM_pure h))
return .deflate (.yield (it₁.flatMapAfterM f (some it₂')) out (hit .innerYield_flatMapM_pure h))
| .skip it₂' h =>
return .deflate (.skip (it₁.flatMapAfterM f (some it₂')) (.innerSkip_flatMapM_pure h))
return .deflate (.skip (it₁.flatMapAfterM f (some it₂')) (hit .innerSkip_flatMapM_pure h))
| .done h =>
return .deflate (.skip (it₁.flatMapAfterM f none) (.innerDone_flatMapM_pure h))) := by
return .deflate (.skip (it₁.flatMapAfterM f none) (hit .innerDone_flatMapM_pure h))) := by
simp only [flatMapAfterM, IterM.step_flatMapAfterM, Iter.step_mapWithPostcondition,
PostconditionT.operation_pure]
split

View File

@@ -6,6 +6,7 @@ Authors: Paul Reichert
module
prelude
public import Init.Data.Iterators.Lemmas.Combinators.Monadic.Append
public import Init.Data.Iterators.Lemmas.Combinators.Monadic.Attach
public import Init.Data.Iterators.Lemmas.Combinators.Monadic.FilterMap
public import Init.Data.Iterators.Lemmas.Combinators.Monadic.FlatMap

View File

@@ -0,0 +1,107 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Paul Reichert
-/
module
prelude
public import Init.Data.Iterators.Combinators.Monadic.Append
public import Init.Data.Iterators.Consumers.Monadic.Collect
import Init.Data.Iterators.Lemmas.Consumers.Monadic.Collect
import Init.Data.Iterators.Lemmas.Monadic.Basic
import Init.Data.List.Lemmas
import Init.Data.List.ToArray
public section
namespace Std
open Std.Iterators Std.Iterators.Types
variable {α₁ α₂ β : Type w} {m : Type w Type w'}
/--
Unfolds one monadic `step` of `it₁.append it₂`: while the first iterator is running,
its yields and skips are forwarded with the state rewrapped in `append`; when the
first iterator finishes, the combined iterator emits a `skip` into the intermediate
`appendSnd` state rather than terminating. Plausibility witnesses are translated via
`fstYield`/`fstSkip`/`fstDone`.
-/
theorem IterM.step_append [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
{it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
(it₁.append it₂).step = (do
match ( it₁.step).inflate with
| .yield it₁' out h =>
pure <| .deflate <| .yield (it₁'.append it₂) out (.fstYield h)
| .skip it₁' h =>
pure <| .deflate <| .skip (it₁'.append it₂) (.fstSkip h)
| .done h =>
pure <| .deflate <| .skip (IterM.Intermediate.appendSnd α₁ it₂) (.fstDone h)) := by
-- Unfold the definitions and check that both bind continuations agree on every
-- possible step shape.
simp only [append, Intermediate.appendSnd, step, Iterator.step]
apply bind_congr; intro step
cases step.inflate using PlausibleIterStep.casesOn <;> rfl
/--
Unfolds one monadic `step` of the intermediate iterator `appendSnd α₁ it₂`: it
performs exactly one step of the underlying second iterator, rewrapping the successor
state and translating the plausibility witnesses via `sndYield`/`sndSkip`/`sndDone`.
-/
theorem IterM.Intermediate.step_appendSnd [Monad m] [Iterator α₁ m β] [Iterator α₂ m β]
{it₂ : IterM (α := α₂) m β} :
(IterM.Intermediate.appendSnd α₁ it₂).step = (do
match ( it₂.step).inflate with
| .yield it₂' out h =>
pure <| .deflate <| .yield (IterM.Intermediate.appendSnd α₁ it₂') out (.sndYield h)
| .skip it₂' h =>
pure <| .deflate <| .skip (IterM.Intermediate.appendSnd α₁ it₂') (.sndSkip h)
| .done h =>
pure <| .deflate <| .done (.sndDone h)) := by
-- Unfold the definitions and check that both bind continuations agree on every
-- possible step shape.
simp only [Intermediate.appendSnd, step, Iterator.step]
apply bind_congr; intro step
cases step.inflate using PlausibleIterStep.casesOn <;> rfl
/--
The intermediate `appendSnd` wrapper is transparent to `toList`: collecting
`appendSnd α₁ it₂` produces exactly the same monadic list computation as collecting
`it₂` directly.
-/
@[simp]
theorem IterM.toList_appendSnd [Monad m] [LawfulMonad m]
[Iterator α₁ m β] [Iterator α₂ m β] [Finite α₁ m] [Finite α₂ m]
{it₂ : IterM (α := α₂) m β} :
(IterM.Intermediate.appendSnd α₁ it₂).toList = it₂.toList := by
-- Step induction on `it₂`: unfold `toList` by one step on both sides and apply the
-- induction hypotheses for the yield/skip cases.
induction it₂ using IterM.inductSteps with | step it₂ ihy ihs
rw [toList_eq_match_step (it := IterM.Intermediate.appendSnd α₁ it₂),
toList_eq_match_step (it := it₂)]
simp only [Intermediate.step_appendSnd, bind_assoc]
apply bind_congr; intro step
cases step.inflate using PlausibleIterStep.casesOn
· simp [ihy _]
· simp [ihs _]
· simp
/--
`toList` distributes over monadic `append`: collecting `it₁.append it₂` is the
computation that first collects `it₁`, then collects `it₂`, and returns the
concatenation — so all of `it₁`'s effects run before any of `it₂`'s.
-/
@[simp]
theorem IterM.toList_append [Monad m] [LawfulMonad m]
[Iterator α₁ m β] [Iterator α₂ m β] [Finite α₁ m] [Finite α₂ m]
{it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
(it₁.append it₂).toList = (do
let l₁ it₁.toList
let l₂ it₂.toList
pure (l₁ ++ l₂)) := by
-- Step induction on `it₁`; the `done` case hands over to `toList_appendSnd`.
induction it₁ using IterM.inductSteps with | step it₁ ihy ihs
rw [toList_eq_match_step (it := it₁.append it₂), toList_eq_match_step (it := it₁)]
simp only [step_append, bind_assoc]
apply bind_congr; intro step
cases step.inflate using PlausibleIterStep.casesOn
· simp [ihy _, - bind_pure_comp]
· simp [ihs _]
· simp [toList_appendSnd, - bind_pure_comp]
/--
`toListRev` distributes over monadic `append` with the list operands swapped: the
effects still run in order (`it₁` first, then `it₂`), but because the result is
reversed, `it₂`'s elements come first in the returned list.
-/
@[simp]
theorem IterM.toListRev_append [Monad m] [LawfulMonad m]
[Iterator α₁ m β] [Iterator α₂ m β] [Finite α₁ m] [Finite α₂ m]
{it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
(it₁.append it₂).toListRev = (do
let l₁ it₁.toListRev
let l₂ it₂.toListRev
pure (l₂ ++ l₁)) := by
-- Express `toListRev` via `toList` on both sides, then push `reverse` through the
-- concatenation.
rw [toListRev_eq (it := it₁.append it₂), toList_append,
toListRev_eq (it := it₁), toListRev_eq (it := it₂)]
simp [map_bind, bind_pure_comp, List.reverse_append]
/--
`toArray` distributes over monadic `append`: collecting `it₁.append it₂` into an
array first collects `it₁`, then `it₂`, and returns the concatenation of the two
arrays.
-/
@[simp]
theorem IterM.toArray_append [Monad m] [LawfulMonad m]
[Iterator α₁ m β] [Iterator α₂ m β] [Finite α₁ m] [Finite α₂ m]
{it₁ : IterM (α := α₁) m β} {it₂ : IterM (α := α₂) m β} :
(it₁.append it₂).toArray = (do
let a₁ it₁.toArray
let a₂ it₂.toArray
pure (a₁ ++ a₂)) := by
-- Reduce to the `toList` version and translate back through `List.toArray`.
rw [ toArray_toList (it := it₁.append it₂), toList_append,
toArray_toList (it := it₁), toArray_toList (it := it₂)]
simp [map_bind, - bind_pure_comp, List.toArray_appendList, - toArray_toList]
end Std

View File

@@ -21,14 +21,14 @@ open Std.Internal Std.Iterators
theorem IterM.step_flattenAfter {α α₂ β : Type w} {m : Type w Type w'} [Monad m]
[Iterator α m (IterM (α := α₂) m β)] [Iterator α₂ m β]
{it₁ : IterM (α := α) m (IterM (α := α₂) m β)} {it₂ : Option (IterM (α := α₂) m β)} :
(it₁.flattenAfter it₂).step = (do
(it₁.flattenAfter it₂).step = (
match it₂ with
| none =>
| none => do
match ( it₁.step).inflate with
| .yield it₁' it₂' h => return .deflate (.skip (it₁'.flattenAfter (some it₂')) (.outerYield h))
| .skip it₁' h => return .deflate (.skip (it₁'.flattenAfter none) (.outerSkip h))
| .done h => return .deflate (.done (.outerDone h))
| some it₂ =>
| some it₂ => do
match ( it₂.step).inflate with
| .yield it₂' out h => return .deflate (.yield (it₁.flattenAfter (some it₂')) out (.innerYield h))
| .skip it₂' h => return .deflate (.skip (it₁.flattenAfter (some it₂')) (.innerSkip h))
@@ -130,16 +130,16 @@ public theorem IterM.step_flatMapAfterM {α : Type w} {β : Type w} {α₂ : Typ
{γ : Type w} {m : Type w Type w'} [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m]
[Iterator α m β] [Iterator α₂ m γ] {f : β m (IterM (α := α₂) m γ)} {it₁ : IterM (α := α) m β}
{it₂ : Option (IterM (α := α₂) m γ)} :
(it₁.flatMapAfterM f it₂).step = (do
(it₁.flatMapAfterM f it₂).step = (
match it₂ with
| none =>
| none => do
match ( it₁.step).inflate with
| .yield it₁' b h =>
let fx MonadAttach.attach (f b)
return .deflate (.skip (it₁'.flatMapAfterM f (some fx.val)) (.outerYield_flatMapM h fx.property))
| .skip it₁' h => return .deflate (.skip (it₁'.flatMapAfterM f none) (.outerSkip_flatMapM h))
| .done h => return .deflate (.done (.outerDone_flatMapM h))
| some it₂ =>
| some it₂ => do
match ( it₂.step).inflate with
| .yield it₂' out h => return .deflate (.yield (it₁.flatMapAfterM f (some it₂')) out (.innerYield_flatMapM h))
| .skip it₂' h => return .deflate (.skip (it₁.flatMapAfterM f (some it₂')) (.innerSkip_flatMapM h))
@@ -171,15 +171,15 @@ public theorem IterM.step_flatMapM {α : Type w} {β : Type w} {α₂ : Type w}
public theorem IterM.step_flatMapAfter {α : Type w} {β : Type w} {α₂ : Type w}
{γ : Type w} {m : Type w Type w'} [Monad m] [LawfulMonad m] [Iterator α m β] [Iterator α₂ m γ]
{f : β IterM (α := α₂) m γ} {it₁ : IterM (α := α) m β} {it₂ : Option (IterM (α := α₂) m γ)} :
(it₁.flatMapAfter f it₂).step = (do
(it₁.flatMapAfter f it₂).step = (
match it₂ with
| none =>
| none => do
match ( it₁.step).inflate with
| .yield it₁' b h =>
return .deflate (.skip (it₁'.flatMapAfter f (some (f b))) (.outerYield_flatMap h))
| .skip it₁' h => return .deflate (.skip (it₁'.flatMapAfter f none) (.outerSkip_flatMap h))
| .done h => return .deflate (.done (.outerDone_flatMap h))
| some it₂ =>
| some it₂ => do
match ( it₂.step).inflate with
| .yield it₂' out h => return .deflate (.yield (it₁.flatMapAfter f (some it₂')) out (.innerYield_flatMap h))
| .skip it₂' h => return .deflate (.skip (it₁.flatMapAfter f (some it₂')) (.innerSkip_flatMap h))

View File

@@ -32,7 +32,7 @@ theorem Iter.forIn'_eq {α β : Type w} [Iterator α Id β] [Finite α Id]
IterM.DefaultConsumers.forIn' (n := m) (fun _ _ f x => f x.run) γ (fun _ _ _ => True)
it.toIterM init _ (fun _ => id)
(fun out h acc => return f out (Iter.isPlausibleIndirectOutput_iff_isPlausibleIndirectOutput_toIterM.mpr h) acc, trivial) := by
simp +instances only [instForIn', ForIn'.forIn', IteratorLoop.finiteForIn']
simp +instances only [ForIn'.forIn']
have : a b c, f a b c = (Subtype.val <$> (·, trivial) <$> f a b c) := by simp
simp +singlePass only [this]
rw [hl.lawful (fun _ _ f x => f x.run) (wf := IteratorLoop.wellFounded_of_finite)]
@@ -81,7 +81,7 @@ theorem Iter.forIn'_eq_forIn'_toIterM {α β : Type w} [Iterator α Id β]
letI : ForIn' m (IterM (α := α) Id β) β _ := IterM.instForIn'
ForIn'.forIn' it.toIterM init
(fun out h acc => f out (isPlausibleIndirectOutput_iff_isPlausibleIndirectOutput_toIterM.mpr h) acc) := by
simp +instances [ForIn'.forIn', Iter.instForIn', IterM.instForIn', monadLift]
simp +instances [ForIn'.forIn', monadLift]
theorem Iter.forIn_eq_forIn_toIterM {α β : Type w} [Iterator α Id β]
[Finite α Id] {m : Type w Type w''} [Monad m] [LawfulMonad m]
@@ -395,7 +395,7 @@ theorem Iter.fold_eq_fold_toIterM {α β : Type w} {γ : Type w} [Iterator α Id
[Finite α Id] [IteratorLoop α Id Id]
{f : γ β γ} {init : γ} {it : Iter (α := α) β} :
it.fold (init := init) f = (it.toIterM.fold (init := init) f).run := by
rw [fold_eq_foldM, foldM_eq_foldM_toIterM, IterM.fold_eq_foldM]; rfl
rw [fold_eq_foldM, foldM_eq_foldM_toIterM, IterM.fold_eq_foldM]
@[simp]
theorem Iter.forIn_pure_yield_eq_fold {α β : Type w} {γ : Type x} [Iterator α Id β]

View File

@@ -109,7 +109,7 @@ theorem IterM.forIn'_eq {α β : Type w} {m : Type w → Type w'} [Iterator α m
letI : ForIn' n (IterM (α := α) m β) β _ := IterM.instForIn'
ForIn'.forIn' (α := β) (m := n) it init f = IterM.DefaultConsumers.forIn' (n := n)
(fun _ _ f x => monadLift x >>= f) γ (fun _ _ _ => True) it init _ (fun _ => id) (return f · · ·, trivial) := by
simp +instances only [instForIn', ForIn'.forIn', IteratorLoop.finiteForIn']
simp +instances only [ForIn'.forIn']
have : f = (Subtype.val <$> (·, trivial) <$> f · · ·) := by simp
rw [this, hl.lawful (fun _ _ f x => monadLift x >>= f) (wf := IteratorLoop.wellFounded_of_finite)]
simp +instances [IteratorLoop.defaultImplementation]

View File

@@ -32,14 +32,14 @@ def ToIterator.iter [ToIterator γ Id α β] (x : γ) : Iter (α := α) β :=
ToIterator.iterM x |>.toIter
/-- Creates a monadic `ToIterator` instance. -/
@[always_inline, inline, expose, instance_reducible]
@[always_inline, inline, expose, implicit_reducible]
def ToIterator.ofM (α : Type w)
(iterM : γ IterM (α := α) m β) :
ToIterator γ m α β where
iterMInternal x := iterM x
/-- Creates a pure `ToIterator` instance. -/
@[always_inline, inline, expose, instance_reducible]
@[always_inline, inline, expose, implicit_reducible]
def ToIterator.of (α : Type w)
(iter : γ Iter (α := α) β) :
ToIterator γ Id α β where

View File

@@ -236,7 +236,6 @@ theorem getElem?_eq_some_iff {l : List α} : l[i]? = some a ↔ ∃ h : i < l.le
· match i, h with
| i + 1, h => simp [getElem?_eq_some_iff, Nat.succ_lt_succ_iff]
@[grind ]
theorem getElem_of_getElem? {l : List α} : l[i]? = some a h : i < l.length, l[i] = a :=
getElem?_eq_some_iff.mp
@@ -937,6 +936,12 @@ theorem getElem_zero_eq_head {l : List α} (h : 0 < l.length) :
| nil => simp at h
| cons _ _ => simp
theorem head!_eq_getElem! [Inhabited α] {l : List α} : head! l = l[0]! := by
cases l <;> rfl
theorem headD_eq_getD {l : List α} {fallback} : headD l fallback = l.getD 0 fallback := by
cases l <;> rfl
theorem head_eq_iff_head?_eq_some {xs : List α} (h) : xs.head h = a xs.head? = some a := by
cases xs with
| nil => simp at h

View File

@@ -225,7 +225,7 @@ theorem forM_toArray [Monad m] (l : List α) (f : α → m PUnit) :
@[simp, grind =] theorem findSomeM?_toArray [Monad m] [LawfulMonad m] (f : α m (Option β)) (l : List α) :
l.toArray.findSomeM? f = l.findSomeM? f := by
rw [Array.findSomeM?]
simp only [bind_pure_comp, map_pure, forIn_toArray]
simp only [forIn_toArray]
induction l with
| nil => simp
| cons a l ih =>
@@ -258,7 +258,7 @@ theorem findRevM?_toArray [Monad m] [LawfulMonad m] (f : α → m Bool) (l : Lis
@[simp, grind =] theorem findM?_toArray [Monad m] [LawfulMonad m] (f : α m Bool) (l : List α) :
l.toArray.findM? f = l.findM? f := by
rw [Array.findM?]
simp only [bind_pure_comp, map_pure, forIn_toArray]
simp only [forIn_toArray]
induction l with
| nil => simp
| cons a l ih =>

View File

@@ -478,7 +478,7 @@ instance : Std.Trichotomous (. < . : Nat → Nat → Prop) where
set_option linter.missingDocs false in
@[deprecated Nat.instTrichotomousLt (since := "2025-10-27")]
def Nat.instAntisymmNotLt : Std.Antisymm (¬ . < . : Nat Nat Prop) where
theorem Nat.instAntisymmNotLt : Std.Antisymm (¬ . < . : Nat Nat Prop) where
antisymm := Nat.instTrichotomousLt.trichotomous
protected theorem add_le_add_left {n m : Nat} (h : n m) (k : Nat) : k + n k + m :=

View File

@@ -172,10 +172,10 @@ instance [Monad m] : ForM m (Option α) α :=
Option.forM
instance [Monad m] : ForIn' m (Option α) α inferInstance where
forIn' x init f := do
forIn' x init f :=
match x with
| none => return init
| some a =>
| some a => do
match f a rfl init with
| .done r | .yield r => return r

View File

@@ -147,7 +147,7 @@ public theorem LawfulOrderMin.of_min_le {α : Type u} [Min α] [LE α]
This lemma characterizes in terms of `LE α` when a `Max α` instance "behaves like a supremum
operator".
-/
public def LawfulOrderSup.of_le {α : Type u} [Max α] [LE α]
public theorem LawfulOrderSup.of_le {α : Type u} [Max α] [LE α]
(max_le_iff : a b c : α, max a b c a c b c) : LawfulOrderSup α where
max_le_iff := max_le_iff
@@ -159,7 +159,7 @@ instances.
The produced instance entails `LawfulOrderSup α` and `MaxEqOr α`.
-/
public def LawfulOrderMax.of_max_le_iff {α : Type u} [Max α] [LE α]
public theorem LawfulOrderMax.of_max_le_iff {α : Type u} [Max α] [LE α]
(max_le_iff : a b c : α, max a b c a c b c := by exact LawfulOrderInf.le_min_iff)
(max_eq_or : a b : α, max a b = a max a b = b := by exact MaxEqOr.max_eq_or) :
LawfulOrderMax α where
@@ -196,7 +196,7 @@ Creates a *total* `LE α` instance from an `LT α` instance.
This only makes sense for asymmetric `LT α` instances (see `Std.Asymm`).
-/
@[inline]
@[inline, implicit_reducible, expose]
public def _root_.LE.ofLT (α : Type u) [LT α] : LE α where
le a b := ¬ b < a
@@ -208,7 +208,7 @@ public instance LawfulOrderLT.of_lt {α : Type u} [LT α] [i : Asymm (α := α)
haveI := LE.ofLT α
LawfulOrderLT α :=
letI := LE.ofLT α
{ lt_iff a b := by simp +instances [LE.ofLT, LE.le]; apply Asymm.asymm }
{ lt_iff a b := by simp +instances [LE.le]; apply Asymm.asymm }
/--
If an `LT α` instance is asymmetric and its negation is transitive, then `LE.ofLT α` represents a
@@ -253,7 +253,7 @@ public theorem LawfulOrderInf.of_lt {α : Type u} [Min α] [LT α]
letI := LE.ofLT α
{ le_min_iff a b c := by
open Classical in
simp +instances only [LE.ofLT, LE.le]
simp +instances only [LE.le]
simp [ not_or, Decidable.not_iff_not]
simpa [Decidable.imp_iff_not_or] using min_lt_iff a b c }
@@ -276,14 +276,14 @@ public theorem LawfulOrderMin.of_lt {α : Type u} [Min α] [LT α]
This lemma characterizes in terms of `LT α` when a `Max α` instance
"behaves like an supremum operator" with respect to `LE.ofLT α`.
-/
public def LawfulOrderSup.of_lt {α : Type u} [Max α] [LT α]
public theorem LawfulOrderSup.of_lt {α : Type u} [Max α] [LT α]
(lt_max_iff : a b c : α, c < max a b c < a c < b) :
haveI := LE.ofLT α
LawfulOrderSup α :=
letI := LE.ofLT α
{ max_le_iff a b c := by
open Classical in
simp +instances only [LE.ofLT, LE.le]
simp +instances only [LE.le]
simp [ not_or, Decidable.not_iff_not]
simpa [Decidable.imp_iff_not_or] using lt_max_iff a b c }
@@ -293,7 +293,7 @@ Derives a `LawfulOrderMax α` instance for `LE.ofLT` from two properties involvi
The produced instance entails `LawfulOrderSup α` and `MaxEqOr α`.
-/
public def LawfulOrderMax.of_lt {α : Type u} [Max α] [LT α]
public theorem LawfulOrderMax.of_lt {α : Type u} [Max α] [LT α]
(lt_max_iff : a b c : α, c < max a b c < a c < b)
(max_eq_or : a b : α, max a b = a max a b = b) :
haveI := LE.ofLT α

View File

@@ -26,7 +26,7 @@ public def _root_.LE.ofOrd (α : Type u) [Ord α] : LE α where
/--
Creates an `DecidableLE α` instance using a well-behaved `Ord α` instance.
-/
@[inline, expose]
@[inline, expose, implicit_reducible]
public def _root_.DecidableLE.ofOrd (α : Type u) [LE α] [Ord α] [LawfulOrderOrd α] :
DecidableLE α :=
fun a b => match h : (compare a b).isLE with
@@ -93,7 +93,7 @@ grind_pattern compare_ne_eq => compare a b, Ordering.eq where
/--
Creates a `DecidableLT α` instance using a well-behaved `Ord α` instance.
-/
@[inline, expose]
@[inline, expose, implicit_reducible]
public def _root_.DecidableLT.ofOrd (α : Type u) [LE α] [LT α] [Ord α] [LawfulOrderOrd α]
[LawfulOrderLT α] :
DecidableLT α :=

View File

@@ -52,7 +52,8 @@ def max' [LE α] [DecidableLE α] (a b : α) : α :=
Without the `open scoped` command, Lean would not find the required {lit}`DecidableLE α`
instance for the opposite order.
-/
@[implicit_reducible] def LE.opposite (le : LE α) : LE α where
@[implicit_reducible]
def LE.opposite (le : LE α) : LE α where
le a b := b a
theorem LE.opposite_def {le : LE α} :
@@ -89,6 +90,7 @@ example [LE α] [LT α] [Std.LawfulOrderLT α] [Std.IsLinearOrder α] {x y : α}
Without the `open scoped` command, Lean would not find the {lit}`LawfulOrderLT α`
and {lit}`IsLinearOrder α` instances for the opposite order that are required by {name}`not_le`.
-/
@[implicit_reducible]
def LT.opposite (lt : LT α) : LT α where
lt a b := b < a
@@ -125,6 +127,7 @@ example [LE α] [DecidableLE α] [Min α] [Std.LawfulOrderLeftLeaningMin α] {a
Without the `open scoped` command, Lean would not find the {lit}`LawfulOrderLeftLeaningMax α`
instance for the opposite order that is required by {name}`max_eq_if`.
-/
@[implicit_reducible]
def Min.oppositeMax (min : Min α) : Max α where
max a b := Min.min a b
@@ -161,6 +164,7 @@ example [LE α] [DecidableLE α] [Max α] [Std.LawfulOrderLeftLeaningMax α] {a
Without the `open scoped` command, Lean would not find the {lit}`LawfulOrderLeftLeaningMin α`
instance for the opposite order that is required by {name}`min_eq_if`.
-/
@[implicit_reducible]
def Max.oppositeMin (max : Max α) : Min α where
min a b := Max.max a b

View File

@@ -47,7 +47,7 @@ public instance instLawfulOrderBEqOfDecidableLE {α : Type u} [LE α] [Decidable
beq_iff_le_and_ge := by simp [BEq.beq]
/-- If `LT` can be characterized in terms of a decidable `LE`, then `LT` is decidable either. -/
@[expose, instance_reducible]
@[inline, expose, implicit_reducible]
public def decidableLTOfLE {α : Type u} [LE α] {_ : LT α} [DecidableLE α] [LawfulOrderLT α] :
DecidableLT α :=
fun a b =>
@@ -171,7 +171,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
* Other proof obligations, namely `le_refl` and `le_trans`, can be omitted if `Refl` and `Trans`
instances can be synthesized.
-/
@[expose, implicit_reducible]
@[inline, expose, implicit_reducible]
public def PreorderPackage.ofLE (α : Type u)
(args : Packages.PreorderOfLEArgs α := by exact {}) : PreorderPackage α where
toLE := args.le
@@ -256,7 +256,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
* Other proof obligations, namely `le_refl`, `le_trans` and `le_antisymm`, can be omitted if `Refl`,
`Trans` and `Antisymm` instances can be synthesized.
-/
@[expose, implicit_reducible]
@[inline, expose, implicit_reducible]
public def PartialOrderPackage.ofLE (α : Type u)
(args : Packages.PartialOrderOfLEArgs α := by exact {}) : PartialOrderPackage α where
toPreorderPackage := .ofLE α args.toPreorderOfLEArgs
@@ -385,7 +385,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
* Other proof obligations, namely `le_total` and `le_trans`, can be omitted if `Total` and `Trans`
instances can be synthesized.
-/
@[expose, implicit_reducible]
@[inline, expose, implicit_reducible]
public def LinearPreorderPackage.ofLE (α : Type u)
(args : Packages.LinearPreorderOfLEArgs α := by exact {}) : LinearPreorderPackage α where
toPreorderPackage := .ofLE α args.toPreorderOfLEArgs
@@ -487,7 +487,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
* Other proof obligations, namely `le_total`, `le_trans` and `le_antisymm`, can be omitted if
`Total`, `Trans` and `Antisymm` instances can be synthesized.
-/
@[expose, implicit_reducible]
@[inline, expose, implicit_reducible]
public def LinearOrderPackage.ofLE (α : Type u)
(args : Packages.LinearOrderOfLEArgs α := by exact {}) : LinearOrderPackage α where
toLinearPreorderPackage := .ofLE α args.toLinearPreorderOfLEArgs
@@ -647,7 +647,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
* Other proof obligations, for example `transOrd`, can be omitted if a matching instance can be
synthesized.
-/
@[expose, instance_reducible]
@[inline, expose, implicit_reducible]
public def LinearPreorderPackage.ofOrd (α : Type u)
(args : Packages.LinearPreorderOfOrdArgs α := by exact {}) : LinearPreorderPackage α :=
letI := args.ord
@@ -793,7 +793,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
* Other proof obligations, such as `transOrd`, can be omitted if matching instances can be
synthesized.
-/
@[expose, instance_reducible]
@[inline, expose, implicit_reducible]
public def LinearOrderPackage.ofOrd (α : Type u)
(args : Packages.LinearOrderOfOrdArgs α := by exact {}) : LinearOrderPackage α :=
-- set_option backward.isDefEq.respectTransparency false in

View File

@@ -597,8 +597,7 @@ instance Iterator.instLawfulIteratorLoop [UpwardEnumerable α] [LE α] [Decidabl
LawfulIteratorLoop (Rxc.Iterator α) Id n where
lawful := by
intro lift instLawfulMonadLiftFunction γ it init Pl wf f
simp +instances only [IteratorLoop.defaultImplementation, IteratorLoop.forIn,
IterM.DefaultConsumers.forIn'_eq_wf Pl wf]
simp +instances only [IteratorLoop.forIn, IterM.DefaultConsumers.forIn'_eq_wf Pl wf]
rw [IterM.DefaultConsumers.forIn'.wf]
split; rotate_left
· simp only [IterM.step_eq,
@@ -1173,8 +1172,7 @@ instance Iterator.instLawfulIteratorLoop [UpwardEnumerable α] [LT α] [Decidabl
LawfulIteratorLoop (Rxo.Iterator α) Id n where
lawful := by
intro lift instLawfulMonadLiftFunction γ it init Pl wf f
simp +instances only [IteratorLoop.defaultImplementation, IteratorLoop.forIn,
IterM.DefaultConsumers.forIn'_eq_wf Pl wf]
simp +instances only [IteratorLoop.forIn, IterM.DefaultConsumers.forIn'_eq_wf Pl wf]
rw [IterM.DefaultConsumers.forIn'.wf]
split; rotate_left
· simp [IterM.step_eq, Monadic.step, Internal.LawfulMonadLiftBindFunction.liftBind_pure (liftBind := lift)]
@@ -1639,8 +1637,7 @@ instance Iterator.instLawfulIteratorLoop [UpwardEnumerable α]
LawfulIteratorLoop (Rxi.Iterator α) Id n where
lawful := by
intro lift instLawfulMonadLiftFunction γ it init Pl wf f
simp +instances only [IteratorLoop.defaultImplementation, IteratorLoop.forIn,
IterM.DefaultConsumers.forIn'_eq_wf Pl wf]
simp +instances only [IteratorLoop.forIn, IterM.DefaultConsumers.forIn'_eq_wf Pl wf]
rw [IterM.DefaultConsumers.forIn'.wf]
split; rotate_left
· simp [Monadic.step_eq_step, Monadic.step, Internal.LawfulMonadLiftBindFunction.liftBind_pure]

View File

@@ -438,6 +438,7 @@ protected theorem UpwardEnumerable.le_iff {α : Type u} [LE α] [UpwardEnumerabl
[LawfulUpwardEnumerableLE α] {a b : α} : a b UpwardEnumerable.LE a b :=
LawfulUpwardEnumerableLE.le_iff a b
@[expose, implicit_reducible]
def UpwardEnumerable.instLETransOfLawfulUpwardEnumerableLE {α : Type u} [LE α]
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLE α] :
Trans (α := α) (· ·) (· ·) (· ·) where
@@ -502,12 +503,13 @@ protected theorem UpwardEnumerable.lt_succ_iff {α : Type u} [UpwardEnumerable
succMany?_eq_some_iff_succMany] at hn
exact n, hn
@[expose, implicit_reducible]
def UpwardEnumerable.instLTTransOfLawfulUpwardEnumerableLT {α : Type u} [LT α]
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α] :
Trans (α := α) (· < ·) (· < ·) (· < ·) where
trans := by simpa [UpwardEnumerable.lt_iff] using @UpwardEnumerable.lt_trans
def UpwardEnumerable.instLawfulOrderLTOfLawfulUpwardEnumerableLT {α : Type u} [LT α] [LE α]
theorem UpwardEnumerable.instLawfulOrderLTOfLawfulUpwardEnumerableLT {α : Type u} [LT α] [LE α]
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
[LawfulUpwardEnumerableLE α] :
LawfulOrderLT α where

View File

@@ -8,6 +8,12 @@ module
prelude
public import Init.Data.String.Pattern.Char
public import Init.Data.String.Lemmas.Pattern.Basic
public import Init.Data.String.Slice
public import Init.Data.String.Lemmas.Pattern.Pred
public import Init.Data.String.Search
import all Init.Data.String.Slice
import all Init.Data.String.Pattern.Char
import all Init.Data.String.Search
import Init.Data.Option.Lemmas
import Init.Data.String.Lemmas.Basic
import Init.Data.String.Lemmas.Order
@@ -54,8 +60,8 @@ instance {c : Char} : LawfulForwardPatternModel c where
dropPrefix?_eq_some_iff {s} pos := by
simp [isLongestMatch_iff, ForwardPattern.dropPrefix?, and_comm, eq_comm (b := pos)]
instance {c : Char} : LawfulToForwardSearcherModel c :=
.defaultImplementation
theorem toSearcher_eq {c : Char} {s : Slice} :
ToForwardSearcher.toSearcher c s = ToForwardSearcher.toSearcher (· == c) s := (rfl)
theorem matchesAt_iff {c : Char} {s : Slice} {pos : s.Pos} :
MatchesAt c pos (h : pos s.endPos), pos.get h = c := by
@@ -80,4 +86,153 @@ theorem matchAt?_eq {s : Slice} {pos : s.Pos} {c : Char} :
if h₀ : (h : pos s.endPos), pos.get h = c then some (pos.next h₀.1) else none := by
split <;> simp_all [isLongestMatchAt_iff, matchesAt_iff]
end String.Slice.Pattern.Model.Char
theorem isMatch_iff_isMatch_beq {c : Char} {s : Slice} {pos : s.Pos} :
IsMatch c pos IsMatch (· == c) pos := by
simp [isMatch_iff, CharPred.isMatch_iff, beq_iff_eq]
theorem isLongestMatch_iff_isLongestMatch_beq {c : Char} {s : Slice} {pos : s.Pos} :
IsLongestMatch c pos IsLongestMatch (· == c) pos := by
simp [isLongestMatch_iff_isMatch, isMatch_iff_isMatch_beq]
theorem isLongestMatchAt_iff_isLongestMatchAt_beq {c : Char} {s : Slice}
{pos pos' : s.Pos} :
IsLongestMatchAt c pos pos' IsLongestMatchAt (· == c) pos pos' := by
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff_isLongestMatch_beq]
theorem matchesAt_iff_matchesAt_beq {c : Char} {s : Slice} {pos : s.Pos} :
MatchesAt c pos MatchesAt (· == c) pos := by
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_isLongestMatchAt_beq]
theorem matchAt?_eq_matchAt?_beq {c : Char} {s : Slice} {pos : s.Pos} :
matchAt? c pos = matchAt? (· == c) pos := by
refine Option.ext (fun pos' => ?_)
simp [matchAt?_eq_some_iff, isLongestMatchAt_iff_isLongestMatchAt_beq]
theorem isValidSearchFrom_iff_isValidSearchFrom_beq {c : Char} {s : Slice} {p : s.Pos}
{l : List (SearchStep s)} : IsValidSearchFrom c p l IsValidSearchFrom (· == c) p l := by
refine fun h => ?_, fun h => ?_
· induction h with
| endPos => simpa using IsValidSearchFrom.endPos
| matched => simp_all [IsValidSearchFrom.matched, isLongestMatchAt_iff_isLongestMatchAt_beq]
| mismatched => simp_all [IsValidSearchFrom.mismatched, matchesAt_iff_matchesAt_beq]
· induction h with
| endPos => simpa using IsValidSearchFrom.endPos
| matched => simp_all [IsValidSearchFrom.matched, isLongestMatchAt_iff_isLongestMatchAt_beq]
| mismatched => simp_all [IsValidSearchFrom.mismatched, matchesAt_iff_matchesAt_beq]
instance {c : Char} : LawfulToForwardSearcherModel c where
isValidSearchFrom_toList s := by
simpa [toSearcher_eq, isValidSearchFrom_iff_isValidSearchFrom_beq] using
LawfulToForwardSearcherModel.isValidSearchFrom_toList (pat := (· == c)) (s := s)
end Pattern.Model.Char
theorem startsWith_char_eq_startsWith_beq {c : Char} {s : Slice} :
s.startsWith c = s.startsWith (· == c) := (rfl)
theorem dropPrefix?_char_eq_dropPrefix?_beq {c : Char} {s : Slice} :
s.dropPrefix? c = s.dropPrefix? (· == c) := (rfl)
theorem dropPrefix_char_eq_dropPrefix_beq {c : Char} {s : Slice} :
s.dropPrefix c = s.dropPrefix (· == c) := (rfl)
theorem Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq {c : Char} {s : Slice} :
dropPrefix? c s = dropPrefix? (· == c) s := (rfl)
private theorem dropWhileGo_eq {c : Char} {s : Slice} (curr : s.Pos) :
dropWhile.go s c curr = dropWhile.go s (· == c) curr := by
fun_induction dropWhile.go s c curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq, h, ih]
| case3 pos h =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq]
theorem dropWhile_char_eq_dropWhile_beq {c : Char} {s : Slice} :
s.dropWhile c = s.dropWhile (· == c) := by
simpa only [dropWhile] using dropWhileGo_eq s.startPos
private theorem takeWhileGo_eq {c : Char} {s : Slice} (curr : s.Pos) :
takeWhile.go s c curr = takeWhile.go s (· == c) curr := by
fun_induction takeWhile.go s c curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq, h, ih]
| case3 pos h =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_char_eq_dropPrefix?_beq]
theorem takeWhile_char_eq_takeWhile_beq {c : Char} {s : Slice} :
s.takeWhile c = s.takeWhile (· == c) := by
simp only [takeWhile]; exact takeWhileGo_eq s.startPos
theorem all_char_eq_all_beq {c : Char} {s : Slice} :
s.all c = s.all (· == c) := by
simp only [all, dropWhile_char_eq_dropWhile_beq]
theorem find?_char_eq_find?_beq {c : Char} {s : Slice} :
s.find? c = s.find? (· == c) :=
(rfl)
theorem Pos.find?_char_eq_find?_beq {c : Char} {s : Slice} {p : s.Pos} :
p.find? c = p.find? (· == c) :=
(rfl)
theorem contains_char_eq_contains_beq {c : Char} {s : Slice} :
s.contains c = s.contains (· == c) :=
(rfl)
theorem endsWith_char_eq_endsWith_beq {c : Char} {s : Slice} :
s.endsWith c = s.endsWith (· == c) := (rfl)
theorem dropSuffix?_char_eq_dropSuffix?_beq {c : Char} {s : Slice} :
s.dropSuffix? c = s.dropSuffix? (· == c) := (rfl)
theorem dropSuffix_char_eq_dropSuffix_beq {c : Char} {s : Slice} :
s.dropSuffix c = s.dropSuffix (· == c) := (rfl)
theorem Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq {c : Char} {s : Slice} :
dropSuffix? c s = dropSuffix? (· == c) s := (rfl)
private theorem dropEndWhileGo_eq {c : Char} {s : Slice} (curr : s.Pos) :
dropEndWhile.go s c curr = dropEndWhile.go s (· == c) curr := by
fun_induction dropEndWhile.go s c curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq, h, ih]
| case3 pos h =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq]
theorem dropEndWhile_char_eq_dropEndWhile_beq {c : Char} {s : Slice} :
s.dropEndWhile c = s.dropEndWhile (· == c) := by
simpa only [dropEndWhile] using dropEndWhileGo_eq s.endPos
private theorem takeEndWhileGo_eq {c : Char} {s : Slice} (curr : s.Pos) :
takeEndWhile.go s c curr = takeEndWhile.go s (· == c) curr := by
fun_induction takeEndWhile.go s c curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq, h, ih]
| case3 pos h =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_char_eq_dropSuffix?_beq]
theorem takeEndWhile_char_eq_takeEndWhile_beq {c : Char} {s : Slice} :
s.takeEndWhile c = s.takeEndWhile (· == c) := by
simpa only [takeEndWhile] using takeEndWhileGo_eq s.endPos
end String.Slice

View File

@@ -7,6 +7,8 @@ module
prelude
public import Init.Data.String.Slice
public import Init.Data.String.Search
public import Init.Data.String.Lemmas.Splits
import Init.Data.String.Lemmas.Pattern.Find.Basic
import Init.Data.String.Lemmas.Pattern.Char
import Init.Data.String.Lemmas.Basic
@@ -17,6 +19,8 @@ import Init.Grind
import Init.Data.Option.Lemmas
import Init.Data.String.OrderInstances
public section
namespace String.Slice
theorem find?_char_eq_some_iff {c : Char} {s : Slice} {pos : s.Pos} :
@@ -98,7 +102,7 @@ end Slice
theorem Pos.find?_char_eq_some_iff {c : Char} {s : String} {pos pos' : s.Pos} :
pos.find? c = some pos'
pos pos' ( h, pos'.get h = c)
pos'', pos pos'' (h' : pos'' < pos') pos''.get (Pos.ne_endPos_of_lt h') c := by
pos'', pos pos'' (h' : pos'' < pos') pos''.get (by exact Pos.ne_endPos_of_lt h') c := by
simp only [Pos.find?_eq_find?_toSlice, Option.map_eq_some_iff,
Slice.Pos.find?_char_eq_some_iff, ne_eq, endPos_toSlice]
refine ?_, ?_
@@ -143,9 +147,21 @@ theorem Pos.find?_char_eq_none_iff_not_mem_of_splits {c : Char} {s : String} {po
rw [Pos.find?_eq_find?_toSlice, Option.map_eq_none_iff]
exact Slice.Pos.find?_char_eq_none_iff_not_mem_of_splits (Pos.splits_toSlice_iff.mpr hs)
theorem Pos.find?_char_eq_find?_beq {c : Char} {s : String} {pos : s.Pos} :
pos.find? c = pos.find? (· == c) := by
simp only [Pos.find?_eq_find?_toSlice, Slice.Pos.find?_char_eq_find?_beq]
theorem find?_char_eq_find?_beq {c : Char} {s : String} :
s.find? c = s.find? (· == c) := by
simp only [find?_eq_find?_toSlice, Slice.find?_char_eq_find?_beq]
theorem contains_char_eq_contains_beq {c : Char} {s : String} :
s.contains c = s.contains (· == c) := by
simp only [contains_eq_contains_toSlice, Slice.contains_char_eq_contains_beq]
theorem find?_char_eq_some_iff {c : Char} {s : String} {pos : s.Pos} :
s.find? c = some pos
h, pos.get h = c pos', (h' : pos' < pos) pos'.get (Pos.ne_endPos_of_lt h') c := by
h, pos.get h = c pos', (h' : pos' < pos) pos'.get (by exact Pos.ne_endPos_of_lt h') c := by
simp only [find?_eq_find?_toSlice, Option.map_eq_some_iff, Slice.find?_char_eq_some_iff, ne_eq,
endPos_toSlice, exists_and_right]
refine ?_, ?_

View File

@@ -7,6 +7,10 @@ module
prelude
public import Init.Data.String.Slice
public import Init.Data.String.Search
public import Init.Data.String.Lemmas.Splits
import all Init.Data.String.Slice
import all Init.Data.String.Search
import Init.Data.String.Lemmas.Pattern.Find.Basic
import Init.Data.String.Lemmas.Pattern.Pred
import Init.Data.String.Lemmas.Basic
@@ -17,6 +21,8 @@ import Init.Grind
import Init.Data.Option.Lemmas
import Init.Data.String.OrderInstances
public section
namespace String.Slice
theorem find?_bool_eq_some_iff {p : Char Bool} {s : Slice} {pos : s.Pos} :
@@ -27,7 +33,8 @@ theorem find?_bool_eq_some_iff {p : Char → Bool} {s : Slice} {pos : s.Pos} :
theorem find?_prop_eq_some_iff {p : Char Prop} [DecidablePred p] {s : Slice} {pos : s.Pos} :
s.find? p = some pos
h, p (pos.get h) pos', (h' : pos' < pos) ¬ p (pos'.get (Pos.ne_endPos_of_lt h')) := by
grind [Pattern.Model.find?_eq_some_iff, Pattern.Model.CharPred.Decidable.matchesAt_iff]
simp only [find?_prop_eq_find?_decide, find?_bool_eq_some_iff, decide_eq_true_eq,
decide_eq_false_iff_not]
theorem find?_bool_eq_some_iff_splits {p : Char Bool} {s : Slice} {pos : s.Pos} :
s.find? p = some pos
@@ -48,17 +55,8 @@ theorem find?_prop_eq_some_iff_splits {p : Char → Prop} [DecidablePred p] {s :
{pos : s.Pos} :
s.find? p = some pos
t c u, pos.Splits t (singleton c ++ u) p c d t.toList, ¬ p d := by
rw [find?_prop_eq_some_iff]
refine ?_, ?_
· rintro h, hp, hmin
exact _, _, _, pos.splits_next_right h, hp, fun d hd => by
obtain pos', hlt, hpget := (pos.splits_next_right h).mem_toList_left_iff.mp hd
subst hpget; exact hmin _ hlt
· rintro t, c, u, hs, hpc, hmin
have hne := hs.ne_endPos_of_singleton
refine hne, ?_, fun pos' hlt => hmin _ (hs.mem_toList_left_iff.mpr pos', hlt, rfl)
rw [(singleton_append_inj.mp (hs.eq_right (pos.splits_next_right hne))).1.symm]
exact hpc
simp only [find?_prop_eq_find?_decide, find?_bool_eq_some_iff_splits, decide_eq_true_eq,
decide_eq_false_iff_not]
@[simp]
theorem contains_bool_eq {p : Char Bool} {s : Slice} : s.contains p = s.copy.toList.any p := by
@@ -70,10 +68,7 @@ theorem contains_bool_eq {p : Char → Bool} {s : Slice} : s.contains p = s.copy
@[simp]
theorem contains_prop_eq {p : Char Prop} [DecidablePred p] {s : Slice} :
s.contains p = s.copy.toList.any p := by
rw [Bool.eq_iff_iff, Pattern.Model.contains_eq_true_iff]
simp only [Pattern.Model.CharPred.Decidable.matchesAt_iff, ne_eq, List.any_eq_true,
mem_toList_copy_iff_exists_get, decide_eq_true_eq]
exact fun pos, h, hp => _, _, _, rfl, hp, fun _, p, h, h', hp => p, h, h' hp
rw [contains_prop_eq_contains_decide, contains_bool_eq]
theorem Pos.find?_bool_eq_some_iff {p : Char Bool} {s : Slice} {pos pos' : s.Pos} :
pos.find? p = some pos'
@@ -141,69 +136,53 @@ theorem Pos.find?_prop_eq_some_iff {p : Char → Prop} [DecidablePred p] {s : Sl
pos pos' ( h, p (pos'.get h))
pos'', pos pos'' (h' : pos'' < pos')
¬ p (pos''.get (Pos.ne_endPos_of_lt h')) := by
grind [Pattern.Model.posFind?_eq_some_iff, Pattern.Model.CharPred.Decidable.matchesAt_iff]
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_some_iff, decide_eq_true_eq,
decide_eq_false_iff_not]
theorem Pos.find?_prop_eq_some_iff_splits {p : Char Prop} [DecidablePred p] {s : Slice}
{pos : s.Pos} {t u : String} (hs : pos.Splits t u) {pos' : s.Pos} :
pos.find? p = some pos'
v c w, pos'.Splits (t ++ v) (singleton c ++ w) p c d v.toList, ¬ p d := by
rw [Pos.find?_prop_eq_some_iff]
refine ?_, ?_
· rintro hle, hne, hp, hmin
have hsplit := pos'.splits_next_right hne
obtain v, hv1, hv2 := (hs.le_iff_exists_eq_append hsplit).mp hle
refine v, pos'.get hne, _, hsplit.of_eq hv1 rfl, hp, fun d hd => ?_
obtain _, hcopy :=
Slice.copy_slice_eq_iff_splits.mpr t, _, hs.of_eq rfl hv2, hsplit.of_eq hv1 rfl
rw [ hcopy] at hd
obtain q, hq, hqget := mem_toList_copy_iff_exists_get.mp hd
have hlt : Pos.ofSlice q < pos' := by
simpa [ Slice.Pos.lt_endPos_iff, Pos.ofSlice_lt_ofSlice_iff] using hq
subst hqget; rw [Slice.Pos.get_eq_get_ofSlice]; exact hmin _ Pos.le_ofSlice hlt
· rintro v, c, w, hsplit, hpc, hmin
have hne := hsplit.ne_endPos_of_singleton
have hu : u = v ++ (singleton c ++ w) :=
append_right_inj t |>.mp (hs.eq_append.symm.trans (by rw [hsplit.eq_append, append_assoc]))
have hle : pos pos' := (hs.le_iff_exists_eq_append hsplit).mpr v, rfl, hu
refine hle, hne, ?_, fun pos'' hle' hlt => hmin _ ?_
· rw [(singleton_append_inj.mp (hsplit.eq_right (pos'.splits_next_right hne))).1.symm]
exact hpc
· obtain _, hcopy :=
Slice.copy_slice_eq_iff_splits.mpr t, _, hs.of_eq rfl hu, hsplit
rw [ hcopy]
exact mem_toList_copy_iff_exists_get.mpr
pos''.slice pos pos' hle' (Std.le_of_lt hlt),
fun h => Std.ne_of_lt hlt
(by rw [ Slice.Pos.ofSlice_slice (h₁ := hle') (h₂ := Std.le_of_lt hlt), h,
Slice.Pos.ofSlice_endPos]),
by rw [Slice.Pos.get_eq_get_ofSlice]
simp [Slice.Pos.ofSlice_slice]
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_some_iff_splits hs, decide_eq_true_eq,
decide_eq_false_iff_not]
theorem Pos.find?_prop_eq_none_iff {p : Char Prop} [DecidablePred p] {s : Slice} {pos : s.Pos} :
pos.find? p = none
pos', pos pos' (h : pos' s.endPos) ¬ p (pos'.get h) := by
grind [Pattern.Model.posFind?_eq_none_iff, Pattern.Model.CharPred.Decidable.matchesAt_iff]
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_none_iff, decide_eq_false_iff_not]
theorem Pos.find?_prop_eq_none_iff_of_splits {p : Char Prop} [DecidablePred p] {s : Slice}
{pos : s.Pos} {t u : String} (hs : pos.Splits t u) :
pos.find? p = none c u.toList, ¬ p c := by
rw [Pos.find?_prop_eq_none_iff]
constructor
· intro h c hc
obtain pos', hle, hne, hget := hs.mem_toList_right_iff.mp hc
subst hget; exact h pos' hle hne
· intro h pos' hle hne
exact h _ (hs.mem_toList_right_iff.mpr pos', hle, hne, rfl)
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_none_iff_of_splits hs,
decide_eq_false_iff_not]
end String.Slice
namespace String
theorem Pos.find?_prop_eq_find?_decide {p : Char Prop} [DecidablePred p] {s : String}
{pos : s.Pos} : pos.find? p = pos.find? (decide <| p ·) := by
show (pos.toSlice.find? p).map Pos.ofToSlice = (pos.toSlice.find? (decide <| p ·)).map Pos.ofToSlice
simp only [show pos.toSlice.find? p = pos.toSlice.find? (decide <| p ·) from
Slice.Pos.find?_prop_eq_find?_decide]
theorem find?_prop_eq_find?_decide {p : Char Prop} [DecidablePred p] {s : String} :
s.find? p = s.find? (decide <| p ·) := by
show (s.toSlice.find? p).map Pos.ofToSlice = (s.toSlice.find? (decide <| p ·)).map Pos.ofToSlice
simp only [show s.toSlice.find? p = s.toSlice.find? (decide <| p ·) from
Slice.find?_prop_eq_find?_decide]
theorem contains_prop_eq_contains_decide {p : Char Prop} [DecidablePred p] {s : String} :
s.contains p = s.contains (decide <| p ·) := by
show s.toSlice.contains p = s.toSlice.contains (decide <| p ·)
exact Slice.contains_prop_eq_contains_decide
theorem Pos.find?_bool_eq_some_iff {p : Char Bool} {s : String} {pos pos' : s.Pos} :
pos.find? p = some pos'
pos pos' ( h, p (pos'.get h))
pos'', pos pos'' (h' : pos'' < pos')
p (pos''.get (Pos.ne_endPos_of_lt h')) = false := by
p (pos''.get (by exact Pos.ne_endPos_of_lt h')) = false := by
simp only [Pos.find?_eq_find?_toSlice, Option.map_eq_some_iff,
Slice.Pos.find?_bool_eq_some_iff, endPos_toSlice]
refine ?_, ?_
@@ -256,57 +235,32 @@ theorem Pos.find?_prop_eq_some_iff {p : Char → Prop} [DecidablePred p] {s : St
pos.find? p = some pos'
pos pos' ( h, p (pos'.get h))
pos'', pos pos'' (h' : pos'' < pos')
¬ p (pos''.get (Pos.ne_endPos_of_lt h')) := by
simp only [Pos.find?_eq_find?_toSlice, Option.map_eq_some_iff,
Slice.Pos.find?_prop_eq_some_iff, endPos_toSlice]
refine ?_, ?_
· rintro pos', h₁, h₂, hp, h₃, rfl
refine by simpa [Pos.ofToSlice_le_iff] using h₁,
by simpa [ Pos.ofToSlice_inj] using h₂, by simpa [Pos.get_ofToSlice] using hp, ?_
intro pos'' h₄ h₅
simpa using h₃ pos''.toSlice (by simpa [Pos.toSlice_le] using h₄) (by simpa using h₅)
· rintro h₁, h₂, hp, h₃
refine pos'.toSlice, by simpa [Pos.toSlice_le] using h₁,
by simpa [ Pos.toSlice_inj] using h₂, by simpa using hp, fun p hp₁ hp₂ => ?_,
by simp
simpa using h₃ (Pos.ofToSlice p)
(by simpa [Pos.ofToSlice_le_iff] using hp₁) (by simpa using hp₂)
¬ p (pos''.get (by exact Pos.ne_endPos_of_lt h')) := by
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_some_iff, decide_eq_true_eq,
decide_eq_false_iff_not]
theorem Pos.find?_prop_eq_some_iff_splits {p : Char Prop} [DecidablePred p] {s : String}
{pos : s.Pos} {t u : String} (hs : pos.Splits t u) {pos' : s.Pos} :
pos.find? p = some pos'
v c w, pos'.Splits (t ++ v) (singleton c ++ w) p c d v.toList, ¬ p d := by
simp only [Pos.find?_eq_find?_toSlice, Option.map_eq_some_iff,
Slice.Pos.find?_prop_eq_some_iff_splits (Pos.splits_toSlice_iff.mpr hs)]
constructor
· rintro q, v, c, w, hsplit, hpc, hmin, rfl
exact v, c, w, Slice.Pos.splits_ofToSlice_iff.mpr hsplit, hpc, hmin
· rintro v, c, w, hsplit, hpc, hmin
exact pos'.toSlice, v, c, w, Pos.splits_toSlice_iff.mpr hsplit, hpc, hmin, by simp
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_some_iff_splits hs, decide_eq_true_eq,
decide_eq_false_iff_not]
theorem Pos.find?_prop_eq_none_iff {p : Char Prop} [DecidablePred p] {s : String}
{pos : s.Pos} :
pos.find? p = none
pos', pos pos' (h : pos' s.endPos) ¬ p (pos'.get h) := by
simp only [Pos.find?_eq_find?_toSlice, Option.map_eq_none_iff,
Slice.Pos.find?_prop_eq_none_iff, endPos_toSlice]
refine ?_, ?_
· intro h pos' h₁ h₂
simpa [Pos.get_ofToSlice] using
h pos'.toSlice (by simpa [Pos.toSlice_le] using h₁) (by simpa [ Pos.toSlice_inj] using h₂)
· intro h pos' h₁ h₂
simpa using h (Pos.ofToSlice pos')
(by simpa [Pos.ofToSlice_le_iff] using h₁) (by simpa [ Pos.ofToSlice_inj] using h₂)
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_none_iff, decide_eq_false_iff_not]
theorem Pos.find?_prop_eq_none_iff_of_splits {p : Char Prop} [DecidablePred p] {s : String}
{pos : s.Pos} {t u : String} (hs : pos.Splits t u) :
pos.find? p = none c u.toList, ¬ p c := by
rw [Pos.find?_eq_find?_toSlice, Option.map_eq_none_iff]
exact Slice.Pos.find?_prop_eq_none_iff_of_splits (Pos.splits_toSlice_iff.mpr hs)
simp only [Pos.find?_prop_eq_find?_decide, Pos.find?_bool_eq_none_iff_of_splits hs,
decide_eq_false_iff_not]
theorem find?_bool_eq_some_iff {p : Char Bool} {s : String} {pos : s.Pos} :
s.find? p = some pos
h, p (pos.get h) pos', (h' : pos' < pos) p (pos'.get (Pos.ne_endPos_of_lt h')) = false := by
h, p (pos.get h) pos', (h' : pos' < pos) p (pos'.get (by exact Pos.ne_endPos_of_lt h')) = false := by
simp only [find?_eq_find?_toSlice, Option.map_eq_some_iff, Slice.find?_bool_eq_some_iff,
endPos_toSlice, exists_and_right]
refine ?_, ?_
@@ -331,29 +285,16 @@ theorem find?_bool_eq_some_iff_splits {p : Char → Bool} {s : String} {pos : s.
theorem find?_prop_eq_some_iff {p : Char Prop} [DecidablePred p] {s : String} {pos : s.Pos} :
s.find? p = some pos
h, p (pos.get h) pos', (h' : pos' < pos) ¬ p (pos'.get (Pos.ne_endPos_of_lt h')) := by
simp only [find?_eq_find?_toSlice, Option.map_eq_some_iff, Slice.find?_prop_eq_some_iff,
endPos_toSlice, exists_and_right]
refine ?_, ?_
· rintro pos, h, hp, h', rfl
refine by simpa [ Pos.ofToSlice_inj] using h, by simpa [Pos.get_ofToSlice] using hp, ?_
intro pos' hp
simpa using h' pos'.toSlice hp
· rintro h, hp, hmin
exact pos.toSlice, by simpa [ Pos.toSlice_inj] using h, by simpa using hp,
fun pos' hp => by simpa using hmin (Pos.ofToSlice pos') hp, by simp
h, p (pos.get h) pos', (h' : pos' < pos) ¬ p (pos'.get (by exact Pos.ne_endPos_of_lt h')) := by
simp only [find?_prop_eq_find?_decide, find?_bool_eq_some_iff, decide_eq_true_eq,
decide_eq_false_iff_not]
theorem find?_prop_eq_some_iff_splits {p : Char Prop} [DecidablePred p] {s : String}
{pos : s.Pos} :
s.find? p = some pos
t c u, pos.Splits t (singleton c ++ u) p c d t.toList, ¬ p d := by
simp only [find?_eq_find?_toSlice, Option.map_eq_some_iff,
Slice.find?_prop_eq_some_iff_splits]
constructor
· rintro q, t, c, u, hsplit, hpc, hmin, rfl
exact t, c, u, Slice.Pos.splits_ofToSlice_iff.mpr hsplit, hpc, hmin
· rintro t, c, u, hsplit, hpc, hmin
exact pos.toSlice, t, c, u, Pos.splits_toSlice_iff.mpr hsplit, hpc, hmin, by simp
simp only [find?_prop_eq_find?_decide, find?_bool_eq_some_iff_splits, decide_eq_true_eq,
decide_eq_false_iff_not]
@[simp]
theorem contains_bool_eq {p : Char Bool} {s : String} : s.contains p = s.toList.any p := by
@@ -362,6 +303,6 @@ theorem contains_bool_eq {p : Char → Bool} {s : String} : s.contains p = s.toL
@[simp]
theorem contains_prop_eq {p : Char Prop} [DecidablePred p] {s : String} :
s.contains p = s.toList.any p := by
simp [contains_eq_contains_toSlice, Slice.contains_prop_eq, copy_toSlice]
rw [contains_prop_eq_contains_decide, contains_bool_eq]
end String

View File

@@ -8,6 +8,7 @@ module
prelude
public import Init.Data.String.Search
import all Init.Data.String.Slice
import all Init.Data.String.Pattern.String
import Init.ByCases
import Init.Data.String.Lemmas.Pattern.Find.Basic
import Init.Data.String.Lemmas.Pattern.String.ForwardSearcher
@@ -55,11 +56,7 @@ theorem contains_slice_iff {t s : Slice} :
@[simp]
theorem contains_string_iff {t : String} {s : Slice} :
s.contains t t.toList <:+: s.copy.toList := by
by_cases ht : t = ""
· simp [contains_string_eq_true_of_empty ht s, ht]
· have := Pattern.Model.ForwardStringSearcher.lawfulToForwardSearcherModel (pat := t) ht
simp only [Pattern.Model.contains_eq_true_iff,
Pattern.Model.ForwardStringSearcher.exists_matchesAt_iff_eq_append ht, isInfix_toList_iff]
simp [contains_string_eq_contains_toSlice, contains_slice_iff, copy_toSlice]
@[simp]
theorem contains_slice_eq_false_iff {t s : Slice} :

View File

@@ -8,6 +8,11 @@ module
prelude
public import Init.Data.String.Pattern.Pred
public import Init.Data.String.Lemmas.Pattern.Basic
public import Init.Data.String.Slice
public import Init.Data.String.Search
import all Init.Data.String.Slice
import all Init.Data.String.Pattern.Pred
import all Init.Data.String.Search
import Init.Data.Option.Lemmas
import Init.Data.String.Lemmas.Basic
import Init.Data.String.Lemmas.Order
@@ -112,6 +117,15 @@ theorem isLongestMatchAt_of_get {p : Char → Prop} [DecidablePred p] {s : Slice
{h : pos s.endPos} (hc : p (pos.get h)) : IsLongestMatchAt p pos (pos.next h) :=
isLongestMatchAt_iff.2 h, by simp [hc]
theorem matchesAt_iff_matchesAt_decide {p : Char Prop} [DecidablePred p] {s : Slice}
{pos : s.Pos} : MatchesAt p pos MatchesAt (decide <| p ·) pos := by
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_isLongestMatchAt_decide]
theorem matchAt?_eq_matchAt?_decide {p : Char Prop} [DecidablePred p] {s : Slice}
{pos : s.Pos} : matchAt? p pos = matchAt? (decide <| p ·) pos := by
ext endPos
simp [isLongestMatchAt_iff_isLongestMatchAt_decide]
theorem dropPrefix?_eq_dropPrefix?_decide {p : Char Prop} [DecidablePred p] :
ForwardPattern.dropPrefix? p = ForwardPattern.dropPrefix? (decide <| p ·) := rfl
@@ -120,8 +134,26 @@ instance {p : Char → Prop} [DecidablePred p] : LawfulForwardPatternModel p whe
rw [dropPrefix?_eq_dropPrefix?_decide, isLongestMatch_iff_isLongestMatch_decide]
exact LawfulForwardPatternModel.dropPrefix?_eq_some_iff ..
instance {p : Char Prop} [DecidablePred p] : LawfulToForwardSearcherModel p :=
.defaultImplementation
theorem toSearcher_eq {p : Char Prop} [DecidablePred p] {s : Slice} :
ToForwardSearcher.toSearcher p s = ToForwardSearcher.toSearcher (decide <| p ·) s := (rfl)
theorem isValidSearchFrom_iff_isValidSearchFrom_decide {p : Char Prop} [DecidablePred p]
{s : Slice} {pos : s.Pos} {l : List (SearchStep s)} :
IsValidSearchFrom p pos l IsValidSearchFrom (decide <| p ·) pos l := by
refine fun h => ?_, fun h => ?_
· induction h with
| endPos => simpa using IsValidSearchFrom.endPos
| matched => simp_all [IsValidSearchFrom.matched, isLongestMatchAt_iff_isLongestMatchAt_decide]
| mismatched => simp_all [IsValidSearchFrom.mismatched, matchesAt_iff_matchesAt_decide]
· induction h with
| endPos => simpa using IsValidSearchFrom.endPos
| matched => simp_all [IsValidSearchFrom.matched, isLongestMatchAt_iff_isLongestMatchAt_decide]
| mismatched => simp_all [IsValidSearchFrom.mismatched, matchesAt_iff_matchesAt_decide]
instance {p : Char Prop} [DecidablePred p] : LawfulToForwardSearcherModel p where
isValidSearchFrom_toList s := by
simpa [toSearcher_eq, isValidSearchFrom_iff_isValidSearchFrom_decide] using
LawfulToForwardSearcherModel.isValidSearchFrom_toList (pat := (decide <| p ·)) (s := s)
theorem matchesAt_iff {p : Char Prop} [DecidablePred p] {s : Slice} {pos : s.Pos} :
MatchesAt p pos (h : pos s.endPos), p (pos.get h) := by
@@ -138,4 +170,121 @@ theorem matchAt?_eq {s : Slice} {pos : s.Pos} {p : Char → Prop} [DecidablePred
end Decidable
end String.Slice.Pattern.Model.CharPred
end Pattern.Model.CharPred
theorem startsWith_prop_eq_startsWith_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.startsWith p = s.startsWith (decide <| p ·) := (rfl)
theorem dropPrefix?_prop_eq_dropPrefix?_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.dropPrefix? p = s.dropPrefix? (decide <| p ·) := (rfl)
theorem dropPrefix_prop_eq_dropPrefix_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.dropPrefix p = s.dropPrefix (decide <| p ·) := (rfl)
theorem Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide
{p : Char Prop} [DecidablePred p] {s : Slice} :
dropPrefix? p s = dropPrefix? (decide <| p ·) s := (rfl)
private theorem dropWhileGo_eq {p : Char Prop} [DecidablePred p] {s : Slice} (curr : s.Pos) :
dropWhile.go s p curr = dropWhile.go s (decide <| p ·) curr := by
fun_induction dropWhile.go s p curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide, h, ih]
| case3 pos h =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide]
theorem dropWhile_prop_eq_dropWhile_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.dropWhile p = s.dropWhile (decide <| p ·) := by
simpa only [dropWhile] using dropWhileGo_eq s.startPos
private theorem takeWhileGo_eq {p : Char Prop} [DecidablePred p] {s : Slice} (curr : s.Pos) :
takeWhile.go s p curr = takeWhile.go s (decide <| p ·) curr := by
fun_induction takeWhile.go s p curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide, h, ih]
| case3 pos h =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_prop_eq_dropPrefix?_decide]
theorem takeWhile_prop_eq_takeWhile_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.takeWhile p = s.takeWhile (decide <| p ·) := by
simp only [takeWhile]; exact takeWhileGo_eq s.startPos
theorem all_prop_eq_all_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.all p = s.all (decide <| p ·) := by
simp only [all, dropWhile_prop_eq_dropWhile_decide]
theorem find?_prop_eq_find?_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.find? p = s.find? (decide <| p ·) :=
(rfl)
theorem Pos.find?_prop_eq_find?_decide {p : Char Prop} [DecidablePred p] {s : Slice}
{pos : s.Pos} :
pos.find? p = pos.find? (decide <| p ·) :=
(rfl)
theorem contains_prop_eq_contains_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.contains p = s.contains (decide <| p ·) :=
(rfl)
theorem endsWith_prop_eq_endsWith_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.endsWith p = s.endsWith (decide <| p ·) := (rfl)
theorem dropSuffix?_prop_eq_dropSuffix?_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.dropSuffix? p = s.dropSuffix? (decide <| p ·) := (rfl)
theorem dropSuffix_prop_eq_dropSuffix_decide {p : Char Prop} [DecidablePred p] {s : Slice} :
s.dropSuffix p = s.dropSuffix (decide <| p ·) := (rfl)
theorem Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide
{p : Char Prop} [DecidablePred p] {s : Slice} :
dropSuffix? p s = dropSuffix? (decide <| p ·) s := (rfl)
private theorem dropEndWhileGo_eq {p : Char Prop} [DecidablePred p] {s : Slice}
(curr : s.Pos) :
dropEndWhile.go s p curr = dropEndWhile.go s (decide <| p ·) curr := by
fun_induction dropEndWhile.go s p curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide, h, ih]
| case3 pos h =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide]
theorem dropEndWhile_prop_eq_dropEndWhile_decide {p : Char Prop} [DecidablePred p]
{s : Slice} :
s.dropEndWhile p = s.dropEndWhile (decide <| p ·) := by
simpa only [dropEndWhile] using dropEndWhileGo_eq s.endPos
private theorem takeEndWhileGo_eq {p : Char Prop} [DecidablePred p] {s : Slice}
(curr : s.Pos) :
takeEndWhile.go s p curr = takeEndWhile.go s (decide <| p ·) curr := by
fun_induction takeEndWhile.go s p curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide, h, ih]
| case3 pos h =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_prop_eq_dropSuffix?_decide]
theorem takeEndWhile_prop_eq_takeEndWhile_decide {p : Char Prop} [DecidablePred p]
{s : Slice} :
s.takeEndWhile p = s.takeEndWhile (decide <| p ·) := by
simpa only [takeEndWhile] using takeEndWhileGo_eq s.endPos
end String.Slice

View File

@@ -9,10 +9,14 @@ prelude
public import Init.Data.String.Slice
public import Init.Data.String.Search
public import Init.Data.List.SplitOn.Basic
public import Init.Data.String.Lemmas.Pattern.Split.Basic
public import Init.Data.String.Lemmas.Pattern.Char
import all Init.Data.String.Lemmas.Pattern.Split.Basic
import Init.Data.String.Termination
import Init.Data.Order.Lemmas
import Init.Data.Iterators.Lemmas.Combinators.FilterMap
import Init.Data.String.Lemmas.Pattern.Split.Basic
import Init.Data.String.Lemmas.Pattern.Split.Pred
import Init.Data.String.Lemmas.Pattern.Char
import Init.ByCases
import Init.Data.String.OrderInstances
@@ -26,28 +30,26 @@ namespace String.Slice
open Pattern.Model Pattern.Model.Char
theorem Pattern.Model.split_char_eq_split_beq {c : Char} {s : Slice}
(f curr : s.Pos) (hle : f curr) :
Model.split c f curr hle = Model.split (· == c) f curr hle := by
fun_induction Model.split c f curr hle with
| case1 fr h => simp
| case2 fr curr hle h pos h₁ h₂ ih =>
conv => rhs; rw [Model.split]
simp only [h, reduceDIte]
split <;> simp_all [matchAt?_eq_matchAt?_beq]
| case3 fr curr hle h pos ih =>
conv => rhs; rw [Model.split]
simp only [h, reduceDIte]
split <;> simp_all [matchAt?_eq_matchAt?_beq]
theorem toList_splitToSubslice_char {s : Slice} {c : Char} :
(s.splitToSubslice c).toList.map (Slice.copy Subslice.toSlice) =
(s.copy.toList.splitOn c).map String.ofList := by
simp only [Pattern.toList_splitToSubslice_eq_modelSplit]
suffices (f p : s.Pos) (hle : f p) (t₁ t₂ : String),
p.Splits t₁ t₂ (Pattern.Model.split c f p hle).map (copy Subslice.toSlice) =
(t₂.toList.splitOnPPrepend (· == c) (s.subslice f p hle).copy.toList.reverse).map String.ofList by
simpa [List.splitOn_eq_splitOnP] using this s.startPos s.startPos (Std.le_refl _) "" s.copy
intro f p hle t₁ t₂ hp
induction p using Pos.next_induction generalizing f t₁ t₂ with
| next p h ih =>
obtain t₂, rfl := hp.exists_eq_singleton_append h
by_cases hpc : p.get h = c
· simp [split_eq_of_isLongestMatchAt (isLongestMatchAt_of_get_eq hpc),
ih _ (Std.le_refl _) _ _ hp.next,
List.splitOnPPrepend_cons_pos (p := (· == c)) (beq_iff_eq.2 hpc)]
· rw [split_eq_next_of_not_matchesAt h (not_matchesAt_of_get_ne hpc)]
simp only [toList_append, toList_singleton, List.cons_append, List.nil_append, Subslice.copy_eq]
rw [ih _ _ _ _ hp.next, List.splitOnPPrepend_cons_neg (by simpa)]
have := (splits_slice (Std.le_trans hle (by simp)) (p.slice f (p.next h) hle (by simp))).eq_append
simp_all
| endPos => simp_all
have : (s.splitToSubslice c).toList = (s.splitToSubslice (· == c)).toList := by
simp only [Pattern.toList_splitToSubslice_eq_modelSplit, Pattern.Model.split_char_eq_split_beq]
simp only [this, List.splitOn_eq_splitOnP, toList_splitToSubslice_bool]
theorem toList_split_char {s : Slice} {c : Char} :
(s.split c).toList.map Slice.copy = (s.copy.toList.splitOn c).map String.ofList := by

View File

@@ -9,7 +9,10 @@ prelude
public import Init.Data.String.Slice
public import Init.Data.String.Search
public import Init.Data.List.SplitOn.Basic
public import Init.Data.String.Lemmas.Pattern.Split.Basic
public import Init.Data.String.Lemmas.Pattern.Pred
import Init.Data.String.Termination
import all Init.Data.String.Lemmas.Pattern.Split.Basic
import Init.Data.Order.Lemmas
import Init.Data.Iterators.Lemmas.Combinators.FilterMap
import Init.Data.String.Lemmas.Pattern.Split.Basic
@@ -61,28 +64,27 @@ section
open Pattern.Model Pattern.Model.CharPred.Decidable
theorem Pattern.Model.split_eq_split_decide {p : Char Prop} [DecidablePred p] {s : Slice}
(f curr : s.Pos) (hle : f curr) :
Model.split p f curr hle = Model.split (decide <| p ·) f curr hle := by
fun_induction Model.split p f curr hle with
| case1 fr h => simp
| case2 fr curr hle h pos h₁ h₂ ih =>
conv => rhs; rw [Model.split]
simp only [h, reduceDIte]
split <;> simp_all [matchAt?_eq_matchAt?_decide]
| case3 fr curr hle h pos ih =>
conv => rhs; rw [Model.split]
simp only [h, reduceDIte]
split <;> simp_all [matchAt?_eq_matchAt?_decide]
theorem toList_splitToSubslice_prop {s : Slice} {p : Char Prop} [DecidablePred p] :
(s.splitToSubslice p).toList.map (Slice.copy Subslice.toSlice) =
(s.copy.toList.splitOnP p).map String.ofList := by
simp only [Pattern.toList_splitToSubslice_eq_modelSplit]
suffices (f pos : s.Pos) (hle : f pos) (t₁ t₂ : String),
pos.Splits t₁ t₂ (Pattern.Model.split p f pos hle).map (copy Subslice.toSlice) =
(t₂.toList.splitOnPPrepend p (s.subslice f pos hle).copy.toList.reverse).map String.ofList by
simpa using this s.startPos s.startPos (Std.le_refl _) "" s.copy
intro f pos hle t₁ t₂ hp
induction pos using Pos.next_induction generalizing f t₁ t₂ with
| next pos h ih =>
obtain t₂, rfl := hp.exists_eq_singleton_append h
by_cases hpc : p (pos.get h)
· simp [split_eq_of_isLongestMatchAt (isLongestMatchAt_of_get hpc),
ih _ (Std.le_refl _) _ _ hp.next,
List.splitOnPPrepend_cons_pos (p := (decide <| p ·)) (by simpa using hpc)]
· rw [split_eq_next_of_not_matchesAt h (not_matchesAt_of_get hpc)]
simp only [toList_append, toList_singleton, List.cons_append, List.nil_append, Subslice.copy_eq]
rw [ih _ _ _ _ hp.next, List.splitOnPPrepend_cons_neg (by simpa)]
have := (splits_slice (Std.le_trans hle (by simp)) (pos.slice f (pos.next h) hle (by simp))).eq_append
simp_all
| endPos => simp_all
have : (s.splitToSubslice p).toList = (s.splitToSubslice (decide <| p ·)).toList := by
simp only [Pattern.toList_splitToSubslice_eq_modelSplit, split_eq_split_decide]
rw [this]
exact toList_splitToSubslice_bool
theorem toList_split_prop {s : Slice} {p : Char Prop} [DecidablePred p] :
(s.split p).toList.map Slice.copy = (s.copy.toList.splitOnP p).map String.ofList := by

View File

@@ -6,6 +6,7 @@ Author: Markus Himmel
module
prelude
public import Init.Data.String.Pattern.String
public import Init.Data.String.Lemmas.Pattern.Basic
import Init.Data.String.Lemmas.IsEmpty
import Init.Data.String.Lemmas.Basic
@@ -150,19 +151,19 @@ theorem isMatch_iff_slice {pat : String} {s : Slice} {pos : s.Pos} :
IsMatch (ρ := String) pat pos IsMatch (ρ := Slice) pat.toSlice pos := by
simp only [Model.isMatch_iff, ForwardPatternModel.Matches, copy_toSlice]
theorem isLongestMatch_iff_slice {pat : String} {s : Slice} {pos : s.Pos} :
theorem isLongestMatch_iff_isLongestMatch_toSlice {pat : String} {s : Slice} {pos : s.Pos} :
IsLongestMatch (ρ := String) pat pos IsLongestMatch (ρ := Slice) pat.toSlice pos where
mp h := isMatch_iff_slice.1 h.isMatch, fun p hp hm => h.not_isMatch p hp (isMatch_iff_slice.2 hm)
mpr h := isMatch_iff_slice.2 h.isMatch, fun p hp hm => h.not_isMatch p hp (isMatch_iff_slice.1 hm)
theorem isLongestMatchAt_iff_slice {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
theorem isLongestMatchAt_iff_isLongestMatchAt_toSlice {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
IsLongestMatchAt (ρ := String) pat pos₁ pos₂
IsLongestMatchAt (ρ := Slice) pat.toSlice pos₁ pos₂ := by
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff_slice]
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff_isLongestMatch_toSlice]
theorem matchesAt_iff_slice {pat : String} {s : Slice} {pos : s.Pos} :
theorem matchesAt_iff_toSlice {pat : String} {s : Slice} {pos : s.Pos} :
MatchesAt (ρ := String) pat pos MatchesAt (ρ := Slice) pat.toSlice pos := by
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_slice]
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_isLongestMatchAt_toSlice]
private theorem toSlice_isEmpty (h : pat "") : pat.toSlice.isEmpty = false := by
rwa [isEmpty_toSlice, isEmpty_eq_false_iff]
@@ -178,7 +179,7 @@ theorem isLongestMatch_iff {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠
theorem isLongestMatchAt_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} (h : pat "") :
IsLongestMatchAt pat pos₁ pos₂ h, (s.slice pos₁ pos₂ h).copy = pat := by
rw [isLongestMatchAt_iff_slice,
rw [isLongestMatchAt_iff_isLongestMatchAt_toSlice,
ForwardSliceSearcher.isLongestMatchAt_iff (toSlice_isEmpty h)]
simp
@@ -186,7 +187,7 @@ theorem isLongestMatchAt_iff_splits {pat : String} {s : Slice} {pos₁ pos₂ :
(h : pat "") :
IsLongestMatchAt pat pos₁ pos₂
t₁ t₂, pos₁.Splits t₁ (pat ++ t₂) pos₂.Splits (t₁ ++ pat) t₂ := by
rw [isLongestMatchAt_iff_slice,
rw [isLongestMatchAt_iff_isLongestMatchAt_toSlice,
ForwardSliceSearcher.isLongestMatchAt_iff_splits (toSlice_isEmpty h)]
simp
@@ -194,7 +195,7 @@ theorem isLongestMatchAt_iff_extract {pat : String} {s : Slice} {pos₁ pos₂ :
(h : pat "") :
IsLongestMatchAt pat pos₁ pos₂
s.copy.toByteArray.extract pos₁.offset.byteIdx pos₂.offset.byteIdx = pat.toByteArray := by
rw [isLongestMatchAt_iff_slice,
rw [isLongestMatchAt_iff_isLongestMatchAt_toSlice,
ForwardSliceSearcher.isLongestMatchAt_iff_extract (toSlice_isEmpty h)]
simp
@@ -203,11 +204,11 @@ theorem offset_of_isLongestMatchAt {pat : String} {s : Slice} {pos₁ pos₂ : s
pos₂.offset = pos₁.offset.increaseBy pat.utf8ByteSize := by
rw [show pat.utf8ByteSize = pat.toSlice.utf8ByteSize from utf8ByteSize_toSlice.symm]
exact ForwardSliceSearcher.offset_of_isLongestMatchAt (toSlice_isEmpty h)
(isLongestMatchAt_iff_slice.1 h')
(isLongestMatchAt_iff_isLongestMatchAt_toSlice.1 h')
theorem matchesAt_iff_splits {pat : String} {s : Slice} {pos : s.Pos} (h : pat "") :
MatchesAt pat pos t₁ t₂, pos.Splits t₁ (pat ++ t₂) := by
rw [matchesAt_iff_slice,
rw [matchesAt_iff_toSlice,
ForwardSliceSearcher.matchesAt_iff_splits (toSlice_isEmpty h)]
simp
@@ -229,8 +230,8 @@ theorem matchesAt_iff_isLongestMatchAt {pat : String} {s : Slice} {pos : s.Pos}
IsLongestMatchAt pat pos (s.pos _ h) := by
have key := ForwardSliceSearcher.matchesAt_iff_isLongestMatchAt (pat := pat.toSlice)
(toSlice_isEmpty h) (pos := pos)
simp only [utf8ByteSize_toSlice, isLongestMatchAt_iff_slice] at key
rwa [matchesAt_iff_slice]
simp only [utf8ByteSize_toSlice, isLongestMatchAt_iff_isLongestMatchAt_toSlice] at key
rwa [matchesAt_iff_toSlice]
theorem matchesAt_iff_getElem {pat : String} {s : Slice} {pos : s.Pos} (h : pat "") :
MatchesAt pat pos
@@ -240,13 +241,33 @@ theorem matchesAt_iff_getElem {pat : String} {s : Slice} {pos : s.Pos} (h : pat
have key := ForwardSliceSearcher.matchesAt_iff_getElem (pat := pat.toSlice)
(toSlice_isEmpty h) (pos := pos)
simp only [copy_toSlice] at key
rwa [matchesAt_iff_slice]
rwa [matchesAt_iff_toSlice]
theorem le_of_matchesAt {pat : String} {s : Slice} {pos : s.Pos} (h : pat "")
(h' : MatchesAt pat pos) : pos.offset.increaseBy pat.utf8ByteSize s.rawEndPos := by
rw [show pat.utf8ByteSize = pat.toSlice.utf8ByteSize from utf8ByteSize_toSlice.symm]
exact ForwardSliceSearcher.le_of_matchesAt (toSlice_isEmpty h)
(matchesAt_iff_slice.1 h')
(matchesAt_iff_toSlice.1 h')
theorem matchesAt_iff_matchesAt_toSlice {pat : String} {s : Slice}
{pos : s.Pos} : MatchesAt pat pos MatchesAt pat.toSlice pos := by
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_isLongestMatchAt_toSlice]
theorem toSearcher_eq {pat : String} {s : Slice} :
ToForwardSearcher.toSearcher pat s = ToForwardSearcher.toSearcher pat.toSlice s := (rfl)
theorem isValidSearchFrom_iff_isValidSearchFrom_toSlice {pat : String}
{s : Slice} {pos : s.Pos} {l : List (SearchStep s)} :
IsValidSearchFrom pat pos l IsValidSearchFrom pat.toSlice pos l := by
refine fun h => ?_, fun h => ?_
· induction h with
| endPos => simpa using IsValidSearchFrom.endPos
| matched => simp_all [IsValidSearchFrom.matched, isLongestMatchAt_iff_isLongestMatchAt_toSlice]
| mismatched => simp_all [IsValidSearchFrom.mismatched, matchesAt_iff_matchesAt_toSlice]
· induction h with
| endPos => simpa using IsValidSearchFrom.endPos
| matched => simp_all [IsValidSearchFrom.matched, isLongestMatchAt_iff_isLongestMatchAt_toSlice]
| mismatched => simp_all [IsValidSearchFrom.mismatched, matchesAt_iff_matchesAt_toSlice]
end ForwardStringSearcher

View File

@@ -8,7 +8,10 @@ module
prelude
public import Init.Data.String.Lemmas.Pattern.String.Basic
public import Init.Data.String.Pattern.String
public import Init.Data.String.Slice
import all Init.Data.String.Pattern.String
import all Init.Data.String.Slice
import Init.Data.String.Lemmas.Pattern.Pred
import Init.Data.String.Lemmas.Pattern.Memcmp
import Init.Data.String.Lemmas.Basic
import Init.Data.ByteArray.Lemmas
@@ -86,4 +89,104 @@ public theorem lawfulForwardPatternModel {pat : String} (hpat : pat ≠ "") :
end Model.ForwardStringSearcher
end String.Slice.Pattern
end Pattern
public theorem startsWith_string_eq_startsWith_toSlice {pat : String} {s : Slice} :
s.startsWith pat = s.startsWith pat.toSlice := (rfl)
public theorem dropPrefix?_string_eq_dropPrefix?_toSlice {pat : String} {s : Slice} :
s.dropPrefix? pat = s.dropPrefix? pat.toSlice := (rfl)
public theorem dropPrefix_string_eq_dropPrefix_toSlice {pat : String} {s : Slice} :
s.dropPrefix pat = s.dropPrefix pat.toSlice := (rfl)
public theorem Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice
{pat : String} {s : Slice} :
dropPrefix? pat s = dropPrefix? pat.toSlice s := (rfl)
private theorem dropWhileGo_string_eq {pat : String} {s : Slice} (curr : s.Pos) :
dropWhile.go s pat curr = dropWhile.go s pat.toSlice curr := by
fun_induction dropWhile.go s pat curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice, h, ih]
| case3 pos h =>
conv => rhs; rw [dropWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice]
public theorem dropWhile_string_eq_dropWhile_toSlice {pat : String} {s : Slice} :
s.dropWhile pat = s.dropWhile pat.toSlice := by
simpa only [dropWhile] using dropWhileGo_string_eq s.startPos
private theorem takeWhileGo_string_eq {pat : String} {s : Slice} (curr : s.Pos) :
takeWhile.go s pat curr = takeWhile.go s pat.toSlice curr := by
fun_induction takeWhile.go s pat curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice, h, ih]
| case3 pos h =>
conv => rhs; rw [takeWhile.go]
simp [ Pattern.ForwardPattern.dropPrefix?_string_eq_dropPrefix?_toSlice]
public theorem takeWhile_string_eq_takeWhile_toSlice {pat : String} {s : Slice} :
s.takeWhile pat = s.takeWhile pat.toSlice := by
simp only [takeWhile]; exact takeWhileGo_string_eq s.startPos
public theorem all_string_eq_all_toSlice {pat : String} {s : Slice} :
s.all pat = s.all pat.toSlice := by
simp only [all, dropWhile_string_eq_dropWhile_toSlice]
public theorem endsWith_string_eq_endsWith_toSlice {pat : String} {s : Slice} :
s.endsWith pat = s.endsWith pat.toSlice := (rfl)
public theorem dropSuffix?_string_eq_dropSuffix?_toSlice {pat : String} {s : Slice} :
s.dropSuffix? pat = s.dropSuffix? pat.toSlice := (rfl)
public theorem dropSuffix_string_eq_dropSuffix_toSlice {pat : String} {s : Slice} :
s.dropSuffix pat = s.dropSuffix pat.toSlice := (rfl)
public theorem Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice
{pat : String} {s : Slice} :
dropSuffix? pat s = dropSuffix? pat.toSlice s := (rfl)
private theorem dropEndWhileGo_string_eq {pat : String} {s : Slice} (curr : s.Pos) :
dropEndWhile.go s pat curr = dropEndWhile.go s pat.toSlice curr := by
fun_induction dropEndWhile.go s pat curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice, h, ih]
| case3 pos h =>
conv => rhs; rw [dropEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice]
public theorem dropEndWhile_string_eq_dropEndWhile_toSlice {pat : String} {s : Slice} :
s.dropEndWhile pat = s.dropEndWhile pat.toSlice := by
simpa only [dropEndWhile] using dropEndWhileGo_string_eq s.endPos
private theorem takeEndWhileGo_string_eq {pat : String} {s : Slice} (curr : s.Pos) :
takeEndWhile.go s pat curr = takeEndWhile.go s pat.toSlice curr := by
fun_induction takeEndWhile.go s pat curr with
| case1 pos nextCurr h₁ h₂ ih =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice, h₁, h₂, ih]
| case2 pos nextCurr h ih =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice, h, ih]
| case3 pos h =>
conv => rhs; rw [takeEndWhile.go]
simp [ Pattern.BackwardPattern.dropSuffix?_string_eq_dropSuffix?_toSlice]
public theorem takeEndWhile_string_eq_takeEndWhile_toSlice {pat : String} {s : Slice} :
s.takeEndWhile pat = s.takeEndWhile pat.toSlice := by
simpa only [takeEndWhile] using takeEndWhileGo_string_eq s.endPos
end String.Slice

View File

@@ -8,6 +8,10 @@ module
prelude
public import Init.Data.String.Lemmas.Pattern.String.Basic
public import Init.Data.String.Pattern.String
public import Init.Data.String.Slice
public import Init.Data.String.Search
import all Init.Data.String.Slice
import all Init.Data.String.Search
import all Init.Data.String.Pattern.String
import Init.Data.String.Lemmas.IsEmpty
import Init.Data.Vector.Lemmas
@@ -435,7 +439,6 @@ theorem Invariants.of_prefixFunction_eq {pat s : Slice} {stackPos needlePos : St
rw [Nat.sub_add_cancel (by simp at h'; omega)] at this
exact hk (h.partialMatch.partialMatch_iff.1 this).2
set_option backward.isDefEq.respectTransparency false in
theorem Invariants.isValidSearchFrom_toList {pat s : Slice} {stackPos needlePos : String.Pos.Raw}
(it : Std.Iter (α := ForwardSliceSearcher s) (SearchStep s))
(h : Invariants pat s needlePos stackPos)
@@ -600,30 +603,11 @@ end ForwardSliceSearcher
namespace ForwardStringSearcher
private theorem isValidSearchFrom_iff_slice {pat : String} {s : Slice} {pos : s.Pos}
{l : List (SearchStep s)} :
IsValidSearchFrom (ρ := String) pat pos l
IsValidSearchFrom (ρ := Slice) pat.toSlice pos l := by
constructor
· intro h
induction h with
| endPos => exact .endPos
| matched hm _ ih => exact .matched (isLongestMatchAt_iff_slice.1 hm) ih
| mismatched hlt hnm _ ih =>
exact .mismatched hlt (fun p hp₁ hp₂ hm => hnm p hp₁ hp₂ (matchesAt_iff_slice.2 hm)) ih
· intro h
induction h with
| endPos => exact .endPos
| matched hm _ ih => exact .matched (isLongestMatchAt_iff_slice.2 hm) ih
| mismatched hlt hnm _ ih =>
exact .mismatched hlt (fun p hp₁ hp₂ hm => hnm p hp₁ hp₂ (matchesAt_iff_slice.1 hm)) ih
public theorem lawfulToForwardSearcherModel {pat : String} (hpat : pat "") :
LawfulToForwardSearcherModel pat where
isValidSearchFrom_toList s :=
isValidSearchFrom_iff_slice.2
((ForwardSliceSearcher.lawfulToForwardSearcherModel
(by rwa [isEmpty_toSlice, isEmpty_eq_false_iff])).isValidSearchFrom_toList s)
isValidSearchFrom_toList s := by
simpa [toSearcher_eq, isValidSearchFrom_iff_isValidSearchFrom_toSlice] using
(ForwardSliceSearcher.lawfulToForwardSearcherModel (by simpa)).isValidSearchFrom_toList (pat := pat.toSlice) (s := s)
public theorem toSearcher_empty (s : Slice) : ToForwardSearcher.toSearcher "" s =
Std.Iter.mk (.emptyBefore s.startPos : ForwardSliceSearcher s) := by
@@ -632,4 +616,18 @@ public theorem toSearcher_empty (s : Slice) : ToForwardSearcher.toSearcher "" s
end ForwardStringSearcher
end String.Slice.Pattern.Model
end Pattern.Model
public theorem contains_string_eq_contains_toSlice {pat : String} {s : Slice} :
s.contains pat = s.contains pat.toSlice :=
(rfl)
public theorem find?_string_eq_find?_toSlice {pat : String} {s : Slice} :
s.find? pat = s.find? pat.toSlice :=
(rfl)
public theorem Pos.find?_string_eq_find?_toSlice {pat : String} {s : Slice} {p : s.Pos} :
p.find? pat = p.find? pat.toSlice :=
(rfl)
end String.Slice

View File

@@ -245,7 +245,7 @@ the given {name}`ForwardPattern` instance.
It is sometimes possible to give a more efficient implementation; see {name}`ToForwardSearcher`
for more details.
-/
@[inline]
@[inline, implicit_reducible]
def defaultImplementation [ForwardPattern pat] :
ToForwardSearcher pat (DefaultForwardSearcher pat) where
toSearcher := DefaultForwardSearcher.iter pat
@@ -450,7 +450,7 @@ the given {name}`BackwardPattern` instance.
It is sometimes possible to give a more efficient implementation; see {name}`ToBackwardSearcher`
for more details.
-/
@[inline]
@[inline, implicit_reducible]
def defaultImplementation [BackwardPattern pat] :
ToBackwardSearcher pat (DefaultBackwardSearcher pat) where
toSearcher := DefaultBackwardSearcher.iter pat

View File

@@ -6,12 +6,7 @@ Authors: Henrik Böving, Markus Himmel
module
prelude
public import Init.Data.String.Pattern.Basic
import Init.Data.String.Lemmas.FindPos
import Init.Data.String.Termination
import Init.Data.String.Lemmas.IsEmpty
import Init.Data.String.Lemmas.Order
import Init.Data.Option.Lemmas
public import Init.Data.String.Pattern.Pred
set_option doc.verso true
@@ -26,75 +21,31 @@ namespace String.Slice.Pattern
namespace Char
instance {c : Char} : ForwardPattern c where
dropPrefixOfNonempty? s h :=
if s.startPos.get (by exact Slice.startPos_ne_endPos h) = c then
some (s.startPos.next (by exact Slice.startPos_ne_endPos h))
else
none
dropPrefix? s :=
if h : s.startPos = s.endPos then
none
else if s.startPos.get h = c then
some (s.startPos.next h)
else
none
startsWith s :=
if h : s.startPos = s.endPos then
false
else
s.startPos.get h = c
dropPrefixOfNonempty? s h := ForwardPattern.dropPrefixOfNonempty? (· == c) s h
dropPrefix? s := ForwardPattern.dropPrefix? (· == c) s
startsWith s := ForwardPattern.startsWith (· == c) s
instance {c : Char} : StrictForwardPattern c where
ne_startPos {s h q} := by
simp only [ForwardPattern.dropPrefixOfNonempty?, Option.ite_none_right_eq_some,
Option.some.injEq, ne_eq, and_imp]
rintro _ rfl
simp
ne_startPos h q := StrictForwardPattern.ne_startPos (pat := (· == c)) h q
instance {c : Char} : LawfulForwardPattern c where
dropPrefixOfNonempty?_eq {s h} := by
simp [ForwardPattern.dropPrefixOfNonempty?, ForwardPattern.dropPrefix?,
Slice.startPos_eq_endPos_iff, h]
startsWith_eq {s} := by
simp only [ForwardPattern.startsWith, ForwardPattern.dropPrefix?]
split <;> (try split) <;> simp_all
dropPrefixOfNonempty?_eq h := LawfulForwardPattern.dropPrefixOfNonempty?_eq (pat := (· == c)) h
startsWith_eq s := LawfulForwardPattern.startsWith_eq (pat := (· == c)) s
instance {c : Char} : ToForwardSearcher c (ToForwardSearcher.DefaultForwardSearcher c) :=
.defaultImplementation
instance {c : Char} : ToForwardSearcher c (ToForwardSearcher.DefaultForwardSearcher (· == c)) where
toSearcher s := ToForwardSearcher.toSearcher (· == c) s
instance {c : Char} : BackwardPattern c where
dropSuffixOfNonempty? s h :=
if (s.endPos.prev (Ne.symm (by exact Slice.startPos_ne_endPos h))).get (by simp) = c then
some (s.endPos.prev (Ne.symm (by exact Slice.startPos_ne_endPos h)))
else
none
dropSuffix? s :=
if h : s.endPos = s.startPos then
none
else if (s.endPos.prev h).get (by simp) = c then
some (s.endPos.prev h)
else
none
endsWith s :=
if h : s.endPos = s.startPos then
false
else
(s.endPos.prev h).get (by simp) = c
dropSuffixOfNonempty? s h := BackwardPattern.dropSuffixOfNonempty? (· == c) s h
dropSuffix? s := BackwardPattern.dropSuffix? (· == c) s
endsWith s := BackwardPattern.endsWith (· == c) s
instance {c : Char} : StrictBackwardPattern c where
ne_endPos {s h q} := by
simp only [BackwardPattern.dropSuffixOfNonempty?, Option.ite_none_right_eq_some,
Option.some.injEq, ne_eq, and_imp]
rintro _ rfl
simp
ne_endPos h q := StrictBackwardPattern.ne_endPos (pat := (· == c)) h q
instance {c : Char} : LawfulBackwardPattern c where
dropSuffixOfNonempty?_eq {s h} := by
simp [BackwardPattern.dropSuffixOfNonempty?, BackwardPattern.dropSuffix?,
Eq.comm (a := s.endPos), Slice.startPos_eq_endPos_iff, h]
endsWith_eq {s} := by
simp only [BackwardPattern.endsWith, BackwardPattern.dropSuffix?]
split <;> (try split) <;> simp_all
dropSuffixOfNonempty?_eq h := LawfulBackwardPattern.dropSuffixOfNonempty?_eq (pat := (· == c)) h
endsWith_eq s := LawfulBackwardPattern.endsWith_eq (pat := (· == c)) s
instance {c : Char} : ToBackwardSearcher c (ToBackwardSearcher.DefaultBackwardSearcher c) :=
.defaultImplementation

View File

@@ -81,8 +81,8 @@ instance {p : Char → Prop} [DecidablePred p] : LawfulForwardPattern p where
dropPrefixOfNonempty?_eq h := LawfulForwardPattern.dropPrefixOfNonempty?_eq (pat := (decide <| p ·)) h
startsWith_eq s := LawfulForwardPattern.startsWith_eq (pat := (decide <| p ·)) s
instance {p : Char Prop} [DecidablePred p] : ToForwardSearcher p (ToForwardSearcher.DefaultForwardSearcher p) :=
.defaultImplementation
instance {p : Char Prop} [DecidablePred p] : ToForwardSearcher p (ToForwardSearcher.DefaultForwardSearcher (decide <| p ·)) where
toSearcher s := ToForwardSearcher.toSearcher (decide <| p ·) s
end Decidable

View File

@@ -278,7 +278,7 @@ def mapFinIdxM {α : Type u} {β : Type v} {m : Type v → Type w} [Monad m]
(xs : Vector α n) (f : (i : Nat) α (h : i < n) m β) : m (Vector β n) :=
let rec @[specialize] map (i : Nat) (j : Nat) (inv : i + j = n) (ys : Vector β (n - i)) : m (Vector β n) := do
match i, inv with
| 0, _ => pure ys
| 0, inv => return ys.cast (by omega)
| i+1, inv =>
have j_lt : j < n := by
rw [ inv, Nat.add_assoc, Nat.add_comm 1 j, Nat.add_comm]

View File

@@ -862,7 +862,6 @@ grind_pattern Vector.getElem?_eq_none => xs[i]? where
theorem getElem?_eq_some_iff {xs : Vector α n} : xs[i]? = some b h : i < n, xs[i] = b :=
_root_.getElem?_eq_some_iff
@[grind ]
theorem getElem_of_getElem? {xs : Vector α n} : xs[i]? = some a h : i < n, xs[i] = a :=
getElem?_eq_some_iff.mp

View File

@@ -363,7 +363,7 @@ theorem toArray_mapFinIdxM [Monad m] [LawfulMonad m] {xs : Vector α n}
= Array.mapFinIdxM.map xs.toArray (fun i x h => f i x (size_toArray xs h))
i j (size_toArray _ inv) bs.toArray := by
match i with
| 0 => simp only [mapFinIdxM.map, map_pure, Array.mapFinIdxM.map, Nat.sub_zero]
| 0 => simp [mapFinIdxM.map, map_pure, Array.mapFinIdxM.map, Nat.sub_zero]
| k + 1 =>
simp only [mapFinIdxM.map, map_bind, Array.mapFinIdxM.map, getElem_toArray]
conv => lhs; arg 2; intro; rw [go]

View File

@@ -47,6 +47,7 @@ Creates a `TypeName` instance.
For safety, it is required that the constant `typeName` is definitionally equal
to `α`.
-/
@[implicit_reducible]
unsafe def TypeName.mk (α : Type u) (typeName : Name) : TypeName α :=
unsafeCast typeName

View File

@@ -177,7 +177,7 @@ syntax (name := next) "next " binderIdent* " => " grindSeq : grind
`· grindSeq` focuses on the main `grind` goal and tries to solve it using the given
sequence of `grind` tactics.
-/
macro dot:patternIgnore("· " <|> ". ") s:grindSeq : grind => `(grind| next%$dot =>%$dot $s:grindSeq )
macro dot:unicode("· ", ". ") s:grindSeq : grind => `(grind| next%$dot =>%$dot $s:grindSeq )
/--
`any_goals tac` applies the tactic `tac` to every goal,

View File

@@ -266,6 +266,7 @@ export NoNatZeroDivisors (no_nat_zero_divisors)
namespace NoNatZeroDivisors
/-- Alternative constructor for `NoNatZeroDivisors` when we have an `IntModule`. -/
@[implicit_reducible]
def mk' {α} [IntModule α]
(eq_zero_of_mul_eq_zero : (k : Nat) (a : α), k 0 k a = 0 a = 0) :
NoNatZeroDivisors α where

View File

@@ -501,6 +501,7 @@ private theorem mk'_aux {x y : Nat} (p : Nat) (h : y ≤ x) :
omega
/-- Alternative constructor when `α` is a `Ring`. -/
@[implicit_reducible]
def mk' (p : Nat) (α : Type u) [Ring α]
(ofNat_eq_zero_iff : (x : Nat), OfNat.ofNat (α := α) x = 0 x % p = 0) : IsCharP α p where
ofNat_ext_iff {x y} := by

View File

@@ -223,6 +223,7 @@ theorem natCast_div_of_dvd {x y : Nat} (h : y x) (w : (y : α) ≠ 0) :
mul_inv_cancel w, Semiring.mul_one]
-- This is expensive as an instance. Let's see what breaks without it.
@[implicit_reducible]
def noNatZeroDivisors.ofIsCharPZero [IsCharP α 0] : NoNatZeroDivisors α := NoNatZeroDivisors.mk' <| by
intro a b h w
have := IsCharP.natCast_eq_zero_iff (α := α) 0 a

View File

@@ -15,6 +15,7 @@ public section
namespace Lean.Grind
/-- A `ToInt` instance on a semiring preserves powers if it preserves numerals and multiplication. -/
@[implicit_reducible]
def ToInt.pow_of_semiring [Semiring α] [ToInt α I] [ToInt.OfNat α I] [ToInt.Mul α I]
(h₁ : I.isFinite) : ToInt.Pow α I where
toInt_pow x n := by

View File

@@ -350,6 +350,7 @@ theorem wrap_toInt (I : IntInterval) [ToInt α I] (x : α) :
/-- Construct a `ToInt.Sub` instance from a `ToInt.Add` and `ToInt.Neg` instance and
a `sub_eq_add_neg` assumption. -/
@[implicit_reducible]
def Sub.of_sub_eq_add_neg {α : Type u} [_root_.Add α] [_root_.Neg α] [_root_.Sub α]
(sub_eq_add_neg : x y : α, x - y = x + -y)
{I : IntInterval} (h : I.isFinite) [ToInt α I] [Add α I] [Neg α I] : ToInt.Sub α I where

View File

@@ -366,16 +366,19 @@ recommended_spelling "shiftLeft" for "<<<" in [HShiftLeft.hShiftLeft, «term_<<<
recommended_spelling "shiftRight" for ">>>" in [HShiftRight.hShiftRight, «term_>>>_»]
recommended_spelling "not" for "~~~" in [Complement.complement, «term~~~_»]
-- declare ASCII alternatives first so that the latter Unicode unexpander wins
@[inherit_doc] infix:50 " <= " => LE.le
@[inherit_doc] infix:50 " " => LE.le
@[inherit_doc] infix:50 " < " => LT.lt
@[inherit_doc] infix:50 " >= " => GE.ge
@[inherit_doc] infix:50 "" => GE.ge
@[inherit_doc] infix:50 " > " => GT.gt
@[inherit_doc] infix:50 " = " => Eq
@[inherit_doc] infix:50 " == " => BEq.beq
@[inherit_doc] infix:50 " " => HEq
-- TODO(kmill) remove these after stage0 update. There are builtin macros still using `«term_>=_»`
@[inherit_doc] infix:50 (priority := low) " >= " => GE.ge
@[inherit_doc] infix:50 (priority := low) " <= " => LE.le
macro_rules | `($x >= $y) => `(binrel% GE.ge $x $y)
macro_rules | `($x <= $y) => `(binrel% LE.le $x $y)
@[inherit_doc] infix:50 unicode(" ", " <= ") => LE.le
@[inherit_doc] infix:50 " < " => LT.lt
@[inherit_doc] infix:50 unicode("", " >= ") => GE.ge
@[inherit_doc] infix:50 " > " => GT.gt
@[inherit_doc] infix:50 " = " => Eq
@[inherit_doc] infix:50 " == " => BEq.beq
@[inherit_doc] infix:50 "" => HEq
/-!
Remark: the infix commands above ensure a delaborator is generated for each relations.
@@ -383,39 +386,27 @@ recommended_spelling "not" for "~~~" in [Complement.complement, «term~~~_»]
It has better support for applying coercions. For example, suppose we have `binrel% Eq n i` where `n : Nat` and
`i : Int`. The default elaborator fails because we don't have a coercion from `Int` to `Nat`, but
`binrel%` succeeds because it also tries a coercion from `Nat` to `Int` even when the nat occurs before the int. -/
macro_rules | `($x <= $y) => `(binrel% LE.le $x $y)
macro_rules | `($x $y) => `(binrel% LE.le $x $y)
macro_rules | `($x < $y) => `(binrel% LT.lt $x $y)
macro_rules | `($x > $y) => `(binrel% GT.gt $x $y)
macro_rules | `($x >= $y) => `(binrel% GE.ge $x $y)
macro_rules | `($x $y) => `(binrel% GE.ge $x $y)
macro_rules | `($x = $y) => `(binrel% Eq $x $y)
macro_rules | `($x == $y) => `(binrel_no_prop% BEq.beq $x $y)
recommended_spelling "le" for "" in [LE.le, «term__»]
/-- prefer `≤` over `<=` -/
recommended_spelling "le" for "<=" in [LE.le, «term_<=_»]
recommended_spelling "lt" for "<" in [LT.lt, «term_<_»]
recommended_spelling "gt" for ">" in [GT.gt, «term_>_»]
recommended_spelling "ge" for "" in [GE.ge, «term__»]
/-- prefer `≥` over `>=` -/
recommended_spelling "ge" for ">=" in [GE.ge, «term_>=_»]
recommended_spelling "eq" for "=" in [Eq, «term_=_»]
recommended_spelling "beq" for "==" in [BEq.beq, «term_==_»]
recommended_spelling "heq" for "" in [HEq, «term__»]
@[inherit_doc] infixr:35 " /\\ " => And
@[inherit_doc] infixr:35 " " => And
@[inherit_doc] infixr:30 " \\/ " => Or
@[inherit_doc] infixr:30 " " => Or
@[inherit_doc] infixr:35 unicode("", " /\\ ") => And
@[inherit_doc] infixr:30 unicode(" ", " \\/ ") => Or
@[inherit_doc] notation:max "¬" p:40 => Not p
recommended_spelling "and" for "" in [And, «term__»]
/-- prefer `∧` over `/\` -/
recommended_spelling "and" for "/\\" in [And, «term_/\_»]
recommended_spelling "or" for "" in [Or, «term__»]
/-- prefer `` over `\/` -/
recommended_spelling "or" for "\\/" in [Or, «term_\/_»]
recommended_spelling "not" for "¬" in [Not, «term¬_»]
@[inherit_doc] infixl:35 " && " => and

View File

@@ -25,9 +25,9 @@ syntax explicitBinders := (ppSpace bracketedExplicitBinders)+ <|> unb
open TSyntax.Compat in
meta def expandExplicitBindersAux (combinator : Syntax) (idents : Array Syntax) (type? : Option Syntax) (body : Syntax) : MacroM Syntax :=
let rec loop (i : Nat) (h : i idents.size) (acc : Syntax) := do
match i with
| 0 => pure acc
| i + 1 =>
match i, h with
| 0, _ => pure acc
| i + 1, h =>
let ident := idents[i][0]
let acc match ident.isIdent, type? with
| true, none => `($combinator fun $ident => $acc)
@@ -39,9 +39,9 @@ meta def expandExplicitBindersAux (combinator : Syntax) (idents : Array Syntax)
meta def expandBracketedBindersAux (combinator : Syntax) (binders : Array Syntax) (body : Syntax) : MacroM Syntax :=
let rec loop (i : Nat) (h : i binders.size) (acc : Syntax) := do
match i with
| 0 => pure acc
| i+1 =>
match i, h with
| 0, _ => pure acc
| i+1, h =>
let idents := binders[i][1].getArgs
let type := binders[i][3]
loop i (Nat.le_of_succ_le h) ( expandExplicitBindersAux combinator idents (some type) acc)
@@ -63,7 +63,7 @@ meta def expandBracketedBinders (combinatorDeclName : Name) (bracketedExplicitBi
let combinator := mkCIdentFrom ( getRef) combinatorDeclName
expandBracketedBindersAux combinator #[bracketedExplicitBinders] body
syntax unifConstraint := term patternIgnore(" =?= " <|> " ") term
syntax unifConstraint := term unicode(" ", " =?= ") term
syntax unifConstraintElem := colGe unifConstraint ", "?
syntax (docComment)? attrKind "unif_hint" (ppSpace ident)? (ppSpace bracketedBinder)*
@@ -317,7 +317,7 @@ macro_rules
attribute [instance] $ctor)
namespace Lean
syntax cdotTk := patternIgnore("· " <|> ". ")
syntax cdotTk := unicode("· ", ". ")
/-- `· tac` focuses on the main goal and tries to solve it using `tac`, or else fails. -/
syntax (name := cdot) cdotTk tacticSeqIndentGt : tactic

View File

@@ -1287,7 +1287,7 @@ export Max (max)
Constructs a `Max` instance from a decidable `≤` operation.
-/
-- Marked inline so that `min x y + max x y` can be optimized to a single branch.
@[inline]
@[inline, implicit_reducible]
def maxOfLe [LE α] [DecidableRel (@LE.le α _)] : Max α where
max x y := ite (LE.le x y) y x
@@ -1304,7 +1304,7 @@ export Min (min)
Constructs a `Min` instance from a decidable `≤` operation.
-/
-- Marked inline so that `min x y + max x y` can be optimized to a single branch.
@[inline]
@[inline, implicit_reducible]
def minOfLe [LE α] [DecidableRel (@LE.le α _)] : Min α where
min x y := ite (LE.le x y) x y

View File

@@ -33,15 +33,27 @@ theorem ite_cond_congr {α : Sort u} (c : Prop) {inst : Decidable c} (a b : α)
theorem ite_true {α : Sort u} (c : Prop) {inst : Decidable c} (a b : α) {ht : c} : @ite α c inst a b = a := by
simp [*]
theorem ite_true_congr {α : Sort u} (c : Prop) {inst : Decidable c} (a b : α) (c' : Prop) (h : c = c') {ht : c'} : @ite α c inst a b = a := by
simp [*]
theorem ite_false {α : Sort u} (c : Prop) {inst : Decidable c} (a b : α) {ht : ¬ c} : @ite α c inst a b = b := by
simp [*]
theorem ite_false_congr {α : Sort u} (c : Prop) {inst : Decidable c} (a b : α) (c' : Prop) (h : c = c') {ht : ¬ c'} : @ite α c inst a b = b := by
simp [*]
theorem dite_true {α : Sort u} (c : Prop) {inst : Decidable c} (a : c α) (b : ¬ c α) {ht : c} : @dite α c inst a b = a ht := by
simp [*]
theorem dite_true_congr {α : Sort u} (c : Prop) {inst : Decidable c} (a : c α) (b : ¬ c α) (c' : Prop) (h : c = c') {ht : c'} : @dite α c inst a b = a (h.mpr_prop ht) := by
subst h; simp [*]
theorem dite_false {α : Sort u} (c : Prop) {inst : Decidable c} (a : c α) (b : ¬ c α) {ht : ¬ c} : @dite α c inst a b = b ht := by
simp [*]
theorem dite_false_congr {α : Sort u} (c : Prop) {inst : Decidable c} (a : c α) (b : ¬ c α) (c' : Prop) (h : c = c') {ht : ¬ c'} : @dite α c inst a b = b (h.mpr_not ht) := by
subst h; simp [*]
theorem dite_cond_congr {α : Sort u} (c : Prop) {inst : Decidable c} (a : c α) (b : ¬ c α)
(c' : Prop) {inst' : Decidable c'} (h : c = c')
: @dite α c inst a b = @dite α c' inst' (fun h' => a (h.mpr_prop h')) (fun h' => b (h.mpr_not h')) := by

View File

@@ -6,6 +6,7 @@ Authors: Luke Nelson, Jared Roesch, Leonardo de Moura, Sebastian Ullrich, Mac Ma
module
prelude
public import Init.Control.Do
public import Init.System.IOError
public import Init.System.FilePath
import Init.Data.String.TakeDrop

View File

@@ -272,6 +272,7 @@ Creates a `MonadStateOf` instance from a reference cell.
This allows programs written against the [state monad](lean-manual://section/state-monads) API to
be executed using a mutable reference cell to track the state.
-/
@[implicit_reducible]
def Ref.toMonadStateOf (r : Ref σ α) : MonadStateOf α m where
get := r.get
set := r.set

View File

@@ -577,7 +577,7 @@ If `thm` is a theorem `a = b`, then as a rewrite rule,
* `thm` means to replace `a` with `b`, and
* `← thm` means to replace `b` with `a`.
-/
syntax rwRule := patternIgnore("" <|> "<- ")? term
syntax rwRule := unicode("", "<- ")? term
/-- A `rwRuleSeq` is a list of `rwRule` in brackets. -/
syntax rwRuleSeq := " [" withoutPosition(rwRule,*,?) "]"
@@ -653,7 +653,7 @@ A simp lemma specification is:
* optional `←` to use the lemma backward
* `thm` for the theorem to rewrite with
-/
syntax simpLemma := ppGroup((simpPre <|> simpPost)? patternIgnore("" <|> "<- ")? term)
syntax simpLemma := ppGroup((simpPre <|> simpPost)? unicode("", "<- ")? term)
/-- An erasure specification `-thm` says to remove `thm` from the simp set -/
syntax simpErase := "-" term:max
/-- The simp lemma specification `*` means to rewrite with all hypotheses -/
@@ -2395,7 +2395,7 @@ If there are several with the same priority, it is uses the "most recent one". E
cases d <;> rfl
```
-/
syntax (name := simp) "simp" (Tactic.simpPre <|> Tactic.simpPost)? patternIgnore("" <|> "<- ")? (ppSpace prio)? : attr
syntax (name := simp) "simp" (Tactic.simpPre <|> Tactic.simpPost)? unicode("", "<- ")? (ppSpace prio)? : attr
/--
Theorems tagged with the `wf_preprocess` attribute are used during the processing of functions defined
@@ -2409,7 +2409,7 @@ that diverges as compiled to be accepted without an explicit `partial` keyword,
remove irrelevant subterms or change the evaluation order by hiding terms under binders. Therefore
avoid tagging theorems with `[wf_preprocess]` unless they preserve also operational behavior.
-/
syntax (name := wf_preprocess) "wf_preprocess" (Tactic.simpPre <|> Tactic.simpPost)? patternIgnore("" <|> "<- ")? (ppSpace prio)? : attr
syntax (name := wf_preprocess) "wf_preprocess" (Tactic.simpPre <|> Tactic.simpPost)? unicode("", "<- ")? (ppSpace prio)? : attr
/--
Theorems tagged with the `method_specs_simp` attribute are used by `@[method_specs]` to further
@@ -2421,7 +2421,7 @@ The `method_specs` theorems are created on demand (using the realizable constant
this simp set should behave the same in all modules. Do not add theorems to it except in the module
defining the thing you are rewriting.
-/
syntax (name := method_specs_simp) "method_specs_simp" (Tactic.simpPre <|> Tactic.simpPost)? patternIgnore("" <|> "<- ")? (ppSpace prio)? : attr
syntax (name := method_specs_simp) "method_specs_simp" (Tactic.simpPre <|> Tactic.simpPost)? unicode("", "<- ")? (ppSpace prio)? : attr
/--
Register a theorem as a rewrite rule for `cbv` evaluation of a given definition.
@@ -2431,7 +2431,7 @@ You can instruct `cbv` to rewrite the lemma from right-to-left:
@[cbv_eval ←] theorem my_thm : rhs = lhs := ...
```
-/
syntax (name := cbv_eval) "cbv_eval" patternIgnore("" <|> "<- ")? (ppSpace ident)? : attr
syntax (name := cbv_eval) "cbv_eval" unicode("", "<- ")? (ppSpace ident)? : attr
/-- The possible `norm_cast` kinds: `elim`, `move`, or `squash`. -/
syntax normCastLabel := &"elim" <|> &"move" <|> &"squash"

View File

@@ -18,4 +18,5 @@ public import Lean.Compiler.FFI
public import Lean.Compiler.MetaAttr
public import Lean.Compiler.NoncomputableAttr
public import Lean.Compiler.Main
public import Lean.Compiler.NameDemangling
public import Lean.Compiler.Old -- TODO: delete after we port code generator to Lean

View File

@@ -257,7 +257,7 @@ def run (x : CheckM α) : CompilerM α :=
end Pure
end Check
def Decl.check (decl : Decl pu) : CompilerM Unit := do
def Decl.check (decl : Decl pu) : CompilerM Unit :=
match pu with
| .pure => Check.Pure.run do decl.value.forCodeM (Check.Pure.checkFunDeclCore decl.name decl.params decl.type)
| .impure => return () -- TODO: port the IR check once it actually makes sense to

View File

@@ -14,6 +14,7 @@ import Std.Data.Iterators.Combinators.Zip
import Lean.Compiler.LCNF.MonadScope
import Lean.Compiler.LCNF.FVarUtil
import Lean.Compiler.LCNF.PhaseExt
import Lean.Compiler.LCNF.PrettyPrinter
/-!
This pass is responsible for inferring borrow annotations to the parameters of functions and join
@@ -152,6 +153,48 @@ instance : MonadScope InferM where
getScope := return ( read).paramSet
withScope f mx := withReader (fun ctx => { ctx with paramSet := f ctx.paramSet }) mx
/--
Reasons why a variable is marked as owned during borrow inference.

Each constructor records the free variable (or, for tail calls, the function
name) that triggered the ownership decision, so `OwnReason.toString` can render
a human-readable explanation in the `Compiler.inferBorrow` trace.
-/
inductive OwnReason where
  /-- Target or source of a reset/reuse operation. -/
  | resetReuse (resultFVar : FVarId)
  /-- Result of a constructor allocation. -/
  | constructorResult (resultFVar : FVarId)
  /-- Parameter packed into a constructor. -/
  | constructorArg (resultFVar : FVarId)
  /-- Bidirectional ownership propagation through `oproj`. -/
  | projectionPropagation (resultFVar : FVarId)
  /-- Result of a function application. -/
  | functionCallResult (resultFVar : FVarId)
  /-- Argument to a function whose corresponding parameter is owned. -/
  | functionCallArg (resultFVar : FVarId)
  /-- Argument or callee in a call to a free variable. -/
  | fvarCall (resultFVar : FVarId)
  /-- Argument in a partial application. -/
  | partialApplication (resultFVar : FVarId)
  /-- Tail call preservation for a self-recursive function call. -/
  | tailCallPreservation (funcName : Name)
  /-- Argument propagation at a join point jump. -/
  | jpArgPropagation (jpFVar : FVarId)
  /-- Tail call preservation at a join point jump. -/
  | jpTailCallPreservation (jpFVar : FVarId)
/--
Render an `OwnReason` as a human-readable explanation for trace output.

Free variables are pretty-printed via `PP.run`/`PP.ppFVar` so the message can
refer to them by their user-facing names.
-/
def OwnReason.toString (reason : OwnReason) : CompilerM String := do
  PP.run do
    match reason with
    | .resetReuse resultFVar => return s!"used in reset reuse {← PP.ppFVar resultFVar}"
    | .constructorResult resultFVar => return s!"result of ctor call {← PP.ppFVar resultFVar}"
    | .constructorArg resultFVar => return s!"argument to constructor call {← PP.ppFVar resultFVar}"
    | .projectionPropagation resultFVar => return s!"projection propagation {← PP.ppFVar resultFVar}"
    | .functionCallResult resultFVar => return s!"result of function call {← PP.ppFVar resultFVar}"
    | .functionCallArg resultFVar => return s!"owned function argument {← PP.ppFVar resultFVar}"
    | .fvarCall resultFVar => return s!"argument to closure call {← PP.ppFVar resultFVar}"
    | .partialApplication resultFVar => return s!"argument to pap {← PP.ppFVar resultFVar}"
    | .tailCallPreservation funcName => return s!"tail call preservation of {funcName}"
    | .jpArgPropagation jpFVar => return s!"backward propagation from JP {← PP.ppFVar jpFVar}"
    | .jpTailCallPreservation jpFVar => return s!"JP tail call preservation {← PP.ppFVar jpFVar}"
/--
Infer the borrowing annotations in a SCC through dataflow analysis.
-/
@@ -170,18 +213,16 @@ where
step : InferM Unit := do
( read).decls.forM collectDecl
ownFVar (fvarId : FVarId) : InferM Unit := do
modify fun s =>
if s.owned.contains fvarId then
s
else
{ s with owned := s.owned.insert fvarId, modified := true }
ownFVar (fvarId : FVarId) (reason : OwnReason) : InferM Unit := do
unless ( get).owned.contains fvarId do
trace[Compiler.inferBorrow] "own {← PP.run <| PP.ppFVar fvarId}: {← reason.toString}"
modify fun s => { s with owned := s.owned.insert fvarId, modified := true }
ownArg (a : Arg .impure) : InferM Unit := do
a.forFVarM ownFVar
ownArg (reason : OwnReason) (a : Arg .impure) : InferM Unit := do
a.forFVarM (ownFVar · reason)
ownArgs (as : Array (Arg .impure)) : InferM Unit :=
as.forM ownArg
ownArgs (reason : OwnReason) (as : Array (Arg .impure)) : InferM Unit :=
as.forM (ownArg reason)
isOwned (fvarId : FVarId) : InferM Bool := return ( get).owned.contains fvarId
@@ -219,23 +260,25 @@ where
return sig.params
/-- For each ps[i], if ps[i] is owned, then mark args[i] as owned. -/
ownArgsUsingParams (args : Array (Arg .impure)) (ps : Array (Param .impure)) : InferM Unit := do
ownArgsUsingParams (args : Array (Arg .impure)) (ps : Array (Param .impure))
(reason : OwnReason) : InferM Unit := do
for h : i in 0...args.size do
let arg := args[i]
let p := ps[i]!
unless p.borrow || p.type.isScalar do ownArg arg
unless p.borrow || p.type.isScalar do ownArg reason arg
/-- For each args[i], if args[i] is owned, then mark ps[i] as owned.
We use this action to preserve tail calls. That is, if we have
a tail call `f xs`, if the i-th parameter is borrowed, but `args[i]` is owned
we would have to insert a `dec args[i]` after `f args` and consequently
"break" the tail call. -/
ownParamsUsingArgs (args : Array (Arg .impure)) (ps : Array (Param .impure)) : InferM Unit :=
ownParamsUsingArgs (args : Array (Arg .impure)) (ps : Array (Param .impure))
(reason : OwnReason) : InferM Unit :=
for h : i in 0...args.size do
let arg := args[i]
let p := ps[i]!
if let .fvar x := arg then
if ( isOwned x) then ownFVar p.fvarId
if ( isOwned x) then ownFVar p.fvarId reason
/-- Mark `args[i]` as owned if it is one of the parameters that are currently in scope.
@@ -248,26 +291,27 @@ where
ret z
```
-/
ownArgsIfParam (args : Array (Arg .impure)) : InferM Unit := do
ownArgsIfParam (z : FVarId) (args : Array (Arg .impure)) : InferM Unit := do
for arg in args do
if let .fvar x := arg then
if ( read).paramSet.contains x then
ownFVar x
ownFVar x (.constructorArg z)
collectLetValue (z : FVarId) (v : LetValue .impure) : InferM Unit := do
match v with
| .reset _ x => ownFVar z; ownFVar x
| .reuse x _ _ args => ownFVar z; ownFVar x; ownArgsIfParam args
| .ctor _ args => ownFVar z; ownArgsIfParam args
| .reset _ x => ownFVar z (.resetReuse z); ownFVar x (.resetReuse z)
| .reuse x _ _ args => ownFVar z (.resetReuse z); ownFVar x (.resetReuse z); ownArgsIfParam z args
| .ctor _ args => ownFVar z (.constructorResult z); ownArgsIfParam z args
| .oproj _ x _ =>
if isOwned x then ownFVar z
if isOwned z then ownFVar x
if isOwned x then ownFVar z (.projectionPropagation z)
if isOwned z then ownFVar x (.projectionPropagation z)
| .fap f args =>
let ps getParamInfo (.decl f)
ownFVar z
ownArgsUsingParams args ps
| .fvar x args => ownFVar z; ownFVar x; ownArgs args
| .pap _ args => ownFVar z; ownArgs args
ownFVar z (.functionCallResult z)
ownArgsUsingParams args ps (.functionCallArg z)
| .fvar x args =>
ownFVar z (.functionCallResult z); ownFVar x (.fvarCall z); ownArgs (.fvarCall z) args
| .pap _ args => ownFVar z (.functionCallResult z); ownArgs (.partialApplication z) args
| _ => return ()
preserveTailCall (x : FVarId) (v : LetValue .impure) (k : Code .impure) : InferM Unit := do
@@ -277,7 +321,7 @@ where
-- expanded
if ( read).currDecl == f && x == ret then
let ps getParamInfo (.decl f)
ownParamsUsingArgs args ps
ownParamsUsingArgs args ps (.tailCallPreservation f)
| _, _ => return ()
collectCode (code : Code .impure) : InferM Unit := do
@@ -295,8 +339,8 @@ where
| .jmp jpId args =>
let ctx read
let ps getParamInfo (.jp ctx.currDecl jpId)
ownArgsUsingParams args ps -- for making sure the join point can reuse
ownParamsUsingArgs args ps -- for making sure the tail call is preserved
ownArgsUsingParams args ps (.jpArgPropagation jpId)
ownParamsUsingArgs args ps (.jpTailCallPreservation jpId)
| .cases cs => cs.alts.forM (·.forCodeM collectCode)
| .uset _ _ _ k _ | .sset _ _ _ _ _ k _ => collectCode k
| .return .. | .unreach .. => return ()

View File

@@ -267,13 +267,13 @@ def LetValue.inferType (e : LetValue pu) : CompilerM Expr :=
| .pure => InferType.Pure.inferLetValueType e |>.run {}
| .impure => panic! "Infer type for impure unimplemented" -- TODO
def Code.inferType (code : Code pu) : CompilerM Expr := do
def Code.inferType (code : Code pu) : CompilerM Expr :=
match pu with
| .pure =>
match code with
| .let _ k | .fun _ k _ | .jp _ k => k.inferType
| .return fvarId => getType fvarId
| .jmp fvarId args => InferType.Pure.inferAppTypeCore ( getType fvarId) args |>.run {}
| .jmp fvarId args => do InferType.Pure.inferAppTypeCore ( getType fvarId) args |>.run {}
| .unreach type => return type
| .cases c => return c.resultType
| .impure => panic! "Infer type for impure unimplemented" -- TODO

View File

@@ -12,17 +12,36 @@ public section
namespace Lean.Compiler.LCNF
private def refreshBinderName (binderName : Name) : CompilerM Name := do
match binderName with
| .num p _ =>
let r := .num p ( get).nextIdx
modify fun s => { s with nextIdx := s.nextIdx + 1 }
return r
| _ => return binderName
namespace Internalize
abbrev InternalizeM (pu : Purity) := StateRefT (FVarSubst pu) CompilerM
structure Context where
uniqueIdents : Bool := false
abbrev InternalizeM (pu : Purity) := ReaderT Context StateRefT (FVarSubst pu) CompilerM
@[inline]
def InternalizeM.run (x : InternalizeM pu α) (state : FVarSubst pu) (ctx : Context := {}) :
CompilerM (α × FVarSubst pu) :=
StateRefT'.run (ReaderT.run x ctx) state
@[inline]
def InternalizeM.run' (x : InternalizeM pu α) (state : FVarSubst pu) (ctx : Context := {}) :
CompilerM α :=
StateRefT'.run' (ReaderT.run x ctx) state
private def refreshBinderName (binderName : Name) : InternalizeM pu Name := do
match binderName with
| .num p _ =>
let r := .num p ( getThe CompilerM.State).nextIdx
modifyThe CompilerM.State fun s => { s with nextIdx := s.nextIdx + 1 }
return r
| _ =>
if ( read).uniqueIdents then
let r := .num binderName ( getThe CompilerM.State).nextIdx
modifyThe CompilerM.State fun s => { s with nextIdx := s.nextIdx + 1 }
return r
else
return binderName
/--
The `InternalizeM` monad is a translator. It "translates" the free variables
@@ -227,12 +246,14 @@ end Internalize
/--
Refresh free variables ids in `code`, and store their declarations in the local context.
-/
partial def Code.internalize (code : Code pu) (s : FVarSubst pu := {}) : CompilerM (Code pu) :=
Internalize.internalizeCode code |>.run' s
partial def Code.internalize (code : Code pu) (s : FVarSubst pu := {})
(uniqueIdents : Bool := false) : CompilerM (Code pu) :=
Internalize.internalizeCode code |>.run' s { uniqueIdents }
open Internalize in
def Decl.internalize (decl : Decl pu) (s : FVarSubst pu := {}): CompilerM (Decl pu) :=
go decl |>.run' s
def Decl.internalize (decl : Decl pu) (s : FVarSubst pu := {}) (uniqueIdents : Bool := false) :
CompilerM (Decl pu) :=
go decl |>.run' s { uniqueIdents }
where
go (decl : Decl pu) : InternalizeM pu (Decl pu) := do
let type internalizeExpr decl.type

View File

@@ -38,6 +38,7 @@ def shouldGenerateCode (declName : Name) : CoreM Bool := do
if hasMacroInlineAttribute env declName then return false
if (getImplementedBy? env declName).isSome then return false
if ( Meta.isMatcher declName) then return false
if ( Meta.isMatcherLike declName) then return false
if isCasesOnLike env declName then return false
-- TODO: check if type class instance
return true
@@ -57,7 +58,10 @@ def checkpoint (stepName : Name) (decls : Array (Decl pu)) (shouldCheck : Bool)
withOptions (fun opts => opts.set `pp.motives.pi false) do
let clsName := `Compiler ++ stepName
if ( Lean.isTracingEnabledFor clsName) then
Lean.addTrace clsName m!"size: {decl.size}\n{← ppDecl' decl (← getPhase)}"
if compiler.traceUnnormalized.get ( getOptions) then
Lean.addTrace clsName m!"size: {decl.size}\n{← ppDecl decl}"
else
Lean.addTrace clsName m!"size: {decl.size}\n{← ppDecl' decl (← getPhase)}"
if shouldCheck then
decl.check

View File

@@ -213,7 +213,7 @@ def getDecl? (declName : Name) : CompilerM (Option ((pu : Purity) × Decl pu)) :
let some decl getDeclAt? declName ( getPhase) | return none
return some _, decl
def getLocalDeclAt? (declName : Name) (phase : Phase) : CompilerM (Option (Decl phase.toPurity)) := do
def getLocalDeclAt? (declName : Name) (phase : Phase) : CompilerM (Option (Decl phase.toPurity)) :=
match phase with
| .base => return baseExt.getState ( getEnv) |>.find? declName
| .mono => return monoExt.getState ( getEnv) |>.find? declName

View File

@@ -106,6 +106,7 @@ private def getLitAux (fvarId : FVarId) (ofNat : Nat → α) (ofNatName : Name)
let some natLit getLit fvarId | return none
return ofNat natLit
@[implicit_reducible]
def mkNatWrapperInstance (ofNat : Nat α) (ofNatName : Name) (toNat : α Nat) : Literal α where
getLit := (getLitAux · ofNat ofNatName)
mkLit x := do
@@ -114,6 +115,7 @@ def mkNatWrapperInstance (ofNat : Nat → α) (ofNatName : Name) (toNat : α
instance : Literal Char := mkNatWrapperInstance Char.ofNat ``Char.ofNat Char.toNat
@[implicit_reducible]
def mkUIntInstance (matchLit : LitValue Option α) (litValueCtor : α LitValue) : Literal α where
getLit fvarId := do
let some (.lit litVal) findLetValue? (pu := .pure) fvarId | return none

View File

@@ -35,33 +35,45 @@ private def normalizeAlt (e : Expr) (numParams : Nat) : MetaM Expr :=
Meta.mkLambdaFVars (xs ++ ys) (mkAppN e ys)
/--
Inline auxiliary `matcher` applications.
Inline auxiliary `matcher` applications and matcher-like declarations (e.g. `match_on_same_ctor.het`)
-/
partial def inlineMatchers (e : Expr) : CoreM Expr :=
Meta.MetaM.run' <| Meta.transform e fun e => do
let .const declName us := e.getAppFn | return .continue
let some info Meta.getMatcherInfo? declName | return .continue
let numArgs := e.getAppNumArgs
if numArgs > info.arity then
return .continue
else if numArgs < info.arity then
Meta.forallBoundedTelescope ( Meta.inferType e) (info.arity - numArgs) fun xs _ =>
return .visit ( Meta.mkLambdaFVars xs (mkAppN e xs))
if let some info Meta.getMatcherInfo? declName then
let numArgs := e.getAppNumArgs
if numArgs > info.arity then
return .continue
else if numArgs < info.arity then
Meta.forallBoundedTelescope ( Meta.inferType e) (info.arity - numArgs) fun xs _ =>
return .visit ( Meta.mkLambdaFVars xs (mkAppN e xs))
else
let mut args := e.getAppArgs
let altNumParams := info.altNumParams
let rec inlineMatcher (i : Nat) (args : Array Expr) (letFVars : Array Expr) : MetaM Expr := do
if h : i < altNumParams.size then
let altIdx := i + info.getFirstAltPos
let numParams := altNumParams[i]
let alt normalizeAlt args[altIdx]! numParams
Meta.withLetDecl ( mkFreshUserName `_alt) ( Meta.inferType alt) alt fun altFVar =>
inlineMatcher (i+1) (args.set! altIdx altFVar) (letFVars.push altFVar)
else
let info getConstInfo declName
let value := ( Core.instantiateValueLevelParams info us).beta args
Meta.mkLetFVars letFVars value
return .visit ( inlineMatcher 0 args #[])
else if Meta.isMatcherLike declName then
let info getConstInfo declName
let value Core.instantiateValueLevelParams info us
/-
Currently the only declarations that are "matcher like" are `match_on_same_ctor.het`, for them
each alternative is used uniquely so we can just beta reduce without introducing code
duplication. If more "matcher like" things are introduced we might have to extend this with a
generalized notion of matcher information.
-/
return .visit (value.beta e.getAppArgs)
else
let mut args := e.getAppArgs
let altNumParams := info.altNumParams
let rec inlineMatcher (i : Nat) (args : Array Expr) (letFVars : Array Expr) : MetaM Expr := do
if h : i < altNumParams.size then
let altIdx := i + info.getFirstAltPos
let numParams := altNumParams[i]
let alt normalizeAlt args[altIdx]! numParams
Meta.withLetDecl ( mkFreshUserName `_alt) ( Meta.inferType alt) alt fun altFVar =>
inlineMatcher (i+1) (args.set! altIdx altFVar) (letFVars.push altFVar)
else
let info getConstInfo declName
let value := ( Core.instantiateValueLevelParams info us).beta args
Meta.mkLetFVars letFVars value
return .visit ( inlineMatcher 0 args #[])
return .continue
/--
Replace nested occurrences of `unsafeRec` names with the safe ones.

View File

@@ -34,6 +34,10 @@ structure BuilderState where
we access an fvar in the conversion.
-/
subst : LCNF.FVarSubst .pure := {}
/--
For each join point, tracks which parameters are computationally relevant (i.e. not void or erased).
-/
jpParamMask : Std.HashMap FVarId (Array Bool) := {}
abbrev ToImpureM := StateRefT BuilderState CompilerM
@@ -227,13 +231,21 @@ partial def Code.toImpure (c : Code .pure) : ToImpureM (Code .impure) := do
| .let decl k => lowerLet decl k
| .jp decl k =>
let params decl.params.mapM (·.toImpure)
let keepMask := params.map fun p => !(p.type.isVoid || p.type.isErased)
modify fun s => { s with jpParamMask := s.jpParamMask.insert decl.fvarId keepMask }
let filteredParams := (params.zip keepMask).foldl (init := #[]) fun acc (p, keep) =>
if keep then acc.push p else acc
let value decl.value.toImpure
let k k.toImpure
let type lowerResultType decl.type params.size
let decl := decl.fvarId, decl.binderName, params, type, value
let decl := decl.fvarId, decl.binderName, filteredParams, type, value
modifyLCtx fun lctx => lctx.addFunDecl decl
return .jp decl k
| .jmp fvarId args => return .jmp fvarId ( args.mapM (·.toImpure))
| .jmp fvarId args =>
let keepMask := ( get).jpParamMask[fvarId]!
let filteredArgs (args.zip keepMask).foldlM (init := #[]) fun acc (a, keep) => do
if keep then return acc.push ( a.toImpure) else return acc
return .jmp fvarId filteredArgs
| .cases c =>
if let some info hasTrivialImpureStructure? c.typeName then
assert! c.alts.size == 1

View File

@@ -452,7 +452,7 @@ where
visitCore (e : Expr) : M (Arg .pure) := withIncRecDepth do
if let some arg := ( get).cache.find? e then
return arg
let r : Arg match e with
let r : Arg .pure match e with
| .app .. => visitApp e
| .const .. => visitApp e
| .proj s i e => visitProj s i e

View File

@@ -0,0 +1,343 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Kim Morrison
-/
module
prelude
import Init.While
import Init.Data.String.TakeDrop
import Init.Data.String.Search
import Init.Data.String.Iterate
import Lean.Data.NameTrie
public import Lean.Compiler.NameMangling
/-! Human-friendly demangling of Lean compiler symbol names, extending
`Name.demangle` with prefix handling, compiler suffix folding, and backtrace
line parsing. Called from the C runtime via `@[export]` for backtrace display. -/
namespace Lean.Name.Demangle
/-- `String.dropPrefix?` returning a `String` instead of a `Slice`. -/
private def dropPrefix? (s : String) (pre : String) : Option String :=
  match s.dropPrefix? pre with
  | some rest => some rest.toString
  | none => none
/-- `true` iff `s` is nonempty and every character is a decimal digit. -/
private def isAllDigits (s : String) : Bool :=
  if s.isEmpty then
    false
  else
    s.all fun c => c.isDigit
/--
Decompose a `Name` into its components, ordered root-first.

The accumulator-based helper walks from the leaf toward the root, consing each
component onto the front of the list, so the final array is outermost-first.
-/
private def nameToNameParts (n : Name) : Array NamePart :=
  go n [] |>.toArray
where
  go : Name → List NamePart → List NamePart
    | .anonymous, acc => acc
    | .str pre s, acc => go pre (NamePart.str s :: acc)
    | .num pre n, acc => go pre (NamePart.num n :: acc)
/-- Reassemble a `Name` from components ordered root-first, starting at the
anonymous root (inverse of `nameToNameParts`). -/
private def namePartsToName (parts : Array NamePart) : Name := Id.run do
  let mut acc : Name := .anonymous
  for part in parts do
    match part with
    | .str s => acc := acc.mkStr s
    | .num n => acc := acc.mkNum n
  return acc
/-- Format name parts using `Name.toString` for correct escaping; an empty
component array renders as the empty string. -/
private def formatNameParts (comps : Array NamePart) : String :=
  match comps.isEmpty with
  | true => ""
  | false => (namePartsToName comps).toString
/--
Map a compiler-generated suffix component to its human-readable flag, or
`none` when the component is not a recognized suffix.

Lambda-lifting style suffixes (`_lam`, `_lambda`, `_elam`, `_lam_<n>`,
`_elam_<n>`) all render as `"λ"`; numeric components are never suffixes here.
-/
private def matchSuffix (c : NamePart) : Option String :=
  match c with
  | NamePart.str s =>
    if s == "_redArg" then some "arity↓"
    else if s == "_boxed" then some "boxed"
    else if s == "_impl" then some "impl"
    else if s == "_lam" || s == "_lambda" || s == "_elam" then some "λ"
    else if s == "_jp" then some "jp"
    else if s == "_closed" then some "closed"
    else if (dropPrefix? s "_lam_").any isAllDigits then some "λ"
    else if (dropPrefix? s "_elam_").any isAllDigits then some "λ"
    else none
  | _ => none
/-- `true` iff the component has the shape `spec_<digits>` (a specialization
index component). -/
private def isSpecIndex (c : NamePart) : Bool :=
  if let NamePart.str s := c then
    (dropPrefix? s "spec_").any isAllDigits
  else
    false
/--
Detect a `_private` prefix in `comps[start:stop]` and compute where the
actual name begins.

Returns `(i + 1, true)` where `i` is the index of the first `NamePart.num 0`
separator after the `_private` component, provided at least one component
follows it; otherwise returns `(start, false)` unchanged.
NOTE(review): assumes private names are mangled as
`_private.<...>.0.<actual name>` — confirm against the name mangler.
-/
private def stripPrivate (comps : Array NamePart) (start stop : Nat) :
    Nat × Bool :=
  if stop - start >= 3 && comps[start]? == some (NamePart.str "_private") then
    Id.run do
      for i in [start + 1 : stop] do
        if comps[i]? == some (NamePart.num 0) then
          -- The `num 0` separator must not be the last component in range.
          if i + 1 < stop then return (i + 1, true)
          else return (start, false)
      return (start, false)
  else
    (start, false)
/-- A demangled specialization context: its rendered name plus the suffix
flags collected from its components (see `processSpecContext`). -/
private structure SpecEntry where
  /-- Rendered name of the specialization context (may be empty). -/
  name : String
  /-- Deduplicated display flags, e.g. `"boxed"`, `"λ"`. -/
  flags : Array String
/--
Demangle one specialization-context component list into a `SpecEntry`.

Skips a leading `_private ... 0` prefix (same shape as `stripPrivate`),
drops `spec_<n>` index components, collects recognized suffix components as
deduplicated flags, and renders the remaining parts as the context name.
-/
private def processSpecContext (comps : Array NamePart) : SpecEntry := Id.run do
  let mut begin_ := 0
  -- Skip over a `_private ... 0` prefix, mirroring `stripPrivate`.
  if comps.size >= 3 && comps[0]? == some (NamePart.str "_private") then
    for i in [1:comps.size] do
      if comps[i]? == some (NamePart.num 0) && i + 1 < comps.size then
        begin_ := i + 1
        break
  let mut parts : Array NamePart := #[]
  let mut flags : Array String := #[]
  for i in [begin_:comps.size] do
    let c := comps[i]!
    match matchSuffix c with
    | some flag =>
      -- Deduplicate flags, keeping first-occurrence order.
      if !flags.contains flag then
        flags := flags.push flag
    | none =>
      -- `spec_<n>` indices carry no display information; drop them.
      if isSpecIndex c then pure ()
      else parts := parts.push c
  { name := formatNameParts parts, flags }
/--
Render demangled name components in a human-friendly form.

Processing order matters:
1. strip a `_private` prefix (recorded as a `"private"` flag),
2. peel recognized suffix components (and their trailing numeric indices) off
   the end into `flags`,
3. drop hygienic `_@ ...` sections,
4. fold `_at_ ... _spec ...` specialization contexts into `SpecEntry`s,
5. format as `name [flags] spec at ctx[flags] ...`.
-/
private def postprocessNameParts (components : Array NamePart) : String := Id.run do
  if components.isEmpty then return ""
  let (privStart, isPrivate) := stripPrivate components 0 components.size
  let mut parts := components.extract privStart components.size
  -- Collect suffix flags from the end first, before stripping hygienic suffixes,
  -- since _boxed etc. may appear after _@ sections.
  let mut flags : Array String := #[]
  let mut cont := true
  while cont && !parts.isEmpty do
    let last := parts.back!
    match matchSuffix last with
    | some flag =>
      flags := flags.push flag
      parts := parts.pop
    | none =>
      match last with
      | NamePart.num _ =>
        -- A numeric index directly after a suffix component (e.g. `_lam 3`)
        -- is dropped together with that suffix.
        if parts.size >= 2 then
          match matchSuffix parts[parts.size - 2]! with
          | some flag =>
            flags := flags.push flag
            parts := parts.pop.pop
          | none => cont := false
        else cont := false
      | _ => cont := false
  if isPrivate then
    flags := flags.push "private"
  -- Strip hygienic suffixes (_@ onward)
  for i in [:parts.size] do
    match parts[i]! with
    | NamePart.str s =>
      if s.startsWith "_@" then
        parts := parts.extract 0 i
        break
    | _ => pure ()
  -- Handle specialization contexts (_at_ ... _spec N)
  let mut specEntries : Array SpecEntry := #[]
  let mut firstAt : Option Nat := none
  for i in [:parts.size] do
    if parts[i]! == NamePart.str "_at_" then
      firstAt := some i
      break
  if let some fa := firstAt then
    let base := parts.extract 0 fa
    let rest := parts.extract fa parts.size
    let mut entries : Array (Array NamePart) := #[]
    let mut currentCtx : Option (Array NamePart) := none
    let mut remaining : Array NamePart := #[]
    -- `skipNext` swallows the numeric index following a bare `_spec`.
    let mut skipNext := false
    for i in [:rest.size] do
      if skipNext then
        skipNext := false
        continue
      let p := rest[i]!
      if p == NamePart.str "_at_" then
        if let some ctx := currentCtx then
          entries := entries.push ctx
        currentCtx := some #[]
      else if p == NamePart.str "_spec" then
        if let some ctx := currentCtx then
          entries := entries.push ctx
        currentCtx := none
        skipNext := true
      else if match p with | NamePart.str s => s.startsWith "_spec" | _ => false then
        if let some ctx := currentCtx then
          entries := entries.push ctx
        currentCtx := none
      else
        -- Components outside a context stay part of the base name.
        match currentCtx with
        | some ctx => currentCtx := some (ctx.push p)
        | none => remaining := remaining.push p
    if let some ctx := currentCtx then
      if !ctx.isEmpty then
        entries := entries.push ctx
    for entry in entries do
      let se := processSpecContext entry
      if !se.name.isEmpty || !se.flags.isEmpty then
        specEntries := specEntries.push se
    parts := base ++ remaining
  let name := if parts.isEmpty then "?" else formatNameParts parts
  let mut result := name
  if !flags.isEmpty then
    result := result ++ " [" ++ String.intercalate ", " flags.toList ++ "]"
  for entry in specEntries do
    let ctxStr := if entry.name.isEmpty then "?" else entry.name
    if !entry.flags.isEmpty then
      result := result ++ " spec at " ++ ctxStr ++ "["
        ++ String.intercalate ", " entry.flags.toList ++ "]"
    else
      result := result ++ " spec at " ++ ctxStr
  return result
/-- Demangle a raw symbol body and post-process the components for display. -/
private def demangleBody (body : String) : String :=
  postprocessNameParts (nameToNameParts (Name.demangle body))
/-- Split a `lp_`-prefixed symbol into (demangled body, package name).
Tries each underscore as a split point; the first valid split (shortest single-component
package where the remainder is a valid mangled name) is correct. -/
private def demangleWithPkg (s : String) : Option (String × String) := do
  for pos, h in s.positions do
    -- Only interior underscores are candidate split points: the package
    -- before and the body after must both be nonempty.
    if pos.get h == '_' && pos ≠ s.startPos then
      let nextPos := pos.next h
      if nextPos == s.endPos then continue
      let pkg := s.extract s.startPos pos
      let body := s.extract nextPos s.endPos
      -- The package part must demangle to a single string component.
      let validPkg := match Name.demangle? pkg with
        | some (.str .anonymous _) => true
        | _ => false
      if validPkg && (Name.demangle? body).isSome then
        let pkgName := match Name.demangle pkg with
          | .str .anonymous s => s
          | _ => pkg
        return (demangleBody body, pkgName)
  none
/-- Split `s` at the first occurrence of `".cold"`: returns the part before
the marker and the remainder (including the marker). When no marker is
present, returns `(s, "")`. -/
private def stripColdSuffix (s : String) : String × String :=
  if let some pos := s.find? ".cold" then
    (s.extract s.startPos pos, s.extract pos s.endPos)
  else
    (s, "")
/-- Demangle a symbol via its mangling prefix (`l_`, `lp_`, `_init_l_`,
`_init_lp_`, `initialize_l_`, `initialize_lp_`, `initialize_`).
Returns `none` when no prefix matches or the matched body is invalid. -/
private def demangleCore (s : String) : Option String := do
  -- Dispatch table: (prefix, display tag, whether the body carries a package
  -- component). Order matters — more specific prefixes come before prefixes
  -- of themselves (e.g. `initialize_l_` before `initialize_`).
  let cases : List (String × String × Bool) :=
    [ ("_init_l_",       "[init] ",        false)
    , ("_init_lp_",      "[init] ",        true)
    , ("initialize_l_",  "[module_init] ", false)
    , ("initialize_lp_", "[module_init] ", true)
    , ("initialize_",    "[module_init] ", false)  -- bare module init
    , ("l_",             "",               false)
    , ("lp_",            "",               true) ]
  for (pfx, tag, usesPkg) in cases do
    if let some rest := dropPrefix? s pfx then
      if usesPkg then
        -- Package-carrying forms: fall through to later cases when the
        -- package split fails, matching the original cascade.
        if let some (name, pkg) := demangleWithPkg rest then
          return s!"{tag}{name} ({pkg})"
      else
        -- Plain forms: only accept a non-empty body.
        if !rest.isEmpty then
          return s!"{tag}{demangleBody rest}"
  none
/-- Demangle a full symbol name. Handles `lean_apply_N` helpers, the
`_lean_main` entry point, `.cold` outlined-path suffixes, and the
`l_`/`lp_`/init prefixes; returns `none` for non-Lean symbols. -/
public def demangleSymbol (symbol : String) : Option String := do
  if symbol.isEmpty then none
  -- Strip .cold suffix first so the prefix logic sees the core symbol.
  let (core, coldSuffix) := stripColdSuffix symbol
  -- Handle lean_apply_N (closure application helpers).
  if let some rest := dropPrefix? core "lean_apply_" then
    if isAllDigits rest then
      let r := s!"<apply/{rest}>"
      if coldSuffix.isEmpty then return r else return s!"{r} {coldSuffix}"
  -- Handle _lean_main
  if core == "_lean_main" then
    if coldSuffix.isEmpty then return "[lean] main"
    else return s!"[lean] main {coldSuffix}"
  -- Try prefix-based demangling; `←` short-circuits to `none` on failure.
  -- (Fix: the monadic bind arrow was lost here, leaving `let result demangleCore core`.)
  let result ← demangleCore core
  if coldSuffix.isEmpty then return result
  else return s!"{result} {coldSuffix}"
/-- Advance `pos` past the longest run of characters satisfying `pred`,
stopping at the end of the string. -/
-- (Fix: the `→` in the predicate's type was lost, leaving `Char Bool`.)
private def skipWhile (s : String) (pos : s.Pos) (pred : Char → Bool) : s.Pos :=
  if h : pos = s.endPos then pos
  else if pred (pos.get h) then skipWhile s (pos.next h) pred
  else pos
termination_by pos
/-- Split `s` at two positions into (before `p₁`, between `p₁` and `p₂`, after `p₂`). -/
private def splitAt₂ (s : String) (p₁ p₂ : s.Pos) : String × String × String :=
  let pre  := s.extract s.startPos p₁
  let mid  := s.extract p₁ p₂
  let post := s.extract p₂ s.endPos
  (pre, mid, post)
/-- Extract the symbol from a backtrace line (Linux glibc or macOS format).
Returns `(prefix, symbol, suffix)` so the line can be reassembled with the
symbol replaced by its demangled form. -/
private def extractSymbol (line : String) :
    Option (String × String × String) :=
  tryLinux line |>.orElse (fun _ => tryMacOS line)
where
  -- Linux glibc: ./lean(SYMBOL+0x2a) [0x555...]
  tryLinux (line : String) : Option (String × String × String) := do
    -- (Fix: the `←` bind arrows in this block were lost; `find?` returns an
    -- `Option` position, as the `.getD` call in tryMacOS shows.)
    let parenPos ← line.find? '('
    if h : parenPos = line.endPos then none else
    let symStart := parenPos.next h
    -- Symbol ends at '+' (offset follows) or ')' (no offset).
    let delimPos ← symStart.find? (fun c => c == '+' || c == ')')
    if delimPos == symStart then none else
    some (splitAt₂ line symStart delimPos)
  -- macOS: N lib 0xADDR SYMBOL + offset
  tryMacOS (line : String) : Option (String × String × String) := do
    let zxPos ← line.find? "0x"
    if h : zxPos = line.endPos then none else
    let afterZero := zxPos.next h
    if h2 : afterZero = line.endPos then none else
    let afterX := afterZero.next h2
    -- Skip the hex address, then the spaces before the symbol.
    let afterHex := skipWhile line afterX (·.isHexDigit)
    let symStart := skipWhile line afterHex (· == ' ')
    if symStart == line.endPos then none else
    -- Symbol runs to " + offset" when present, else to end of line.
    let symEnd := (symStart.find? " + ").getD line.endPos
    if symEnd == symStart then none else
    some (splitAt₂ line symStart symEnd)
/-- Demangle the symbol embedded in a backtrace line, leaving the rest of the
line intact. Returns `none` when the line contains no demanglable Lean symbol. -/
public def demangleBtLine (line : String) : Option String := do
  -- (Fix: both `←` bind arrows were lost in this block.)
  let (pfx, sym, sfx) ← extractSymbol line
  let demangled ← demangleSymbol sym
  return pfx ++ demangled ++ sfx
/-- C-export wrapper for `demangleBtLine`; yields `""` on failure so the C
side can test for emptiness instead of handling an option. -/
@[export lean_demangle_bt_line_cstr]
def demangleBtLineCStr (line : @& String) : String :=
  match demangleBtLine line with
  | some s => s
  | none => ""
/-- C-export wrapper for `demangleSymbol`; yields `""` on failure so the C
side can test for emptiness instead of handling an option. -/
@[export lean_demangle_symbol_cstr]
def demangleSymbolCStr (symbol : @& String) : String :=
  match demangleSymbol symbol with
  | some s => s
  | none => ""
end Lean.Name.Demangle

Some files were not shown because too many files have changed in this diff Show More