mirror of
https://github.com/leanprover/lean4.git
synced 2026-04-04 19:24:09 +00:00
Compare commits
92 Commits
simplc-reb
...
implicit_r
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dcd8bcb24e | ||
|
|
eb9846fe7f | ||
|
|
f1934c8d52 | ||
|
|
7fa7ec1887 | ||
|
|
043b8a765a | ||
|
|
ac7c8e157e | ||
|
|
481f6b6d64 | ||
|
|
c0b9ff1148 | ||
|
|
ad64f7c1ba | ||
|
|
6c671ffe6f | ||
|
|
c7457fc219 | ||
|
|
170eaf719a | ||
|
|
f3cbdca6e2 | ||
|
|
12d3ffc15b | ||
|
|
e9cc84b7c9 | ||
|
|
abc96e9ead | ||
|
|
f2d9161780 | ||
|
|
91a150939f | ||
|
|
ede936c20f | ||
|
|
a79c6c44ff | ||
|
|
e2407589ff | ||
|
|
61e09dd57e | ||
|
|
4dd6a99fec | ||
|
|
7272e761be | ||
|
|
63675d29d1 | ||
|
|
b491d868ed | ||
|
|
c5b58092e3 | ||
|
|
50ca285237 | ||
|
|
424fbbdf26 | ||
|
|
200f65649a | ||
|
|
bfc5d43ad3 | ||
|
|
a5d0ab510e | ||
|
|
91bd6e19a7 | ||
|
|
642bcdf55a | ||
|
|
2a8650f975 | ||
|
|
69393c4d9e | ||
|
|
6833b6dba8 | ||
|
|
76c95a085b | ||
|
|
0a19fe7d98 | ||
|
|
52db0be2b0 | ||
|
|
af7b3866b2 | ||
|
|
bf8ca518e7 | ||
|
|
8059477292 | ||
|
|
2f8c85af89 | ||
|
|
b2446552b4 | ||
|
|
440d686313 | ||
|
|
a166d6ee20 | ||
|
|
c5c0ddcc56 | ||
|
|
9a032cd261 | ||
|
|
4979fa8415 | ||
|
|
4a9a3eaf6b | ||
|
|
620ef3bb86 | ||
|
|
f084ce1497 | ||
|
|
838ff5e850 | ||
|
|
efcfd967e0 | ||
|
|
5c7a508e21 | ||
|
|
bda15f6c25 | ||
|
|
fb13783f5c | ||
|
|
8051e39a17 | ||
|
|
0d2a511bde | ||
|
|
6a283751b9 | ||
|
|
fadb1e6b9b | ||
|
|
97a7fed6c5 | ||
|
|
0721313dcc | ||
|
|
022bf2f822 | ||
|
|
c12c783f62 | ||
|
|
57a2dc0146 | ||
|
|
d77efe6a5f | ||
|
|
cce6ce9577 | ||
|
|
103ed5b54b | ||
|
|
187a8c1ce3 | ||
|
|
e579dfdb14 | ||
|
|
c1ad6aa0db | ||
|
|
c6f4fa8678 | ||
|
|
db9293ee3b | ||
|
|
dae150a976 | ||
|
|
f3b8f76ec4 | ||
|
|
6ae413a56a | ||
|
|
92aec45057 | ||
|
|
c8462354c6 | ||
|
|
4eabd86604 | ||
|
|
9f64f53fef | ||
|
|
6ca23a7b8b | ||
|
|
e760b8ddf5 | ||
|
|
d7e57b66d5 | ||
|
|
56bd8cd0c2 | ||
|
|
6cbaada1bf | ||
|
|
db12e64845 | ||
|
|
3b2944205b | ||
|
|
d9cea67e24 | ||
|
|
01173b195f | ||
|
|
7b29425361 |
@@ -84,6 +84,27 @@ leading quantifiers are stripped when creating a pattern.
|
||||
|
||||
If you're unsure which label applies, it's fine to omit the label and let reviewers add it.
|
||||
|
||||
## Module System for `src/` Files
|
||||
|
||||
Files in `src/Lean/`, `src/Std/`, and `src/lake/Lake/` must have both `module` and `prelude` (CI enforces `^prelude$` on its own line). With `prelude`, nothing is auto-imported — you must explicitly import `Init.*` modules for standard library features. Check existing files in the same directory for the pattern, e.g.:
|
||||
|
||||
```lean
|
||||
module
|
||||
|
||||
prelude
|
||||
import Init.While -- needed for while/repeat
|
||||
import Init.Data.String.TakeDrop -- needed for String.startsWith
|
||||
public import Lean.Compiler.NameMangling -- public if types are used in public signatures
|
||||
```
|
||||
|
||||
Files outside these directories (e.g. `tests/`, `script/`) use just `module`.
|
||||
|
||||
## CI Log Retrieval
|
||||
|
||||
When CI jobs fail, investigate immediately - don't wait for other jobs to complete. Individual job logs are often available even while other jobs are still running. Try `gh run view <run-id> --log` or `gh run view <run-id> --log-failed`, or use `gh run view <run-id> --job=<job-id>` to target the specific failed job. Sleeping is fine when asked to monitor CI and no failures exist yet, but once any job fails, investigate that failure immediately.
|
||||
|
||||
## Copyright Headers
|
||||
|
||||
New files require a copyright header. To get the year right, always run `date +%Y` rather than relying on memory. The copyright holder should be the author or their current employer — check other recent files by the same author in the repository to determine the correct entity (e.g., "Lean FRO, LLC", "Amazon.com, Inc. or its affiliates").
|
||||
|
||||
Test files (in `tests/`) do not need copyright headers.
|
||||
|
||||
@@ -103,6 +103,15 @@ Every time you run `release_checklist.py`, you MUST:
|
||||
This summary should be provided EVERY time you run the checklist, not just after creating new PRs.
|
||||
The user needs to see the complete picture of what's waiting for review.
|
||||
|
||||
## Checking PR Status When Asked
|
||||
|
||||
When the user asks for "status" or you need to report on PRs between checklist runs:
|
||||
- **ALWAYS check actual PR state** using `gh pr view <number> --repo <repo> --json state,mergedAt`
|
||||
- Do NOT rely on cached CI results or previous checklist output
|
||||
- The user may have merged PRs since your last check
|
||||
- Report which PRs are MERGED, which are OPEN with CI status, and which are still pending
|
||||
- After discovering merged PRs, rerun `release_checklist.py` to advance the release process
|
||||
|
||||
## Nightly Infrastructure
|
||||
|
||||
The nightly build system uses branches and tags across two repositories:
|
||||
|
||||
35
.github/workflows/ci.yml
vendored
35
.github/workflows/ci.yml
vendored
@@ -60,10 +60,23 @@ jobs:
|
||||
if [[ -n '${{ secrets.PUSH_NIGHTLY_TOKEN }}' ]]; then
|
||||
git remote add nightly https://foo:'${{ secrets.PUSH_NIGHTLY_TOKEN }}'@github.com/${{ github.repository_owner }}/lean4-nightly.git
|
||||
git fetch nightly --tags
|
||||
LEAN_VERSION_STRING="nightly-$(date -u +%F)"
|
||||
# do nothing if commit already has a different tag
|
||||
if [[ "$(git name-rev --name-only --tags --no-undefined HEAD 2> /dev/null || echo "$LEAN_VERSION_STRING")" == "$LEAN_VERSION_STRING" ]]; then
|
||||
if [[ '${{ github.event_name }}' == 'workflow_dispatch' ]]; then
|
||||
# Manual re-release: create a revision of the most recent nightly
|
||||
BASE_NIGHTLY=$(git tag -l 'nightly-*' | sort -rV | head -1)
|
||||
# Strip any existing -revK suffix to get the base date tag
|
||||
BASE_NIGHTLY="${BASE_NIGHTLY%%-rev*}"
|
||||
REV=1
|
||||
while git rev-parse "refs/tags/${BASE_NIGHTLY}-rev${REV}" >/dev/null 2>&1; do
|
||||
REV=$((REV + 1))
|
||||
done
|
||||
LEAN_VERSION_STRING="${BASE_NIGHTLY}-rev${REV}"
|
||||
echo "nightly=$LEAN_VERSION_STRING" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
# Scheduled: do nothing if commit already has a different tag
|
||||
LEAN_VERSION_STRING="nightly-$(date -u +%F)"
|
||||
if [[ "$(git name-rev --name-only --tags --no-undefined HEAD 2> /dev/null || echo "$LEAN_VERSION_STRING")" == "$LEAN_VERSION_STRING" ]]; then
|
||||
echo "nightly=$LEAN_VERSION_STRING" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -475,7 +488,7 @@ jobs:
|
||||
git tag "${{ needs.configure.outputs.nightly }}"
|
||||
git push nightly "${{ needs.configure.outputs.nightly }}"
|
||||
git push -f origin refs/tags/${{ needs.configure.outputs.nightly }}:refs/heads/nightly
|
||||
last_tag="$(git log HEAD^ --simplify-by-decoration --pretty="format:%d" | grep -o "nightly-[-0-9]*" | head -n 1)"
|
||||
last_tag="$(git log HEAD^ --simplify-by-decoration --pretty="format:%d" | grep -o "nightly-[^ ,)]*" | head -n 1)"
|
||||
echo -e "*Changes since ${last_tag}:*\n\n" > diff.md
|
||||
git show "$last_tag":RELEASES.md > old.md
|
||||
#./script/diff_changelogs.py old.md doc/changes.md >> diff.md
|
||||
@@ -498,8 +511,18 @@ jobs:
|
||||
gh workflow -R leanprover/release-index run update-index.yml
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.RELEASE_INDEX_TOKEN }}
|
||||
- name: Generate mathlib nightly-testing app token
|
||||
id: mathlib-app-token
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
|
||||
continue-on-error: true
|
||||
with:
|
||||
app-id: ${{ secrets.MATHLIB_NIGHTLY_TESTING_APP_ID }}
|
||||
private-key: ${{ secrets.MATHLIB_NIGHTLY_TESTING_PRIVATE_KEY }}
|
||||
owner: leanprover-community
|
||||
repositories: mathlib4-nightly-testing
|
||||
- name: Update toolchain on mathlib4's nightly-testing branch
|
||||
if: steps.mathlib-app-token.outcome == 'success'
|
||||
run: |
|
||||
gh workflow -R leanprover-community/mathlib4-nightly-testing run nightly_bump_toolchain.yml
|
||||
gh workflow -R leanprover-community/mathlib4-nightly-testing run nightly_bump_and_merge.yml
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.MATHLIB4_BOT }}
|
||||
GITHUB_TOKEN: ${{ steps.mathlib-app-token.outputs.token }}
|
||||
|
||||
@@ -65,7 +65,14 @@ We'll use `v4.6.0` as the intended release version as a running example.
|
||||
- The `lakefile.toml` should always refer to dependencies via their `main` or `master` branch,
|
||||
not a toolchain tag
|
||||
(with the exception of `ProofWidgets4`, which *must* use a sequential version tag).
|
||||
- **Important:** After creating and pushing the ProofWidgets4 tag (see above),
|
||||
the mathlib4 lakefile must be updated to reference the new tag (e.g. `v0.0.87`).
|
||||
The `release_steps.py` script handles this automatically by looking up the latest
|
||||
ProofWidgets4 tag compatible with the target toolchain.
|
||||
- Push the PR branch to the main Mathlib repository rather than a fork, or CI may not work reliably
|
||||
- The "Verify Transient and Automated Commits" CI check on toolchain bump PRs can be ignored —
|
||||
it often fails on automated commits (`x:` prefixed) from the nightly-testing history that can't be
|
||||
reproduced in CI. This does not block merging.
|
||||
- `repl`:
|
||||
There are two copies of `lean-toolchain`/`lakefile.lean`:
|
||||
in the root, and in `test/Mathlib/`. Edit both, and run `lake update` in both directories.
|
||||
@@ -146,6 +153,9 @@ We'll use `v4.7.0-rc1` as the intended release version in this example.
|
||||
* The repository does not need any changes to move to the new version.
|
||||
* Note that sometimes there are *unreviewed* but necessary changes on the `nightly-testing` branch of the repository.
|
||||
If so, you will need to merge these into the `bump_to_v4.7.0-rc1` branch manually.
|
||||
* The `nightly-testing` branch may also contain temporary fix scripts (e.g. `fix_backward_defeq.py`,
|
||||
`fix_deprecations.py`) that were used to adapt to breaking changes during the nightly cycle.
|
||||
These should be reviewed and removed if no longer needed, as they can interfere with CI checks.
|
||||
- For each of the repositories listed in `script/release_repos.yml`,
|
||||
- Run `script/release_steps.py v4.7.0-rc1 <repo>` (e.g. replacing `<repo>` with `batteries`), which will walk you through the following steps:
|
||||
- Create a new branch off `master`/`main` (as specified in the `branch` field), called `bump_to_v4.7.0-rc1`.
|
||||
|
||||
6
doc/examples/IJCAR2026/README.md
Normal file
6
doc/examples/IJCAR2026/README.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# IJCAR 2026: `grind`, An SMT-Inspired Tactic for Lean 4
|
||||
|
||||
Ancillary materials for the paper.
|
||||
|
||||
- `examples.lean`: interactive examples from the paper
|
||||
- `analyze_grind_loc.py`: script used for the evaluation section, analyzing `grind` adoption and lines-of-code changes in Mathlib
|
||||
401
doc/examples/IJCAR2026/analyze_grind_loc.py
Executable file
401
doc/examples/IJCAR2026/analyze_grind_loc.py
Executable file
@@ -0,0 +1,401 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Analyze grind adoption LoC changes in mathlib.
|
||||
|
||||
For each theorem/lemma in master that uses grind, find the most recent
|
||||
commit where it didn't use grind, and measure the LoC change.
|
||||
|
||||
This script was used in preparing the "Evaluation" section of the grind paper.
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import re
|
||||
import csv
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from dataclasses import dataclass
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from typing import Iterator
|
||||
from functools import lru_cache
|
||||
|
||||
|
||||
@dataclass
class GrindUsage:
    """A single `grind` call site found in master, keyed by its enclosing declaration."""
    file: str  # repo-relative path of the .lean file
    line_no: int  # 1-indexed line of the grind call in master
    decl_name: str  # name of the enclosing declaration
    decl_type: str  # theorem, lemma, def, example, etc.
|
||||
|
||||
|
||||
@dataclass
class LocChange:
    """Before/after line counts for one declaration that was converted to `grind`."""
    file: str  # repo-relative path of the .lean file
    decl_name: str  # declaration whose proof changed
    decl_type: str  # theorem, lemma, def, ...
    old_loc: int  # non-blank lines of the declaration before grind was introduced
    new_loc: int  # non-blank lines of the declaration in current master
    loc_saved: int  # old_loc - new_loc (negative if the declaration grew)
    commit_sha: str  # abbreviated (12-char) sha of the commit that introduced grind
    commit_date: str  # YYYY-MM-DD date of that commit
|
||||
|
||||
|
||||
def run_git(args: list[str], repo: str = ".") -> str:
    """Execute a git command inside `repo` and return its captured stdout.

    Raises subprocess.CalledProcessError when git exits with a non-zero
    status (check=True).
    """
    cmd = ["git", "-C", repo, *args]
    completed = subprocess.run(cmd, capture_output=True, text=True, check=True)
    return completed.stdout
|
||||
|
||||
|
||||
def run_git_safe(args: list[str], repo: str = ".") -> str | None:
|
||||
"""Run a git command, return None on failure."""
|
||||
result = subprocess.run(
|
||||
["git", "-C", repo] + args,
|
||||
capture_output=True, text=True
|
||||
)
|
||||
if result.returncode != 0:
|
||||
return None
|
||||
return result.stdout
|
||||
|
||||
|
||||
@lru_cache(maxsize=4096)
def get_file_at_commit(repo: str, commit: str, file_path: str) -> str | None:
    """Get file contents at a specific commit (cached).

    Returns None when `git show` fails (e.g. the path does not exist at
    that commit). Caching is safe because the history queried during a
    run does not change.
    """
    return run_git_safe(["show", f"{commit}:{file_path}"], repo)
|
||||
|
||||
|
||||
def find_grind_usages(repo: str = ".") -> tuple[list[GrindUsage], int, int]:
    """Find all declarations using grind in current master.

    Returns (usages, total_grind_calls, grind_in_decls) where:
    - total_grind_calls is the count of grind tactic calls (after filtering comments/attrs)
    - grind_in_decls is the count of those that are inside named declarations

    NOTE(review): the filtering below is purely textual/heuristic — a line
    mentioning `grind` inside the body of a multi-line comment would still
    be counted.
    """
    # Use git grep to find lines containing 'grind' (excludes lake packages)
    result = run_git(["grep", "-n", "grind", "master", "--", "Mathlib/"], repo)

    usages = []
    seen = set()  # (file, decl_name) to dedupe multiple grind calls in one declaration
    total_grind_calls = 0
    grind_in_decls = 0

    for line in result.strip().split('\n'):
        if not line:
            continue
        # Format: master:path/to/file.lean:123:line content
        match = re.match(r'^master:(.+\.lean):(\d+):(.*)$', line)
        if not match:
            continue

        file_path, line_no_str, content = match.groups()
        line_no = int(line_no_str)

        # Skip comments and attributes (not tactic calls)
        content_stripped = content.strip()
        if content_stripped.startswith('--') or content_stripped.startswith('/-'):
            continue
        if content_stripped.startswith('attribute'):
            continue
        if '@[' in content and 'grind' in content:
            # Could be an attribute like @[grind =], skip
            # unless the line also contains a proof/definition body marker.
            if 'by' not in content and ':=' not in content:
                continue

        total_grind_calls += 1

        # Find the declaration this grind belongs to
        decl_name, decl_type = find_decl_at_line(repo, file_path, line_no)
        if decl_name is None:
            continue

        grind_in_decls += 1

        # One entry per (file, declaration) pair, recording the first hit.
        key = (file_path, decl_name)
        if key in seen:
            continue
        seen.add(key)

        usages.append(GrindUsage(
            file=file_path,
            line_no=line_no,
            decl_name=decl_name,
            decl_type=decl_type
        ))

    return usages, total_grind_calls, grind_in_decls
|
||||
|
||||
|
||||
def find_decl_at_line(repo: str, file_path: str, grind_line: int) -> tuple[str | None, str | None]:
    """
    Locate the declaration that encloses the grind call at `grind_line`
    (1-indexed) in `file_path` at master, by scanning upwards for the
    nearest declaration header.

    Returns (decl_name, decl_type), or (None, None) when the file cannot
    be read or no declaration header precedes the line.
    """
    content = get_file_at_commit(repo, "master", file_path)
    if content is None:
        return None, None

    # Declaration header with optional leading attributes and modifiers.
    decl_pattern = re.compile(
        r'^(?:@\[.*?\]\s*)*'
        r'(?:private\s+|protected\s+|noncomputable\s+|scoped\s+)*'
        r'(theorem|lemma|def|example|instance|abbrev|structure|class)\s+(\w+)'
    )

    source_lines = content.split('\n')
    # Clamp the starting index to the file length, then walk toward the top.
    start = min(grind_line - 1, len(source_lines) - 1)
    for text in reversed(source_lines[:start + 1]):
        hit = decl_pattern.match(text)
        if hit:
            return hit.group(2), hit.group(1)

    return None, None
|
||||
|
||||
|
||||
def find_grind_introduction_commit(repo: str, file_path: str, decl_name: str) -> str | None:
    """
    Find the commit that introduced grind to this declaration.

    Strategy: locate the declaration's line range in master, find the first
    `grind` line inside it, `git blame` that line, then verify that the
    declaration already existed in the blamed commit's parent *without*
    grind (otherwise the declaration was "born with grind").

    Returns the full commit sha, or None if the declaration was born with
    grind (or its history cannot be traced).
    """
    # First, find the line range of the declaration in master
    content = get_file_at_commit(repo, "master", file_path)
    if content is None:
        return None

    lines = content.split('\n')
    decl_start = None
    decl_end = None

    # Find declaration start (header with optional attributes/modifiers)
    decl_pattern = re.compile(rf'^(?:@\[.*?\]\s*)*(?:private\s+|protected\s+|noncomputable\s+|scoped\s+)*(theorem|lemma|def|example|instance|abbrev|structure|class)\s+{re.escape(decl_name)}\b')
    for i, line in enumerate(lines):
        if decl_pattern.match(line):
            decl_start = i
            break

    if decl_start is None:
        return None

    # Find declaration end (next top-level declaration or EOF)
    end_patterns = re.compile(r'^(?:private\s+|protected\s+|noncomputable\s+|scoped\s+)*(theorem|lemma|def|example|instance|abbrev|structure|class|namespace|section|end\s|@\[|#|/-)')
    for i in range(decl_start + 1, len(lines)):
        line = lines[i]
        # Only an unindented line can terminate a top-level declaration.
        if line and not line[0].isspace() and end_patterns.match(line):
            decl_end = i
            break
    if decl_end is None:
        decl_end = len(lines)

    # Find grind line within declaration
    grind_line = None
    for i in range(decl_start, decl_end):
        if 'grind' in lines[i]:
            grind_line = i + 1  # 1-indexed for git blame -L
            break

    if grind_line is None:
        return None

    # Use git blame to find when that grind line was added
    blame_result = run_git_safe(["blame", "-L", f"{grind_line},{grind_line}", "--porcelain", "master", "--", file_path], repo)
    if blame_result is None:
        return None

    # First line of porcelain output is the commit SHA
    first_line = blame_result.split('\n')[0]
    commit_sha = first_line.split()[0]

    # Check if this declaration existed before this commit (without grind)
    parent_sha = run_git_safe(["rev-parse", f"{commit_sha}^"], repo)
    if parent_sha is None:
        return None  # Initial commit, born with grind
    parent_sha = parent_sha.strip()

    # Check if declaration existed in parent
    parent_content = get_file_at_commit(repo, parent_sha, file_path)
    if parent_content is None:
        # File didn't exist in parent - might be new file or renamed
        return None

    # Check if declaration existed and didn't have grind
    # (substring check is a cheap pre-filter before the line scan below)
    if decl_name not in parent_content:
        return None  # Declaration didn't exist - born with grind

    # Check if it already had grind in parent
    parent_lines = parent_content.split('\n')
    in_decl = False
    for line in parent_lines:
        if decl_pattern.match(line):
            in_decl = True
        elif in_decl:
            # Stop scanning once the declaration ends at the next top-level item.
            if line and not line[0].isspace() and end_patterns.match(line):
                break
            if 'grind' in line:
                # Already had grind in parent — not the introduction commit
                return None

    return commit_sha
|
||||
|
||||
|
||||
def extract_proof_loc(repo: str, file_path: str, decl_name: str, commit: str) -> int | None:
    """
    Count the non-blank lines of declaration `decl_name` in `file_path`
    as it appears at `commit`.

    Returns None when the file or the declaration is absent at that commit.
    """
    content = get_file_at_commit(repo, commit, file_path)
    if content is None:
        return None
    src = content.split('\n')

    # Header of the declaration, allowing leading attributes and modifiers.
    head_re = re.compile(
        rf'^(?:@\[.*?\]\s*)*(?:private\s+|protected\s+|noncomputable\s+|scoped\s+)*'
        rf'(theorem|lemma|def|example|instance|abbrev|structure|class)\s+{re.escape(decl_name)}\b'
    )
    start = next((idx for idx, text in enumerate(src) if head_re.match(text)), None)
    if start is None:
        return None

    # The declaration ends at the next unindented top-level construct, or EOF.
    stop_re = re.compile(
        r'^(?:private\s+|protected\s+|noncomputable\s+|scoped\s+)*'
        r'(theorem|lemma|def|example|instance|abbrev|structure|class|namespace|section|end\s|@\[|#|/-)'
    )
    end = next(
        (idx for idx in range(start + 1, len(src))
         if src[idx] and not src[idx][0].isspace() and stop_re.match(src[idx])),
        len(src),
    )

    return sum(1 for text in src[start:end] if text.strip())
|
||||
|
||||
|
||||
def get_commit_date(repo: str, sha: str) -> str:
    """Return the committer date (YYYY-MM-DD) of commit `sha`."""
    # %ci prints e.g. "2025-01-31 12:00:00 +0000"; keep only the date field.
    stamp = run_git(["log", "-1", "--format=%ci", sha], repo)
    return stamp.strip().split(maxsplit=1)[0]
|
||||
|
||||
|
||||
def analyze_usage_detailed(repo: str, usage: GrindUsage) -> tuple[LocChange | None, str]:
    """Analyze a single grind usage.

    Returns (LocChange, "success") when the declaration can be compared
    before/after grind adoption, otherwise (None, <skip_reason>) where the
    reason is one of: born_with_grind, no_parent, old_loc_failed,
    new_loc_failed.
    """
    intro = find_grind_introduction_commit(repo, usage.file, usage.decl_name)
    if intro is None:
        return None, "born_with_grind"

    parent_out = run_git_safe(["rev-parse", f"{intro}^"], repo)
    if parent_out is None:
        return None, "no_parent"
    parent_sha = parent_out.strip()

    # LoC just before grind was introduced vs. LoC in current master.
    before = extract_proof_loc(repo, usage.file, usage.decl_name, parent_sha)
    if before is None:
        return None, "old_loc_failed"
    after = extract_proof_loc(repo, usage.file, usage.decl_name, "master")
    if after is None:
        return None, "new_loc_failed"

    change = LocChange(
        file=usage.file,
        decl_name=usage.decl_name,
        decl_type=usage.decl_type,
        old_loc=before,
        new_loc=after,
        loc_saved=before - after,
        commit_sha=intro[:12],
        commit_date=get_commit_date(repo, intro),
    )
    return change, "success"
|
||||
|
||||
|
||||
def main(repo: str = "."):
    """Run the full analysis on `repo` and emit a CSV report to stdout.

    Progress and a human-readable summary go to stderr so the CSV on
    stdout stays clean for redirection.
    """
    print("Finding grind usages in master...", file=sys.stderr)
    usages, total_grind_calls, grind_in_decls = find_grind_usages(repo)
    print(f"Found {len(usages)} declarations using grind ({grind_in_decls}/{total_grind_calls} grind calls)", file=sys.stderr)

    print("Analyzing git history (this may take a while)...", file=sys.stderr)
    results: list[LocChange] = []
    skip_reasons: dict[str, int] = {}

    # Each usage triggers several git subprocesses; threads overlap that I/O.
    with ThreadPoolExecutor(max_workers=64) as executor:
        futures = {executor.submit(analyze_usage_detailed, repo, usage): usage for usage in usages}

        for i, future in enumerate(as_completed(futures)):
            if (i + 1) % 50 == 0:
                print(f" Progress: {i + 1}/{len(usages)}", file=sys.stderr, flush=True)

            result, reason = future.result()
            if result:
                results.append(result)
            else:
                skip_reasons[reason] = skip_reasons.get(reason, 0) + 1

    total_skipped = sum(skip_reasons.values())
    print(f"\nAnalyzed {len(results)} declarations, skipped {total_skipped}:", file=sys.stderr)
    for reason, count in sorted(skip_reasons.items(), key=lambda x: -x[1]):
        print(f" - {reason}: {count}", file=sys.stderr)

    # Sort by LoC saved (descending)
    results.sort(key=lambda r: r.loc_saved, reverse=True)

    # Output CSV
    writer = csv.writer(sys.stdout)
    writer.writerow(["file", "declaration", "type", "old_loc", "new_loc", "loc_saved", "commit", "date"])
    for r in results:
        writer.writerow([r.file, r.decl_name, r.decl_type, r.old_loc, r.new_loc, r.loc_saved, r.commit_sha, r.commit_date])

    # Summary stats to stderr
    total_old = sum(r.old_loc for r in results) if results else 0
    total_new = sum(r.new_loc for r in results) if results else 0
    total_saved = sum(r.loc_saved for r in results) if results else 0
    avg_saved = total_saved / len(results) if results else 0

    print("\n" + "=" * 60, file=sys.stderr)
    print("GRIND ADOPTION LOC ANALYSIS", file=sys.stderr)
    print("=" * 60, file=sys.stderr)

    print("\n## Declaration Counts\n", file=sys.stderr)
    print(f" Total grind tactic calls: {total_grind_calls}", file=sys.stderr)
    print(f" In named declarations: {grind_in_decls} ({total_grind_calls - grind_in_decls} in anonymous/other)", file=sys.stderr)
    print(f" Unique declarations: {len(usages)}", file=sys.stderr)
    print(f" Converted to grind: {len(results)}", file=sys.stderr)
    print(f" Born with grind: {skip_reasons.get('born_with_grind', 0)}", file=sys.stderr)
    if skip_reasons.get('old_loc_failed', 0) > 0:
        print(f" Could not trace history: {skip_reasons.get('old_loc_failed', 0)}", file=sys.stderr)

    print("\n## Lines of Code Impact\n", file=sys.stderr)
    print(f" Total LoC before grind: {total_old}", file=sys.stderr)
    print(f" Total LoC after grind: {total_new}", file=sys.stderr)
    print(f" Total LoC saved: {total_saved}", file=sys.stderr)
    print(f" Average LoC saved per theorem: {avg_saved:.1f}", file=sys.stderr)
    big_savings = sum(1 for r in results if r.loc_saved >= 10)
    print(f" Declarations shrunk by 10+ lines: {big_savings}", file=sys.stderr)

    if results:
        print("\n## Top 10 Biggest LoC Savings\n", file=sys.stderr)
        for r in results[:10]:
            print(f" {r.loc_saved:+4d} lines: {r.decl_name} ({r.file})", file=sys.stderr)

    # Show any that got bigger (negative savings)
    got_bigger = [r for r in results if r.loc_saved < 0]
    if got_bigger:
        print(f"\n## Declarations That Got Bigger ({len(got_bigger)} total)\n", file=sys.stderr)
        print(" (showing 5 worst):", file=sys.stderr)
        for r in got_bigger[-5:]:  # Show worst 5 (list is sorted by loc_saved desc)
            print(f" {r.loc_saved:+4d} lines: {r.decl_name} ({r.file})", file=sys.stderr)

    print("\n" + "=" * 60, file=sys.stderr)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: the only option is which repository to analyze.
    import argparse

    cli = argparse.ArgumentParser(description="Analyze grind LoC savings")
    cli.add_argument("--repo", "-r", default=".", help="Repository path")
    main(cli.parse_args().repo)
|
||||
127
doc/examples/IJCAR2026/examples.lean
Normal file
127
doc/examples/IJCAR2026/examples.lean
Normal file
@@ -0,0 +1,127 @@
|
||||
/- Examples from the paper "grind: An SMT-Inspired Tactic for Lean 4" -/
open Lean Grind

/- Congruence closure. -/

example (f : Nat → Nat) (h : a = b) : f (f b) = f (f a) := by grind

/-
E-matching.

Any `f` that is the left inverse of `g` would work on this example.
-/
def f (x : Nat) := x - 1
def g (x : Nat) := x + 1

@[grind =] theorem fg : f (g x) = x := by simp [f, g]
example : f a = b → a = g c → b = c := by grind

/-
Any `R` that is transitive and symmetric would work on this example.
-/
def R : Nat → Nat → Prop := (· % 7 = · % 7)
@[grind →] theorem Rtrans : R x y → R y z → R x z := by grind [R]
@[grind →] theorem Rsymm : R x y → R y x := by grind [R]
example : R a b → R c b → R d c → R a d := by grind

/- Big step operational semantics example. -/

abbrev Variable := String
def State := Variable → Nat

-- Statements of a minimal imperative language.
inductive Stmt : Type where
  | skip : Stmt
  | assign : Variable → (State → Nat) → Stmt
  | seq : Stmt → Stmt → Stmt
  | ifThenElse : (State → Prop) → Stmt → Stmt → Stmt
  | whileDo : (State → Prop) → Stmt → Stmt

infix:60 ";; " => Stmt.seq
export Stmt (skip assign seq ifThenElse whileDo)

-- State update notation: `s[x ↦ n]` maps `x` to `n` and agrees with `s` elsewhere.
set_option quotPrecheck false in
notation s:70 "[" x:70 "↦" n:70 "]" => (fun v ↦ if v = x then n else s v)

-- Big-step evaluation relation.
inductive BigStep : Stmt → State → State → Prop where
  | skip (s : State) : BigStep skip s s
  | assign (x : Variable) (a : State → Nat) (s : State) : BigStep (assign x a) s (s[x ↦ a s])
  | seq {S T : Stmt} {s t u : State} (hS : BigStep S s t) (hT : BigStep T t u) :
      BigStep (S;; T) s u
  | if_true {B : State → Prop} {s t : State} (hcond : B s) (S T : Stmt) (hbody : BigStep S s t) :
      BigStep (ifThenElse B S T) s t
  | if_false {B : State → Prop} {s t : State} (hcond : ¬ B s) (S T : Stmt) (hbody : BigStep T s t) :
      BigStep (ifThenElse B S T) s t
  | while_true {B S s t u} (hcond : B s) (hbody : BigStep S s t) (hrest : BigStep (whileDo B S) t u) :
      BigStep (whileDo B S) s u
  | while_false {B S s} (hcond : ¬ B s) : BigStep (whileDo B S) s s

notation:55 "(" S:55 "," s:55 ")" " ==> " t:55 => BigStep S s t

example {B S T s t} (hcond : B s) : (ifThenElse B S T, s) ==> t → (S, s) ==> t := by
  grind [cases BigStep]

theorem cases_if_of_true {B S T s t} (hcond : B s) : (ifThenElse B S T, s) ==> t → (S, s) ==> t := by
  grind [cases BigStep]

theorem cases_if_of_false {B S T s t} (hcond : ¬ B s) : (ifThenElse B S T, s) ==> t → (T, s) ==> t := by
  grind [cases BigStep]

example {B S T s t} : (ifThenElse B S T, s) ==> t ↔ (B s ∧ (S, s) ==> t) ∨ (¬ B s ∧ (T, s) ==> t) := by
  grind [BigStep] -- shortcut for `cases BigStep` and `intro BigStep`

attribute [grind] BigStep
theorem if_iff {B S T s t} : (ifThenElse B S T, s) ==>
    t ↔ (B s ∧ (S, s) ==> t) ∨ (¬ B s ∧ (T, s) ==> t) := by grind

/- Dependent pattern matching. -/

inductive Vec (α : Type u) : Nat → Type u
  | nil : Vec α 0
  | cons : α → Vec α n → Vec α (n+1)

@[grind =] def Vec.head : Vec α (n+1) → α
  | .cons a _ => a

example (as bs : Vec Int (n+1)) : as.head = bs.head
    → (match as, bs with
       | .cons a _, .cons b _ => a + b) = 2 * as.head := by grind

/- Theory solvers. -/

example [CommRing α] (a b c : α) :
    a + b + c = 3 →
    a^2 + b^2 + c^2 = 5 →
    a^3 + b^3 + c^3 = 7 →
    a^4 + b^4 + c^4 = 9 := by grind

example (x : BitVec 8) : (x - 16) * (x + 16) = x^2 := by grind

example [CommSemiring α] [AddRightCancel α] (x y : α) :
    x^2*y = 1 → x*y^2 = y → y*x = 1 := by grind

example (a b : UInt32) : a ≤ 2 → b ≤ 3 → a + b ≤ 5 := by grind

example [LE α] [Std.IsLinearPreorder α] (a b c d : α) :
    a ≤ b → ¬ (c ≤ b) → ¬ (d ≤ c) → a ≤ d := by grind

/- Theory combination. -/

example [CommRing α] [NoNatZeroDivisors α]
    (a b c : α) (f : α → Nat) :
    a + b + c = 3 → a^2 + b^2 + c^2 = 5 → a^3 + b^3 + c^3 = 7 →
    f (a^4 + b^4) + f (9 - c^4) ≠ 1 := by grind

/- Interactive mode. -/

-- Remark: Mathlib contains the definition of `Real`, `sin`, and `cos`.
axiom Real : Type
instance : Lean.Grind.CommRing Real := sorry

axiom cos : Real → Real
axiom sin : Real → Real
axiom trig_identity : ∀ x, (cos x)^2 + (sin x)^2 = 1

-- Manually specify the patterns for `trig_identity`
grind_pattern trig_identity => cos x
grind_pattern trig_identity => sin x

example : (cos x + sin x)^2 = 2 * cos x * sin x + 1 := by
  grind? -- Provides code action

example : (cos x + sin x)^2 = 2 * cos x * sin x + 1 := by
  grind =>
    instantiate only [trig_identity]
    ring
|
||||
@@ -1,6 +0,0 @@
|
||||
**Breaking Changes**
|
||||
|
||||
* The functions `Lean.Environment.importModules` and `Lean.Environment.finalizeImport` have been extended with a new parameter `loadExts : Bool := false` that enables environment extension state loading.
|
||||
Their previous behavior corresponds to setting the flag to `true` but is only safe to do in combination with `enableInitializersExecution`; see also the `importModules` docstring.
|
||||
The new default value `false` ensures the functions can be used correctly multiple times within the same process when environment extension access is not needed.
|
||||
The wrapper function `Lean.Environment.withImportModules` now always calls `importModules` with `loadExts := false` as it is incompatible with extension loading.
|
||||
@@ -1,54 +0,0 @@
|
||||
This release introduces the Lean module system, which allows files to
|
||||
control the visibility of their contents for other files. In previous
|
||||
releases, this feature was available as a preview when the option
|
||||
`experimental.module` was set to `true`; it is now a fully supported
|
||||
feature of Lean.
|
||||
|
||||
# Benefits
|
||||
|
||||
Because modules reduce the amount of information exposed to other
|
||||
code, they speed up rebuilds because irrelevant changes can be
|
||||
ignored, they make it possible to be deliberate about API evolution by
|
||||
hiding details that may change from clients, they help proofs be
|
||||
checked faster by avoiding accidentally unfolding definitions, and
|
||||
they lead to smaller executable files through improved dead code
|
||||
elimination.
|
||||
|
||||
# Visibility
|
||||
|
||||
A source file is a module if it begins with the `module` keyword. By
|
||||
default, declarations in a module are private; the `public` modifier
|
||||
exports them. Proofs of theorems and bodies of definitions are private
|
||||
by default even when their signatures are public; the bodies of
|
||||
definitions can be made public by adding the `@[expose]`
|
||||
attribute. Theorems and opaque constants never expose their bodies.
|
||||
|
||||
`public section` and `@[expose] section` change the default visibility
|
||||
of declarations in the section.
|
||||
|
||||
# Imports
|
||||
|
||||
Modules may only import other modules. By default, `import` adds the
|
||||
public information of the imported module to the private scope of the
|
||||
current module. Adding the `public` modifier to an import places the
|
||||
imported modules's public information in the public scope of the
|
||||
current module, exposing it in turn to the current module's clients.
|
||||
|
||||
Within a package, `import all` can be used to import another module's
|
||||
private scope into the current module; this can be used to separate
|
||||
lemmas or tests from definition modules without exposing details to
|
||||
downstream clients.
|
||||
|
||||
# Meta Code
|
||||
|
||||
Code used in metaprograms must be marked `meta`. This ensures that the
|
||||
code is compiled and available for execution when it is needed during
|
||||
elaboration. Meta code may only reference other meta code. A whole
|
||||
module can be made available in the meta phase using `meta import`;
|
||||
this allows code to be shared across phases by importing the module in
|
||||
each phase. Code that is reachable from public metaprograms must be
|
||||
imported via `public meta import`, while local metaprograms can use
|
||||
plain `meta import` for their dependencies.
|
||||
|
||||
|
||||
The module system is described in detail in [the Lean language reference](https://lean-reference-manual-review.netlify.app/find/?domain=Verso.Genre.Manual.section&name=files).
|
||||
178
script/PROFILER_README.md
Normal file
178
script/PROFILER_README.md
Normal file
@@ -0,0 +1,178 @@
|
||||
# Lean Profiler
|
||||
|
||||
Profile Lean programs with demangled names using
|
||||
[samply](https://github.com/mstange/samply) and
|
||||
[Firefox Profiler](https://profiler.firefox.com).
|
||||
|
||||
Python 3, no external dependencies.
|
||||
|
||||
## Quick start
|
||||
|
||||
```bash
|
||||
# One command: record, symbolicate, demangle, and open in Firefox Profiler
|
||||
script/lean_profile.sh ./my_lean_binary [args...]
|
||||
|
||||
# See all options
|
||||
script/lean_profile.sh --help
|
||||
```
|
||||
|
||||
Requirements: `samply` (`cargo install samply`), `python3`.
|
||||
|
||||
## Reading demangled names
|
||||
|
||||
The demangler transforms low-level C symbol names into readable Lean names
|
||||
and annotates them with compact modifiers.
|
||||
|
||||
### Basic names
|
||||
|
||||
| Raw symbol | Demangled |
|
||||
|---|---|
|
||||
| `l_Lean_Meta_Sym_main` | `Lean.Meta.Sym.main` |
|
||||
| `lp_std_List_map` | `List.map (std)` |
|
||||
| `_init_l_Foo_bar` | `[init] Foo.bar` |
|
||||
| `initialize_Init_Data` | `[module_init] Init.Data` |
|
||||
| `_lean_main` | `[lean] main` |
|
||||
|
||||
### Modifier flags `[...]`
|
||||
|
||||
Compiler-generated suffixes are folded into a bracket annotation after the
|
||||
name. These indicate *how* the function was derived from the original source
|
||||
definition.
|
||||
|
||||
| Flag | Meaning | Compiler suffix |
|
||||
|---|---|---|
|
||||
| `arity`↓ | Reduced-arity specialization | `_redArg` |
|
||||
| `boxed` | Boxed calling-convention wrapper | `_boxed` |
|
||||
| `impl` | Implementation detail | `_impl` |
|
||||
| λ | Lambda-lifted closure | `_lam_N`, `_lambda_N`, `_elam_N` |
|
||||
| `jp` | Join point | `_jp_N` |
|
||||
| `closed` | Extracted closed subterm | `_closed_N` |
|
||||
| `private` | Private (module-scoped) definition | `_private.Module.0.` prefix |
|
||||
|
||||
Examples:
|
||||
|
||||
```
|
||||
Lean.Meta.Simp.simpLambda [boxed, λ] -- boxed wrapper of a lambda-lifted closure
|
||||
Lean.Meta.foo [arity↓, private] -- reduced-arity version of a private def
|
||||
```
|
||||
|
||||
Multiple flags are comma-separated. Order reflects how they were collected
|
||||
(innermost suffix first).
|
||||
|
||||
### Specializations `spec at ...`
|
||||
|
||||
When the compiler specializes a function at a particular call site, the
|
||||
demangled name shows `spec at <context>` after the base name and its flags.
|
||||
The context names the function whose body triggered the specialization, and
|
||||
may carry its own modifier flags:
|
||||
|
||||
```
|
||||
<base-name> [<base-flags>] spec at <context>[<context-flags>]
|
||||
```
|
||||
|
||||
Examples:
|
||||
|
||||
```
|
||||
-- foo specialized at call site in bar
|
||||
Lean.Meta.foo spec at Lean.Meta.bar
|
||||
|
||||
-- foo (with a lambda closure) specialized at bar (with reduced arity and a lambda)
|
||||
Lean.Meta.foo [λ] spec at Lean.Meta.bar[λ, arity↓]
|
||||
|
||||
-- chained specialization: foo specialized at bar, then at baz
|
||||
Lean.Meta.foo spec at Lean.Meta.bar spec at Lean.Meta.baz[arity↓]
|
||||
```
|
||||
|
||||
Context flags use the same symbols as base flags. When a context has no
|
||||
flags, the brackets are omitted.
|
||||
|
||||
### Other annotations
|
||||
|
||||
| Pattern | Meaning |
|
||||
|---|---|
|
||||
| `<apply/N>` | Lean runtime apply function (N arguments) |
|
||||
| `.cold.N` suffix | LLVM cold-path clone (infrequently executed) |
|
||||
| `(pkg)` suffix | Function from package `pkg` |
|
||||
|
||||
## Tools
|
||||
|
||||
### `script/lean_profile.sh` -- Full profiling pipeline
|
||||
|
||||
Records a profile, symbolicates it via samply's API, demangles Lean names,
|
||||
and opens the result in Firefox Profiler. This is the recommended workflow.
|
||||
|
||||
```bash
|
||||
script/lean_profile.sh ./build/release/stage1/bin/lean src/Lean/Elab/Term.lean
|
||||
```
|
||||
|
||||
Environment variables:
|
||||
|
||||
| Variable | Default | Description |
|
||||
|---|---|---|
|
||||
| `SAMPLY_RATE` | 1000 | Sampling rate in Hz |
|
||||
| `SAMPLY_PORT` | 3756 | Port for samply symbolication server |
|
||||
| `SERVE_PORT` | 3757 | Port for serving the demangled profile |
|
||||
| `PROFILE_KEEP` | 0 | Set to 1 to keep the temp directory |
|
||||
|
||||
### `script/profiler/lean_demangle.py` -- Name demangler
|
||||
|
||||
Demangles individual symbol names. Works as a stdin filter (like `c++filt`)
|
||||
or with arguments.
|
||||
|
||||
```bash
|
||||
echo "l_Lean_Meta_Sym_main" | python3 script/profiler/lean_demangle.py
|
||||
# Lean.Meta.Sym.main
|
||||
|
||||
python3 script/profiler/lean_demangle.py --raw l_foo___redArg
|
||||
# foo._redArg (exact name, no postprocessing)
|
||||
```
|
||||
|
||||
As a Python module:
|
||||
|
||||
```python
|
||||
from lean_demangle import demangle_lean_name, demangle_lean_name_raw
|
||||
|
||||
demangle_lean_name("l_foo___redArg") # "foo [arity↓]"
|
||||
demangle_lean_name_raw("l_foo___redArg") # "foo._redArg"
|
||||
```
|
||||
|
||||
### `script/profiler/symbolicate_profile.py` -- Profile symbolicator
|
||||
|
||||
Calls samply's symbolication API to resolve raw addresses into symbol names,
|
||||
then demangles them. Used internally by `lean_profile.sh`.
|
||||
|
||||
### `script/profiler/serve_profile.py` -- Profile server
|
||||
|
||||
Serves a profile JSON file to Firefox Profiler without re-symbolication
|
||||
(which would overwrite demangled names). Used internally by `lean_profile.sh`.
|
||||
|
||||
### `script/profiler/lean_demangle_profile.py` -- Standalone profile rewriter
|
||||
|
||||
Demangles names in an already-symbolicated profile file (if you have one
|
||||
from another source).
|
||||
|
||||
```bash
|
||||
python3 script/profiler/lean_demangle_profile.py profile.json.gz -o demangled.json.gz
|
||||
```
|
||||
|
||||
## Tests
|
||||
|
||||
```bash
|
||||
cd script/profiler && python3 -m unittest test_demangle -v
|
||||
```
|
||||
|
||||
## How it works
|
||||
|
||||
The demangler is a faithful port of Lean 4's `Name.demangleAux` from
|
||||
`src/Lean/Compiler/NameMangling.lean`. It reverses the encoding used by
|
||||
`Name.mangle` / `Name.mangleAux` which turns hierarchical Lean names into
|
||||
valid C identifiers:
|
||||
|
||||
- `_` separates name components (`Lean.Meta` -> `Lean_Meta`)
|
||||
- `__` encodes a literal underscore in a component name
|
||||
- `_xHH`, `_uHHHH`, `_UHHHHHHHH` encode special characters
|
||||
- `_N_` encodes numeric name components
|
||||
- `_00` is a disambiguation prefix for ambiguous patterns
|
||||
|
||||
After demangling, a postprocessing pass folds compiler-generated suffixes
|
||||
into human-readable annotations (see [Reading demangled names](#reading-demangled-names)).
|
||||
@@ -36,7 +36,7 @@ sys.path.insert(0, str(Path(__file__).parent))
|
||||
import build_artifact
|
||||
|
||||
# Constants
|
||||
NIGHTLY_PATTERN = re.compile(r'^nightly-(\d{4})-(\d{2})-(\d{2})$')
|
||||
NIGHTLY_PATTERN = re.compile(r'^nightly-(\d{4})-(\d{2})-(\d{2})(-rev(\d+))?$')
|
||||
VERSION_PATTERN = re.compile(r'^v4\.(\d+)\.(\d+)(-rc\d+)?$')
|
||||
# Accept short SHAs (7+ chars) - we'll resolve to full SHA later
|
||||
SHA_PATTERN = re.compile(r'^[0-9a-f]{7,40}$')
|
||||
@@ -158,7 +158,7 @@ def parse_identifier(s: str) -> Tuple[str, str]:
|
||||
return ('sha', full_sha)
|
||||
error(f"Invalid identifier format: '{s}'\n"
|
||||
f"Expected one of:\n"
|
||||
f" - nightly-YYYY-MM-DD (e.g., nightly-2024-06-15)\n"
|
||||
f" - nightly-YYYY-MM-DD or nightly-YYYY-MM-DD-revK (e.g., nightly-2024-06-15, nightly-2024-06-15-rev1)\n"
|
||||
f" - v4.X.Y or v4.X.Y-rcK (e.g., v4.8.0, v4.9.0-rc1)\n"
|
||||
f" - commit SHA (short or full)")
|
||||
|
||||
@@ -244,8 +244,13 @@ def fetch_nightly_tags() -> List[str]:
|
||||
if len(data) < 100:
|
||||
break
|
||||
|
||||
# Sort by date (nightly-YYYY-MM-DD format sorts lexicographically)
|
||||
tags.sort()
|
||||
# Sort by date and revision (nightly-YYYY-MM-DD-revK needs numeric comparison on rev)
|
||||
def nightly_sort_key(tag):
|
||||
m = NIGHTLY_PATTERN.match(tag)
|
||||
if m:
|
||||
return (int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(5) or 0))
|
||||
return (0, 0, 0, 0)
|
||||
tags.sort(key=nightly_sort_key)
|
||||
return tags
|
||||
|
||||
def get_commit_for_nightly(nightly: str) -> str:
|
||||
@@ -1024,6 +1029,7 @@ Range Syntax:
|
||||
Identifier Formats:
|
||||
|
||||
nightly-YYYY-MM-DD Nightly build date (e.g., nightly-2024-06-15)
|
||||
nightly-YYYY-MM-DD-revK Revised nightly (e.g., nightly-2024-06-15-rev1)
|
||||
Uses pre-built toolchains from leanprover/lean4-nightly.
|
||||
Fast: downloads via elan (~30s each).
|
||||
|
||||
@@ -1151,9 +1157,9 @@ Examples:
|
||||
# Validate --nightly-only
|
||||
if args.nightly_only:
|
||||
if from_val is not None and from_type != 'nightly':
|
||||
error("--nightly-only requires FROM to be a nightly identifier (nightly-YYYY-MM-DD)")
|
||||
error("--nightly-only requires FROM to be a nightly identifier (nightly-YYYY-MM-DD or nightly-YYYY-MM-DD-revK)")
|
||||
if to_type != 'nightly':
|
||||
error("--nightly-only requires TO to be a nightly identifier (nightly-YYYY-MM-DD)")
|
||||
error("--nightly-only requires TO to be a nightly identifier (nightly-YYYY-MM-DD or nightly-YYYY-MM-DD-revK)")
|
||||
|
||||
if from_val:
|
||||
info(f"From: {from_val} ({from_type})")
|
||||
|
||||
133
script/lean_profile.sh
Executable file
133
script/lean_profile.sh
Executable file
@@ -0,0 +1,133 @@
|
||||
#!/bin/bash
|
||||
# Profile a Lean binary with demangled names.
|
||||
#
|
||||
# Usage:
|
||||
# script/lean_profile.sh ./my_lean_binary [args...]
|
||||
#
|
||||
# Records a profile with samply, symbolicates via samply's API,
|
||||
# demangles Lean symbol names, and opens the result in Firefox Profiler.
|
||||
#
|
||||
# Requirements: samply (cargo install samply), python3
|
||||
#
|
||||
# Options (via environment variables):
|
||||
# SAMPLY_RATE — sampling rate in Hz (default: 1000)
|
||||
# SAMPLY_PORT — port for samply symbolication server (default: 3756)
|
||||
# SERVE_PORT — port for serving the demangled profile (default: 3757)
|
||||
# PROFILE_KEEP — set to 1 to keep the raw profile after demangling
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
PROFILER_DIR="$SCRIPT_DIR/profiler"
|
||||
SYMBOLICATE="$PROFILER_DIR/symbolicate_profile.py"
|
||||
SERVE_PROFILE="$PROFILER_DIR/serve_profile.py"
|
||||
|
||||
usage() {
|
||||
cat >&2 <<EOF
|
||||
Usage: $0 [options] <lean-binary> [args...]
|
||||
|
||||
Profile a Lean binary and view the results in Firefox Profiler
|
||||
with demangled Lean names.
|
||||
|
||||
Requirements:
|
||||
samply cargo install samply
|
||||
python3 (included with macOS / most Linux distros)
|
||||
|
||||
Environment variables:
|
||||
SAMPLY_RATE sampling rate in Hz (default: 1000)
|
||||
SAMPLY_PORT port for samply symbolication server (default: 3756)
|
||||
SERVE_PORT port for serving the demangled profile (default: 3757)
|
||||
PROFILE_KEEP set to 1 to keep the temp directory after profiling
|
||||
|
||||
Reading demangled names:
|
||||
Compiler suffixes are shown as modifier flags after the name:
|
||||
[arity↓] reduced-arity specialization (_redArg)
|
||||
[boxed] boxed calling-convention wrapper (_boxed)
|
||||
[λ] lambda-lifted closure (_lam_N, _lambda_N, _elam_N)
|
||||
[jp] join point (_jp_N)
|
||||
[closed] extracted closed subterm (_closed_N)
|
||||
[private] private (module-scoped) def (_private.Module.0. prefix)
|
||||
[impl] implementation detail (_impl)
|
||||
|
||||
Specializations appear after the flags:
|
||||
Lean.Meta.foo [λ] spec at Lean.Meta.bar[λ, arity↓]
|
||||
= foo (with lambda closure), specialized at bar (lambda, reduced arity)
|
||||
|
||||
Multiple "spec at" entries indicate chained specializations.
|
||||
See script/PROFILER_README.md for full documentation.
|
||||
EOF
|
||||
exit "${1:-0}"
|
||||
}
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
usage 1
|
||||
fi
|
||||
|
||||
case "${1:-}" in
|
||||
-h|--help) usage 0 ;;
|
||||
esac
|
||||
|
||||
if ! command -v samply &>/dev/null; then
|
||||
echo "error: samply not found. Install with: cargo install samply" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
RATE="${SAMPLY_RATE:-1000}"
|
||||
PORT="${SAMPLY_PORT:-3756}"
|
||||
SERVE="${SERVE_PORT:-3757}"
|
||||
TMPDIR=$(mktemp -d /tmp/lean-profile-XXXXXX)
|
||||
TMPFILE="$TMPDIR/profile.json.gz"
|
||||
DEMANGLED="$TMPDIR/profile-demangled.json.gz"
|
||||
SAMPLY_LOG="$TMPDIR/samply.log"
|
||||
SAMPLY_PID=""
|
||||
|
||||
cleanup() {
|
||||
if [ -n "$SAMPLY_PID" ]; then
|
||||
kill "$SAMPLY_PID" 2>/dev/null || true
|
||||
wait "$SAMPLY_PID" 2>/dev/null || true
|
||||
fi
|
||||
# Safety net: kill anything still on the symbolication port
|
||||
lsof -ti :"$PORT" 2>/dev/null | xargs kill 2>/dev/null || true
|
||||
[ "${PROFILE_KEEP:-0}" = "1" ] || rm -rf "$TMPDIR"
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
# Step 1: Record
|
||||
echo "Recording profile (rate=${RATE} Hz)..." >&2
|
||||
samply record --save-only -o "$TMPFILE" -r "$RATE" "$@"
|
||||
|
||||
# Step 2: Start samply server for symbolication
|
||||
echo "Starting symbolication server..." >&2
|
||||
samply load --no-open -P "$PORT" "$TMPFILE" > "$SAMPLY_LOG" 2>&1 &
|
||||
SAMPLY_PID=$!
|
||||
|
||||
# Wait for server to be ready
|
||||
for i in $(seq 1 30); do
|
||||
if grep -q "Local server listening" "$SAMPLY_LOG" 2>/dev/null; then
|
||||
break
|
||||
fi
|
||||
sleep 0.2
|
||||
done
|
||||
|
||||
# Extract the token from samply's output
|
||||
TOKEN=$(grep -oE '[a-z0-9]{30,}' "$SAMPLY_LOG" | head -1)
|
||||
|
||||
if [ -z "$TOKEN" ]; then
|
||||
echo "error: could not get samply server token" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
SERVER_URL="http://127.0.0.1:${PORT}/${TOKEN}"
|
||||
|
||||
# Step 3: Symbolicate + demangle
|
||||
echo "Symbolicating and demangling..." >&2
|
||||
python3 "$SYMBOLICATE" --server "$SERVER_URL" "$TMPFILE" -o "$DEMANGLED"
|
||||
|
||||
# Step 4: Kill symbolication server
|
||||
kill "$SAMPLY_PID" 2>/dev/null || true
|
||||
wait "$SAMPLY_PID" 2>/dev/null || true
|
||||
SAMPLY_PID=""
|
||||
|
||||
# Step 5: Serve the demangled profile directly (without samply's re-symbolication)
|
||||
echo "Opening in Firefox Profiler..." >&2
|
||||
python3 "$SERVE_PROFILE" "$DEMANGLED" -P "$SERVE"
|
||||
779
script/profiler/lean_demangle.py
Normal file
779
script/profiler/lean_demangle.py
Normal file
@@ -0,0 +1,779 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Lean name demangler.
|
||||
|
||||
Demangles C symbol names produced by the Lean 4 compiler back into
|
||||
readable Lean hierarchical names.
|
||||
|
||||
Usage as a filter (like c++filt):
|
||||
echo "l_Lean_Meta_Sym_main" | python lean_demangle.py
|
||||
|
||||
Usage as a module:
|
||||
from lean_demangle import demangle_lean_name
|
||||
print(demangle_lean_name("l_Lean_Meta_Sym_main"))
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# String.mangle / unmangle
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _is_ascii_alnum(ch):
|
||||
"""Check if ch is an ASCII letter or digit (matching Lean's isAlpha/isDigit)."""
|
||||
return ('a' <= ch <= 'z') or ('A' <= ch <= 'Z') or ('0' <= ch <= '9')
|
||||
|
||||
|
||||
def mangle_string(s):
|
||||
"""Port of Lean's String.mangle: escape a single string for C identifiers."""
|
||||
result = []
|
||||
for ch in s:
|
||||
if _is_ascii_alnum(ch):
|
||||
result.append(ch)
|
||||
elif ch == '_':
|
||||
result.append('__')
|
||||
else:
|
||||
code = ord(ch)
|
||||
if code < 0x100:
|
||||
result.append('_x' + format(code, '02x'))
|
||||
elif code < 0x10000:
|
||||
result.append('_u' + format(code, '04x'))
|
||||
else:
|
||||
result.append('_U' + format(code, '08x'))
|
||||
return ''.join(result)
|
||||
|
||||
|
||||
def _parse_hex(s, pos, n):
|
||||
"""Parse n lowercase hex digits at pos. Returns (new_pos, value) or None."""
|
||||
if pos + n > len(s):
|
||||
return None
|
||||
val = 0
|
||||
for i in range(n):
|
||||
c = s[pos + i]
|
||||
if '0' <= c <= '9':
|
||||
val = (val << 4) | (ord(c) - ord('0'))
|
||||
elif 'a' <= c <= 'f':
|
||||
val = (val << 4) | (ord(c) - ord('a') + 10)
|
||||
else:
|
||||
return None
|
||||
return (pos + n, val)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Name mangling (for round-trip verification)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _check_disambiguation(m):
|
||||
"""Port of Lean's checkDisambiguation: does mangled string m need a '00' prefix?"""
|
||||
pos = 0
|
||||
while pos < len(m):
|
||||
ch = m[pos]
|
||||
if ch == '_':
|
||||
pos += 1
|
||||
continue
|
||||
if ch == 'x':
|
||||
return _parse_hex(m, pos + 1, 2) is not None
|
||||
if ch == 'u':
|
||||
return _parse_hex(m, pos + 1, 4) is not None
|
||||
if ch == 'U':
|
||||
return _parse_hex(m, pos + 1, 8) is not None
|
||||
if '0' <= ch <= '9':
|
||||
return True
|
||||
return False
|
||||
# all underscores or empty
|
||||
return True
|
||||
|
||||
|
||||
def _need_disambiguation(prev_component, mangled_next):
|
||||
"""Port of Lean's needDisambiguation."""
|
||||
# Check if previous component (as a string) ends with '_'
|
||||
prev_ends_underscore = (isinstance(prev_component, str) and
|
||||
len(prev_component) > 0 and
|
||||
prev_component[-1] == '_')
|
||||
return prev_ends_underscore or _check_disambiguation(mangled_next)
|
||||
|
||||
|
||||
def mangle_name(components, prefix="l_"):
|
||||
"""
|
||||
Mangle a list of name components (str or int) into a C symbol.
|
||||
Port of Lean's Name.mangle.
|
||||
"""
|
||||
if not components:
|
||||
return prefix
|
||||
|
||||
parts = []
|
||||
prev = None
|
||||
for i, comp in enumerate(components):
|
||||
if isinstance(comp, int):
|
||||
if i == 0:
|
||||
parts.append(str(comp) + '_')
|
||||
else:
|
||||
parts.append('_' + str(comp) + '_')
|
||||
else:
|
||||
m = mangle_string(comp)
|
||||
if i == 0:
|
||||
if _check_disambiguation(m):
|
||||
parts.append('00' + m)
|
||||
else:
|
||||
parts.append(m)
|
||||
else:
|
||||
if _need_disambiguation(prev, m):
|
||||
parts.append('_00' + m)
|
||||
else:
|
||||
parts.append('_' + m)
|
||||
prev = comp
|
||||
|
||||
return prefix + ''.join(parts)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Name demangling
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def demangle_body(s):
|
||||
"""
|
||||
Demangle a string produced by Name.mangleAux (without prefix).
|
||||
Returns a list of components (str or int).
|
||||
|
||||
This is a faithful port of Lean's Name.demangleAux from NameMangling.lean.
|
||||
"""
|
||||
components = []
|
||||
length = len(s)
|
||||
|
||||
def emit(comp):
|
||||
components.append(comp)
|
||||
|
||||
def decode_num(pos, n):
|
||||
"""Parse remaining digits, emit numeric component, continue."""
|
||||
while pos < length:
|
||||
ch = s[pos]
|
||||
if '0' <= ch <= '9':
|
||||
n = n * 10 + (ord(ch) - ord('0'))
|
||||
pos += 1
|
||||
else:
|
||||
# Expect '_' (trailing underscore of numeric encoding)
|
||||
pos += 1 # skip '_'
|
||||
emit(n)
|
||||
if pos >= length:
|
||||
return pos
|
||||
# Skip separator '_' and go to name_start
|
||||
pos += 1
|
||||
return name_start(pos)
|
||||
# End of string
|
||||
emit(n)
|
||||
return pos
|
||||
|
||||
def name_start(pos):
|
||||
"""Start parsing a new name component."""
|
||||
if pos >= length:
|
||||
return pos
|
||||
ch = s[pos]
|
||||
pos += 1
|
||||
if '0' <= ch <= '9':
|
||||
# Check for '00' disambiguation
|
||||
if ch == '0' and pos < length and s[pos] == '0':
|
||||
pos += 1
|
||||
return demangle_main(pos, "", 0)
|
||||
else:
|
||||
return decode_num(pos, ord(ch) - ord('0'))
|
||||
elif ch == '_':
|
||||
return demangle_main(pos, "", 1)
|
||||
else:
|
||||
return demangle_main(pos, ch, 0)
|
||||
|
||||
def demangle_main(pos, acc, ucount):
|
||||
"""Main demangling loop."""
|
||||
while pos < length:
|
||||
ch = s[pos]
|
||||
pos += 1
|
||||
|
||||
if ch == '_':
|
||||
ucount += 1
|
||||
continue
|
||||
|
||||
if ucount % 2 == 0:
|
||||
# Even underscores: literal underscores in component name
|
||||
acc += '_' * (ucount // 2) + ch
|
||||
ucount = 0
|
||||
continue
|
||||
|
||||
# Odd ucount: separator or escape
|
||||
if '0' <= ch <= '9':
|
||||
# End current str component, start number
|
||||
emit(acc + '_' * (ucount // 2))
|
||||
if ch == '0' and pos < length and s[pos] == '0':
|
||||
pos += 1
|
||||
return demangle_main(pos, "", 0)
|
||||
else:
|
||||
return decode_num(pos, ord(ch) - ord('0'))
|
||||
|
||||
# Try hex escapes
|
||||
if ch == 'x':
|
||||
result = _parse_hex(s, pos, 2)
|
||||
if result is not None:
|
||||
new_pos, val = result
|
||||
acc += '_' * (ucount // 2) + chr(val)
|
||||
pos = new_pos
|
||||
ucount = 0
|
||||
continue
|
||||
|
||||
if ch == 'u':
|
||||
result = _parse_hex(s, pos, 4)
|
||||
if result is not None:
|
||||
new_pos, val = result
|
||||
acc += '_' * (ucount // 2) + chr(val)
|
||||
pos = new_pos
|
||||
ucount = 0
|
||||
continue
|
||||
|
||||
if ch == 'U':
|
||||
result = _parse_hex(s, pos, 8)
|
||||
if result is not None:
|
||||
new_pos, val = result
|
||||
acc += '_' * (ucount // 2) + chr(val)
|
||||
pos = new_pos
|
||||
ucount = 0
|
||||
continue
|
||||
|
||||
# Name separator
|
||||
emit(acc)
|
||||
acc = '_' * (ucount // 2) + ch
|
||||
ucount = 0
|
||||
|
||||
# End of string
|
||||
acc += '_' * (ucount // 2)
|
||||
if acc:
|
||||
emit(acc)
|
||||
return pos
|
||||
|
||||
name_start(0)
|
||||
return components
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Prefix handling for lp_ (package prefix)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _is_valid_string_mangle(s):
|
||||
"""Check if s is a valid output of String.mangle (no trailing bare _)."""
|
||||
pos = 0
|
||||
length = len(s)
|
||||
while pos < length:
|
||||
ch = s[pos]
|
||||
if _is_ascii_alnum(ch):
|
||||
pos += 1
|
||||
elif ch == '_':
|
||||
if pos + 1 >= length:
|
||||
return False # trailing bare _
|
||||
nch = s[pos + 1]
|
||||
if nch == '_':
|
||||
pos += 2
|
||||
elif nch == 'x' and _parse_hex(s, pos + 2, 2) is not None:
|
||||
pos = _parse_hex(s, pos + 2, 2)[0]
|
||||
elif nch == 'u' and _parse_hex(s, pos + 2, 4) is not None:
|
||||
pos = _parse_hex(s, pos + 2, 4)[0]
|
||||
elif nch == 'U' and _parse_hex(s, pos + 2, 8) is not None:
|
||||
pos = _parse_hex(s, pos + 2, 8)[0]
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _skip_string_mangle(s, pos):
|
||||
"""
|
||||
Skip past a String.mangle output in s starting at pos.
|
||||
Returns the position after the mangled string (where we expect the separator '_').
|
||||
This is a greedy scan.
|
||||
"""
|
||||
length = len(s)
|
||||
while pos < length:
|
||||
ch = s[pos]
|
||||
if _is_ascii_alnum(ch):
|
||||
pos += 1
|
||||
elif ch == '_':
|
||||
if pos + 1 < length:
|
||||
nch = s[pos + 1]
|
||||
if nch == '_':
|
||||
pos += 2
|
||||
elif nch == 'x' and _parse_hex(s, pos + 2, 2) is not None:
|
||||
pos = _parse_hex(s, pos + 2, 2)[0]
|
||||
elif nch == 'u' and _parse_hex(s, pos + 2, 4) is not None:
|
||||
pos = _parse_hex(s, pos + 2, 4)[0]
|
||||
elif nch == 'U' and _parse_hex(s, pos + 2, 8) is not None:
|
||||
pos = _parse_hex(s, pos + 2, 8)[0]
|
||||
else:
|
||||
return pos # bare '_': separator
|
||||
else:
|
||||
return pos
|
||||
else:
|
||||
return pos
|
||||
return pos
|
||||
|
||||
|
||||
def _find_lp_body(s):
|
||||
"""
|
||||
Given s = everything after 'lp_' in a symbol, find where the declaration
|
||||
body (Name.mangleAux output) starts.
|
||||
Returns the start index of the body within s, or None.
|
||||
|
||||
Strategy: try all candidate split points where the package part is a valid
|
||||
String.mangle output and the body round-trips. Prefer the longest valid
|
||||
package name (most specific match).
|
||||
"""
|
||||
length = len(s)
|
||||
|
||||
# Collect candidate split positions: every '_' that could be the separator
|
||||
candidates = []
|
||||
pos = 0
|
||||
while pos < length:
|
||||
if s[pos] == '_':
|
||||
candidates.append(pos)
|
||||
pos += 1
|
||||
|
||||
# Try each candidate; collect all valid splits
|
||||
valid_splits = []
|
||||
for split_pos in candidates:
|
||||
pkg_part = s[:split_pos]
|
||||
if not pkg_part:
|
||||
continue
|
||||
if not _is_valid_string_mangle(pkg_part):
|
||||
continue
|
||||
body = s[split_pos + 1:]
|
||||
if not body:
|
||||
continue
|
||||
components = demangle_body(body)
|
||||
if not components:
|
||||
continue
|
||||
remangled = mangle_name(components, prefix="")
|
||||
if remangled == body:
|
||||
first = components[0]
|
||||
# Score: prefer first component starting with uppercase
|
||||
has_upper = isinstance(first, str) and first and first[0].isupper()
|
||||
valid_splits.append((split_pos, has_upper))
|
||||
|
||||
if valid_splits:
|
||||
# Among splits where first decl component starts uppercase, pick longest pkg.
|
||||
# Otherwise pick shortest pkg.
|
||||
upper_splits = [s for s in valid_splits if s[1]]
|
||||
if upper_splits:
|
||||
best = max(upper_splits, key=lambda x: x[0])
|
||||
else:
|
||||
best = min(valid_splits, key=lambda x: x[0])
|
||||
return best[0] + 1
|
||||
|
||||
# Fallback: greedy String.mangle scan
|
||||
greedy_pos = _skip_string_mangle(s, 0)
|
||||
if greedy_pos < length and s[greedy_pos] == '_':
|
||||
return greedy_pos + 1
|
||||
|
||||
return None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Format name components for display
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def format_name(components):
|
||||
"""Format a list of name components as a dot-separated string."""
|
||||
return '.'.join(str(c) for c in components)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Human-friendly postprocessing
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Compiler-generated suffix components — exact match
|
||||
_SUFFIX_FLAGS_EXACT = {
|
||||
'_redArg': 'arity\u2193',
|
||||
'_boxed': 'boxed',
|
||||
'_impl': 'impl',
|
||||
}
|
||||
|
||||
# Compiler-generated suffix prefixes — match with optional _N index
|
||||
# e.g., _lam, _lam_0, _lam_3, _lambda_0, _closed_2
|
||||
_SUFFIX_FLAGS_PREFIX = {
|
||||
'_lam': '\u03bb',
|
||||
'_lambda': '\u03bb',
|
||||
'_elam': '\u03bb',
|
||||
'_jp': 'jp',
|
||||
'_closed': 'closed',
|
||||
}
|
||||
|
||||
|
||||
def _match_suffix(component):
|
||||
"""
|
||||
Check if a string component is a compiler-generated suffix.
|
||||
Returns the flag label or None.
|
||||
|
||||
Handles both exact matches (_redArg, _boxed) and indexed suffixes
|
||||
(_lam_0, _lambda_2, _closed_0) produced by appendIndexAfter.
|
||||
"""
|
||||
if not isinstance(component, str):
|
||||
return None
|
||||
if component in _SUFFIX_FLAGS_EXACT:
|
||||
return _SUFFIX_FLAGS_EXACT[component]
|
||||
if component in _SUFFIX_FLAGS_PREFIX:
|
||||
return _SUFFIX_FLAGS_PREFIX[component]
|
||||
# Check for indexed suffix: prefix + _N
|
||||
for prefix, label in _SUFFIX_FLAGS_PREFIX.items():
|
||||
if component.startswith(prefix + '_'):
|
||||
rest = component[len(prefix) + 1:]
|
||||
if rest.isdigit():
|
||||
return label
|
||||
return None
|
||||
|
||||
|
||||
def _strip_private(components):
|
||||
"""Strip _private.Module.0. prefix. Returns (stripped_parts, is_private)."""
|
||||
if (len(components) >= 3 and isinstance(components[0], str) and
|
||||
components[0] == '_private'):
|
||||
for i in range(1, len(components)):
|
||||
if components[i] == 0:
|
||||
if i + 1 < len(components):
|
||||
return components[i + 1:], True
|
||||
break
|
||||
return components, False
|
||||
|
||||
|
||||
def _strip_spec_suffixes(components):
|
||||
"""Strip trailing spec_N components (from appendIndexAfter)."""
|
||||
parts = list(components)
|
||||
while parts and isinstance(parts[-1], str) and parts[-1].startswith('spec_'):
|
||||
rest = parts[-1][5:]
|
||||
if rest.isdigit():
|
||||
parts.pop()
|
||||
else:
|
||||
break
|
||||
return parts
|
||||
|
||||
|
||||
def _is_spec_index(component):
|
||||
"""Check if a component is a spec_N index (from appendIndexAfter)."""
|
||||
return (isinstance(component, str) and
|
||||
component.startswith('spec_') and component[5:].isdigit())
|
||||
|
||||
|
||||
def _parse_spec_entries(rest):
|
||||
"""Parse _at_..._spec pairs into separate spec context entries.
|
||||
|
||||
Given components starting from the first _at_, returns:
|
||||
- entries: list of component lists, one per _at_..._spec block
|
||||
- remaining: components after the last _spec N (trailing suffixes)
|
||||
"""
|
||||
entries = []
|
||||
current_ctx = None
|
||||
remaining = []
|
||||
skip_next = False
|
||||
|
||||
for p in rest:
|
||||
if skip_next:
|
||||
skip_next = False
|
||||
continue
|
||||
if isinstance(p, str) and p == '_at_':
|
||||
if current_ctx is not None:
|
||||
entries.append(current_ctx)
|
||||
current_ctx = []
|
||||
continue
|
||||
if isinstance(p, str) and p == '_spec':
|
||||
if current_ctx is not None:
|
||||
entries.append(current_ctx)
|
||||
current_ctx = None
|
||||
skip_next = True
|
||||
continue
|
||||
if isinstance(p, str) and p.startswith('_spec'):
|
||||
if current_ctx is not None:
|
||||
entries.append(current_ctx)
|
||||
current_ctx = None
|
||||
continue
|
||||
if current_ctx is not None:
|
||||
current_ctx.append(p)
|
||||
else:
|
||||
remaining.append(p)
|
||||
|
||||
if current_ctx is not None:
|
||||
entries.append(current_ctx)
|
||||
|
||||
return entries, remaining
|
||||
|
||||
|
||||
def _process_spec_context(components):
    """Clean up one specialization context.

    Returns (name_parts, flags): the context components with compiler
    suffixes and spec_N indices removed, plus an order-preserving,
    deduplicated list of flag labels derived from those suffixes.
    """
    stripped, _ = _strip_private(list(components))

    names = []
    flags = []
    for comp in stripped:
        label = _match_suffix(comp)
        if label is not None:
            # Record each flag label once, in first-seen order.
            if label not in flags:
                flags.append(label)
        elif not _is_spec_index(comp):
            names.append(comp)

    return names, flags
|
||||
|
||||
|
||||
def postprocess_name(components):
    """
    Transform raw demangled components into a human-friendly display string.

    Applies:
    - Private name cleanup: _private.Module.0.Name.foo -> Name.foo [private]
    - Hygienic name cleanup: strips _@.module._hygCtx._hyg.N
    - Suffix folding: _redArg, _boxed, _lam_0, etc. -> [flags]
    - Specialization: f._at_.g._spec.N -> f spec at g
      Shown after base [flags], with context flags: spec at g[ctx_flags]
    """
    if not components:
        return ""

    parts = list(components)
    flags = []          # flag labels folded out of the base name
    spec_entries = []   # (ctx_name_parts, ctx_flags) per _at_..._spec block

    # --- Strip _private prefix ---
    parts, is_private = _strip_private(parts)

    # --- Strip hygienic suffixes: everything from _@ onward ---
    at_idx = None
    for i, p in enumerate(parts):
        if isinstance(p, str) and p.startswith('_@'):
            at_idx = i
            break
    if at_idx is not None:
        parts = parts[:at_idx]

    # --- Handle specialization: _at_ ... _spec N ---
    # Split off everything from the first _at_ marker; the parsed context
    # blocks become "spec at ..." annotations and any trailing components
    # (e.g. _boxed) rejoin the base name for suffix folding below.
    at_positions = [i for i, p in enumerate(parts)
                    if isinstance(p, str) and p == '_at_']
    if at_positions:
        first_at = at_positions[0]
        base = parts[:first_at]
        rest = parts[first_at:]

        entries, remaining = _parse_spec_entries(rest)
        for ctx_components in entries:
            ctx_name, ctx_flags = _process_spec_context(ctx_components)
            if ctx_name or ctx_flags:
                spec_entries.append((ctx_name, ctx_flags))

        parts = base + remaining

    # --- Collect suffix flags from the end ---
    # Walks backwards, folding both bare suffixes (e.g. _boxed) and
    # suffix+index pairs (e.g. _lambda, 0) into flag labels.
    # NOTE(review): because flags are appended while popping from the end,
    # they appear in reverse (outermost-first) order — confirm intended.
    while parts:
        last = parts[-1]
        flag = _match_suffix(last)
        if flag is not None:
            flags.append(flag)
            parts.pop()
        elif isinstance(last, int) and len(parts) >= 2:
            # A trailing integer may be the index of a suffix component
            # right before it (appendIndexAfter emits suffix, N pairs).
            prev_flag = _match_suffix(parts[-2])
            if prev_flag is not None:
                flags.append(prev_flag)
                parts.pop()  # remove the number
                parts.pop()  # remove the suffix
            else:
                break
        else:
            break

    if is_private:
        flags.append('private')

    # --- Format result ---
    name = '.'.join(str(c) for c in parts) if parts else '?'
    result = name
    if flags:
        flag_str = ', '.join(flags)
        result += f' [{flag_str}]'

    # Append one "spec at <context>" annotation per specialization block.
    for ctx_name, ctx_flags in spec_entries:
        ctx_str = '.'.join(str(c) for c in ctx_name) if ctx_name else '?'
        if ctx_flags:
            ctx_flag_str = ', '.join(ctx_flags)
            result += f' spec at {ctx_str}[{ctx_flag_str}]'
        else:
            result += f' spec at {ctx_str}'

    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Main demangling entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def demangle_lean_name_raw(mangled):
    """
    Demangle a Lean C symbol while keeping every internal name component.

    All compiler-generated suffixes are preserved verbatim; see
    demangle_lean_name() for the human-friendly variant. On any error
    the input is returned unchanged.
    """
    result = mangled
    try:
        result = _demangle_lean_name_inner(mangled, human_friendly=False)
    except Exception:
        pass
    return result
|
||||
|
||||
|
||||
def demangle_lean_name(mangled):
    """
    Demangle a C symbol name produced by the Lean 4 compiler.

    Compiler suffixes are folded into readable bracketed flags; use
    demangle_lean_name_raw() to keep all internal components. On any
    error the input is returned unchanged.
    """
    result = mangled
    try:
        result = _demangle_lean_name_inner(mangled, human_friendly=True)
    except Exception:
        pass
    return result
|
||||
|
||||
|
||||
def _demangle_lean_name_inner(mangled, human_friendly=True):
|
||||
"""Inner demangle that may raise on malformed input."""
|
||||
|
||||
if mangled == "_lean_main":
|
||||
return "[lean] main"
|
||||
|
||||
# Handle lean_ runtime functions
|
||||
if human_friendly and mangled.startswith("lean_apply_"):
|
||||
rest = mangled[11:]
|
||||
if rest.isdigit():
|
||||
return f"<apply/{rest}>"
|
||||
|
||||
# Strip .cold.N suffix (LLVM linker cold function clones)
|
||||
cold_suffix = ""
|
||||
core = mangled
|
||||
dot_pos = core.find('.cold.')
|
||||
if dot_pos >= 0:
|
||||
cold_suffix = " " + core[dot_pos:]
|
||||
core = core[:dot_pos]
|
||||
elif core.endswith('.cold'):
|
||||
cold_suffix = " .cold"
|
||||
core = core[:-5]
|
||||
|
||||
result = _demangle_core(core, human_friendly)
|
||||
if result is None:
|
||||
return mangled
|
||||
return result + cold_suffix
|
||||
|
||||
|
||||
def _demangle_core(mangled, human_friendly=True):
    """Demangle a symbol without .cold suffix. Returns None if not a Lean name.

    Recognizes three shapes:
    - _init_<l_/lp_ body>        -> "[init] name (pkg)"
    - initialize_<body>          -> "[module_init] name (pkg)"
    - <l_/lp_ body>              -> "name (pkg)"
    """
    # Choose the component formatter: human-friendly postprocessing or
    # exact raw formatting.
    fmt = postprocess_name if human_friendly else format_name

    # _init_ prefix
    if mangled.startswith("_init_"):
        rest = mangled[6:]  # len("_init_") == 6
        body, pkg_display = _strip_lean_prefix(rest)
        if body is None:
            return None
        components = demangle_body(body)
        if not components:
            return None
        name = fmt(components)
        if pkg_display:
            return f"[init] {name} ({pkg_display})"
        return f"[init] {name}"

    # initialize_ prefix (module init functions)
    if mangled.startswith("initialize_"):
        rest = mangled[11:]  # len("initialize_") == 11
        # With package: initialize_lp_{pkg}_{body} or initialize_l_{body}
        body, pkg_display = _strip_lean_prefix(rest)
        if body is not None:
            components = demangle_body(body)
            if components:
                name = fmt(components)
                if pkg_display:
                    return f"[module_init] {name} ({pkg_display})"
                return f"[module_init] {name}"
        # Without package: initialize_{Name.mangleAux(moduleName)}
        # (falls through here when rest has no l_/lp_ prefix)
        if rest:
            components = demangle_body(rest)
            if components:
                return f"[module_init] {fmt(components)}"
        return None

    # l_ or lp_ prefix (ordinary Lean definitions)
    body, pkg_display = _strip_lean_prefix(mangled)
    if body is None:
        return None
    components = demangle_body(body)
    if not components:
        return None
    name = fmt(components)
    if pkg_display:
        return f"{name} ({pkg_display})"
    return name
|
||||
|
||||
|
||||
def _strip_lean_prefix(s):
    """
    Remove the leading l_ / lp_ marker from a mangled symbol.

    Returns (body, pkg_display): body is the Name.mangleAux output;
    pkg_display is None (no package), a package description string, or
    '?' when the package separator could not be located. Returns
    (None, None) when the string carries neither prefix.
    """
    if s.startswith("l_"):
        return (s[len("l_"):], None)

    if not s.startswith("lp_"):
        return (None, None)

    after_lp = s[len("lp_"):]
    body_start = _find_lp_body(after_lp)
    if body_start is None:
        # Could not locate the package separator; treat it all as body.
        return (after_lp, "?")

    pkg_mangled = after_lp[:body_start - 1]
    # Demangle the package name; show it verbatim when it is not a
    # single plain string component.
    pkg_components = demangle_body(pkg_mangled)
    pkg_display = pkg_mangled
    if len(pkg_components or []) == 1 and isinstance(pkg_components[0], str):
        pkg_display = pkg_components[0]
    return (after_lp[body_start:], pkg_display)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CLI
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def main():
    """CLI entry point: demangle arguments, or filter stdin line-by-line."""
    import argparse
    arg_parser = argparse.ArgumentParser(
        description="Demangle Lean 4 C symbol names (like c++filt for Lean)")
    arg_parser.add_argument('names', nargs='*',
                            help='Names to demangle (reads stdin if none given)')
    arg_parser.add_argument('--raw', action='store_true',
                            help='Output exact demangled names without postprocessing')
    opts = arg_parser.parse_args()

    # --raw skips the human-friendly postprocessing pass.
    translate = demangle_lean_name_raw if opts.raw else demangle_lean_name

    if opts.names:
        symbols = opts.names
    else:
        symbols = (line.rstrip('\n') for line in sys.stdin)
    for symbol in symbols:
        print(translate(symbol))


if __name__ == '__main__':
    main()
|
||||
117
script/profiler/lean_demangle_profile.py
Normal file
117
script/profiler/lean_demangle_profile.py
Normal file
@@ -0,0 +1,117 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Lean name demangler for samply / Firefox Profiler profiles.
|
||||
|
||||
Reads a profile JSON (plain or gzipped), demangles Lean function names
|
||||
in the string table, and writes the result back.
|
||||
|
||||
Usage:
|
||||
python lean_demangle_profile.py profile.json -o profile-demangled.json
|
||||
python lean_demangle_profile.py profile.json.gz -o profile-demangled.json.gz
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import gzip
|
||||
import json
|
||||
import sys
|
||||
|
||||
from lean_demangle import demangle_lean_name
|
||||
|
||||
|
||||
def _demangle_string_array(string_array):
    """Demangle every Lean name in a string table, mutating it in place.

    Returns the number of entries that were rewritten.
    """
    changed = 0
    for idx, entry in enumerate(string_array):
        if isinstance(entry, str):
            replacement = demangle_lean_name(entry)
            if replacement != entry:
                string_array[idx] = replacement
                changed += 1
    return changed
|
||||
|
||||
|
||||
def rewrite_profile(profile):
    """
    Demangle Lean names in a Firefox Profiler profile dict (in-place).

    Supports both the shared string table of newer Firefox Profiler
    profiles (shared.stringArray) and the per-thread tables emitted by
    samply. Returns the total number of rewritten strings.
    """
    total = 0

    # Newer format: one shared string table under profile["shared"].
    shared_section = profile.get("shared")
    if shared_section is not None:
        table = shared_section.get("stringArray")
        if table is not None:
            total += _demangle_string_array(table)

    # samply format: each thread carries its own string table.
    for th in profile.get("threads", []):
        table = th.get("stringArray")
        if table is not None:
            total += _demangle_string_array(table)

    return total
|
||||
|
||||
|
||||
def process_profile_file(input_path, output_path):
    """Load a profile, demangle its Lean names, and write the result.

    Writes to output_path (gzipped when it ends in .gz) or, when
    output_path is falsy, to stdout as plain JSON. Returns the number
    of demangled names.
    """
    # A .gz input is transparently decompressed on read.
    reader = gzip.open if input_path.endswith('.gz') else open
    with reader(input_path, 'rt', encoding='utf-8') as src:
        profile = json.load(src)

    count = rewrite_profile(profile)

    if output_path:
        writer = gzip.open if output_path.endswith('.gz') else open
        with writer(output_path, 'wt', encoding='utf-8') as dst:
            json.dump(profile, dst, ensure_ascii=False)
    else:
        json.dump(profile, sys.stdout, ensure_ascii=False)
        sys.stdout.write('\n')

    return count
|
||||
|
||||
|
||||
def main():
    """CLI entry point: demangle a profile file, writing a file or stdout."""
    cli = argparse.ArgumentParser(
        description="Demangle Lean names in samply/Firefox Profiler profiles")
    cli.add_argument('input', help='Input profile (JSON or .json.gz)')
    cli.add_argument('-o', '--output',
                     help='Output path (default: stdout for JSON, '
                          'or input with -demangled suffix)')
    args = cli.parse_args()

    destination = args.output
    # Without -o: pipe to stdout when redirected, otherwise derive a
    # sibling filename with a -demangled suffix.
    if destination is None and sys.stdout.isatty():
        src = args.input
        if src.endswith('.json.gz'):
            destination = src[:-len('.json.gz')] + '-demangled.json.gz'
        elif src.endswith('.json'):
            destination = src[:-len('.json')] + '-demangled.json'
        else:
            destination = src + '-demangled'

    count = process_profile_file(args.input, destination)
    if destination:
        print(f"Demangled {count} names, wrote {destination}", file=sys.stderr)


if __name__ == '__main__':
    main()
|
||||
94
script/profiler/serve_profile.py
Normal file
94
script/profiler/serve_profile.py
Normal file
@@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Serve a Firefox Profiler JSON file and open it in the browser.
|
||||
|
||||
Unlike `samply load`, this does NOT provide a symbolication API,
|
||||
so Firefox Profiler will use the names already in the profile as-is.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import gzip
|
||||
import http.server
|
||||
import io
|
||||
import sys
|
||||
import threading
|
||||
import webbrowser
|
||||
import urllib.parse
|
||||
|
||||
|
||||
class ProfileHandler(http.server.BaseHTTPRequestHandler):
    """Serve the profile JSON and handle CORS for Firefox Profiler."""

    # Gzipped profile bytes to serve; populated by main() before the
    # server starts handling requests.
    profile_data = None  # set by main()

    def do_GET(self):
        # Only /profile.json is served; any other path gets a 404.
        if self.path == "/profile.json":
            self.send_response(200)
            self.send_header("Content-Type", "application/json")
            # profile_data is stored pre-gzipped, so declare the encoding.
            self.send_header("Content-Encoding", "gzip")
            # Let the profiler web app (a different origin) fetch this.
            self.send_header("Access-Control-Allow-Origin", "*")
            self.end_headers()
            self.wfile.write(self.profile_data)
        else:
            self.send_response(404)
            self.end_headers()

    def do_OPTIONS(self):
        # CORS preflight: allow cross-origin GETs with a JSON content type.
        self.send_response(200)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Access-Control-Allow-Methods", "GET")
        self.send_header("Access-Control-Allow-Headers", "Content-Type")
        self.end_headers()

    def log_message(self, format, *args):
        pass  # suppress request logs
|
||||
|
||||
|
||||
def main():
    """Serve the given profile over localhost and open Firefox Profiler."""
    cli = argparse.ArgumentParser(
        description="Serve a profile JSON for Firefox Profiler")
    cli.add_argument("profile", help="Profile file (.json or .json.gz)")
    cli.add_argument("-P", "--port", type=int, default=3457,
                     help="Port to serve on (default: 3457)")
    cli.add_argument("-n", "--no-open", action="store_true",
                     help="Do not open the browser")
    opts = cli.parse_args()

    # Load the profile; the handler always serves gzipped bytes, so
    # compress plain JSON input up front.
    with open(opts.profile, "rb") as fh:
        payload = fh.read()
    if not opts.profile.endswith(".gz"):
        payload = gzip.compress(payload)
    ProfileHandler.profile_data = payload

    http.server.HTTPServer.allow_reuse_address = True
    httpd = http.server.HTTPServer(("127.0.0.1", opts.port), ProfileHandler)
    profile_url = f"http://127.0.0.1:{opts.port}/profile.json"
    viewer_url = ("https://profiler.firefox.com/from-url/"
                  + urllib.parse.quote(profile_url, safe=""))

    if not opts.no_open:
        # Give the server a moment to come up before the browser hits it.
        threading.Timer(0.5, lambda: webbrowser.open(viewer_url)).start()

    print(f"Serving profile at {profile_url}", file=sys.stderr)
    print(f"Firefox Profiler: {viewer_url}", file=sys.stderr)
    print("Press Ctrl+C to stop.", file=sys.stderr)

    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("\nStopped.", file=sys.stderr)
        httpd.server_close()


if __name__ == "__main__":
    main()
|
||||
198
script/profiler/symbolicate_profile.py
Normal file
198
script/profiler/symbolicate_profile.py
Normal file
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Symbolicate a raw samply profile using samply's symbolication API,
|
||||
then demangle Lean names.
|
||||
|
||||
Usage:
|
||||
python symbolicate_profile.py --server http://127.0.0.1:3000/TOKEN \
|
||||
raw-profile.json.gz -o symbolicated-demangled.json.gz
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import gzip
|
||||
import json
|
||||
import sys
|
||||
import urllib.request
|
||||
|
||||
from lean_demangle import demangle_lean_name
|
||||
|
||||
|
||||
def symbolicate_and_demangle(profile, server_url):
    """
    Symbolicate a raw samply profile via the symbolication API at
    server_url, then demangle Lean names. Modifies the profile in-place.

    Returns the number of names resolved across all threads.
    """
    libraries = profile.get("libs", [])
    # The API's memoryMap pairs each library's debug name with its ID.
    mem_map = [[lib["debugName"], lib["breakpadId"]] for lib in libraries]

    resolved = 0
    for th in profile.get("threads", []):
        resolved += _process_thread(th, libraries, mem_map, server_url)
    return resolved
|
||||
|
||||
|
||||
def _process_thread(thread, libs, memory_map, server_url):
    """Symbolicate and demangle one thread. Returns count of resolved names.

    Walks the thread's frameTable -> funcTable -> resourceTable chain to
    find one (library, address) per function, batches them into a single
    symbolication request, and writes demangled names back into the
    thread's stringArray.
    """
    sa = thread.get("stringArray")
    ft = thread.get("frameTable")
    func_t = thread.get("funcTable")
    rt = thread.get("resourceTable")

    # NOTE(review): `all` also bails out when a table is present but
    # empty (falsy) — confirm that is intended vs. a None-only check.
    if not all([sa, ft, func_t, rt]):
        return 0

    # Build mapping: func_index -> (lib_index, address)
    # A function may be referenced by multiple frames; pick any address.
    func_info = {}  # func_idx -> (lib_idx, address)
    for i in range(ft.get("length", 0)):
        addr = ft["address"][i]
        func_idx = ft["func"][i]
        if func_idx in func_info:
            continue
        # Resolve the function's resource, then the resource's library;
        # skip entries whose indices fall outside their tables.
        res_idx = func_t["resource"][func_idx]
        if res_idx < 0 or res_idx >= rt.get("length", 0):
            continue
        lib_idx = rt["lib"][res_idx]
        if lib_idx < 0 or lib_idx >= len(libs):
            continue
        func_info[func_idx] = (lib_idx, addr)

    if not func_info:
        return 0

    # Batch symbolication: group by lib, send all addresses at once
    frames_to_symbolicate = []
    func_order = []  # track which func each frame corresponds to
    for func_idx, (lib_idx, addr) in func_info.items():
        frames_to_symbolicate.append([lib_idx, addr])
        func_order.append(func_idx)

    # Call the symbolication API
    symbols = _call_symbolication_api(
        server_url, memory_map, frames_to_symbolicate)

    if not symbols:
        return 0

    # Update stringArray with demangled names; symbols is aligned with
    # func_order (one entry per submitted frame).
    count = 0
    for func_idx, symbol_name in zip(func_order, symbols):
        if symbol_name is None:
            continue
        demangled = demangle_lean_name(symbol_name)
        name_idx = func_t["name"][func_idx]
        if name_idx < len(sa):
            sa[name_idx] = demangled
            count += 1

    return count
|
||||
|
||||
|
||||
def _call_symbolication_api(server_url, memory_map, frames):
    """
    Call the Firefox Profiler symbolication API v5.

    memory_map: [[debugName, breakpadId], ...] for every library.
    frames: list of [lib_index, address] pairs, submitted as one stack.

    Returns a list of symbol names aligned with frames (None for
    unresolved entries), or None when the request itself failed.
    """
    endpoint = server_url.rstrip("/") + "/symbolicate/v5"

    # All frames go into a single "stack" of a single job.
    payload = json.dumps({
        "memoryMap": memory_map,
        "stacks": [frames],
    }).encode()

    request = urllib.request.Request(
        endpoint,
        data=payload,
        headers={"Content-Type": "application/json"},
    )

    try:
        with urllib.request.urlopen(request, timeout=60) as response:
            reply = json.loads(response.read())
    except Exception as exc:
        print(f"Symbolication API error: {exc}", file=sys.stderr)
        return None

    if "error" in reply:
        print(f"Symbolication API error: {reply['error']}", file=sys.stderr)
        return None

    # Unwrap results -> first job -> first stack.
    job_results = reply.get("results", [])
    if not job_results:
        return None
    stacks = job_results[0].get("stacks", [[]])
    if not stacks:
        return None

    # Each frame result may be a dict with a "function" key, a bare
    # string, or something unresolvable.
    names = []
    for entry in stacks[0]:
        if isinstance(entry, dict):
            names.append(entry.get("function"))
        elif isinstance(entry, str):
            names.append(entry)
        else:
            names.append(None)
    return names
|
||||
|
||||
|
||||
def process_file(input_path, output_path, server_url):
    """Read a raw profile, symbolicate + demangle it, and write it back.

    Writes to output_path (gzipped when it ends in .gz) or to stdout
    when output_path is falsy. Returns the number of names resolved.
    """
    reader = gzip.open if input_path.endswith('.gz') else open
    with reader(input_path, 'rt', encoding='utf-8') as src:
        profile = json.load(src)

    count = symbolicate_and_demangle(profile, server_url)

    if output_path:
        writer = gzip.open if output_path.endswith('.gz') else open
        with writer(output_path, 'wt', encoding='utf-8') as dst:
            json.dump(profile, dst, ensure_ascii=False)
    else:
        json.dump(profile, sys.stdout, ensure_ascii=False)
        sys.stdout.write('\n')

    return count
|
||||
|
||||
|
||||
def main():
    """CLI entry point: symbolicate a raw profile and demangle Lean names."""
    cli = argparse.ArgumentParser(
        description="Symbolicate a raw samply profile and demangle Lean names")
    cli.add_argument('input', help='Raw profile (JSON or .json.gz)')
    cli.add_argument('-o', '--output', help='Output path')
    cli.add_argument('--server', required=True,
                     help='Samply server URL (e.g., http://127.0.0.1:3000/TOKEN)')
    opts = cli.parse_args()

    destination = opts.output
    if destination is None:
        # Derive a sibling filename carrying a -demangled suffix.
        src = opts.input
        if src.endswith('.json.gz'):
            destination = src[:-len('.json.gz')] + '-demangled.json.gz'
        elif src.endswith('.json'):
            destination = src[:-len('.json')] + '-demangled.json'
        else:
            destination = src + '-demangled'

    count = process_file(opts.input, destination, opts.server)
    print(f"Symbolicated and demangled {count} names, wrote {destination}",
          file=sys.stderr)


if __name__ == '__main__':
    main()
|
||||
670
script/profiler/test_demangle.py
Normal file
670
script/profiler/test_demangle.py
Normal file
@@ -0,0 +1,670 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Tests for the Lean name demangler."""
|
||||
|
||||
import unittest
|
||||
import json
|
||||
import gzip
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
from lean_demangle import (
|
||||
mangle_string, mangle_name, demangle_body, format_name,
|
||||
demangle_lean_name, demangle_lean_name_raw, postprocess_name,
|
||||
_parse_hex, _check_disambiguation,
|
||||
)
|
||||
|
||||
|
||||
class TestStringMangle(unittest.TestCase):
    """Test String.mangle (character-level escaping)."""

    def test_alphanumeric(self):
        # ASCII letters and digits pass through unchanged.
        self.assertEqual(mangle_string("hello"), "hello")
        self.assertEqual(mangle_string("abc123"), "abc123")

    def test_underscore(self):
        # Underscores are doubled so '_' can serve as the separator.
        self.assertEqual(mangle_string("a_b"), "a__b")
        self.assertEqual(mangle_string("_"), "__")
        self.assertEqual(mangle_string("__"), "____")

    def test_special_chars(self):
        # Other ASCII becomes a _xHH hex escape.
        self.assertEqual(mangle_string("."), "_x2e")
        self.assertEqual(mangle_string("a.b"), "a_x2eb")

    def test_unicode(self):
        # BMP code points use _uHHHH; astral code points use _UHHHHHHHH.
        self.assertEqual(mangle_string("\u03bb"), "_u03bb")
        self.assertEqual(mangle_string("\U0001d55c"), "_U0001d55c")

    def test_empty(self):
        self.assertEqual(mangle_string(""), "")
|
||||
|
||||
|
||||
class TestNameMangle(unittest.TestCase):
    """Test Name.mangle (hierarchical name mangling)."""

    def test_simple(self):
        self.assertEqual(mangle_name(["Lean", "Meta", "Sym", "main"]),
                         "l_Lean_Meta_Sym_main")

    def test_single_component(self):
        self.assertEqual(mangle_name(["main"]), "l_main")

    def test_numeric_component(self):
        # Numeric components render as N_ (note the trailing underscore).
        self.assertEqual(
            mangle_name(["_private", "Lean", "Meta", "Basic", 0,
                         "Lean", "Meta", "withMVarContextImp"]),
            "l___private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp")

    def test_component_with_underscore(self):
        self.assertEqual(mangle_name(["a_b"]), "l_a__b")
        self.assertEqual(mangle_name(["a_b", "c"]), "l_a__b_c")

    def test_disambiguation_digit_start(self):
        # A leading digit is disambiguated with a 00 marker.
        self.assertEqual(mangle_name(["0foo"]), "l_000foo")

    def test_disambiguation_escape_start(self):
        # Components that would read as a hex escape get a 00 marker too.
        self.assertEqual(mangle_name(["a", "x27"]), "l_a_00x27")

    def test_numeric_root(self):
        self.assertEqual(mangle_name([42]), "l_42_")
        self.assertEqual(mangle_name([42, "foo"]), "l_42__foo")

    def test_component_ending_with_underscore(self):
        self.assertEqual(mangle_name(["a_", "b"]), "l_a___00b")

    def test_custom_prefix(self):
        # lp_{pkg}_ prefixes are used for package-qualified symbols.
        self.assertEqual(mangle_name(["foo"], prefix="lp_pkg_"),
                         "lp_pkg_foo")
|
||||
|
||||
|
||||
class TestDemangleBody(unittest.TestCase):
    """Test demangle_body (the core Name.demangleAux algorithm)."""

    def test_simple(self):
        self.assertEqual(demangle_body("Lean_Meta_Sym_main"),
                         ["Lean", "Meta", "Sym", "main"])

    def test_single(self):
        self.assertEqual(demangle_body("main"), ["main"])

    def test_empty(self):
        self.assertEqual(demangle_body(""), [])

    def test_underscore_in_component(self):
        # Doubled underscores fold back into a literal '_'.
        self.assertEqual(demangle_body("a__b"), ["a_b"])
        self.assertEqual(demangle_body("a__b_c"), ["a_b", "c"])

    def test_numeric_component(self):
        # N_ in the middle of a name decodes to an integer component.
        self.assertEqual(demangle_body("foo_42__bar"), ["foo", 42, "bar"])

    def test_numeric_root(self):
        self.assertEqual(demangle_body("42_"), [42])

    def test_numeric_at_end(self):
        self.assertEqual(demangle_body("foo_42_"), ["foo", 42])

    def test_disambiguation_00(self):
        # A 00 marker protects components that would parse as escapes.
        self.assertEqual(demangle_body("a_00x27"), ["a", "x27"])

    def test_disambiguation_00_at_root(self):
        self.assertEqual(demangle_body("000foo"), ["0foo"])

    def test_hex_escape_x(self):
        self.assertEqual(demangle_body("a_x2eb"), ["a.b"])

    def test_hex_escape_u(self):
        self.assertEqual(demangle_body("_u03bb"), ["\u03bb"])

    def test_hex_escape_U(self):
        self.assertEqual(demangle_body("_U0001d55c"), ["\U0001d55c"])

    def test_private_name(self):
        body = "__private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp"
        self.assertEqual(demangle_body(body),
                         ["_private", "Lean", "Meta", "Basic", 0,
                          "Lean", "Meta", "withMVarContextImp"])

    def test_boxed_suffix(self):
        body = "foo___boxed"
        self.assertEqual(demangle_body(body), ["foo", "_boxed"])

    def test_redArg_suffix(self):
        body = "foo_bar___redArg"
        self.assertEqual(demangle_body(body), ["foo", "bar", "_redArg"])

    def test_component_ending_underscore_disambiguation(self):
        self.assertEqual(demangle_body("a___00b"), ["a_", "b"])
|
||||
|
||||
|
||||
class TestRoundTrip(unittest.TestCase):
    """Test that mangle(demangle(x)) == x for various names."""

    def _check_roundtrip(self, components):
        # Round-trip without a prefix...
        mangled = mangle_name(components, prefix="")
        demangled = demangle_body(mangled)
        self.assertEqual(demangled, components,
                         f"Round-trip failed: {components} -> '{mangled}' -> {demangled}")
        # ...and again with the standard l_ prefix stripped off.
        mangled_with_prefix = mangle_name(components, prefix="l_")
        self.assertTrue(mangled_with_prefix.startswith("l_"))
        body = mangled_with_prefix[2:]
        demangled2 = demangle_body(body)
        self.assertEqual(demangled2, components)

    def test_simple_names(self):
        self._check_roundtrip(["Lean", "Meta", "main"])
        self._check_roundtrip(["a"])
        self._check_roundtrip(["Foo", "Bar", "baz"])

    def test_numeric(self):
        self._check_roundtrip(["foo", 0, "bar"])
        self._check_roundtrip([42])
        self._check_roundtrip(["a", 1, "b", 2, "c"])

    def test_underscores(self):
        self._check_roundtrip(["_private"])
        self._check_roundtrip(["a_b", "c_d"])
        self._check_roundtrip(["_at_", "_spec"])

    def test_private_name(self):
        self._check_roundtrip(["_private", "Lean", "Meta", "Basic", 0,
                               "Lean", "Meta", "withMVarContextImp"])

    def test_boxed(self):
        self._check_roundtrip(["Lean", "Meta", "foo", "_boxed"])

    def test_redArg(self):
        self._check_roundtrip(["Lean", "Meta", "foo", "_redArg"])

    def test_specialization(self):
        self._check_roundtrip(["List", "map", "_at_", "Foo", "bar", "_spec", 3])

    def test_lambda(self):
        self._check_roundtrip(["Foo", "bar", "_lambda", 0])
        self._check_roundtrip(["Foo", "bar", "_lambda", 2])

    def test_closed(self):
        self._check_roundtrip(["myConst", "_closed", 0])

    def test_special_chars(self):
        # Escaped characters must survive the round trip as well.
        self._check_roundtrip(["a.b"])
        self._check_roundtrip(["\u03bb"])
        self._check_roundtrip(["a", "b\u2192c"])

    def test_disambiguation_cases(self):
        self._check_roundtrip(["a", "x27"])
        self._check_roundtrip(["0foo"])
        self._check_roundtrip(["a_", "b"])

    def test_complex_real_names(self):
        """Names modeled after real Lean compiler output."""
        self._check_roundtrip(
            ["Lean", "MVarId", "withContext", "_at_",
             "_private", "Lean", "Meta", "Sym", 0,
             "Lean", "Meta", "Sym", "BackwardRule", "apply",
             "_spec", 2, "_redArg", "_lambda", 0, "_boxed"])
|
||||
|
||||
|
||||
class TestDemangleRaw(unittest.TestCase):
    """Test demangle_lean_name_raw (exact demangling, no postprocessing)."""

    def test_l_prefix(self):
        self.assertEqual(
            demangle_lean_name_raw("l_Lean_Meta_Sym_main"),
            "Lean.Meta.Sym.main")

    def test_l_prefix_private(self):
        result = demangle_lean_name_raw(
            "l___private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp")
        self.assertEqual(result,
                         "_private.Lean.Meta.Basic.0.Lean.Meta.withMVarContextImp")

    def test_l_prefix_boxed(self):
        result = demangle_lean_name_raw("l_foo___boxed")
        self.assertEqual(result, "foo._boxed")

    def test_l_prefix_redArg(self):
        result = demangle_lean_name_raw(
            "l___private_Lean_Meta_Basic_0__Lean_Meta_withMVarContextImp___redArg")
        self.assertEqual(
            result,
            "_private.Lean.Meta.Basic.0.Lean.Meta.withMVarContextImp._redArg")

    def test_lean_main(self):
        self.assertEqual(demangle_lean_name_raw("_lean_main"), "[lean] main")

    def test_non_lean_names(self):
        # Symbols without a recognized Lean prefix pass through unchanged.
        self.assertEqual(demangle_lean_name_raw("printf"), "printf")
        self.assertEqual(demangle_lean_name_raw("malloc"), "malloc")
        self.assertEqual(demangle_lean_name_raw("lean_apply_5"), "lean_apply_5")
        self.assertEqual(demangle_lean_name_raw(""), "")

    def test_init_prefix(self):
        result = demangle_lean_name_raw("_init_l_Lean_Meta_foo")
        self.assertEqual(result, "[init] Lean.Meta.foo")

    def test_lp_prefix_simple(self):
        # "lp_<pkg>_" prefix carries a package name, rendered as " (pkg)".
        mangled = mangle_name(["Lean", "Meta", "foo"], prefix="lp_std_")
        self.assertEqual(mangled, "lp_std_Lean_Meta_foo")
        result = demangle_lean_name_raw(mangled)
        self.assertEqual(result, "Lean.Meta.foo (std)")

    def test_lp_prefix_underscore_pkg(self):
        # Underscores inside the package name are escaped as "__".
        pkg_mangled = mangle_string("my_pkg")
        self.assertEqual(pkg_mangled, "my__pkg")
        mangled = mangle_name(["Lean", "Meta", "foo"],
                              prefix=f"lp_{pkg_mangled}_")
        self.assertEqual(mangled, "lp_my__pkg_Lean_Meta_foo")
        result = demangle_lean_name_raw(mangled)
        self.assertEqual(result, "Lean.Meta.foo (my_pkg)")

    def test_lp_prefix_private_decl(self):
        mangled = mangle_name(
            ["_private", "X", 0, "Y", "foo"], prefix="lp_pkg_")
        self.assertEqual(mangled, "lp_pkg___private_X_0__Y_foo")
        result = demangle_lean_name_raw(mangled)
        self.assertEqual(result, "_private.X.0.Y.foo (pkg)")

    def test_complex_specialization(self):
        components = [
            "Lean", "MVarId", "withContext", "_at_",
            "_private", "Lean", "Meta", "Sym", 0,
            "Lean", "Meta", "Sym", "BackwardRule", "apply",
            "_spec", 2, "_redArg", "_lambda", 0, "_boxed"
        ]
        mangled = mangle_name(components)
        result = demangle_lean_name_raw(mangled)
        expected = format_name(components)
        self.assertEqual(result, expected)

    def test_cold_suffix(self):
        # Linker-added ".cold.N" suffixes are kept, separated by a space.
        result = demangle_lean_name_raw("l_Lean_Meta_foo___redArg.cold.1")
        self.assertEqual(result, "Lean.Meta.foo._redArg .cold.1")

    def test_cold_suffix_plain(self):
        result = demangle_lean_name_raw("l_Lean_Meta_foo.cold")
        self.assertEqual(result, "Lean.Meta.foo .cold")

    def test_initialize_no_pkg(self):
        result = demangle_lean_name_raw("initialize_Init_Control_Basic")
        self.assertEqual(result, "[module_init] Init.Control.Basic")

    def test_initialize_with_l_prefix(self):
        result = demangle_lean_name_raw("initialize_l_Lean_Meta_foo")
        self.assertEqual(result, "[module_init] Lean.Meta.foo")

    def test_never_crashes(self):
        """Demangling should never raise, just return the original."""
        weird_inputs = [
            "", "l_", "lp_", "lp_x", "_init_", "initialize_",
            "l_____", "lp____", "l_00", "l_0",
            "some random string", "l_ space",
        ]
        for inp in weird_inputs:
            result = demangle_lean_name_raw(inp)
            self.assertIsInstance(result, str)
class TestPostprocess(unittest.TestCase):
    """Test postprocess_name (human-friendly suffix folding, etc.)."""

    def test_no_change(self):
        self.assertEqual(postprocess_name(["Lean", "Meta", "main"]),
                         "Lean.Meta.main")

    def test_boxed(self):
        self.assertEqual(postprocess_name(["foo", "_boxed"]),
                         "foo [boxed]")

    def test_redArg(self):
        self.assertEqual(postprocess_name(["foo", "bar", "_redArg"]),
                         "foo.bar [arity\u2193]")

    def test_lambda_separate(self):
        # _lam as separate component + numeric index
        self.assertEqual(postprocess_name(["foo", "_lam", 0]),
                         "foo [\u03bb]")

    def test_lambda_indexed(self):
        # _lam_0 as single string (appendIndexAfter)
        self.assertEqual(postprocess_name(["foo", "_lam_0"]),
                         "foo [\u03bb]")
        self.assertEqual(postprocess_name(["foo", "_lambda_2"]),
                         "foo [\u03bb]")

    def test_lambda_boxed(self):
        # _lam_0 followed by _boxed
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "Simp", "simpLambda",
                              "_lam_0", "_boxed"]),
            "Lean.Meta.Simp.simpLambda [boxed, \u03bb]")

    def test_closed(self):
        self.assertEqual(postprocess_name(["myConst", "_closed", 3]),
                         "myConst [closed]")

    def test_closed_indexed(self):
        self.assertEqual(postprocess_name(["myConst", "_closed_0"]),
                         "myConst [closed]")

    def test_multiple_suffixes(self):
        self.assertEqual(postprocess_name(["foo", "_redArg", "_boxed"]),
                         "foo [boxed, arity\u2193]")

    def test_redArg_lam(self):
        # _redArg followed by _lam_0 (issue #4)
        self.assertEqual(
            postprocess_name(["Lean", "profileitIOUnsafe",
                              "_redArg", "_lam_0"]),
            "Lean.profileitIOUnsafe [\u03bb, arity\u2193]")

    def test_private_name(self):
        self.assertEqual(
            postprocess_name(["_private", "Lean", "Meta", "Basic", 0,
                              "Lean", "Meta", "withMVarContextImp"]),
            "Lean.Meta.withMVarContextImp [private]")

    def test_private_with_suffix(self):
        self.assertEqual(
            postprocess_name(["_private", "Lean", "Meta", "Basic", 0,
                              "Lean", "Meta", "foo", "_redArg"]),
            "Lean.Meta.foo [arity\u2193, private]")

    def test_hygienic_strip(self):
        # Macro-hygiene markers ("_@", "_hyg", index) are dropped entirely.
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "foo", "_@", "Lean", "Meta",
                              "_hyg", 42]),
            "Lean.Meta.foo")

    def test_specialization(self):
        self.assertEqual(
            postprocess_name(["List", "map", "_at_", "Foo", "bar",
                              "_spec", 3]),
            "List.map spec at Foo.bar")

    def test_specialization_with_suffix(self):
        # Base suffix _boxed appears in [flags] before spec at
        self.assertEqual(
            postprocess_name(["Lean", "MVarId", "withContext", "_at_",
                              "Foo", "bar", "_spec", 2, "_boxed"]),
            "Lean.MVarId.withContext [boxed] spec at Foo.bar")

    def test_spec_context_with_flags(self):
        # Compiler suffixes in spec context become context flags
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "foo", "_at_",
                              "Lean", "Meta", "bar", "_elam_1", "_redArg",
                              "_spec", 2]),
            "Lean.Meta.foo spec at Lean.Meta.bar[\u03bb, arity\u2193]")

    def test_spec_context_flags_dedup(self):
        # Duplicate flag labels are deduplicated
        self.assertEqual(
            postprocess_name(["f", "_at_",
                              "g", "_lam_0", "_elam_1", "_redArg",
                              "_spec", 1]),
            "f spec at g[\u03bb, arity\u2193]")

    def test_multiple_at(self):
        # Multiple _at_ entries become separate spec at clauses
        self.assertEqual(
            postprocess_name(["f", "_at_", "g", "_spec", 1,
                              "_at_", "h", "_spec", 2]),
            "f spec at g spec at h")

    def test_multiple_at_with_flags(self):
        # Multiple spec at with flags on base and contexts
        self.assertEqual(
            postprocess_name(["f", "_at_", "g", "_redArg", "_spec", 1,
                              "_at_", "h", "_lam_0", "_spec", 2,
                              "_boxed"]),
            "f [boxed] spec at g[arity\u2193] spec at h[\u03bb]")

    def test_base_flags_before_spec(self):
        # Base trailing suffixes appear in [flags] before spec at
        self.assertEqual(
            postprocess_name(["f", "_at_", "g", "_spec", 1, "_lam_0"]),
            "f [\u03bb] spec at g")

    def test_spec_context_strip_spec_suffixes(self):
        # spec_0 in context should be stripped
        self.assertEqual(
            postprocess_name(["Lean", "Meta", "transformWithCache", "visit",
                              "_at_",
                              "_private", "Lean", "Meta", "Transform", 0,
                              "Lean", "Meta", "transform",
                              "Lean", "Meta", "Sym", "unfoldReducible",
                              "spec_0", "spec_0",
                              "_spec", 1]),
            "Lean.Meta.transformWithCache.visit "
            "spec at Lean.Meta.transform.Lean.Meta.Sym.unfoldReducible")

    def test_spec_context_strip_private(self):
        # _private in spec context should be stripped
        self.assertEqual(
            postprocess_name(["Array", "mapMUnsafe", "map", "_at_",
                              "_private", "Lean", "Meta", "Transform", 0,
                              "Lean", "Meta", "transformWithCache", "visit",
                              "_spec", 1]),
            "Array.mapMUnsafe.map "
            "spec at Lean.Meta.transformWithCache.visit")

    def test_empty(self):
        self.assertEqual(postprocess_name([]), "")
class TestDemangleHumanFriendly(unittest.TestCase):
    """Test demangle_lean_name (human-friendly output)."""

    def test_simple(self):
        self.assertEqual(demangle_lean_name("l_Lean_Meta_main"),
                         "Lean.Meta.main")

    def test_boxed(self):
        self.assertEqual(demangle_lean_name("l_foo___boxed"),
                         "foo [boxed]")

    def test_redArg(self):
        self.assertEqual(demangle_lean_name("l_foo___redArg"),
                         "foo [arity\u2193]")

    def test_private(self):
        self.assertEqual(
            demangle_lean_name(
                "l___private_Lean_Meta_Basic_0__Lean_Meta_foo"),
            "Lean.Meta.foo [private]")

    def test_private_with_redArg(self):
        self.assertEqual(
            demangle_lean_name(
                "l___private_Lean_Meta_Basic_0__Lean_Meta_foo___redArg"),
            "Lean.Meta.foo [arity\u2193, private]")

    def test_cold_with_suffix(self):
        self.assertEqual(
            demangle_lean_name("l_Lean_Meta_foo___redArg.cold.1"),
            "Lean.Meta.foo [arity\u2193] .cold.1")

    def test_lean_apply(self):
        # The human-friendly path rewrites runtime apply trampolines.
        self.assertEqual(demangle_lean_name("lean_apply_5"), "<apply/5>")
        self.assertEqual(demangle_lean_name("lean_apply_12"), "<apply/12>")

    def test_lean_apply_raw_unchanged(self):
        # The raw path leaves the same symbol untouched.
        self.assertEqual(demangle_lean_name_raw("lean_apply_5"),
                         "lean_apply_5")

    def test_init_private(self):
        self.assertEqual(
            demangle_lean_name(
                "_init_l___private_X_0__Y_foo"),
            "[init] Y.foo [private]")

    def test_complex_specialization(self):
        components = [
            "Lean", "MVarId", "withContext", "_at_",
            "_private", "Lean", "Meta", "Sym", 0,
            "Lean", "Meta", "Sym", "BackwardRule", "apply",
            "_spec", 2, "_redArg", "_lambda", 0, "_boxed"
        ]
        mangled = mangle_name(components)
        result = demangle_lean_name(mangled)
        # Base: Lean.MVarId.withContext with trailing _redArg, _lambda 0, _boxed
        # Spec context: Lean.Meta.Sym.BackwardRule.apply (private stripped)
        self.assertEqual(
            result,
            "Lean.MVarId.withContext [boxed, \u03bb, arity\u2193] "
            "spec at Lean.Meta.Sym.BackwardRule.apply")

    def test_non_lean_unchanged(self):
        self.assertEqual(demangle_lean_name("printf"), "printf")
        self.assertEqual(demangle_lean_name("malloc"), "malloc")
        self.assertEqual(demangle_lean_name(""), "")
class TestDemangleProfile(unittest.TestCase):
    """Test the profile rewriter."""

    def _make_profile_shared(self, strings):
        """Create a profile with shared.stringArray (newer format)."""
        return {
            "meta": {"version": 28},
            "libs": [],
            "shared": {
                "stringArray": list(strings),
            },
            "threads": [{
                "name": "main",
                "pid": "1",
                "tid": 1,
                "funcTable": {
                    # One func per string; every name index points at the
                    # corresponding entry in the string array.
                    "name": list(range(len(strings))),
                    "isJS": [False] * len(strings),
                    "relevantForJS": [False] * len(strings),
                    "resource": [-1] * len(strings),
                    "fileName": [None] * len(strings),
                    "lineNumber": [None] * len(strings),
                    "columnNumber": [None] * len(strings),
                    "length": len(strings),
                },
                "frameTable": {"length": 0},
                "stackTable": {"length": 0},
                "samples": {"length": 0},
                "markers": {"length": 0},
                "resourceTable": {"length": 0},
                "nativeSymbols": {"length": 0},
            }],
            "pages": [],
            "counters": [],
        }

    def _make_profile_per_thread(self, strings):
        """Create a profile with per-thread stringArray (samply format)."""
        return {
            "meta": {"version": 28},
            "libs": [],
            "threads": [{
                "name": "main",
                "pid": "1",
                "tid": 1,
                "stringArray": list(strings),
                "funcTable": {
                    "name": list(range(len(strings))),
                    "isJS": [False] * len(strings),
                    "relevantForJS": [False] * len(strings),
                    "resource": [-1] * len(strings),
                    "fileName": [None] * len(strings),
                    "lineNumber": [None] * len(strings),
                    "columnNumber": [None] * len(strings),
                    "length": len(strings),
                },
                "frameTable": {"length": 0},
                "stackTable": {"length": 0},
                "samples": {"length": 0},
                "markers": {"length": 0},
                "resourceTable": {"length": 0},
                "nativeSymbols": {"length": 0},
            }],
            "pages": [],
            "counters": [],
        }

    def test_profile_rewrite_shared(self):
        from lean_demangle_profile import rewrite_profile
        strings = [
            "l_Lean_Meta_Sym_main",
            "printf",
            "lean_apply_5",
            "l___private_Lean_Meta_Basic_0__Lean_Meta_foo",
        ]
        profile = self._make_profile_shared(strings)
        rewrite_profile(profile)
        sa = profile["shared"]["stringArray"]
        self.assertEqual(sa[0], "Lean.Meta.Sym.main")
        self.assertEqual(sa[1], "printf")
        self.assertEqual(sa[2], "<apply/5>")
        self.assertEqual(sa[3], "Lean.Meta.foo [private]")

    def test_profile_rewrite_per_thread(self):
        from lean_demangle_profile import rewrite_profile
        strings = [
            "l_Lean_Meta_Sym_main",
            "printf",
            "lean_apply_5",
            "l___private_Lean_Meta_Basic_0__Lean_Meta_foo",
        ]
        profile = self._make_profile_per_thread(strings)
        count = rewrite_profile(profile)
        sa = profile["threads"][0]["stringArray"]
        self.assertEqual(sa[0], "Lean.Meta.Sym.main")
        self.assertEqual(sa[1], "printf")
        self.assertEqual(sa[2], "<apply/5>")
        self.assertEqual(sa[3], "Lean.Meta.foo [private]")
        # Three of the four strings were rewritten ("printf" is untouched).
        self.assertEqual(count, 3)

    def test_profile_json_roundtrip(self):
        from lean_demangle_profile import process_profile_file
        strings = ["l_Lean_Meta_main", "malloc"]
        profile = self._make_profile_shared(strings)

        with tempfile.NamedTemporaryFile(mode='w', suffix='.json',
                                         delete=False) as f:
            json.dump(profile, f)
            inpath = f.name

        outpath = inpath.replace('.json', '-demangled.json')
        try:
            process_profile_file(inpath, outpath)
            with open(outpath) as f:
                result = json.load(f)
            self.assertEqual(result["shared"]["stringArray"][0],
                             "Lean.Meta.main")
            self.assertEqual(result["shared"]["stringArray"][1], "malloc")
        finally:
            os.unlink(inpath)
            if os.path.exists(outpath):
                os.unlink(outpath)

    def test_profile_gzip_roundtrip(self):
        from lean_demangle_profile import process_profile_file
        strings = ["l_Lean_Meta_main", "malloc"]
        profile = self._make_profile_shared(strings)

        with tempfile.NamedTemporaryFile(suffix='.json.gz',
                                         delete=False) as f:
            with gzip.open(f, 'wt') as gz:
                json.dump(profile, gz)
            inpath = f.name

        outpath = inpath.replace('.json.gz', '-demangled.json.gz')
        try:
            process_profile_file(inpath, outpath)
            with gzip.open(outpath, 'rt') as f:
                result = json.load(f)
            self.assertEqual(result["shared"]["stringArray"][0],
                             "Lean.Meta.main")
        finally:
            os.unlink(inpath)
            if os.path.exists(outpath):
                os.unlink(outpath)
if __name__ == '__main__':
    # Run all test cases in this module when executed as a script.
    unittest.main()
@@ -836,6 +836,14 @@ def main():
|
||||
continue
|
||||
print(f" ✅ On compatible toolchain (>= {toolchain})")
|
||||
|
||||
# For reference-manual, check that the release notes title is correct BEFORE tagging.
|
||||
# This catches the case where the toolchain bump PR was merged without updating
|
||||
# the release notes title (e.g., still showing "-rc1" for a stable release).
|
||||
if name == "reference-manual":
|
||||
if not check_reference_manual_release_title(url, toolchain, branch, github_token):
|
||||
repo_status[name] = False
|
||||
continue
|
||||
|
||||
# Special handling for ProofWidgets4
|
||||
if name == "ProofWidgets4":
|
||||
if not check_proofwidgets4_release(url, toolchain, github_token):
|
||||
|
||||
@@ -65,13 +65,6 @@ repositories:
|
||||
branch: master
|
||||
dependencies: [lean4-unicode-basic]
|
||||
|
||||
- name: doc-gen4
|
||||
url: https://github.com/leanprover/doc-gen4
|
||||
toolchain-tag: true
|
||||
stable-branch: false
|
||||
branch: main
|
||||
dependencies: [lean4-cli, BibtexQuery]
|
||||
|
||||
- name: reference-manual
|
||||
url: https://github.com/leanprover/reference-manual
|
||||
toolchain-tag: true
|
||||
@@ -84,8 +77,7 @@ repositories:
|
||||
toolchain-tag: false
|
||||
stable-branch: false
|
||||
branch: main
|
||||
dependencies:
|
||||
- batteries
|
||||
dependencies: []
|
||||
|
||||
- name: aesop
|
||||
url: https://github.com/leanprover-community/aesop
|
||||
@@ -107,10 +99,16 @@ repositories:
|
||||
- lean4checker
|
||||
- batteries
|
||||
- lean4-cli
|
||||
- doc-gen4
|
||||
- import-graph
|
||||
- plausible
|
||||
|
||||
- name: doc-gen4
|
||||
url: https://github.com/leanprover/doc-gen4
|
||||
toolchain-tag: true
|
||||
stable-branch: false
|
||||
branch: main
|
||||
dependencies: [lean4-cli, BibtexQuery, mathlib4]
|
||||
|
||||
- name: cslib
|
||||
url: https://github.com/leanprover/cslib
|
||||
toolchain-tag: true
|
||||
|
||||
@@ -24,6 +24,7 @@ What this script does:
|
||||
- Safety checks for repositories using bump branches
|
||||
- Custom build and test procedures
|
||||
- lean-fro.org: runs scripts/update.sh to regenerate site content
|
||||
- mathlib4: updates ProofWidgets4 pin (v0.0.X sequential tags, not v4.X.Y)
|
||||
|
||||
6. Commits the changes with message "chore: bump toolchain to {version}"
|
||||
|
||||
@@ -59,6 +60,8 @@ import re
|
||||
import subprocess
|
||||
import shutil
|
||||
import json
|
||||
import requests
|
||||
import base64
|
||||
from pathlib import Path
|
||||
|
||||
# Color functions for terminal output
|
||||
@@ -115,6 +118,60 @@ def find_repo(repo_name, config):
|
||||
sys.exit(1)
|
||||
return matching_repos[0]
|
||||
|
||||
def get_github_token():
|
||||
try:
|
||||
result = subprocess.run(['gh', 'auth', 'token'], capture_output=True, text=True)
|
||||
if result.returncode == 0:
|
||||
return result.stdout.strip()
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def find_proofwidgets_tag(version):
|
||||
"""Find the latest ProofWidgets4 tag that uses the given toolchain version.
|
||||
|
||||
ProofWidgets4 uses sequential version tags (v0.0.X) rather than toolchain-based tags.
|
||||
This function finds the most recent tag whose lean-toolchain matches the target version
|
||||
exactly, checking the 20 most recent tags.
|
||||
"""
|
||||
github_token = get_github_token()
|
||||
api_base = "https://api.github.com/repos/leanprover-community/ProofWidgets4"
|
||||
headers = {'Authorization': f'token {github_token}'} if github_token else {}
|
||||
|
||||
response = requests.get(f"{api_base}/git/matching-refs/tags/v0.0.", headers=headers, timeout=30)
|
||||
if response.status_code != 200:
|
||||
return None
|
||||
|
||||
tags = response.json()
|
||||
tag_names = []
|
||||
for tag in tags:
|
||||
ref = tag['ref']
|
||||
if ref.startswith('refs/tags/v0.0.'):
|
||||
tag_name = ref.replace('refs/tags/', '')
|
||||
try:
|
||||
version_num = int(tag_name.split('.')[-1])
|
||||
tag_names.append((version_num, tag_name))
|
||||
except (ValueError, IndexError):
|
||||
continue
|
||||
|
||||
if not tag_names:
|
||||
return None
|
||||
|
||||
# Sort by version number (descending) and check recent tags
|
||||
tag_names.sort(reverse=True)
|
||||
target = f"leanprover/lean4:{version}"
|
||||
for _, tag_name in tag_names[:20]:
|
||||
# Fetch lean-toolchain for this tag
|
||||
api_url = f"{api_base}/contents/lean-toolchain?ref={tag_name}"
|
||||
resp = requests.get(api_url, headers=headers, timeout=30)
|
||||
if resp.status_code != 200:
|
||||
continue
|
||||
content = base64.b64decode(resp.json().get("content", "").replace("\n", "")).decode('utf-8').strip()
|
||||
if content == target:
|
||||
return tag_name
|
||||
|
||||
return None
|
||||
|
||||
def setup_downstream_releases_dir():
|
||||
"""Create the downstream_releases directory if it doesn't exist."""
|
||||
downstream_dir = Path("downstream_releases")
|
||||
@@ -426,6 +483,62 @@ def execute_release_steps(repo, version, config):
|
||||
run_command(f'perl -pi -e \'s/"v4\\.[0-9]+(\\.[0-9]+)?(-rc[0-9]+)?"/"' + version + '"/g\' lakefile.*', cwd=repo_path)
|
||||
run_command("lake update", cwd=repo_path, stream_output=True)
|
||||
|
||||
# For reference-manual, update the release notes title to match the target version.
|
||||
# e.g., for a stable release, change "Lean 4.28.0-rc1 (date)" to "Lean 4.28.0 (date)"
|
||||
# e.g., for rc2, change "Lean 4.28.0-rc1 (date)" to "Lean 4.28.0-rc2 (date)"
|
||||
if repo_name == "reference-manual":
|
||||
base_version = version.lstrip('v').split('-')[0] # "4.28.0"
|
||||
file_name = f"v{base_version.replace('.', '_')}.lean"
|
||||
release_notes_file = repo_path / "Manual" / "Releases" / file_name
|
||||
|
||||
if release_notes_file.exists():
|
||||
is_rc = "-rc" in version
|
||||
if is_rc:
|
||||
# For RC releases, update to the exact RC version
|
||||
display_version = version.lstrip('v') # "4.28.0-rc2"
|
||||
else:
|
||||
# For stable releases, strip any RC suffix
|
||||
display_version = base_version # "4.28.0"
|
||||
|
||||
print(blue(f"Updating release notes title in {file_name}..."))
|
||||
content = release_notes_file.read_text()
|
||||
# Match the #doc line title: "Lean X.Y.Z-rcN (date)" or "Lean X.Y.Z (date)"
|
||||
new_content = re.sub(
|
||||
r'(#doc\s+\(Manual\)\s+"Lean\s+)\d+\.\d+\.\d+(-rc\d+)?(\s+\([^)]*\)"\s*=>)',
|
||||
rf'\g<1>{display_version}\3',
|
||||
content
|
||||
)
|
||||
if new_content != content:
|
||||
release_notes_file.write_text(new_content)
|
||||
print(green(f"Updated release notes title to Lean {display_version}"))
|
||||
else:
|
||||
print(green("Release notes title already correct"))
|
||||
else:
|
||||
print(yellow(f"Release notes file {file_name} not found, skipping title update"))
|
||||
|
||||
# For mathlib4, update ProofWidgets4 pin (it uses sequential v0.0.X tags, not v4.X.Y)
|
||||
if repo_name == "mathlib4":
|
||||
print(blue("Checking ProofWidgets4 version pin..."))
|
||||
pw_tag = find_proofwidgets_tag(version)
|
||||
if pw_tag:
|
||||
print(blue(f"Updating ProofWidgets4 pin to {pw_tag}..."))
|
||||
for lakefile in repo_path.glob("lakefile.*"):
|
||||
content = lakefile.read_text()
|
||||
# Only update the ProofWidgets4 dependency line, not other v0.0.X pins
|
||||
new_content = re.sub(
|
||||
r'(require\s+"leanprover-community"\s*/\s*"proofwidgets"\s*@\s*git\s+"v)0\.0\.\d+(")',
|
||||
rf'\g<1>{pw_tag.removeprefix("v")}\2',
|
||||
content
|
||||
)
|
||||
if new_content != content:
|
||||
lakefile.write_text(new_content)
|
||||
print(green(f"Updated ProofWidgets4 pin in {lakefile.name}"))
|
||||
run_command("lake update proofwidgets", cwd=repo_path, stream_output=True)
|
||||
print(green(f"Updated ProofWidgets4 to {pw_tag}"))
|
||||
else:
|
||||
print(yellow(f"Could not find a ProofWidgets4 tag for toolchain {version}"))
|
||||
print(yellow("You may need to update the ProofWidgets4 pin manually"))
|
||||
|
||||
# Commit changes (only if there are changes)
|
||||
print(blue("Checking for changes to commit..."))
|
||||
try:
|
||||
|
||||
@@ -10,7 +10,7 @@ endif()
|
||||
include(ExternalProject)
|
||||
project(LEAN CXX C)
|
||||
set(LEAN_VERSION_MAJOR 4)
|
||||
set(LEAN_VERSION_MINOR 29)
|
||||
set(LEAN_VERSION_MINOR 30)
|
||||
set(LEAN_VERSION_PATCH 0)
|
||||
set(LEAN_VERSION_IS_RELEASE 0) # This number is 1 in the release revision, and 0 otherwise.
|
||||
set(LEAN_SPECIAL_VERSION_DESC "" CACHE STRING "Additional version description like 'nightly-2018-03-11'")
|
||||
|
||||
@@ -30,6 +30,8 @@ namespace ExceptT
|
||||
simp [run] at h
|
||||
assumption
|
||||
|
||||
@[simp] theorem stM_eq [Monad m] : stM m (ExceptT ε m) α = Except ε α := rfl
|
||||
|
||||
@[simp, grind =] theorem run_mk (x : m (Except ε α)) : run (mk x : ExceptT ε m α) = x := rfl
|
||||
|
||||
@[simp, grind =] theorem run_pure [Monad m] (x : α) : run (pure x : ExceptT ε m α) = pure (Except.ok x) := rfl
|
||||
@@ -118,7 +120,7 @@ instance [Monad m] [LawfulMonad m] : LawfulMonad (ExceptT ε m) where
|
||||
|
||||
@[simp] theorem run_controlAt [Monad m] [LawfulMonad m] (f : ({β : Type u} → ExceptT ε m β → m (stM m (ExceptT ε m) β)) → m (stM m (ExceptT ε m) α)) :
|
||||
ExceptT.run (controlAt m f) = f fun x => x.run := by
|
||||
simp [controlAt, run_bind, bind_map_left]
|
||||
simp [controlAt, run_bind]
|
||||
|
||||
@[simp] theorem run_control [Monad m] [LawfulMonad m] (f : ({β : Type u} → ExceptT ε m β → m (stM m (ExceptT ε m) β)) → m (stM m (ExceptT ε m) α)) :
|
||||
ExceptT.run (control f) = f fun x => x.run := run_controlAt f
|
||||
@@ -256,6 +258,7 @@ instance [Monad m] [LawfulMonad m] : LawfulMonad (OptionT m) where
|
||||
rw [← bind_pure_comp]
|
||||
rfl
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp] theorem run_controlAt [Monad m] [LawfulMonad m] (f : ({β : Type u} → OptionT m β → m (stM m (OptionT m) β)) → m (stM m (OptionT m) α)) :
|
||||
OptionT.run (controlAt m f) = f fun x => x.run := by
|
||||
simp [controlAt, Option.elimM, Option.elim]
|
||||
@@ -343,6 +346,7 @@ instance [Monad m] [LawfulMonad m] : LawfulMonad (ReaderT ρ m) where
|
||||
ReaderT.run (liftWith f) ctx = (f fun x => x.run ctx) :=
|
||||
rfl
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp] theorem run_controlAt [Monad m] [LawfulMonad m] (f : ({β : Type u} → ReaderT ρ m β → m (stM m (ReaderT ρ m) β)) → m (stM m (ReaderT ρ m) α)) (ctx : ρ) :
|
||||
ReaderT.run (controlAt m f) ctx = f fun x => x.run ctx := by
|
||||
simp [controlAt]
|
||||
@@ -443,6 +447,7 @@ instance [Monad m] [LawfulMonad m] : LawfulMonad (StateT σ m) where
|
||||
StateT.run (liftWith f) s = ((·, s) <$> f fun x => x.run s) := by
|
||||
simp [liftWith, MonadControl.liftWith, Function.comp_def]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp] theorem run_controlAt [Monad m] [LawfulMonad m] (f : ({β : Type u} → StateT σ m β → m (stM m (StateT σ m) β)) → m (stM m (StateT σ m) α)) (s : σ) :
|
||||
StateT.run (controlAt m f) s = f fun x => x.run s := by
|
||||
simp [controlAt]
|
||||
|
||||
@@ -15,7 +15,8 @@ public import Init.Ext
|
||||
public instance [Monad m] [LawfulMonad m] [MonadAttach m] [WeaklyLawfulMonadAttach m] :
|
||||
WeaklyLawfulMonadAttach (ReaderT ρ m) where
|
||||
map_attach := by
|
||||
simp only [Functor.map, MonadAttach.attach, Functor.map_map, WeaklyLawfulMonadAttach.map_attach]
|
||||
simp only [Functor.map, MonadAttach.attach, Functor.map_map, WeaklyLawfulMonadAttach.map_attach,
|
||||
MonadAttach.CanReturn]
|
||||
intros; rfl
|
||||
|
||||
public instance [Monad m] [LawfulMonad m] [MonadAttach m] [LawfulMonadAttach m] :
|
||||
@@ -30,7 +31,7 @@ public instance [Monad m] [LawfulMonad m] [MonadAttach m] [WeaklyLawfulMonadAtta
|
||||
map_attach := by
|
||||
intro α x
|
||||
simp only [Functor.map, StateT, funext_iff, StateT.map, bind_pure_comp, MonadAttach.attach,
|
||||
Functor.map_map]
|
||||
Functor.map_map, MonadAttach.CanReturn]
|
||||
exact fun s => WeaklyLawfulMonadAttach.map_attach
|
||||
|
||||
public instance [Monad m] [LawfulMonad m] [MonadAttach m] [LawfulMonadAttach m] :
|
||||
@@ -45,7 +46,7 @@ public instance [Monad m] [LawfulMonad m] [MonadAttach m] [LawfulMonadAttach m]
|
||||
public instance [Monad m] [LawfulMonad m] [MonadAttach m] [WeaklyLawfulMonadAttach m] :
|
||||
WeaklyLawfulMonadAttach (ExceptT ε m) where
|
||||
map_attach {α} x := by
|
||||
simp only [Functor.map, MonadAttach.attach, ExceptT.map]
|
||||
simp only [Functor.map, MonadAttach.attach, ExceptT.map, MonadAttach.CanReturn]
|
||||
simp
|
||||
conv => rhs; rw [← WeaklyLawfulMonadAttach.map_attach (m := m) (x := x)]
|
||||
simp only [map_eq_pure_bind]
|
||||
@@ -83,6 +84,6 @@ attribute [local instance] MonadAttach.trivial
|
||||
|
||||
public instance [Monad m] [LawfulMonad m] :
|
||||
WeaklyLawfulMonadAttach m where
|
||||
map_attach := by simp [MonadAttach.attach]
|
||||
map_attach := by simp [MonadAttach.attach, MonadAttach.CanReturn]
|
||||
|
||||
end
|
||||
|
||||
@@ -51,8 +51,19 @@ scoped syntax (name := withAnnotateState)
|
||||
/-- `skip` does nothing. -/
|
||||
syntax (name := skip) "skip" : conv
|
||||
|
||||
/-- `cbv` performs simplification that closely mimics call-by-value evaluation,
|
||||
using equations associated with definitions and the matchers. -/
|
||||
/--
|
||||
`cbv` performs simplification that closely mimics call-by-value evaluation.
|
||||
It reduces the target term by unfolding definitions using their defining equations and
|
||||
applying matcher equations. The unfolding is propositional, so `cbv` also works
|
||||
with functions defined via well-founded recursion or partial fixpoints.
|
||||
|
||||
The proofs produced by `cbv` only use the three standard axioms.
|
||||
In particular, they do not require trust in the correctness of the code
|
||||
generator.
|
||||
|
||||
This tactic is experimental and its behavior is likely to change in upcoming
|
||||
releases of Lean.
|
||||
-/
|
||||
syntax (name := cbv) "cbv" : conv
|
||||
|
||||
/--
|
||||
|
||||
@@ -2313,6 +2313,13 @@ instance Pi.instSubsingleton {α : Sort u} {β : α → Sort v} [∀ a, Subsingl
|
||||
|
||||
/-! # Squash -/
|
||||
|
||||
theorem equivalence_true (α : Sort u) : Equivalence fun _ _ : α => True :=
|
||||
⟨fun _ => trivial, fun _ => trivial, fun _ _ => trivial⟩
|
||||
|
||||
/-- Always-true relation as a `Setoid`. -/
|
||||
protected def Setoid.trivial (α : Sort u) : Setoid α :=
|
||||
⟨_, equivalence_true α⟩
|
||||
|
||||
/--
|
||||
The quotient of `α` by the universal relation. The elements of `Squash α` are those of `α`, but all
|
||||
of them are equal and cannot be distinguished.
|
||||
@@ -2326,8 +2333,11 @@ and its representation in compiled code is identical to that of `α`.
|
||||
|
||||
Consequently, `Squash.lift` may extract an `α` value into any subsingleton type `β`, while
|
||||
`Nonempty.rec` can only do the same when `β` is a proposition.
|
||||
|
||||
`Squash` is defined in terms of `Quotient`, so `Squash` can be used when a `Quotient` argument is
|
||||
expected.
|
||||
-/
|
||||
def Squash (α : Sort u) := Quot (fun (_ _ : α) => True)
|
||||
def Squash (α : Sort u) := Quotient (Setoid.trivial α)
|
||||
|
||||
/--
|
||||
Places a value into its squash type, in which it cannot be distinguished from any other.
|
||||
@@ -2583,3 +2593,11 @@ class Trichotomous (r : α → α → Prop) : Prop where
|
||||
trichotomous (a b : α) : ¬ r a b → ¬ r b a → a = b
|
||||
|
||||
end Std
|
||||
|
||||
@[simp] theorem flip_flip {α : Sort u} {β : Sort v} {φ : Sort w} {f : α → β → φ} :
|
||||
flip (flip f) = f := by
|
||||
apply funext
|
||||
intro a
|
||||
apply funext
|
||||
intro b
|
||||
rw [flip, flip]
|
||||
|
||||
@@ -93,7 +93,7 @@ theorem ext' {xs ys : Array α} (h : xs.toList = ys.toList) : xs = ys := by
|
||||
|
||||
@[simp, grind =] theorem getElem?_toList {xs : Array α} {i : Nat} : xs.toList[i]? = xs[i]? := by
|
||||
simp only [getElem?_def, getElem_toList]
|
||||
simp only [Array.size]
|
||||
simp only [Array.size]; rfl
|
||||
|
||||
/-- `a ∈ as` is a predicate which asserts that `a` is in the array `as`. -/
|
||||
-- NB: This is defined as a structure rather than a plain def so that a lemma
|
||||
@@ -2125,7 +2125,7 @@ Examples:
|
||||
|
||||
/-! ### Repr and ToString -/
|
||||
|
||||
protected def Array.repr {α : Type u} [Repr α] (xs : Array α) : Std.Format :=
|
||||
protected def repr {α : Type u} [Repr α] (xs : Array α) : Std.Format :=
|
||||
let _ : Std.ToFormat α := ⟨repr⟩
|
||||
if xs.size == 0 then
|
||||
"#[]"
|
||||
|
||||
@@ -52,7 +52,9 @@ theorem foldrM_eq_reverse_foldlM_toList.aux [Monad m]
|
||||
unfold foldrM.fold
|
||||
match i with
|
||||
| 0 => simp
|
||||
| i+1 => rw [← List.take_concat_get h]; simp [← aux]
|
||||
| i+1 =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
rw [← List.take_concat_get h]; simp [← aux]
|
||||
|
||||
theorem foldrM_eq_reverse_foldlM_toList [Monad m] {f : α → β → m β} {init : β} {xs : Array α} :
|
||||
xs.foldrM f init = xs.toList.reverse.foldlM (fun x y => f y x) init := by
|
||||
|
||||
@@ -117,11 +117,13 @@ grind_pattern Std.Internal.Array.not_of_countP_eq_zero_of_mem => xs.countP p, x
|
||||
theorem countP_replicate {a : α} {n : Nat} : countP p (replicate n a) = if p a then n else 0 := by
|
||||
simp [← List.toArray_replicate, List.countP_replicate]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem boole_getElem_le_countP {xs : Array α} {i : Nat} (h : i < xs.size) :
|
||||
(if p xs[i] then 1 else 0) ≤ xs.countP p := by
|
||||
rcases xs with ⟨xs⟩
|
||||
simp [List.boole_getElem_le_countP]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[grind =]
|
||||
theorem countP_set {xs : Array α} {i : Nat} {a : α} (h : i < xs.size) :
|
||||
(xs.set i a).countP p = xs.countP p - (if p xs[i] then 1 else 0) + (if p a then 1 else 0) := by
|
||||
|
||||
@@ -76,7 +76,7 @@ theorem isEqv_eq_decide (xs ys : Array α) (r) :
|
||||
simpa [isEqv_iff_rel] using h'
|
||||
|
||||
@[simp, grind =] theorem isEqv_toList [BEq α] (xs ys : Array α) : (xs.toList.isEqv ys.toList r) = (xs.isEqv ys r) := by
|
||||
simp [isEqv_eq_decide, List.isEqv_eq_decide, Array.size]
|
||||
simp [isEqv_eq_decide, List.isEqv_eq_decide, Array.size]; rfl
|
||||
|
||||
theorem eq_of_isEqv [DecidableEq α] (xs ys : Array α) (h : Array.isEqv xs ys (fun x y => x = y)) : xs = ys := by
|
||||
have ⟨h, h'⟩ := rel_of_isEqv h
|
||||
@@ -87,6 +87,7 @@ private theorem isEqvAux_self (r : α → α → Bool) (hr : ∀ a, r a a) (xs :
|
||||
induction i with
|
||||
| zero => simp [Array.isEqvAux]
|
||||
| succ i ih =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp_all only [isEqvAux, Bool.and_self]
|
||||
|
||||
theorem isEqv_self_beq [BEq α] [ReflBEq α] (xs : Array α) : Array.isEqv xs xs (· == ·) = true := by
|
||||
@@ -153,7 +154,7 @@ theorem beq_eq_decide [BEq α] (xs ys : Array α) :
|
||||
simp [BEq.beq, isEqv_eq_decide]
|
||||
|
||||
@[simp, grind =] theorem beq_toList [BEq α] (xs ys : Array α) : (xs.toList == ys.toList) = (xs == ys) := by
|
||||
simp [beq_eq_decide, List.beq_eq_decide, Array.size]
|
||||
simp [beq_eq_decide, List.beq_eq_decide, Array.size]; rfl
|
||||
|
||||
end Array
|
||||
|
||||
|
||||
@@ -329,7 +329,7 @@ theorem eraseIdx_eq_take_drop_succ {xs : Array α} {i : Nat} (h) :
|
||||
rcases xs with ⟨xs⟩
|
||||
simp only [List.size_toArray] at h
|
||||
simp only [List.eraseIdx_toArray, List.eraseIdx_eq_take_drop_succ, take_eq_extract,
|
||||
List.extract_toArray, List.extract_eq_drop_take, Nat.sub_zero, List.drop_zero, drop_eq_extract,
|
||||
List.extract_toArray, List.extract_eq_take_drop, Nat.sub_zero, List.drop_zero, drop_eq_extract,
|
||||
List.size_toArray, List.append_toArray, mk.injEq, List.append_cancel_left_eq]
|
||||
rw [List.take_of_length_le]
|
||||
simp
|
||||
|
||||
@@ -83,6 +83,10 @@ theorem findSome?_eq_some_iff {f : α → Option β} {xs : Array α} {b : β} :
|
||||
· rintro ⟨xs, a, ys, h₀, h₁, h₂⟩
|
||||
exact ⟨xs.toList, a, ys.toList, by simpa using congrArg toList h₀, h₁, by simpa⟩
|
||||
|
||||
theorem isSome_findSome? {xs : Array α} {f : α → Option β} :
|
||||
(xs.findSome? f).isSome = xs.any (f · |>.isSome) := by
|
||||
simp [← findSome?_toList, List.isSome_findSome?]
|
||||
|
||||
@[simp, grind =] theorem findSome?_guard {xs : Array α} : findSome? (Option.guard p) xs = find? p xs := by
|
||||
cases xs; simp
|
||||
|
||||
@@ -197,6 +201,10 @@ theorem find?_eq_some_iff_append {xs : Array α} :
|
||||
exact ⟨as.toList, ⟨l, by simpa using congrArg Array.toList h'⟩,
|
||||
by simpa using h⟩
|
||||
|
||||
theorem isSome_find? {xs : Array α} {f : α → Bool} :
|
||||
(xs.find? f).isSome = xs.any (f ·) := by
|
||||
simp [← find?_toList, List.isSome_find?]
|
||||
|
||||
theorem find?_push {xs : Array α} : (xs.push a).find? p = (xs.find? p).or (if p a then some a else none) := by
|
||||
cases xs; simp
|
||||
|
||||
@@ -425,6 +433,7 @@ theorem lt_findIdx_of_not {p : α → Bool} {xs : Array α} {i : Nat} (h : i < x
|
||||
simp only [Nat.not_lt] at f
|
||||
exact absurd (@findIdx_getElem _ p xs (Nat.lt_of_le_of_lt f h)) (h2 (xs.findIdx p) f)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
/-- `xs.findIdx p = i` iff `p xs[i]` and `¬ p xs [j]` for all `j < i`. -/
|
||||
theorem findIdx_eq {p : α → Bool} {xs : Array α} {i : Nat} (h : i < xs.size) :
|
||||
xs.findIdx p = i ↔ p xs[i] ∧ ∀ j (hji : j < i), p (xs[j]'(Nat.lt_trans hji h)) = false := by
|
||||
@@ -613,12 +622,12 @@ theorem findIdx?_eq_some_le_of_findIdx?_eq_some {xs : Array α} {p q : α → Bo
|
||||
/-! ### findFinIdx? -/
|
||||
|
||||
@[grind =]
|
||||
theorem findFinIdx?_empty {p : α → Bool} : findFinIdx? p #[] = none := by simp
|
||||
theorem findFinIdx?_empty {p : α → Bool} : findFinIdx? p #[] = none := by simp; rfl
|
||||
|
||||
@[grind =]
|
||||
theorem findFinIdx?_singleton {a : α} {p : α → Bool} :
|
||||
#[a].findFinIdx? p = if p a then some ⟨0, by simp⟩ else none := by
|
||||
simp
|
||||
simp; rfl
|
||||
|
||||
-- We can't mark this as a `@[congr]` lemma since the head of the RHS is not `findFinIdx?`.
|
||||
theorem findFinIdx?_congr {p : α → Bool} {xs ys : Array α} (w : xs = ys) :
|
||||
@@ -714,6 +723,7 @@ theorem findFinIdx?_eq_bind_find?_finIdxOf? [BEq α] [LawfulBEq α] {xs : Array
|
||||
xs.findFinIdx? p = (xs.find? p).bind (xs.finIdxOf? ·) := by
|
||||
cases xs
|
||||
simp [List.findFinIdx?_eq_bind_find?_finIdxOf?]
|
||||
rfl
|
||||
|
||||
theorem findIdx_eq_getD_bind_find?_idxOf? [BEq α] [LawfulBEq α] {xs : Array α} {p : α → Bool} :
|
||||
xs.findIdx p = ((xs.find? p).bind (xs.idxOf? ·)).getD xs.size := by
|
||||
@@ -792,7 +802,7 @@ theorem idxOf?_eq_map_finIdxOf?_val [BEq α] {xs : Array α} {a : α} :
|
||||
xs.idxOf? a = (xs.finIdxOf? a).map (·.val) := by
|
||||
simp [idxOf?, finIdxOf?]
|
||||
|
||||
@[grind =] theorem finIdxOf?_empty [BEq α] : (#[] : Array α).finIdxOf? a = none := by simp
|
||||
@[grind =] theorem finIdxOf?_empty [BEq α] : (#[] : Array α).finIdxOf? a = none := by simp; rfl
|
||||
|
||||
@[simp, grind =] theorem finIdxOf?_eq_none_iff [BEq α] [LawfulBEq α] {xs : Array α} {a : α} :
|
||||
xs.finIdxOf? a = none ↔ a ∉ xs := by
|
||||
|
||||
@@ -170,6 +170,7 @@ theorem getD_getElem? {xs : Array α} {i : Nat} {d : α} :
|
||||
|
||||
@[simp] theorem getElem?_empty {i : Nat} : (#[] : Array α)[i]? = none := rfl
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem getElem_push_lt {xs : Array α} {x : α} {i : Nat} (h : i < xs.size) :
|
||||
have : i < (xs.push x).size := by simp [*, Nat.lt_succ_of_le, Nat.le_of_lt]
|
||||
(xs.push x)[i] = xs[i] := by
|
||||
@@ -895,7 +896,7 @@ theorem all_push {xs : Array α} {a : α} {p : α → Bool} :
|
||||
@[simp] theorem getElem_set_ne {xs : Array α} {i : Nat} (h' : i < xs.size) {v : α} {j : Nat}
|
||||
(pj : j < xs.size) (h : i ≠ j) :
|
||||
(xs.set i v)[j]'(by simp [*]) = xs[j] := by
|
||||
simp only [set, ← getElem_toList, List.getElem_set_ne h]
|
||||
simp only [set, ← getElem_toList, List.getElem_set_ne h]; rfl
|
||||
|
||||
@[simp] theorem getElem?_set_ne {xs : Array α} {i : Nat} (h : i < xs.size) {v : α} {j : Nat}
|
||||
(ne : i ≠ j) : (xs.set i v)[j]? = xs[j]? := by
|
||||
@@ -2854,7 +2855,7 @@ theorem getElem?_extract {xs : Array α} {start stop : Nat} :
|
||||
· simp only [length_toList, size_extract, List.length_take, List.length_drop]
|
||||
omega
|
||||
· intro n h₁ h₂
|
||||
simp
|
||||
simp; rfl
|
||||
|
||||
@[simp] theorem extract_size {xs : Array α} : xs.extract 0 xs.size = xs := by
|
||||
apply ext
|
||||
@@ -3974,6 +3975,7 @@ theorem all_filterMap {xs : Array α} {f : α → Option β} {p : β → Bool} :
|
||||
· simp only [Id.run_pure]
|
||||
rw [if_neg (mt (by rintro rfl; exact h) (by simp_all))]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =] theorem toList_modify {xs : Array α} {f : α → α} {i : Nat} :
|
||||
(xs.modify i f).toList = xs.toList.modify i f := by
|
||||
apply List.ext_getElem
|
||||
@@ -4146,7 +4148,7 @@ variable [LawfulBEq α]
|
||||
(xs.replace a b)[i]? = if xs[i]? == some a then if a ∈ xs.take i then some a else some b else xs[i]? := by
|
||||
rcases xs with ⟨xs⟩
|
||||
simp only [List.replace_toArray, List.getElem?_toArray, List.getElem?_replace, take_eq_extract,
|
||||
List.extract_toArray, List.extract_eq_drop_take, Nat.sub_zero, List.drop_zero, List.mem_toArray]
|
||||
List.extract_toArray, List.extract_eq_take_drop, Nat.sub_zero, List.drop_zero, List.mem_toArray]
|
||||
|
||||
theorem getElem?_replace_of_ne {xs : Array α} {i : Nat} (h : xs[i]? ≠ some a) :
|
||||
(xs.replace a b)[i]? = xs[i]? := by
|
||||
@@ -4259,6 +4261,7 @@ private theorem getElem_ofFn_go {f : Fin n → α} {acc i k} (h : i ≤ n) (w₁
|
||||
· simp
|
||||
omega
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp] theorem getElem_ofFn {f : Fin n → α} {i : Nat} (h : i < (ofFn f).size) :
|
||||
(ofFn f)[i] = f ⟨i, size_ofFn (f := f) ▸ h⟩ := by
|
||||
unfold ofFn
|
||||
@@ -4490,11 +4493,13 @@ theorem getElem?_push_eq {xs : Array α} {x : α} : (xs.push x)[xs.size]? = some
|
||||
cases xs
|
||||
simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =] theorem finIdxOf?_toList [BEq α] {a : α} {xs : Array α} :
|
||||
xs.toList.finIdxOf? a = (xs.finIdxOf? a).map (Fin.cast (by simp)) := by
|
||||
cases xs
|
||||
simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =] theorem findFinIdx?_toList {p : α → Bool} {xs : Array α} :
|
||||
xs.toList.findFinIdx? p = (xs.findFinIdx? p).map (Fin.cast (by simp)) := by
|
||||
cases xs
|
||||
@@ -4619,6 +4624,7 @@ namespace List
|
||||
as.toArray.unzip = Prod.map List.toArray List.toArray as.unzip := by
|
||||
ext1 <;> simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =] theorem firstM_toArray [Alternative m] {as : List α} {f : α → m β} :
|
||||
as.toArray.firstM f = as.firstM f := by
|
||||
unfold Array.firstM
|
||||
|
||||
@@ -72,6 +72,7 @@ theorem mapFinIdx_spec {xs : Array α} {f : (i : Nat) → α → (h : i < xs.siz
|
||||
simp only [getElem?_def, size_mapFinIdx, getElem_mapFinIdx]
|
||||
split <;> simp_all
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =] theorem toList_mapFinIdx {xs : Array α} {f : (i : Nat) → α → (h : i < xs.size) → β} :
|
||||
(xs.mapFinIdx f).toList = xs.toList.mapFinIdx (fun i a h => f i a (by simpa)) := by
|
||||
apply List.ext_getElem <;> simp
|
||||
@@ -105,6 +106,7 @@ theorem mapIdx_spec {f : Nat → α → β} {xs : Array α}
|
||||
xs[i]?.map (f i) := by
|
||||
simp [getElem?_def, size_mapIdx, getElem_mapIdx]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =] theorem toList_mapIdx {f : Nat → α → β} {xs : Array α} :
|
||||
(xs.mapIdx f).toList = xs.toList.mapIdx (fun i a => f i a) := by
|
||||
apply List.ext_getElem <;> simp
|
||||
|
||||
@@ -89,7 +89,7 @@ public theorem _root_.List.min_toArray [Min α] {l : List α} {h} :
|
||||
· rename_i x xs
|
||||
simp only [List.getElem_toArray, List.getElem_cons_zero, List.size_toArray, List.length_cons]
|
||||
rw [List.toArray_cons, foldl_eq_foldl_extract]
|
||||
rw [← Array.foldl_toList, Array.toList_extract, List.extract_eq_drop_take]
|
||||
rw [← Array.foldl_toList, Array.toList_extract, List.extract_eq_take_drop]
|
||||
simp [List.min]
|
||||
|
||||
public theorem _root_.List.min_eq_min_toArray [Min α] {l : List α} {h} :
|
||||
@@ -129,7 +129,7 @@ public theorem _root_.List.max_toArray [Max α] {l : List α} {h} :
|
||||
· rename_i x xs
|
||||
simp only [List.getElem_toArray, List.getElem_cons_zero, List.size_toArray, List.length_cons]
|
||||
rw [List.toArray_cons, foldl_eq_foldl_extract]
|
||||
rw [← Array.foldl_toList, Array.toList_extract, List.extract_eq_drop_take]
|
||||
rw [← Array.foldl_toList, Array.toList_extract, List.extract_eq_take_drop]
|
||||
simp [List.max]
|
||||
|
||||
public theorem _root_.List.max_eq_max_toArray [Max α] {l : List α} {h} :
|
||||
|
||||
@@ -41,6 +41,7 @@ theorem ofFn_succ {f : Fin (n+1) → α} :
|
||||
intro h₃
|
||||
simp only [show i = n by omega]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem ofFn_add {n m} {f : Fin (n + m) → α} :
|
||||
ofFn f = (ofFn (fun i => f (i.castLE (Nat.le_add_right n m)))) ++ (ofFn (fun i => f (i.natAdd n))) := by
|
||||
induction m with
|
||||
@@ -107,6 +108,7 @@ theorem ofFnM_succ {n} [Monad m] [LawfulMonad m] {f : Fin (n + 1) → m α} :
|
||||
pure (as.push a)) := by
|
||||
simp [ofFnM, Fin.foldlM_succ_last]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem ofFnM_add {n m} [Monad m] [LawfulMonad m] {f : Fin (n + k) → m α} :
|
||||
ofFnM f = (do
|
||||
let as ← ofFnM fun i : Fin n => f (i.castLE (Nat.le_add_right n k))
|
||||
|
||||
@@ -135,7 +135,7 @@ theorem extract {xs ys : Array α} (h : xs ~ ys) {lo hi : Nat}
|
||||
rcases xs with ⟨xs⟩
|
||||
rcases ys with ⟨ys⟩
|
||||
simp_all only [perm_iff_toList_perm, List.getElem?_toArray, List.extract_toArray,
|
||||
List.extract_eq_drop_take]
|
||||
List.extract_eq_take_drop]
|
||||
apply List.Perm.take_of_getElem? (w := fun i h => by simpa using whi (lo + i) (by omega))
|
||||
apply List.Perm.drop_of_getElem? (w := wlo)
|
||||
exact h
|
||||
|
||||
@@ -210,7 +210,7 @@ protected def toHex {n : Nat} (x : BitVec n) : String :=
|
||||
String.Internal.append t s
|
||||
|
||||
/-- `BitVec` representation. -/
|
||||
protected def BitVec.repr (a : BitVec n) : Std.Format :=
|
||||
protected def repr (a : BitVec n) : Std.Format :=
|
||||
"0x" ++ (a.toHex : Std.Format) ++ "#" ++ repr n
|
||||
|
||||
instance : Repr (BitVec n) where
|
||||
|
||||
@@ -2192,6 +2192,7 @@ def uppcRec {w} (x : BitVec w) (s : Nat) (hs : s < w) : Bool :=
|
||||
| 0 => x.msb
|
||||
| i + 1 => x[w - 1 - i] || uppcRec x i (by omega)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
/-- The unsigned parallel prefix of `x` at `s` is `true` if and only if x interpreted
|
||||
as a natural number is greater or equal than `2 ^ (w - 1 - (s - 1))`. -/
|
||||
@[simp]
|
||||
|
||||
@@ -1198,7 +1198,7 @@ let x' = x.extractLsb' 7 5 = _ _ 9 8 7
|
||||
(decide (0 < len) &&
|
||||
(decide (start + len ≤ w) &&
|
||||
x.getMsbD (w - (start + len)))) := by
|
||||
simp [BitVec.msb, getMsbD_extractLsb']
|
||||
simp [BitVec.msb, getMsbD_extractLsb']; rfl
|
||||
|
||||
@[simp, grind =] theorem getElem_extract {hi lo : Nat} {x : BitVec n} {i : Nat} (h : i < hi - lo + 1) :
|
||||
(extractLsb hi lo x)[i] = getLsbD x (lo+i) := by
|
||||
@@ -1234,7 +1234,7 @@ let x' = x.extractLsb' 7 5 = _ _ 9 8 7
|
||||
|
||||
@[simp, grind =] theorem msb_extractLsb {hi lo : Nat} {x : BitVec w} :
|
||||
(extractLsb hi lo x).msb = (decide (max hi lo < w) && x.getMsbD (w - 1 - max hi lo)) := by
|
||||
simp [BitVec.msb]
|
||||
simp [BitVec.msb]; rfl
|
||||
|
||||
theorem extractLsb'_eq_extractLsb {w : Nat} (x : BitVec w) (start len : Nat) (h : len > 0) :
|
||||
x.extractLsb' start len = (x.extractLsb (len - 1 + start) start).cast (by omega) := by
|
||||
@@ -2581,6 +2581,19 @@ theorem msb_signExtend {x : BitVec w} :
|
||||
· simp [h, BitVec.msb, getMsbD_signExtend, show v - w = 0 by omega]
|
||||
· simp [h, BitVec.msb, getMsbD_signExtend, show ¬ (v - w = 0) by omega]
|
||||
|
||||
/-- Sign-extending to `w + n` bits, extracting bits `[w - 1 + n..n]`, and setting width
|
||||
back to `w` is equivalent to arithmetic right shift by `n`, since both sides discard the `n`
|
||||
least significant bits and replicate the sign bit into the upper bits. -/
|
||||
@[simp]
|
||||
theorem signExtend_extractLsb_setWidth {x : BitVec w} {n : Nat} :
|
||||
((x.signExtend (w + n)).extractLsb (w - 1 + n) n).setWidth w = x.sshiftRight n := by
|
||||
ext i hi
|
||||
simp only [getElem_sshiftRight, getElem_setWidth, getLsbD_extract,
|
||||
Nat.add_sub_cancel, show i ≤ w - 1 by omega, decide_true, getLsbD_signExtend,
|
||||
Bool.true_and]
|
||||
by_cases hni : n + i < w
|
||||
<;> (simp [hni]; omega)
|
||||
|
||||
/-- Sign extending to a width smaller than the starting width is a truncation. -/
|
||||
theorem signExtend_eq_setWidth_of_le (x : BitVec w) {v : Nat} (hv : v ≤ w) :
|
||||
x.signExtend v = x.setWidth v := by
|
||||
@@ -2771,8 +2784,9 @@ theorem msb_append {x : BitVec w} {y : BitVec v} :
|
||||
@[simp] theorem append_zero_width (x : BitVec w) (y : BitVec 0) : x ++ y = x := by
|
||||
ext i ih
|
||||
rw [getElem_append] -- Why does this not work with `simp [getElem_append]`?
|
||||
simp
|
||||
simp; rfl
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[grind =]
|
||||
theorem toInt_append {x : BitVec n} {y : BitVec m} :
|
||||
(x ++ y).toInt = if n == 0 then y.toInt else (2 ^ m) * x.toInt + y.toNat := by
|
||||
@@ -5278,6 +5292,7 @@ theorem and_one_eq_setWidth_ofBool_getLsbD {x : BitVec w} :
|
||||
theorem replicate_zero {x : BitVec w} : x.replicate 0 = 0#0 := by
|
||||
simp [replicate]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp, grind =]
|
||||
theorem replicate_one {w : Nat} {x : BitVec w} :
|
||||
(x.replicate 1) = x.cast (by rw [Nat.mul_one]) := by
|
||||
@@ -5329,6 +5344,7 @@ theorem append_assoc {x₁ : BitVec w₁} {x₂ : BitVec w₂} {x₃ : BitVec w
|
||||
theorem append_assoc' {x₁ : BitVec w₁} {x₂ : BitVec w₂} {x₃ : BitVec w₃} :
|
||||
(x₁ ++ (x₂ ++ x₃)) = ((x₁ ++ x₂) ++ x₃).cast (by omega) := by simp [append_assoc]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem replicate_append_self {x : BitVec w} :
|
||||
x ++ x.replicate n = (x.replicate n ++ x).cast (by omega) := by
|
||||
induction n with
|
||||
|
||||
@@ -111,13 +111,13 @@ theorem getElem_eq_getElem_data {a : ByteArray} {i : Nat} {h : i < a.size} :
|
||||
theorem getElem_append_left {i : Nat} {a b : ByteArray} {h : i < (a ++ b).size}
|
||||
(hlt : i < a.size) : (a ++ b)[i] = a[i] := by
|
||||
simp only [getElem_eq_getElem_data, data_append]
|
||||
rw [Array.getElem_append_left (by simpa)]
|
||||
rw [Array.getElem_append_left (by simpa)]; rfl
|
||||
|
||||
theorem getElem_append_right {i : Nat} {a b : ByteArray} {h : i < (a ++ b).size}
|
||||
(hle : a.size ≤ i) : (a ++ b)[i] = b[i - a.size]'(by simp_all; omega) := by
|
||||
simp only [getElem_eq_getElem_data, data_append]
|
||||
rw [Array.getElem_append_right (by simpa)]
|
||||
simp
|
||||
simp; rfl
|
||||
|
||||
@[simp]
|
||||
theorem _root_.List.getElem_toByteArray {l : List UInt8} {i : Nat} {h : i < l.toByteArray.size} :
|
||||
@@ -223,7 +223,7 @@ theorem getElem_extract_aux {xs : ByteArray} {start stop : Nat} (h : i < (xs.ext
|
||||
|
||||
theorem getElem_extract {i : Nat} {b : ByteArray} {start stop : Nat}
|
||||
(h) : (b.extract start stop)[i]'h = b[start + i]'(getElem_extract_aux h) := by
|
||||
simp [getElem_eq_getElem_data]
|
||||
simp [getElem_eq_getElem_data]; rfl
|
||||
|
||||
theorem extract_eq_extract_left {a : ByteArray} {i i' j : Nat} :
|
||||
a.extract i j = a.extract i' j ↔ min j a.size - i = min j a.size - i' := by
|
||||
@@ -236,25 +236,25 @@ theorem extract_add_one {a : ByteArray} {i : Nat} (ha : i + 1 ≤ a.size) :
|
||||
omega
|
||||
· rename_i j hj hj'
|
||||
obtain rfl : j = 0 := by simpa using hj'
|
||||
simp [ByteArray.getElem_eq_getElem_data]
|
||||
simp [ByteArray.getElem_eq_getElem_data]; rfl
|
||||
|
||||
theorem extract_add_two {a : ByteArray} {i : Nat} (ha : i + 2 ≤ a.size) :
|
||||
a.extract i (i + 2) = [a[i], a[i + 1]].toByteArray := by
|
||||
rw [extract_eq_extract_append_extract (i + 1) (by simp) (by omega),
|
||||
extract_add_one (by omega), extract_add_one (by omega)]
|
||||
simp [← List.toByteArray_append]
|
||||
simp [← List.toByteArray_append]; rfl
|
||||
|
||||
theorem extract_add_three {a : ByteArray} {i : Nat} (ha : i + 3 ≤ a.size) :
|
||||
a.extract i (i + 3) = [a[i], a[i + 1], a[i + 2]].toByteArray := by
|
||||
rw [extract_eq_extract_append_extract (i + 1) (by simp) (by omega),
|
||||
extract_add_one (by omega), extract_add_two (by omega)]
|
||||
simp [← List.toByteArray_append]
|
||||
simp [← List.toByteArray_append]; rfl
|
||||
|
||||
theorem extract_add_four {a : ByteArray} {i : Nat} (ha : i + 4 ≤ a.size) :
|
||||
a.extract i (i + 4) = [a[i], a[i + 1], a[i + 2], a[i + 3]].toByteArray := by
|
||||
rw [extract_eq_extract_append_extract (i + 1) (by simp) (by omega),
|
||||
extract_add_one (by omega), extract_add_three (by omega)]
|
||||
simp [← List.toByteArray_append]
|
||||
simp [← List.toByteArray_append]; rfl
|
||||
|
||||
theorem append_assoc {a b c : ByteArray} : a ++ b ++ c = a ++ (b ++ c) := by
|
||||
ext1
|
||||
|
||||
@@ -83,6 +83,7 @@ def notLTTotal : Std.Total (¬ · < · : Char → Char → Prop) where
|
||||
@[simp]
|
||||
theorem toUInt8_val {c : Char} : c.val.toUInt8 = c.toUInt8 := rfl
|
||||
|
||||
@[simp]
|
||||
theorem toString_eq_singleton {c : Char} : c.toString = String.singleton c := rfl
|
||||
|
||||
end Char
|
||||
|
||||
@@ -4,7 +4,6 @@ Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: François G. Dorais
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Control.Lawful.Basic
|
||||
public import Init.Ext
|
||||
@@ -13,7 +12,7 @@ import Init.Data.Nat.Lemmas
|
||||
import Init.Omega
|
||||
import Init.TacticsExtra
|
||||
import Init.WFTactics
|
||||
|
||||
import Init.Hints
|
||||
public section
|
||||
|
||||
namespace Fin
|
||||
@@ -165,7 +164,7 @@ theorem foldlM_add [Monad m] [LawfulMonad m] (f : α → Fin (n + k) → m α) :
|
||||
simp
|
||||
| succ k ih =>
|
||||
funext x
|
||||
simp [foldlM_succ_last, ← Nat.add_assoc, ih]
|
||||
simp [foldlM_succ_last, ← Nat.add_assoc, ih]; rfl
|
||||
|
||||
/-! ### foldrM -/
|
||||
|
||||
@@ -223,7 +222,7 @@ theorem foldrM_add [Monad m] [LawfulMonad m] (f : Fin (n + k) → α → m α) :
|
||||
simp
|
||||
| succ k ih =>
|
||||
funext x
|
||||
simp [foldrM_succ_last, ← Nat.add_assoc, ih]
|
||||
simp [foldrM_succ_last, ← Nat.add_assoc, ih]; rfl
|
||||
|
||||
/-! ### foldl -/
|
||||
|
||||
@@ -269,7 +268,7 @@ theorem foldl_add (f : α → Fin (n + m) → α) (x) :
|
||||
(foldl n (fun x i => f x (i.castLE (Nat.le_add_right n m))) x):= by
|
||||
induction m with
|
||||
| zero => simp
|
||||
| succ m ih => simp [foldl_succ_last, ih, ← Nat.add_assoc]
|
||||
| succ m ih => simp [foldl_succ_last, ih, ← Nat.add_assoc]; rfl
|
||||
|
||||
theorem foldl_eq_foldlM (f : α → Fin n → α) (x) :
|
||||
foldl n f x = (foldlM (m := Id) n (pure <| f · ·) x).run := by
|
||||
@@ -322,7 +321,7 @@ theorem foldr_add (f : Fin (n + m) → α → α) (x) :
|
||||
(foldr m (fun i => f (i.natAdd n)) x) := by
|
||||
induction m generalizing x with
|
||||
| zero => simp
|
||||
| succ m ih => simp [foldr_succ_last, ih, ← Nat.add_assoc]
|
||||
| succ m ih => simp [foldr_succ_last, ih, ← Nat.add_assoc]; rfl
|
||||
|
||||
theorem foldr_eq_foldrM (f : Fin n → α → α) (x) :
|
||||
foldr n f x = (foldrM (m := Id) n (pure <| f · ·) x).run := by
|
||||
|
||||
@@ -4,14 +4,12 @@ Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Joe Hendrix
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.Fin.Basic
|
||||
import Init.PropLemmas
|
||||
import Init.WFTactics
|
||||
|
||||
import Init.Hints
|
||||
public section
|
||||
|
||||
namespace Fin
|
||||
|
||||
/--
|
||||
|
||||
@@ -4,7 +4,6 @@ Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Mario Carneiro, Leonardo de Moura
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Ext
|
||||
public import Init.Data.Nat.Div.Basic
|
||||
@@ -15,7 +14,7 @@ import Init.Data.Nat.Lemmas
|
||||
import Init.Data.Nat.Linear
|
||||
import Init.Omega
|
||||
import Init.TacticsExtra
|
||||
|
||||
import Init.Hints
|
||||
@[expose] public section
|
||||
|
||||
open Std
|
||||
@@ -998,7 +997,7 @@ For the induction:
|
||||
|
||||
@[simp, grind =] theorem reverseInduction_last {n : Nat} {motive : Fin (n + 1) → Sort _} {zero succ} :
|
||||
(reverseInduction zero succ (Fin.last n) : motive (Fin.last n)) = zero := by
|
||||
rw [reverseInduction, reverseInduction.go]; simp
|
||||
rw [reverseInduction, reverseInduction.go]; simp; rfl
|
||||
|
||||
private theorem reverseInduction_castSucc_aux {n : Nat} {motive : Fin (n + 1) → Sort _} {succ}
|
||||
(i : Fin n) (j : Nat) (h) (h2 : i.1 < j) (zero : motive ⟨j, h⟩) :
|
||||
@@ -1009,9 +1008,9 @@ private theorem reverseInduction_castSucc_aux {n : Nat} {motive : Fin (n + 1)
|
||||
| succ j ih =>
|
||||
rw [reverseInduction.go, dif_neg (by exact Nat.ne_of_lt h2)]
|
||||
by_cases hij : i = j
|
||||
· subst hij; simp [reverseInduction.go]
|
||||
dsimp only
|
||||
rw [ih _ _ (by omega), eq_comm, reverseInduction.go, dif_neg (by change i.1 + 1 ≠ _; omega)]
|
||||
· subst hij; simp [reverseInduction.go]; rfl
|
||||
· dsimp only
|
||||
rw [ih _ _ (by omega), eq_comm, reverseInduction.go, dif_neg (by change i.1 + 1 ≠ _; omega)]
|
||||
|
||||
@[simp, grind =] theorem reverseInduction_castSucc {n : Nat} {motive : Fin (n + 1) → Sort _} {zero succ}
|
||||
(i : Fin n) : reverseInduction (motive := motive) zero succ (castSucc i) =
|
||||
|
||||
@@ -750,6 +750,7 @@ theorem Iter.anyM_filterMapM {α β β' : Type w} {m : Type w → Type w'}
|
||||
simp only [filterMapM_eq_toIter_filterMapM_toIterM, IterM.anyM_filterMapM]
|
||||
rfl
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
-- There is hope to generalize the following theorem as soon there is a `Shrink` type.
|
||||
/--
|
||||
This lemma expresses `Iter.anyM` in terms of `IterM.anyM`.
|
||||
|
||||
@@ -232,6 +232,7 @@ public theorem Iter.toArray_flatMapM {α α₂ β γ : Type w} {m : Type w → T
|
||||
(it₁.flatMapM f).toArray = Array.flatten <$> (it₁.mapM fun b => do (← f b).toArray).toArray := by
|
||||
simp [flatMapM, toArray_flatMapAfterM]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
public theorem Iter.toList_flatMapAfter {α α₂ β γ : Type w} [Iterator α Id β] [Iterator α₂ Id γ]
|
||||
[Finite α Id] [Finite α₂ Id]
|
||||
{f : β → Iter (α := α₂) γ} {it₁ : Iter (α := α) β} {it₂ : Option (Iter (α := α₂) γ)} :
|
||||
@@ -242,6 +243,7 @@ public theorem Iter.toList_flatMapAfter {α α₂ β γ : Type w} [Iterator α I
|
||||
simp only [flatMapAfter, Iter.toList, toIterM_toIter, IterM.toList_flatMapAfter]
|
||||
cases it₂ <;> simp [map, IterM.toList_map_eq_toList_mapM, - IterM.toList_map]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
public theorem Iter.toArray_flatMapAfter {α α₂ β γ : Type w} [Iterator α Id β] [Iterator α₂ Id γ]
|
||||
[Finite α Id] [Finite α₂ Id]
|
||||
{f : β → Iter (α := α₂) γ} {it₁ : Iter (α := α) β} {it₂ : Option (Iter (α := α₂) γ)} :
|
||||
|
||||
@@ -600,6 +600,7 @@ theorem IterM.toList_map_mapM {α β γ δ : Type w}
|
||||
toList_filterMapM_mapM]
|
||||
congr <;> simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem IterM.toList_filterMapWithPostcondition {α β γ : Type w} {m : Type w → Type w'}
|
||||
[Monad m] [LawfulMonad m]
|
||||
@@ -623,6 +624,7 @@ theorem IterM.toList_filterMapWithPostcondition {α β γ : Type w} {m : Type w
|
||||
· simp [ihs ‹_›, heq]
|
||||
· simp [heq]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem IterM.toList_mapWithPostcondition {α β γ : Type w} {m : Type w → Type w'}
|
||||
[Monad m] [LawfulMonad m] [Iterator α Id β] [Finite α Id]
|
||||
@@ -643,6 +645,7 @@ theorem IterM.toList_mapWithPostcondition {α β γ : Type w} {m : Type w → Ty
|
||||
· simp [ihs ‹_›, heq]
|
||||
· simp [heq]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem IterM.toList_filterMapM {α β γ : Type w} {m : Type w → Type w'}
|
||||
[Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m]
|
||||
@@ -652,6 +655,7 @@ theorem IterM.toList_filterMapM {α β γ : Type w} {m : Type w → Type w'}
|
||||
simp [toList_filterMapM_eq_toList_filterMapWithPostcondition, toList_filterMapWithPostcondition,
|
||||
PostconditionT.attachLift, PostconditionT.run_eq_map, WeaklyLawfulMonadAttach.map_attach]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem IterM.toList_mapM {α β γ : Type w} {m : Type w → Type w'}
|
||||
[Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m]
|
||||
@@ -1297,6 +1301,7 @@ theorem IterM.forIn_filterMap
|
||||
rw [filterMap, forIn_filterMapWithPostcondition]
|
||||
simp [PostconditionT.run_eq_map]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem IterM.forIn_mapWithPostcondition
|
||||
[Monad m] [LawfulMonad m] [Monad n] [LawfulMonad n] [Monad o] [LawfulMonad o]
|
||||
[MonadLiftT m n] [LawfulMonadLiftT m n] [MonadLiftT n o] [LawfulMonadLiftT n o]
|
||||
|
||||
@@ -36,7 +36,7 @@ theorem IterM.step_flattenAfter {α α₂ β : Type w} {m : Type w → Type w'}
|
||||
cases it₂
|
||||
all_goals
|
||||
· apply bind_congr; intro step
|
||||
cases step.inflate using PlausibleIterStep.casesOn <;> simp [IterM.flattenAfter]
|
||||
cases step.inflate using PlausibleIterStep.casesOn <;> simp [IterM.flattenAfter] <;> rfl
|
||||
|
||||
namespace Iterators.Types
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ theorem IterM.step_uLift [Iterator α m β] [Monad n] {it : IterM (α := α) m
|
||||
| .done h => return .deflate (.done ⟨_, h, rfl⟩)) := by
|
||||
simp only [IterM.step, Iterator.step, IterM.uLift]
|
||||
apply bind_congr; intro step
|
||||
split <;> simp [Types.ULiftIterator.Monadic.modifyStep, *]
|
||||
split <;> simp [Types.ULiftIterator.Monadic.modifyStep, *] <;> rfl
|
||||
|
||||
@[simp]
|
||||
theorem IterM.toList_uLift [Iterator α m β] [Monad m] [Monad n] {it : IterM (α := α) m β}
|
||||
|
||||
@@ -26,6 +26,7 @@ theorem Iter.uLift_eq_toIter_uLift_toIterM {it : Iter (α := α) β} :
|
||||
it.uLift = (it.toIterM.uLift Id).toIter :=
|
||||
rfl
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem Iter.step_uLift [Iterator α Id β] {it : Iter (α := α) β} :
|
||||
it.uLift.step = match it.step with
|
||||
| .yield it' out h => .yield it'.uLift (.up out) ⟨_, h, rfl⟩
|
||||
@@ -38,6 +39,7 @@ theorem Iter.step_uLift [Iterator α Id β] {it : Iter (α := α) β} :
|
||||
PlausibleIterStep.done, pure_bind]
|
||||
cases it.toIterM.step.run.inflate using PlausibleIterStep.casesOn <;> simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem Iter.toList_uLift [Iterator α Id β] {it : Iter (α := α) β}
|
||||
[Finite α Id] :
|
||||
@@ -59,6 +61,7 @@ theorem Iter.toArray_uLift [Iterator α Id β] {it : Iter (α := α) β}
|
||||
rw [← toArray_toList, ← toArray_toList, toList_uLift]
|
||||
simp [-toArray_toList]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem Iter.length_uLift [Iterator α Id β] {it : Iter (α := α) β}
|
||||
[Finite α Id] [IteratorLoop α Id Id] [LawfulIteratorLoop α Id Id] :
|
||||
|
||||
@@ -398,7 +398,7 @@ theorem Iter.fold_eq_fold_toIterM {α β : Type w} {γ : Type w} [Iterator α Id
|
||||
[Finite α Id] [IteratorLoop α Id Id]
|
||||
{f : γ → β → γ} {init : γ} {it : Iter (α := α) β} :
|
||||
it.fold (init := init) f = (it.toIterM.fold (init := init) f).run := by
|
||||
rw [fold_eq_foldM, foldM_eq_foldM_toIterM, IterM.fold_eq_foldM]
|
||||
rw [fold_eq_foldM, foldM_eq_foldM_toIterM, IterM.fold_eq_foldM]; rfl
|
||||
|
||||
@[simp]
|
||||
theorem Iter.forIn_pure_yield_eq_fold {α β : Type w} {γ : Type x} [Iterator α Id β]
|
||||
|
||||
@@ -23,6 +23,7 @@ open Std Std.Iterators
|
||||
|
||||
variable {β : Type w}
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem List.step_iter_nil :
|
||||
(([] : List β).iter).step = ⟨.done, rfl⟩ := by
|
||||
@@ -31,7 +32,7 @@ theorem List.step_iter_nil :
|
||||
@[simp]
|
||||
theorem List.step_iter_cons {x : β} {xs : List β} :
|
||||
((x :: xs).iter).step = ⟨.yield xs.iter x, rfl⟩ := by
|
||||
simp [List.iter, List.iterM, IterM.toIter, Iter.step_eq]
|
||||
simp [List.iter, List.iterM, IterM.toIter, Iter.step_eq]; rfl
|
||||
|
||||
@[simp, grind =]
|
||||
theorem List.toArray_iter {l : List β} :
|
||||
|
||||
@@ -31,7 +31,7 @@ theorem List.step_iterM_nil :
|
||||
@[simp]
|
||||
theorem List.step_iterM_cons {x : β} {xs : List β} :
|
||||
((x :: xs).iterM m).step = pure (.deflate ⟨.yield (xs.iterM m) x, rfl⟩) := by
|
||||
simp only [List.iterM, IterM.step, Iterator.step]
|
||||
simp only [List.iterM, IterM.step, Iterator.step]; rfl
|
||||
|
||||
theorem List.step_iterM {l : List β} :
|
||||
(l.iterM m).step = match l with
|
||||
|
||||
@@ -35,3 +35,4 @@ public import Init.Data.List.OfFn
|
||||
public import Init.Data.List.FinRange
|
||||
public import Init.Data.List.Lex
|
||||
public import Init.Data.List.Range
|
||||
public import Init.Data.List.Scan
|
||||
|
||||
@@ -955,9 +955,13 @@ Examples:
|
||||
abbrev extract (l : List α) (start : Nat := 0) (stop : Nat := l.length) : List α :=
|
||||
(l.drop start).take (stop - start)
|
||||
|
||||
@[simp] theorem extract_eq_drop_take {l : List α} {start stop : Nat} :
|
||||
@[simp] theorem extract_eq_take_drop {l : List α} {start stop : Nat} :
|
||||
l.extract start stop = (l.drop start).take (stop - start) := rfl
|
||||
|
||||
set_option linter.missingDocs false in
|
||||
@[deprecated extract_eq_take_drop (since := "2026-02-06")]
|
||||
def extract_eq_drop_take := @extract_eq_take_drop
|
||||
|
||||
/-! ### takeWhile -/
|
||||
|
||||
/--
|
||||
|
||||
@@ -132,7 +132,9 @@ theorem boole_getElem_le_countP {p : α → Bool} {l : List α} {i : Nat} (h : i
|
||||
| nil => simp at h
|
||||
| cons x l ih =>
|
||||
cases i with
|
||||
| zero => simp [countP_cons]
|
||||
| zero =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [countP_cons]
|
||||
| succ i =>
|
||||
simp only [length_cons, add_one_lt_add_one_iff] at h
|
||||
simp only [getElem_cons_succ, countP_cons]
|
||||
@@ -263,7 +265,9 @@ theorem count_eq_length_filter {a : α} {l : List α} : count a l = (filter (·
|
||||
theorem count_tail : ∀ {l : List α} {a : α},
|
||||
l.tail.count a = l.count a - if l.head? == some a then 1 else 0
|
||||
| [], a => by simp
|
||||
| _ :: _, a => by simp [count_cons]
|
||||
| _ :: _, a => by
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [count_cons]
|
||||
|
||||
theorem count_le_length {a : α} {l : List α} : count a l ≤ l.length := countP_le_length
|
||||
|
||||
|
||||
@@ -97,6 +97,12 @@ theorem findSome?_eq_some_iff {f : α → Option β} {l : List α} {b : β} :
|
||||
obtain ⟨⟨rfl, rfl⟩, rfl⟩ := h₁
|
||||
exact ⟨l₁, a, l₂, rfl, h₂, fun a' w => h₃ a' (mem_cons_of_mem p w)⟩
|
||||
|
||||
theorem isSome_findSome? {xs : List α} {f : α → Option β} :
|
||||
(xs.findSome? f).isSome = xs.any (f · |>.isSome) := by
|
||||
rw [Bool.eq_iff_iff]
|
||||
simp only [Option.isSome_iff_ne_none, ne_eq, findSome?_eq_none_iff, Classical.not_forall]
|
||||
simp [← Option.isSome_iff_ne_none]
|
||||
|
||||
@[simp, grind =] theorem findSome?_guard {l : List α} : findSome? (Option.guard p) l = find? p l := by
|
||||
induction l with
|
||||
| nil => simp
|
||||
@@ -270,6 +276,11 @@ theorem find?_eq_some_iff_append :
|
||||
cases h₁
|
||||
simp
|
||||
|
||||
theorem isSome_find? {xs : List α} {f : α → Bool} :
|
||||
(xs.find? f).isSome = xs.any (f ·) := by
|
||||
rw [Bool.eq_iff_iff]
|
||||
simp [Option.isSome_iff_ne_none, ne_eq, find?_eq_none, Classical.not_forall]
|
||||
|
||||
@[simp]
|
||||
theorem find?_cons_eq_some : (a :: xs).find? p = some b ↔ (p a ∧ a = b) ∨ (!p a ∧ xs.find? p = some b) := by
|
||||
rw [find?_cons]
|
||||
@@ -654,6 +665,7 @@ theorem lt_findIdx_of_not {p : α → Bool} {xs : List α} {i : Nat} (h : i < xs
|
||||
simp only [Nat.not_lt] at f
|
||||
exact absurd (@findIdx_getElem _ p xs (Nat.lt_of_le_of_lt f h)) (h2 (xs.findIdx p) f)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
/-- `xs.findIdx p = i` iff `p xs[i]` and `¬ p xs [j]` for all `j < i`. -/
|
||||
theorem findIdx_eq {p : α → Bool} {xs : List α} {i : Nat} (h : i < xs.length) :
|
||||
xs.findIdx p = i ↔ p xs[i] ∧ ∀ j (hji : j < i), p (xs[j]'(Nat.lt_trans hji h)) = false := by
|
||||
@@ -1038,7 +1050,7 @@ theorem findFinIdx?_append {xs ys : List α} {p : α → Bool} :
|
||||
|
||||
@[simp, grind =] theorem findFinIdx?_singleton {a : α} {p : α → Bool} :
|
||||
[a].findFinIdx? p = if p a then some ⟨0, by simp⟩ else none := by
|
||||
simp [findFinIdx?_cons, findFinIdx?_nil]
|
||||
simp [findFinIdx?_cons, findFinIdx?_nil]; rfl
|
||||
|
||||
@[simp, grind =] theorem findFinIdx?_eq_none_iff {l : List α} {p : α → Bool} :
|
||||
l.findFinIdx? p = none ↔ ∀ x ∈ l, ¬ p x := by
|
||||
@@ -1080,6 +1092,7 @@ theorem isNone_findFinIdx? {l : List α} {p : α → Bool} :
|
||||
induction l with
|
||||
| nil => simp
|
||||
| cons a l ih =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [hf, findFinIdx?_cons]
|
||||
split <;> simp [ih, Function.comp_def]
|
||||
|
||||
|
||||
@@ -2542,6 +2542,9 @@ grind_pattern flatMap_reverse => l.reverse.flatMap f where
|
||||
⟨by rw [length_reverse, length_replicate],
|
||||
fun _ h => eq_of_mem_replicate (mem_reverse.1 h)⟩
|
||||
|
||||
theorem reverse_singleton {a : α} : [a].reverse = [a] := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem append_singleton_inj {as bs : List α} : as ++ [a] = bs ++ [b] ↔ as = bs ∧ a = b := by
|
||||
rw [← List.reverse_inj, And.comm]; simp
|
||||
@@ -3522,7 +3525,7 @@ theorem getElem?_insert_succ {l : List α} {a : α} {i : Nat} :
|
||||
· split
|
||||
· rfl
|
||||
· have h' : i - 1 < l.length := Nat.lt_of_le_of_lt (Nat.pred_le _) h
|
||||
simp [h']
|
||||
simp [h']; rfl
|
||||
|
||||
theorem head?_insert {l : List α} {a : α} :
|
||||
(l.insert a).head? = some (if h : a ∈ l then l.head (ne_nil_of_mem h) else a) := by
|
||||
@@ -3645,6 +3648,40 @@ theorem eraseDups_append [BEq α] [LawfulBEq α] {as bs : List α} :
|
||||
simp [removeAll_cons]
|
||||
termination_by as.length
|
||||
|
||||
/-- Loop invariant for `eraseDupsBy.loop`: membership in the result equals
|
||||
membership in the remaining list or the accumulator. -/
|
||||
private theorem mem_eraseDupsBy_loop [BEq α] [LawfulBEq α] {a : α} {l acc : List α} :
|
||||
a ∈ eraseDupsBy.loop (· == ·) l acc ↔ a ∈ l ∨ a ∈ acc := by
|
||||
induction l generalizing acc with
|
||||
| nil => simp [eraseDupsBy.loop]
|
||||
| cons x xs ih =>
|
||||
unfold eraseDupsBy.loop; split
|
||||
· next h =>
|
||||
rw [ih]; simp only [mem_cons]
|
||||
apply Iff.intro (fun
|
||||
| .inl hxs => Or.inl (Or.inr hxs)
|
||||
| .inr hacc => Or.inr hacc) (fun
|
||||
| .inl (.inl rfl) =>
|
||||
have ⟨y, hy, heq⟩ := any_eq_true.mp h
|
||||
.inr (LawfulBEq.eq_of_beq heq ▸ hy)
|
||||
| .inl (.inr hxs) => .inl hxs
|
||||
| .inr hacc => .inr hacc)
|
||||
· rw [ih]; simp only [mem_cons]
|
||||
apply Iff.intro (fun
|
||||
| .inl hxs => Or.inl (Or.inr hxs)
|
||||
| .inr (.inl rfl) => Or.inl (Or.inl rfl)
|
||||
| .inr (.inr hacc) => Or.inr hacc) (fun
|
||||
| .inl (.inl rfl) => Or.inr (Or.inl rfl)
|
||||
| .inl (.inr hxs) => .inl hxs
|
||||
| .inr hacc => Or.inr (Or.inr hacc))
|
||||
|
||||
/-- Membership is preserved by `eraseDups`: an element is in the deduplicated list
|
||||
iff it was in the original list. -/
|
||||
@[simp]
|
||||
theorem mem_eraseDups [BEq α] [LawfulBEq α] {a : α} {l : List α} :
|
||||
a ∈ l.eraseDups ↔ a ∈ l := by
|
||||
simp only [eraseDups, eraseDupsBy, mem_eraseDupsBy_loop, not_mem_nil, or_false]
|
||||
|
||||
/-! ### Legacy lemmas about `get`, `get?`, and `get!`.
|
||||
|
||||
Hopefully these should not be needed, in favour of lemmas about `xs[i]`, `xs[i]?`, and `xs[i]!`,
|
||||
@@ -3676,11 +3713,13 @@ theorem get_of_eq {l l' : List α} (h : l = l') (i : Fin l.length) :
|
||||
theorem getElem!_nil [Inhabited α] {n : Nat} : ([] : List α)[n]! = default := rfl
|
||||
|
||||
theorem getElem!_cons_zero [Inhabited α] {l : List α} : (a::l)[0]! = a := by
|
||||
rw [getElem!_pos] <;> simp
|
||||
rw [getElem!_pos]; rfl; simp
|
||||
|
||||
theorem getElem!_cons_succ [Inhabited α] {l : List α} : (a::l)[i+1]! = l[i]! := by
|
||||
by_cases h : i < l.length
|
||||
· rw [getElem!_pos, getElem!_pos] <;> simp_all [Nat.succ_lt_succ_iff]
|
||||
· rw [getElem!_pos, getElem!_pos]
|
||||
· rfl
|
||||
· simp; apply Nat.succ_lt_succ; assumption
|
||||
· rw [getElem!_neg, getElem!_neg] <;> simp_all [Nat.succ_lt_succ_iff]
|
||||
|
||||
theorem getElem!_of_getElem? [Inhabited α] : ∀ {l : List α} {i : Nat}, l[i]? = some a → l[i]! = a
|
||||
|
||||
@@ -350,6 +350,7 @@ theorem getElem?_mapIdx_go : ∀ {l : List α} {acc : Array β} {i : Nat},
|
||||
| [], acc, i => by
|
||||
simp only [mapIdx.go, getElem?_def, Array.length_toList,
|
||||
← Array.getElem_toList, length_nil, Nat.not_lt_zero, ↓reduceDIte, Option.map_none]
|
||||
rfl
|
||||
| a :: l, acc, i => by
|
||||
rw [mapIdx.go, getElem?_mapIdx_go]
|
||||
simp only [Array.size_push]
|
||||
|
||||
@@ -410,6 +410,7 @@ private theorem minIdxOn_append_aux [LE β] [DecidableLE β]
|
||||
match xs with
|
||||
| [] => simp [minIdxOn_cons_aux (xs := ys) ‹_›]
|
||||
| z :: zs =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp +singlePass only [cons_append]
|
||||
simp only [minIdxOn_cons_aux (xs := z :: zs ++ ys) (by simp), ih (by simp),
|
||||
minIdxOn_cons_aux (xs := z :: zs) (by simp), combineMinIdxOn_assoc]
|
||||
|
||||
@@ -25,6 +25,7 @@ namespace List
|
||||
|
||||
open Nat
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[grind =]
|
||||
theorem countP_set {p : α → Bool} {l : List α} {i : Nat} {a : α} (h : i < l.length) :
|
||||
(l.set i a).countP p = l.countP p - (if p l[i] then 1 else 0) + (if p a then 1 else 0) := by
|
||||
|
||||
@@ -53,10 +53,12 @@ theorem sublist_eq_map_getElem {l l' : List α} (h : l' <+ l) : ∃ is : List (F
|
||||
| cons _ _ IH =>
|
||||
let ⟨is, IH⟩ := IH
|
||||
refine ⟨is.map (·.succ), ?_⟩
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simpa [Function.comp_def, pairwise_map]
|
||||
| cons₂ _ _ IH =>
|
||||
rcases IH with ⟨is,IH⟩
|
||||
refine ⟨⟨0, by simp [Nat.zero_lt_succ]⟩ :: is.map (·.succ), ?_⟩
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [Function.comp_def, pairwise_map, IH, ← get_eq_getElem, get_cons_zero, get_cons_succ']
|
||||
|
||||
set_option linter.listVariables false in
|
||||
|
||||
@@ -387,6 +387,22 @@ theorem drop_take : ∀ {i j : Nat} {l : List α}, drop i (take j l) = take (j -
|
||||
rw [drop_take]
|
||||
simp
|
||||
|
||||
set_option doc.verso true in
|
||||
/--
|
||||
This lemma will be renamed to {lit}`List.extract_eq_drop_take` as soon as the current deprecated
|
||||
lemma {name}`List.extract_eq_drop_take` has been removed.
|
||||
-/
|
||||
theorem extract_eq_drop_take' {l : List α} {start stop : Nat} :
|
||||
l.extract start stop = (l.take stop).drop start := by
|
||||
simp only [take_drop]
|
||||
by_cases start ≤ stop
|
||||
· rw [add_sub_of_le ‹_›]
|
||||
· have h₁ : stop - start = 0 := by omega
|
||||
have h₂ : min stop l.length ≤ stop := by omega
|
||||
simp only [Nat.add_zero, List.drop_take_self, List.nil_eq, List.drop_eq_nil_iff,
|
||||
List.length_take, ge_iff_le, h₁]
|
||||
omega
|
||||
|
||||
@[simp]
|
||||
theorem drop_eq_drop_iff :
|
||||
∀ {l : List α} {i j : Nat}, l.drop i = l.drop j ↔ min i l.length = min j l.length
|
||||
|
||||
@@ -98,7 +98,9 @@ theorem ofFn_add {n m} {f : Fin (n + m) → α} :
|
||||
ofFn f = (ofFn fun i => f (i.castLE (Nat.le_add_right n m))) ++ (ofFn fun i => f (i.natAdd n)) := by
|
||||
induction m with
|
||||
| zero => simp
|
||||
| succ m ih => simp [-ofFn_succ, ofFn_succ_last, ih]
|
||||
| succ m ih =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [-ofFn_succ, ofFn_succ_last, ih]
|
||||
|
||||
@[simp]
|
||||
theorem ofFn_eq_nil_iff {f : Fin n → α} : ofFn f = [] ↔ n = 0 := by
|
||||
@@ -154,8 +156,9 @@ theorem ofFnM_add {n m} [Monad m] [LawfulMonad m] {f : Fin (n + k) → m α} :
|
||||
pure (as ++ bs)) := by
|
||||
induction k with
|
||||
| zero => simp
|
||||
| succ k ih => simp [ofFnM_succ_last, ih]
|
||||
|
||||
| succ k ih =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [ofFnM_succ_last, ih]
|
||||
|
||||
end List
|
||||
|
||||
|
||||
10
src/Init/Data/List/Scan.lean
Normal file
10
src/Init/Data/List/Scan.lean
Normal file
@@ -0,0 +1,10 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Paul Reichert
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.List.Scan.Basic
|
||||
public import Init.Data.List.Scan.Lemmas
|
||||
62
src/Init/Data/List/Scan/Basic.lean
Normal file
62
src/Init/Data/List/Scan/Basic.lean
Normal file
@@ -0,0 +1,62 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Mario Carneiro, Chad Sharp
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.List.Basic
|
||||
public import Init.Control.Id
|
||||
|
||||
public section
|
||||
|
||||
namespace List
|
||||
|
||||
/-- Tail-recursive helper function for `scanlM` and `scanrM` -/
|
||||
@[inline]
|
||||
private def scanAuxM [Monad m] (f : β → α → m β) (init : β) (l : List α) : m (List β) :=
|
||||
go l init []
|
||||
where
|
||||
/-- Auxiliary for `scanAuxM` -/
|
||||
@[specialize] go : List α → β → List β → m (List β)
|
||||
| [], last, acc => pure <| last :: acc
|
||||
| x :: xs, last, acc => do go xs (← f last x) (last :: acc)
|
||||
|
||||
/--
|
||||
Folds a monadic function over a list from the left, accumulating partial results starting with
|
||||
`init`. The accumulated values are combined with the each element of the list in order, using `f`.
|
||||
-/
|
||||
@[inline]
|
||||
def scanlM [Monad m] (f : β → α → m β) (init : β) (l : List α) : m (List β) :=
|
||||
List.reverse <$> scanAuxM f init l
|
||||
|
||||
/--
|
||||
Folds a monadic function over a list from the right, accumulating partial results starting with
|
||||
`init`. The accumulated values are combined with the each element of the list in order, using `f`.
|
||||
-/
|
||||
@[inline]
|
||||
def scanrM [Monad m] (f : α → β → m β) (init : β) (xs : List α) : m (List β) :=
|
||||
scanAuxM (flip f) init xs.reverse
|
||||
|
||||
/--
|
||||
Fold a function `f` over the list from the left, returning the list of partial results.
|
||||
```
|
||||
scanl (+) 0 [1, 2, 3] = [0, 1, 3, 6]
|
||||
```
|
||||
-/
|
||||
@[inline]
|
||||
def scanl (f : β → α → β) (init : β) (as : List α) : List β :=
|
||||
Id.run <| as.scanlM (pure <| f · ·) init
|
||||
|
||||
/--
|
||||
Fold a function `f` over the list from the right, returning the list of partial results.
|
||||
```
|
||||
scanr (+) 0 [1, 2, 3] = [6, 5, 3, 0]
|
||||
```
|
||||
-/
|
||||
@[inline]
|
||||
def scanr (f : α → β → β) (init : β) (as : List α) : List β :=
|
||||
Id.run <| as.scanrM (pure <| f · ·) init
|
||||
|
||||
end List
|
||||
339
src/Init/Data/List/Scan/Lemmas.lean
Normal file
339
src/Init/Data/List/Scan/Lemmas.lean
Normal file
@@ -0,0 +1,339 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Parikshit Khanna, Jeremy Avigad, Leonardo de Moura, Floris van Doorn, Mario Carneiro, Chad Sharp
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.List.Scan.Basic
|
||||
public import Init.Data.List.Lemmas
|
||||
import all Init.Data.List.Scan.Basic
|
||||
import Init.Data.List.TakeDrop
|
||||
import Init.Data.Option.Lemmas
|
||||
import Init.Data.Nat.Lemmas
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# List scan
|
||||
|
||||
Prove basic results about `List.scanl`, `List.scanr`, `List.scanlM` and `List.scanrM`.
|
||||
-/
|
||||
|
||||
namespace List
|
||||
|
||||
/-! ### `List.scanlM` and `List.scanrM` -/
|
||||
|
||||
@[local simp]
|
||||
private theorem scanAuxM.go_eq_append_map [Monad m] [LawfulMonad m] {f : α → β → m α} :
|
||||
go f xs last acc = (· ++ acc) <$> scanAuxM f last xs := by
|
||||
unfold scanAuxM
|
||||
induction xs generalizing last acc with
|
||||
| nil => simp [scanAuxM.go]
|
||||
| cons _ _ ih => simp [scanAuxM.go, ih (acc := last :: acc), ih (acc := [last])]
|
||||
|
||||
private theorem scanAuxM_nil [Monad m] {f : α → β → m α} :
|
||||
scanAuxM f init [] = return [init] := rfl
|
||||
|
||||
private theorem scanAuxM_cons [Monad m] [LawfulMonad m] {f : α → β → m α} :
|
||||
scanAuxM f init (x :: xs) = return (← scanAuxM f (← f init x) xs) ++ [init] := by
|
||||
rw [scanAuxM, scanAuxM.go]
|
||||
simp
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanlM_nil [Monad m] [LawfulMonad m] {f : α → β → m α} :
|
||||
scanlM f init [] = return [init] := by
|
||||
simp [scanlM, scanAuxM_nil]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanlM_cons [Monad m] [LawfulMonad m] {f : α → β → m α} :
|
||||
scanlM f init (x :: xs) = return init :: (← scanlM f (← f init x) xs) := by
|
||||
simp [scanlM, scanAuxM_cons]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanrM_concat [Monad m] [LawfulMonad m] {f : α → β → m β} :
|
||||
scanrM f init (xs ++ [x]) = return (← scanrM f (← f x init) xs) ++ [init] := by
|
||||
simp [scanrM, flip, scanAuxM_cons]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanrM_nil [Monad m] {f : α → β → m β} :
|
||||
scanrM f init [] = return [init] :=
|
||||
(rfl)
|
||||
|
||||
theorem scanlM_eq_scanrM_reverse [Monad m] {f : β → α → m β} :
|
||||
scanlM f init as = reverse <$> (scanrM (flip f) init as.reverse) := by
|
||||
simp only [scanrM, reverse_reverse]
|
||||
rfl
|
||||
|
||||
theorem scanrM_eq_scanlM_reverse [Monad m] [LawfulMonad m] {f : α → β → m β} :
|
||||
scanrM f init as = reverse <$> (scanlM (flip f) init as.reverse) := by
|
||||
simp only [scanlM_eq_scanrM_reverse, reverse_reverse, id_map', Functor.map_map]
|
||||
rfl
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanrM_reverse [Monad m] [LawfulMonad m] {f : α → β → m β} :
|
||||
scanrM f init as.reverse = reverse <$> (scanlM (flip f) init as) := by
|
||||
simp [scanrM_eq_scanlM_reverse (as := as.reverse)]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanlM_reverse [Monad m] {f : β → α → m β} :
|
||||
scanlM f init as.reverse = reverse <$> (scanrM (flip f) init as) := by
|
||||
simp [scanlM_eq_scanrM_reverse (as := as.reverse)]
|
||||
|
||||
theorem scanlM_pure [Monad m] [LawfulMonad m] {f: β → α → β} {as : List α} :
|
||||
as.scanlM (m := m) (pure <| f · ·) init = pure (as.scanl f init) := by
|
||||
induction as generalizing init with simp_all [scanlM_cons, scanl]
|
||||
|
||||
theorem scanrM_pure [Monad m] [LawfulMonad m] {f : α → β → β} {as : List α} :
|
||||
as.scanrM (m := m) (pure <| f · · ) init = pure (as.scanr f init) := by
|
||||
simp only [scanrM_eq_scanlM_reverse]
|
||||
unfold flip
|
||||
simp only [scanlM_pure, map_pure, scanr, scanrM_eq_scanlM_reverse]
|
||||
rfl
|
||||
|
||||
theorem idRun_scanlM {f : β → α → Id β} {as : List α} :
|
||||
(as.scanlM f init).run = as.scanl (f · · |>.run) init :=
|
||||
scanlM_pure
|
||||
|
||||
theorem idRun_scanrM {f : α → β → Id β} {as : List α} :
|
||||
(as.scanrM f init).run = as.scanr (f · · |>.run) init :=
|
||||
scanrM_pure
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanlM_map [Monad m] [LawfulMonad m]
|
||||
{f : α₁ → α₂} {g: β → α₂ → m β} {as : List α₁} :
|
||||
(as.map f).scanlM g init = as.scanlM (g · <| f ·) init := by
|
||||
induction as generalizing g init with simp [*]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanrM_map [Monad m] [LawfulMonad m]
|
||||
{f : α₁ → α₂} {g: α₂ → β → m β} {as : List α₁} :
|
||||
(as.map f).scanrM g init = as.scanrM (fun a b => g (f a) b) init := by
|
||||
simp only [← map_reverse, scanlM_map, scanrM_eq_scanlM_reverse]
|
||||
rfl
|
||||
|
||||
/-! ### `List.scanl` and `List.scanr` -/
|
||||
|
||||
@[simp]
|
||||
theorem length_scanl {f : β → α → β} : (scanl f init as).length = as.length + 1 := by
|
||||
induction as generalizing init <;> simp_all [scanl, pure, bind, Id.run]
|
||||
|
||||
grind_pattern length_scanl => scanl f init as
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanl_nil {f : β → α → β} : scanl f init [] = [init] := by simp [scanl]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanl_cons {f : β → α → β} : scanl f b (a :: l) = b :: scanl f (f b a) l := by
|
||||
simp [scanl]
|
||||
|
||||
theorem scanl_singleton {f : β → α → β} : scanl f b [a] = [b, f b a] := by simp
|
||||
|
||||
@[simp]
|
||||
theorem scanl_ne_nil {f : β → α → β} : scanl f b l ≠ [] := by
|
||||
cases l <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem scanl_iff_nil {f : β → α → β} (c : β) : scanl f b l = [c] ↔ c = b ∧ l = [] := by
|
||||
cases l
|
||||
· simp [eq_comm]
|
||||
· simp
|
||||
|
||||
@[simp, grind =]
|
||||
theorem getElem_scanl {f : α → β → α} (h : i < (scanl f a l).length) :
|
||||
(scanl f a l)[i] = foldl f a (l.take i) := by
|
||||
induction l generalizing a i
|
||||
· simp
|
||||
· cases i <;> simp [*]
|
||||
|
||||
@[grind =]
|
||||
theorem getElem?_scanl {f : α → β → α} :
|
||||
(scanl f a l)[i]? = if i ≤ l.length then some (foldl f a (l.take i)) else none := by
|
||||
split
|
||||
· rw [getElem?_pos _ _ (by simpa using Nat.lt_add_one_iff.mpr ‹_›), getElem_scanl]
|
||||
· rw [getElem?_neg]
|
||||
simpa only [length_scanl, Nat.lt_add_one_iff]
|
||||
|
||||
@[grind _=_]
|
||||
theorem take_scanl {f : β → α → β} (init : β) (as : List α) (i : Nat) :
|
||||
(scanl f init as).take (i + 1) = scanl f init (as.take i) := by
|
||||
induction as generalizing init i
|
||||
· simp
|
||||
· cases i
|
||||
· simp
|
||||
· simp [*]
|
||||
|
||||
theorem getElem?_scanl_zero {f : β → α → β} : (scanl f b l)[0]? = some b := by
|
||||
simp
|
||||
|
||||
theorem getElem_scanl_zero {f : β → α → β} : (scanl f b l)[0] = b := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem head_scanl {f : β → α → β} (h : scanl f b l ≠ []) : (scanl f b l).head h = b := by
|
||||
simp [head_eq_getElem]
|
||||
|
||||
@[simp]
|
||||
theorem head?_scanl {f : β → α → β} : (scanl f b l).head? = some b := by
|
||||
simp [head?_eq_getElem?]
|
||||
|
||||
theorem getLast_scanl {f : β → α → β} (h : scanl f b l ≠ []) :
|
||||
(scanl f b l).getLast h = foldl f b l := by
|
||||
simp [getLast_eq_getElem]
|
||||
|
||||
theorem getLast?_scanl {f : β → α → β} : (scanl f b l).getLast? = some (foldl f b l) := by
|
||||
simp [getLast?_eq_getElem?]
|
||||
|
||||
@[grind =]
|
||||
theorem tail_scanl {f : β → α → β} (h : 0 < l.length) :
|
||||
(scanl f b l).tail = scanl f (f b (l.head (ne_nil_of_length_pos h))) l.tail := by
|
||||
induction l
|
||||
· simp at h
|
||||
· simp
|
||||
|
||||
theorem getElem?_succ_scanl {f : β → α → β} :
|
||||
(scanl f b l)[i + 1]? = (scanl f b l)[i]?.bind fun x => l[i]?.map fun y => f x y := by
|
||||
simp only [getElem?_scanl, take_add_one]
|
||||
split
|
||||
· have : i < l.length := Nat.add_one_le_iff.mp ‹_›
|
||||
have : i ≤ l.length := Nat.le_of_lt ‹_›
|
||||
simp [*, - take_append_getElem]
|
||||
· split
|
||||
· apply Eq.symm
|
||||
simpa using Nat.lt_add_one_iff.mp (Nat.not_le.mp ‹_›)
|
||||
· simp
|
||||
|
||||
theorem getElem_succ_scanl {f : β → α → β} (h : i + 1 < (scanl f b l).length) :
|
||||
(scanl f b l)[i + 1] = f ((l.scanl f b)[i]'(Nat.lt_trans (Nat.lt_add_one _) h)) (l[i]'(by simpa using h)) := by
|
||||
simp only [length_scanl, Nat.add_lt_add_iff_right] at h
|
||||
simp [take_add_one, *, - take_append_getElem]
|
||||
|
||||
@[grind =]
|
||||
theorem scanl_append {f : β → α → β} {l₁ l₂ : List α} :
|
||||
scanl f b (l₁ ++ l₂) = scanl f b l₁ ++ (scanl f (foldl f b l₁) l₂).tail := by
|
||||
induction l₁ generalizing b
|
||||
case nil => cases l₂ <;> simp
|
||||
case cons head tail ih => simp [ih]
|
||||
|
||||
@[grind =]
|
||||
theorem scanl_map {f : β → γ → β} {g : α → γ} {as : List α} :
|
||||
scanl f init (as.map g) = scanl (fun acc x => f acc (g x)) init as := by
|
||||
induction as generalizing init with simp [*]
|
||||
|
||||
theorem scanl_eq_scanr_reverse {f : β → α → β} :
|
||||
scanl f init as = reverse (scanr (flip f) init as.reverse) := by
|
||||
simp only [scanl, scanr, Id.run, scanrM_reverse, Functor.map, reverse_reverse]
|
||||
rfl
|
||||
|
||||
theorem scanr_eq_scanl_reverse {f : α → β → β} :
|
||||
scanr f init as = reverse (scanl (flip f) init as.reverse) := by
|
||||
simp only [scanl_eq_scanr_reverse, reverse_reverse]
|
||||
rfl
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanl_reverse {f : β → α → β} {as : List α} :
|
||||
scanl f init as.reverse = reverse (scanr (flip f) init as) := by
|
||||
simp [scanr_eq_scanl_reverse]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanr_reverse {f : α → β → β} {as : List α} :
|
||||
scanr f init as.reverse = reverse (scanl (flip f) init as) := by
|
||||
simp [scanl_eq_scanr_reverse]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanr_nil {f : α → β → β} : scanr f init [] = [init] := by simp [scanr]
|
||||
|
||||
@[simp, grind =]
|
||||
theorem scanr_cons {f : α → β → β} :
|
||||
scanr f b (a :: l) = foldr f b (a :: l) :: scanr f b l := by
|
||||
simp [scanr_eq_scanl_reverse, reverse_cons, scanl_append, flip, - scanl_reverse]
|
||||
|
||||
@[simp]
|
||||
theorem scanr_ne_nil {f : α → β → β} : scanr f b l ≠ [] := by
|
||||
simp [scanr_eq_scanl_reverse, - scanl_reverse]
|
||||
|
||||
theorem scanr_singleton {f : α → β → β} : scanr f b [a] = [f a b, b] := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem length_scanr {f : α → β → β} {as : List α} :
|
||||
length (scanr f init as) = as.length + 1 := by
|
||||
simp [scanr_eq_scanl_reverse, - scanl_reverse]
|
||||
|
||||
grind_pattern length_scanr => scanr f init as
|
||||
|
||||
@[simp]
|
||||
theorem scanr_iff_nil {f : α → β → β} (c : β) : scanr f b l = [c] ↔ c = b ∧ l = [] := by
|
||||
simp [scanr_eq_scanl_reverse, - scanl_reverse]
|
||||
|
||||
@[grind =]
|
||||
theorem scanr_append {f : α → β → β} (l₁ l₂ : List α) :
|
||||
scanr f b (l₁ ++ l₂) = (scanr f (foldr f b l₂) l₁) ++ (scanr f b l₂).tail := by
|
||||
induction l₁ <;> induction l₂ <;> simp [*]
|
||||
|
||||
@[simp]
|
||||
theorem head_scanr {f : α → β → β} (h : scanr f b l ≠ []) :
|
||||
(scanr f b l).head h = foldr f b l := by
|
||||
simp [scanr_eq_scanl_reverse, - scanl_reverse, getLast_scanl, flip]
|
||||
|
||||
@[grind =]
|
||||
theorem getLast_scanr {f : α → β → β} (h : scanr f b l ≠ []) :
|
||||
(scanr f b l).getLast h = b := by
|
||||
simp [scanr_eq_scanl_reverse, - scanl_reverse]
|
||||
|
||||
theorem getLast?_scanr {f : α → β → β} : (scanr f b l).getLast? = some b := by
|
||||
simp [scanr_eq_scanl_reverse, - scanl_reverse]
|
||||
|
||||
@[grind =]
|
||||
theorem tail_scanr {f : α → β → β} (h : 0 < l.length) :
|
||||
(scanr f b l).tail = scanr f b l.tail := by
|
||||
induction l with simp_all
|
||||
|
||||
@[grind _=_]
|
||||
theorem drop_scanr {f : α → β → β} (h : i ≤ l.length) :
|
||||
(scanr f b l).drop i = scanr f b (l.drop i) := by
|
||||
induction i generalizing l
|
||||
· simp
|
||||
· rename_i i ih
|
||||
rw [drop_add_one_eq_tail_drop (i := i), drop_add_one_eq_tail_drop (i := i), ih]
|
||||
· rw [tail_scanr]
|
||||
simpa [length_drop, Nat.lt_sub_iff_add_lt]
|
||||
· exact Nat.le_of_lt (Nat.add_one_le_iff.mp ‹_›)
|
||||
|
||||
@[simp, grind =]
|
||||
theorem getElem_scanr {f : α → β → β} (h : i < (scanr f b l).length) :
|
||||
(scanr f b l)[i] = foldr f b (l.drop i) := by
|
||||
induction l generalizing b i
|
||||
· simp
|
||||
· cases i <;> simp [*]
|
||||
|
||||
@[grind =]
|
||||
theorem getElem?_scanr {f : α → β → β} :
|
||||
(scanr f b l)[i]? = if i < l.length + 1 then some (foldr f b (l.drop i)) else none := by
|
||||
split
|
||||
· rw [getElem?_pos _ _ (by simpa), getElem_scanr]
|
||||
· rename_i h
|
||||
simpa [getElem?_neg, length_scanr] using h
|
||||
|
||||
@[simp]
|
||||
theorem head?_scanr {f : α → β → β} : (scanr f b l).head? = some (foldr f b l) := by
|
||||
simp [head?_eq_getElem?]
|
||||
|
||||
theorem getElem_scanr_zero {f : α → β → β} : (scanr f b l)[0] = foldr f b l := by
|
||||
simp
|
||||
|
||||
theorem getElem?_scanr_zero {f : α → β → β} : (scanr f b l)[0]? = some (foldr f b l) := by
|
||||
simp
|
||||
|
||||
theorem getElem?_scanr_of_lt {f : α → β → β} (h : i < l.length + 1) :
|
||||
(scanr f b l)[i]? = some (foldr f b (l.drop i)) := by
|
||||
simp [h]
|
||||
|
||||
@[grind =]
|
||||
theorem scanr_map {f : α → β → β} {g : γ → α} (b : β) (l : List γ) :
|
||||
scanr f b (l.map g) = scanr (fun x acc => f (g x) acc) b l := by
|
||||
suffices ∀ l, foldr f b (l.map g) = foldr (fun x acc => f (g x) acc) b l from by
|
||||
induction l generalizing b with simp [*]
|
||||
intro l
|
||||
induction l with simp [*]
|
||||
@@ -64,6 +64,7 @@ def MergeSort.Internal.splitInTwo (l : { l : List α // l.length = n }) :
|
||||
|
||||
open MergeSort.Internal in
|
||||
set_option linter.unusedVariables false in
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
/--
|
||||
A stable merge sort.
|
||||
|
||||
|
||||
@@ -182,14 +182,14 @@ private theorem mergeSortTR_run_eq_mergeSort : {n : Nat} → (l : { l : List α
|
||||
simp only [mergeSortTR.run, mergeSortTR.run, mergeSort]
|
||||
rw [merge_eq_mergeTR]
|
||||
rw [mergeSortTR_run_eq_mergeSort, mergeSortTR_run_eq_mergeSort]
|
||||
rfl
|
||||
|
||||
-- We don't make this a `@[csimp]` lemma because `mergeSort_eq_mergeSortTR₂` is faster.
|
||||
theorem mergeSort_eq_mergeSortTR : @mergeSort = @mergeSortTR := by
|
||||
funext
|
||||
rw [mergeSortTR, mergeSortTR_run_eq_mergeSort]
|
||||
|
||||
-- This mutual block is unfortunately quite slow to elaborate.
|
||||
set_option maxHeartbeats 400000 in
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
mutual
|
||||
private theorem mergeSortTR₂_run_eq_mergeSort : {n : Nat} → (l : { l : List α // l.length = n }) → mergeSortTR₂.run le l = mergeSort l.1 le
|
||||
| 0, ⟨[], _⟩
|
||||
|
||||
@@ -268,7 +268,7 @@ theorem drop_eq_extract {l : List α} {k : Nat} :
|
||||
| 0 => simp
|
||||
| _ + 1 =>
|
||||
simp only [List.drop_succ_cons, List.length_cons, ih]
|
||||
simp only [List.extract_eq_drop_take, List.drop_succ_cons, Nat.succ_sub_succ]
|
||||
simp only [List.extract_eq_take_drop, List.drop_succ_cons, Nat.succ_sub_succ]
|
||||
|
||||
/-! ### takeWhile and dropWhile -/
|
||||
|
||||
|
||||
@@ -280,6 +280,7 @@ theorem findRevM?_toArray [Monad m] [LawfulMonad m] (f : α → m Bool) (l : Lis
|
||||
simp only [forIn_cons, find?]
|
||||
by_cases f a <;> simp_all
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
private theorem findFinIdx?_loop_toArray (w : l' = l.drop j) :
|
||||
Array.findFinIdx?.loop p l.toArray j = List.findFinIdx?.go p l l' j h := by
|
||||
unfold findFinIdx?.loop
|
||||
@@ -316,6 +317,7 @@ termination_by l.length - j
|
||||
rw [Array.findIdx?_eq_map_findFinIdx?_val, findIdx?_eq_map_findFinIdx?_val]
|
||||
simp [Array.size]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
private theorem idxAuxOf_toArray [BEq α] (a : α) (l : List α) (j : Nat) (w : l' = l.drop j) (h) :
|
||||
l.toArray.idxOfAux a j = findFinIdx?.go (fun x => x == a) l l' j h := by
|
||||
unfold idxOfAux
|
||||
@@ -361,6 +363,7 @@ termination_by l.length - j
|
||||
as.toArray.idxOf a = as.idxOf a := by
|
||||
rw [Array.idxOf, findIdx_toArray, idxOf]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem isPrefixOfAux_toArray_succ [BEq α] (l₁ l₂ : List α) (hle : l₁.length ≤ l₂.length) (i : Nat) :
|
||||
Array.isPrefixOfAux l₁.toArray l₂.toArray hle (i + 1) =
|
||||
Array.isPrefixOfAux l₁.tail.toArray l₂.tail.toArray (by simp; omega) i := by
|
||||
@@ -586,7 +589,7 @@ theorem flatMap_toArray_cons {β} (f : α → Array β) (a : α) (as : List α)
|
||||
@[simp, grind =] theorem swap_toArray (l : List α) (i j : Nat) {hi hj}:
|
||||
l.toArray.swap i j hi hj = ((l.set i l[j]).set j l[i]).toArray := by
|
||||
apply ext'
|
||||
simp
|
||||
simp; rfl
|
||||
|
||||
@[simp, grind =] theorem eraseIdx_toArray (l : List α) (i : Nat) (h : i < l.toArray.size) :
|
||||
l.toArray.eraseIdx i h = (l.eraseIdx i).toArray := by
|
||||
@@ -616,13 +619,13 @@ decreasing_by
|
||||
@[simp, grind =] theorem eraseP_toArray {as : List α} {p : α → Bool} :
|
||||
as.toArray.eraseP p = (as.eraseP p).toArray := by
|
||||
rw [Array.eraseP, List.eraseP_eq_eraseIdx, findFinIdx?_toArray]
|
||||
split <;> simp [*, findIdx?_eq_map_findFinIdx?_val]
|
||||
split <;> simp [*, findIdx?_eq_map_findFinIdx?_val] <;> rfl
|
||||
|
||||
@[simp, grind =] theorem erase_toArray [BEq α] {as : List α} {a : α} :
|
||||
as.toArray.erase a = (as.erase a).toArray := by
|
||||
rw [Array.erase, finIdxOf?_toArray, List.erase_eq_eraseIdx]
|
||||
rw [idxOf?_eq_map_finIdxOf?_val]
|
||||
split <;> simp_all
|
||||
split <;> simp_all <;> rfl
|
||||
|
||||
private theorem insertIdx_loop_toArray (i : Nat) (l : List α) (j : Nat) (hj : j < l.toArray.size) (h : i ≤ j) :
|
||||
insertIdx.loop i l.toArray ⟨j, hj⟩ = (l.take i ++ l[j] :: (l.take j).drop i ++ l.drop (j + 1)).toArray := by
|
||||
@@ -639,10 +642,10 @@ private theorem insertIdx_loop_toArray (i : Nat) (l : List α) (j : Nat) (hj : j
|
||||
getElem_set_self, take_set_of_le (j := j - 1) (by omega),
|
||||
take_set_of_le (j := j - 1) (by omega), take_eq_append_getElem_of_pos (by omega) hj,
|
||||
drop_append_of_le_length (by simp; omega)]
|
||||
simp only [append_assoc, cons_append, nil_append, append_cancel_right_eq]
|
||||
simp only [append_assoc, cons_append, nil_append]
|
||||
cases i with
|
||||
| zero => simp
|
||||
| succ i => rw [take_set_of_le (by omega)]
|
||||
| zero => simp; rfl
|
||||
| succ i => rw [take_set_of_le (by omega)]; rfl
|
||||
· simp only [Nat.not_lt] at h'
|
||||
have : i = j := by omega
|
||||
subst this
|
||||
|
||||
@@ -7,6 +7,7 @@ module
|
||||
|
||||
prelude
|
||||
public import Init.Prelude
|
||||
import Init.Data.List.Basic
|
||||
|
||||
public section
|
||||
|
||||
|
||||
@@ -26,3 +26,4 @@ public import Init.Data.Nat.Compare
|
||||
public import Init.Data.Nat.Simproc
|
||||
public import Init.Data.Nat.Fold
|
||||
public import Init.Data.Nat.Order
|
||||
public import Init.Data.Nat.ToString
|
||||
|
||||
@@ -400,6 +400,7 @@ theorem dfold_add
|
||||
induction m with
|
||||
| zero => simp; rfl
|
||||
| succ m ih =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [dfold_congr (Nat.add_assoc n m 1).symm, ih]
|
||||
|
||||
@[simp] theorem dfoldRev_zero
|
||||
@@ -434,7 +435,9 @@ theorem dfoldRev_add
|
||||
(dfoldRev m (α := fun i h => α (n + i)) (fun i h => f (n + i) (by omega)) init) := by
|
||||
induction m with
|
||||
| zero => simp; rfl
|
||||
| succ m ih => simp [← Nat.add_assoc, ih]
|
||||
| succ m ih =>
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
simp [← Nat.add_assoc, ih]
|
||||
|
||||
end Nat
|
||||
|
||||
|
||||
197
src/Init/Data/Nat/ToString.lean
Normal file
197
src/Init/Data/Nat/ToString.lean
Normal file
@@ -0,0 +1,197 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Marcus Rossel, Paul Reichert
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.Repr
|
||||
public import Init.Data.Char.Basic
|
||||
public import Init.Data.ToString.Basic
|
||||
public import Init.Data.String.Basic
|
||||
import Init.NotationExtra
|
||||
import all Init.Data.Repr
|
||||
import Init.Omega
|
||||
import Init.RCases
|
||||
import Init.Data.Nat.Lemmas
|
||||
import Init.Data.Nat.Bitwise
|
||||
import Init.Data.Nat.Simproc
|
||||
import Init.WFTactics
|
||||
import Init.Data.Char.Lemmas
|
||||
|
||||
public section
|
||||
|
||||
-- todo: lemmas about `ToString Nat` and `ToString Int`
|
||||
|
||||
namespace Nat
|
||||
|
||||
variable {b : Nat}
|
||||
|
||||
|
||||
@[simp]
|
||||
theorem isDigit_digitChar : n.digitChar.isDigit = decide (n < 10) :=
|
||||
match n with
|
||||
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 => by simp [digitChar]
|
||||
| _ + 10 => by
|
||||
simp only [digitChar, ↓reduceIte, Nat.reduceEqDiff]
|
||||
(repeat' split) <;> simp
|
||||
|
||||
private theorem isDigit_of_mem_toDigitsCore
|
||||
(hc : c ∈ cs → c.isDigit) (hb₁ : 0 < b) (hb₂ : b ≤ 10) (h : c ∈ toDigitsCore b fuel n cs) :
|
||||
c.isDigit := by
|
||||
induction fuel generalizing n cs with rw [toDigitsCore] at h
|
||||
| zero => exact hc h
|
||||
| succ _ ih =>
|
||||
split at h
|
||||
case' isFalse => apply ih (fun h => ?_) h
|
||||
all_goals
|
||||
cases h with
|
||||
| head => simp [Nat.lt_of_lt_of_le (mod_lt _ hb₁) hb₂]
|
||||
| tail _ hm => exact hc hm
|
||||
|
||||
theorem isDigit_of_mem_toDigits (hb₁ : 0 < b) (hb₂ : b ≤ 10) (hc : c ∈ toDigits b n) : c.isDigit :=
|
||||
isDigit_of_mem_toDigitsCore (fun _ => by contradiction) hb₁ hb₂ hc
|
||||
|
||||
private theorem toDigitsCore_of_lt_base (hb : n < b) (hf : n < fuel) :
|
||||
toDigitsCore b fuel n cs = n.digitChar :: cs := by
|
||||
unfold toDigitsCore
|
||||
split <;> simp_all [mod_eq_of_lt]
|
||||
|
||||
theorem toDigits_of_lt_base (h : n < b) : toDigits b n = [n.digitChar] :=
|
||||
toDigitsCore_of_lt_base h (lt_succ_self _)
|
||||
|
||||
@[simp, grind =]
|
||||
theorem toDigits_zero : (b : Nat) → toDigits b 0 = ['0']
|
||||
| 0 => rfl
|
||||
| _ + 1 => toDigits_of_lt_base (zero_lt_succ _)
|
||||
|
||||
private theorem toDigitsCore_append :
|
||||
toDigitsCore b fuel n cs₁ ++ cs₂ = toDigitsCore b fuel n (cs₁ ++ cs₂) := by
|
||||
induction fuel generalizing n cs₁ with simp only [toDigitsCore]
|
||||
| succ => split <;> simp_all
|
||||
|
||||
private theorem toDigitsCore_eq_toDigitsCore_nil_append :
|
||||
toDigitsCore b fuel n cs₁ = toDigitsCore b fuel n [] ++ cs₁ := by
|
||||
simp [toDigitsCore_append]
|
||||
|
||||
private theorem toDigitsCore_eq_of_lt_fuel (hb : 1 < b) (h₁ : n < fuel₁) (h₂ : n < fuel₂) :
|
||||
toDigitsCore b fuel₁ n cs = toDigitsCore b fuel₂ n cs := by
|
||||
cases fuel₁ <;> cases fuel₂ <;> try contradiction
|
||||
simp only [toDigitsCore, Nat.div_eq_zero_iff]
|
||||
split
|
||||
· simp
|
||||
· have := Nat.div_lt_self (by omega : 0 < n) hb
|
||||
exact toDigitsCore_eq_of_lt_fuel hb (by omega) (by omega)
|
||||
|
||||
private theorem toDigitsCore_toDigitsCore
|
||||
(hb : 1 < b) (hn : 0 < n) (hd : d < b) (hf : b * n + d < fuel) (hnf : n < nf) (hdf : d < df) :
|
||||
toDigitsCore b nf n (toDigitsCore b df d cs) = toDigitsCore b fuel (b * n + d) cs := by
|
||||
cases fuel with
|
||||
| zero => contradiction
|
||||
| succ fuel =>
|
||||
rw [toDigitsCore]
|
||||
split
|
||||
case isTrue h =>
|
||||
have : b ≤ b * n + d := Nat.le_trans (Nat.le_mul_of_pos_right _ hn) (le_add_right _ _)
|
||||
cases Nat.div_eq_zero_iff.mp h <;> omega
|
||||
case isFalse =>
|
||||
have h : (b * n + d) / b = n := by
|
||||
rw [mul_add_div (by omega), Nat.div_eq_zero_iff.mpr (.inr hd), Nat.add_zero]
|
||||
have := (Nat.lt_mul_iff_one_lt_left hn).mpr hb
|
||||
simp only [toDigitsCore_of_lt_base hd hdf, mul_add_mod_self_left, mod_eq_of_lt hd, h]
|
||||
apply toDigitsCore_eq_of_lt_fuel hb hnf (by omega)
|
||||
|
||||
theorem toDigits_append_toDigits (hb : 1 < b) (hn : 0 < n) (hd : d < b) :
|
||||
(toDigits b n) ++ (toDigits b d) = toDigits b (b * n + d) := by
|
||||
rw [toDigits, toDigitsCore_append]
|
||||
exact toDigitsCore_toDigitsCore hb hn hd (lt_succ_self _) (lt_succ_self _) (lt_succ_self _)
|
||||
|
||||
theorem toDigits_of_base_le (hb : 1 < b) (h : b ≤ n) :
|
||||
toDigits b n = toDigits b (n / b) ++ [digitChar (n % b)] := by
|
||||
have := Nat.div_add_mod n b
|
||||
rw (occs := [1]) [← Nat.div_add_mod n b,
|
||||
← toDigits_append_toDigits (by omega) (Nat.div_pos_iff.mpr (by omega)) (Nat.mod_lt n (by omega))]
|
||||
rw [toDigits_of_lt_base (n := n % b) (Nat.mod_lt n (by omega))]
|
||||
|
||||
theorem toDigits_eq_if (hb : 1 < b) :
|
||||
toDigits b n = if n < b then [digitChar n] else toDigits b (n / b) ++ [digitChar (n % b)] := by
|
||||
split
|
||||
· rw [toDigits_of_lt_base ‹_›]
|
||||
· rw [toDigits_of_base_le hb (by omega)]
|
||||
|
||||
theorem length_toDigits_pos {b n : Nat} :
|
||||
0 < (Nat.toDigits b n).length := by
|
||||
simp [toDigits]
|
||||
rw [toDigitsCore]
|
||||
split
|
||||
· simp
|
||||
· rw [toDigitsCore_eq_toDigitsCore_nil_append]
|
||||
simp
|
||||
|
||||
theorem length_toDigits_le_iff {n k : Nat} (hb : 1 < b) (h : 0 < k) :
|
||||
(Nat.toDigits b n).length ≤ k ↔ n < b ^ k := by
|
||||
match k with
|
||||
| 0 => contradiction
|
||||
| k + 1 =>
|
||||
induction k generalizing n
|
||||
· rw [toDigits_eq_if hb]
|
||||
split <;> simp [*, length_toDigits_pos, ← Nat.pos_iff_ne_zero, - List.length_eq_zero_iff]
|
||||
· rename_i ih
|
||||
rw [toDigits_eq_if hb]
|
||||
split
|
||||
· rename_i hlt
|
||||
simp [Nat.pow_add]
|
||||
refine Nat.lt_of_lt_of_le hlt ?_
|
||||
apply Nat.le_mul_of_pos_left
|
||||
apply Nat.mul_pos
|
||||
· apply Nat.pow_pos
|
||||
omega
|
||||
· omega
|
||||
· simp [ih (n := n / b) (by omega), Nat.div_lt_iff_lt_mul (k := b) (by omega), Nat.pow_add]
|
||||
|
||||
theorem repr_eq_ofList_toDigits {n : Nat} :
|
||||
n.repr = .ofList (toDigits 10 n) :=
|
||||
(rfl)
|
||||
|
||||
theorem toString_eq_ofList_toDigits {n : Nat} :
|
||||
toString n = .ofList (toDigits 10 n) :=
|
||||
(rfl)
|
||||
|
||||
@[simp, grind norm]
|
||||
theorem toString_eq_repr {n : Nat} :
|
||||
toString n = n.repr :=
|
||||
(rfl)
|
||||
|
||||
@[simp, grind norm]
|
||||
theorem reprPrec_eq_repr {n i : Nat} :
|
||||
reprPrec n i = n.repr :=
|
||||
(rfl)
|
||||
|
||||
@[simp, grind norm]
|
||||
theorem repr_eq_repr {n : Nat} :
|
||||
repr n = n.repr :=
|
||||
(rfl)
|
||||
|
||||
theorem repr_of_lt {n : Nat} (h : n < 10) :
|
||||
n.repr = .singleton (digitChar n) := by
|
||||
rw [repr_eq_ofList_toDigits, toDigits_of_lt_base h, String.singleton_eq_ofList]
|
||||
|
||||
theorem repr_of_ge {n : Nat} (h : 10 ≤ n) :
|
||||
n.repr = (n / 10).repr ++ .singleton (digitChar (n % 10)) := by
|
||||
simp [repr_eq_ofList_toDigits, toDigits_of_base_le (by omega) h, String.singleton_eq_ofList,
|
||||
String.ofList_append]
|
||||
|
||||
theorem repr_eq_repr_append_repr {n : Nat} (h : 10 ≤ n) :
|
||||
n.repr = (n / 10).repr ++ (n % 10).repr := by
|
||||
rw [repr_of_ge h, repr_of_lt (n := n % 10) (by omega)]
|
||||
|
||||
theorem length_repr_pos {n : Nat} :
|
||||
0 < n.repr.length := by
|
||||
simpa [repr_eq_ofList_toDigits] using length_toDigits_pos
|
||||
|
||||
theorem length_repr_le_iff {n k : Nat} (h : 0 < k) :
|
||||
n.repr.length ≤ k ↔ n < 10 ^ k := by
|
||||
simpa [repr_eq_ofList_toDigits] using length_toDigits_le_iff (by omega) h
|
||||
|
||||
end Nat
|
||||
@@ -444,6 +444,7 @@ instance : MonadAttach Option where
|
||||
CanReturn x a := x = some a
|
||||
attach x := x.attach
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
public instance : LawfulMonadAttach Option where
|
||||
map_attach {α} x := by simp [MonadAttach.attach]
|
||||
canReturn_map_imp {α P x a} := by
|
||||
@@ -455,6 +456,7 @@ end Option
|
||||
|
||||
namespace OptionT
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
public instance [Monad m] [MonadAttach m] [LawfulMonad m] [WeaklyLawfulMonadAttach m] :
|
||||
WeaklyLawfulMonadAttach (OptionT m) where
|
||||
map_attach {α} x := by
|
||||
|
||||
@@ -744,7 +744,7 @@ theorem elim_guard : (guard p a).elim b f = if p a then f a else b := by
|
||||
cases h : p a <;> simp [*, guard]
|
||||
|
||||
@[simp]
|
||||
theorem Option.elim_map {f : α → β} {g' : γ} {g : β → γ} (o : Option α) :
|
||||
theorem elim_map {f : α → β} {g' : γ} {g : β → γ} (o : Option α) :
|
||||
(o.map f).elim g' g = o.elim g' (g ∘ f) := by
|
||||
cases o <;> simp
|
||||
|
||||
|
||||
@@ -794,6 +794,7 @@ automatically. If it fails, it is necessary to provide some of the fields manual
|
||||
@[expose]
|
||||
public def LinearOrderPackage.ofOrd (α : Type u)
|
||||
(args : Packages.LinearOrderOfOrdArgs α := by exact {}) : LinearOrderPackage α :=
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
letI := LinearPreorderPackage.ofOrd α args.toLinearPreorderOfOrdArgs
|
||||
haveI : LawfulEqOrd α := ⟨args.eq_of_compare _ _⟩
|
||||
letI : Min α := args.min
|
||||
|
||||
@@ -535,6 +535,14 @@ public theorem Rxc.Iterator.pairwise_toList_upwardEnumerableLt [LE α] [Decidabl
|
||||
· apply ihy (out := a)
|
||||
simp_all [Rxc.Iterator.isPlausibleStep_iff, Rxc.Iterator.step]
|
||||
|
||||
theorem Rxc.Iterator.nodup_toList [LE α] [DecidableLE α]
|
||||
[PRange.UpwardEnumerable α] [Rxc.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLE α]
|
||||
{it : Iter (α := Rxc.Iterator α) α} :
|
||||
it.toList.Nodup := by
|
||||
apply (Rxc.Iterator.pairwise_toList_upwardEnumerableLt it).imp
|
||||
apply PRange.UpwardEnumerable.ne_of_lt
|
||||
|
||||
public theorem Rxo.Iterator.pairwise_toList_upwardEnumerableLt [LT α] [DecidableLT α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
|
||||
[Rxo.IsAlwaysFinite α]
|
||||
@@ -558,6 +566,14 @@ public theorem Rxo.Iterator.pairwise_toList_upwardEnumerableLt [LT α] [Decidabl
|
||||
· apply ihy (out := a)
|
||||
simp_all [Rxo.Iterator.isPlausibleStep_iff, Rxo.Iterator.step]
|
||||
|
||||
theorem Rxo.Iterator.nodup_toList [LT α] [DecidableLT α]
|
||||
[PRange.UpwardEnumerable α] [Rxo.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLT α]
|
||||
{it : Iter (α := Rxo.Iterator α) α} :
|
||||
it.toList.Nodup := by
|
||||
apply (Rxo.Iterator.pairwise_toList_upwardEnumerableLt it).imp
|
||||
apply PRange.UpwardEnumerable.ne_of_lt
|
||||
|
||||
public theorem Rxi.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α]
|
||||
[Rxi.IsAlwaysFinite α]
|
||||
@@ -581,6 +597,13 @@ public theorem Rxi.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
· apply ihy (out := a)
|
||||
simp_all [Rxi.Iterator.isPlausibleStep_iff, Rxi.Iterator.step]
|
||||
|
||||
theorem Rxi.Iterator.nodup_toList
|
||||
[PRange.UpwardEnumerable α] [Rxi.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
{it : Iter (α := Rxi.Iterator α) α} :
|
||||
it.toList.Nodup := by
|
||||
apply (Rxi.Iterator.pairwise_toList_upwardEnumerableLt it).imp
|
||||
apply PRange.UpwardEnumerable.ne_of_lt
|
||||
|
||||
namespace Rcc
|
||||
|
||||
variable {r : Rcc α}
|
||||
@@ -658,6 +681,13 @@ public theorem pairwise_toList_upwardEnumerableLt [LE α] [DecidableLE α]
|
||||
rw [Internal.toList_eq_toList_iter]
|
||||
apply Rxc.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList [LE α] [DecidableLE α]
|
||||
[PRange.UpwardEnumerable α] [Rxc.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLE α]
|
||||
{a b : α} :
|
||||
(a...=b).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxc.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LE α] [DecidableLE α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLE α]
|
||||
[Rxc.IsAlwaysFinite α] :
|
||||
@@ -913,6 +943,13 @@ public theorem pairwise_toList_upwardEnumerableLt [LE α] [LT α] [DecidableLT
|
||||
rw [Internal.toList_eq_toList_iter]
|
||||
apply Rxo.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList [LT α] [DecidableLT α]
|
||||
[PRange.UpwardEnumerable α] [Rxo.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLT α]
|
||||
{a b : α} :
|
||||
(a...b).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxo.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LE α] [LT α] [DecidableLT α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
|
||||
[Rxo.IsAlwaysFinite α] :
|
||||
@@ -1124,6 +1161,11 @@ public theorem pairwise_toList_upwardEnumerableLt [LE α]
|
||||
rw [Internal.toList_eq_toList_iter]
|
||||
apply Rxi.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList
|
||||
[PRange.UpwardEnumerable α] [Rxi.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
{a : α} : (a...*).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxi.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LE α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [Rxi.IsAlwaysFinite α] :
|
||||
r.toList.Pairwise (fun a b => a ≠ b) :=
|
||||
@@ -1363,6 +1405,13 @@ public theorem pairwise_toList_upwardEnumerableLt [LE α] [DecidableLE α]
|
||||
rw [Internal.toList_eq_toList_iter]
|
||||
apply Rxc.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList [LE α] [DecidableLE α]
|
||||
[PRange.UpwardEnumerable α] [Rxc.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLE α]
|
||||
{a b : α} :
|
||||
(a<...=b).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxc.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LE α] [DecidableLE α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLE α]
|
||||
[Rxc.IsAlwaysFinite α] :
|
||||
@@ -1588,6 +1637,13 @@ public theorem pairwise_toList_upwardEnumerableLt [LT α] [DecidableLT α]
|
||||
rw [Internal.toList_eq_toList_iter]
|
||||
apply Rxo.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList [LT α] [DecidableLT α]
|
||||
[PRange.UpwardEnumerable α] [Rxo.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLT α]
|
||||
{a b : α} :
|
||||
(a<...b).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxo.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LT α] [DecidableLT α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
|
||||
[Rxo.IsAlwaysFinite α] :
|
||||
@@ -1823,6 +1879,11 @@ public theorem pairwise_toList_upwardEnumerableLt
|
||||
rw [Internal.toList_eq_toList_iter]
|
||||
apply Rxi.Iterator.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList
|
||||
[PRange.UpwardEnumerable α] [Rxi.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
{a : α} : (a<...*).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxi.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [Rxi.IsAlwaysFinite α] :
|
||||
r.toList.Pairwise (fun a b => a ≠ b) :=
|
||||
@@ -2072,6 +2133,13 @@ public theorem pairwise_toList_upwardEnumerableLt [LE α] [DecidableLE α] [Leas
|
||||
r.toList.Pairwise (fun a b => UpwardEnumerable.LT a b) := by
|
||||
simp [toList_eq_toList_rcc, Rcc.pairwise_toList_upwardEnumerableLt]
|
||||
|
||||
public theorem nodup_toList [LE α] [DecidableLE α] [Least? α]
|
||||
[PRange.UpwardEnumerable α] [Rxc.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLE α]
|
||||
{a : α} :
|
||||
(*...=a).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxc.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LE α] [DecidableLE α] [Least? α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLE α]
|
||||
[LawfulUpwardEnumerableLeast? α] [Rxc.IsAlwaysFinite α] :
|
||||
@@ -2395,6 +2463,13 @@ public theorem pairwise_toList_upwardEnumerableLt [LT α] [DecidableLT α] [Leas
|
||||
· exact Roo.pairwise_toList_upwardEnumerableLt
|
||||
· simp
|
||||
|
||||
public theorem nodup_toList [LT α] [DecidableLT α] [Least? α]
|
||||
[PRange.UpwardEnumerable α] [Rxo.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α]
|
||||
[PRange.LawfulUpwardEnumerableLT α]
|
||||
{a : α} :
|
||||
(*...a).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxo.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [LT α] [DecidableLT α] [Least? α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
|
||||
[LawfulUpwardEnumerableLeast? α] [Rxo.IsAlwaysFinite α] :
|
||||
@@ -2688,6 +2763,11 @@ public theorem pairwise_toList_upwardEnumerableLt [Least? α]
|
||||
· simp
|
||||
· exact Rci.pairwise_toList_upwardEnumerableLt
|
||||
|
||||
public theorem nodup_toList [Least? α]
|
||||
[PRange.UpwardEnumerable α] [Rxi.IsAlwaysFinite α] [PRange.LawfulUpwardEnumerable α] :
|
||||
(*...* : Std.Rii α).toList.Nodup := by
|
||||
simpa [Internal.toList_eq_toList_iter] using Std.Rxi.Iterator.nodup_toList
|
||||
|
||||
public theorem pairwise_toList_ne [Least? α]
|
||||
[UpwardEnumerable α] [LawfulUpwardEnumerable α]
|
||||
[LawfulUpwardEnumerableLeast? α] [Rxi.IsAlwaysFinite α] :
|
||||
|
||||
@@ -102,7 +102,7 @@ theorem Iterator.Monadic.isPlausibleStep_iff [UpwardEnumerable α] [LE α] [Deci
|
||||
theorem Iterator.Monadic.step_eq_step [UpwardEnumerable α] [LE α] [DecidableLE α]
|
||||
{it : IterM (α := Rxc.Iterator α) Id α} :
|
||||
Std.Iterator.step it = pure (.deflate ⟨Iterator.Monadic.step it, isPlausibleStep_iff.mpr rfl⟩) := by
|
||||
simp [Std.Iterator.step]
|
||||
simp [Std.Iterator.step]; rfl
|
||||
|
||||
theorem Iterator.isPlausibleStep_iff [UpwardEnumerable α] [LE α] [DecidableLE α]
|
||||
{it : Iter (α := Rxc.Iterator α) α} {step} :
|
||||
@@ -535,6 +535,7 @@ private theorem Iterator.instIteratorLoop.loop_eq_wf [UpwardEnumerable α] [LE
|
||||
· rw [WellFounded.fix_eq]
|
||||
simp_all
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
private theorem Iterator.instIteratorLoop.loopWf_eq [UpwardEnumerable α] [LE α] [DecidableLE α]
|
||||
[LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLE α]
|
||||
{n : Type u → Type w} [Monad n] [LawfulMonad n] (γ : Type u)
|
||||
@@ -586,6 +587,7 @@ termination_by IteratorLoop.WithWF.mk ⟨⟨some next, upperBound⟩⟩ acc (hwf
|
||||
decreasing_by
|
||||
simp [IteratorLoop.rel, Monadic.isPlausibleStep_iff, Monadic.step, *]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
instance Iterator.instLawfulIteratorLoop [UpwardEnumerable α] [LE α] [DecidableLE α]
|
||||
[LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLE α]
|
||||
{n : Type u → Type w} [Monad n] [LawfulMonad n] :
|
||||
@@ -678,7 +680,7 @@ theorem Iterator.Monadic.isPlausibleStep_iff [UpwardEnumerable α] [LT α] [Deci
|
||||
theorem Iterator.Monadic.step_eq_step [UpwardEnumerable α] [LT α] [DecidableLT α]
|
||||
{it : IterM (α := Rxo.Iterator α) Id α} :
|
||||
Std.Iterator.step it = pure (.deflate ⟨Iterator.Monadic.step it, isPlausibleStep_iff.mpr rfl⟩) := by
|
||||
simp [Std.Iterator.step]
|
||||
simp [Std.Iterator.step]; rfl
|
||||
|
||||
theorem Iterator.isPlausibleStep_iff [UpwardEnumerable α] [LT α] [DecidableLT α]
|
||||
{it : Iter (α := Rxo.Iterator α) α} {step} :
|
||||
@@ -1107,6 +1109,7 @@ private theorem Iterator.instIteratorLoop.loop_eq_wf [UpwardEnumerable α] [LT
|
||||
· rw [WellFounded.fix_eq]
|
||||
simp_all
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
private theorem Iterator.instIteratorLoop.loopWf_eq [UpwardEnumerable α] [LT α] [DecidableLT α]
|
||||
[LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
|
||||
{n : Type u → Type w} [Monad n] [LawfulMonad n] (γ : Type u)
|
||||
@@ -1158,6 +1161,7 @@ termination_by IteratorLoop.WithWF.mk ⟨⟨some next, upperBound⟩⟩ acc (hwf
|
||||
decreasing_by
|
||||
simp [IteratorLoop.rel, Monadic.isPlausibleStep_iff, Monadic.step, *]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
instance Iterator.instLawfulIteratorLoop [UpwardEnumerable α] [LT α] [DecidableLT α]
|
||||
[LawfulUpwardEnumerable α] [LawfulUpwardEnumerableLT α]
|
||||
{n : Type u → Type w} [Monad n] [LawfulMonad n] :
|
||||
@@ -1240,7 +1244,7 @@ theorem Iterator.Monadic.isPlausibleStep_iff [UpwardEnumerable α]
|
||||
theorem Iterator.Monadic.step_eq_step [UpwardEnumerable α]
|
||||
{it : IterM (α := Rxi.Iterator α) Id α} :
|
||||
it.step = pure (.deflate ⟨Iterator.Monadic.step it, isPlausibleStep_iff.mpr rfl⟩) := by
|
||||
simp [IterM.step, Std.Iterator.step]
|
||||
simp [IterM.step, Std.Iterator.step]; rfl
|
||||
|
||||
theorem Iterator.isPlausibleStep_iff [UpwardEnumerable α]
|
||||
{it : Iter (α := Rxi.Iterator α) α} {step} :
|
||||
|
||||
@@ -1296,12 +1296,12 @@ theorem ceil_lt {x : Rat} :
|
||||
-/
|
||||
|
||||
@[simp, grind =]
|
||||
theorem Rat.abs_zero :
|
||||
protected theorem abs_zero :
|
||||
(0 : Rat).abs = 0 := by
|
||||
simp [Rat.abs]
|
||||
|
||||
@[simp]
|
||||
theorem Rat.abs_nonneg {x : Rat} :
|
||||
protected theorem abs_nonneg {x : Rat} :
|
||||
0 ≤ x.abs := by
|
||||
simp only [Rat.abs]
|
||||
split <;> rename_i hle
|
||||
@@ -1310,11 +1310,11 @@ theorem Rat.abs_nonneg {x : Rat} :
|
||||
simp only [Rat.not_le] at hle
|
||||
rwa [Rat.lt_neg_iff, Rat.neg_zero]
|
||||
|
||||
theorem Rat.abs_of_nonneg {x : Rat} (h : 0 ≤ x) :
|
||||
protected theorem abs_of_nonneg {x : Rat} (h : 0 ≤ x) :
|
||||
x.abs = x := by
|
||||
rw [Rat.abs, if_pos h]
|
||||
|
||||
theorem Rat.abs_of_nonpos {x : Rat} (h : x ≤ 0) :
|
||||
protected theorem abs_of_nonpos {x : Rat} (h : x ≤ 0) :
|
||||
x.abs = -x := by
|
||||
rw [Rat.abs]
|
||||
split
|
||||
@@ -1322,7 +1322,7 @@ theorem Rat.abs_of_nonpos {x : Rat} (h : x ≤ 0) :
|
||||
· rfl
|
||||
|
||||
@[simp, grind =]
|
||||
theorem Rat.abs_neg {x : Rat} :
|
||||
protected theorem abs_neg {x : Rat} :
|
||||
(-x).abs = x.abs := by
|
||||
simp only [Rat.abs]
|
||||
split <;> split
|
||||
@@ -1337,12 +1337,12 @@ theorem Rat.abs_neg {x : Rat} :
|
||||
apply Rat.le_of_lt
|
||||
assumption
|
||||
|
||||
theorem Rat.abs_sub_comm {x y : Rat} :
|
||||
protected theorem abs_sub_comm {x y : Rat} :
|
||||
(x - y).abs = (y - x).abs := by
|
||||
rw [← Rat.neg_sub, Rat.abs_neg]
|
||||
|
||||
@[simp]
|
||||
theorem Rat.abs_eq_zero_iff {x : Rat} :
|
||||
protected theorem abs_eq_zero_iff {x : Rat} :
|
||||
x.abs = 0 ↔ x = 0 := by
|
||||
simp only [Rat.abs]
|
||||
split
|
||||
@@ -1352,7 +1352,7 @@ theorem Rat.abs_eq_zero_iff {x : Rat} :
|
||||
rw [← Rat.neg_neg (a := x), h, Rat.neg_zero]
|
||||
· simp +contextual
|
||||
|
||||
theorem Rat.abs_pos_iff {x : Rat} :
|
||||
protected theorem abs_pos_iff {x : Rat} :
|
||||
0 < x.abs ↔ x ≠ 0 := by
|
||||
apply Iff.intro
|
||||
· intro hpos
|
||||
@@ -1371,8 +1371,10 @@ theorem Rat.abs_pos_iff {x : Rat} :
|
||||
# instances
|
||||
-/
|
||||
|
||||
instance Rat.instAssociativeHAdd : Std.Associative (α := Rat) (· + ·) := ⟨Rat.add_assoc⟩
|
||||
instance Rat.instCommutativeHAdd : Std.Commutative (α := Rat) (· + ·) := ⟨Rat.add_comm⟩
|
||||
instance instAssociativeHAdd : Std.Associative (α := Rat) (· + ·) := ⟨Rat.add_assoc⟩
|
||||
instance instCommutativeHAdd : Std.Commutative (α := Rat) (· + ·) := ⟨Rat.add_comm⟩
|
||||
instance : Std.LawfulIdentity (· + ·) (0 : Rat) where
|
||||
left_id := Rat.zero_add
|
||||
right_id := Rat.add_zero
|
||||
|
||||
end Rat
|
||||
|
||||
@@ -28,6 +28,7 @@ open Std Std.Iterators Std.PRange Std.Slice
|
||||
|
||||
namespace SubarrayIterator
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem step_eq {it : Iter (α := SubarrayIterator α) α} :
|
||||
it.step = if h : it.1.xs.start < it.1.xs.stop then
|
||||
haveI := it.1.xs.start_le_stop
|
||||
@@ -66,6 +67,7 @@ theorem val_step_eq {it : Iter (α := SubarrayIterator α) α} :
|
||||
simp only [step_eq]
|
||||
split <;> simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem toList_eq {α : Type u} {it : Iter (α := SubarrayIterator α) α} :
|
||||
it.toList =
|
||||
(it.internalState.xs.array.toList.take it.internalState.xs.stop).drop it.internalState.xs.start := by
|
||||
@@ -100,15 +102,17 @@ end SubarrayIterator
|
||||
|
||||
namespace Subarray
|
||||
|
||||
theorem internalIter_eq {α : Type u} {s : Subarray α} :
|
||||
theorem Internal.iter_eq {α : Type u} {s : Subarray α} :
|
||||
Internal.iter s = ⟨⟨s⟩⟩ :=
|
||||
rfl
|
||||
|
||||
theorem toList_internalIter {α : Type u} {s : Subarray α} :
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem Internal.toList_iter {α : Type u} {s : Subarray α} :
|
||||
(Internal.iter s).toList =
|
||||
(s.array.toList.take s.stop).drop s.start := by
|
||||
simp [SubarrayIterator.toList_eq, Internal.iter_eq_toIteratorIter, ToIterator.iter_eq]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
public instance : LawfulSliceSize (Internal.SubarrayData α) where
|
||||
lawful s := by
|
||||
simp [SliceSize.size, ToIterator.iter_eq,
|
||||
@@ -223,7 +227,7 @@ public theorem Subarray.toList_eq {xs : Subarray α} :
|
||||
change aslice.toList = _
|
||||
have : aslice.toList = lslice.toList := by
|
||||
rw [ListSlice.toList_eq]
|
||||
simp +instances only [aslice, lslice, Std.Slice.toList, toList_internalIter]
|
||||
simp +instances only [aslice, lslice, Std.Slice.toList, Internal.toList_iter]
|
||||
apply List.ext_getElem
|
||||
· have : stop - start ≤ array.size - start := by omega
|
||||
simp [Subarray.start, Subarray.stop, *, Subarray.array]
|
||||
@@ -274,7 +278,7 @@ public theorem Subarray.getElem_eq_getElem_array {xs : Subarray α} {h : i < xs.
|
||||
|
||||
public theorem Subarray.getElem_toList {xs : Subarray α} {h : i < xs.toList.length} :
|
||||
xs.toList[i]'h = xs[i]'(by simpa using h) := by
|
||||
simp [getElem_eq_getElem_array, toList_eq_drop_take]
|
||||
simp [getElem_eq_getElem_array, toList_eq_drop_take]; rfl
|
||||
|
||||
public theorem Subarray.getElem_eq_getElem_toList {xs : Subarray α} {h : i < xs.size} :
|
||||
xs[i]'h = xs.toList[i]'(by simpa using h) := by
|
||||
@@ -344,7 +348,7 @@ public theorem toList_mkSlice_rco {xs : Array α} {lo hi : Nat} :
|
||||
public theorem toArray_mkSlice_rco {xs : Array α} {lo hi : Nat} :
|
||||
xs[lo...hi].toArray = xs.extract lo hi := by
|
||||
simp only [← Subarray.toArray_toList, toList_mkSlice_rco]
|
||||
rw [show xs = xs.toList.toArray by simp, List.extract_toArray, List.extract_eq_drop_take]
|
||||
rw [show xs = xs.toList.toArray by simp, List.extract_toArray, List.extract_eq_take_drop]
|
||||
simp only [List.take_drop, mk.injEq]
|
||||
by_cases h : lo ≤ hi
|
||||
· congr 1
|
||||
|
||||
@@ -25,47 +25,45 @@ theorem Internal.iter_eq_toIteratorIter {γ : Type u}
|
||||
Internal.iter s = ToIterator.iter s :=
|
||||
(rfl)
|
||||
|
||||
theorem forIn_internalIter {γ : Type u} {β : Type v}
|
||||
theorem Internal.forIn_iter {γ : Type u} {β : Type v}
|
||||
{m : Type w → Type x} [Monad m] {δ : Type w}
|
||||
[ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β]
|
||||
[IteratorLoop α Id m]
|
||||
[LawfulIteratorLoop α Id m]
|
||||
[Finite α Id] {s : Slice γ}
|
||||
[Iterator α Id β] [IteratorLoop α Id m]
|
||||
{s : Slice γ}
|
||||
{init : δ} {f : β → δ → m (ForInStep δ)} :
|
||||
ForIn.forIn (Internal.iter s) init f = ForIn.forIn s init f :=
|
||||
(rfl)
|
||||
|
||||
theorem Internal.size_eq_length_internalIter [ToIterator (Slice γ) Id α β]
|
||||
theorem Internal.size_eq_length_iter [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [Finite α Id]
|
||||
[IteratorLoop α Id Id] [LawfulIteratorLoop α Id Id]
|
||||
{s : Slice γ} [SliceSize γ] [LawfulSliceSize γ] :
|
||||
s.size = (Internal.iter s).length := by
|
||||
simp only [Slice.size, iter, LawfulSliceSize.lawful, ← Iter.length_toList_eq_length]
|
||||
|
||||
theorem Internal.toArray_eq_toArray_internalIter {s : Slice γ} [ToIterator (Slice γ) Id α β]
|
||||
theorem Internal.toArray_eq_toArray_iter {s : Slice γ} [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β]
|
||||
[Finite α Id] :
|
||||
s.toArray = (Internal.iter s).toArray :=
|
||||
(rfl)
|
||||
|
||||
theorem Internal.toList_eq_toList_internalIter {s : Slice γ} [ToIterator (Slice γ) Id α β]
|
||||
theorem Internal.toList_eq_toList_iter {s : Slice γ} [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β]
|
||||
[Finite α Id] :
|
||||
s.toList = (Internal.iter s).toList :=
|
||||
(rfl)
|
||||
|
||||
theorem Internal.toListRev_eq_toListRev_internalIter {s : Slice γ} [ToIterator (Slice γ) Id α β]
|
||||
theorem Internal.toListRev_eq_toListRev_iter {s : Slice γ} [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [Finite α Id] :
|
||||
s.toListRev = (Internal.iter s).toListRev :=
|
||||
(rfl)
|
||||
|
||||
theorem fold_internalIter [ToIterator (Slice γ) Id α β]
|
||||
theorem Internal.fold_iter [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id Id] [Iterators.Finite α Id] {s : Slice γ} :
|
||||
(Internal.iter s).fold (init := init) f = s.foldl (init := init) f := by
|
||||
rfl
|
||||
|
||||
theorem foldM_internalIter {m : Type w → Type w'} [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
theorem Internal.foldM_iter {m : Type w → Type w'} [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id m] [Iterators.Finite α Id] {s : Slice γ} {f : δ → β → m δ} :
|
||||
(Internal.iter s).foldM (init := init) f = s.foldlM (init := init) f := by
|
||||
rfl
|
||||
|
||||
@@ -11,9 +11,10 @@ import all Init.Data.Slice.Operations
|
||||
import Init.Data.Iterators.Lemmas.Consumers
|
||||
public import Init.Data.List.Control
|
||||
public import Init.Data.Iterators.Consumers.Collect
|
||||
|
||||
import Init.Data.Slice.InternalLemmas
|
||||
|
||||
public section
|
||||
|
||||
open Std Std.Iterators
|
||||
|
||||
namespace Std.Slice
|
||||
@@ -21,7 +22,7 @@ namespace Std.Slice
|
||||
variable {γ : Type u} {α β : Type v}
|
||||
|
||||
@[simp]
|
||||
public theorem forIn_toList {γ : Type u} {β : Type v}
|
||||
theorem forIn_toList {γ : Type u} {β : Type v}
|
||||
{m : Type w → Type x} [Monad m] [LawfulMonad m] {δ : Type w}
|
||||
[ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β]
|
||||
@@ -30,10 +31,10 @@ public theorem forIn_toList {γ : Type u} {β : Type v}
|
||||
[Finite α Id] {s : Slice γ}
|
||||
{init : δ} {f : β → δ → m (ForInStep δ)} :
|
||||
ForIn.forIn s.toList init f = ForIn.forIn s init f := by
|
||||
rw [← forIn_internalIter, ← Iter.forIn_toList, Slice.toList]
|
||||
rw [← Internal.forIn_iter, ← Iter.forIn_toList, Slice.toList]
|
||||
|
||||
@[simp]
|
||||
public theorem forIn_toArray {γ : Type u} {β : Type v}
|
||||
theorem forIn_toArray {γ : Type u} {β : Type v}
|
||||
{m : Type w → Type x} [Monad m] [LawfulMonad m] {δ : Type w}
|
||||
[ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β]
|
||||
@@ -42,46 +43,70 @@ public theorem forIn_toArray {γ : Type u} {β : Type v}
|
||||
[Finite α Id] {s : Slice γ}
|
||||
{init : δ} {f : β → δ → m (ForInStep δ)} :
|
||||
ForIn.forIn s.toArray init f = ForIn.forIn s init f := by
|
||||
rw [← forIn_internalIter, ← Iter.forIn_toArray, Slice.toArray]
|
||||
rw [← Internal.forIn_iter, ← Iter.forIn_toArray, Slice.toArray]
|
||||
|
||||
theorem Internal.foldlM_iter [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id m]
|
||||
{s : Slice γ} {init : δ} {f : δ → β → m δ} :
|
||||
(Internal.iter s).foldM (init := init) f = s.foldlM (init := init) f :=
|
||||
(rfl)
|
||||
|
||||
theorem foldlM_toList [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id m] [LawfulIteratorLoop α Id m]
|
||||
[Finite α Id] [LawfulMonad m] {s : Slice γ} {init : δ} {f : δ → β → m δ} :
|
||||
s.toList.foldlM (init := init) f = s.foldlM (init := init) f := by
|
||||
simp [← Internal.foldlM_iter, ← Iter.foldlM_toList, Slice.toList]
|
||||
|
||||
theorem foldlM_toArray [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id m] [LawfulIteratorLoop α Id m]
|
||||
[Finite α Id] [LawfulMonad m] {s : Slice γ} {init : δ} {f : δ → β → m δ} :
|
||||
s.toArray.foldlM (init := init) f = s.foldlM (init := init) f := by
|
||||
simp [← Internal.foldlM_iter, ← Iter.foldlM_toArray, Slice.toArray]
|
||||
|
||||
theorem Internal.foldl_iter [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id Id]
|
||||
{s : Slice γ} {init : δ} {f : δ → β → δ} :
|
||||
(Internal.iter s).fold (init := init) f = s.foldl (init := init) f :=
|
||||
(rfl)
|
||||
|
||||
theorem foldl_toList [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id Id] [LawfulIteratorLoop α Id Id]
|
||||
[Finite α Id] {s : Slice γ} {init : δ} {f : δ → β → δ} :
|
||||
s.toList.foldl (init := init) f = s.foldl (init := init) f := by
|
||||
simp [← Internal.foldl_iter, ← Iter.foldl_toList, Slice.toList]
|
||||
|
||||
theorem foldl_toArray [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id Id] [LawfulIteratorLoop α Id Id]
|
||||
[Finite α Id] {s : Slice γ} {init : δ} {f : δ → β → δ} :
|
||||
s.toArray.foldl (init := init) f = s.foldl (init := init) f := by
|
||||
simp [← Internal.foldl_iter, ← Iter.foldl_toArray, Slice.toArray]
|
||||
|
||||
@[simp, grind =, suggest_for ListSlice.size_toArray ListSlice.size_toArray_eq_size]
|
||||
public theorem size_toArray_eq_size [ToIterator (Slice γ) Id α β]
|
||||
theorem size_toArray_eq_size [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [SliceSize γ] [LawfulSliceSize γ]
|
||||
[Finite α Id]
|
||||
{s : Slice γ} :
|
||||
s.toArray.size = s.size := by
|
||||
letI : IteratorLoop α Id Id := .defaultImplementation
|
||||
rw [Internal.size_eq_length_internalIter, Internal.toArray_eq_toArray_internalIter, Iter.size_toArray_eq_length]
|
||||
rw [Internal.size_eq_length_iter, Internal.toArray_eq_toArray_iter, Iter.size_toArray_eq_length]
|
||||
|
||||
@[simp, grind =, suggest_for ListSlice.length_toList ListSlice.length_toList_eq_size]
|
||||
public theorem length_toList_eq_size [ToIterator (Slice γ) Id α β]
|
||||
theorem length_toList_eq_size [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] {s : Slice γ}
|
||||
[SliceSize γ] [LawfulSliceSize γ]
|
||||
[Finite α Id] :
|
||||
s.toList.length = s.size := by
|
||||
letI : IteratorLoop α Id Id := .defaultImplementation
|
||||
rw [Internal.size_eq_length_internalIter, Internal.toList_eq_toList_internalIter, Iter.length_toList_eq_length]
|
||||
rw [Internal.size_eq_length_iter, Internal.toList_eq_toList_iter, Iter.length_toList_eq_length]
|
||||
|
||||
@[simp, grind =]
|
||||
public theorem length_toListRev_eq_size [ToIterator (Slice γ) Id α β]
|
||||
theorem length_toListRev_eq_size [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] {s : Slice γ}
|
||||
[IteratorLoop α Id Id.{v}] [SliceSize γ] [LawfulSliceSize γ]
|
||||
[Finite α Id]
|
||||
[LawfulIteratorLoop α Id Id] :
|
||||
s.toListRev.length = s.size := by
|
||||
rw [Internal.size_eq_length_internalIter, Internal.toListRev_eq_toListRev_internalIter,
|
||||
rw [Internal.size_eq_length_iter, Internal.toListRev_eq_toListRev_iter,
|
||||
Iter.length_toListRev_eq_length]
|
||||
|
||||
public theorem foldlM_toList {m} [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [LawfulMonad m] [IteratorLoop α Id m] [LawfulIteratorLoop α Id m]
|
||||
[Iterators.Finite α Id] {s : Slice γ} {f} :
|
||||
s.toList.foldlM (init := init) f = s.foldlM (m := m) (init := init) f := by
|
||||
simp [Internal.toList_eq_toList_internalIter, Iter.foldlM_toList, foldM_internalIter]
|
||||
|
||||
public theorem foldl_toList [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] [IteratorLoop α Id Id] [LawfulIteratorLoop α Id Id]
|
||||
[Iterators.Finite α Id] {s : Slice γ} :
|
||||
s.toList.foldl (init := init) f = s.foldl (init := init) f := by
|
||||
simp [Internal.toList_eq_toList_internalIter, Iter.foldl_toList, fold_internalIter]
|
||||
|
||||
end Std.Slice
|
||||
|
||||
@@ -27,7 +27,7 @@ theorem internalIter_eq {α : Type u} {s : ListSlice α} :
|
||||
Internal.iter s = match s.internalRepresentation.stop with
|
||||
| some stop => s.internalRepresentation.list.iter.take stop
|
||||
| none => s.internalRepresentation.list.iter.toTake := by
|
||||
simp only [Internal.iter, ToIterator.iter_eq]; rfl
|
||||
simp only [Internal.iter]; rfl
|
||||
|
||||
theorem toList_internalIter {α : Type u} {s : ListSlice α} :
|
||||
(Internal.iter s).toList = match s.internalRepresentation.stop with
|
||||
|
||||
@@ -41,8 +41,6 @@ terminating.
|
||||
-/
|
||||
class LawfulSliceSize (γ : Type u) [SliceSize γ] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β] where
|
||||
/-- The iterator for every `Slice α` is finite. -/
|
||||
[finite : Finite α Id]
|
||||
/-- The iterator of a slice `s` of type `Slice γ` emits exactly `SliceSize.size s` elements. -/
|
||||
lawful :
|
||||
letI : IteratorLoop α Id Id := .defaultImplementation
|
||||
@@ -60,26 +58,23 @@ def size (s : Slice γ) [SliceSize γ] :=
|
||||
/-- Allocates a new array that contains the elements of the slice. -/
|
||||
@[always_inline, inline]
|
||||
def toArray [ToIterator (Slice γ) Id α β] [Iterator α Id β]
|
||||
[Finite α Id] (s : Slice γ) : Array β :=
|
||||
(s : Slice γ) : Array β :=
|
||||
Internal.iter s |>.toArray
|
||||
|
||||
/-- Allocates a new list that contains the elements of the slice. -/
|
||||
@[always_inline, inline]
|
||||
def toList [ToIterator (Slice γ) Id α β] [Iterator α Id β]
|
||||
[Finite α Id]
|
||||
(s : Slice γ) : List β :=
|
||||
Internal.iter s |>.toList
|
||||
|
||||
/-- Allocates a new list that contains the elements of the slice in reverse order. -/
|
||||
@[always_inline, inline]
|
||||
def toListRev [ToIterator (Slice γ) Id α β] [Iterator α Id β]
|
||||
[Finite α Id] (s : Slice γ) : List β :=
|
||||
(s : Slice γ) : List β :=
|
||||
Internal.iter s |>.toListRev
|
||||
|
||||
instance {γ : Type u} {β : Type v} [Monad m] [ToIterator (Slice γ) Id α β]
|
||||
[Iterator α Id β]
|
||||
[IteratorLoop α Id m]
|
||||
[Finite α Id] :
|
||||
[Iterator α Id β] [IteratorLoop α Id m] :
|
||||
ForIn m (Slice γ) β where
|
||||
forIn s init f :=
|
||||
forIn (Internal.iter s) init f
|
||||
@@ -112,7 +107,7 @@ none
|
||||
def foldlM {γ : Type u} {β : Type v}
|
||||
{δ : Type w} {m : Type w → Type w'} [Monad m] (f : δ → β → m δ) (init : δ)
|
||||
[ToIterator (Slice γ) Id α β] [Iterator α Id β]
|
||||
[IteratorLoop α Id m] [Finite α Id]
|
||||
[IteratorLoop α Id m]
|
||||
(s : Slice γ) : m δ :=
|
||||
Internal.iter s |>.foldM f init
|
||||
|
||||
@@ -128,7 +123,7 @@ Examples for the special case of subarrays:
|
||||
def foldl {γ : Type u} {β : Type v}
|
||||
{δ : Type w} (f : δ → β → δ) (init : δ)
|
||||
[ToIterator (Slice γ) Id α β] [Iterator α Id β]
|
||||
[IteratorLoop α Id Id] [Finite α Id]
|
||||
[IteratorLoop α Id Id]
|
||||
(s : Slice γ) : δ :=
|
||||
Internal.iter s |>.fold f init
|
||||
|
||||
|
||||
@@ -29,3 +29,4 @@ public import Init.Data.String.OrderInstances
|
||||
public import Init.Data.String.FindPos
|
||||
public import Init.Data.String.Subslice
|
||||
public import Init.Data.String.Iter
|
||||
public import Init.Data.String.Iterate
|
||||
|
||||
@@ -208,7 +208,7 @@ theorem String.ofList_nil : String.ofList [] = "" :=
|
||||
theorem List.asString_nil : String.ofList [] = "" :=
|
||||
String.ofList_nil
|
||||
|
||||
@[simp]
|
||||
@[simp, grind =]
|
||||
theorem String.ofList_append {l₁ l₂ : List Char} :
|
||||
String.ofList (l₁ ++ l₂) = String.ofList l₁ ++ String.ofList l₂ := by
|
||||
simp [← String.toByteArray_inj]
|
||||
@@ -748,6 +748,7 @@ theorem _root_.ByteArray.IsValidUTF8.isUTF8FirstByte_getElem_zero {b : ByteArray
|
||||
theorem isUTF8FirstByte_getUTF8Byte_zero {b : String} {h} : (b.getUTF8Byte 0 h).IsUTF8FirstByte :=
|
||||
b.isValidUTF8.isUTF8FirstByte_getElem_zero _
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem Pos.Raw.isValidUTF8_extract_iff {s : String} (p₁ p₂ : Pos.Raw) (hle : p₁ ≤ p₂) (hle' : p₂ ≤ s.rawEndPos) :
|
||||
(s.toByteArray.extract p₁.byteIdx p₂.byteIdx).IsValidUTF8 ↔ p₁ = p₂ ∨ (p₁.IsValid s ∧ p₂.IsValid s) := by
|
||||
have hle'' : p₂.byteIdx ≤ s.toByteArray.size := by simpa [le_iff] using hle'
|
||||
@@ -1642,25 +1643,6 @@ theorem Pos.Raw.isValidForSlice_prevAux {s : Slice} (pos : s.Pos) (h : pos ≠ s
|
||||
(pos.prevAux h).IsValidForSlice s :=
|
||||
isValidForSlice_prevAuxGo ..
|
||||
|
||||
/-- Returns the previous valid position before the given position, given a proof that the position
|
||||
is not the start position, which guarantees that such a position exists. -/
|
||||
@[inline, expose]
|
||||
def Slice.Pos.prev {s : Slice} (pos : s.Pos) (h : pos ≠ s.startPos) : s.Pos where
|
||||
offset := prevAux pos h
|
||||
isValidForSlice := Pos.Raw.isValidForSlice_prevAux _ _
|
||||
|
||||
/-- Returns the previous valid position before the given position, or `none` if the position is
|
||||
the start position. -/
|
||||
@[expose]
|
||||
def Slice.Pos.prev? {s : Slice} (pos : s.Pos) : Option s.Pos :=
|
||||
if h : pos = s.startPos then none else some (pos.prev h)
|
||||
|
||||
/-- Returns the previous valid position before the given position, or panics if the position is
|
||||
the start position. -/
|
||||
@[expose]
|
||||
def Slice.Pos.prev! {s : Slice} (pos : s.Pos) : s.Pos :=
|
||||
if h : pos = s.startPos then panic! "The start position has no previous position" else pos.prev h
|
||||
|
||||
/-- Constructs a valid position on `s` from a position and a proof that it is valid. -/
|
||||
@[inline, expose]
|
||||
def Slice.pos (s : Slice) (off : String.Pos.Raw) (h : off.IsValidForSlice s) : s.Pos where
|
||||
@@ -1713,24 +1695,6 @@ position is the past-the-end position. -/
|
||||
def Pos.next! {s : String} (pos : s.Pos) : s.Pos :=
|
||||
ofToSlice pos.toSlice.next!
|
||||
|
||||
/-- Returns the previous valid position before the given position, given a proof that the position
|
||||
is not the start position, which guarantees that such a position exists. -/
|
||||
@[inline, expose]
|
||||
def Pos.prev {s : String} (pos : s.Pos) (h : pos ≠ s.startPos) : s.Pos :=
|
||||
ofToSlice (pos.toSlice.prev (ne_of_apply_ne Pos.ofToSlice (by simpa)))
|
||||
|
||||
/-- Returns the previous valid position before the given position, or `none` if the position is
|
||||
the start position. -/
|
||||
@[inline, expose]
|
||||
def Pos.prev? {s : String} (pos : s.Pos) : Option s.Pos :=
|
||||
pos.toSlice.prev?.map Pos.ofToSlice
|
||||
|
||||
/-- Returns the previous valid position before the given position, or panics if the position is
|
||||
the start position. -/
|
||||
@[inline, expose]
|
||||
def Pos.prev! {s : String} (pos : s.Pos) : s.Pos :=
|
||||
ofToSlice pos.toSlice.prev!
|
||||
|
||||
/-- Constructs a valid position on `s` from a position and a proof that it is valid. -/
|
||||
@[inline, expose]
|
||||
def pos (s : String) (off : Pos.Raw) (h : off.IsValid s) : s.Pos :=
|
||||
@@ -1826,30 +1790,6 @@ theorem Slice.Pos.prevAux_lt_self {s : Slice} {p : s.Pos} {h} : p.prevAux h < p.
|
||||
theorem Slice.Pos.prevAux_lt_rawEndPos {s : Slice} {p : s.Pos} {h} : p.prevAux h < s.rawEndPos :=
|
||||
Pos.Raw.lt_of_lt_of_le prevAux_lt_self p.isValidForSlice.le_rawEndPos
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.prev_ne_endPos {s : Slice} {p : s.Pos} {h} : p.prev h ≠ s.endPos := by
|
||||
simpa [Pos.ext_iff, prev] using Pos.Raw.ne_of_lt prevAux_lt_rawEndPos
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_ne_endPos {s : String} {p : s.Pos} {h} : p.prev h ≠ s.endPos :=
|
||||
mt (congrArg (·.toSlice)) (Slice.Pos.prev_ne_endPos (h := mt (congrArg Pos.ofToSlice) (by simpa)))
|
||||
|
||||
theorem Pos.toSlice_prev {s : String} {p : s.Pos} {h} :
|
||||
(p.prev h).toSlice = p.toSlice.prev (ne_of_apply_ne Pos.ofToSlice (by simpa)) := by
|
||||
simp [prev]
|
||||
|
||||
theorem Slice.Pos.offset_prev_lt_offset {s : Slice} {p : s.Pos} {h} : (p.prev h).offset < p.offset := by
|
||||
simpa [prev] using prevAux_lt_self
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.prev_lt {s : Slice} {p : s.Pos} {h} : p.prev h < p :=
|
||||
lt_iff.2 offset_prev_lt_offset
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_lt {s : String} {p : s.Pos} {h} : p.prev h < p := by
|
||||
simp [← toSlice_lt, toSlice_prev]
|
||||
|
||||
|
||||
@[expose]
|
||||
def Pos.Raw.utf8GetAux : List Char → Pos.Raw → Pos.Raw → Char
|
||||
| [], _, _ => default
|
||||
@@ -1989,6 +1929,7 @@ theorem Pos.ne_startPos_of_lt {s : String} {p q : s.Pos} :
|
||||
Pos.Raw.byteIdx_zero]
|
||||
omega
|
||||
|
||||
@[simp]
|
||||
theorem Pos.next_ne_startPos {s : String} {p : s.Pos} {h} :
|
||||
p.next h ≠ s.startPos :=
|
||||
ne_startPos_of_lt p.lt_next
|
||||
@@ -2637,20 +2578,6 @@ def Slice.Pos.nextn {s : Slice} (p : s.Pos) (n : Nat) : s.Pos :=
|
||||
else
|
||||
p
|
||||
|
||||
/--
|
||||
Iterates `p.prev` `n` times.
|
||||
|
||||
If this would move `p` past the start of `s`, the result is `s.endPos`.
|
||||
-/
|
||||
def Slice.Pos.prevn {s : Slice} (p : s.Pos) (n : Nat) : s.Pos :=
|
||||
match n with
|
||||
| 0 => p
|
||||
| n + 1 =>
|
||||
if h : p ≠ s.startPos then
|
||||
prevn (p.prev h) n
|
||||
else
|
||||
p
|
||||
|
||||
/--
|
||||
Advances the position `p` `n` times.
|
||||
|
||||
@@ -2660,14 +2587,6 @@ If this would move `p` past the end of `s`, the result is `s.endPos`.
|
||||
def Pos.nextn {s : String} (p : s.Pos) (n : Nat) : s.Pos :=
|
||||
ofToSlice (p.toSlice.nextn n)
|
||||
|
||||
/--
|
||||
Iterates `p.prev` `n` times.
|
||||
|
||||
If this would move `p` past the start of `s`, the result is `s.startPos`.
|
||||
-/
|
||||
@[inline]
|
||||
def Pos.prevn {s : String} (p : s.Pos) (n : Nat) : s.Pos :=
|
||||
ofToSlice (p.toSlice.prevn n)
|
||||
|
||||
theorem Slice.Pos.le_nextn {s : Slice} {p : s.Pos} {n : Nat} : p ≤ p.nextn n := by
|
||||
fun_induction nextn with
|
||||
@@ -2681,17 +2600,6 @@ theorem Pos.le_nextn {s : String} {p : s.Pos} {n : Nat} :
|
||||
p ≤ p.nextn n := by
|
||||
simpa [nextn, Pos.le_iff, ← offset_toSlice] using Slice.Pos.le_nextn
|
||||
|
||||
theorem Slice.Pos.prevn_le {s : Slice} {p : s.Pos} {n : Nat} : p.prevn n ≤ p := by
|
||||
fun_induction prevn with
|
||||
| case1 => simp
|
||||
| case2 p n h ih =>
|
||||
simp only [Pos.le_iff] at *
|
||||
exact Pos.Raw.le_of_lt (Pos.Raw.lt_of_le_of_lt ih prev_lt)
|
||||
| case3 => simp
|
||||
|
||||
theorem Pos.prevn_le {s : String} {p : s.Pos} {n : Nat} :
|
||||
p.prevn n ≤ p := by
|
||||
simpa [nextn, Pos.le_iff, ← offset_toSlice] using Slice.Pos.prevn_le
|
||||
/--
|
||||
Returns the next position in a string after position `p`. If `p` is not a valid position or
|
||||
`p = s.endPos`, returns the position one byte after `p`.
|
||||
@@ -3085,7 +2993,8 @@ end String
|
||||
|
||||
namespace Char
|
||||
|
||||
@[simp] theorem length_toString (c : Char) : c.toString.length = 1 := by
|
||||
simp [toString_eq_singleton]
|
||||
@[deprecated String.length_singleton (since := "2026-02-12")]
|
||||
theorem length_toString (c : Char) : c.toString.length = 1 := by
|
||||
simp
|
||||
|
||||
end Char
|
||||
|
||||
@@ -1121,30 +1121,35 @@ theorem utf8Size_le_of_utf8DecodeChar?_eq_some {b : ByteArray} {c : Char} :
|
||||
| case8 => simp
|
||||
| case9 => simp
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem utf8DecodeChar?_eq_assemble₁ {b : ByteArray} (hb : 1 ≤ b.size) (h : parseFirstByte b[0] = .done) :
|
||||
b.utf8DecodeChar? 0 = assemble₁ b[0] h := by
|
||||
fun_cases ByteArray.utf8DecodeChar?
|
||||
all_goals try (simp_all; done)
|
||||
all_goals omega
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem utf8DecodeChar?_eq_assemble₂ {b : ByteArray} (hb : 2 ≤ b.size) (h : parseFirstByte b[0] = .oneMore) :
|
||||
b.utf8DecodeChar? 0 = assemble₂ b[0] b[1] := by
|
||||
fun_cases ByteArray.utf8DecodeChar?
|
||||
all_goals try (simp_all; done)
|
||||
all_goals omega
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem utf8DecodeChar?_eq_assemble₃ {b : ByteArray} (hb : 3 ≤ b.size) (h : parseFirstByte b[0] = .twoMore) :
|
||||
b.utf8DecodeChar? 0 = assemble₃ b[0] b[1] b[2] := by
|
||||
fun_cases ByteArray.utf8DecodeChar?
|
||||
all_goals try (simp_all; done)
|
||||
all_goals omega
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem utf8DecodeChar?_eq_assemble₄ {b : ByteArray} (hb : 4 ≤ b.size) (h : parseFirstByte b[0] = .threeMore) :
|
||||
b.utf8DecodeChar? 0 = assemble₄ b[0] b[1] b[2] b[3] := by
|
||||
fun_cases ByteArray.utf8DecodeChar?
|
||||
all_goals try (simp_all; done)
|
||||
all_goals omega
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem utf8DecodeChar?_append_eq_assemble₁ {l : List UInt8} {b : ByteArray} (hl : l.length = 1) (h : parseFirstByte l[0] = .done) :
|
||||
(l.toByteArray ++ b).utf8DecodeChar? 0 = assemble₁ l[0] h := by
|
||||
have : (l.toByteArray ++ b)[0]'(by simp [hl]; omega) = l[0] := by
|
||||
|
||||
@@ -8,6 +8,8 @@ module
|
||||
prelude
|
||||
public import Init.Data.String.Basic
|
||||
import Init.Omega
|
||||
import Init.Data.String.OrderInstances
|
||||
import Init.Data.String.Lemmas.Basic
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
@@ -22,17 +24,15 @@ namespace String
|
||||
/--
|
||||
Obtains the smallest valid position that is greater than or equal to the given byte position.
|
||||
-/
|
||||
def Slice.posGE (s : Slice) (offset : String.Pos.Raw) (_h : offset ≤ s.rawEndPos) : s.Pos :=
|
||||
if h : offset < s.rawEndPos then
|
||||
if h' : (s.getUTF8Byte offset h).IsUTF8FirstByte then
|
||||
s.pos offset (Pos.Raw.isValidForSlice_iff_isUTF8FirstByte.2 (Or.inr ⟨_, h'⟩))
|
||||
else
|
||||
s.posGE offset.inc (by simpa)
|
||||
def Slice.posGE (s : Slice) (offset : String.Pos.Raw) (h : offset ≤ s.rawEndPos) : s.Pos :=
|
||||
if h' : offset.IsValidForSlice s then
|
||||
s.pos offset h'
|
||||
else
|
||||
s.endPos
|
||||
have : offset < s.rawEndPos := Std.not_le.1 (fun h₁ => h' (Std.le_antisymm h h₁ ▸ Pos.Raw.isValidForSlice_rawEndPos))
|
||||
s.posGE offset.inc (by simpa)
|
||||
termination_by s.utf8ByteSize - offset.byteIdx
|
||||
decreasing_by
|
||||
simp only [Pos.Raw.lt_iff, byteIdx_rawEndPos, utf8ByteSize_eq, Pos.Raw.byteIdx_inc] at h ⊢
|
||||
simp only [Pos.Raw.lt_iff, byteIdx_rawEndPos, Pos.Raw.byteIdx_inc] at this ⊢
|
||||
omega
|
||||
|
||||
/--
|
||||
@@ -60,4 +60,99 @@ Obtains the smallest valid position that is strictly greater than the given byte
|
||||
def posGT (s : String) (offset : String.Pos.Raw) (h : offset < s.rawEndPos) : s.Pos :=
|
||||
Pos.ofToSlice (s.toSlice.posGT offset (by simpa))
|
||||
|
||||
/--
|
||||
Obtains the largest valid position that is less than or equal to the given byte position.
|
||||
-/
|
||||
@[expose]
|
||||
def Slice.posLE (s : Slice) (offset : String.Pos.Raw) : s.Pos :=
|
||||
if h' : offset.IsValidForSlice s then
|
||||
s.pos offset h'
|
||||
else
|
||||
have : offset ≠ 0 := by rintro rfl; simp at h'
|
||||
s.posLE offset.dec
|
||||
termination_by offset.byteIdx
|
||||
decreasing_by simp only [ne_eq, Pos.Raw.eq_zero_iff, Pos.Raw.byteIdx_dec] at ⊢ this; omega
|
||||
|
||||
/--
|
||||
Obtains the largest valid position that is strictly less than the given byte position.
|
||||
-/
|
||||
@[inline, expose]
|
||||
def Slice.posLT (s : Slice) (offset : String.Pos.Raw) (_h : 0 < offset) : s.Pos :=
|
||||
s.posLE offset.dec
|
||||
|
||||
/--
|
||||
Obtains the largest valid position that is less than or equal to the given byte position.
|
||||
-/
|
||||
@[inline]
|
||||
def posLE (s : String) (offset : String.Pos.Raw) : s.Pos :=
|
||||
Pos.ofToSlice (s.toSlice.posLE offset)
|
||||
|
||||
/--
|
||||
Obtains the largest valid position that is strictly less than the given byte position.
|
||||
-/
|
||||
@[inline]
|
||||
def posLT (s : String) (offset : String.Pos.Raw) (h : 0 < offset) : s.Pos :=
|
||||
Pos.ofToSlice (s.toSlice.posLT offset h)
|
||||
|
||||
/--
|
||||
Returns the previous valid position before the given position, given a proof that the position
|
||||
is not the start position, which guarantees that such a position exists.
|
||||
-/
|
||||
@[inline, expose]
|
||||
def Slice.Pos.prev {s : Slice} (pos : s.Pos) (h : pos ≠ s.startPos) : s.Pos :=
|
||||
s.posLT pos.offset (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h)
|
||||
|
||||
/-- Returns the previous valid position before the given position, or {lean}`none` if the position is
|
||||
the start position. -/
|
||||
@[expose]
|
||||
def Slice.Pos.prev? {s : Slice} (pos : s.Pos) : Option s.Pos :=
|
||||
if h : pos = s.startPos then none else some (pos.prev h)
|
||||
|
||||
/-- Returns the previous valid position before the given position, or panics if the position is
|
||||
the start position. -/
|
||||
@[expose]
|
||||
def Slice.Pos.prev! {s : Slice} (pos : s.Pos) : s.Pos :=
|
||||
if h : pos = s.startPos then panic! "The start position has no previous position" else pos.prev h
|
||||
|
||||
/-- Returns the previous valid position before the given position, given a proof that the position
|
||||
is not the start position, which guarantees that such a position exists. -/
|
||||
@[inline, expose]
|
||||
def Pos.prev {s : String} (pos : s.Pos) (h : pos ≠ s.startPos) : s.Pos :=
|
||||
ofToSlice (pos.toSlice.prev (ne_of_apply_ne Pos.ofToSlice (by simpa)))
|
||||
|
||||
/-- Returns the previous valid position before the given position, or {lean}`none` if the position is
|
||||
the start position. -/
|
||||
@[inline, expose]
|
||||
def Pos.prev? {s : String} (pos : s.Pos) : Option s.Pos :=
|
||||
pos.toSlice.prev?.map Pos.ofToSlice
|
||||
|
||||
/-- Returns the previous valid position before the given position, or panics if the position is
|
||||
the start position. -/
|
||||
@[inline, expose]
|
||||
def Pos.prev! {s : String} (pos : s.Pos) : s.Pos :=
|
||||
ofToSlice pos.toSlice.prev!
|
||||
|
||||
/--
|
||||
Iterates {lean}`p.prev` {name}`n` times.
|
||||
|
||||
If this would move {name}`p` past the start of {name}`s`, the result is {lean}`s.endPos`.
|
||||
-/
|
||||
def Slice.Pos.prevn {s : Slice} (p : s.Pos) (n : Nat) : s.Pos :=
|
||||
match n with
|
||||
| 0 => p
|
||||
| n + 1 =>
|
||||
if h : p ≠ s.startPos then
|
||||
prevn (p.prev h) n
|
||||
else
|
||||
p
|
||||
|
||||
/--
|
||||
Iterates {lean}`p.prev` {name}`n` times.
|
||||
|
||||
If this would move {name}`p` past the start of {name}`s`, the result is {lean}`s.startPos`.
|
||||
-/
|
||||
@[inline]
|
||||
def Pos.prevn {s : String} (p : s.Pos) (n : Nat) : s.Pos :=
|
||||
ofToSlice (p.toSlice.prevn n)
|
||||
|
||||
end String
|
||||
|
||||
465
src/Init/Data/String/Iterate.lean
Normal file
465
src/Init/Data/String/Iterate.lean
Normal file
@@ -0,0 +1,465 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving, Markus Himmel
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.String.Basic
|
||||
public import Init.Data.String.FindPos
|
||||
public import Init.Data.Iterators.Combinators.FilterMap
|
||||
public import Init.Data.Iterators.Consumers.Loop
|
||||
import Init.Omega
|
||||
import Init.Data.Iterators.Consumers.Collect
|
||||
import Init.Data.String.Lemmas.FindPos
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
public section
|
||||
|
||||
namespace String.Slice
|
||||
|
||||
structure PosIterator (s : Slice) where
|
||||
currPos : s.Pos
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Creates an iterator over the valid positions within {name}`s`, starting at {name}`p`.
|
||||
-/
|
||||
def positionsFrom {s : Slice} (p : s.Pos) :
|
||||
Std.Iter (α := PosIterator s) { p : s.Pos // p ≠ s.endPos } :=
|
||||
{ internalState := { currPos := p } }
|
||||
|
||||
set_option doc.verso false
|
||||
/--
|
||||
Creates an iterator over all valid positions within {name}`s`.
|
||||
|
||||
Examples:
|
||||
* {lean}`("abc".toSlice.positions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['a', 'b', 'c']`
|
||||
* {lean}`("abc".toSlice.positions.map (·.val.offset.byteIdx) |>.toList) = [0, 1, 2]`
|
||||
* {lean}`("ab∀c".toSlice.positions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['a', 'b', '∀', 'c']`
|
||||
* {lean}`("ab∀c".toSlice.positions.map (·.val.offset.byteIdx) |>.toList) = [0, 1, 2, 5]`
|
||||
-/
|
||||
def positions (s : Slice) : Std.Iter (α := PosIterator s) { p : s.Pos // p ≠ s.endPos } :=
|
||||
s.positionsFrom s.startPos
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
namespace PosIterator
|
||||
|
||||
instance [Pure m] :
|
||||
Std.Iterator (PosIterator s) m { p : s.Pos // p ≠ s.endPos } where
|
||||
IsPlausibleStep it
|
||||
| .yield it' out =>
|
||||
∃ h : it.internalState.currPos ≠ s.endPos,
|
||||
it'.internalState.currPos = it.internalState.currPos.next h ∧
|
||||
it.internalState.currPos = out
|
||||
| .skip _ => False
|
||||
| .done => it.internalState.currPos = s.endPos
|
||||
step := fun ⟨⟨currPos⟩⟩ =>
|
||||
if h : currPos = s.endPos then
|
||||
pure (.deflate ⟨.done, by simp [h]⟩)
|
||||
else
|
||||
pure (.deflate ⟨.yield ⟨⟨currPos.next h⟩⟩ ⟨currPos, h⟩, by simp [h]⟩)
|
||||
|
||||
private def finitenessRelation [Pure m] :
|
||||
Std.Iterators.FinitenessRelation (PosIterator s) m where
|
||||
Rel := InvImage WellFoundedRelation.rel
|
||||
(fun it => s.utf8ByteSize - it.internalState.currPos.offset.byteIdx)
|
||||
wf := InvImage.wf _ WellFoundedRelation.wf
|
||||
subrelation {it it'} h := by
|
||||
simp_wf
|
||||
obtain ⟨step, h, h'⟩ := h
|
||||
cases step
|
||||
· cases h
|
||||
obtain ⟨h1, h2, _⟩ := h'
|
||||
have h3 := Char.utf8Size_pos (it.internalState.currPos.get h1)
|
||||
have h4 := it.internalState.currPos.isValidForSlice.le_utf8ByteSize
|
||||
simp [Pos.ext_iff, String.Pos.Raw.ext_iff] at h1 h2 h4
|
||||
omega
|
||||
· cases h'
|
||||
· cases h
|
||||
|
||||
@[no_expose]
|
||||
instance [Pure m] : Std.Iterators.Finite (PosIterator s) m :=
|
||||
.of_finitenessRelation finitenessRelation
|
||||
|
||||
instance [Monad m] [Monad n] : Std.IteratorLoop (PosIterator s) m n :=
|
||||
.defaultImplementation
|
||||
|
||||
docs_to_verso positions
|
||||
|
||||
end PosIterator
|
||||
|
||||
/--
|
||||
Creates an iterator over all characters (Unicode code points) in {name}`s`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"abc".toSlice.chars.toList = ['a', 'b', 'c']`
|
||||
* {lean}`"ab∀c".toSlice.chars.toList = ['a', 'b', '∀', 'c']`
|
||||
-/
|
||||
@[expose, inline]
|
||||
def chars (s : Slice) :=
|
||||
Std.Iter.map (fun ⟨pos, h⟩ => pos.get h) (positions s)
|
||||
|
||||
@[deprecated "There is no constant-time length function on slices. Use `s.positions.length` instead, or `isEmpty` if you only need to know whether the slice is empty." (since := "2025-11-20")]
|
||||
def length (s : Slice) : Nat :=
|
||||
s.positions.length
|
||||
|
||||
structure RevPosIterator (s : Slice) where
|
||||
currPos : s.Pos
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Creates an iterator over all valid positions within {name}`s` that are strictly smaller than
|
||||
{name}`p`, starting from the position before {name}`p` and iterating towards the first one.
|
||||
-/
|
||||
def revPositionsFrom (s : Slice) (p : s.Pos) : Std.Iter (α := RevPosIterator s) { p : s.Pos // p ≠ s.endPos } :=
|
||||
{ internalState := { currPos := p } }
|
||||
|
||||
set_option doc.verso false
|
||||
/--
|
||||
Creates an iterator over all valid positions within {name}`s`, starting from the last valid
|
||||
position and iterating towards the first one.
|
||||
|
||||
Examples
|
||||
* {lean}`("abc".toSlice.revPositions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['c', 'b', 'a']`
|
||||
* {lean}`("abc".toSlice.revPositions.map (·.val.offset.byteIdx) |>.toList) = [2, 1, 0]`
|
||||
* {lean}`("ab∀c".toSlice.revPositions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['c', '∀', 'b', 'a']`
|
||||
* {lean}`("ab∀c".toSlice.revPositions.map (·.val.offset.byteIdx) |>.toList) = [5, 2, 1, 0]`
|
||||
-/
|
||||
def revPositions (s : Slice) : Std.Iter (α := RevPosIterator s) { p : s.Pos // p ≠ s.endPos } :=
|
||||
s.revPositionsFrom s.endPos
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
namespace RevPosIterator
|
||||
|
||||
instance [Pure m] :
|
||||
Std.Iterator (RevPosIterator s) m { p : s.Pos // p ≠ s.endPos } where
|
||||
IsPlausibleStep it
|
||||
| .yield it' out =>
|
||||
∃ h : it.internalState.currPos ≠ s.startPos,
|
||||
it'.internalState.currPos = it.internalState.currPos.prev h ∧
|
||||
it.internalState.currPos.prev h = out
|
||||
| .skip _ => False
|
||||
| .done => it.internalState.currPos = s.startPos
|
||||
step := fun ⟨⟨currPos⟩⟩ =>
|
||||
if h : currPos = s.startPos then
|
||||
pure (.deflate ⟨.done, by simp [h]⟩)
|
||||
else
|
||||
let prevPos := currPos.prev h
|
||||
pure (.deflate ⟨.yield ⟨⟨prevPos⟩⟩ ⟨prevPos, by exact Pos.prev_ne_endPos⟩, by simp [h, prevPos]⟩)
|
||||
|
||||
private def finitenessRelation [Pure m] :
|
||||
Std.Iterators.FinitenessRelation (RevPosIterator s) m where
|
||||
Rel := InvImage WellFoundedRelation.rel
|
||||
(fun it => it.internalState.currPos.offset.byteIdx)
|
||||
wf := InvImage.wf _ WellFoundedRelation.wf
|
||||
subrelation {it it'} h := by
|
||||
simp_wf
|
||||
obtain ⟨step, h, h'⟩ := h
|
||||
cases step
|
||||
· cases h
|
||||
obtain ⟨h1, h2, _⟩ := h'
|
||||
have h3 := Pos.prev_lt (h := h1)
|
||||
simp [Pos.ext_iff, Pos.lt_iff, String.Pos.Raw.ext_iff, String.Pos.Raw.lt_iff] at h2 h3
|
||||
omega
|
||||
· cases h'
|
||||
· cases h
|
||||
|
||||
@[no_expose]
|
||||
instance [Pure m] : Std.Iterators.Finite (RevPosIterator s) m :=
|
||||
.of_finitenessRelation finitenessRelation
|
||||
|
||||
instance [Monad m] [Monad n] : Std.IteratorLoop (RevPosIterator s) m n :=
|
||||
.defaultImplementation
|
||||
|
||||
docs_to_verso revPositions
|
||||
|
||||
end RevPosIterator
|
||||
|
||||
/--
|
||||
Creates an iterator over all characters (Unicode code points) in {name}`s`, starting from the end
|
||||
of the slice and iterating towards the start.
|
||||
|
||||
Example:
|
||||
* {lean}`"abc".toSlice.revChars.toList = ['c', 'b', 'a']`
|
||||
* {lean}`"ab∀c".toSlice.revChars.toList = ['c', '∀', 'b', 'a']`
|
||||
-/
|
||||
@[expose, inline]
|
||||
def revChars (s : Slice) :=
|
||||
Std.Iter.map (fun ⟨pos, h⟩ => pos.get h) (revPositions s)
|
||||
|
||||
structure ByteIterator where
|
||||
s : Slice
|
||||
offset : String.Pos.Raw
|
||||
deriving Inhabited
|
||||
|
||||
set_option doc.verso false
|
||||
/--
|
||||
Creates an iterator over all bytes in {name}`s`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"abc".toSlice.bytes.toList = [97, 98, 99]`
|
||||
* {lean}`"ab∀c".toSlice.bytes.toList = [97, 98, 226, 136, 128, 99]`
|
||||
-/
|
||||
def bytes (s : Slice) : Std.Iter (α := ByteIterator) UInt8 :=
|
||||
{ internalState := { s, offset := s.startPos.offset }}
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
namespace ByteIterator
|
||||
|
||||
instance [Pure m] : Std.Iterator ByteIterator m UInt8 where
|
||||
IsPlausibleStep it
|
||||
| .yield it' out =>
|
||||
∃ h1 : it.internalState.offset < it.internalState.s.rawEndPos,
|
||||
it.internalState.s = it'.internalState.s ∧
|
||||
it'.internalState.offset = it.internalState.offset.inc ∧
|
||||
it.internalState.s.getUTF8Byte it.internalState.offset h1 = out
|
||||
| .skip _ => False
|
||||
| .done => ¬ it.internalState.offset < it.internalState.s.rawEndPos
|
||||
step := fun ⟨s, offset⟩ =>
|
||||
if h : offset < s.rawEndPos then
|
||||
pure (.deflate ⟨.yield ⟨s, offset.inc⟩ (s.getUTF8Byte offset h), by simp [h]⟩)
|
||||
else
|
||||
pure (.deflate ⟨.done, by simp [h]⟩)
|
||||
|
||||
private def finitenessRelation [Pure m] :
|
||||
Std.Iterators.FinitenessRelation (ByteIterator) m where
|
||||
Rel := InvImage WellFoundedRelation.rel
|
||||
(fun it => it.internalState.s.utf8ByteSize - it.internalState.offset.byteIdx)
|
||||
wf := InvImage.wf _ WellFoundedRelation.wf
|
||||
subrelation {it it'} h := by
|
||||
simp_wf
|
||||
obtain ⟨step, h, h'⟩ := h
|
||||
cases step
|
||||
· cases h
|
||||
obtain ⟨h1, h2, h3, h4⟩ := h'
|
||||
clear h4
|
||||
generalize it'.internalState.s = s at *
|
||||
cases h2
|
||||
simp [String.Pos.Raw.ext_iff, String.Pos.Raw.lt_iff] at h1 h3
|
||||
omega
|
||||
· cases h'
|
||||
· cases h
|
||||
|
||||
@[no_expose]
|
||||
instance [Pure m] : Std.Iterators.Finite ByteIterator m :=
|
||||
.of_finitenessRelation finitenessRelation
|
||||
|
||||
instance [Monad m] [Monad n] : Std.IteratorLoop ByteIterator m n :=
|
||||
.defaultImplementation
|
||||
|
||||
docs_to_verso bytes
|
||||
|
||||
end ByteIterator
|
||||
|
||||
structure RevByteIterator where
|
||||
s : Slice
|
||||
offset : String.Pos.Raw
|
||||
hinv : offset ≤ s.rawEndPos
|
||||
|
||||
set_option doc.verso false
|
||||
/--
|
||||
Creates an iterator over all bytes in {name}`s`, starting from the last one and iterating towards
|
||||
the first one.
|
||||
|
||||
Examples:
|
||||
* {lean}`"abc".toSlice.revBytes.toList = [99, 98, 97]`
|
||||
* {lean}`"ab∀c".toSlice.revBytes.toList = [99, 128, 136, 226, 98, 97]`
|
||||
-/
|
||||
def revBytes (s : Slice) : Std.Iter (α := RevByteIterator) UInt8 :=
|
||||
{ internalState := { s, offset := s.endPos.offset, hinv := by simp }}
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
instance : Inhabited RevByteIterator where
|
||||
default :=
|
||||
let s := default
|
||||
{ s := s, offset := s.endPos.offset, hinv := by simp}
|
||||
|
||||
namespace RevByteIterator
|
||||
|
||||
instance [Pure m] : Std.Iterator RevByteIterator m UInt8 where
|
||||
IsPlausibleStep it
|
||||
| .yield it' out =>
|
||||
∃ h1 : it.internalState.offset.dec < it.internalState.s.rawEndPos,
|
||||
it.internalState.s = it'.internalState.s ∧
|
||||
it.internalState.offset ≠ 0 ∧
|
||||
it'.internalState.offset = it.internalState.offset.dec ∧
|
||||
it.internalState.s.getUTF8Byte it.internalState.offset.dec h1 = out
|
||||
| .skip _ => False
|
||||
| .done => it.internalState.offset = 0
|
||||
step := fun ⟨s, offset, hinv⟩ =>
|
||||
if h : offset ≠ 0 then
|
||||
let nextOffset := offset.dec
|
||||
have hbound := by
|
||||
simp [String.Pos.Raw.le_iff, nextOffset, String.Pos.Raw.lt_iff] at h hinv ⊢
|
||||
omega
|
||||
have hinv := by
|
||||
simp [String.Pos.Raw.le_iff, nextOffset] at hinv ⊢
|
||||
omega
|
||||
have hiter := by simp [nextOffset, hbound, h]
|
||||
pure (.deflate ⟨.yield ⟨s, nextOffset, hinv⟩ (s.getUTF8Byte nextOffset hbound), hiter⟩)
|
||||
else
|
||||
pure (.deflate ⟨.done, by simpa using h⟩)
|
||||
|
||||
private def finitenessRelation [Pure m] :
|
||||
Std.Iterators.FinitenessRelation (RevByteIterator) m where
|
||||
Rel := InvImage WellFoundedRelation.rel
|
||||
(fun it => it.internalState.offset.byteIdx)
|
||||
wf := InvImage.wf _ WellFoundedRelation.wf
|
||||
subrelation {it it'} h := by
|
||||
simp_wf
|
||||
obtain ⟨step, h, h'⟩ := h
|
||||
cases step
|
||||
· cases h
|
||||
obtain ⟨h1, h2, h3, h4, h5⟩ := h'
|
||||
rw [h4]
|
||||
simp at h1 h3 ⊢
|
||||
omega
|
||||
· cases h'
|
||||
· cases h
|
||||
|
||||
@[no_expose]
|
||||
instance [Pure m] : Std.Iterators.Finite RevByteIterator m :=
|
||||
.of_finitenessRelation finitenessRelation
|
||||
|
||||
instance [Monad m] [Monad n] : Std.IteratorLoop RevByteIterator m n :=
|
||||
.defaultImplementation
|
||||
|
||||
docs_to_verso revBytes
|
||||
|
||||
instance {m : Type u → Type v} [Monad m] : ForIn m String.Slice Char where
|
||||
forIn s b f := ForIn.forIn s.chars b f
|
||||
|
||||
end RevByteIterator
|
||||
|
||||
/--
|
||||
Folds a function over a slice from the start, accumulating a value starting with {name}`init`. The
|
||||
accumulated value is combined with each character in order, using {name}`f`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"coffee tea water".toSlice.foldl (fun n c => if c.isWhitespace then n + 1 else n) 0 = 2`
|
||||
* {lean}`"coffee tea and water".toSlice.foldl (fun n c => if c.isWhitespace then n + 1 else n) 0 = 3`
|
||||
* {lean}`"coffee tea water".toSlice.foldl (·.push ·) "" = "coffee tea water"`
|
||||
-/
|
||||
@[inline]
|
||||
def foldl {α : Type u} (f : α → Char → α) (init : α) (s : Slice) : α :=
|
||||
Std.Iter.fold f init (chars s)
|
||||
|
||||
/--
|
||||
Folds a function over a slice from the end, accumulating a value starting with {name}`init`. The
|
||||
accumulated value is combined with each character in reverse order, using {name}`f`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"coffee tea water".toSlice.foldr (fun c n => if c.isWhitespace then n + 1 else n) 0 = 2`
|
||||
* {lean}`"coffee tea and water".toSlice.foldr (fun c n => if c.isWhitespace then n + 1 else n) 0 = 3`
|
||||
* {lean}`"coffee tea water".toSlice.foldr (fun c s => s.push c) "" = "retaw aet eeffoc"`
|
||||
-/
|
||||
@[inline]
|
||||
def foldr {α : Type u} (f : Char → α → α) (init : α) (s : Slice) : α :=
|
||||
Std.Iter.fold (flip f) init (revChars s)
|
||||
|
||||
end Slice
|
||||
|
||||
@[inline]
|
||||
def Internal.ofToSliceWithProof {s : String} :
|
||||
{ p : s.toSlice.Pos // p ≠ s.toSlice.endPos } → { p : s.Pos // p ≠ s.endPos } :=
|
||||
fun ⟨p, h⟩ => ⟨Pos.ofToSlice p, by simpa [← Pos.toSlice_inj]⟩
|
||||
|
||||
/--
|
||||
Creates an iterator over the valid positions within {name}`s`, starting at {name}`p`.
|
||||
-/
|
||||
def positionsFrom (s : String) (p : s.Pos) :=
|
||||
((s.toSlice.positionsFrom p.toSlice).map Internal.ofToSliceWithProof :
|
||||
Std.Iter { p : s.Pos // p ≠ s.endPos })
|
||||
|
||||
/--
|
||||
Creates an iterator over all valid positions within {name}`s`.
|
||||
|
||||
Examples
|
||||
* {lean}`("abc".positions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['a', 'b', 'c']`
|
||||
* {lean}`("abc".positions.map (·.val.offset.byteIdx) |>.toList) = [0, 1, 2]`
|
||||
* {lean}`("ab∀c".positions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['a', 'b', '∀', 'c']`
|
||||
* {lean}`("ab∀c".positions.map (·.val.offset.byteIdx) |>.toList) = [0, 1, 2, 5]`
|
||||
-/
|
||||
@[inline]
|
||||
def positions (s : String) :=
|
||||
s.positionsFrom s.startPos
|
||||
|
||||
/--
|
||||
Creates an iterator over all characters (Unicode code points) in {name}`s`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"abc".chars.toList = ['a', 'b', 'c']`
|
||||
* {lean}`"ab∀c".chars.toList = ['a', 'b', '∀', 'c']`
|
||||
-/
|
||||
@[inline]
|
||||
def chars (s : String) :=
|
||||
(s.toSlice.chars : Std.Iter Char)
|
||||
|
||||
/--
|
||||
Creates an iterator over all valid positions within {name}`s` that are strictly smaller than
|
||||
{name}`p`, starting from the position before {name}`p` and iterating towards the first one.
|
||||
-/
|
||||
def revPositionsFrom (s : String) (p : s.Pos) :=
|
||||
((s.toSlice.revPositionsFrom p.toSlice).map Internal.ofToSliceWithProof :
|
||||
Std.Iter { p : s.Pos // p ≠ s.endPos })
|
||||
|
||||
/--
|
||||
Creates an iterator over all valid positions within {name}`s`, starting from the last valid
|
||||
position and iterating towards the first one.
|
||||
|
||||
Examples
|
||||
* {lean}`("abc".revPositions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['c', 'b', 'a']`
|
||||
* {lean}`("abc".revPositions.map (·.val.offset.byteIdx) |>.toList) = [2, 1, 0]`
|
||||
* {lean}`("ab∀c".revPositions.map (fun ⟨p, h⟩ => p.get h) |>.toList) = ['c', '∀', 'b', 'a']`
|
||||
* {lean}`("ab∀c".toSlice.revPositions.map (·.val.offset.byteIdx) |>.toList) = [5, 2, 1, 0]`
|
||||
-/
|
||||
@[inline]
|
||||
def revPositions (s : String) :=
|
||||
s.revPositionsFrom s.endPos
|
||||
|
||||
/--
|
||||
Creates an iterator over all characters (Unicode code points) in {name}`s`, starting from the end
|
||||
of the slice and iterating towards the start.
|
||||
|
||||
Example:
|
||||
* {lean}`"abc".revChars.toList = ['c', 'b', 'a']`
|
||||
* {lean}`"ab∀c".revChars.toList = ['c', '∀', 'b', 'a']`
|
||||
-/
|
||||
@[inline]
|
||||
def revChars (s : String) :=
|
||||
(s.toSlice.revChars : Std.Iter Char)
|
||||
|
||||
/--
|
||||
Creates an iterator over all bytes in {name}`s`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"abc".byteIterator.toList = [97, 98, 99]`
|
||||
* {lean}`"ab∀c".byteIterator.toList = [97, 98, 226, 136, 128, 99]`
|
||||
-/
|
||||
@[inline]
|
||||
def byteIterator (s : String) :=
|
||||
(s.toSlice.bytes : Std.Iter UInt8)
|
||||
|
||||
/--
|
||||
Creates an iterator over all bytes in {name}`s`, starting from the last one and iterating towards
|
||||
the first one.
|
||||
|
||||
Examples:
|
||||
* {lean}`"abc".revBytes.toList = [99, 98, 97]`
|
||||
* {lean}`"ab∀c".revBytes.toList = [99, 128, 136, 226, 98, 97]`
|
||||
-/
|
||||
@[inline]
|
||||
def revBytes (s : String) :=
|
||||
(s.toSlice.revBytes : Std.Iter UInt8)
|
||||
|
||||
instance {m : Type u → Type v} [Monad m] : ForIn m String Char where
|
||||
forIn s b f := ForIn.forIn s.toSlice b f
|
||||
|
||||
end String
|
||||
@@ -15,6 +15,7 @@ public import Init.Data.String.Lemmas.Order
|
||||
public import Init.Data.String.Lemmas.IsEmpty
|
||||
public import Init.Data.String.Lemmas.Pattern
|
||||
public import Init.Data.String.Lemmas.Slice
|
||||
public import Init.Data.String.Lemmas.Iterate
|
||||
import Init.Data.Order.Lemmas
|
||||
public import Init.Data.String.Basic
|
||||
import Init.Data.Char.Lemmas
|
||||
|
||||
@@ -106,25 +106,28 @@ These lemmas are slightly evil because they are non-definitional equalities betw
|
||||
are useful and they are at least equalities between slices with definitionally equal underlying
|
||||
strings, so it should be fine.
|
||||
-/
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem Slice.sliceTo_sliceFrom {s : Slice} {pos pos'} :
|
||||
(s.sliceFrom pos).sliceTo pos' =
|
||||
s.slice pos (Slice.Pos.ofSliceFrom pos') Slice.Pos.le_ofSliceFrom := by
|
||||
ext <;> simp [String.Pos.ext_iff, Pos.Raw.offsetBy_assoc]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem Slice.sliceFrom_sliceTo {s : Slice} {pos pos'} :
|
||||
(s.sliceTo pos).sliceFrom pos' =
|
||||
s.slice (Slice.Pos.ofSliceTo pos') pos Slice.Pos.ofSliceTo_le := by
|
||||
ext <;> simp [String.Pos.ext_iff]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem Slice.sliceFrom_sliceFrom {s : Slice} {pos pos'} :
|
||||
(s.sliceFrom pos).sliceFrom pos' =
|
||||
s.sliceFrom (Slice.Pos.ofSliceFrom pos') := by
|
||||
ext <;> simp [String.Pos.ext_iff, Pos.Raw.offsetBy_assoc]
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem Slice.sliceTo_sliceTo {s : Slice} {pos pos'} :
|
||||
(s.sliceTo pos).sliceTo pos' = s.sliceTo (Slice.Pos.ofSliceTo pos') := by
|
||||
@@ -168,4 +171,9 @@ theorem Pos.Raw.isValidForSlice_zero {s : Slice} : (0 : Pos.Raw).IsValidForSlice
|
||||
le_rawEndPos := by simp [Pos.Raw.le_iff]
|
||||
isValid_offsetBy := by simpa using s.startInclusive.isValid
|
||||
|
||||
@[simp]
|
||||
theorem Pos.get_ofToSlice {s : String} {p : (s.toSlice).Pos} {h} :
|
||||
(ofToSlice p).get h = p.get (by simpa [← ofToSlice_inj]) := by
|
||||
simp [get_eq_get_toSlice]
|
||||
|
||||
end String
|
||||
|
||||
@@ -24,7 +24,6 @@ theorem le_offset_posGE {s : Slice} {p : Pos.Raw} {h : p ≤ s.rawEndPos} :
|
||||
fun_induction posGE with
|
||||
| case1 => simp
|
||||
| case2 => exact Std.le_trans (Std.le_of_lt (Pos.Raw.lt_inc)) ‹_›
|
||||
| case3 => assumption
|
||||
|
||||
@[simp]
|
||||
theorem posGE_le_iff {s : Slice} {p : Pos.Raw} {h : p ≤ s.rawEndPos} {q : s.Pos} :
|
||||
@@ -33,10 +32,7 @@ theorem posGE_le_iff {s : Slice} {p : Pos.Raw} {h : p ≤ s.rawEndPos} {q : s.Po
|
||||
| case1 => simp [Pos.le_iff]
|
||||
| case2 r h₁ h₂ h₃ ih =>
|
||||
suffices r ≠ q.offset by simp [ih, Pos.Raw.inc_le, Std.le_iff_lt_or_eq (a := r), this]
|
||||
exact fun h => h₃ (h ▸ q.isUTF8FirstByte_getUTF8Byte_offset)
|
||||
| case3 r h₁ h₂ =>
|
||||
obtain rfl : r = s.rawEndPos := Std.le_antisymm h₁ (Std.not_lt.1 h₂)
|
||||
simp only [Pos.endPos_le, ← offset_endPos, ← Pos.le_iff]
|
||||
exact fun h => by simp [h, q.isValidForSlice] at h₂
|
||||
|
||||
@[simp]
|
||||
theorem lt_posGE_iff {s : Slice} {p : Pos.Raw} {h : p ≤ s.rawEndPos} {q : s.Pos} :
|
||||
@@ -103,10 +99,124 @@ theorem posGT_eq_next {s : Slice} {p : String.Pos.Raw} {h} (h' : p.IsValidForSli
|
||||
s.posGT p h = (s.pos p h').next (by simpa [Pos.ext_iff] using Pos.Raw.ne_of_lt h) := by
|
||||
simpa using Pos.posGT_offset (h := h) (p := s.pos p h')
|
||||
|
||||
theorem next_eq_posGT {s : Slice} {p : s.Pos} {h} :
|
||||
theorem Pos.next_eq_posGT {s : Slice} {p : s.Pos} {h} :
|
||||
p.next h = s.posGT p.offset (by simpa) := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem offset_posLE_le {s : Slice} {p : Pos.Raw} : (s.posLE p).offset ≤ p := by
|
||||
fun_induction posLE with
|
||||
| case1 => simp
|
||||
| case2 => exact Std.le_trans ‹_› (Std.le_of_lt (Pos.Raw.dec_lt ‹_›))
|
||||
|
||||
@[simp]
|
||||
theorem le_posLE_iff {s : Slice} {p : s.Pos} {q : Pos.Raw} :
|
||||
p ≤ s.posLE q ↔ p.offset ≤ q := by
|
||||
fun_induction posLE with
|
||||
| case1 => simp [Pos.le_iff]
|
||||
| case2 r h₁ h₂ ih =>
|
||||
suffices p.offset ≠ r by simp [ih, Pos.Raw.le_dec h₂, Std.le_iff_lt_or_eq (b := r), this]
|
||||
exact fun h => by simp [← h, p.isValidForSlice] at h₁
|
||||
|
||||
@[simp]
|
||||
theorem posLE_lt_iff {s : Slice} {p : s.Pos} {q : Pos.Raw} :
|
||||
s.posLE q < p ↔ q < p.offset := by
|
||||
rw [← Std.not_le, le_posLE_iff, Std.not_le]
|
||||
|
||||
theorem posLE_eq_iff {s : Slice} {p : Pos.Raw} {q : s.Pos} :
|
||||
s.posLE p = q ↔ q.offset ≤ p ∧ ∀ q', q'.offset ≤ p → q' ≤ q :=
|
||||
⟨by rintro rfl; simp, fun ⟨h₁, h₂⟩ => Std.le_antisymm (h₂ _ (by simp)) (by simpa)⟩
|
||||
|
||||
theorem posLT_eq_posLE {s : Slice} {p : Pos.Raw} {h : 0 < p} :
|
||||
s.posLT p h = s.posLE p.dec := (rfl)
|
||||
|
||||
theorem posLE_dec {s : Slice} {p : Pos.Raw} (h : 0 < p) :
|
||||
s.posLE p.dec = s.posLT p h := (rfl)
|
||||
|
||||
@[simp]
|
||||
theorem offset_posLT_lt {s : Slice} {p : Pos.Raw} {h : 0 < p} :
|
||||
(s.posLT p h).offset < p :=
|
||||
Std.lt_of_le_of_lt (by simp [posLT_eq_posLE]) (Pos.Raw.dec_lt (Pos.Raw.pos_iff_ne_zero.1 h))
|
||||
|
||||
@[simp]
|
||||
theorem le_posLT_iff {s : Slice} {p : Pos.Raw} {h : 0 < p} {q : s.Pos} :
|
||||
q ≤ s.posLT p h ↔ q.offset < p := by
|
||||
rw [posLT_eq_posLE, le_posLE_iff, Pos.Raw.le_dec (Pos.Raw.pos_iff_ne_zero.1 h)]
|
||||
|
||||
@[simp]
|
||||
theorem posLT_lt_iff {s : Slice} {p : Pos.Raw} {h : 0 < p} {q : s.Pos} :
|
||||
s.posLT p h < q ↔ p ≤ q.offset := by
|
||||
rw [posLT_eq_posLE, posLE_lt_iff, Pos.Raw.dec_lt_iff (Pos.Raw.pos_iff_ne_zero.1 h)]
|
||||
|
||||
theorem posLT_eq_iff {s : Slice} {p : Pos.Raw} {h : 0 < p} {q : s.Pos} :
|
||||
s.posLT p h = q ↔ q.offset < p ∧ ∀ q', q'.offset < p → q' ≤ q := by
|
||||
simp [posLT_eq_posLE, posLE_eq_iff, Pos.Raw.le_dec (Pos.Raw.pos_iff_ne_zero.1 h)]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.posLE_offset {s : Slice} {p : s.Pos} : s.posLE p.offset = p := by
|
||||
simp [posLE_eq_iff, Pos.le_iff]
|
||||
|
||||
@[simp]
|
||||
theorem offset_posLE_eq_self_iff {s : Slice} {p : String.Pos.Raw} :
|
||||
(s.posLE p).offset = p ↔ p.IsValidForSlice s :=
|
||||
⟨fun h' => by simpa [h'] using (s.posLE p).isValidForSlice,
|
||||
fun h' => by simpa using congrArg Pos.offset (Pos.posLE_offset (p := s.pos p h'))⟩
|
||||
|
||||
theorem posLE_eq_pos {s : Slice} {p : String.Pos.Raw} (h : p.IsValidForSlice s) :
|
||||
s.posLE p = s.pos p h := by
|
||||
simpa using Pos.posLE_offset (p := s.pos p h)
|
||||
|
||||
theorem pos_eq_posLE {s : Slice} {p : String.Pos.Raw} {h} :
|
||||
s.pos p h = s.posLE p := by
|
||||
simp [posLE_eq_pos h]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.posLT_offset {s : Slice} {p : s.Pos} {h} :
|
||||
s.posLT p.offset h = p.prev (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h) := by
|
||||
simp [prev]
|
||||
|
||||
theorem posLT_eq_prev {s : Slice} {p : String.Pos.Raw} {h} (h' : p.IsValidForSlice s) :
|
||||
s.posLT p h = (s.pos p h').prev (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h) := by
|
||||
simpa using Pos.posLT_offset (h := h) (p := s.pos p h')
|
||||
|
||||
theorem Pos.prev_eq_posLT {s : Slice} {p : s.Pos} {h} :
|
||||
p.prev h = s.posLT p.offset (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h) := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.le_prev_iff_lt {s : Slice} {p q : s.Pos} {h} : p ≤ q.prev h ↔ p < q := by
|
||||
simp [prev_eq_posLT, -posLT_offset, Pos.lt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_lt_iff_le {s : Slice} {p q : s.Pos} {h} : p.prev h < q ↔ p ≤ q := by
|
||||
simp [prev_eq_posLT, -posLT_offset, Pos.le_iff]
|
||||
|
||||
theorem Pos.prev_eq_iff {s : Slice} {p q : s.Pos} {h} :
|
||||
p.prev h = q ↔ q < p ∧ ∀ q', q' < p → q' ≤ q := by
|
||||
simp only [prev_eq_posLT, posLT_eq_iff, Pos.lt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_lt {s : Slice} {p : s.Pos} {h} : p.prev h < p := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_ne_endPos {s : Slice} {p : s.Pos} {h} : p.prev h ≠ s.endPos :=
|
||||
ne_endPos_of_lt prev_lt
|
||||
|
||||
theorem Pos.prevn_le {s : Slice} {p : s.Pos} {n : Nat} : p.prevn n ≤ p := by
|
||||
fun_induction prevn with
|
||||
| case1 => simp
|
||||
| case2 p n h ih => exact Std.le_of_lt (by simpa using ih)
|
||||
| case3 => simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_next {s : Slice} {p : s.Pos} {h} : (p.next h).prev (by simp) = p :=
|
||||
prev_eq_iff.2 (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Pos.next_prev {s : Slice} {p : s.Pos} {h} : (p.prev h).next (by simp) = p :=
|
||||
next_eq_iff.2 (by simp)
|
||||
|
||||
end Slice
|
||||
|
||||
@[simp]
|
||||
@@ -204,4 +314,134 @@ theorem next_eq_posGT {s : String} {p : s.Pos} {h} :
|
||||
p.next h = s.posGT p.offset (by simpa) := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem offset_posLE_le {s : String} {p : Pos.Raw} : (s.posLE p).offset ≤ p := by
|
||||
simp [posLE]
|
||||
|
||||
@[simp]
|
||||
theorem le_posLE_iff {s : String} {p : s.Pos} {q : Pos.Raw} :
|
||||
p ≤ s.posLE q ↔ p.offset ≤ q := by
|
||||
simp [posLE, Pos.le_ofToSlice_iff]
|
||||
|
||||
@[simp]
|
||||
theorem posLE_lt_iff {s : String} {p : s.Pos} {q : Pos.Raw} :
|
||||
s.posLE q < p ↔ q < p.offset := by
|
||||
rw [← Std.not_le, le_posLE_iff, Std.not_le]
|
||||
|
||||
theorem posLE_eq_iff {s : String} {p : Pos.Raw} {q : s.Pos} :
|
||||
s.posLE p = q ↔ q.offset ≤ p ∧ ∀ q', q'.offset ≤ p → q' ≤ q :=
|
||||
⟨by rintro rfl; simp, fun ⟨h₁, h₂⟩ => Std.le_antisymm (h₂ _ (by simp)) (by simpa)⟩
|
||||
|
||||
theorem posLT_eq_posLE {s : String} {p : Pos.Raw} {h : 0 < p} :
|
||||
s.posLT p h = s.posLE p.dec := (rfl)
|
||||
|
||||
theorem posLE_dec {s : String} {p : Pos.Raw} (h : 0 < p) :
|
||||
s.posLE p.dec = s.posLT p h := (rfl)
|
||||
|
||||
@[simp]
|
||||
theorem offset_posLT_lt {s : String} {p : Pos.Raw} {h : 0 < p} :
|
||||
(s.posLT p h).offset < p :=
|
||||
Std.lt_of_le_of_lt (by simp [posLT_eq_posLE]) (Pos.Raw.dec_lt (Pos.Raw.pos_iff_ne_zero.1 h))
|
||||
|
||||
@[simp]
|
||||
theorem le_posLT_iff {s : String} {p : Pos.Raw} {h : 0 < p} {q : s.Pos} :
|
||||
q ≤ s.posLT p h ↔ q.offset < p := by
|
||||
rw [posLT_eq_posLE, le_posLE_iff, Pos.Raw.le_dec (Pos.Raw.pos_iff_ne_zero.1 h)]
|
||||
|
||||
@[simp]
|
||||
theorem posLT_lt_iff {s : String} {p : Pos.Raw} {h : 0 < p} {q : s.Pos} :
|
||||
s.posLT p h < q ↔ p ≤ q.offset := by
|
||||
rw [posLT_eq_posLE, posLE_lt_iff, Pos.Raw.dec_lt_iff (Pos.Raw.pos_iff_ne_zero.1 h)]
|
||||
|
||||
theorem posLT_eq_iff {s : String} {p : Pos.Raw} {h : 0 < p} {q : s.Pos} :
|
||||
s.posLT p h = q ↔ q.offset < p ∧ ∀ q', q'.offset < p → q' ≤ q := by
|
||||
simp [posLT_eq_posLE, posLE_eq_iff, Pos.Raw.le_dec (Pos.Raw.pos_iff_ne_zero.1 h)]
|
||||
|
||||
theorem posLE_toSlice {s : String} {p : Pos.Raw} :
|
||||
s.toSlice.posLE p = (s.posLE p).toSlice := by
|
||||
simp [posLE]
|
||||
|
||||
theorem posLE_eq_posLE_toSlice {s : String} {p : Pos.Raw} :
|
||||
s.posLE p = Pos.ofToSlice (s.toSlice.posLE p) := by
|
||||
simp [posLE]
|
||||
|
||||
theorem posLT_toSlice {s : String} {p : Pos.Raw} (h : 0 < p) :
|
||||
s.toSlice.posLT p h = (s.posLT p h).toSlice := by
|
||||
simp [posLT]
|
||||
|
||||
theorem posLT_eq_posLT_toSlice {s : String} {p : Pos.Raw} (h : 0 < p) :
|
||||
s.posLT p h = Pos.ofToSlice (s.toSlice.posLT p h) := by
|
||||
simp [posLT]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.posLE_offset {s : String} {p : s.Pos} : s.posLE p.offset = p := by
|
||||
simp [posLE_eq_iff, Pos.le_iff]
|
||||
|
||||
@[simp]
|
||||
theorem offset_posLE_eq_self_iff {s : String} {p : String.Pos.Raw} :
|
||||
(s.posLE p).offset = p ↔ p.IsValid s :=
|
||||
⟨fun h' => by simpa [h'] using (s.posLE p).isValid,
|
||||
fun h' => by simpa using congrArg Pos.offset (Pos.posLE_offset (p := s.pos p h'))⟩
|
||||
|
||||
theorem posLE_eq_pos {s : String} {p : String.Pos.Raw} (h : p.IsValid s) :
|
||||
s.posLE p = s.pos p h := by
|
||||
simpa using Pos.posLE_offset (p := s.pos p h)
|
||||
|
||||
theorem pos_eq_posLE {s : String} {p : String.Pos.Raw} {h} :
|
||||
s.pos p h = s.posLE p := by
|
||||
simp [posLE_eq_pos h]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.posLT_offset {s : String} {p : s.Pos} {h} :
|
||||
s.posLT p.offset h = p.prev (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h) := by
|
||||
simp [posLT, prev, Slice.Pos.prev, offset_toSlice]
|
||||
|
||||
theorem posLT_eq_prev {s : String} {p : String.Pos.Raw} {h} (h' : p.IsValid s) :
|
||||
s.posLT p h = (s.pos p h').prev (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h) := by
|
||||
simpa using Pos.posLT_offset (h := h) (p := s.pos p h')
|
||||
|
||||
theorem Pos.prev_eq_posLT {s : String} {p : s.Pos} {h} :
|
||||
p.prev h = s.posLT p.offset (by simpa [Pos.Raw.pos_iff_ne_zero, Pos.ext_iff] using h) := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.le_prev_iff_lt {s : String} {p q : s.Pos} {h} : p ≤ q.prev h ↔ p < q := by
|
||||
simp [prev_eq_posLT, -posLT_offset, Pos.lt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_lt_iff_le {s : String} {p q : s.Pos} {h} : p.prev h < q ↔ p ≤ q := by
|
||||
simp [prev_eq_posLT, -posLT_offset, Pos.le_iff]
|
||||
|
||||
theorem Pos.prev_eq_iff {s : String} {p q : s.Pos} {h} :
|
||||
p.prev h = q ↔ q < p ∧ ∀ q', q' < p → q' ≤ q := by
|
||||
simp only [prev_eq_posLT, posLT_eq_iff, Pos.lt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_lt {s : String} {p : s.Pos} {h} : p.prev h < p := by
|
||||
simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_ne_endPos {s : String} {p : s.Pos} {h} : p.prev h ≠ s.endPos :=
|
||||
ne_endPos_of_lt prev_lt
|
||||
|
||||
theorem Pos.toSlice_prev {s : String} {p : s.Pos} {h} :
|
||||
(p.prev h).toSlice = p.toSlice.prev (by simpa [toSlice_inj]) := by
|
||||
simp [prev]
|
||||
|
||||
theorem Pos.prev_toSlice {s : String} {p : s.Pos} {h} :
|
||||
p.toSlice.prev h = (p.prev (by simpa [← toSlice_inj])).toSlice := by
|
||||
simp [prev]
|
||||
|
||||
theorem Pos.prevn_le {s : String} {p : s.Pos} {n : Nat} :
|
||||
p.prevn n ≤ p := by
|
||||
simpa [Pos.le_iff, ← offset_toSlice] using Slice.Pos.prevn_le
|
||||
|
||||
@[simp]
|
||||
theorem Pos.prev_next {s : String} {p : s.Pos} {h} : (p.next h).prev (by simp) = p :=
|
||||
prev_eq_iff.2 (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Pos.next_prev {s : String} {p : s.Pos} {h} : (p.prev h).next (by simp) = p :=
|
||||
next_eq_iff.2 (by simp)
|
||||
|
||||
end String
|
||||
|
||||
267
src/Init/Data/String/Lemmas/Iterate.lean
Normal file
267
src/Init/Data/String/Lemmas/Iterate.lean
Normal file
@@ -0,0 +1,267 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Markus Himmel
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.String.Iterate
|
||||
public import Init.Data.Iterators.Consumers.Collect
|
||||
public import Init.Data.String.Lemmas.Splits
|
||||
import all Init.Data.String.Iterate
|
||||
import Init.Data.String.Termination
|
||||
import Init.Data.Iterators.Lemmas.Consumers.Collect
|
||||
import Init.ByCases
|
||||
import Init.Data.Iterators.Lemmas.Combinators.FilterMap
|
||||
import Init.Data.String.Lemmas.Basic
|
||||
import Init.Data.Iterators.Lemmas.Consumers.Loop
|
||||
|
||||
set_option doc.verso true
|
||||
|
||||
public section
|
||||
|
||||
namespace String
|
||||
|
||||
namespace Slice
|
||||
|
||||
/--
|
||||
A list of all positions starting at {name}`p`.
|
||||
|
||||
This function is not meant to be used in actual progams. Actual programs should use
|
||||
{name}`Slice.positionsFrom` or {name}`Slice.positions`.
|
||||
-/
|
||||
protected def Model.positionsFrom {s : Slice} (p : s.Pos) : List { p : s.Pos // p ≠ s.endPos } :=
|
||||
if h : p.IsAtEnd then
|
||||
[]
|
||||
else
|
||||
⟨p, h⟩ :: Model.positionsFrom (p.next h)
|
||||
termination_by p
|
||||
|
||||
@[simp]
|
||||
theorem Model.positionsFrom_endPos {s : Slice} : Model.positionsFrom s.endPos = [] := by
|
||||
simp [Model.positionsFrom]
|
||||
|
||||
theorem Model.positionsFrom_eq_cons {s : Slice} {p : s.Pos} (hp : p ≠ s.endPos) :
|
||||
Model.positionsFrom p = ⟨p, hp⟩ :: Model.positionsFrom (p.next hp) := by
|
||||
rw [Model.positionsFrom]
|
||||
simp [hp]
|
||||
|
||||
theorem Model.map_get_positionsFrom_of_splits {s : Slice} {p : s.Pos} {t₁ t₂ : String}
|
||||
(hp : p.Splits t₁ t₂) : (Model.positionsFrom p).map (fun p => p.1.get p.2) = t₂.toList := by
|
||||
induction p using Pos.next_induction generalizing t₁ t₂ with
|
||||
| next p h ih =>
|
||||
obtain ⟨t₂, rfl⟩ := hp.exists_eq_singleton_append h
|
||||
rw [Model.positionsFrom_eq_cons h, List.map_cons, String.toList_append, toList_singleton,
|
||||
List.singleton_append, ih hp.next]
|
||||
| endPos => simpa using (splits_endPos_iff.1 hp).2
|
||||
|
||||
theorem Model.map_get_positionsFrom_startPos {s : Slice} :
|
||||
(Model.positionsFrom s.startPos).map (fun p => p.1.get p.2) = s.copy.toList :=
|
||||
Model.map_get_positionsFrom_of_splits (splits_startPos s)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem toList_positionsFrom {s : Slice} {p : s.Pos} :
|
||||
(s.positionsFrom p).toList = Model.positionsFrom p := by
|
||||
rw [positionsFrom]
|
||||
induction p using WellFounded.induction Pos.wellFounded_gt with | h p ih
|
||||
rw [Std.Iter.toList_eq_match_step, Std.Iter.step_eq]
|
||||
simp only [ne_eq, Std.Iter.toIterM_mk, Std.IterM.internalState_mk]
|
||||
by_cases h : p = s.endPos
|
||||
· simp [h]
|
||||
· simp [h, ih (p.next h) (by simp), Model.positionsFrom_eq_cons]
|
||||
|
||||
@[simp]
|
||||
theorem toList_positions {s : Slice} : s.positions.toList = Model.positionsFrom s.startPos := by
|
||||
simp [positions]
|
||||
|
||||
@[simp]
|
||||
theorem toList_chars {s : Slice} : s.chars.toList = s.copy.toList := by
|
||||
simp [chars, Model.map_get_positionsFrom_startPos]
|
||||
|
||||
/--
|
||||
A list of all positions strictly before {name}`p`, ordered from largest to smallest.
|
||||
|
||||
This function is not meant to be used in actual programs. Actual programs should use
|
||||
{name}`Slice.revPositionsFrom` and {name}`Slice.revPositions`.
|
||||
-/
|
||||
protected def Model.revPositionsFrom {s : Slice} (p : s.Pos) : List { p : s.Pos // p ≠ s.endPos } :=
|
||||
if h : p = s.startPos then
|
||||
[]
|
||||
else
|
||||
⟨p.prev h, by simp⟩ :: Model.revPositionsFrom (p.prev h)
|
||||
termination_by p.down
|
||||
|
||||
@[simp]
|
||||
theorem Model.revPositionsFrom_startPos {s : Slice} : Model.revPositionsFrom s.startPos = [] := by
|
||||
simp [Model.revPositionsFrom]
|
||||
|
||||
theorem Model.revPositionsFrom_eq_cons {s : Slice} {p : s.Pos} (hp : p ≠ s.startPos) :
|
||||
Model.revPositionsFrom p = ⟨p.prev hp, by simp⟩ :: Model.revPositionsFrom (p.prev hp) := by
|
||||
rw [Model.revPositionsFrom]
|
||||
simp [hp]
|
||||
|
||||
theorem Model.map_get_revPositionsFrom_of_splits {s : Slice} {p : s.Pos} {t₁ t₂ : String}
|
||||
(hp : p.Splits t₁ t₂) : (Model.revPositionsFrom p).map (fun p => p.1.get p.2) = t₁.toList.reverse := by
|
||||
induction p using Pos.prev_induction generalizing t₁ t₂ with
|
||||
| startPos => simpa using (splits_startPos_iff.1 hp).1
|
||||
| prev p h ih =>
|
||||
obtain ⟨t₁, rfl⟩ := hp.exists_eq_append_singleton_of_ne_startPos h
|
||||
rw [Model.revPositionsFrom_eq_cons h, List.map_cons, String.toList_append, toList_singleton,
|
||||
List.reverse_append, List.reverse_singleton, List.singleton_append, ih hp.prev]
|
||||
|
||||
theorem Model.map_get_revPositionsFrom_endPos {s : Slice} :
|
||||
(Model.revPositionsFrom s.endPos).map (fun p => p.1.get p.2) = s.copy.toList.reverse :=
|
||||
Model.map_get_revPositionsFrom_of_splits (splits_endPos s)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem toList_revPositionsFrom {s : Slice} {p : s.Pos} :
|
||||
(s.revPositionsFrom p).toList = Model.revPositionsFrom p := by
|
||||
rw [revPositionsFrom]
|
||||
induction p using WellFounded.induction Pos.wellFounded_lt with | h p ih
|
||||
rw [Std.Iter.toList_eq_match_step, Std.Iter.step_eq]
|
||||
simp only [ne_eq, Std.Iter.toIterM_mk, Std.IterM.internalState_mk]
|
||||
by_cases h : p = s.startPos
|
||||
· simp [h]
|
||||
· simp [h, ih (p.prev h) (by simp), Model.revPositionsFrom_eq_cons]
|
||||
|
||||
@[simp]
|
||||
theorem toList_revPositions {s : Slice} : s.revPositions.toList = Model.revPositionsFrom s.endPos := by
|
||||
simp [revPositions]
|
||||
|
||||
@[simp]
|
||||
theorem toList_revChars {s : Slice} : s.revChars.toList = s.copy.toList.reverse := by
|
||||
simp [revChars, Model.map_get_revPositionsFrom_endPos]
|
||||
|
||||
theorem forIn_eq_forIn_chars {m : Type u → Type v} [Monad m] {s : Slice} {b} {f : Char → β → m (ForInStep β)} :
|
||||
ForIn.forIn s b f = ForIn.forIn s.chars b f := rfl
|
||||
|
||||
@[simp]
|
||||
theorem forIn_eq_forIn_toList {m : Type u → Type v} [Monad m] [LawfulMonad m] {s : Slice} {b}
|
||||
{f : Char → β → m (ForInStep β)} :
|
||||
ForIn.forIn s b f = ForIn.forIn s.copy.toList b f := by
|
||||
rw [forIn_eq_forIn_chars, ← Std.Iter.forIn_toList, toList_chars]
|
||||
|
||||
end Slice
|
||||
|
||||
/--
|
||||
A list of all positions starting at {name}`p`.
|
||||
|
||||
This function is not meant to be used in actual progams. Actual programs should use
|
||||
{name}`Slice.positionsFrom` or {name}`Slice.positions`.
|
||||
-/
|
||||
protected def Model.positionsFrom {s : String} (p : s.Pos) : List { p : s.Pos // p ≠ s.endPos } :=
|
||||
if h : p.IsAtEnd then
|
||||
[]
|
||||
else
|
||||
⟨p, h⟩ :: Model.positionsFrom (p.next h)
|
||||
termination_by p
|
||||
|
||||
@[simp]
|
||||
theorem Model.positionsFrom_endPos {s : String} : Model.positionsFrom s.endPos = [] := by
|
||||
simp [Model.positionsFrom]
|
||||
|
||||
theorem Model.positionsFrom_eq_cons {s : String} {p : s.Pos} (hp : p ≠ s.endPos) :
|
||||
Model.positionsFrom p = ⟨p, hp⟩ :: Model.positionsFrom (p.next hp) := by
|
||||
rw [Model.positionsFrom]
|
||||
simp [hp]
|
||||
|
||||
theorem Model.positionsFrom_eq_map {s : String} {p : s.Pos} :
|
||||
Model.positionsFrom p = (Slice.Model.positionsFrom p.toSlice).map
|
||||
(fun p => ⟨Pos.ofToSlice p.1, by simpa [← Pos.toSlice_inj] using p.2⟩) := by
|
||||
induction p using Pos.next_induction with
|
||||
| next p h ih =>
|
||||
rw [positionsFrom_eq_cons h, Slice.Model.positionsFrom_eq_cons (by simpa [Pos.toSlice_inj])]
|
||||
simp [ih, Pos.toSlice_next]
|
||||
| endPos => simp [← endPos_toSlice]
|
||||
|
||||
theorem Model.map_get_positionsFrom_of_splits {s : String} {p : s.Pos} {t₁ t₂ : String}
|
||||
(hp : p.Splits t₁ t₂) : (Model.positionsFrom p).map (fun p => p.1.get p.2) = t₂.toList := by
|
||||
simp [Model.positionsFrom_eq_map,
|
||||
← Slice.Model.map_get_positionsFrom_of_splits (Pos.splits_toSlice_iff.2 hp)]
|
||||
|
||||
theorem Model.map_get_positionsFrom_startPos {s : String} :
|
||||
(Model.positionsFrom s.startPos).map (fun p => p.1.get p.2) = s.toList :=
|
||||
Model.map_get_positionsFrom_of_splits (splits_startPos s)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem toList_positionsFrom {s : String} {p : s.Pos} :
|
||||
(s.positionsFrom p).toList = Model.positionsFrom p := by
|
||||
simp [positionsFrom, Internal.ofToSliceWithProof, Model.positionsFrom_eq_map]
|
||||
|
||||
theorem toList_positions {s : String} : s.positions.toList = Model.positionsFrom s.startPos := by
|
||||
simp [positions]
|
||||
|
||||
@[simp]
|
||||
theorem toList_chars {s : String} : s.chars.toList = s.toList := by
|
||||
simp [chars]
|
||||
|
||||
/--
|
||||
A list of all positions strictly before {name}`p`, ordered from largest to smallest.
|
||||
|
||||
This function is not meant to be used in actual programs. Actual programs should use
|
||||
{name}`Slice.revPositionsFrom` and {name}`Slice.revPositions`.
|
||||
-/
|
||||
protected def Model.revPositionsFrom {s : String} (p : s.Pos) : List { p : s.Pos // p ≠ s.endPos } :=
|
||||
if h : p = s.startPos then
|
||||
[]
|
||||
else
|
||||
⟨p.prev h, by simp⟩ :: Model.revPositionsFrom (p.prev h)
|
||||
termination_by p.down
|
||||
|
||||
@[simp]
|
||||
theorem Model.revPositionsFrom_startPos {s : String} : Model.revPositionsFrom s.startPos = [] := by
|
||||
simp [Model.revPositionsFrom]
|
||||
|
||||
theorem Model.revPositionsFrom_eq_cons {s : String} {p : s.Pos} (hp : p ≠ s.startPos) :
|
||||
Model.revPositionsFrom p = ⟨p.prev hp, by simp⟩ :: Model.revPositionsFrom (p.prev hp) := by
|
||||
rw [Model.revPositionsFrom]
|
||||
simp [hp]
|
||||
|
||||
theorem Model.revPositionsFrom_eq_map {s : String} {p : s.Pos} :
|
||||
Model.revPositionsFrom p = (Slice.Model.revPositionsFrom p.toSlice).map
|
||||
(fun p => ⟨Pos.ofToSlice p.1, by simpa [← Pos.toSlice_inj] using p.2⟩) := by
|
||||
induction p using Pos.prev_induction with
|
||||
| prev p h ih =>
|
||||
rw [revPositionsFrom_eq_cons h, Slice.Model.revPositionsFrom_eq_cons (by simpa [Pos.toSlice_inj])]
|
||||
simp only [ne_eq, ih, List.map_cons, List.cons.injEq, Subtype.mk.injEq]
|
||||
simp [Pos.prev_toSlice]
|
||||
| startPos => simp [← startPos_toSlice]
|
||||
|
||||
theorem Model.map_get_revPositionsFrom_of_splits {s : String} {p : s.Pos} {t₁ t₂ : String}
|
||||
(hp : p.Splits t₁ t₂) : (Model.revPositionsFrom p).map (fun p => p.1.get p.2) = t₁.toList.reverse := by
|
||||
simp [Model.revPositionsFrom_eq_map,
|
||||
← Slice.Model.map_get_revPositionsFrom_of_splits (Pos.splits_toSlice_iff.2 hp)]
|
||||
|
||||
theorem Model.map_get_revPositionsFrom_endPos {s : String} :
|
||||
(Model.revPositionsFrom s.endPos).map (fun p => p.1.get p.2) = s.toList.reverse :=
|
||||
Model.map_get_revPositionsFrom_of_splits (splits_endPos s)
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
@[simp]
|
||||
theorem toList_revPositionsFrom {s : String} {p : s.Pos} :
|
||||
(s.revPositionsFrom p).toList = Model.revPositionsFrom p := by
|
||||
simp [revPositionsFrom, Internal.ofToSliceWithProof, Model.revPositionsFrom_eq_map]
|
||||
|
||||
@[simp]
|
||||
theorem toList_revPositions {s : String} :
|
||||
s.revPositions.toList = Model.revPositionsFrom s.endPos := by
|
||||
simp [revPositions]
|
||||
|
||||
@[simp]
|
||||
theorem toList_revChars {s : String} : s.revChars.toList = s.toList.reverse := by
|
||||
simp [revChars]
|
||||
|
||||
theorem forIn_eq_forIn_chars {m : Type u → Type v} [Monad m] {s : String} {b} {f : Char → β → m (ForInStep β)} :
|
||||
ForIn.forIn s b f = ForIn.forIn s.chars b f := (rfl)
|
||||
|
||||
@[simp]
|
||||
theorem forIn_eq_forIn_toList {m : Type u → Type v} [Monad m] [LawfulMonad m] {s : String} {b}
|
||||
{f : Char → β → m (ForInStep β)} :
|
||||
ForIn.forIn s b f = ForIn.forIn s.toList b f := by
|
||||
rw [forIn_eq_forIn_chars, ← Std.Iter.forIn_toList, toList_chars]
|
||||
|
||||
end String
|
||||
@@ -23,11 +23,23 @@ theorem Slice.Pos.next_le_iff_lt {s : Slice} {p q : s.Pos} {h} : p.next h ≤ q
|
||||
theorem Slice.Pos.lt_next_iff_le {s : Slice} {p q : s.Pos} {h} : p < q.next h ↔ p ≤ q := by
|
||||
rw [← Decidable.not_iff_not, Std.not_lt, next_le_iff_lt, Std.not_le]
|
||||
|
||||
theorem Slice.Pos.next_eq_iff {s : Slice} {p q : s.Pos} {h} :
|
||||
p.next h = q ↔ p < q ∧ ∀ (q' : s.Pos), p < q' → q ≤ q' :=
|
||||
⟨by rintro rfl; simp, fun ⟨h₁, h₂⟩ => Std.le_antisymm (by simpa) (h₂ _ (by simp))⟩
|
||||
|
||||
@[simp]
|
||||
theorem Pos.next_le_iff_lt {s : String} {p q : s.Pos} {h} : p.next h ≤ q ↔ p < q := by
|
||||
rw [next, Pos.ofToSlice_le_iff, ← Pos.toSlice_lt_toSlice_iff]
|
||||
exact Slice.Pos.next_le_iff_lt
|
||||
|
||||
@[simp]
|
||||
theorem Pos.lt_next_iff_le {s : String} {p q : s.Pos} {h} : p < q.next h ↔ p ≤ q := by
|
||||
rw [← Std.not_le, next_le_iff_lt, Std.not_lt]
|
||||
|
||||
theorem Pos.next_eq_iff {s : String} {p q : s.Pos} {h} :
|
||||
p.next h = q ↔ p < q ∧ ∀ (q' : s.Pos), p < q' → q ≤ q' :=
|
||||
⟨by rintro rfl; simp, fun ⟨h₁, h₂⟩ => Std.le_antisymm (by simpa) (h₂ _ (by simp))⟩
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.le_startPos {s : Slice} (p : s.Pos) : p ≤ s.startPos ↔ p = s.startPos :=
|
||||
⟨fun h => Std.le_antisymm h (startPos_le _), by simp +contextual⟩
|
||||
@@ -48,6 +60,10 @@ theorem Slice.Pos.lt_endPos_iff {s : Slice} (p : s.Pos) : p < s.endPos ↔ p ≠
|
||||
theorem Pos.le_startPos {s : String} (p : s.Pos) : p ≤ s.startPos ↔ p = s.startPos :=
|
||||
⟨fun h => Std.le_antisymm h (startPos_le _), by simp +contextual⟩
|
||||
|
||||
@[simp]
|
||||
theorem Pos.startPos_lt_iff {s : String} {p : s.Pos} : s.startPos < p ↔ p ≠ s.startPos := by
|
||||
simp [← le_startPos, Std.not_le]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.endPos_le {s : String} (p : s.Pos) : s.endPos ≤ p ↔ p = s.endPos :=
|
||||
⟨fun h => Std.le_antisymm (le_endPos _) h, by simp +contextual [Std.le_refl]⟩
|
||||
@@ -60,6 +76,22 @@ theorem Slice.Pos.ne_startPos_of_lt {s : Slice} {p q : s.Pos} : p < q → q ≠
|
||||
rintro h rfl
|
||||
simp at h
|
||||
|
||||
@[simp]
|
||||
theorem Pos.not_lt_startPos {s : String} {p : s.Pos} : ¬ p < s.startPos :=
|
||||
fun h => Std.lt_irrefl (Std.lt_of_lt_of_le h (Pos.startPos_le _))
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.not_endPos_lt {s : Slice} {p : s.Pos} : ¬ s.endPos < p :=
|
||||
fun h => Std.lt_irrefl (Std.lt_of_le_of_lt (Slice.Pos.le_endPos _) h)
|
||||
|
||||
@[simp]
|
||||
theorem Pos.not_endPos_lt {s : String} {p : s.Pos} : ¬ s.endPos < p :=
|
||||
fun h => Std.lt_irrefl (Std.lt_of_le_of_lt (Pos.le_endPos _) h)
|
||||
|
||||
theorem Pos.ne_endPos_of_lt {s : String} {p q : s.Pos} : p < q → p ≠ s.endPos := by
|
||||
rintro h rfl
|
||||
simp at h
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.le_next {s : Slice} {p : s.Pos} {h} : p ≤ p.next h :=
|
||||
Std.le_of_lt (by simp)
|
||||
|
||||
@@ -296,6 +296,7 @@ class LawfulToForwardSearcherModel {ρ : Type} (pat : ρ) [ForwardPatternModel p
|
||||
[∀ s, Std.Iterators.Finite (σ s) Id] : Prop where
|
||||
isValidSearchFrom_toList (s) : IsValidSearchFrom pat s.startPos (ToForwardSearcher.toSearcher pat s).toList
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem LawfulToForwardSearcherModel.defaultImplementation {pat : ρ} [ForwardPattern pat] [StrictForwardPattern pat]
|
||||
[ForwardPatternModel pat] [LawfulForwardPatternModel pat] :
|
||||
letI : ToForwardSearcher pat (ToForwardSearcher.DefaultForwardSearcher pat) := .defaultImplementation
|
||||
|
||||
@@ -197,6 +197,7 @@ theorem IsValidSearchFrom.splitFromSteps_eq_extend_split {ρ : Type} (pat : ρ)
|
||||
· exact h' p hp₁ hp
|
||||
· exact rej _ (Std.not_lt.1 hp) hp₂
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem SplitIterator.toList_eq_splitFromSteps {ρ : Type} {pat : ρ} {σ : Slice → Type}
|
||||
[ToForwardSearcher pat σ]
|
||||
[∀ s, Std.Iterator (σ s) Id (SearchStep s)] [∀ s, Std.Iterators.Finite (σ s) Id] {s : Slice}
|
||||
|
||||
@@ -294,6 +294,7 @@ theorem IsTable.push {b : ByteArray} {v : Array Nat} (h : IsTable b v) {d : Nat}
|
||||
obtain rfl : i = v.size := by omega
|
||||
exact hd
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem computeDistance_eq_prefixFunctionRecurrence {s : Slice} (i : Nat)
|
||||
(hi : i < s.copy.toByteArray.size) {patByte : UInt8}
|
||||
(hpat : patByte = s.copy.toByteArray[i])
|
||||
@@ -403,6 +404,7 @@ theorem Invariants.isLongestMatchAt {pat s : Slice} {stackPos needlePos : String
|
||||
cases h'
|
||||
exact h.partialMatch.isLongestMatchAt h.isEmpty_eq_false h.isValidForSlice
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem Invariants.not_matchesAt_of_prefixFunction_eq {pat s : Slice}
|
||||
{stackPos needlePos : String.Pos.Raw} (h : Invariants pat s needlePos stackPos)
|
||||
{k : Nat} {hki} (hk : prefixFunction pat.copy.toByteArray (needlePos.byteIdx - 1) hki = k)
|
||||
@@ -433,6 +435,7 @@ theorem Invariants.of_prefixFunction_eq {pat s : Slice} {stackPos needlePos : St
|
||||
rw [Nat.sub_add_cancel (by simp at h'; omega)] at this
|
||||
exact hk ▸ (h.partialMatch.partialMatch_iff.1 this).2
|
||||
|
||||
set_option backward.isDefEq.respectTransparency false in
|
||||
theorem Invariants.isValidSearchFrom_toList {pat s : Slice} {stackPos needlePos : String.Pos.Raw}
|
||||
(it : Std.Iter (α := ForwardSliceSearcher s) (SearchStep s))
|
||||
(h : Invariants pat s needlePos stackPos)
|
||||
|
||||
@@ -7,6 +7,7 @@ module
|
||||
|
||||
prelude
|
||||
public import Init.Data.String.Basic
|
||||
public import Init.Data.String.FindPos
|
||||
import Init.Data.ByteArray.Lemmas
|
||||
import Init.Data.String.Lemmas.Basic
|
||||
import Init.Data.Nat.MinMax
|
||||
@@ -15,6 +16,7 @@ import Init.Data.String.Lemmas.Order
|
||||
import Init.Data.String.OrderInstances
|
||||
import Init.Data.Nat.Order
|
||||
import Init.Omega
|
||||
import Init.Data.String.Lemmas.FindPos
|
||||
|
||||
/-!
|
||||
# `Splits` predicates on `String.Pos` and `String.Slice.Pos`.
|
||||
@@ -205,6 +207,18 @@ theorem Pos.splits_next {s : String} (p : s.Pos) (hp : p ≠ s.endPos) :
|
||||
eq_append := p.eq_copy_sliceTo_append_get hp
|
||||
offset_eq_rawEndPos := by simp
|
||||
|
||||
theorem Pos.splits_prev_right {s : String} (p : s.Pos) (hp : p ≠ s.startPos) :
|
||||
p.Splits ((s.sliceTo (p.prev hp)).copy ++ singleton ((p.prev hp).get (by simp))) (s.sliceFrom p).copy := by
|
||||
obtain ⟨q, hq, rfl⟩ : ∃ (q : s.Pos), ∃ (hq : q ≠ s.endPos), p = q.next hq :=
|
||||
⟨p.prev hp, by simp, by simp⟩
|
||||
simpa using splits_next q hq
|
||||
|
||||
theorem Pos.splits_prev {s : String} (p : s.Pos) (hp : p ≠ s.startPos) :
|
||||
(p.prev hp).Splits (s.sliceTo (p.prev hp)).copy (singleton ((p.prev hp).get (by simp)) ++ (s.sliceFrom p).copy) := by
|
||||
obtain ⟨q, hq, rfl⟩ : ∃ (q : s.Pos), ∃ (hq : q ≠ s.endPos), p = q.next hq :=
|
||||
⟨p.prev hp, by simp, by simp⟩
|
||||
simpa using splits_next_right q hq
|
||||
|
||||
theorem Slice.Pos.splits_next_right {s : Slice} (p : s.Pos) (hp : p ≠ s.endPos) :
|
||||
p.Splits (s.sliceTo p).copy (singleton (p.get hp) ++ (s.sliceFrom (p.next hp)).copy) where
|
||||
eq_append := by simpa [← append_assoc] using p.copy_eq_copy_sliceTo_append_get hp
|
||||
@@ -215,6 +229,18 @@ theorem Slice.Pos.splits_next {s : Slice} (p : s.Pos) (hp : p ≠ s.endPos) :
|
||||
eq_append := p.copy_eq_copy_sliceTo_append_get hp
|
||||
offset_eq_rawEndPos := by simp
|
||||
|
||||
theorem Slice.Pos.splits_prev_right {s : Slice} (p : s.Pos) (hp : p ≠ s.startPos) :
|
||||
p.Splits ((s.sliceTo (p.prev hp)).copy ++ singleton ((p.prev hp).get (by simp))) (s.sliceFrom p).copy := by
|
||||
obtain ⟨q, hq, rfl⟩ : ∃ (q : s.Pos), ∃ (hq : q ≠ s.endPos), p = q.next hq :=
|
||||
⟨p.prev hp, by simp, by simp⟩
|
||||
simpa using splits_next q hq
|
||||
|
||||
theorem Slice.Pos.splits_prev {s : Slice} (p : s.Pos) (hp : p ≠ s.startPos) :
|
||||
(p.prev hp).Splits (s.sliceTo (p.prev hp)).copy (singleton ((p.prev hp).get (by simp)) ++ (s.sliceFrom p).copy) := by
|
||||
obtain ⟨q, hq, rfl⟩ : ∃ (q : s.Pos), ∃ (hq : q ≠ s.endPos), p = q.next hq :=
|
||||
⟨p.prev hp, by simp, by simp⟩
|
||||
simpa using splits_next_right q hq
|
||||
|
||||
theorem Pos.Splits.exists_eq_singleton_append {s : String} {p : s.Pos}
|
||||
(hp : p ≠ s.endPos) (h : p.Splits t₁ t₂) : ∃ t₂', t₂ = singleton (p.get hp) ++ t₂' :=
|
||||
⟨(s.sliceFrom (p.next hp)).copy, h.eq_right (p.splits_next_right hp)⟩
|
||||
@@ -223,6 +249,14 @@ theorem Pos.Splits.exists_eq_append_singleton {s : String} {p : s.Pos}
|
||||
(hp : p ≠ s.endPos) (h : (p.next hp).Splits t₁ t₂) : ∃ t₁', t₁ = t₁' ++ singleton (p.get hp) :=
|
||||
⟨(s.sliceTo p).copy, h.eq_left (p.splits_next hp)⟩
|
||||
|
||||
theorem Pos.Splits.exists_eq_append_singleton_of_ne_startPos {s : String} {p : s.Pos}
|
||||
(hp : p ≠ s.startPos) (h : p.Splits t₁ t₂) : ∃ t₁', t₁ = t₁' ++ singleton ((p.prev hp).get (by simp)) :=
|
||||
⟨_, h.eq_left (p.splits_prev_right hp)⟩
|
||||
|
||||
theorem Pos.Splits.exists_eq_singleton_append_of_ne_startPos {s : String} {p : s.Pos}
|
||||
(hp : p ≠ s.startPos) (h : (p.prev hp).Splits t₁ t₂) : ∃ t₂', t₂ = singleton ((p.prev hp).get (by simp)) ++ t₂' :=
|
||||
⟨_, h.eq_right (p.splits_prev hp)⟩
|
||||
|
||||
theorem Slice.Pos.Splits.exists_eq_singleton_append {s : Slice} {p : s.Pos}
|
||||
(hp : p ≠ s.endPos) (h : p.Splits t₁ t₂) : ∃ t₂', t₂ = singleton (p.get hp) ++ t₂' :=
|
||||
⟨(s.sliceFrom (p.next hp)).copy, h.eq_right (p.splits_next_right hp)⟩
|
||||
@@ -231,6 +265,14 @@ theorem Slice.Pos.Splits.exists_eq_append_singleton {s : Slice} {p : s.Pos}
|
||||
(hp : p ≠ s.endPos) (h : (p.next hp).Splits t₁ t₂) : ∃ t₁', t₁ = t₁' ++ singleton (p.get hp) :=
|
||||
⟨(s.sliceTo p).copy, h.eq_left (p.splits_next hp)⟩
|
||||
|
||||
theorem Slice.Pos.Splits.exists_eq_append_singleton_of_ne_startPos {s : Slice} {p : s.Pos}
|
||||
(hp : p ≠ s.startPos) (h : p.Splits t₁ t₂) : ∃ t₁', t₁ = t₁' ++ singleton ((p.prev hp).get (by simp)) :=
|
||||
⟨_, h.eq_left (p.splits_prev_right hp)⟩
|
||||
|
||||
theorem Slice.Pos.Splits.exists_eq_singleton_append_of_ne_startPos {s : Slice} {p : s.Pos}
|
||||
(hp : p ≠ s.startPos) (h : (p.prev hp).Splits t₁ t₂) : ∃ t₂', t₂ = singleton ((p.prev hp).get (by simp)) ++ t₂' :=
|
||||
⟨_, h.eq_right (p.splits_prev hp)⟩
|
||||
|
||||
theorem Pos.Splits.ne_endPos_of_singleton {s : String} {p : s.Pos}
|
||||
(h : p.Splits t₁ (singleton c ++ t₂)) : p ≠ s.endPos := by
|
||||
simp [h.eq_endPos_iff]
|
||||
@@ -261,6 +303,20 @@ theorem Slice.Pos.Splits.next {s : Slice} {p : s.Pos}
|
||||
obtain ⟨rfl, rfl, rfl⟩ := by simpa using h.eq (splits_next_right p hp)
|
||||
exact splits_next p hp
|
||||
|
||||
/-- You might want to invoke `Pos.Splits.exists_eq_singleton_append_of_ne_startPos` to be able to apply this. -/
|
||||
theorem Pos.Splits.prev {s : String} {p : s.Pos}
|
||||
(h : p.Splits (t₁ ++ singleton c) t₂) : (p.prev h.ne_startPos_of_singleton).Splits t₁ (singleton c ++ t₂) := by
|
||||
generalize h.ne_startPos_of_singleton = hp
|
||||
obtain ⟨⟨rfl, rfl⟩, rfl⟩ := by simpa using h.eq (splits_prev_right p hp)
|
||||
exact splits_prev p hp
|
||||
|
||||
/-- You might want to invoke `Slice.Pos.Splits.exists_eq_singleton_append_of_ne_startPos` to be able to apply this. -/
|
||||
theorem Slice.Pos.Splits.prev {s : Slice} {p : s.Pos}
|
||||
(h : p.Splits (t₁ ++ singleton c) t₂) : (p.prev h.ne_startPos_of_singleton).Splits t₁ (singleton c ++ t₂) := by
|
||||
generalize h.ne_startPos_of_singleton = hp
|
||||
obtain ⟨⟨rfl, rfl⟩, rfl⟩ := by simpa using h.eq (splits_prev_right p hp)
|
||||
exact splits_prev p hp
|
||||
|
||||
theorem Slice.sliceTo_copy_eq_iff_exists_splits {s : Slice} {p : s.Pos} {t₁ : String} :
|
||||
(s.sliceTo p).copy = t₁ ↔ ∃ t₂, p.Splits t₁ t₂ := by
|
||||
refine ⟨?_, ?_⟩
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user