mirror of
https://github.com/leanprover/lean4.git
synced 2026-04-04 11:14:09 +00:00
Compare commits
675 Commits
grind_none
...
sofia/asyn
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
808f3a7753 | ||
|
|
dda9e3c6d5 | ||
|
|
5198a449f9 | ||
|
|
7e628ada8b | ||
|
|
da91aed2e2 | ||
|
|
e57d84bba0 | ||
|
|
772b5663d2 | ||
|
|
c7983a8c65 | ||
|
|
d3b04871f5 | ||
|
|
acae2b44fd | ||
|
|
6ee95db055 | ||
|
|
89e52c3359 | ||
|
|
fcc070f18f | ||
|
|
9aad86a576 | ||
|
|
2bcbb676f5 | ||
|
|
f7ec39d6a1 | ||
|
|
aaf0f6e7f5 | ||
|
|
5bf590e710 | ||
|
|
159f069863 | ||
|
|
aa1144602b | ||
|
|
ffc2c0ab1a | ||
|
|
8dc4c16fce | ||
|
|
861bc19e0c | ||
|
|
77bbbc3b16 | ||
|
|
125ac55801 | ||
|
|
74d425f584 | ||
|
|
d6b2e0b890 | ||
|
|
83df67ff34 | ||
|
|
0ac6746e3a | ||
|
|
b2791f1564 | ||
|
|
c69f5d63dc | ||
|
|
41470c1c0a | ||
|
|
a5551e3291 | ||
|
|
96253d357f | ||
|
|
db1d553245 | ||
|
|
286182df24 | ||
|
|
3eee136224 | ||
|
|
38f189dab2 | ||
|
|
55ce4dc2b0 | ||
|
|
bb90f72a40 | ||
|
|
c485824d11 | ||
|
|
afe1676e4a | ||
|
|
64889857b2 | ||
|
|
0ac5d75bac | ||
|
|
e4f2f5717c | ||
|
|
abbe36c0d2 | ||
|
|
7ef652911e | ||
|
|
9ef386d7c3 | ||
|
|
b9b2e08181 | ||
|
|
33caa4e82f | ||
|
|
8c292c70ee | ||
|
|
4f4ee7c789 | ||
|
|
d7ea3a5984 | ||
|
|
33c36c7466 | ||
|
|
7fbecca6f0 | ||
|
|
ae5a3d2c8b | ||
|
|
1a270555ae | ||
|
|
72702c3538 | ||
|
|
e86dbf3992 | ||
|
|
d71f0bdae7 | ||
|
|
6ae49d7639 | ||
|
|
232d173af3 | ||
|
|
3a4a309aed | ||
|
|
9c87a9f044 | ||
|
|
34c9cafc12 | ||
|
|
014dd1d263 | ||
|
|
2a7a407875 | ||
|
|
e359001026 | ||
|
|
72244398dc | ||
|
|
c0e60b797c | ||
|
|
400908a2f4 | ||
|
|
394c999c2a | ||
|
|
b7e88dadeb | ||
|
|
a39a0575a0 | ||
|
|
5815f33342 | ||
|
|
4fdf94ed3d | ||
|
|
66743e80a6 | ||
|
|
2d0d63f5d3 | ||
|
|
10951fdb57 | ||
|
|
71d3967338 | ||
|
|
34dbcb2ca5 | ||
|
|
abb60e47c8 | ||
|
|
7a852aedb6 | ||
|
|
1554f57525 | ||
|
|
1fa01cdadb | ||
|
|
758e5afb07 | ||
|
|
11516bbf09 | ||
|
|
f76dca5bba | ||
|
|
fe6ac812af | ||
|
|
51a00843ea | ||
|
|
c8c702af8d | ||
|
|
5b5b0fad70 | ||
|
|
eab144bbb2 | ||
|
|
cfe282f024 | ||
|
|
e7f06c8fa2 | ||
|
|
beb85dd6b0 | ||
|
|
debafcf0ef | ||
|
|
2668f07808 | ||
|
|
e3928b7b1a | ||
|
|
2f3a97ed8a | ||
|
|
0315d56389 | ||
|
|
b9e489cc8f | ||
|
|
135b049080 | ||
|
|
4005bd027b | ||
|
|
fbf03e31f9 | ||
|
|
39ab2b289c | ||
|
|
6c6f9a5d83 | ||
|
|
a7aea9a12d | ||
|
|
9517b5bc2d | ||
|
|
71debba5a2 | ||
|
|
a2c5f3c79e | ||
|
|
fd9117fc12 | ||
|
|
1b6357dc03 | ||
|
|
38cb50d629 | ||
|
|
74af777707 | ||
|
|
3dfb5e002a | ||
|
|
3075e5091b | ||
|
|
af12f7e9be | ||
|
|
a2f9f74740 | ||
|
|
13fb8a5980 | ||
|
|
41d2984f25 | ||
|
|
f63639d42b | ||
|
|
6df74943e0 | ||
|
|
865b147a91 | ||
|
|
c2f2b3cf32 | ||
|
|
4173713f94 | ||
|
|
53c9277209 | ||
|
|
f14977f495 | ||
|
|
cfa5cf76fc | ||
|
|
238925a681 | ||
|
|
8cb236e9eb | ||
|
|
3d039f8dba | ||
|
|
203d5362d4 | ||
|
|
6189d4c130 | ||
|
|
58f14d34d7 | ||
|
|
710eee2b49 | ||
|
|
bd4af50d04 | ||
|
|
8cb30347b6 | ||
|
|
d8e6b09b90 | ||
|
|
df8abc2b3f | ||
|
|
5a852bdffd | ||
|
|
11d3860c69 | ||
|
|
5a253001b3 | ||
|
|
083fec29c8 | ||
|
|
d41753a5f9 | ||
|
|
a086a817e0 | ||
|
|
e434a4d44b | ||
|
|
7295389284 | ||
|
|
f8e1bc685a | ||
|
|
5e1204e70d | ||
|
|
a00ec10261 | ||
|
|
cb9b182824 | ||
|
|
61d7c151da | ||
|
|
f9f1bdc77b | ||
|
|
f3452c09a9 | ||
|
|
2bed27681a | ||
|
|
5bb3b08698 | ||
|
|
82645d0953 | ||
|
|
2ab52fb864 | ||
|
|
1bba3082f0 | ||
|
|
7ed7a1b69d | ||
|
|
bd10d0193e | ||
|
|
67822f4c42 | ||
|
|
e7f6fbb473 | ||
|
|
1cb3d56618 | ||
|
|
d99485dd79 | ||
|
|
f85b9b8d09 | ||
|
|
5fb254b7ef | ||
|
|
6e202e34a4 | ||
|
|
843c814778 | ||
|
|
c7d4d8d799 | ||
|
|
91c60f801c | ||
|
|
ae30f55728 | ||
|
|
63b0cc17c4 | ||
|
|
c9a5111dcc | ||
|
|
8e12a4181c | ||
|
|
33393a7c00 | ||
|
|
7434b97511 | ||
|
|
29c8f8cfa1 | ||
|
|
36b2d99e3d | ||
|
|
4b8a48c817 | ||
|
|
e0862a0220 | ||
|
|
10fc7da3fa | ||
|
|
a1f535d9d8 | ||
|
|
993c87dd80 | ||
|
|
742e3080c9 | ||
|
|
3de1d21c86 | ||
|
|
83a0756b05 | ||
|
|
b8f2cd94aa | ||
|
|
64ff045559 | ||
|
|
109ab8eb68 | ||
|
|
bf09ea8ff5 | ||
|
|
7ce9fe9f97 | ||
|
|
aff9e0c459 | ||
|
|
a74df33feb | ||
|
|
dd63b614eb | ||
|
|
515e6e20c0 | ||
|
|
cc45fc9cc2 | ||
|
|
bc9c18f0b0 | ||
|
|
8ee21a7176 | ||
|
|
92aa9f2b8a | ||
|
|
c2243a0ea5 | ||
|
|
efbd23a6d9 | ||
|
|
26440fcf6a | ||
|
|
ac4c5451e4 | ||
|
|
c94c5cb7e4 | ||
|
|
78ca6edc99 | ||
|
|
d92dc22df3 | ||
|
|
48ab74f044 | ||
|
|
da68a63902 | ||
|
|
db99fd2d7d | ||
|
|
a61712c962 | ||
|
|
ea36555588 | ||
|
|
b02bc4d6d2 | ||
|
|
c836fe8723 | ||
|
|
8068ed317c | ||
|
|
0bd44ab745 | ||
|
|
172d12c75c | ||
|
|
6b6b9fffff | ||
|
|
f3fa5c8242 | ||
|
|
b0c5667f06 | ||
|
|
2d262c9755 | ||
|
|
571898bf63 | ||
|
|
0570277a2e | ||
|
|
557709d9bb | ||
|
|
0229508ca7 | ||
|
|
ace10ee42b | ||
|
|
4e36dcc98f | ||
|
|
a93ea184fe | ||
|
|
c309a3c07e | ||
|
|
30641c617f | ||
|
|
37fcb2ce55 | ||
|
|
97cd66afde | ||
|
|
6dbb6b8d0e | ||
|
|
4306782b93 | ||
|
|
6935306439 | ||
|
|
1aa23cd92b | ||
|
|
0bb4ba72d4 | ||
|
|
57a4d9ad4b | ||
|
|
bfc6617c12 | ||
|
|
c1b5b64797 | ||
|
|
9b563220b2 | ||
|
|
0eb4a6e8c6 | ||
|
|
4614def4cd | ||
|
|
c97dfe585a | ||
|
|
74ecbca430 | ||
|
|
6fa6d2e3f7 | ||
|
|
05c4d9202a | ||
|
|
3a4e9f6eca | ||
|
|
aa09ab0cd9 | ||
|
|
8affe05767 | ||
|
|
3aa02eede3 | ||
|
|
c86f926d1b | ||
|
|
ff4419357c | ||
|
|
3c131da050 | ||
|
|
5fd94a1e1d | ||
|
|
fcc4185bb2 | ||
|
|
bae251d15a | ||
|
|
6edc0c7427 | ||
|
|
563189fec9 | ||
|
|
25d7db2e62 | ||
|
|
e569c9ef64 | ||
|
|
c467175336 | ||
|
|
7562c103dd | ||
|
|
1be8c11cee | ||
|
|
ea6c1e65f6 | ||
|
|
67300c640c | ||
|
|
625e1c9a32 | ||
|
|
b09946684b | ||
|
|
beedfa1e4e | ||
|
|
f68c2420e7 | ||
|
|
cdfd24171a | ||
|
|
718e549de3 | ||
|
|
81f76a24d8 | ||
|
|
292f297006 | ||
|
|
b7be57272a | ||
|
|
a0dc1dbbc0 | ||
|
|
2e604884dd | ||
|
|
2049542833 | ||
|
|
caf19b8458 | ||
|
|
c5180b2dfc | ||
|
|
91c5b717f0 | ||
|
|
cb6f540efb | ||
|
|
ec833b52ee | ||
|
|
ba36c1dee2 | ||
|
|
5cb510cdf7 | ||
|
|
a72de461cd | ||
|
|
228f0d24a7 | ||
|
|
73cf41d7e5 | ||
|
|
819d4c6c1f | ||
|
|
4de3e40349 | ||
|
|
03f1d47462 | ||
|
|
a88908572c | ||
|
|
55d357dbb4 | ||
|
|
49d00ae056 | ||
|
|
e9eed5cbe4 | ||
|
|
2652ae0fb8 | ||
|
|
3f48ef4af9 | ||
|
|
a9de308aea | ||
|
|
405d03aac9 | ||
|
|
d5a819f30f | ||
|
|
81c3e5034a | ||
|
|
c971d3f490 | ||
|
|
26bcd2d065 | ||
|
|
9c1054adca | ||
|
|
cba7bfbbe7 | ||
|
|
2990b41d44 | ||
|
|
f543206d4a | ||
|
|
1cd2cba130 | ||
|
|
a009ad2a68 | ||
|
|
6a19fc5a21 | ||
|
|
91275b3747 | ||
|
|
df80ac720a | ||
|
|
6797ca9345 | ||
|
|
c266649454 | ||
|
|
7160b92bfb | ||
|
|
6d1a0ecc8a | ||
|
|
fd96be3870 | ||
|
|
3a3620e8aa | ||
|
|
11fd4c8244 | ||
|
|
2731e1d942 | ||
|
|
0ef3c83ed8 | ||
|
|
edad8a090b | ||
|
|
74dc55152f | ||
|
|
bf2471b8f1 | ||
|
|
21821ef062 | ||
|
|
5ba3a6d4fc | ||
|
|
8492e58a82 | ||
|
|
e65e20e1cb | ||
|
|
de7c029c9f | ||
|
|
89c992a3c9 | ||
|
|
0b76c3de69 | ||
|
|
ff99979855 | ||
|
|
9ddbb59fe1 | ||
|
|
36f87f98f8 | ||
|
|
5914fe3a4a | ||
|
|
29f651a89c | ||
|
|
2e1bdd922e | ||
|
|
ab5d50cbc3 | ||
|
|
7902db17c2 | ||
|
|
5626ee369c | ||
|
|
682e2b99f3 | ||
|
|
6ed32edec0 | ||
|
|
662bed5a28 | ||
|
|
d0e884dc54 | ||
|
|
abf3305397 | ||
|
|
a6f42abe62 | ||
|
|
7a50344af4 | ||
|
|
c7bcd4fbed | ||
|
|
d367a9fe80 | ||
|
|
0e0578eacb | ||
|
|
663eec9dc3 | ||
|
|
e62f8d608d | ||
|
|
0fb57a405f | ||
|
|
ce009e2dca | ||
|
|
c9cf60f173 | ||
|
|
5263c32ea4 | ||
|
|
89191367b7 | ||
|
|
999ce40ca6 | ||
|
|
bfa18ef30c | ||
|
|
a850879adf | ||
|
|
34c5c70ec6 | ||
|
|
81492aa5b2 | ||
|
|
e0efb8aec9 | ||
|
|
530f6865f9 | ||
|
|
f97d86cf4b | ||
|
|
781b9f561e | ||
|
|
a9ac33d994 | ||
|
|
c457a98d6a | ||
|
|
8d8439bf0b | ||
|
|
7cf419491a | ||
|
|
4cbdb39211 | ||
|
|
54ac93fb32 | ||
|
|
eddb5e139d | ||
|
|
5a53207723 | ||
|
|
0d3f6e5481 | ||
|
|
96a017262c | ||
|
|
04c73b64a5 | ||
|
|
02adf1fae0 | ||
|
|
9291e925ff | ||
|
|
1d0e26e494 | ||
|
|
5528f97c8f | ||
|
|
32d42b52e9 | ||
|
|
f1ed971f26 | ||
|
|
b5610a43db | ||
|
|
a182a6652e | ||
|
|
cf51a32ffb | ||
|
|
11cc11bc2f | ||
|
|
8cef903224 | ||
|
|
f5492db7fa | ||
|
|
cf603cdc7c | ||
|
|
d07e1a6341 | ||
|
|
549e16f069 | ||
|
|
2e1406b683 | ||
|
|
bfdfabd4a5 | ||
|
|
004c076236 | ||
|
|
93a6ecbbbc | ||
|
|
3c877f9604 | ||
|
|
d317c0208b | ||
|
|
4716725e81 | ||
|
|
4f15fe36e0 | ||
|
|
8bcc838f47 | ||
|
|
462e3d02dd | ||
|
|
541f9b2dc9 | ||
|
|
86107e2b5a | ||
|
|
5cc0026f3d | ||
|
|
c5db47444e | ||
|
|
fffc2b5633 | ||
|
|
637f260529 | ||
|
|
469f466832 | ||
|
|
ecb7480b37 | ||
|
|
42800e4037 | ||
|
|
b52bbc9ae4 | ||
|
|
eaa1390a36 | ||
|
|
b38f01ef51 | ||
|
|
73bf2b5e04 | ||
|
|
c8c92fcf92 | ||
|
|
cf6b159da5 | ||
|
|
330e1c5340 | ||
|
|
b40bc2e89c | ||
|
|
e8347e9e9b | ||
|
|
d051b967ed | ||
|
|
cf4776ef92 | ||
|
|
b1ff312ef5 | ||
|
|
319214cfb3 | ||
|
|
e75049b604 | ||
|
|
836cdf47a5 | ||
|
|
01f9c257e8 | ||
|
|
3d07f4fd56 | ||
|
|
7dc97a02fd | ||
|
|
afd2f12242 | ||
|
|
5faf0572f6 | ||
|
|
8d349ccbaa | ||
|
|
9c35a91e0f | ||
|
|
2da4e1b572 | ||
|
|
5368b134bb | ||
|
|
d1f090ee98 | ||
|
|
f311c9594f | ||
|
|
c6a3ab0a77 | ||
|
|
ba25ab3490 | ||
|
|
1095ebbeed | ||
|
|
299b15c8e9 | ||
|
|
091cb00ab9 | ||
|
|
2b408d2699 | ||
|
|
702efcacca | ||
|
|
98ba01dc49 | ||
|
|
e1225efa03 | ||
|
|
37c7b1e22c | ||
|
|
eea8e06d6b | ||
|
|
c4234961bc | ||
|
|
42cfda23f3 | ||
|
|
78316b9ade | ||
|
|
dd09289d2b | ||
|
|
10a66e9f9a | ||
|
|
ad4719399d | ||
|
|
892ab921b7 | ||
|
|
6551c32f6b | ||
|
|
b8eac648ab | ||
|
|
53fb1a25b3 | ||
|
|
3fdaf2df0c | ||
|
|
4ba722f51c | ||
|
|
42b726c376 | ||
|
|
8bec5f4b98 | ||
|
|
9a8bc523c5 | ||
|
|
59253973ce | ||
|
|
205149a884 | ||
|
|
a89a69e7da | ||
|
|
9bb429d4e7 | ||
|
|
542a3a4e71 | ||
|
|
3646590506 | ||
|
|
cf87c9594c | ||
|
|
71420f6c81 | ||
|
|
b6fdd8adc3 | ||
|
|
45747bd2ef | ||
|
|
69c75c1b56 | ||
|
|
bed5d8567c | ||
|
|
0c5d25a763 | ||
|
|
c324ee8347 | ||
|
|
193bbddb4e | ||
|
|
6821bb82db | ||
|
|
1cbd0569eb | ||
|
|
14dbb661f8 | ||
|
|
ea5a986693 | ||
|
|
37ec94e2f0 | ||
|
|
157e3b032d | ||
|
|
910c71954e | ||
|
|
27107066e3 | ||
|
|
fd1843e120 | ||
|
|
dd2ab67d2b | ||
|
|
9dd5634759 | ||
|
|
a521ba3abd | ||
|
|
6b0f05d075 | ||
|
|
61d6c02ecd | ||
|
|
b7d4e12fbf | ||
|
|
dc6d015870 | ||
|
|
07a05a3995 | ||
|
|
182625774d | ||
|
|
b4684a2406 | ||
|
|
ecc0ec05bd | ||
|
|
5193b739ca | ||
|
|
70c0a902f4 | ||
|
|
7f29fd0fcd | ||
|
|
239536f1d8 | ||
|
|
71be391dd3 | ||
|
|
df738acaa4 | ||
|
|
8ed56677e5 | ||
|
|
60d0b7c97a | ||
|
|
17a2c9e0c2 | ||
|
|
7ee37564d3 | ||
|
|
2ee7513f80 | ||
|
|
7d6505d296 | ||
|
|
8722e50897 | ||
|
|
fa8d76fa37 | ||
|
|
c50fca363a | ||
|
|
e8ff308154 | ||
|
|
cdcb9db4ba | ||
|
|
a8e405ac5d | ||
|
|
b6705cceb2 | ||
|
|
af58b4f286 | ||
|
|
02dc048ad2 | ||
|
|
a981d91552 | ||
|
|
96ffa3e354 | ||
|
|
1c564ed5f7 | ||
|
|
9dd5f62e0e | ||
|
|
c4737fb66a | ||
|
|
43d3b2df91 | ||
|
|
87c5488c20 | ||
|
|
e0d5596e63 | ||
|
|
1f2671db3d | ||
|
|
940ab9bdb5 | ||
|
|
8017d39c4e | ||
|
|
25bb4ee812 | ||
|
|
7c1aff34e2 | ||
|
|
28670d4420 | ||
|
|
30f3a3520e | ||
|
|
9acca40aaf | ||
|
|
bf2ed2c87a | ||
|
|
3561d58203 | ||
|
|
1d80616068 | ||
|
|
61c93a7f57 | ||
|
|
b042b8efbd | ||
|
|
8c00ba48ae | ||
|
|
991a27b7f2 | ||
|
|
69e38e9495 | ||
|
|
16d0162ef0 | ||
|
|
d07f5c502f | ||
|
|
5b1493507d | ||
|
|
1180572926 | ||
|
|
6dc19ef871 | ||
|
|
4a641fc498 | ||
|
|
2a04014fa7 | ||
|
|
4f20a815ec | ||
|
|
4906e14e51 | ||
|
|
c9296c7371 | ||
|
|
4db36b214b | ||
|
|
a6d94c7504 | ||
|
|
045abb48bb | ||
|
|
10337c620b | ||
|
|
698f557aa3 | ||
|
|
692c7c1a09 | ||
|
|
1bdfdcdb38 | ||
|
|
cacfe00c1d | ||
|
|
0fd0fa9c73 | ||
|
|
52fdc0f734 | ||
|
|
451c11d5a1 | ||
|
|
e92fcf6d46 | ||
|
|
07140aceb8 | ||
|
|
2cc32928a4 | ||
|
|
153513d5e2 | ||
|
|
94308408a9 | ||
|
|
1ae6970b77 | ||
|
|
0704f877f5 | ||
|
|
7ff0e6f9c0 | ||
|
|
5b4498ac9d | ||
|
|
976cc79b0c | ||
|
|
8d6ff0d727 | ||
|
|
26c0e4dac4 | ||
|
|
9ce1821be0 | ||
|
|
eeff4847fe | ||
|
|
2956f88050 | ||
|
|
26d9c1c07b | ||
|
|
73af014cbd | ||
|
|
d206f437ef | ||
|
|
d099586632 | ||
|
|
058d95e441 | ||
|
|
b40ac55755 | ||
|
|
43aa88e5a6 | ||
|
|
8fe2d519d2 | ||
|
|
07ed645f45 | ||
|
|
9485e8f5eb | ||
|
|
dc96616781 | ||
|
|
0c44b4ae05 | ||
|
|
3568464ca7 | ||
|
|
8e5296c71a | ||
|
|
eee971e3ef | ||
|
|
7a1f8b2d30 | ||
|
|
157e122891 | ||
|
|
b12ab7eae4 | ||
|
|
10c8a923e6 | ||
|
|
2b91589750 | ||
|
|
3e9674eaa9 | ||
|
|
d902c6a9f4 | ||
|
|
04a17e8c55 | ||
|
|
1b6cd457d3 | ||
|
|
2bc2080fbe | ||
|
|
6b6425e8d7 | ||
|
|
fb0e95d8ce | ||
|
|
4e4702a31f | ||
|
|
5a2ad22f97 | ||
|
|
f02139f7ce | ||
|
|
d004e175e2 | ||
|
|
7928a95c34 | ||
|
|
202e6c5228 | ||
|
|
0aeaa5e71d | ||
|
|
9ad4ee304b | ||
|
|
5bd280553d | ||
|
|
7e215c8220 | ||
|
|
2c23680163 | ||
|
|
c4f179daa0 | ||
|
|
c2f657a15a | ||
|
|
9332081875 | ||
|
|
1cec97568b | ||
|
|
b567713641 | ||
|
|
de776c1f32 | ||
|
|
c498ea74ec | ||
|
|
f4aad3a494 | ||
|
|
1cebf576c3 | ||
|
|
25dac2e239 | ||
|
|
4a9de7094c | ||
|
|
c4eab3b677 | ||
|
|
dd125c7999 | ||
|
|
5e3dce8088 | ||
|
|
4c64f2c2e8 | ||
|
|
aa6e11dfc0 | ||
|
|
e7d1e7dd54 | ||
|
|
03843fd3f0 | ||
|
|
294e9900ea | ||
|
|
f13651979e | ||
|
|
3d8ba4d09b | ||
|
|
63984c8dda | ||
|
|
e2fd8a5835 | ||
|
|
a0263870b9 | ||
|
|
3c4ae58aff | ||
|
|
5965707575 | ||
|
|
dbe0140578 | ||
|
|
bc21289793 | ||
|
|
f11bd0928d | ||
|
|
6ffd5ad2a4 | ||
|
|
7ce8cbc01c | ||
|
|
12a7603c77 | ||
|
|
53a6355074 | ||
|
|
f8ad249e42 | ||
|
|
3c41d3961e | ||
|
|
18bc715bad | ||
|
|
3349d20663 | ||
|
|
bad70e3eab | ||
|
|
21286eb163 | ||
|
|
0e5f07558c | ||
|
|
6e26b901e4 | ||
|
|
81c67c8f12 | ||
|
|
990e21eefc | ||
|
|
7141144a2f | ||
|
|
8c343501c1 | ||
|
|
44f08686cd | ||
|
|
65883f8c2a | ||
|
|
bd28a8fad5 | ||
|
|
8ba86c2c67 | ||
|
|
d3cddf9e44 | ||
|
|
5f3babee5c | ||
|
|
26dfc9a872 | ||
|
|
e47439e8be | ||
|
|
1ef53758be | ||
|
|
8544042789 | ||
|
|
f564d43d98 | ||
|
|
32fa0666c9 |
9
.vscode/tasks.json
vendored
9
.vscode/tasks.json
vendored
@@ -11,6 +11,15 @@
|
||||
"isDefault": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "build stage2",
|
||||
"type": "shell",
|
||||
"command": "make -C build/release stage2 -j$(nproc 2>/dev/null || sysctl -n hw.logicalcpu 2>/dev/null || echo 4)",
|
||||
"problemMatcher": [],
|
||||
"group": {
|
||||
"kind": "build"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "build-old",
|
||||
"type": "shell",
|
||||
|
||||
@@ -127,7 +127,8 @@ if(USE_MIMALLOC)
|
||||
# Unnecessarily deep directory structure, but it saves us from a complicated
|
||||
# stage0 update for now. If we ever update the other dependencies like
|
||||
# cadical, it might be worth reorganizing the directory structure.
|
||||
SOURCE_DIR "${CMAKE_BINARY_DIR}/mimalloc/src/mimalloc"
|
||||
SOURCE_DIR
|
||||
"${CMAKE_BINARY_DIR}/mimalloc/src/mimalloc"
|
||||
)
|
||||
FetchContent_MakeAvailable(mimalloc)
|
||||
endif()
|
||||
|
||||
@@ -243,6 +243,10 @@ public theorem lt_iff_le_and_ne [LE α] [LT α] [LawfulOrderLT α] [IsPartialOrd
|
||||
a < b ↔ a ≤ b ∧ a ≠ b := by
|
||||
simpa [le_iff_lt_or_eq, or_and_right] using Std.ne_of_lt
|
||||
|
||||
public theorem lt_trichotomy [LT α] [Std.Trichotomous (α := α) (· < ·)] (a b : α) :
|
||||
a < b ∨ a = b ∨ b < a :=
|
||||
Trichotomous.rel_or_eq_or_rel_swap
|
||||
|
||||
end LT
|
||||
end Std
|
||||
|
||||
|
||||
@@ -1386,6 +1386,11 @@ theorem Slice.copy_eq_copy_sliceTo {s : Slice} {pos : s.Pos} :
|
||||
rw [Nat.max_eq_right]
|
||||
exact pos.offset_str_le_offset_endExclusive
|
||||
|
||||
@[simp]
|
||||
theorem Slice.sliceTo_append_sliceFrom {s : Slice} {pos : s.Pos} :
|
||||
(s.sliceTo pos).copy ++ (s.sliceFrom pos).copy = s.copy :=
|
||||
copy_eq_copy_sliceTo.symm
|
||||
|
||||
/-- Given a slice `s` and a position on `s.copy`, obtain the corresponding position on `s`. -/
|
||||
@[inline]
|
||||
def Pos.ofCopy {s : Slice} (pos : s.copy.Pos) : s.Pos where
|
||||
@@ -1745,6 +1750,31 @@ theorem Slice.Pos.offset_cast {s t : Slice} {pos : s.Pos} {h : s.copy = t.copy}
|
||||
theorem Slice.Pos.cast_rfl {s : Slice} {pos : s.Pos} : pos.cast rfl = pos :=
|
||||
Slice.Pos.ext (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_cast {s t u : Slice} {hst : s.copy = t.copy} {htu : t.copy = u.copy}
|
||||
{pos : s.Pos} : (pos.cast hst).cast htu = pos.cast (hst.trans htu) :=
|
||||
Slice.Pos.ext (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_inj {s t : Slice} {hst : s.copy = t.copy} {p q : s.Pos} : p.cast hst = q.cast hst ↔ p = q := by
|
||||
simp [Slice.Pos.ext_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_startPos {s t : Slice} {hst : s.copy = t.copy} : s.startPos.cast hst = t.startPos :=
|
||||
Slice.Pos.ext (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_eq_startPos {s t : Slice} {p : s.Pos} {hst : s.copy = t.copy} : p.cast hst = t.startPos ↔ p = s.startPos := by
|
||||
rw [← cast_startPos (hst := hst), Pos.cast_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_endPos {s t : Slice} {hst : s.copy = t.copy} : s.endPos.cast hst = t.endPos :=
|
||||
Slice.Pos.ext (by simp [← rawEndPos_copy, hst])
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_eq_endPos {s t : Slice} {p : s.Pos} {hst : s.copy = t.copy} : p.cast hst = t.endPos ↔ p = s.endPos := by
|
||||
rw [← cast_endPos (hst := hst), Pos.cast_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_le_cast_iff {s t : Slice} {pos pos' : s.Pos} {h : s.copy = t.copy} :
|
||||
pos.cast h ≤ pos'.cast h ↔ pos ≤ pos' := by
|
||||
@@ -1755,6 +1785,22 @@ theorem Slice.Pos.cast_lt_cast_iff {s t : Slice} {pos pos' : s.Pos} {h : s.copy
|
||||
pos.cast h < pos'.cast h ↔ pos < pos' := by
|
||||
simp [Slice.Pos.lt_iff]
|
||||
|
||||
theorem Slice.Pos.cast_le_iff {s t : Slice} {pos : s.Pos} {pos' : t.Pos} {h : s.copy = t.copy} :
|
||||
pos.cast h ≤ pos' ↔ pos ≤ pos'.cast h.symm := by
|
||||
simp [Slice.Pos.le_iff]
|
||||
|
||||
theorem Slice.Pos.le_cast_iff {s t : Slice} {pos : t.Pos} {pos' : s.Pos} {h : s.copy = t.copy} :
|
||||
pos ≤ pos'.cast h ↔ pos.cast h.symm ≤ pos' := by
|
||||
simp [Slice.Pos.le_iff]
|
||||
|
||||
theorem Slice.Pos.cast_lt_iff {s t : Slice} {pos : s.Pos} {pos' : t.Pos} {h : s.copy = t.copy} :
|
||||
pos.cast h < pos' ↔ pos < pos'.cast h.symm := by
|
||||
simp [Slice.Pos.lt_iff]
|
||||
|
||||
theorem Slice.Pos.lt_cast_iff {s t : Slice} {pos : t.Pos} {pos' : s.Pos} {h : s.copy = t.copy} :
|
||||
pos < pos'.cast h ↔ pos.cast h.symm < pos' := by
|
||||
simp [Slice.Pos.lt_iff]
|
||||
|
||||
/-- Constructs a valid position on `t` from a valid position on `s` and a proof that `s = t`. -/
|
||||
@[inline]
|
||||
def Pos.cast {s t : String} (pos : s.Pos) (h : s = t) : t.Pos where
|
||||
@@ -1769,6 +1815,31 @@ theorem Pos.offset_cast {s t : String} {pos : s.Pos} {h : s = t} :
|
||||
theorem Pos.cast_rfl {s : String} {pos : s.Pos} : pos.cast rfl = pos :=
|
||||
Pos.ext (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_cast {s t u : String} {hst : s = t} {htu : t = u}
|
||||
{pos : s.Pos} : (pos.cast hst).cast htu = pos.cast (hst.trans htu) :=
|
||||
Pos.ext (by simp)
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_inj {s t : String} {hst : s = t} {p q : s.Pos} : p.cast hst = q.cast hst ↔ p = q := by
|
||||
simp [Pos.ext_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_startPos {s t : String} {hst : s = t} : s.startPos.cast hst = t.startPos := by
|
||||
subst hst; simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_eq_startPos {s t : String} {hst : s = t} {p : s.Pos} : p.cast hst = t.startPos ↔ p = s.startPos := by
|
||||
rw [← Pos.cast_startPos (hst := hst), Pos.cast_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_endPos {s t : String} {hst : s = t} : s.endPos.cast hst = t.endPos := by
|
||||
subst hst; simp
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_eq_endPos {s t : String} {hst : s = t} {p : s.Pos} : p.cast hst = t.endPos ↔ p = s.endPos := by
|
||||
rw [← Pos.cast_endPos (hst := hst), Pos.cast_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.cast_le_cast_iff {s t : String} {pos pos' : s.Pos} {h : s = t} :
|
||||
pos.cast h ≤ pos'.cast h ↔ pos ≤ pos' := by
|
||||
@@ -1779,6 +1850,22 @@ theorem Pos.cast_lt_cast_iff {s t : String} {pos pos' : s.Pos} {h : s = t} :
|
||||
pos.cast h < pos'.cast h ↔ pos < pos' := by
|
||||
cases h; simp
|
||||
|
||||
theorem Pos.cast_le_iff {s t : String} {pos : s.Pos} {pos' : t.Pos} {h : s = t} :
|
||||
pos.cast h ≤ pos' ↔ pos ≤ pos'.cast h.symm := by
|
||||
simp [Pos.le_iff]
|
||||
|
||||
theorem Pos.le_cast_iff {s t : String} {pos : t.Pos} {pos' : s.Pos} {h : s = t} :
|
||||
pos ≤ pos'.cast h ↔ pos.cast h.symm ≤ pos' := by
|
||||
simp [Pos.le_iff]
|
||||
|
||||
theorem Pos.cast_lt_iff {s t : String} {pos : s.Pos} {pos' : t.Pos} {h : s = t} :
|
||||
pos.cast h < pos' ↔ pos < pos'.cast h.symm := by
|
||||
simp [Pos.lt_iff]
|
||||
|
||||
theorem Pos.lt_cast_iff {s t : String} {pos : t.Pos} {pos' : s.Pos} {h : s = t} :
|
||||
pos < pos'.cast h ↔ pos.cast h.symm < pos' := by
|
||||
simp [Pos.lt_iff]
|
||||
|
||||
theorem Pos.copy_toSlice_eq_cast {s : String} (p : s.Pos) :
|
||||
p.toSlice.copy = p.cast copy_toSlice.symm :=
|
||||
Pos.ext (by simp)
|
||||
@@ -2054,6 +2141,10 @@ theorem Pos.le_ofToSlice_iff {s : String} {p : s.Pos} {q : s.toSlice.Pos} :
|
||||
theorem Pos.toSlice_lt_toSlice_iff {s : String} {p q : s.Pos} :
|
||||
p.toSlice < q.toSlice ↔ p < q := Iff.rfl
|
||||
|
||||
@[simp]
|
||||
theorem Pos.toSlice_le_toSlice_iff {s : String} {p q : s.Pos} :
|
||||
p.toSlice ≤ q.toSlice ↔ p ≤ q := Iff.rfl
|
||||
|
||||
theorem Pos.next_le_of_lt {s : String} {p q : s.Pos} {h} : p < q → p.next h ≤ q := by
|
||||
rw [next, Pos.ofToSlice_le_iff, ← Pos.toSlice_lt_toSlice_iff]
|
||||
exact Slice.Pos.next_le_of_lt
|
||||
|
||||
@@ -22,6 +22,10 @@ public section
|
||||
|
||||
namespace String
|
||||
|
||||
@[simp]
|
||||
theorem singleton_inj {c d : Char} : singleton c = singleton d ↔ c = d := by
|
||||
simp [← toList_inj]
|
||||
|
||||
@[simp]
|
||||
theorem singleton_append_inj : singleton c ++ s = singleton d ++ t ↔ c = d ∧ s = t := by
|
||||
simp [← toList_inj]
|
||||
@@ -191,18 +195,74 @@ theorem sliceTo_slice {s : String} {p₁ p₂ h p} :
|
||||
theorem Slice.sliceFrom_startPos {s : Slice} : s.sliceFrom s.startPos = s := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem Slice.sliceFrom_eq_self_iff {s : Slice} {p : s.Pos} : s.sliceFrom p = s ↔ p = s.startPos := by
|
||||
refine ⟨?_, by rintro rfl; simp⟩
|
||||
rcases s with ⟨str, startInclusive, endExclusive, h⟩
|
||||
simp [sliceFrom, Slice.startPos, String.Pos.ext_iff, Pos.Raw.ext_iff, Slice.Pos.ext_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.sliceTo_endPos {s : Slice} : s.sliceTo s.endPos = s := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem Slice.sliceTo_eq_self_iff {s : Slice} {p : s.Pos} : s.sliceTo p = s ↔ p = s.endPos := by
|
||||
refine ⟨?_, by rintro rfl; simp⟩
|
||||
rcases s with ⟨str, startInclusive, endExclusive, h⟩
|
||||
simp [sliceTo, Slice.endPos, String.Pos.ext_iff, Pos.Raw.ext_iff, Slice.Pos.ext_iff,
|
||||
utf8ByteSize_eq]
|
||||
omega
|
||||
|
||||
@[simp]
|
||||
theorem Slice.slice_startPos {s : Slice} {p : s.Pos} :
|
||||
s.slice s.startPos p (Pos.startPos_le _) = s.sliceTo p := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem Slice.slice_eq_self_iff {s : Slice} {p₁ p₂ : s.Pos} {h} :
|
||||
s.slice p₁ p₂ h = s ↔ p₁ = s.startPos ∧ p₂ = s.endPos := by
|
||||
refine ⟨?_, by rintro ⟨rfl, rfl⟩; simp⟩
|
||||
rcases s with ⟨str, startInclusive, endExclusive, h⟩
|
||||
simp [slice, Slice.endPos, String.Pos.ext_iff, Pos.Raw.ext_iff, Slice.Pos.ext_iff,
|
||||
utf8ByteSize_eq]
|
||||
omega
|
||||
|
||||
@[simp]
|
||||
theorem Slice.slice_endPos {s : Slice} {p : s.Pos} :
|
||||
s.slice p s.endPos (Pos.le_endPos _) = s.sliceFrom p := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem sliceFrom_startPos {s : String} : s.sliceFrom s.startPos = s := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem sliceFrom_eq_toSlice_iff {s : String} {p : s.Pos} : s.sliceFrom p = s.toSlice ↔ p = s.startPos := by
|
||||
simp [← sliceFrom_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem sliceTo_endPos {s : String} : s.sliceTo s.endPos = s := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem sliceTo_eq_toSlice_iff {s : String} {p : s.Pos} : s.sliceTo p = s.toSlice ↔ p = s.endPos := by
|
||||
simp [← sliceTo_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem slice_startPos {s : String} {p : s.Pos} :
|
||||
s.slice s.startPos p (Pos.startPos_le _) = s.sliceTo p := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem slice_endPos {s : String} {p : s.Pos} :
|
||||
s.slice p s.endPos (Pos.le_endPos _) = s.sliceFrom p := by
|
||||
ext <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem slice_eq_toSlice_iff {s : String} {p₁ p₂ : s.Pos} {h} :
|
||||
s.slice p₁ p₂ h = s.toSlice ↔ p₁ = s.startPos ∧ p₂ = s.endPos := by
|
||||
simp [← slice_toSlice]
|
||||
|
||||
end Iterate
|
||||
|
||||
theorem Slice.copy_eq_copy_slice {s : Slice} {pos₁ pos₂ : s.Pos} {h} :
|
||||
@@ -292,4 +352,39 @@ theorem nextn_endPos {s : String} : s.endPos.nextn n = s.endPos := by
|
||||
|
||||
end Pos
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.cast_toSlice_copy {s : Slice} {pos : s.Pos} :
|
||||
pos.copy.toSlice.cast (by simp) = pos := by
|
||||
ext; simp
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.sliceFrom_eq_startPos {s : Slice} {p : s.Pos} :
|
||||
(Pos.sliceFrom p p (Pos.le_refl _)) = Slice.startPos _ := by
|
||||
simp [← Pos.ofSliceFrom_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.sliceFrom_endPos {s : Slice} {p : s.Pos} :
|
||||
(Pos.sliceFrom p s.endPos (Pos.le_endPos _)) = Slice.endPos _ := by
|
||||
simp [← Pos.ofSliceFrom_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.sliceTo_startPos {s : Slice} {p : s.Pos} :
|
||||
(Pos.sliceTo p s.startPos (Pos.startPos_le _)) = Slice.startPos _ := by
|
||||
simp [← Pos.ofSliceTo_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.sliceTo_eq_endPos {s : Slice} {p : s.Pos} :
|
||||
(Pos.sliceTo p p (Pos.le_refl _)) = Slice.endPos _ := by
|
||||
simp [← Pos.ofSliceTo_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.slice_eq_startPos {s : Slice} {p₀ p₁ : s.Pos} {h} :
|
||||
(Pos.slice p₀ p₀ p₁ (Pos.le_refl _) h) = Slice.startPos _ := by
|
||||
simp [← Pos.ofSlice_inj]
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.slice_eq_endPos {s : Slice} {p₀ p₁ : s.Pos} {h} :
|
||||
(Pos.slice p₁ p₀ p₁ h (Pos.le_refl _)) = Slice.endPos _ := by
|
||||
simp [← Pos.ofSlice_inj]
|
||||
|
||||
end String
|
||||
|
||||
@@ -77,6 +77,15 @@ theorem join_cons : join (s :: l) = s ++ join l := by
|
||||
theorem toList_join {l : List String} : (String.join l).toList = l.flatMap String.toList := by
|
||||
induction l <;> simp_all
|
||||
|
||||
@[simp]
|
||||
theorem join_append {l m : List String} : String.join (l ++ m) = String.join l ++ String.join m := by
|
||||
simp [← toList_inj]
|
||||
|
||||
@[simp]
|
||||
theorem length_join {l : List String} : (String.join l).length = (l.map String.length).sum := by
|
||||
simp only [← length_toList, toList_join, List.length_flatMap]
|
||||
simp
|
||||
|
||||
namespace Slice
|
||||
|
||||
@[simp]
|
||||
|
||||
@@ -368,21 +368,41 @@ theorem Slice.Pos.ofSliceTo_ne_endPos {s : Slice} {p₀ : s.Pos} {p : (s.sliceTo
|
||||
refine (lt_endPos_iff _).1 (Std.lt_of_lt_of_le ?_ (le_endPos p₀))
|
||||
simpa [← lt_endPos_iff, ← ofSliceTo_lt_ofSliceTo_iff] using h
|
||||
|
||||
theorem Slice.Pos.ne_endPos_of_sliceTo_ne_endPos {s : Slice} {p p₀ : s.Pos} {h₀}
|
||||
(h : Pos.sliceTo p₀ p h₀ ≠ Slice.endPos _) : p ≠ s.endPos := by
|
||||
rw [← Pos.ofSliceTo_sliceTo (h := h₀)]
|
||||
apply Pos.ofSliceTo_ne_endPos h
|
||||
|
||||
theorem Slice.Pos.ofSliceFrom_ne_startPos {s : Slice} {p₀ : s.Pos} {p : (s.sliceFrom p₀).Pos}
|
||||
(h : p ≠ (s.sliceFrom p₀).startPos) : Pos.ofSliceFrom p ≠ s.startPos := by
|
||||
refine (startPos_lt_iff _).1 (Std.lt_of_le_of_lt (startPos_le p₀) ?_)
|
||||
simpa [← startPos_lt_iff, ← ofSliceFrom_lt_ofSliceFrom_iff] using h
|
||||
|
||||
theorem Slice.Pos.ne_startPos_of_sliceFrom_ne_startPos {s : Slice} {p p₀ : s.Pos} {h₀}
|
||||
(h : Pos.sliceFrom p₀ p h₀ ≠ Slice.startPos _) : p ≠ s.startPos := by
|
||||
rw [← Pos.ofSliceFrom_sliceFrom (h := h₀)]
|
||||
apply Pos.ofSliceFrom_ne_startPos h
|
||||
|
||||
theorem Pos.ofSliceTo_ne_endPos {s : String} {p₀ : s.Pos} {p : (s.sliceTo p₀).Pos}
|
||||
(h : p ≠ (s.sliceTo p₀).endPos) : Pos.ofSliceTo p ≠ s.endPos := by
|
||||
refine (lt_endPos_iff _).1 (Std.lt_of_lt_of_le ?_ (le_endPos p₀))
|
||||
simpa [← Slice.Pos.lt_endPos_iff, ← ofSliceTo_lt_ofSliceTo_iff] using h
|
||||
|
||||
theorem Pos.ne_endPos_of_sliceTo_ne_endPos {s : String} {p p₀ : s.Pos} {h₀}
|
||||
(h : Pos.sliceTo p₀ p h₀ ≠ Slice.endPos _) : p ≠ s.endPos := by
|
||||
rw [← Pos.ofSliceTo_sliceTo (h := h₀)]
|
||||
apply Pos.ofSliceTo_ne_endPos h
|
||||
|
||||
theorem Pos.ofSliceFrom_ne_startPos {s : String} {p₀ : s.Pos} {p : (s.sliceFrom p₀).Pos}
|
||||
(h : p ≠ (s.sliceFrom p₀).startPos) : Pos.ofSliceFrom p ≠ s.startPos := by
|
||||
refine (startPos_lt_iff _).1 (Std.lt_of_le_of_lt (startPos_le p₀) ?_)
|
||||
simpa [← Slice.Pos.startPos_lt_iff, ← ofSliceFrom_lt_ofSliceFrom_iff] using h
|
||||
|
||||
theorem Pos.ne_startPos_of_sliceFrom_ne_startPos {s : String} {p p₀ : s.Pos} {h₀}
|
||||
(h : Pos.sliceFrom p₀ p h₀ ≠ Slice.startPos _) : p ≠ s.startPos := by
|
||||
rw [← Pos.ofSliceFrom_sliceFrom (h := h₀)]
|
||||
apply Pos.ofSliceFrom_ne_startPos h
|
||||
|
||||
theorem Slice.Pos.ofSliceTo_next {s : Slice} {p₀ : s.Pos} {p : (s.sliceTo p₀).Pos} {h} :
|
||||
Pos.ofSliceTo (p.next h) = (Pos.ofSliceTo p).next (ofSliceTo_ne_endPos h) := by
|
||||
rw [eq_comm, Pos.next_eq_iff]
|
||||
@@ -514,21 +534,41 @@ theorem Slice.Pos.ofSlice_ne_endPos {s : Slice} {p₀ p₁ : s.Pos} {h} {p : (s.
|
||||
refine (lt_endPos_iff _).1 (Std.lt_of_lt_of_le ?_ (le_endPos p₁))
|
||||
simpa [← lt_endPos_iff, ← ofSlice_lt_ofSlice_iff] using h
|
||||
|
||||
theorem Slice.Pos.ne_endPos_of_slice_ne_endPos {s : Slice} {p p₀ p₁ : s.Pos} {h₁ h₂}
|
||||
(h : Pos.slice p p₀ p₁ h₁ h₂ ≠ Slice.endPos _) : p ≠ s.endPos := by
|
||||
rw [← Pos.ofSlice_slice (h₁ := h₁) (h₂ := h₂)]
|
||||
apply Pos.ofSlice_ne_endPos h
|
||||
|
||||
theorem Slice.Pos.ofSlice_ne_startPos {s : Slice} {p₀ p₁ : s.Pos} {h} {p : (s.slice p₀ p₁ h).Pos}
|
||||
(h : p ≠ (s.slice p₀ p₁ h).startPos) : Pos.ofSlice p ≠ s.startPos := by
|
||||
refine (startPos_lt_iff _).1 (Std.lt_of_le_of_lt (startPos_le p₀) ?_)
|
||||
simpa [← startPos_lt_iff, ← ofSlice_lt_ofSlice_iff] using h
|
||||
|
||||
theorem Slice.Pos.ne_startPos_of_slice_ne_startPos {s : Slice} {p p₀ p₁ : s.Pos} {h₁ h₂}
|
||||
(h : Pos.slice p p₀ p₁ h₁ h₂ ≠ Slice.startPos _) : p ≠ s.startPos := by
|
||||
rw [← Pos.ofSlice_slice (h₁ := h₁) (h₂ := h₂)]
|
||||
apply Pos.ofSlice_ne_startPos h
|
||||
|
||||
theorem Pos.ofSlice_ne_endPos {s : String} {p₀ p₁ : s.Pos} {h} {p : (s.slice p₀ p₁ h).Pos}
|
||||
(h : p ≠ (s.slice p₀ p₁ h).endPos) : Pos.ofSlice p ≠ s.endPos := by
|
||||
refine (lt_endPos_iff _).1 (Std.lt_of_lt_of_le ?_ (le_endPos p₁))
|
||||
simpa [← Slice.Pos.lt_endPos_iff, ← ofSlice_lt_ofSlice_iff] using h
|
||||
|
||||
theorem Pos.ne_endPos_of_slice_ne_endPos {s : String} {p p₀ p₁ : s.Pos} {h₁ h₂}
|
||||
(h : Pos.slice p p₀ p₁ h₁ h₂ ≠ Slice.endPos _) : p ≠ s.endPos := by
|
||||
rw [← Pos.ofSlice_slice (h₁ := h₁) (h₂ := h₂)]
|
||||
apply Pos.ofSlice_ne_endPos h
|
||||
|
||||
theorem Pos.ofSlice_ne_startPos {s : String} {p₀ p₁ : s.Pos} {h} {p : (s.slice p₀ p₁ h).Pos}
|
||||
(h : p ≠ (s.slice p₀ p₁ h).startPos) : Pos.ofSlice p ≠ s.startPos := by
|
||||
refine (startPos_lt_iff _).1 (Std.lt_of_le_of_lt (startPos_le p₀) ?_)
|
||||
simpa [← Slice.Pos.startPos_lt_iff, ← ofSlice_lt_ofSlice_iff] using h
|
||||
|
||||
theorem Pos.ne_startPos_of_slice_ne_startPos {s : String} {p p₀ p₁ : s.Pos} {h₁ h₂}
|
||||
(h : Pos.slice p p₀ p₁ h₁ h₂ ≠ Slice.startPos _) : p ≠ s.startPos := by
|
||||
rw [← Pos.ofSlice_slice (h₁ := h₁) (h₂ := h₂)]
|
||||
apply Pos.ofSlice_ne_startPos h
|
||||
|
||||
@[simp]
|
||||
theorem Slice.Pos.offset_le_rawEndPos {s : Slice} {p : s.Pos} :
|
||||
p.offset ≤ s.rawEndPos :=
|
||||
@@ -581,21 +621,37 @@ theorem Slice.Pos.get_eq_get_ofSliceTo {s : Slice} {p₀ : s.Pos} {pos : (s.slic
|
||||
pos.get h = (ofSliceTo pos).get (ofSliceTo_ne_endPos h) := by
|
||||
simp [Slice.Pos.get]
|
||||
|
||||
theorem Slice.Pos.get_sliceTo {s : Slice} {p₀ p : s.Pos} {h h'} :
|
||||
(Pos.sliceTo p₀ p h).get h' = p.get (ne_endPos_of_sliceTo_ne_endPos h') := by
|
||||
simp [get_eq_get_ofSliceTo]
|
||||
|
||||
theorem Pos.get_eq_get_ofSliceTo {s : String} {p₀ : s.Pos}
|
||||
{pos : (s.sliceTo p₀).Pos} {h} :
|
||||
pos.get h = (ofSliceTo pos).get (ofSliceTo_ne_endPos h) := by
|
||||
simp [Pos.get, Slice.Pos.get]
|
||||
|
||||
theorem Pos.get_sliceTo {s : String} {p₀ p : s.Pos} {h h'} :
|
||||
(Pos.sliceTo p₀ p h).get h' = p.get (ne_endPos_of_sliceTo_ne_endPos h') := by
|
||||
simp [get_eq_get_ofSliceTo]
|
||||
|
||||
theorem Slice.Pos.get_eq_get_ofSlice {s : Slice} {p₀ p₁ : s.Pos} {h}
|
||||
{pos : (s.slice p₀ p₁ h).Pos} {h'} :
|
||||
pos.get h' = (ofSlice pos).get (ofSlice_ne_endPos h') := by
|
||||
simp [Slice.Pos.get, Nat.add_assoc]
|
||||
|
||||
theorem Slice.Pos.get_slice {s : Slice} {p p₀ p₁ : s.Pos} {h₁ h₂ h} :
|
||||
(Pos.slice p p₀ p₁ h₁ h₂).get h = p.get (ne_endPos_of_slice_ne_endPos h) := by
|
||||
simp [get_eq_get_ofSlice]
|
||||
|
||||
theorem Pos.get_eq_get_ofSlice {s : String} {p₀ p₁ : s.Pos} {h}
|
||||
{pos : (s.slice p₀ p₁ h).Pos} {h'} :
|
||||
pos.get h' = (ofSlice pos).get (ofSlice_ne_endPos h') := by
|
||||
simp [Pos.get, Slice.Pos.get]
|
||||
|
||||
theorem Pos.get_slice {s : String} {p p₀ p₁ : s.Pos} {h₁ h₂ h} :
|
||||
(Pos.slice p p₀ p₁ h₁ h₂).get h = p.get (ne_endPos_of_slice_ne_endPos h) := by
|
||||
simp [get_eq_get_ofSlice]
|
||||
|
||||
theorem Slice.Pos.ofSlice_next {s : Slice} {p₀ p₁ : s.Pos} {h}
|
||||
{p : (s.slice p₀ p₁ h).Pos} {h'} :
|
||||
Pos.ofSlice (p.next h') = (Pos.ofSlice p).next (ofSlice_ne_endPos h') := by
|
||||
|
||||
@@ -12,7 +12,7 @@ public import Init.Data.Iterators.Consumers.Collect
|
||||
import all Init.Data.String.Pattern.Basic
|
||||
import Init.Data.String.OrderInstances
|
||||
import Init.Data.String.Lemmas.IsEmpty
|
||||
import Init.Data.String.Lemmas.Basic
|
||||
public import Init.Data.String.Lemmas.Basic
|
||||
import Init.Data.String.Lemmas.Order
|
||||
import Init.Data.String.Termination
|
||||
import Init.Data.Order.Lemmas
|
||||
@@ -52,19 +52,23 @@ The corresponding compatibility typeclasses are
|
||||
{name (scope := "Init.Data.String.Lemmas.Pattern.Basic")}`String.Slice.Pattern.Model.LawfulForwardPatternModel`
|
||||
and
|
||||
{name (scope := "Init.Data.String.Lemmas.Pattern.Basic")}`String.Slice.Pattern.Model.LawfulToForwardSearcherModel`.
|
||||
|
||||
We include the condition that the empty string is not a match. This is necessary for the theory to
|
||||
work out as there is just no reasonable notion of searching that works for the empty string that is
|
||||
still specific enough to yield reasonably strong correctness results for operations based on
|
||||
searching.
|
||||
|
||||
This means that pattern types that allow searching for the empty string will have to special-case
|
||||
the empty string in their correctness statements.
|
||||
-/
|
||||
class PatternModel {ρ : Type} (pat : ρ) : Type where
|
||||
/-- The predicate that says which strings match the pattern. -/
|
||||
Matches : String → Prop
|
||||
not_matches_empty : ¬ Matches ""
|
||||
|
||||
/--
|
||||
Type class for the condition that the empty string is not a match. This is necessary for the theory to
|
||||
work out as there is just no reasonable notion of searching that works for the empty string that is
|
||||
still specific enough to yield reasonably strong correctness results for operations based on
|
||||
searching.
|
||||
-/
|
||||
class StrictPatternModel {ρ : Type} (pat : ρ) [PatternModel pat] : Prop where
|
||||
not_matches_empty : ¬ PatternModel.Matches pat ""
|
||||
|
||||
theorem not_matches_empty {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat] :
|
||||
¬ PatternModel.Matches pat "" :=
|
||||
StrictPatternModel.not_matches_empty
|
||||
|
||||
/--
|
||||
Predicate stating that the region between the start of the slice {name}`s` and the position
|
||||
@@ -74,10 +78,10 @@ Predicate stating that the region between the start of the slice {name}`s` and t
|
||||
structure IsMatch (pat : ρ) [PatternModel pat] {s : Slice} (endPos : s.Pos) : Prop where
|
||||
matches_copy : PatternModel.Matches pat (s.sliceTo endPos).copy
|
||||
|
||||
theorem IsMatch.ne_startPos {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
theorem IsMatch.ne_startPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
(h : IsMatch pat pos) : pos ≠ s.startPos := by
|
||||
intro hc
|
||||
apply PatternModel.not_matches_empty (pat := pat)
|
||||
apply not_matches_empty (pat := pat)
|
||||
simpa [hc] using h.matches_copy
|
||||
|
||||
theorem isMatch_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos} :
|
||||
@@ -90,6 +94,21 @@ theorem isMatch_iff_exists_splits {pat : ρ} [PatternModel pat] {s : Slice} {pos
|
||||
refine ⟨fun h => ⟨_, _, pos.splits, h⟩, fun ⟨t₁, t₂, h₁, h₂⟩ => ?_⟩
|
||||
rwa [h₁.eq_left pos.splits] at h₂
|
||||
|
||||
@[simp]
|
||||
theorem isMatch_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (h : s.copy = t.copy) {pos : s.Pos} :
|
||||
IsMatch pat (pos.cast h) ↔ IsMatch pat pos := by
|
||||
simp [isMatch_iff]
|
||||
|
||||
@[simp]
|
||||
theorem isMatch_sliceTo_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos p : s.Pos} {h} :
|
||||
IsMatch pat (Pos.sliceTo p pos h) ↔ IsMatch pat pos := by
|
||||
simp [isMatch_iff]
|
||||
|
||||
@[simp]
|
||||
theorem isMatch_ofSliceTo_iff {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos} {pos : (s.sliceTo p).Pos} :
|
||||
IsMatch pat (Pos.ofSliceTo pos) ↔ IsMatch pat pos := by
|
||||
rw [← isMatch_sliceTo_iff (p := p) (h := Pos.ofSliceTo_le), Pos.sliceTo_ofSliceTo]
|
||||
|
||||
/--
|
||||
Predicate stating that the region between the position {name}`startPos` and the end of the slice
|
||||
{name}`s` matches the pattern {name}`pat`. Note that there might be a longer match.
|
||||
@@ -97,10 +116,10 @@ Predicate stating that the region between the position {name}`startPos` and the
|
||||
structure IsRevMatch (pat : ρ) [PatternModel pat] {s : Slice} (startPos : s.Pos) : Prop where
|
||||
matches_copy : PatternModel.Matches pat (s.sliceFrom startPos).copy
|
||||
|
||||
theorem IsRevMatch.ne_endPos {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
theorem IsRevMatch.ne_endPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
(h : IsRevMatch pat pos) : pos ≠ s.endPos := by
|
||||
intro hc
|
||||
apply PatternModel.not_matches_empty (pat := pat)
|
||||
apply not_matches_empty (pat := pat)
|
||||
simpa [hc] using h.matches_copy
|
||||
|
||||
theorem isRevMatch_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos} :
|
||||
@@ -113,6 +132,21 @@ theorem isRevMatch_iff_exists_splits {pat : ρ} [PatternModel pat] {s : Slice} {
|
||||
refine ⟨fun h => ⟨_, _, pos.splits, h⟩, fun ⟨t₁, t₂, h₁, h₂⟩ => ?_⟩
|
||||
rwa [h₁.eq_right pos.splits] at h₂
|
||||
|
||||
@[simp]
|
||||
theorem isRevMatch_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (h : s.copy = t.copy) {pos : s.Pos} :
|
||||
IsRevMatch pat (pos.cast h) ↔ IsRevMatch pat pos := by
|
||||
simp [isRevMatch_iff]
|
||||
|
||||
@[simp]
|
||||
theorem isRevMatch_sliceFrom_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos p : s.Pos} {h} :
|
||||
IsRevMatch pat (Pos.sliceFrom p pos h) ↔ IsRevMatch pat pos := by
|
||||
simp [isRevMatch_iff]
|
||||
|
||||
@[simp]
|
||||
theorem isRevMatch_ofSliceFrom_iff {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos} {pos : (s.sliceFrom p).Pos} :
|
||||
IsRevMatch pat (Pos.ofSliceFrom pos) ↔ IsRevMatch pat pos := by
|
||||
rw [← isRevMatch_sliceFrom_iff (p := p) (h := Pos.le_ofSliceFrom), Pos.sliceFrom_ofSliceFrom]
|
||||
|
||||
/--
|
||||
Predicate stating that the region between the start of the slice {name}`s` and the position
|
||||
{name}`pos` matches the pattern {name}`pat`, and that there is no longer match starting at the
|
||||
@@ -125,10 +159,19 @@ structure IsLongestMatch (pat : ρ) [PatternModel pat] {s : Slice} (pos : s.Pos)
|
||||
isMatch : IsMatch pat pos
|
||||
not_isMatch : ∀ pos', pos < pos' → ¬ IsMatch pat pos'
|
||||
|
||||
theorem IsLongestMatch.ne_startPos {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
theorem isLongestMatch_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos} :
|
||||
IsLongestMatch pat pos ↔ IsMatch pat pos ∧ ∀ pos', pos < pos' → ¬ IsMatch pat pos' :=
|
||||
⟨fun ⟨h, h'⟩ => ⟨h, h'⟩, fun ⟨h, h'⟩ => ⟨h, h'⟩⟩
|
||||
|
||||
theorem IsLongestMatch.ne_startPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
(h : IsLongestMatch pat pos) : pos ≠ s.startPos :=
|
||||
h.isMatch.ne_startPos
|
||||
|
||||
@[simp]
|
||||
theorem not_isLongestMatch_startPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} :
|
||||
¬IsLongestMatch pat s.startPos :=
|
||||
fun h => h.ne_startPos rfl
|
||||
|
||||
theorem IsLongestMatch.eq {pat : ρ} [PatternModel pat] {s : Slice} {pos pos' : s.Pos}
|
||||
(h : IsLongestMatch pat pos) (h' : IsLongestMatch pat pos') : pos = pos' := by
|
||||
apply Std.le_antisymm
|
||||
@@ -149,6 +192,34 @@ theorem IsLongestMatch.le_of_isMatch {pat : ρ} [PatternModel pat] {s : Slice} {
|
||||
(h : IsLongestMatch pat pos) (h' : IsMatch pat pos') : pos' ≤ pos :=
|
||||
Std.not_lt.1 (fun hlt => h.not_isMatch _ hlt h')
|
||||
|
||||
@[simp]
|
||||
theorem isLongestMatch_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice}
|
||||
(hst : s.copy = t.copy) {pos : s.Pos} :
|
||||
IsLongestMatch pat (pos.cast hst) ↔ IsLongestMatch pat pos := by
|
||||
simp only [isLongestMatch_iff, isMatch_cast_iff, and_congr_right_iff]
|
||||
refine fun _ => ⟨fun h p hp => ?_, fun h p hp => ?_⟩
|
||||
· rw [← isMatch_cast_iff hst]
|
||||
exact h _ (by simpa)
|
||||
· have : p = (p.cast hst.symm).cast hst := by simp
|
||||
rw [this, isMatch_cast_iff hst]
|
||||
exact h _ (by rwa [this, Pos.cast_lt_cast_iff] at hp)
|
||||
|
||||
theorem IsLongestMatch.of_eq {pat : ρ} [PatternModel pat] {s t : Slice} {pos : s.Pos} {pos' : t.Pos}
|
||||
(h : IsLongestMatch pat pos) (h₁ : s.copy = t.copy) (h₂ : pos.cast h₁ = pos') :
|
||||
IsLongestMatch pat pos' := by
|
||||
subst h₂; simpa
|
||||
|
||||
theorem IsLongestMatch.sliceTo {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
(h : IsLongestMatch pat pos) (p : s.Pos) (hp : pos ≤ p) : IsLongestMatch pat (Pos.sliceTo p pos hp) := by
|
||||
simp [isLongestMatch_iff] at ⊢ h
|
||||
refine ⟨h.1, fun p hp => ?_⟩
|
||||
rw [← isMatch_ofSliceTo_iff]
|
||||
exact h.2 _ (by simpa [Pos.sliceTo_lt_iff] using hp)
|
||||
|
||||
theorem isLongestMatch_of_ofSliceTo {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos} {pos : (s.sliceTo p).Pos}
|
||||
(h : IsLongestMatch pat (Pos.ofSliceTo pos)) : IsLongestMatch pat pos := by
|
||||
simpa using h.sliceTo p
|
||||
|
||||
/--
|
||||
Predicate stating that the region between the start of the slice {name}`s` and the position
|
||||
{name}`pos` matches the pattern {name}`pat`, and that there is no longer match starting at the
|
||||
@@ -161,10 +232,19 @@ structure IsLongestRevMatch (pat : ρ) [PatternModel pat] {s : Slice} (pos : s.P
|
||||
isRevMatch : IsRevMatch pat pos
|
||||
not_isRevMatch : ∀ pos', pos' < pos → ¬ IsRevMatch pat pos'
|
||||
|
||||
theorem IsLongestRevMatch.ne_endPos {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
theorem isLongestRevMatch_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos} :
|
||||
IsLongestRevMatch pat pos ↔ IsRevMatch pat pos ∧ ∀ pos', pos' < pos → ¬ IsRevMatch pat pos' :=
|
||||
⟨fun ⟨h, h'⟩ => ⟨h, h'⟩, fun ⟨h, h'⟩ => ⟨h, h'⟩⟩
|
||||
|
||||
theorem IsLongestRevMatch.ne_endPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
(h : IsLongestRevMatch pat pos) : pos ≠ s.endPos :=
|
||||
h.isRevMatch.ne_endPos
|
||||
|
||||
@[simp]
|
||||
theorem not_isLongestRevMatch_endPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} :
|
||||
¬IsLongestRevMatch pat s.endPos :=
|
||||
fun h => h.ne_endPos rfl
|
||||
|
||||
theorem IsLongestRevMatch.eq {pat : ρ} [PatternModel pat] {s : Slice} {pos pos' : s.Pos}
|
||||
(h : IsLongestRevMatch pat pos) (h' : IsLongestRevMatch pat pos') : pos = pos' := by
|
||||
apply Std.le_antisymm
|
||||
@@ -185,6 +265,34 @@ theorem IsLongestRevMatch.le_of_isRevMatch {pat : ρ} [PatternModel pat] {s : Sl
|
||||
(h : IsLongestRevMatch pat pos) (h' : IsRevMatch pat pos') : pos ≤ pos' :=
|
||||
Std.not_lt.1 (fun hlt => h.not_isRevMatch _ hlt h')
|
||||
|
||||
@[simp]
|
||||
theorem isLongestRevMatch_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice}
|
||||
(hst : s.copy = t.copy) {pos : s.Pos} :
|
||||
IsLongestRevMatch pat (pos.cast hst) ↔ IsLongestRevMatch pat pos := by
|
||||
simp only [isLongestRevMatch_iff, isRevMatch_cast_iff, and_congr_right_iff]
|
||||
refine fun _ => ⟨fun h p hp => ?_, fun h p hp => ?_⟩
|
||||
· rw [← isRevMatch_cast_iff hst]
|
||||
exact h _ (by simpa)
|
||||
· have : p = (p.cast hst.symm).cast hst := by simp
|
||||
rw [this, isRevMatch_cast_iff hst]
|
||||
exact h _ (by rwa [this, Pos.cast_lt_cast_iff] at hp)
|
||||
|
||||
theorem IsLongestRevMatch.of_eq {pat : ρ} [PatternModel pat] {s t : Slice} {pos : s.Pos} {pos' : t.Pos}
|
||||
(h : IsLongestRevMatch pat pos) (h₁ : s.copy = t.copy) (h₂ : pos.cast h₁ = pos') :
|
||||
IsLongestRevMatch pat pos' := by
|
||||
subst h₂; simpa
|
||||
|
||||
theorem IsLongestRevMatch.sliceFrom {pat : ρ} [PatternModel pat] {s : Slice} {pos : s.Pos}
|
||||
(h : IsLongestRevMatch pat pos) (p : s.Pos) (hp : p ≤ pos) : IsLongestRevMatch pat (Pos.sliceFrom p pos hp) := by
|
||||
simp [isLongestRevMatch_iff] at ⊢ h
|
||||
refine ⟨h.1, fun p' hp' => ?_⟩
|
||||
rw [← isRevMatch_ofSliceFrom_iff]
|
||||
exact h.2 _ (by simpa [Pos.lt_sliceFrom_iff] using hp')
|
||||
|
||||
theorem isLongestRevMatch_of_ofSliceFrom {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos} {pos : (s.sliceFrom p).Pos}
|
||||
(h : IsLongestRevMatch pat (Pos.ofSliceFrom pos)) : IsLongestRevMatch pat pos := by
|
||||
simpa using h.sliceFrom p
|
||||
|
||||
/--
|
||||
Predicate stating that a match for a given pattern is never a proper prefix of another match.
|
||||
|
||||
@@ -240,12 +348,21 @@ theorem isLongestMatchAt_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos₁ p
|
||||
∃ (h : pos₁ ≤ pos₂), IsLongestMatch pat (Slice.Pos.sliceFrom _ _ h) :=
|
||||
⟨fun ⟨h, h'⟩ => ⟨h, h'⟩, fun ⟨h, h'⟩ => ⟨h, h'⟩⟩
|
||||
|
||||
theorem IsLongestMatchAt.lt {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
theorem IsLongestMatchAt.lt {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestMatchAt pat startPos endPos) : startPos < endPos := by
|
||||
have := h.isLongestMatch_sliceFrom.ne_startPos
|
||||
rw [← Pos.startPos_lt_iff, ← Slice.Pos.ofSliceFrom_lt_ofSliceFrom_iff] at this
|
||||
simpa
|
||||
|
||||
theorem IsLongestMatchAt.ne {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestMatchAt pat startPos endPos) : startPos ≠ endPos :=
|
||||
Std.ne_of_lt h.lt
|
||||
|
||||
@[simp]
|
||||
theorem not_isLongestMatchAt_self {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {startPos : s.Pos} :
|
||||
¬IsLongestMatchAt pat startPos startPos :=
|
||||
fun h => h.ne rfl
|
||||
|
||||
theorem IsLongestMatchAt.eq {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos endPos' : s.Pos}
|
||||
(h : IsLongestMatchAt pat startPos endPos) (h' : IsLongestMatchAt pat startPos endPos') :
|
||||
endPos = endPos' := by
|
||||
@@ -282,6 +399,77 @@ theorem isLongestMatchAt_startPos_iff {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
⟨fun h => isLongestMatch_of_eq (by simp) (by simp) h,
|
||||
fun h => isLongestMatch_of_eq (by simp) (by simp) h⟩
|
||||
|
||||
theorem isLongestMatch_iff_isLongestMatchAt_ofSliceFrom {pat : ρ} [PatternModel pat]
|
||||
{s : Slice} {base : s.Pos} (endPos : (s.sliceFrom base).Pos) :
|
||||
IsLongestMatch pat endPos ↔ IsLongestMatchAt pat base (Pos.ofSliceFrom endPos) := by
|
||||
simp [← isLongestMatchAt_startPos_iff, isLongestMatchAt_iff_isLongestMatchAt_ofSliceFrom]
|
||||
|
||||
theorem IsLongestMatchAt.matches_slice {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
{startPos endPos : s.Pos} (h : IsLongestMatchAt pat startPos endPos) :
|
||||
PatternModel.Matches pat (s.slice startPos endPos h.le).copy := by
|
||||
simpa using h.isLongestMatch_sliceFrom.isMatch.matches_copy
|
||||
|
||||
@[simp]
|
||||
theorem isLongestMatchAt_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{startPos endPos : s.Pos} :
|
||||
IsLongestMatchAt pat (startPos.cast hst) (endPos.cast hst) ↔ IsLongestMatchAt pat startPos endPos := by
|
||||
simp [isLongestMatchAt_iff, Pos.sliceFrom_cast]
|
||||
|
||||
theorem IsLongestMatchAt.of_eq {pat : ρ} [PatternModel pat] {s t : Slice} {s₁ e₁ : s.Pos} {s₂ e₂ : t.Pos}
|
||||
(h : IsLongestMatchAt pat s₁ e₁) (h₁ : s.copy = t.copy) (h₂ : s₁.cast h₁ = s₂) (h₃ : e₁.cast h₁ = e₂) :
|
||||
IsLongestMatchAt pat s₂ e₂ := by
|
||||
subst h₂ h₃; simpa
|
||||
|
||||
theorem IsLongestMatchAt.sliceTo {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestMatchAt pat startPos endPos) (p : s.Pos) (hp : endPos ≤ p) :
|
||||
IsLongestMatchAt pat (Pos.sliceTo p startPos (by exact Std.le_trans h.le hp)) (Pos.sliceTo p endPos hp) := by
|
||||
simp only [isLongestMatchAt_iff, Pos.sliceTo_le_sliceTo_iff] at ⊢ h
|
||||
obtain ⟨h, hp'⟩ := h
|
||||
exact ⟨h, (hp'.sliceTo (Pos.sliceFrom startPos p (Std.le_trans h hp)) (by simpa)).of_eq (by simp) (by ext; simp)⟩
|
||||
|
||||
theorem isLongestMatchAt_of_ofSliceTo {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos} {startPos endPos : (s.sliceTo p).Pos}
|
||||
(h : IsLongestMatchAt pat (Pos.ofSliceTo startPos) (Pos.ofSliceTo endPos)) :
|
||||
IsLongestMatchAt pat startPos endPos := by
|
||||
simpa using h.sliceTo p Pos.ofSliceTo_le
|
||||
|
||||
/--
|
||||
Predicate stating that the range between two positions of {name}`s` can be covered by longest
|
||||
matches of the pattern within {name}`s`.
|
||||
-/
|
||||
inductive IsLongestMatchAtChain (pat : ρ) [PatternModel pat] {s : Slice} : s.Pos → s.Pos → Prop where
|
||||
| nil (p : s.Pos) : IsLongestMatchAtChain pat p p
|
||||
| cons (startPos middlePos endPos : s.Pos) : IsLongestMatchAt pat startPos middlePos →
|
||||
IsLongestMatchAtChain pat middlePos endPos → IsLongestMatchAtChain pat startPos endPos
|
||||
|
||||
attribute [simp] IsLongestMatchAtChain.nil
|
||||
|
||||
theorem IsLongestMatchAtChain.eq_of_isLongestMatchAt_self {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
{startPos endPos : s.Pos} (h : IsLongestMatchAtChain pat startPos endPos) (h' : IsLongestMatchAt pat startPos startPos) :
|
||||
startPos = endPos := by
|
||||
induction h with
|
||||
| nil => rfl
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih =>
|
||||
obtain rfl : p₁ = p₂ := h'.eq h₁
|
||||
exact ih h₁
|
||||
|
||||
theorem IsLongestMatchAtChain.le {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestMatchAtChain pat startPos endPos) : startPos ≤ endPos := by
|
||||
induction h with
|
||||
| nil => exact Std.le_refl _
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih => exact Std.le_trans h₁.le ih
|
||||
|
||||
theorem IsLongestMatchAtChain.sliceTo {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestMatchAtChain pat startPos endPos) (p : s.Pos) (hp : endPos ≤ p) :
|
||||
IsLongestMatchAtChain pat (Pos.sliceTo p startPos (by exact Std.le_trans h.le hp)) (Pos.sliceTo p endPos hp) := by
|
||||
induction h with
|
||||
| nil => simp
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih => exact .cons _ _ _ (h₁.sliceTo p (Std.le_trans h₂.le hp)) (ih hp)
|
||||
|
||||
theorem isLongestMatchAtChain_of_ofSliceTo {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos}
|
||||
{startPos endPos : (s.sliceTo p).Pos} (h : IsLongestMatchAtChain pat (Pos.ofSliceTo startPos) (Pos.ofSliceTo endPos)) :
|
||||
IsLongestMatchAtChain pat startPos endPos := by
|
||||
simpa using h.sliceTo p Pos.ofSliceTo_le
|
||||
|
||||
/--
|
||||
Predicate stating that the slice formed by {name}`startPos` and {name}`endPos` contains is a match
|
||||
of {name}`pat` in {name}`s` and it is longest among matches ending at {name}`endPos`.
|
||||
@@ -295,12 +483,21 @@ theorem isLongestRevMatchAt_iff {pat : ρ} [PatternModel pat] {s : Slice} {pos
|
||||
∃ (h : pos₁ ≤ pos₂), IsLongestRevMatch pat (Slice.Pos.sliceTo _ _ h) :=
|
||||
⟨fun ⟨h, h'⟩ => ⟨h, h'⟩, fun ⟨h, h'⟩ => ⟨h, h'⟩⟩
|
||||
|
||||
theorem IsLongestRevMatchAt.lt {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
theorem IsLongestRevMatchAt.lt {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestRevMatchAt pat startPos endPos) : startPos < endPos := by
|
||||
have := h.isLongestRevMatch_sliceTo.ne_endPos
|
||||
rw [← Pos.lt_endPos_iff, ← Slice.Pos.ofSliceTo_lt_ofSliceTo_iff] at this
|
||||
simpa
|
||||
|
||||
theorem IsLongestRevMatchAt.ne {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestRevMatchAt pat startPos endPos) : startPos ≠ endPos :=
|
||||
Std.ne_of_lt h.lt
|
||||
|
||||
@[simp]
|
||||
theorem not_isLongestRevMatchAt_self {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} {endPos : s.Pos} :
|
||||
¬IsLongestRevMatchAt pat endPos endPos :=
|
||||
fun h => h.ne rfl
|
||||
|
||||
theorem IsLongestRevMatchAt.eq {pat : ρ} [PatternModel pat] {s : Slice} {startPos startPos' endPos : s.Pos}
|
||||
(h : IsLongestRevMatchAt pat startPos endPos) (h' : IsLongestRevMatchAt pat startPos' endPos) :
|
||||
startPos = startPos' := by
|
||||
@@ -335,6 +532,77 @@ theorem isLongestRevMatchAt_endPos_iff {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
⟨fun h => isLongestRevMatch_of_eq (by simp) (by simp) h,
|
||||
fun h => isLongestRevMatch_of_eq (by simp) (by simp) h⟩
|
||||
|
||||
theorem isLongestRevMatch_iff_isLongestRevMatchAt_ofSliceTo {pat : ρ} [PatternModel pat]
|
||||
{s : Slice} {base : s.Pos} (startPos : (s.sliceTo base).Pos) :
|
||||
IsLongestRevMatch pat startPos ↔ IsLongestRevMatchAt pat (Pos.ofSliceTo startPos) base := by
|
||||
simp [← isLongestRevMatchAt_endPos_iff, isLongestRevMatchAt_iff_isLongestRevMatchAt_ofSliceTo]
|
||||
|
||||
theorem IsLongestRevMatchAt.matches_slice {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
{startPos endPos : s.Pos} (h : IsLongestRevMatchAt pat startPos endPos) :
|
||||
PatternModel.Matches pat (s.slice startPos endPos h.le).copy := by
|
||||
simpa using h.isLongestRevMatch_sliceTo.isRevMatch.matches_copy
|
||||
|
||||
@[simp]
|
||||
theorem isLongestRevMatchAt_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{startPos endPos : s.Pos} :
|
||||
IsLongestRevMatchAt pat (startPos.cast hst) (endPos.cast hst) ↔ IsLongestRevMatchAt pat startPos endPos := by
|
||||
simp [isLongestRevMatchAt_iff, Pos.sliceTo_cast]
|
||||
|
||||
theorem IsLongestRevMatchAt.of_eq {pat : ρ} [PatternModel pat] {s t : Slice} {s₁ e₁ : s.Pos} {s₂ e₂ : t.Pos}
|
||||
(h : IsLongestRevMatchAt pat s₁ e₁) (h₁ : s.copy = t.copy) (h₂ : s₁.cast h₁ = s₂) (h₃ : e₁.cast h₁ = e₂) :
|
||||
IsLongestRevMatchAt pat s₂ e₂ := by
|
||||
subst h₂ h₃; simpa
|
||||
|
||||
theorem IsLongestRevMatchAt.sliceFrom {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestRevMatchAt pat startPos endPos) (p : s.Pos) (hp : p ≤ startPos) :
|
||||
IsLongestRevMatchAt pat (Pos.sliceFrom p startPos hp) (Pos.sliceFrom p endPos (by exact Std.le_trans hp h.le)) := by
|
||||
simp only [isLongestRevMatchAt_iff, Pos.sliceFrom_le_sliceFrom_iff] at ⊢ h
|
||||
obtain ⟨h, hp'⟩ := h
|
||||
exact ⟨h, (hp'.sliceFrom (Pos.sliceTo endPos p (Std.le_trans hp h)) (by simpa)).of_eq (by simp) (by ext; simp)⟩
|
||||
|
||||
theorem isLongestRevMatchAt_of_ofSliceFrom {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos} {startPos endPos : (s.sliceFrom p).Pos}
|
||||
(h : IsLongestRevMatchAt pat (Pos.ofSliceFrom startPos) (Pos.ofSliceFrom endPos)) :
|
||||
IsLongestRevMatchAt pat startPos endPos := by
|
||||
simpa using h.sliceFrom p Pos.le_ofSliceFrom
|
||||
|
||||
/--
|
||||
Predicate stating that the range between two positions of {name}`s` can be covered by longest
|
||||
reverse matches of the pattern within {name}`s`.
|
||||
-/
|
||||
inductive IsLongestRevMatchAtChain (pat : ρ) [PatternModel pat] {s : Slice} : s.Pos → s.Pos → Prop where
|
||||
| nil (p : s.Pos) : IsLongestRevMatchAtChain pat p p
|
||||
| cons (startPos middlePos endPos : s.Pos) : IsLongestRevMatchAtChain pat startPos middlePos →
|
||||
IsLongestRevMatchAt pat middlePos endPos → IsLongestRevMatchAtChain pat startPos endPos
|
||||
|
||||
attribute [simp] IsLongestRevMatchAtChain.nil
|
||||
|
||||
theorem IsLongestRevMatchAtChain.eq_of_isLongestRevMatchAt_self {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
{startPos endPos : s.Pos} (h : IsLongestRevMatchAtChain pat startPos endPos) (h' : IsLongestRevMatchAt pat endPos endPos) :
|
||||
startPos = endPos := by
|
||||
induction h with
|
||||
| nil => rfl
|
||||
| cons mid endP hchain hmatch ih =>
|
||||
obtain rfl := hmatch.eq h'
|
||||
exact ih hmatch
|
||||
|
||||
theorem IsLongestRevMatchAtChain.le {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestRevMatchAtChain pat startPos endPos) : startPos ≤ endPos := by
|
||||
induction h with
|
||||
| nil => exact Std.le_refl _
|
||||
| cons mid endP hchain hmatch ih => exact Std.le_trans ih hmatch.le
|
||||
|
||||
theorem IsLongestRevMatchAtChain.sliceFrom {pat : ρ} [PatternModel pat] {s : Slice} {startPos endPos : s.Pos}
|
||||
(h : IsLongestRevMatchAtChain pat startPos endPos) (p : s.Pos) (hp : p ≤ startPos) :
|
||||
IsLongestRevMatchAtChain pat (Pos.sliceFrom p startPos hp) (Pos.sliceFrom p endPos (by exact Std.le_trans hp h.le)) := by
|
||||
induction h with
|
||||
| nil => simp
|
||||
| cons mid endP hchain hmatch ih => exact .cons _ _ _ ih (hmatch.sliceFrom p (Std.le_trans hp hchain.le))
|
||||
|
||||
theorem isLongestRevMatchAtChain_of_ofSliceFrom {pat : ρ} [PatternModel pat] {s : Slice} {p : s.Pos}
|
||||
{startPos endPos : (s.sliceFrom p).Pos} (h : IsLongestRevMatchAtChain pat (Pos.ofSliceFrom startPos) (Pos.ofSliceFrom endPos)) :
|
||||
IsLongestRevMatchAtChain pat startPos endPos := by
|
||||
simpa using h.sliceFrom p Pos.le_ofSliceFrom
|
||||
|
||||
/--
|
||||
Predicate stating that there is a (longest) match starting at the given position.
|
||||
-/
|
||||
@@ -360,7 +628,7 @@ theorem matchesAt_iff_exists_isMatch {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
by simpa using hq⟩⟩
|
||||
|
||||
@[simp]
|
||||
theorem not_matchesAt_endPos {pat : ρ} [PatternModel pat] {s : Slice} :
|
||||
theorem not_matchesAt_endPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} :
|
||||
¬ MatchesAt pat s.endPos := by
|
||||
simp only [matchesAt_iff_exists_isMatch, Pos.endPos_le, exists_prop_eq]
|
||||
intro h
|
||||
@@ -380,6 +648,14 @@ theorem IsLongestMatchAt.matchesAt {pat : ρ} [PatternModel pat] {s : Slice} {st
|
||||
(h : IsLongestMatchAt pat startPos endPos) : MatchesAt pat startPos where
|
||||
exists_isLongestMatchAt := ⟨_, h⟩
|
||||
|
||||
@[simp]
|
||||
theorem matchesAt_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{pos : s.Pos} : MatchesAt pat (pos.cast hst) ↔ MatchesAt pat pos := by
|
||||
simp only [matchesAt_iff_exists_isLongestMatchAt]
|
||||
refine ⟨fun ⟨endPos, h⟩ => ?_, fun ⟨endPos, h⟩ => ?_⟩
|
||||
· exact ⟨endPos.cast hst.symm, by simpa [← isLongestMatchAt_cast_iff hst]⟩
|
||||
· exact ⟨endPos.cast hst, by simpa⟩
|
||||
|
||||
/--
|
||||
Predicate stating that there is a (longest) match ending at the given position.
|
||||
-/
|
||||
@@ -405,7 +681,7 @@ theorem revMatchesAt_iff_exists_isRevMatch {pat : ρ} [PatternModel pat] {s : Sl
|
||||
by simpa using hq⟩⟩
|
||||
|
||||
@[simp]
|
||||
theorem not_revMatchesAt_startPos {pat : ρ} [PatternModel pat] {s : Slice} :
|
||||
theorem not_revMatchesAt_startPos {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice} :
|
||||
¬ RevMatchesAt pat s.startPos := by
|
||||
simp only [revMatchesAt_iff_exists_isRevMatch, Pos.le_startPos, exists_prop_eq]
|
||||
intro h
|
||||
@@ -425,6 +701,14 @@ theorem IsLongestRevMatchAt.revMatchesAt {pat : ρ} [PatternModel pat] {s : Slic
|
||||
(h : IsLongestRevMatchAt pat startPos endPos) : RevMatchesAt pat endPos where
|
||||
exists_isLongestRevMatchAt := ⟨_, h⟩
|
||||
|
||||
@[simp]
|
||||
theorem revMatchesAt_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{pos : s.Pos} : RevMatchesAt pat (pos.cast hst) ↔ RevMatchesAt pat pos := by
|
||||
simp only [revMatchesAt_iff_exists_isLongestRevMatchAt]
|
||||
refine ⟨fun ⟨endPos, h⟩ => ?_, fun ⟨endPos, h⟩ => ?_⟩
|
||||
· exact ⟨endPos.cast hst.symm, by simpa [← isLongestRevMatchAt_cast_iff hst]⟩
|
||||
· exact ⟨endPos.cast hst, by simpa⟩
|
||||
|
||||
open Classical in
|
||||
/--
|
||||
Noncomputable model function returning the end point of the longest match starting at the given
|
||||
@@ -450,6 +734,21 @@ theorem matchAt?_eq_none_iff {ρ : Type} {pat : ρ} [PatternModel pat]
|
||||
| case1 h => simpa using ⟨h⟩
|
||||
| case2 h => simpa using fun ⟨h'⟩ => h h'
|
||||
|
||||
theorem lt_of_matchAt?_eq_some {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat]
|
||||
{s : Slice} {startPos endPos : s.Pos} (h : matchAt? pat startPos = some endPos) :
|
||||
startPos < endPos :=
|
||||
(matchAt?_eq_some_iff.1 h).lt
|
||||
|
||||
@[simp]
|
||||
theorem matchAt?_cast {ρ : Type} (pat : ρ) [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{startPos : s.Pos} :
|
||||
matchAt? pat (startPos.cast hst) = (matchAt? pat startPos).map (Slice.Pos.cast · hst) := by
|
||||
refine Option.ext (fun endPos => ?_)
|
||||
have : endPos = (endPos.cast hst.symm).cast hst := by simp
|
||||
conv => lhs; rw [this, matchAt?_eq_some_iff, isLongestMatchAt_cast_iff]
|
||||
simp only [Option.map_eq_some_iff, matchAt?_eq_some_iff]
|
||||
exact ⟨fun h => ⟨_, ⟨h, by simp⟩⟩, by rintro ⟨pos, h, rfl⟩; simpa⟩
|
||||
|
||||
open Classical in
|
||||
/--
|
||||
Noncomputable model function returning the start point of the longest match ending at the given
|
||||
@@ -475,6 +774,21 @@ theorem revMatchAt?_eq_none_iff {ρ : Type} {pat : ρ} [PatternModel pat]
|
||||
| case1 h => simpa using ⟨h⟩
|
||||
| case2 h => simpa using fun ⟨h'⟩ => h h'
|
||||
|
||||
theorem lt_of_revMatchAt?_eq_some {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat]
|
||||
{s : Slice} {startPos endPos : s.Pos} (h : revMatchAt? pat endPos = some startPos) :
|
||||
startPos < endPos :=
|
||||
(revMatchAt?_eq_some_iff.1 h).lt
|
||||
|
||||
@[simp]
|
||||
theorem revMatchAt?_cast {ρ : Type} (pat : ρ) [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{startPos : s.Pos} :
|
||||
revMatchAt? pat (startPos.cast hst) = (revMatchAt? pat startPos).map (Slice.Pos.cast · hst) := by
|
||||
refine Option.ext (fun endPos => ?_)
|
||||
have : endPos = (endPos.cast hst.symm).cast hst := by simp
|
||||
conv => lhs; rw [this, revMatchAt?_eq_some_iff, isLongestRevMatchAt_cast_iff]
|
||||
simp only [Option.map_eq_some_iff, revMatchAt?_eq_some_iff]
|
||||
exact ⟨fun h => ⟨_, ⟨h, by simp⟩⟩, by rintro ⟨pos, h, rfl⟩; simpa⟩
|
||||
|
||||
/--
|
||||
Predicate stating compatibility between {name}`PatternModel` and {name}`ForwardPattern`.
|
||||
|
||||
@@ -570,6 +884,24 @@ theorem IsValidSearchFrom.endPos_of_eq {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
cases hl
|
||||
exact IsValidSearchFrom.endPos
|
||||
|
||||
theorem isValidSearchFrom_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{pos : s.Pos} {l : List (SearchStep t)} :
|
||||
IsValidSearchFrom pat (pos.cast hst) l ↔ IsValidSearchFrom pat pos (l.map (·.cast hst.symm)) := by
|
||||
suffices ∀ (s t : Slice) (hst : s.copy = t.copy) (pos : s.Pos) (l : List (SearchStep s)),
|
||||
IsValidSearchFrom pat pos l → IsValidSearchFrom pat (pos.cast hst) (l.map (·.cast hst)) from
|
||||
⟨fun h => by simpa using this _ _ hst.symm _ _ h, fun h => by
|
||||
have hcomp : (SearchStep.cast hst) ∘ (SearchStep.cast hst.symm) = id := by ext; simp
|
||||
simpa [hcomp] using this _ _ hst _ _ h⟩
|
||||
intro s t hst pos l hl
|
||||
induction hl with
|
||||
| endPos => simpa using IsValidSearchFrom.endPos
|
||||
| matched h₁ h₂ ih =>
|
||||
simpa only [List.map_cons, SearchStep.cast_matched] using IsValidSearchFrom.matched (by simpa) ih
|
||||
| mismatched h₁ h₂ h₃ ih =>
|
||||
simp only [List.map_cons, SearchStep.cast_rejected]
|
||||
refine IsValidSearchFrom.mismatched (by simpa) (fun p hp₁ hp₂ hp₃ => ?_) ih
|
||||
exact h₂ (p.cast hst.symm) (by simpa [Pos.le_cast_iff]) (by simpa [Pos.cast_lt_iff]) (by simpa)
|
||||
|
||||
/--
|
||||
Predicate stating compatibility between {name}`PatternModel` and {name}`ToForwardSearcher`.
|
||||
|
||||
@@ -663,6 +995,24 @@ theorem IsValidRevSearchFrom.startPos_of_eq {pat : ρ} [PatternModel pat] {s : S
|
||||
cases hl
|
||||
exact IsValidRevSearchFrom.startPos
|
||||
|
||||
theorem isValidRevSearchFrom_cast_iff {pat : ρ} [PatternModel pat] {s t : Slice} (hst : s.copy = t.copy)
|
||||
{pos : s.Pos} {l : List (SearchStep t)} :
|
||||
IsValidRevSearchFrom pat (pos.cast hst) l ↔ IsValidRevSearchFrom pat pos (l.map (·.cast hst.symm)) := by
|
||||
suffices ∀ (s t : Slice) (hst : s.copy = t.copy) (pos : s.Pos) (l : List (SearchStep s)),
|
||||
IsValidRevSearchFrom pat pos l → IsValidRevSearchFrom pat (pos.cast hst) (l.map (·.cast hst)) from
|
||||
⟨fun h => by simpa using this _ _ hst.symm _ _ h, fun h => by
|
||||
have hcomp : (SearchStep.cast hst) ∘ (SearchStep.cast hst.symm) = id := by ext; simp
|
||||
simpa [hcomp] using this _ _ hst _ _ h⟩
|
||||
intro s t hst pos l hl
|
||||
induction hl with
|
||||
| startPos => simpa using IsValidRevSearchFrom.startPos
|
||||
| matched h₁ h₂ ih =>
|
||||
simpa only [List.map_cons, SearchStep.cast_matched] using IsValidRevSearchFrom.matched (by simpa) ih
|
||||
| mismatched h₁ h₂ h₃ ih =>
|
||||
simp only [List.map_cons, SearchStep.cast_rejected]
|
||||
refine IsValidRevSearchFrom.mismatched (by simpa) (fun p hp₁ hp₂ hp₃ => ?_) ih
|
||||
exact h₂ (p.cast hst.symm) (by simpa [Pos.lt_cast_iff]) (by simpa [Pos.cast_le_iff]) (by simpa)
|
||||
|
||||
/--
|
||||
Predicate stating compatibility between {name}`PatternModel` and {name}`ToBackwardSearcher`.
|
||||
|
||||
|
||||
@@ -28,7 +28,9 @@ namespace String.Slice.Pattern.Model.Char
|
||||
|
||||
instance {c : Char} : PatternModel c where
|
||||
Matches s := s = String.singleton c
|
||||
not_matches_empty := by simp
|
||||
|
||||
instance {c : Char} : StrictPatternModel c where
|
||||
not_matches_empty := by simp [PatternModel.Matches]
|
||||
|
||||
instance {c : Char} : NoPrefixPatternModel c :=
|
||||
.of_length_eq (by simp +contextual [PatternModel.Matches])
|
||||
@@ -168,11 +170,61 @@ theorem isLongestMatchAt_iff_isLongestMatchAt_beq {c : Char} {s : Slice}
|
||||
IsLongestMatchAt c pos pos' ↔ IsLongestMatchAt (· == c) pos pos' := by
|
||||
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff_isLongestMatch_beq]
|
||||
|
||||
theorem isLongestMatchAtChain_iff_isLongestMatchAtChain_beq {c : Char} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestMatchAtChain c pos pos' ↔ IsLongestMatchAtChain (· == c) pos pos' := by
|
||||
refine ⟨fun h => ?_, fun h => ?_⟩
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih => exact .cons _ _ _ (isLongestMatchAt_iff_isLongestMatchAt_beq.1 h₁) ih
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih => exact .cons _ _ _ (isLongestMatchAt_iff_isLongestMatchAt_beq.2 h₁) ih
|
||||
|
||||
theorem isLongestMatchAtChain_iff {c : Char} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestMatchAtChain c pos pos' ↔ pos ≤ pos' ∧ ∀ pos'', pos ≤ pos'' → (h : pos'' < pos') → pos''.get (Pos.ne_endPos_of_lt h) = c := by
|
||||
simp [isLongestMatchAtChain_iff_isLongestMatchAtChain_beq, CharPred.isLongestMatchAtChain_iff]
|
||||
|
||||
theorem isLongestMatchAtChain_iff_toList {c : Char} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestMatchAtChain c pos pos' ↔
|
||||
∃ (h : pos ≤ pos'), (s.slice pos pos' h).copy.toList = List.replicate (s.slice pos pos' h).copy.length c := by
|
||||
simp [isLongestMatchAtChain_iff_isLongestMatchAtChain_beq, CharPred.isLongestMatchAtChain_iff_toList,
|
||||
List.eq_replicate_iff]
|
||||
|
||||
theorem isLongestMatchAtChain_startPos_endPos_iff_toList {c : Char} {s : Slice} :
|
||||
IsLongestMatchAtChain c s.startPos s.endPos ↔ s.copy.toList = List.replicate s.copy.length c := by
|
||||
simp [isLongestMatchAtChain_iff_isLongestMatchAtChain_beq,
|
||||
CharPred.isLongestMatchAtChain_startPos_endPos_iff_toList, List.eq_replicate_iff]
|
||||
|
||||
theorem isLongestRevMatchAt_iff_isLongestRevMatchAt_beq {c : Char} {s : Slice}
|
||||
{pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAt c pos pos' ↔ IsLongestRevMatchAt (· == c) pos pos' := by
|
||||
simp [Model.isLongestRevMatchAt_iff, isLongestRevMatch_iff_isLongestRevMatch_beq]
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_beq {c : Char} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAtChain c pos pos' ↔ IsLongestRevMatchAtChain (· == c) pos pos' := by
|
||||
refine ⟨fun h => ?_, fun h => ?_⟩
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₂ p₃ _ hmatch ih => exact .cons _ _ _ ih (isLongestRevMatchAt_iff_isLongestRevMatchAt_beq.1 hmatch)
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₂ p₃ _ hmatch ih => exact .cons _ _ _ ih (isLongestRevMatchAt_iff_isLongestRevMatchAt_beq.2 hmatch)
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff {c : Char} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAtChain c pos pos' ↔ pos ≤ pos' ∧ ∀ pos'', pos ≤ pos'' → (h : pos'' < pos') → pos''.get (Pos.ne_endPos_of_lt h) = c := by
|
||||
simp [isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_beq, CharPred.isLongestRevMatchAtChain_iff]
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff_toList {c : Char} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAtChain c pos pos' ↔
|
||||
∃ (h : pos ≤ pos'), (s.slice pos pos' h).copy.toList = List.replicate (s.slice pos pos' h).copy.length c := by
|
||||
simp [isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_beq, CharPred.isLongestRevMatchAtChain_iff_toList,
|
||||
List.eq_replicate_iff]
|
||||
|
||||
theorem isLongestRevMatchAtChain_startPos_endPos_iff_toList {c : Char} {s : Slice} :
|
||||
IsLongestRevMatchAtChain c s.startPos s.endPos ↔ s.copy.toList = List.replicate s.copy.length c := by
|
||||
simp [isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_beq,
|
||||
CharPred.isLongestRevMatchAtChain_startPos_endPos_iff_toList, List.eq_replicate_iff]
|
||||
|
||||
theorem matchesAt_iff_matchesAt_beq {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
MatchesAt c pos ↔ MatchesAt (· == c) pos := by
|
||||
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_isLongestMatchAt_beq]
|
||||
@@ -242,18 +294,21 @@ theorem skipPrefix?_char_eq_skipPrefix?_beq {c : Char} {s : Slice} :
|
||||
theorem Pattern.ForwardPattern.skipPrefix?_char_eq_skipPrefix?_beq {c : Char} {s : Slice} :
|
||||
skipPrefix? c s = skipPrefix? (· == c) s := (rfl)
|
||||
|
||||
theorem Pos.skip?_char_eq_skip?_beq {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
pos.skip? c = pos.skip? (· == c) := (rfl)
|
||||
|
||||
theorem Pos.skipWhile_char_eq_skipWhile_beq {c : Char} {s : Slice} (curr : s.Pos) :
|
||||
Pos.skipWhile curr c = Pos.skipWhile curr (· == c) := by
|
||||
fun_induction Pos.skipWhile curr c with
|
||||
| case1 pos nextCurr h₁ h₂ ih =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_char_eq_skipPrefix?_beq, h₁, h₂, ih]
|
||||
simp [← Pos.skip?_char_eq_skip?_beq, h₁, h₂, ih]
|
||||
| case2 pos nextCurr h ih =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_char_eq_skipPrefix?_beq, h, ih]
|
||||
simp [← Pos.skip?_char_eq_skip?_beq, h, ih]
|
||||
| case3 pos h =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_char_eq_skipPrefix?_beq]
|
||||
simp [← Pos.skip?_char_eq_skip?_beq, h]
|
||||
|
||||
theorem skipPrefixWhile_char_eq_skipPrefixWhile_beq {c : Char} {s : Slice} :
|
||||
s.skipPrefixWhile c = s.skipPrefixWhile (· == c) :=
|
||||
@@ -269,7 +324,7 @@ theorem takeWhile_char_eq_takeWhile_beq {c : Char} {s : Slice} :
|
||||
|
||||
theorem all_char_eq_all_beq {c : Char} {s : Slice} :
|
||||
s.all c = s.all (· == c) := by
|
||||
simp only [all, dropWhile_char_eq_dropWhile_beq]
|
||||
simp only [all, skipPrefixWhile_char_eq_skipPrefixWhile_beq]
|
||||
|
||||
theorem find?_char_eq_find?_beq {c : Char} {s : Slice} :
|
||||
s.find? c = s.find? (· == c) :=
|
||||
@@ -298,18 +353,21 @@ theorem dropSuffix_char_eq_dropSuffix_beq {c : Char} {s : Slice} :
|
||||
theorem Pattern.BackwardPattern.skipSuffix?_char_eq_skipSuffix?_beq {c : Char} {s : Slice} :
|
||||
skipSuffix? c s = skipSuffix? (· == c) s := (rfl)
|
||||
|
||||
theorem Pos.revSkip?_char_eq_revSkip?_beq {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkip? c = pos.revSkip? (· == c) := (rfl)
|
||||
|
||||
theorem Pos.revSkipWhile_char_eq_revSkipWhile_beq {c : Char} {s : Slice} (curr : s.Pos) :
|
||||
Pos.revSkipWhile curr c = Pos.revSkipWhile curr (· == c) := by
|
||||
fun_induction Pos.revSkipWhile curr c with
|
||||
| case1 pos nextCurr h₁ h₂ ih =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_char_eq_skipSuffix?_beq, h₁, h₂, ih]
|
||||
simp [← Pos.revSkip?_char_eq_revSkip?_beq, h₁, h₂, ih]
|
||||
| case2 pos nextCurr h ih =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_char_eq_skipSuffix?_beq, h, ih]
|
||||
simp [← Pos.revSkip?_char_eq_revSkip?_beq, h, ih]
|
||||
| case3 pos h =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_char_eq_skipSuffix?_beq]
|
||||
simp [← Pos.revSkip?_char_eq_revSkip?_beq, h]
|
||||
|
||||
theorem skipSuffixWhile_char_eq_skipSuffixWhile_beq {c : Char} {s : Slice} :
|
||||
s.skipSuffixWhile c = s.skipSuffixWhile (· == c) :=
|
||||
@@ -323,4 +381,16 @@ theorem takeEndWhile_char_eq_takeEndWhile_beq {c : Char} {s : Slice} :
|
||||
s.takeEndWhile c = s.takeEndWhile (· == c) := by
|
||||
simp only [takeEndWhile]; exact congrArg _ skipSuffixWhile_char_eq_skipSuffixWhile_beq
|
||||
|
||||
theorem revFind?_char_eq_revFind?_beq {c : Char} {s : Slice} :
|
||||
s.revFind? c = s.revFind? (· == c) :=
|
||||
(rfl)
|
||||
|
||||
theorem Pos.revFind?_char_eq_revFind?_beq {c : Char} {s : Slice} {p : s.Pos} :
|
||||
p.revFind? c = p.revFind? (· == c) :=
|
||||
(rfl)
|
||||
|
||||
theorem revAll_char_eq_revAll_beq {c : Char} {s : Slice} :
|
||||
s.revAll c = s.revAll (· == c) := by
|
||||
simp [revAll, skipSuffixWhile_char_eq_skipSuffixWhile_beq]
|
||||
|
||||
end String.Slice
|
||||
|
||||
@@ -23,8 +23,8 @@ open Std String.Slice Pattern Pattern.Model
|
||||
|
||||
namespace String.Slice
|
||||
|
||||
theorem Pattern.Model.find?_eq_some_iff {ρ : Type} (pat : ρ) [PatternModel pat] {σ : Slice → Type}
|
||||
[∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
theorem Pattern.Model.find?_eq_some_iff {ρ : Type} (pat : ρ) [PatternModel pat] [StrictPatternModel pat]
|
||||
{σ : Slice → Type} [∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
[∀ s, IteratorLoop (σ s) Id Id] [∀ s, LawfulIteratorLoop (σ s) Id Id]
|
||||
[ToForwardSearcher pat σ] [LawfulToForwardSearcherModel pat] {s : Slice} {pos : s.Pos} :
|
||||
s.find? pat = some pos ↔ MatchesAt pat pos ∧ (∀ pos', pos' < pos → ¬ MatchesAt pat pos') := by
|
||||
@@ -40,8 +40,8 @@ theorem Pattern.Model.find?_eq_some_iff {ρ : Type} (pat : ρ) [PatternModel pat
|
||||
| matched h₁ _ _ => have := h₁.matchesAt; grind
|
||||
| mismatched => grind
|
||||
|
||||
theorem Pattern.Model.find?_eq_none_iff {ρ : Type} (pat : ρ) [PatternModel pat] {σ : Slice → Type}
|
||||
[∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
theorem Pattern.Model.find?_eq_none_iff {ρ : Type} (pat : ρ) [PatternModel pat] [StrictPatternModel pat]
|
||||
{σ : Slice → Type} [∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
[∀ s, IteratorLoop (σ s) Id Id] [∀ s, LawfulIteratorLoop (σ s) Id Id]
|
||||
[ToForwardSearcher pat σ] [LawfulToForwardSearcherModel pat] {s : Slice} :
|
||||
s.find? pat = none ↔ ∀ (pos : s.Pos), ¬ MatchesAt pat pos := by
|
||||
@@ -65,15 +65,15 @@ theorem find?_eq_none_iff {ρ : Type} (pat : ρ) {σ : Slice → Type}
|
||||
[ToForwardSearcher pat σ] {s : Slice} : s.find? pat = none ↔ s.contains pat = false := by
|
||||
rw [← Option.isNone_iff_eq_none, ← Option.isSome_eq_false_iff, isSome_find?]
|
||||
|
||||
theorem Pattern.Model.contains_eq_false_iff {ρ : Type} (pat : ρ) [PatternModel pat] {σ : Slice → Type}
|
||||
[∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
theorem Pattern.Model.contains_eq_false_iff {ρ : Type} (pat : ρ) [PatternModel pat] [StrictPatternModel pat]
|
||||
{σ : Slice → Type} [∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
[∀ s, IteratorLoop (σ s) Id Id] [∀ s, LawfulIteratorLoop (σ s) Id Id]
|
||||
[ToForwardSearcher pat σ] [LawfulToForwardSearcherModel pat] {s : Slice} :
|
||||
s.contains pat = false ↔ ∀ (pos : s.Pos), ¬ MatchesAt pat pos := by
|
||||
rw [← find?_eq_none_iff, Slice.find?_eq_none_iff]
|
||||
|
||||
theorem Pattern.Model.contains_eq_true_iff {ρ : Type} (pat : ρ) [PatternModel pat] {σ : Slice → Type}
|
||||
[∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
theorem Pattern.Model.contains_eq_true_iff {ρ : Type} (pat : ρ) [PatternModel pat] [StrictPatternModel pat]
|
||||
{σ : Slice → Type} [∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
[∀ s, IteratorLoop (σ s) Id Id] [∀ s, LawfulIteratorLoop (σ s) Id Id]
|
||||
[ToForwardSearcher pat σ] [LawfulToForwardSearcherModel pat] {s : Slice} :
|
||||
s.contains pat ↔ ∃ (pos : s.Pos), MatchesAt pat pos := by
|
||||
@@ -85,7 +85,7 @@ theorem Pos.find?_eq_find?_sliceFrom {ρ : Type} {pat : ρ} {σ : Slice → Type
|
||||
p.find? pat = ((s.sliceFrom p).find? pat).map Pos.ofSliceFrom :=
|
||||
(rfl)
|
||||
|
||||
theorem Pattern.Model.posFind?_eq_some_iff {ρ : Type} {pat : ρ} [PatternModel pat] {σ : Slice → Type}
|
||||
theorem Pattern.Model.posFind?_eq_some_iff {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {σ : Slice → Type}
|
||||
[∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
[∀ s, IteratorLoop (σ s) Id Id] [∀ s, LawfulIteratorLoop (σ s) Id Id]
|
||||
[ToForwardSearcher pat σ] [LawfulToForwardSearcherModel pat] {s : Slice} {pos pos' : s.Pos} :
|
||||
@@ -100,8 +100,8 @@ theorem Pattern.Model.posFind?_eq_some_iff {ρ : Type} {pat : ρ} [PatternModel
|
||||
refine ⟨Pos.sliceFrom _ _ h₁, ⟨by simpa using h₂, fun p hp₁ hp₂ => ?_⟩, by simp⟩
|
||||
exact h₃ (Pos.ofSliceFrom p) Slice.Pos.le_ofSliceFrom (Pos.lt_sliceFrom_iff.1 hp₁) hp₂
|
||||
|
||||
theorem Pattern.Model.posFind?_eq_none_iff {ρ : Type} {pat : ρ} [PatternModel pat] {σ : Slice → Type}
|
||||
[∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
theorem Pattern.Model.posFind?_eq_none_iff {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat]
|
||||
{σ : Slice → Type} [∀ s, Iterator (σ s) Id (SearchStep s)] [∀ s, Iterators.Finite (σ s) Id]
|
||||
[∀ s, IteratorLoop (σ s) Id Id] [∀ s, LawfulIteratorLoop (σ s) Id Id]
|
||||
[ToForwardSearcher pat σ] [LawfulToForwardSearcherModel pat] {s : Slice} {pos : s.Pos} :
|
||||
pos.find? pat = none ↔ ∀ pos', pos ≤ pos' → ¬ MatchesAt pat pos' := by
|
||||
|
||||
@@ -49,9 +49,10 @@ theorem contains_slice_iff {t s : Slice} :
|
||||
by_cases ht : t.isEmpty
|
||||
· simp [contains_eq_true_of_isEmpty ht s, copy_eq_empty_iff.mpr ht, String.toList_empty]
|
||||
· simp only [Bool.not_eq_true] at ht
|
||||
have := Pattern.Model.ForwardSliceSearcher.strictPatternModel ht
|
||||
have := Pattern.Model.ForwardSliceSearcher.lawfulToForwardSearcherModel ht
|
||||
simp only [Pattern.Model.contains_eq_true_iff,
|
||||
Pattern.Model.ForwardSliceSearcher.exists_matchesAt_iff_eq_append ht, isInfix_toList_iff]
|
||||
Pattern.Model.ForwardSliceSearcher.exists_matchesAt_iff_eq_append, isInfix_toList_iff]
|
||||
|
||||
@[simp]
|
||||
theorem contains_string_iff {t : String} {s : Slice} :
|
||||
|
||||
@@ -18,6 +18,7 @@ import Init.Data.String.Lemmas.Basic
|
||||
import Init.Data.String.Lemmas.Order
|
||||
import Init.Data.Order.Lemmas
|
||||
import Init.Data.String.OrderInstances
|
||||
import Init.Data.String.Lemmas.Iterate
|
||||
import Init.Omega
|
||||
import Init.Data.String.Lemmas.FindPos
|
||||
|
||||
@@ -27,8 +28,9 @@ namespace String.Slice.Pattern.Model.CharPred
|
||||
|
||||
instance {p : Char → Bool} : PatternModel p where
|
||||
Matches s := ∃ c, s = singleton c ∧ p c
|
||||
not_matches_empty := by
|
||||
simp
|
||||
|
||||
instance {p : Char → Bool} : StrictPatternModel p where
|
||||
not_matches_empty := by simp [PatternModel.Matches]
|
||||
|
||||
instance {p : Char → Bool} : NoPrefixPatternModel p :=
|
||||
.of_length_eq (by simp +contextual [PatternModel.Matches])
|
||||
@@ -71,6 +73,39 @@ theorem isLongestMatchAt_iff {p : Char → Bool} {s : Slice} {pos pos' : s.Pos}
|
||||
simp +contextual [Model.isLongestMatchAt_iff, isLongestMatch_iff, ← Pos.ofSliceFrom_inj,
|
||||
Pos.get_eq_get_ofSliceFrom, Pos.ofSliceFrom_next]
|
||||
|
||||
theorem isLongestMatchAtChain_iff {p : Char → Bool} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestMatchAtChain p pos pos' ↔ pos ≤ pos' ∧ ∀ pos'', pos ≤ pos'' → (h : pos'' < pos') → p (pos''.get (Pos.ne_endPos_of_lt h)) := by
|
||||
induction pos using WellFounded.induction Pos.wellFounded_gt with | h pos ih
|
||||
obtain (h|rfl|h) := Std.lt_trichotomy pos pos'
|
||||
· refine ⟨fun h => ?_, fun ⟨h₁, h₂⟩ => ?_⟩
|
||||
· cases h with
|
||||
| nil => exact (Std.lt_irrefl h).elim
|
||||
| cons _ mid _ h₁ h₂ =>
|
||||
obtain ⟨h₀, rfl, h₁'⟩ := isLongestMatchAt_iff.1 h₁
|
||||
refine ⟨Std.le_of_lt h, fun pos'' hp₁ hp₂ => ?_⟩
|
||||
obtain (hh|rfl) := Std.le_iff_lt_or_eq.1 hp₁
|
||||
· exact ((ih (pos.next (Pos.ne_endPos_of_lt h)) Pos.lt_next).1 h₂).2 _ (by simpa) hp₂
|
||||
· exact h₁'
|
||||
· refine .cons _ (pos.next (Pos.ne_endPos_of_lt h)) _ ?_ ((ih _ Pos.lt_next).2 ?_)
|
||||
· exact isLongestMatchAt_iff.2 ⟨Pos.ne_endPos_of_lt h, rfl, h₂ _ (by simp) h⟩
|
||||
· exact ⟨by simpa, fun pos'' hp₁ hp₂ => h₂ _ (Std.le_trans Pos.le_next hp₁) hp₂⟩
|
||||
· simpa using fun _ h₁ h₂ => (Std.lt_irrefl (Std.lt_of_le_of_lt h₁ h₂)).elim
|
||||
· simpa [Std.not_le.2 h] using fun h' => (Std.not_le.2 h h'.le).elim
|
||||
|
||||
theorem isLongestMatchAtChain_iff_toList {p : Char → Bool} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestMatchAtChain p pos pos' ↔ ∃ (h : pos ≤ pos'), ∀ c, c ∈ (s.slice pos pos' h).copy.toList → p c := by
|
||||
simp only [isLongestMatchAtChain_iff, mem_toList_copy_iff_exists_get, Pos.get_eq_get_ofSlice,
|
||||
forall_exists_index]
|
||||
refine ⟨fun ⟨h₁, h₂⟩ => ⟨h₁, fun c p' hp => ?_⟩, fun ⟨h₁, h₂⟩ => ⟨h₁, fun p' hp₁ hp₂ => ?_⟩⟩
|
||||
· rintro rfl
|
||||
exact h₂ _ Pos.le_ofSlice (by simp [Pos.ofSlice_lt_iff, h₁, hp])
|
||||
· refine h₂ _ (Pos.slice p' _ _ hp₁ (Std.le_of_lt hp₂)) ?_ (by simp)
|
||||
rwa [← Pos.lt_endPos_iff, ← Pos.slice_eq_endPos (h := h₁), Pos.slice_lt_slice_iff]
|
||||
|
||||
theorem isLongestMatchAtChain_startPos_endPos_iff_toList {p : Char → Bool} {s : Slice} :
|
||||
IsLongestMatchAtChain p s.startPos s.endPos ↔ ∀ c, c ∈ s.copy.toList → p c := by
|
||||
simp [isLongestMatchAtChain_iff_toList]
|
||||
|
||||
theorem isLongestRevMatchAt_iff {p : Char → Bool} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAt p pos pos' ↔ ∃ h, pos = pos'.prev h ∧ p ((pos'.prev h).get (by simp)) := by
|
||||
simp +contextual [Model.isLongestRevMatchAt_iff, isLongestRevMatch_iff, ← Pos.ofSliceTo_inj,
|
||||
@@ -84,6 +119,35 @@ theorem isLongestRevMatchAt_of_get {p : Char → Bool} {s : Slice} {pos : s.Pos}
|
||||
(hc : p ((pos.prev h).get (by simp))) : IsLongestRevMatchAt p (pos.prev h) pos :=
|
||||
isLongestRevMatchAt_iff.2 ⟨h, by simp [hc]⟩
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff {p : Char → Bool} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAtChain p pos pos' ↔ pos ≤ pos' ∧ ∀ pos'', pos ≤ pos'' → (h : pos'' < pos') → p (pos''.get (Pos.ne_endPos_of_lt h)) := by
|
||||
induction pos' using WellFounded.induction Pos.wellFounded_lt with | h pos' ih
|
||||
obtain (h|rfl|h) := Std.lt_trichotomy pos pos'
|
||||
· refine ⟨fun h => ?_, fun ⟨h₁, h₂⟩ => ?_⟩
|
||||
· cases h with
|
||||
| nil => exact (Std.lt_irrefl h).elim
|
||||
| cons _ _ hchain hmatch =>
|
||||
obtain ⟨hne, hmid, hp⟩ := isLongestRevMatchAt_iff.1 hmatch
|
||||
refine ⟨Std.le_of_lt h, fun pos'' hp₁ hp₂ => ?_⟩
|
||||
rcases Std.le_iff_lt_or_eq.1 (Pos.le_prev_iff_lt.2 hp₂) with hh | heq
|
||||
· exact ((ih _ Pos.prev_lt).1 (hmid ▸ hchain)).2 _ hp₁ hh
|
||||
· exact heq ▸ hp
|
||||
· have hne : pos' ≠ s.startPos := Slice.Pos.ne_startPos_of_lt h
|
||||
refine .cons _ (pos'.prev hne) _ ((ih _ Pos.prev_lt).2 ?_)
|
||||
(isLongestRevMatchAt_of_get (h₂ _ (Pos.le_prev_iff_lt.2 h) Pos.prev_lt))
|
||||
exact ⟨Pos.le_prev_iff_lt.2 h, fun pos'' hp₁ hp₂ =>
|
||||
h₂ _ hp₁ (Std.lt_trans hp₂ Pos.prev_lt)⟩
|
||||
· simpa using fun _ h₁ h₂ => (Std.lt_irrefl (Std.lt_of_le_of_lt h₁ h₂)).elim
|
||||
· simpa [Std.not_le.2 h] using fun h' => (Std.not_le.2 h h'.le).elim
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff_toList {p : Char → Bool} {s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAtChain p pos pos' ↔ ∃ (h : pos ≤ pos'), ∀ c, c ∈ (s.slice pos pos' h).copy.toList → p c :=
|
||||
isLongestRevMatchAtChain_iff.trans (isLongestMatchAtChain_iff.symm.trans isLongestMatchAtChain_iff_toList)
|
||||
|
||||
theorem isLongestRevMatchAtChain_startPos_endPos_iff_toList {p : Char → Bool} {s : Slice} :
|
||||
IsLongestRevMatchAtChain p s.startPos s.endPos ↔ ∀ c, c ∈ s.copy.toList → p c := by
|
||||
simp [isLongestRevMatchAtChain_iff_toList]
|
||||
|
||||
instance {p : Char → Bool} : LawfulForwardPatternModel p where
|
||||
skipPrefix?_eq_some_iff {s} pos := by
|
||||
simp [isLongestMatch_iff, ForwardPattern.skipPrefix?, and_comm, eq_comm (b := pos)]
|
||||
@@ -128,7 +192,9 @@ namespace Decidable
|
||||
|
||||
instance {p : Char → Prop} [DecidablePred p] : PatternModel p where
|
||||
Matches := PatternModel.Matches (decide <| p ·)
|
||||
not_matches_empty := PatternModel.not_matches_empty (pat := (decide <| p ·))
|
||||
|
||||
instance {p : Char → Prop} [DecidablePred p] : StrictPatternModel p where
|
||||
not_matches_empty := StrictPatternModel.not_matches_empty (pat := (decide <| p ·))
|
||||
|
||||
instance {p : Char → Prop} [DecidablePred p] : NoPrefixPatternModel p where
|
||||
eq_empty := NoPrefixPatternModel.eq_empty (pat := (decide <| p ·))
|
||||
@@ -182,6 +248,32 @@ theorem isLongestRevMatchAt_iff_isLongestRevMatchAt_decide {p : Char → Prop} [
|
||||
IsLongestRevMatchAt p pos pos' ↔ IsLongestRevMatchAt (decide <| p ·) pos pos' := by
|
||||
simp [Model.isLongestRevMatchAt_iff, isLongestRevMatch_iff_isLongestRevMatch_decide]
|
||||
|
||||
theorem isLongestMatchAtChain_iff_isLongestMatchAtChain_decide {p : Char → Prop} [DecidablePred p]
|
||||
{s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestMatchAtChain p pos pos' ↔ IsLongestMatchAtChain (decide <| p ·) pos pos' := by
|
||||
constructor
|
||||
· intro h; induction h with
|
||||
| nil => exact .nil _
|
||||
| cons _ mid _ hmatch hchain ih =>
|
||||
exact .cons _ mid _ (isLongestMatchAt_iff_isLongestMatchAt_decide.1 hmatch) ih
|
||||
· intro h; induction h with
|
||||
| nil => exact .nil _
|
||||
| cons _ mid _ hmatch hchain ih =>
|
||||
exact .cons _ mid _ (isLongestMatchAt_iff_isLongestMatchAt_decide.2 hmatch) ih
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_decide {p : Char → Prop} [DecidablePred p]
|
||||
{s : Slice} {pos pos' : s.Pos} :
|
||||
IsLongestRevMatchAtChain p pos pos' ↔ IsLongestRevMatchAtChain (decide <| p ·) pos pos' := by
|
||||
constructor
|
||||
· intro h; induction h with
|
||||
| nil => exact .nil _
|
||||
| cons _ _ hchain hmatch ih =>
|
||||
exact .cons _ _ _ ih (isLongestRevMatchAt_iff_isLongestRevMatchAt_decide.1 hmatch)
|
||||
· intro h; induction h with
|
||||
| nil => exact .nil _
|
||||
| cons _ _ hchain hmatch ih =>
|
||||
exact .cons _ _ _ ih (isLongestRevMatchAt_iff_isLongestRevMatchAt_decide.2 hmatch)
|
||||
|
||||
theorem isLongestMatchAt_iff {p : Char → Prop} [DecidablePred p] {s : Slice}
|
||||
{pos pos' : s.Pos} :
|
||||
IsLongestMatchAt p pos pos' ↔ ∃ h, pos' = pos.next h ∧ p (pos.get h) := by
|
||||
@@ -319,6 +411,9 @@ theorem dropPrefix_prop_eq_dropPrefix_decide {p : Char → Prop} [DecidablePred
|
||||
theorem skipPrefix?_prop_eq_skipPrefix?_decide {p : Char → Prop} [DecidablePred p] {s : Slice} :
|
||||
s.skipPrefix? p = s.skipPrefix? (decide <| p ·) := (rfl)
|
||||
|
||||
theorem Pos.skip?_prop_eq_skip?_decide {p : Char → Prop} [DecidablePred p] {s : Slice} {pos : s.Pos} :
|
||||
pos.skip? p = pos.skip? (decide <| p ·) := (rfl)
|
||||
|
||||
theorem Pattern.ForwardPattern.skipPrefix?_prop_eq_skipPrefix?_decide
|
||||
{p : Char → Prop} [DecidablePred p] {s : Slice} :
|
||||
skipPrefix? p s = skipPrefix? (decide <| p ·) s := (rfl)
|
||||
@@ -329,13 +424,13 @@ theorem Pos.skipWhile_prop_eq_skipWhile_decide {p : Char → Prop} [DecidablePre
|
||||
fun_induction Pos.skipWhile curr p with
|
||||
| case1 pos nextCurr h₁ h₂ ih =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_prop_eq_skipPrefix?_decide, h₁, h₂, ih]
|
||||
simp [← Pos.skip?_prop_eq_skip?_decide, h₁, h₂, ih]
|
||||
| case2 pos nextCurr h ih =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_prop_eq_skipPrefix?_decide, h, ih]
|
||||
simp [← Pos.skip?_prop_eq_skip?_decide, h, ih]
|
||||
| case3 pos h =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_prop_eq_skipPrefix?_decide]
|
||||
simp [← Pos.skip?_prop_eq_skip?_decide, h]
|
||||
|
||||
theorem skipPrefixWhile_prop_eq_skipPrefixWhile_decide {p : Char → Prop} [DecidablePred p]
|
||||
{s : Slice} :
|
||||
@@ -352,7 +447,7 @@ theorem takeWhile_prop_eq_takeWhile_decide {p : Char → Prop} [DecidablePred p]
|
||||
|
||||
theorem all_prop_eq_all_decide {p : Char → Prop} [DecidablePred p] {s : Slice} :
|
||||
s.all p = s.all (decide <| p ·) := by
|
||||
simp only [all, dropWhile_prop_eq_dropWhile_decide]
|
||||
simp only [all, skipPrefixWhile_prop_eq_skipPrefixWhile_decide]
|
||||
|
||||
theorem find?_prop_eq_find?_decide {p : Char → Prop} [DecidablePred p] {s : Slice} :
|
||||
s.find? p = s.find? (decide <| p ·) :=
|
||||
@@ -383,19 +478,22 @@ theorem Pattern.BackwardPattern.skipSuffix?_prop_eq_skipSuffix?_decide
|
||||
{p : Char → Prop} [DecidablePred p] {s : Slice} :
|
||||
skipSuffix? p s = skipSuffix? (decide <| p ·) s := (rfl)
|
||||
|
||||
theorem Pos.revSkip?_prop_eq_revSkip?_decide {p : Char → Prop} [DecidablePred p] {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkip? p = pos.revSkip? (decide <| p ·) := (rfl)
|
||||
|
||||
theorem Pos.revSkipWhile_prop_eq_revSkipWhile_decide {p : Char → Prop} [DecidablePred p]
|
||||
{s : Slice} (curr : s.Pos) :
|
||||
Pos.revSkipWhile curr p = Pos.revSkipWhile curr (decide <| p ·) := by
|
||||
fun_induction Pos.revSkipWhile curr p with
|
||||
| case1 pos nextCurr h₁ h₂ ih =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_prop_eq_skipSuffix?_decide, h₁, h₂, ih]
|
||||
simp [← Pos.revSkip?_prop_eq_revSkip?_decide, h₁, h₂, ih]
|
||||
| case2 pos nextCurr h ih =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_prop_eq_skipSuffix?_decide, h, ih]
|
||||
simp [← Pos.revSkip?_prop_eq_revSkip?_decide, h, ih]
|
||||
| case3 pos h =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_prop_eq_skipSuffix?_decide]
|
||||
simp [← Pos.revSkip?_prop_eq_revSkip?_decide, h]
|
||||
|
||||
theorem skipSuffixWhile_prop_eq_skipSuffixWhile_decide {p : Char → Prop} [DecidablePred p]
|
||||
{s : Slice} :
|
||||
@@ -412,4 +510,8 @@ theorem takeEndWhile_prop_eq_takeEndWhile_decide {p : Char → Prop} [DecidableP
|
||||
s.takeEndWhile p = s.takeEndWhile (decide <| p ·) := by
|
||||
simp only [takeEndWhile]; exact congrArg _ skipSuffixWhile_prop_eq_skipSuffixWhile_decide
|
||||
|
||||
theorem revAll_prop_eq_revAll_decide {p : Char → Prop} [DecidablePred p] {s : Slice} :
|
||||
s.revAll p = s.revAll (decide <| p ·) := by
|
||||
simp only [revAll, skipSuffixWhile_prop_eq_skipSuffixWhile_decide]
|
||||
|
||||
end String.Slice
|
||||
|
||||
@@ -36,7 +36,7 @@ This gives a low-level correctness proof from which higher-level API lemmas can
|
||||
namespace String.Slice.Pattern.Model
|
||||
|
||||
@[cbv_opaque]
|
||||
public protected noncomputable def split {ρ : Type} (pat : ρ) [PatternModel pat] {s : Slice}
|
||||
public protected noncomputable def split {ρ : Type} (pat : ρ) [PatternModel pat] [StrictPatternModel pat] {s : Slice}
|
||||
(firstRejected curr : s.Pos) (hle : firstRejected ≤ curr) : List s.Subslice :=
|
||||
if h : curr = s.endPos then
|
||||
[s.subslice _ _ hle]
|
||||
@@ -49,12 +49,12 @@ public protected noncomputable def split {ρ : Type} (pat : ρ) [PatternModel pa
|
||||
termination_by curr
|
||||
|
||||
@[simp]
|
||||
public theorem split_endPos {ρ : Type} {pat : ρ} [PatternModel pat] {s : Slice}
|
||||
public theorem split_endPos {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat] {s : Slice}
|
||||
{firstRejected : s.Pos} :
|
||||
Model.split (s := s) pat firstRejected s.endPos (by simp) = [s.subslice firstRejected s.endPos (by simp)] := by
|
||||
simp [Model.split]
|
||||
|
||||
public theorem split_eq_of_isLongestMatchAt {ρ : Type} {pat : ρ} [PatternModel pat]
|
||||
public theorem split_eq_of_isLongestMatchAt {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat]
|
||||
{s : Slice} {firstRejected start stop : s.Pos} {hle} (h : IsLongestMatchAt pat start stop) :
|
||||
Model.split pat firstRejected start hle =
|
||||
s.subslice _ _ hle :: Model.split pat stop stop (by exact Std.le_refl _) := by
|
||||
@@ -63,7 +63,7 @@ public theorem split_eq_of_isLongestMatchAt {ρ : Type} {pat : ρ} [PatternModel
|
||||
· congr <;> exact (matchAt?_eq_some_iff.1 ‹_›).eq h
|
||||
· simp [matchAt?_eq_some_iff.2 ‹_›] at *
|
||||
|
||||
public theorem split_eq_of_not_matchesAt {ρ : Type} {pat : ρ} [PatternModel pat]
|
||||
public theorem split_eq_of_not_matchesAt {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat]
|
||||
{s : Slice} {firstRejected start} (stop : s.Pos) (h₀ : start ≤ stop) {hle}
|
||||
(h : ∀ p, start ≤ p → p < stop → ¬ MatchesAt pat p) :
|
||||
Model.split pat firstRejected start hle =
|
||||
@@ -80,7 +80,7 @@ public theorem split_eq_of_not_matchesAt {ρ : Type} {pat : ρ} [PatternModel pa
|
||||
· obtain rfl : start = stop := Std.le_antisymm h₀ (Std.not_lt.1 h')
|
||||
simp
|
||||
|
||||
public theorem split_eq_next_of_not_matchesAt {ρ : Type} {pat : ρ} [PatternModel pat]
|
||||
public theorem split_eq_next_of_not_matchesAt {ρ : Type} {pat : ρ} [PatternModel pat] [StrictPatternModel pat]
|
||||
{s : Slice} {firstRejected start} {hle} (hs : start ≠ s.endPos) (h : ¬ MatchesAt pat start) :
|
||||
Model.split pat firstRejected start hle =
|
||||
Model.split pat firstRejected (start.next hs) (by exact Std.le_trans hle (by simp)) := by
|
||||
@@ -103,7 +103,7 @@ def splitFromSteps {s : Slice} (currPos : s.Pos) (l : List (SearchStep s)) : Lis
|
||||
| .matched p q :: l => s.subslice! currPos p :: splitFromSteps q l
|
||||
|
||||
theorem IsValidSearchFrom.splitFromSteps_eq_extend_split {ρ : Type} (pat : ρ)
|
||||
[PatternModel pat] (l : List (SearchStep s)) (pos pos' : s.Pos) (h₀ : pos ≤ pos')
|
||||
[PatternModel pat] [StrictPatternModel pat] (l : List (SearchStep s)) (pos pos' : s.Pos) (h₀ : pos ≤ pos')
|
||||
(h' : ∀ p, pos ≤ p → p < pos' → ¬ MatchesAt pat p)
|
||||
(h : IsValidSearchFrom pat pos' l) :
|
||||
splitFromSteps pos l = Model.split pat pos pos' h₀ := by
|
||||
@@ -155,7 +155,7 @@ end Model
|
||||
open Model
|
||||
|
||||
@[cbv_eval]
|
||||
public theorem toList_splitToSubslice_eq_modelSplit {ρ : Type} (pat : ρ) [PatternModel pat]
|
||||
public theorem toList_splitToSubslice_eq_modelSplit {ρ : Type} (pat : ρ) [PatternModel pat] [StrictPatternModel pat]
|
||||
{σ : Slice → Type} [ToForwardSearcher pat σ] [∀ s, Std.Iterator (σ s) Id (SearchStep s)]
|
||||
[∀ s, Std.Iterators.Finite (σ s) Id] [LawfulToForwardSearcherModel pat] (s : Slice) :
|
||||
(s.splitToSubslice pat).toList = Model.split pat s.startPos s.startPos (by exact Std.le_refl _) := by
|
||||
@@ -168,7 +168,7 @@ end Pattern
|
||||
open Pattern
|
||||
|
||||
public theorem toList_splitToSubslice_of_isEmpty {ρ : Type} (pat : ρ)
|
||||
[Model.PatternModel pat] {σ : Slice → Type}
|
||||
[Model.PatternModel pat] [Model.StrictPatternModel pat] {σ : Slice → Type}
|
||||
[ToForwardSearcher pat σ] [∀ s, Std.Iterator (σ s) Id (SearchStep s)]
|
||||
[∀ s, Std.Iterators.Finite (σ s) Id] [Model.LawfulToForwardSearcherModel pat] {s : Slice}
|
||||
(h : s.isEmpty = true) :
|
||||
@@ -182,7 +182,7 @@ public theorem toList_split_eq_splitToSubslice {ρ : Type} (pat : ρ) {σ : Slic
|
||||
simp [split, Std.Iter.toList_map]
|
||||
|
||||
public theorem toList_split_of_isEmpty {ρ : Type} (pat : ρ)
|
||||
[Model.PatternModel pat] {σ : Slice → Type}
|
||||
[Model.PatternModel pat] [Model.StrictPatternModel pat] {σ : Slice → Type}
|
||||
[ToForwardSearcher pat σ] [∀ s, Std.Iterator (σ s) Id (SearchStep s)]
|
||||
[∀ s, Std.Iterators.Finite (σ s) Id] [Model.LawfulToForwardSearcherModel pat] {s : Slice}
|
||||
(h : s.isEmpty = true) :
|
||||
@@ -200,7 +200,7 @@ public theorem split_eq_split_toSlice {ρ : Type} {pat : ρ} {σ : Slice → Typ
|
||||
|
||||
@[simp]
|
||||
public theorem toList_split_empty {ρ : Type} (pat : ρ)
|
||||
[Model.PatternModel pat] {σ : Slice → Type}
|
||||
[Model.PatternModel pat] [Model.StrictPatternModel pat] {σ : Slice → Type}
|
||||
[ToForwardSearcher pat σ] [∀ s, Std.Iterator (σ s) Id (SearchStep s)]
|
||||
[∀ s, Std.Iterators.Finite (σ s) Id] [Model.LawfulToForwardSearcherModel pat] :
|
||||
("".split pat).toList.map Slice.copy = [""] := by
|
||||
|
||||
@@ -10,6 +10,9 @@ public import Init.Data.String.Pattern.String
|
||||
public import Init.Data.String.Lemmas.Pattern.Basic
|
||||
import Init.Data.String.Lemmas.IsEmpty
|
||||
import Init.Data.String.Lemmas.Basic
|
||||
import Init.Data.String.Lemmas.Intercalate
|
||||
import Init.Data.String.OrderInstances
|
||||
import Init.Data.String.Lemmas.Splits
|
||||
import Init.Data.ByteArray.Lemmas
|
||||
import Init.Omega
|
||||
|
||||
@@ -20,17 +23,10 @@ namespace String.Slice.Pattern.Model
|
||||
namespace ForwardSliceSearcher
|
||||
|
||||
instance {pat : Slice} : PatternModel pat where
|
||||
/-
|
||||
See the docstring of `PatternModel` for an explanation about why we disallow matching the
|
||||
empty string.
|
||||
Matches s := s = pat.copy
|
||||
|
||||
Requiring `s ≠ ""` is a trick that allows us to give a `PatternModel` instance
|
||||
unconditionally, without forcing `pat.copy` to be non-empty (which would make it very awkward
|
||||
to state theorems about the instance). It does not change anything about the fact that all lemmas
|
||||
about this instance require `pat.isEmpty = false`.
|
||||
-/
|
||||
Matches s := s ≠ "" ∧ s = pat.copy
|
||||
not_matches_empty := by simp
|
||||
theorem strictPatternModel {pat : Slice} (hpat : pat.isEmpty = false) : StrictPatternModel pat where
|
||||
not_matches_empty := by simpa [PatternModel.Matches]
|
||||
|
||||
instance {pat : Slice} : NoPrefixPatternModel pat :=
|
||||
.of_length_eq (by simp +contextual [PatternModel.Matches])
|
||||
@@ -38,59 +34,111 @@ instance {pat : Slice} : NoPrefixPatternModel pat :=
|
||||
instance {pat : Slice} : NoSuffixPatternModel pat :=
|
||||
.of_length_eq (by simp +contextual [PatternModel.Matches])
|
||||
|
||||
theorem isMatch_iff {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isMatch_iff {pat s : Slice} {pos : s.Pos} :
|
||||
IsMatch pat pos ↔ (s.sliceTo pos).copy = pat.copy := by
|
||||
simp only [Model.isMatch_iff, PatternModel.Matches, ne_eq, copy_eq_empty_iff,
|
||||
Bool.not_eq_true, and_iff_right_iff_imp]
|
||||
intro h'
|
||||
rw [← isEmpty_copy (s := s.sliceTo pos), h', isEmpty_copy, h]
|
||||
simp [Model.isMatch_iff, PatternModel.Matches]
|
||||
|
||||
theorem isRevMatch_iff {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isRevMatch_iff {pat s : Slice} {pos : s.Pos} :
|
||||
IsRevMatch pat pos ↔ (s.sliceFrom pos).copy = pat.copy := by
|
||||
simp only [Model.isRevMatch_iff, PatternModel.Matches, ne_eq, copy_eq_empty_iff,
|
||||
Bool.not_eq_true, and_iff_right_iff_imp]
|
||||
intro h'
|
||||
rw [← isEmpty_copy (s := s.sliceFrom pos), h', isEmpty_copy, h]
|
||||
simp [Model.isRevMatch_iff, PatternModel.Matches]
|
||||
|
||||
theorem isLongestMatch_iff {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestMatch_iff {pat s : Slice} {pos : s.Pos} :
|
||||
IsLongestMatch pat pos ↔ (s.sliceTo pos).copy = pat.copy := by
|
||||
rw [isLongestMatch_iff_isMatch, isMatch_iff h]
|
||||
rw [isLongestMatch_iff_isMatch, isMatch_iff]
|
||||
|
||||
theorem isLongestRevMatch_iff {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestRevMatch_iff {pat s : Slice} {pos : s.Pos} :
|
||||
IsLongestRevMatch pat pos ↔ (s.sliceFrom pos).copy = pat.copy := by
|
||||
rw [isLongestRevMatch_iff_isRevMatch, isRevMatch_iff h]
|
||||
rw [isLongestRevMatch_iff_isRevMatch, isRevMatch_iff]
|
||||
|
||||
theorem isLongestMatchAt_iff {pat s : Slice} {pos₁ pos₂ : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestMatchAt_iff {pat s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAt pat pos₁ pos₂ ↔ ∃ h, (s.slice pos₁ pos₂ h).copy = pat.copy := by
|
||||
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff h]
|
||||
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff]
|
||||
|
||||
theorem isLongestRevMatchAt_iff {pat s : Slice} {pos₁ pos₂ : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestMatchAtChain_iff {pat s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAtChain pat pos₁ pos₂ ↔
|
||||
∃ h n, (s.slice pos₁ pos₂ h).copy = String.join (List.replicate n pat.copy) := by
|
||||
refine ⟨fun h => ⟨h.le, ?_⟩, fun ⟨h, n, h'⟩ => ?_⟩
|
||||
· induction h with
|
||||
| nil => simpa using ⟨0, by simp⟩
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih =>
|
||||
rw [isLongestMatchAt_iff] at h₁
|
||||
obtain ⟨n, ih⟩ := ih
|
||||
obtain ⟨h₀, h₁⟩ := h₁
|
||||
have : (s.slice p₁ p₃ (Std.le_trans h₀ h₂.le)).copy = (s.slice p₁ p₂ h₀).copy ++ (s.slice p₂ p₃ h₂.le).copy := by
|
||||
simp [(Slice.Pos.slice p₂ _ _ h₀ h₂.le).splits.eq_append]
|
||||
refine ⟨n + 1, ?_⟩
|
||||
rw [this, h₁, ih]
|
||||
simp [← String.join_cons, ← List.replicate_succ]
|
||||
· induction n generalizing pos₁ pos₂ with
|
||||
| zero => simp_all
|
||||
| succ n ih =>
|
||||
rw [List.replicate_succ, String.join_cons] at h'
|
||||
refine .cons _ (Pos.ofSlice (Pos.ofEqAppend h')) _ ?_ (ih ?_ Pos.ofSlice_le ?_)
|
||||
· simpa [isLongestMatchAt_iff] using (Pos.splits_ofEqAppend h').copy_sliceTo_eq
|
||||
· simpa [sliceFrom_slice ▸ (Pos.splits_ofEqAppend h').copy_sliceFrom_eq] using ⟨n, rfl⟩
|
||||
· simpa using (Pos.splits_ofEqAppend h').copy_sliceFrom_eq
|
||||
|
||||
theorem isLongestMatchAtChain_startPos_endPos_iff {pat s : Slice} :
|
||||
IsLongestMatchAtChain pat s.startPos s.endPos ↔
|
||||
∃ n, s.copy = String.join (List.replicate n pat.copy) := by
|
||||
simp [isLongestMatchAtChain_iff]
|
||||
|
||||
theorem isLongestRevMatchAt_iff {pat s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAt pat pos₁ pos₂ ↔ ∃ h, (s.slice pos₁ pos₂ h).copy = pat.copy := by
|
||||
simp [Model.isLongestRevMatchAt_iff, isLongestRevMatch_iff h]
|
||||
simp [Model.isLongestRevMatchAt_iff, isLongestRevMatch_iff]
|
||||
|
||||
theorem isLongestMatchAt_iff_splits {pat s : Slice} {pos₁ pos₂ : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestRevMatchAtChain_iff {pat s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAtChain pat pos₁ pos₂ ↔
|
||||
∃ h n, (s.slice pos₁ pos₂ h).copy = String.join (List.replicate n pat.copy) := by
|
||||
refine ⟨fun h => ⟨h.le, ?_⟩, fun ⟨h, n, h'⟩ => ?_⟩
|
||||
· induction h with
|
||||
| nil => simpa using ⟨0, by simp⟩
|
||||
| cons p₂ p₃ h₁ h₂ ih =>
|
||||
rw [isLongestRevMatchAt_iff] at h₂
|
||||
obtain ⟨n, ih⟩ := ih
|
||||
obtain ⟨h₀, h₂⟩ := h₂
|
||||
have : (s.slice pos₁ p₃ (Std.le_trans h₁.le h₀)).copy = (s.slice pos₁ p₂ h₁.le).copy ++ (s.slice p₂ p₃ h₀).copy := by
|
||||
simp [(Slice.Pos.slice p₂ _ _ (IsLongestRevMatchAtChain.le ‹_›) h₀).splits.eq_append]
|
||||
refine ⟨n + 1, ?_⟩
|
||||
rw [this, h₂, ih]
|
||||
simp [← List.replicate_append_replicate]
|
||||
· induction n generalizing pos₁ pos₂ with
|
||||
| zero => simp_all
|
||||
| succ n ih =>
|
||||
have h'' : (s.slice pos₁ pos₂ h).copy = String.join (List.replicate n pat.copy) ++ pat.copy := by
|
||||
rw [h', List.replicate_succ', String.join_append]; simp
|
||||
refine .cons _ (Pos.ofSlice (Pos.ofEqAppend h'')) _ (ih ?_ Pos.le_ofSlice ?_) ?_
|
||||
· simpa [sliceTo_slice ▸ (Pos.splits_ofEqAppend h'').copy_sliceTo_eq] using ⟨n, rfl⟩
|
||||
· simpa using (Pos.splits_ofEqAppend h'').copy_sliceTo_eq
|
||||
· simpa [isLongestRevMatchAt_iff] using (Pos.splits_ofEqAppend h'').copy_sliceFrom_eq
|
||||
|
||||
theorem isLongestRevMatchAtChain_startPos_endPos_iff {pat s : Slice} :
|
||||
IsLongestRevMatchAtChain pat s.startPos s.endPos ↔
|
||||
∃ n, s.copy = String.join (List.replicate n pat.copy) := by
|
||||
simp [isLongestRevMatchAtChain_iff]
|
||||
|
||||
theorem isLongestMatchAt_iff_splits {pat s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAt pat pos₁ pos₂ ↔ ∃ t₁ t₂, pos₁.Splits t₁ (pat.copy ++ t₂) ∧
|
||||
pos₂.Splits (t₁ ++ pat.copy) t₂ := by
|
||||
simp only [isLongestMatchAt_iff h, copy_slice_eq_iff_splits]
|
||||
simp only [isLongestMatchAt_iff, copy_slice_eq_iff_splits]
|
||||
|
||||
theorem isLongestRevMatchAt_iff_splits {pat s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
(h : pat.isEmpty = false) :
|
||||
theorem isLongestRevMatchAt_iff_splits {pat s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAt pat pos₁ pos₂ ↔ ∃ t₁ t₂, pos₁.Splits t₁ (pat.copy ++ t₂) ∧
|
||||
pos₂.Splits (t₁ ++ pat.copy) t₂ := by
|
||||
simp only [isLongestRevMatchAt_iff h, copy_slice_eq_iff_splits]
|
||||
simp only [isLongestRevMatchAt_iff, copy_slice_eq_iff_splits]
|
||||
|
||||
theorem isLongestMatch_iff_splits {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestMatch_iff_splits {pat s : Slice} {pos : s.Pos} :
|
||||
IsLongestMatch pat pos ↔ ∃ t, pos.Splits pat.copy t := by
|
||||
rw [isLongestMatch_iff h, copy_sliceTo_eq_iff_exists_splits]
|
||||
rw [isLongestMatch_iff, copy_sliceTo_eq_iff_exists_splits]
|
||||
|
||||
theorem isLongestRevMatch_iff_splits {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem isLongestRevMatch_iff_splits {pat s : Slice} {pos : s.Pos} :
|
||||
IsLongestRevMatch pat pos ↔ ∃ t, pos.Splits t pat.copy := by
|
||||
rw [isLongestRevMatch_iff h, copy_sliceFrom_eq_iff_exists_splits]
|
||||
rw [isLongestRevMatch_iff, copy_sliceFrom_eq_iff_exists_splits]
|
||||
|
||||
theorem isLongestMatchAt_iff_extract {pat s : Slice} {pos₁ pos₂ : s.Pos} (h : pat.isEmpty = false) :
|
||||
IsLongestMatchAt pat pos₁ pos₂ ↔
|
||||
s.copy.toByteArray.extract pos₁.offset.byteIdx pos₂.offset.byteIdx = pat.copy.toByteArray := by
|
||||
rw [isLongestMatchAt_iff h]
|
||||
rw [isLongestMatchAt_iff]
|
||||
refine ⟨fun ⟨h, h'⟩ => ?_, fun h' => ?_⟩
|
||||
· simp [← h', toByteArray_copy_slice]
|
||||
· rw [← Slice.toByteArray_copy_ne_empty_iff, ← h', ne_eq, ByteArray.extract_eq_empty_iff] at h
|
||||
@@ -102,7 +150,7 @@ theorem isLongestRevMatchAt_iff_extract {pat s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
IsLongestRevMatchAt pat pos₁ pos₂ ↔
|
||||
s.copy.toByteArray.extract pos₁.offset.byteIdx pos₂.offset.byteIdx =
|
||||
pat.copy.toByteArray := by
|
||||
rw [isLongestRevMatchAt_iff h]
|
||||
rw [isLongestRevMatchAt_iff]
|
||||
refine ⟨fun ⟨h, h'⟩ => ?_, fun h' => ?_⟩
|
||||
· simp [← h', toByteArray_copy_slice]
|
||||
· rw [← Slice.toByteArray_copy_ne_empty_iff, ← h', ne_eq, ByteArray.extract_eq_empty_iff] at h
|
||||
@@ -130,21 +178,21 @@ theorem offset_of_isLongestRevMatchAt {pat s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
suffices pos₂.offset.byteIdx ≤ s.utf8ByteSize by omega
|
||||
simpa [Pos.le_iff, Pos.Raw.le_iff] using pos₂.le_endPos
|
||||
|
||||
theorem matchesAt_iff_splits {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem matchesAt_iff_splits {pat s : Slice} {pos : s.Pos} :
|
||||
MatchesAt pat pos ↔ ∃ t₁ t₂, pos.Splits t₁ (pat.copy ++ t₂) := by
|
||||
simp only [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_splits h]
|
||||
simp only [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_splits]
|
||||
exact ⟨fun ⟨e, t₁, t₂, ht₁, ht₂⟩ => ⟨t₁, t₂, ht₁⟩,
|
||||
fun ⟨t₁, t₂, ht⟩ => ⟨ht.rotateRight, t₁, t₂, ht, ht.splits_rotateRight⟩⟩
|
||||
|
||||
theorem revMatchesAt_iff_splits {pat s : Slice} {pos : s.Pos} (h : pat.isEmpty = false) :
|
||||
theorem revMatchesAt_iff_splits {pat s : Slice} {pos : s.Pos} :
|
||||
RevMatchesAt pat pos ↔ ∃ t₁ t₂, pos.Splits (t₁ ++ pat.copy) t₂ := by
|
||||
simp only [revMatchesAt_iff_exists_isLongestRevMatchAt, isLongestRevMatchAt_iff_splits h]
|
||||
simp only [revMatchesAt_iff_exists_isLongestRevMatchAt, isLongestRevMatchAt_iff_splits]
|
||||
exact ⟨fun ⟨e, t₁, t₂, ht₁, ht₂⟩ => ⟨t₁, t₂, ht₂⟩,
|
||||
fun ⟨t₁, t₂, ht⟩ => ⟨ht.rotateLeft, t₁, t₂, ht.splits_rotateLeft, ht⟩⟩
|
||||
|
||||
theorem exists_matchesAt_iff_eq_append {pat s : Slice} (h : pat.isEmpty = false) :
|
||||
theorem exists_matchesAt_iff_eq_append {pat s : Slice} :
|
||||
(∃ (pos : s.Pos), MatchesAt pat pos) ↔ ∃ t₁ t₂, s.copy = t₁ ++ pat.copy ++ t₂ := by
|
||||
simp only [matchesAt_iff_splits h]
|
||||
simp only [matchesAt_iff_splits]
|
||||
constructor
|
||||
· rintro ⟨pos, t₁, t₂, hsplit⟩
|
||||
exact ⟨t₁, t₂, by rw [hsplit.eq_append, append_assoc]⟩
|
||||
@@ -154,9 +202,9 @@ theorem exists_matchesAt_iff_eq_append {pat s : Slice} (h : pat.isEmpty = false)
|
||||
⟨t₁, pat.copy ++ t₂, by rw [← append_assoc]; exact heq, rfl⟩
|
||||
exact ⟨s.pos _ hvalid, t₁, t₂, ⟨by rw [← append_assoc]; exact heq, by simp⟩⟩
|
||||
|
||||
theorem exists_revMatchesAt_iff_eq_append {pat s : Slice} (h : pat.isEmpty = false) :
|
||||
theorem exists_revMatchesAt_iff_eq_append {pat s : Slice} :
|
||||
(∃ (pos : s.Pos), RevMatchesAt pat pos) ↔ ∃ t₁ t₂, s.copy = t₁ ++ pat.copy ++ t₂ := by
|
||||
simp only [revMatchesAt_iff_splits h]
|
||||
simp only [revMatchesAt_iff_splits]
|
||||
constructor
|
||||
· rintro ⟨pos, t₁, t₂, hsplit⟩
|
||||
exact ⟨t₁, t₂, by rw [hsplit.eq_append, append_assoc]⟩
|
||||
@@ -233,8 +281,10 @@ end ForwardSliceSearcher
|
||||
namespace ForwardStringSearcher
|
||||
|
||||
instance {pat : String} : PatternModel pat where
|
||||
Matches s := s ≠ "" ∧ s = pat
|
||||
not_matches_empty := by simp
|
||||
Matches s := s = pat
|
||||
|
||||
theorem strictPatternModel {pat : String} (h : pat ≠ "") : StrictPatternModel pat where
|
||||
not_matches_empty := by simpa [PatternModel.Matches]
|
||||
|
||||
instance {pat : String} : NoPrefixPatternModel pat :=
|
||||
.of_length_eq (by simp +contextual [PatternModel.Matches])
|
||||
@@ -267,12 +317,60 @@ theorem isLongestMatchAt_iff_isLongestMatchAt_toSlice {pat : String} {s : Slice}
|
||||
IsLongestMatchAt (ρ := Slice) pat.toSlice pos₁ pos₂ := by
|
||||
simp [Model.isLongestMatchAt_iff, isLongestMatch_iff_isLongestMatch_toSlice]
|
||||
|
||||
theorem isLongestMatchAtChain_iff_isLongestMatchAtChain_toSlice {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAtChain pat pos₁ pos₂ ↔
|
||||
IsLongestMatchAtChain pat.toSlice pos₁ pos₂ := by
|
||||
refine ⟨fun h => ?_, fun h => ?_⟩
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih =>
|
||||
exact .cons _ _ _ (isLongestMatchAt_iff_isLongestMatchAt_toSlice.1 h₁) ih
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₁ p₂ p₃ h₁ h₂ ih =>
|
||||
exact .cons _ _ _ (isLongestMatchAt_iff_isLongestMatchAt_toSlice.2 h₁) ih
|
||||
|
||||
theorem isLongestMatchAtChain_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAtChain pat pos₁ pos₂ ↔
|
||||
∃ h n, (s.slice pos₁ pos₂ h).copy = String.join (List.replicate n pat) := by
|
||||
simp [isLongestMatchAtChain_iff_isLongestMatchAtChain_toSlice,
|
||||
ForwardSliceSearcher.isLongestMatchAtChain_iff]
|
||||
|
||||
theorem isLongestMatchAtChain_startPos_endPos_iff {pat : String} {s : Slice} :
|
||||
IsLongestMatchAtChain pat s.startPos s.endPos ↔
|
||||
∃ n, s.copy = String.join (List.replicate n pat) := by
|
||||
simp [isLongestMatchAtChain_iff]
|
||||
|
||||
theorem isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice {pat : String} {s : Slice}
|
||||
{pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAt (ρ := String) pat pos₁ pos₂ ↔
|
||||
IsLongestRevMatchAt (ρ := Slice) pat.toSlice pos₁ pos₂ := by
|
||||
simp [Model.isLongestRevMatchAt_iff, isLongestRevMatch_iff_isLongestRevMatch_toSlice]
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_toSlice {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAtChain pat pos₁ pos₂ ↔
|
||||
IsLongestRevMatchAtChain pat.toSlice pos₁ pos₂ := by
|
||||
refine ⟨fun h => ?_, fun h => ?_⟩
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₂ p₃ _ hmatch ih =>
|
||||
exact .cons _ _ _ ih (isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice.1 hmatch)
|
||||
· induction h with
|
||||
| nil => simp
|
||||
| cons p₂ p₃ _ hmatch ih =>
|
||||
exact .cons _ _ _ ih (isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice.2 hmatch)
|
||||
|
||||
theorem isLongestRevMatchAtChain_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAtChain pat pos₁ pos₂ ↔
|
||||
∃ h n, (s.slice pos₁ pos₂ h).copy = String.join (List.replicate n pat) := by
|
||||
simp [isLongestRevMatchAtChain_iff_isLongestRevMatchAtChain_toSlice,
|
||||
ForwardSliceSearcher.isLongestRevMatchAtChain_iff]
|
||||
|
||||
theorem isLongestRevMatchAtChain_startPos_endPos_iff {pat : String} {s : Slice} :
|
||||
IsLongestRevMatchAtChain pat s.startPos s.endPos ↔
|
||||
∃ n, s.copy = String.join (List.replicate n pat) := by
|
||||
simp [isLongestRevMatchAtChain_iff]
|
||||
|
||||
theorem matchesAt_iff_toSlice {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
MatchesAt (ρ := String) pat pos ↔ MatchesAt (ρ := Slice) pat.toSlice pos := by
|
||||
simp [matchesAt_iff_exists_isLongestMatchAt, isLongestMatchAt_iff_isLongestMatchAt_toSlice]
|
||||
@@ -282,61 +380,55 @@ theorem revMatchesAt_iff_toSlice {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
simp [revMatchesAt_iff_exists_isLongestRevMatchAt,
|
||||
isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice]
|
||||
|
||||
private theorem toSlice_isEmpty (h : pat ≠ "") : pat.toSlice.isEmpty = false := by
|
||||
rwa [isEmpty_toSlice, isEmpty_eq_false_iff]
|
||||
|
||||
theorem isMatch_iff {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "") :
|
||||
theorem isMatch_iff {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
IsMatch pat pos ↔ (s.sliceTo pos).copy = pat := by
|
||||
rw [isMatch_iff_slice, ForwardSliceSearcher.isMatch_iff (toSlice_isEmpty h)]
|
||||
rw [isMatch_iff_slice, ForwardSliceSearcher.isMatch_iff]
|
||||
simp
|
||||
|
||||
theorem isRevMatch_iff {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "") :
|
||||
theorem isRevMatch_iff {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
IsRevMatch pat pos ↔ (s.sliceFrom pos).copy = pat := by
|
||||
rw [isRevMatch_iff_slice, ForwardSliceSearcher.isRevMatch_iff (toSlice_isEmpty h)]
|
||||
rw [isRevMatch_iff_slice, ForwardSliceSearcher.isRevMatch_iff]
|
||||
simp
|
||||
|
||||
theorem isLongestMatch_iff {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "") :
|
||||
theorem isLongestMatch_iff {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
IsLongestMatch pat pos ↔ (s.sliceTo pos).copy = pat := by
|
||||
rw [isLongestMatch_iff_isMatch, isMatch_iff h]
|
||||
rw [isLongestMatch_iff_isMatch, isMatch_iff]
|
||||
|
||||
theorem isLongestRevMatch_iff {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "") :
|
||||
theorem isLongestRevMatch_iff {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
IsLongestRevMatch pat pos ↔ (s.sliceFrom pos).copy = pat := by
|
||||
rw [isLongestRevMatch_iff_isRevMatch, isRevMatch_iff h]
|
||||
rw [isLongestRevMatch_iff_isRevMatch, isRevMatch_iff]
|
||||
|
||||
theorem isLongestMatchAt_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} (h : pat ≠ "") :
|
||||
theorem isLongestMatchAt_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAt pat pos₁ pos₂ ↔ ∃ h, (s.slice pos₁ pos₂ h).copy = pat := by
|
||||
rw [isLongestMatchAt_iff_isLongestMatchAt_toSlice,
|
||||
ForwardSliceSearcher.isLongestMatchAt_iff (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.isLongestMatchAt_iff]
|
||||
simp
|
||||
|
||||
theorem isLongestRevMatchAt_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} (h : pat ≠ "") :
|
||||
theorem isLongestRevMatchAt_iff {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAt pat pos₁ pos₂ ↔ ∃ h, (s.slice pos₁ pos₂ h).copy = pat := by
|
||||
rw [isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice,
|
||||
ForwardSliceSearcher.isLongestRevMatchAt_iff (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.isLongestRevMatchAt_iff]
|
||||
simp
|
||||
|
||||
theorem isLongestMatchAt_iff_splits {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
(h : pat ≠ "") :
|
||||
theorem isLongestMatchAt_iff_splits {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestMatchAt pat pos₁ pos₂ ↔
|
||||
∃ t₁ t₂, pos₁.Splits t₁ (pat ++ t₂) ∧ pos₂.Splits (t₁ ++ pat) t₂ := by
|
||||
rw [isLongestMatchAt_iff_isLongestMatchAt_toSlice,
|
||||
ForwardSliceSearcher.isLongestMatchAt_iff_splits (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.isLongestMatchAt_iff_splits]
|
||||
simp
|
||||
|
||||
theorem isLongestRevMatchAt_iff_splits {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
(h : pat ≠ "") :
|
||||
theorem isLongestRevMatchAt_iff_splits {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} :
|
||||
IsLongestRevMatchAt pat pos₁ pos₂ ↔
|
||||
∃ t₁ t₂, pos₁.Splits t₁ (pat ++ t₂) ∧ pos₂.Splits (t₁ ++ pat) t₂ := by
|
||||
rw [isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice,
|
||||
ForwardSliceSearcher.isLongestRevMatchAt_iff_splits (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.isLongestRevMatchAt_iff_splits]
|
||||
simp
|
||||
|
||||
theorem isLongestMatchAt_iff_extract {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
(h : pat ≠ "") :
|
||||
theorem isLongestMatchAt_iff_extract {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos} (h : pat ≠ "") :
|
||||
IsLongestMatchAt pat pos₁ pos₂ ↔
|
||||
s.copy.toByteArray.extract pos₁.offset.byteIdx pos₂.offset.byteIdx = pat.toByteArray := by
|
||||
rw [isLongestMatchAt_iff_isLongestMatchAt_toSlice,
|
||||
ForwardSliceSearcher.isLongestMatchAt_iff_extract (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.isLongestMatchAt_iff_extract (by simpa)]
|
||||
simp
|
||||
|
||||
theorem isLongestRevMatchAt_iff_extract {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
@@ -344,38 +436,38 @@ theorem isLongestRevMatchAt_iff_extract {pat : String} {s : Slice} {pos₁ pos
|
||||
IsLongestRevMatchAt pat pos₁ pos₂ ↔
|
||||
s.copy.toByteArray.extract pos₁.offset.byteIdx pos₂.offset.byteIdx = pat.toByteArray := by
|
||||
rw [isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice,
|
||||
ForwardSliceSearcher.isLongestRevMatchAt_iff_extract (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.isLongestRevMatchAt_iff_extract (by simpa)]
|
||||
simp
|
||||
|
||||
theorem offset_of_isLongestMatchAt {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
(h : pat ≠ "") (h' : IsLongestMatchAt pat pos₁ pos₂) :
|
||||
pos₂.offset = pos₁.offset.increaseBy pat.utf8ByteSize := by
|
||||
rw [show pat.utf8ByteSize = pat.toSlice.utf8ByteSize from utf8ByteSize_toSlice.symm]
|
||||
exact ForwardSliceSearcher.offset_of_isLongestMatchAt (toSlice_isEmpty h)
|
||||
exact ForwardSliceSearcher.offset_of_isLongestMatchAt (by simpa)
|
||||
(isLongestMatchAt_iff_isLongestMatchAt_toSlice.1 h')
|
||||
|
||||
theorem offset_of_isLongestRevMatchAt {pat : String} {s : Slice} {pos₁ pos₂ : s.Pos}
|
||||
(h : pat ≠ "") (h' : IsLongestRevMatchAt pat pos₁ pos₂) :
|
||||
pos₂.offset = pos₁.offset.increaseBy pat.utf8ByteSize := by
|
||||
rw [show pat.utf8ByteSize = pat.toSlice.utf8ByteSize from utf8ByteSize_toSlice.symm]
|
||||
exact ForwardSliceSearcher.offset_of_isLongestRevMatchAt (toSlice_isEmpty h)
|
||||
exact ForwardSliceSearcher.offset_of_isLongestRevMatchAt (by simpa)
|
||||
(isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice.1 h')
|
||||
|
||||
theorem matchesAt_iff_splits {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "") :
|
||||
theorem matchesAt_iff_splits {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
MatchesAt pat pos ↔ ∃ t₁ t₂, pos.Splits t₁ (pat ++ t₂) := by
|
||||
rw [matchesAt_iff_toSlice,
|
||||
ForwardSliceSearcher.matchesAt_iff_splits (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.matchesAt_iff_splits]
|
||||
simp
|
||||
|
||||
theorem revMatchesAt_iff_splits {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "") :
|
||||
theorem revMatchesAt_iff_splits {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
RevMatchesAt pat pos ↔ ∃ t₁ t₂, pos.Splits (t₁ ++ pat) t₂ := by
|
||||
rw [revMatchesAt_iff_toSlice,
|
||||
ForwardSliceSearcher.revMatchesAt_iff_splits (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.revMatchesAt_iff_splits]
|
||||
simp
|
||||
|
||||
theorem exists_matchesAt_iff_eq_append {pat : String} {s : Slice} (h : pat ≠ "") :
|
||||
theorem exists_matchesAt_iff_eq_append {pat : String} {s : Slice} :
|
||||
(∃ (pos : s.Pos), MatchesAt pat pos) ↔ ∃ t₁ t₂, s.copy = t₁ ++ pat ++ t₂ := by
|
||||
simp only [matchesAt_iff_splits h]
|
||||
simp only [matchesAt_iff_splits]
|
||||
constructor
|
||||
· rintro ⟨pos, t₁, t₂, hsplit⟩
|
||||
exact ⟨t₁, t₂, by rw [hsplit.eq_append, append_assoc]⟩
|
||||
@@ -385,12 +477,12 @@ theorem exists_matchesAt_iff_eq_append {pat : String} {s : Slice} (h : pat ≠ "
|
||||
⟨t₁, pat ++ t₂, by rw [← append_assoc]; exact heq, rfl⟩
|
||||
exact ⟨s.pos _ hvalid, t₁, t₂, ⟨by rw [← append_assoc]; exact heq, by simp⟩⟩
|
||||
|
||||
theorem exists_revMatchesAt_iff_eq_append {pat : String} {s : Slice} (h : pat ≠ "") :
|
||||
theorem exists_revMatchesAt_iff_eq_append {pat : String} {s : Slice} :
|
||||
(∃ (pos : s.Pos), RevMatchesAt pat pos) ↔ ∃ t₁ t₂, s.copy = t₁ ++ pat ++ t₂ := by
|
||||
rw [show (∃ (pos : s.Pos), RevMatchesAt (ρ := String) pat pos) ↔
|
||||
(∃ (pos : s.Pos), RevMatchesAt (ρ := Slice) pat.toSlice pos) from by
|
||||
simp [revMatchesAt_iff_toSlice],
|
||||
ForwardSliceSearcher.exists_revMatchesAt_iff_eq_append (toSlice_isEmpty h)]
|
||||
ForwardSliceSearcher.exists_revMatchesAt_iff_eq_append]
|
||||
simp
|
||||
|
||||
theorem matchesAt_iff_isLongestMatchAt {pat : String} {s : Slice} {pos : s.Pos}
|
||||
@@ -398,7 +490,7 @@ theorem matchesAt_iff_isLongestMatchAt {pat : String} {s : Slice} {pos : s.Pos}
|
||||
MatchesAt pat pos ↔ ∃ (h : (pos.offset.increaseBy pat.utf8ByteSize).IsValidForSlice s),
|
||||
IsLongestMatchAt pat pos (s.pos _ h) := by
|
||||
have key := ForwardSliceSearcher.matchesAt_iff_isLongestMatchAt (pat := pat.toSlice)
|
||||
(toSlice_isEmpty h) (pos := pos)
|
||||
(by simpa) (pos := pos)
|
||||
simp only [utf8ByteSize_toSlice, ← isLongestMatchAt_iff_isLongestMatchAt_toSlice] at key
|
||||
rwa [matchesAt_iff_toSlice]
|
||||
|
||||
@@ -408,7 +500,7 @@ theorem revMatchesAt_iff_isLongestRevMatchAt {pat : String} {s : Slice} {pos : s
|
||||
∃ (h : (pos.offset.decreaseBy pat.utf8ByteSize).IsValidForSlice s),
|
||||
IsLongestRevMatchAt pat (s.pos _ h) pos := by
|
||||
have key := ForwardSliceSearcher.revMatchesAt_iff_isLongestRevMatchAt (pat := pat.toSlice)
|
||||
(toSlice_isEmpty h) (pos := pos)
|
||||
(by simpa) (pos := pos)
|
||||
simp only [utf8ByteSize_toSlice, ← isLongestRevMatchAt_iff_isLongestRevMatchAt_toSlice] at key
|
||||
rwa [revMatchesAt_iff_toSlice]
|
||||
|
||||
@@ -418,14 +510,14 @@ theorem matchesAt_iff_getElem {pat : String} {s : Slice} {pos : s.Pos} (h : pat
|
||||
∀ (j), (hj : j < pat.toByteArray.size) →
|
||||
pat.toByteArray[j] = s.copy.toByteArray[pos.offset.byteIdx + j] := by
|
||||
have key := ForwardSliceSearcher.matchesAt_iff_getElem (pat := pat.toSlice)
|
||||
(toSlice_isEmpty h) (pos := pos)
|
||||
(by simpa) (pos := pos)
|
||||
simp only [copy_toSlice] at key
|
||||
rwa [matchesAt_iff_toSlice]
|
||||
|
||||
theorem le_of_matchesAt {pat : String} {s : Slice} {pos : s.Pos} (h : pat ≠ "")
|
||||
(h' : MatchesAt pat pos) : pos.offset.increaseBy pat.utf8ByteSize ≤ s.rawEndPos := by
|
||||
rw [show pat.utf8ByteSize = pat.toSlice.utf8ByteSize from utf8ByteSize_toSlice.symm]
|
||||
exact ForwardSliceSearcher.le_of_matchesAt (toSlice_isEmpty h)
|
||||
exact ForwardSliceSearcher.le_of_matchesAt (by simpa)
|
||||
(matchesAt_iff_toSlice.1 h')
|
||||
|
||||
theorem matchesAt_iff_matchesAt_toSlice {pat : String} {s : Slice}
|
||||
|
||||
@@ -56,7 +56,7 @@ theorem skipPrefix?_eq_some_iff {pat s : Slice} {pos : s.Pos} :
|
||||
simp only [reduceCtorEq, false_iff]
|
||||
intro heq
|
||||
have := h (s.sliceFrom pos).copy
|
||||
simp [← heq, pos.splits.eq_append] at this
|
||||
simp [← heq, -sliceTo_append_sliceFrom, pos.splits.eq_append] at this
|
||||
|
||||
theorem isSome_skipPrefix? {pat s : Slice} : (skipPrefix? pat s).isSome = startsWith pat s := by
|
||||
fun_cases skipPrefix? <;> simp_all
|
||||
@@ -76,14 +76,11 @@ namespace Model.ForwardSliceSearcher
|
||||
|
||||
open Pattern.ForwardSliceSearcher
|
||||
|
||||
public instance {pat : Slice} : LawfulForwardPattern pat where
|
||||
public instance {pat : Slice} : LawfulForwardPatternModel pat where
|
||||
skipPrefixOfNonempty?_eq _ := rfl
|
||||
startsWith_eq _ := isSome_skipPrefix?.symm
|
||||
|
||||
public theorem lawfulForwardPatternModel {pat : Slice} (hpat : pat.isEmpty = false) :
|
||||
LawfulForwardPatternModel pat where
|
||||
skipPrefix?_eq_some_iff pos := by
|
||||
simp [ForwardPattern.skipPrefix?, skipPrefix?_eq_some_iff, isLongestMatch_iff hpat]
|
||||
simp [ForwardPattern.skipPrefix?, skipPrefix?_eq_some_iff, isLongestMatch_iff]
|
||||
|
||||
end Model.ForwardSliceSearcher
|
||||
|
||||
@@ -91,14 +88,11 @@ namespace Model.ForwardStringSearcher
|
||||
|
||||
open Pattern.ForwardSliceSearcher
|
||||
|
||||
public instance {pat : String} : LawfulForwardPattern pat where
|
||||
public instance {pat : String} : LawfulForwardPatternModel pat where
|
||||
skipPrefixOfNonempty?_eq _ := rfl
|
||||
startsWith_eq _ := isSome_skipPrefix?.symm
|
||||
|
||||
public theorem lawfulForwardPatternModel {pat : String} (hpat : pat ≠ "") :
|
||||
LawfulForwardPatternModel pat where
|
||||
skipPrefix?_eq_some_iff pos := by
|
||||
simp [ForwardPattern.skipPrefix?, skipPrefix?_eq_some_iff, isLongestMatch_iff hpat]
|
||||
simp [ForwardPattern.skipPrefix?, skipPrefix?_eq_some_iff, isLongestMatch_iff]
|
||||
|
||||
end Model.ForwardStringSearcher
|
||||
|
||||
@@ -153,7 +147,7 @@ theorem skipSuffix?_eq_some_iff {pat s : Slice} {pos : s.Pos} :
|
||||
simp only [reduceCtorEq, false_iff]
|
||||
intro heq
|
||||
have := h (s.sliceTo pos).copy
|
||||
simp [← heq, pos.splits.eq_append] at this
|
||||
simp [← heq, -sliceTo_append_sliceFrom, pos.splits.eq_append] at this
|
||||
|
||||
theorem isSome_skipSuffix? {pat s : Slice} : (skipSuffix? pat s).isSome = endsWith pat s := by
|
||||
fun_cases skipSuffix? <;> simp_all
|
||||
@@ -173,15 +167,12 @@ namespace Model.BackwardSliceSearcher
|
||||
|
||||
open Pattern.BackwardSliceSearcher
|
||||
|
||||
public instance {pat : Slice} : LawfulBackwardPattern pat where
|
||||
public instance {pat : Slice} : LawfulBackwardPatternModel pat where
|
||||
skipSuffixOfNonempty?_eq _ := rfl
|
||||
endsWith_eq _ := isSome_skipSuffix?.symm
|
||||
|
||||
public theorem lawfulBackwardPatternModel {pat : Slice} (hpat : pat.isEmpty = false) :
|
||||
LawfulBackwardPatternModel pat where
|
||||
skipSuffix?_eq_some_iff pos := by
|
||||
simp [BackwardPattern.skipSuffix?, skipSuffix?_eq_some_iff,
|
||||
ForwardSliceSearcher.isLongestRevMatch_iff hpat]
|
||||
ForwardSliceSearcher.isLongestRevMatch_iff]
|
||||
|
||||
end Model.BackwardSliceSearcher
|
||||
|
||||
@@ -189,15 +180,12 @@ namespace Model.BackwardStringSearcher
|
||||
|
||||
open Pattern.BackwardSliceSearcher
|
||||
|
||||
public instance {pat : String} : LawfulBackwardPattern pat where
|
||||
public instance {pat : String} : LawfulBackwardPatternModel pat where
|
||||
skipSuffixOfNonempty?_eq _ := rfl
|
||||
endsWith_eq _ := isSome_skipSuffix?.symm
|
||||
|
||||
public theorem lawfulBackwardPatternModel {pat : String} (hpat : pat ≠ "") :
|
||||
LawfulBackwardPatternModel pat where
|
||||
skipSuffix?_eq_some_iff pos := by
|
||||
simp [BackwardPattern.skipSuffix?, skipSuffix?_eq_some_iff,
|
||||
ForwardStringSearcher.isLongestRevMatch_iff hpat]
|
||||
ForwardStringSearcher.isLongestRevMatch_iff]
|
||||
|
||||
end Model.BackwardStringSearcher
|
||||
|
||||
@@ -219,19 +207,22 @@ public theorem Pattern.ForwardPattern.skipPrefix?_string_eq_skipPrefix?_toSlice
|
||||
{pat : String} {s : Slice} :
|
||||
skipPrefix? pat s = skipPrefix? pat.toSlice s := (rfl)
|
||||
|
||||
public theorem Pos.skip?_string_eq_skip?_toSlice {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
pos.skip? pat = pos.skip? pat.toSlice := (rfl)
|
||||
|
||||
public theorem Pos.skipWhile_string_eq_skipWhile_toSlice {pat : String} {s : Slice}
|
||||
(curr : s.Pos) :
|
||||
Pos.skipWhile curr pat = Pos.skipWhile curr pat.toSlice := by
|
||||
fun_induction Pos.skipWhile curr pat with
|
||||
| case1 pos nextCurr h₁ h₂ ih =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_string_eq_skipPrefix?_toSlice, h₁, h₂, ih]
|
||||
simp [← Pos.skip?_string_eq_skip?_toSlice, h₁, h₂, ih]
|
||||
| case2 pos nextCurr h ih =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_string_eq_skipPrefix?_toSlice, h, ih]
|
||||
simp [← Pos.skip?_string_eq_skip?_toSlice, h, ih]
|
||||
| case3 pos h =>
|
||||
conv => rhs; rw [Pos.skipWhile]
|
||||
simp [← Pattern.ForwardPattern.skipPrefix?_string_eq_skipPrefix?_toSlice]
|
||||
simp [← Pos.skip?_string_eq_skip?_toSlice, h]
|
||||
|
||||
public theorem skipPrefixWhile_string_eq_skipPrefixWhile_toSlice {pat : String} {s : Slice} :
|
||||
s.skipPrefixWhile pat = s.skipPrefixWhile pat.toSlice :=
|
||||
@@ -247,7 +238,7 @@ public theorem takeWhile_string_eq_takeWhile_toSlice {pat : String} {s : Slice}
|
||||
|
||||
public theorem all_string_eq_all_toSlice {pat : String} {s : Slice} :
|
||||
s.all pat = s.all pat.toSlice := by
|
||||
simp only [all, dropWhile_string_eq_dropWhile_toSlice]
|
||||
simp only [all, skipPrefixWhile_string_eq_skipPrefixWhile_toSlice]
|
||||
|
||||
public theorem endsWith_string_eq_endsWith_toSlice {pat : String} {s : Slice} :
|
||||
s.endsWith pat = s.endsWith pat.toSlice := (rfl)
|
||||
@@ -265,19 +256,22 @@ public theorem Pattern.BackwardPattern.skipSuffix?_string_eq_skipSuffix?_toSlice
|
||||
{pat : String} {s : Slice} :
|
||||
skipSuffix? pat s = skipSuffix? pat.toSlice s := (rfl)
|
||||
|
||||
public theorem Pos.revSkip?_string_eq_revSkip?_toSlice {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkip? pat = pos.revSkip? pat.toSlice := (rfl)
|
||||
|
||||
public theorem Pos.revSkipWhile_string_eq_revSkipWhile_toSlice {pat : String} {s : Slice}
|
||||
(curr : s.Pos) :
|
||||
Pos.revSkipWhile curr pat = Pos.revSkipWhile curr pat.toSlice := by
|
||||
fun_induction Pos.revSkipWhile curr pat with
|
||||
| case1 pos nextCurr h₁ h₂ ih =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_string_eq_skipSuffix?_toSlice, h₁, h₂, ih]
|
||||
simp [← Pos.revSkip?_string_eq_revSkip?_toSlice, h₁, h₂, ih]
|
||||
| case2 pos nextCurr h ih =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_string_eq_skipSuffix?_toSlice, h, ih]
|
||||
simp [← Pos.revSkip?_string_eq_revSkip?_toSlice, h, ih]
|
||||
| case3 pos h =>
|
||||
conv => rhs; rw [Pos.revSkipWhile]
|
||||
simp [← Pattern.BackwardPattern.skipSuffix?_string_eq_skipSuffix?_toSlice]
|
||||
simp [← Pos.revSkip?_string_eq_revSkip?_toSlice, h]
|
||||
|
||||
public theorem skipSuffixWhile_string_eq_skipSuffixWhile_toSlice {pat : String} {s : Slice} :
|
||||
s.skipSuffixWhile pat = s.skipSuffixWhile pat.toSlice :=
|
||||
@@ -291,4 +285,8 @@ public theorem takeEndWhile_string_eq_takeEndWhile_toSlice {pat : String} {s : S
|
||||
s.takeEndWhile pat = s.takeEndWhile pat.toSlice := by
|
||||
simp only [takeEndWhile]; exact congrArg _ skipSuffixWhile_string_eq_skipSuffixWhile_toSlice
|
||||
|
||||
public theorem revAll_string_eq_revAll_toSlice {pat : String} {s : Slice} :
|
||||
s.revAll pat = s.revAll pat.toSlice := by
|
||||
simp [revAll, skipSuffixWhile_string_eq_skipSuffixWhile_toSlice]
|
||||
|
||||
end String.Slice
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -56,6 +56,77 @@ theorem eq_append_of_dropPrefix?_char_eq_some {c : Char} {s res : Slice} (h : s.
|
||||
s.copy = singleton c ++ res.copy := by
|
||||
simpa [PatternModel.Matches] using Pattern.Model.eq_append_of_dropPrefix?_eq_some h
|
||||
|
||||
theorem Pos.skip?_char_eq_some_iff {c : Char} {s : Slice} {pos res : s.Pos} :
|
||||
pos.skip? c = some res ↔ ∃ h, res = pos.next h ∧ pos.get h = c := by
|
||||
simp [Pattern.Model.Pos.skip?_eq_some_iff, Char.isLongestMatchAt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_char_eq_none_iff {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
pos.skip? c = none ↔ ∀ h, pos.get h ≠ c := by
|
||||
simp [Pattern.Model.Pos.skip?_eq_none_iff, Char.matchesAt_iff]
|
||||
|
||||
theorem Pos.get_skipWhile_char_ne {c : Char} {s : Slice} {pos : s.Pos} {h} :
|
||||
(pos.skipWhile c).get h ≠ c := by
|
||||
have := Pattern.Model.Pos.not_matchesAt_skipWhile c pos
|
||||
simp_all [Char.matchesAt_iff]
|
||||
|
||||
theorem Pos.skipWhile_char_eq_self_iff_get {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
pos.skipWhile c = pos ↔ ∀ h, pos.get h ≠ c := by
|
||||
simp [Pattern.Model.Pos.skipWhile_eq_self_iff, Char.matchesAt_iff]
|
||||
|
||||
theorem Pos.get_eq_of_lt_skipWhile_char {c : Char} {s : Slice} {pos pos' : s.Pos}
|
||||
(h₁ : pos ≤ pos') (h₂ : pos' < pos.skipWhile c) : pos'.get (ne_endPos_of_lt h₂) = c :=
|
||||
(Char.isLongestMatchAtChain_iff.1 (Pattern.Model.Pos.isLongestMatchAtChain_skipWhile c pos)).2 _ h₁ h₂
|
||||
|
||||
theorem get_skipPrefixWhile_char_ne {c : Char} {s : Slice} {h} :
|
||||
(s.skipPrefixWhile c).get h ≠ c := by
|
||||
simp [skipPrefixWhile_eq_skipWhile_startPos, Pos.get_skipWhile_char_ne]
|
||||
|
||||
theorem get_eq_of_lt_skipPrefixWhile_char {c : Char} {s : Slice} {pos : s.Pos} (h : pos < s.skipPrefixWhile c) :
|
||||
pos.get (Pos.ne_endPos_of_lt h) = c :=
|
||||
Pos.get_eq_of_lt_skipWhile_char (Pos.startPos_le _) (by rwa [skipPrefixWhile_eq_skipWhile_startPos] at h)
|
||||
|
||||
@[simp]
|
||||
theorem all_char_iff {c : Char} {s : Slice} : s.all c ↔ s.copy.toList = List.replicate s.copy.length c := by
|
||||
rw [Bool.eq_iff_iff]
|
||||
simp [Pattern.Model.all_eq_true_iff, Char.isLongestMatchAtChain_startPos_endPos_iff_toList]
|
||||
|
||||
theorem Pos.revSkip?_char_eq_some_iff {c : Char} {s : Slice} {pos res : s.Pos} :
|
||||
pos.revSkip? c = some res ↔ ∃ h, res = pos.prev h ∧ (pos.prev h).get (by simp) = c := by
|
||||
simp [Pattern.Model.Pos.revSkip?_eq_some_iff, Char.isLongestRevMatchAt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_char_eq_none_iff {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkip? c = none ↔ ∀ h, (pos.prev h).get (by simp) ≠ c := by
|
||||
simp [Pattern.Model.Pos.revSkip?_eq_none_iff, Char.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.get_revSkipWhile_char_ne {c : Char} {s : Slice} {pos : s.Pos} {h} :
|
||||
((pos.revSkipWhile c).prev h).get (by simp) ≠ c := by
|
||||
have := Pattern.Model.Pos.not_revMatchesAt_revSkipWhile c pos
|
||||
simp_all [Char.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.revSkipWhile_char_eq_self_iff_get {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkipWhile c = pos ↔ ∀ h, (pos.prev h).get (by simp) ≠ c := by
|
||||
simp [Pattern.Model.Pos.revSkipWhile_eq_self_iff, Char.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.get_eq_of_revSkipWhile_le_char {c : Char} {s : Slice} {pos pos' : s.Pos}
|
||||
(h₁ : pos' < pos) (h₂ : pos.revSkipWhile c ≤ pos') : pos'.get (Pos.ne_endPos_of_lt h₁) = c :=
|
||||
(Char.isLongestRevMatchAtChain_iff.1 (Pattern.Model.Pos.isLongestRevMatchAtChain_revSkipWhile c pos)).2 _ h₂ h₁
|
||||
|
||||
theorem get_skipSuffixWhile_char_ne {c : Char} {s : Slice} {h} :
|
||||
((s.skipSuffixWhile c).prev h).get (by simp) ≠ c := by
|
||||
simp [skipSuffixWhile_eq_revSkipWhile_endPos, Pos.get_revSkipWhile_char_ne]
|
||||
|
||||
theorem get_eq_of_skipSuffixWhile_le_char {c : Char} {s : Slice} {pos : s.Pos}
|
||||
(h : s.skipSuffixWhile c ≤ pos) (h' : pos < s.endPos) :
|
||||
pos.get (Pos.ne_endPos_of_lt h') = c :=
|
||||
Pos.get_eq_of_revSkipWhile_le_char h' (by rwa [skipSuffixWhile_eq_revSkipWhile_endPos] at h)
|
||||
|
||||
@[simp]
|
||||
theorem revAll_char_iff {c : Char} {s : Slice} : s.revAll c ↔ s.copy.toList = List.replicate s.copy.length c := by
|
||||
rw [Bool.eq_iff_iff]
|
||||
simp [Pattern.Model.revAll_eq_true_iff, Char.isLongestRevMatchAtChain_startPos_endPos_iff_toList]
|
||||
|
||||
theorem skipSuffix?_char_eq_some_iff {c : Char} {s : Slice} {pos : s.Pos} :
|
||||
s.skipSuffix? c = some pos ↔ ∃ h, pos = s.endPos.prev h ∧ (s.endPos.prev h).get (by simp) = c := by
|
||||
rw [Pattern.Model.skipSuffix?_eq_some_iff, Char.isLongestRevMatch_iff]
|
||||
@@ -100,19 +171,19 @@ theorem skipPrefix?_char_eq_some_iff {c : Char} {s : String} {pos : s.Pos} :
|
||||
|
||||
theorem startsWith_char_iff_get {c : Char} {s : String} :
|
||||
s.startsWith c ↔ ∃ h, s.startPos.get h = c := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_char_iff_get]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_char_iff_get]
|
||||
|
||||
theorem startsWith_char_eq_false_iff_get {c : Char} {s : String} :
|
||||
s.startsWith c = false ↔ ∀ h, s.startPos.get h ≠ c := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_char_eq_false_iff_get]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_char_eq_false_iff_get]
|
||||
|
||||
theorem startsWith_char_eq_head? {c : Char} {s : String} :
|
||||
s.startsWith c = (s.toList.head? == some c) := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_char_eq_head?]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_char_eq_head?]
|
||||
|
||||
theorem startsWith_char_iff_exists_append {c : Char} {s : String} :
|
||||
s.startsWith c ↔ ∃ t, s = singleton c ++ t := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_char_iff_exists_append]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_char_iff_exists_append]
|
||||
|
||||
theorem startsWith_char_eq_false_iff_forall_append {c : Char} {s : String} :
|
||||
s.startsWith c = false ↔ ∀ t, s ≠ singleton c ++ t := by
|
||||
@@ -130,19 +201,19 @@ theorem skipSuffix?_char_eq_some_iff {c : Char} {s : String} {pos : s.Pos} :
|
||||
|
||||
theorem endsWith_char_iff_get {c : Char} {s : String} :
|
||||
s.endsWith c ↔ ∃ h, (s.endPos.prev h).get (by simp) = c := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_char_iff_get, Pos.prev_toSlice]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_char_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem endsWith_char_eq_false_iff_get {c : Char} {s : String} :
|
||||
s.endsWith c = false ↔ ∀ h, (s.endPos.prev h).get (by simp) ≠ c := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_char_eq_false_iff_get, Pos.prev_toSlice]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_char_eq_false_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem endsWith_char_eq_getLast? {c : Char} {s : String} :
|
||||
s.endsWith c = (s.toList.getLast? == some c) := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_char_eq_getLast?]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_char_eq_getLast?]
|
||||
|
||||
theorem endsWith_char_iff_exists_append {c : Char} {s : String} :
|
||||
s.endsWith c ↔ ∃ t, s = t ++ singleton c := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_char_iff_exists_append]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_char_iff_exists_append]
|
||||
|
||||
theorem endsWith_char_eq_false_iff_forall_append {c : Char} {s : String} :
|
||||
s.endsWith c = false ↔ ∀ t, s ≠ t ++ singleton c := by
|
||||
|
||||
@@ -8,11 +8,16 @@ module
|
||||
prelude
|
||||
public import Init.Data.String.Slice
|
||||
public import Init.Data.String.TakeDrop
|
||||
public import Init.Data.String.Lemmas.Order
|
||||
import Init.Data.String.Lemmas.Pattern.TakeDrop.Basic
|
||||
import Init.Data.String.Lemmas.Pattern.Pred
|
||||
import Init.Data.Option.Lemmas
|
||||
import Init.Data.String.Lemmas.FindPos
|
||||
import Init.Data.String.Lemmas.Intercalate
|
||||
import Init.ByCases
|
||||
import Init.Data.Order.Lemmas
|
||||
import Init.Data.String.OrderInstances
|
||||
import Init.Data.String.Lemmas.Basic
|
||||
|
||||
public section
|
||||
|
||||
@@ -49,6 +54,80 @@ theorem eq_append_of_dropPrefix?_bool_eq_some {p : Char → Bool} {s res : Slice
|
||||
obtain ⟨_, ⟨c, ⟨rfl, h₁⟩⟩, h₂⟩ := by simpa [PatternModel.Matches] using Pattern.Model.eq_append_of_dropPrefix?_eq_some h
|
||||
exact ⟨_, h₂, h₁⟩
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_bool_eq_some_iff {p : Char → Bool} {s : Slice} {pos res : s.Pos} :
|
||||
pos.skip? p = some res ↔ ∃ h, res = pos.next h ∧ p (pos.get h) := by
|
||||
simp [Pattern.Model.Pos.skip?_eq_some_iff, CharPred.isLongestMatchAt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_bool_eq_none_iff {p : Char → Bool} {s : Slice} {pos : s.Pos} :
|
||||
pos.skip? p = none ↔ ∀ h, p (pos.get h) = false := by
|
||||
simp [Pattern.Model.Pos.skip?_eq_none_iff, CharPred.matchesAt_iff]
|
||||
|
||||
theorem Pos.apply_skipWhile_bool_eq_false {p : Char → Bool} {s : Slice} {pos : s.Pos} {h} :
|
||||
p ((pos.skipWhile p).get h) = false := by
|
||||
have := Pattern.Model.Pos.not_matchesAt_skipWhile p pos
|
||||
simp_all [CharPred.matchesAt_iff]
|
||||
|
||||
theorem Pos.skipWhile_bool_eq_self_iff_get {p : Char → Bool} {s : Slice} {pos : s.Pos} :
|
||||
pos.skipWhile p = pos ↔ ∀ h, p (pos.get h) = false := by
|
||||
simp [Pattern.Model.Pos.skipWhile_eq_self_iff, CharPred.matchesAt_iff]
|
||||
|
||||
theorem Pos.apply_eq_true_of_lt_skipWhile_bool {p : Char → Bool} {s : Slice} {pos pos' : s.Pos}
|
||||
(h₁ : pos ≤ pos') (h₂ : pos' < pos.skipWhile p) : p (pos'.get (ne_endPos_of_lt h₂)) = true :=
|
||||
(CharPred.isLongestMatchAtChain_iff.1 (Pattern.Model.Pos.isLongestMatchAtChain_skipWhile p pos)).2 _ h₁ h₂
|
||||
|
||||
theorem apply_skipPrefixWhile_bool_eq_false {p : Char → Bool} {s : Slice} {h} :
|
||||
p ((s.skipPrefixWhile p).get h) = false := by
|
||||
simp [skipPrefixWhile_eq_skipWhile_startPos, Pos.apply_skipWhile_bool_eq_false]
|
||||
|
||||
theorem apply_eq_true_of_lt_skipPrefixWhile_bool {p : Char → Bool} {s : Slice} {pos : s.Pos} (h : pos < s.skipPrefixWhile p) :
|
||||
p (pos.get (Pos.ne_endPos_of_lt h)) = true :=
|
||||
Pos.apply_eq_true_of_lt_skipWhile_bool (Pos.startPos_le _) (skipPrefixWhile_eq_skipWhile_startPos ▸ h)
|
||||
|
||||
@[simp]
|
||||
theorem all_bool_eq {p : Char → Bool} {s : Slice} : s.all p = s.copy.toList.all p := by
|
||||
rw [Bool.eq_iff_iff, Pattern.Model.all_eq_true_iff,
|
||||
CharPred.isLongestMatchAtChain_startPos_endPos_iff_toList, List.all_eq_true]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s : Slice} {pos res : s.Pos} :
|
||||
pos.skip? P = some res ↔ ∃ h, res = pos.next h ∧ P (pos.get h) := by
|
||||
simp [Pos.skip?_prop_eq_skip?_decide, skip?_bool_eq_some_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_prop_eq_none_iff {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} :
|
||||
pos.skip? P = none ↔ ∀ h, ¬ P (pos.get h) := by
|
||||
simp [Pos.skip?_prop_eq_skip?_decide, skip?_bool_eq_none_iff]
|
||||
|
||||
theorem Pos.apply_skipWhile_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} {h} :
|
||||
¬ P ((pos.skipWhile P).get h) := by
|
||||
have := Pattern.Model.Pos.not_matchesAt_skipWhile P pos
|
||||
simp_all [CharPred.Decidable.matchesAt_iff]
|
||||
|
||||
theorem Pos.skipWhile_prop_eq_self_iff_get {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} :
|
||||
pos.skipWhile P = pos ↔ ∀ h, ¬ P (pos.get h) := by
|
||||
simp [Pos.skipWhile_prop_eq_skipWhile_decide, skipWhile_bool_eq_self_iff_get]
|
||||
|
||||
theorem Pos.apply_of_lt_skipWhile_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {pos pos' : s.Pos}
|
||||
(h₁ : pos ≤ pos') (h₂ : pos' < pos.skipWhile P) : P (pos'.get (ne_endPos_of_lt h₂)) := by
|
||||
simp [Pos.skipWhile_prop_eq_skipWhile_decide] at h₂
|
||||
simpa using apply_eq_true_of_lt_skipWhile_bool h₁ h₂
|
||||
|
||||
theorem apply_skipPrefixWhile_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {h} :
|
||||
¬ P ((s.skipPrefixWhile P).get h) := by
|
||||
simp [skipPrefixWhile_eq_skipWhile_startPos, Pos.apply_skipWhile_prop]
|
||||
|
||||
theorem apply_of_lt_skipPrefixWhile_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos}
|
||||
(h : pos < s.skipPrefixWhile P) : P (pos.get (Pos.ne_endPos_of_lt h)) := by
|
||||
simp [skipPrefixWhile_prop_eq_skipPrefixWhile_decide] at h
|
||||
simpa using apply_eq_true_of_lt_skipPrefixWhile_bool h
|
||||
|
||||
@[simp]
|
||||
theorem all_prop_eq {P : Char → Prop} [DecidablePred P] {s : Slice} :
|
||||
s.all P = s.copy.toList.all (decide <| P ·) := by
|
||||
simp [all_prop_eq_all_decide]
|
||||
|
||||
theorem skipPrefix?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} :
|
||||
s.skipPrefix? P = some pos ↔ ∃ h, pos = s.startPos.next h ∧ P (s.startPos.get h) := by
|
||||
simp [skipPrefix?_prop_eq_skipPrefix?_decide, skipPrefix?_bool_eq_some_iff]
|
||||
@@ -118,6 +197,83 @@ theorem eq_append_of_dropSuffix?_prop_eq_some {P : Char → Prop} [DecidablePred
|
||||
rw [dropSuffix?_prop_eq_dropSuffix?_decide] at h
|
||||
simpa using eq_append_of_dropSuffix?_bool_eq_some h
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_bool_eq_some_iff {p : Char → Bool} {s : Slice} {pos res : s.Pos} :
|
||||
pos.revSkip? p = some res ↔ ∃ h, res = pos.prev h ∧ p ((pos.prev h).get (by simp)) := by
|
||||
simp [Pattern.Model.Pos.revSkip?_eq_some_iff, CharPred.isLongestRevMatchAt_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_bool_eq_none_iff {p : Char → Bool} {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkip? p = none ↔ ∀ h, p ((pos.prev h).get (by simp)) = false := by
|
||||
simp [Pattern.Model.Pos.revSkip?_eq_none_iff, CharPred.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.apply_revSkipWhile_bool_eq_false {p : Char → Bool} {s : Slice} {pos : s.Pos} {h} :
|
||||
p (((pos.revSkipWhile p).prev h).get (by simp)) = false := by
|
||||
have := Pattern.Model.Pos.not_revMatchesAt_revSkipWhile p pos
|
||||
simp_all [CharPred.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.revSkipWhile_bool_eq_self_iff_get {p : Char → Bool} {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkipWhile p = pos ↔ ∀ h, p ((pos.prev h).get (by simp)) = false := by
|
||||
simp [Pattern.Model.Pos.revSkipWhile_eq_self_iff, CharPred.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.apply_eq_true_of_revSkipWhile_le_bool {p : Char → Bool} {s : Slice} {pos pos' : s.Pos}
|
||||
(h₁ : pos' < pos) (h₂ : pos.revSkipWhile p ≤ pos') : p (pos'.get (Pos.ne_endPos_of_lt h₁)) = true :=
|
||||
(CharPred.isLongestRevMatchAtChain_iff.1 (Pattern.Model.Pos.isLongestRevMatchAtChain_revSkipWhile p pos)).2 _ h₂ h₁
|
||||
|
||||
theorem apply_skipSuffixWhile_bool_eq_false {p : Char → Bool} {s : Slice} {h} :
|
||||
p (((s.skipSuffixWhile p).prev h).get (by simp)) = false := by
|
||||
simp [skipSuffixWhile_eq_revSkipWhile_endPos, Pos.apply_revSkipWhile_bool_eq_false]
|
||||
|
||||
theorem apply_eq_true_of_skipSuffixWhile_le_bool {p : Char → Bool} {s : Slice} {pos : s.Pos}
|
||||
(h : s.skipSuffixWhile p ≤ pos) (h' : pos < s.endPos) :
|
||||
p (pos.get (Pos.ne_endPos_of_lt h')) = true :=
|
||||
Pos.apply_eq_true_of_revSkipWhile_le_bool h' (skipSuffixWhile_eq_revSkipWhile_endPos ▸ h)
|
||||
|
||||
@[simp]
|
||||
theorem revAll_bool_eq {p : Char → Bool} {s : Slice} : s.revAll p = s.copy.toList.all p := by
|
||||
rw [Bool.eq_iff_iff, Pattern.Model.revAll_eq_true_iff,
|
||||
CharPred.isLongestRevMatchAtChain_startPos_endPos_iff_toList, List.all_eq_true]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s : Slice} {pos res : s.Pos} :
|
||||
pos.revSkip? P = some res ↔ ∃ h, res = pos.prev h ∧ P ((pos.prev h).get (by simp)) := by
|
||||
simp [Pos.revSkip?_prop_eq_revSkip?_decide, revSkip?_bool_eq_some_iff]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_prop_eq_none_iff {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkip? P = none ↔ ∀ h, ¬ P ((pos.prev h).get (by simp)) := by
|
||||
simp [Pos.revSkip?_prop_eq_revSkip?_decide, revSkip?_bool_eq_none_iff]
|
||||
|
||||
theorem Pos.apply_revSkipWhile_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} {h} :
|
||||
¬ P (((pos.revSkipWhile P).prev h).get (by simp)) := by
|
||||
have := Pattern.Model.Pos.not_revMatchesAt_revSkipWhile P pos
|
||||
simp_all [CharPred.Decidable.revMatchesAt_iff]
|
||||
|
||||
theorem Pos.revSkipWhile_prop_eq_self_iff_get {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos} :
|
||||
pos.revSkipWhile P = pos ↔ ∀ h, ¬ P ((pos.prev h).get (by simp)) := by
|
||||
simp [Pos.revSkipWhile_prop_eq_revSkipWhile_decide, revSkipWhile_bool_eq_self_iff_get]
|
||||
|
||||
theorem Pos.apply_of_revSkipWhile_le_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {pos pos' : s.Pos}
|
||||
(h₁ : pos' < pos) (h₂ : pos.revSkipWhile P ≤ pos') : P (pos'.get (Pos.ne_endPos_of_lt h₁)) := by
|
||||
have h₂' : pos.revSkipWhile (decide <| P ·) ≤ pos' :=
|
||||
Pos.revSkipWhile_prop_eq_revSkipWhile_decide (p := P) pos ▸ h₂
|
||||
simpa using Pos.apply_eq_true_of_revSkipWhile_le_bool h₁ h₂'
|
||||
|
||||
theorem apply_skipSuffixWhile_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {h} :
|
||||
¬ P (((s.skipSuffixWhile P).prev h).get (by simp)) := by
|
||||
have := Pattern.Model.Pos.not_revMatchesAt_revSkipWhile P s.endPos
|
||||
simp_all [CharPred.Decidable.revMatchesAt_iff, skipSuffixWhile_eq_revSkipWhile_endPos]
|
||||
|
||||
theorem apply_of_skipSuffixWhile_le_prop {P : Char → Prop} [DecidablePred P] {s : Slice} {pos : s.Pos}
|
||||
(h : s.skipSuffixWhile P ≤ pos) (h' : pos < s.endPos) :
|
||||
P (pos.get (Pos.ne_endPos_of_lt h')) :=
|
||||
Pos.apply_of_revSkipWhile_le_prop h' (skipSuffixWhile_eq_revSkipWhile_endPos (pat := P) ▸ h)
|
||||
|
||||
@[simp]
|
||||
theorem revAll_prop_eq {P : Char → Prop} [DecidablePred P] {s : Slice} :
|
||||
s.revAll P = s.copy.toList.all (decide <| P ·) := by
|
||||
simp [revAll_prop_eq_revAll_decide, revAll_bool_eq]
|
||||
|
||||
end Slice
|
||||
|
||||
theorem skipPrefix?_bool_eq_some_iff {p : Char → Bool} {s : String} {pos : s.Pos} :
|
||||
@@ -127,21 +283,58 @@ theorem skipPrefix?_bool_eq_some_iff {p : Char → Bool} {s : String} {pos : s.P
|
||||
|
||||
theorem startsWith_bool_iff_get {p : Char → Bool} {s : String} :
|
||||
s.startsWith p ↔ ∃ h, p (s.startPos.get h) = true := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_bool_iff_get]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_bool_iff_get]
|
||||
|
||||
theorem startsWith_bool_eq_false_iff_get {p : Char → Bool} {s : String} :
|
||||
s.startsWith p = false ↔ ∀ h, p (s.startPos.get h) = false := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_bool_eq_false_iff_get]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_bool_eq_false_iff_get]
|
||||
|
||||
theorem startsWith_bool_eq_head? {p : Char → Bool} {s : String} :
|
||||
s.startsWith p = s.toList.head?.any p := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_bool_eq_head?]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_bool_eq_head?]
|
||||
|
||||
theorem eq_append_of_dropPrefix?_bool_eq_some {p : Char → Bool} {s : String} {res : Slice} (h : s.dropPrefix? p = some res) :
|
||||
∃ c, s = singleton c ++ res.copy ∧ p c = true := by
|
||||
rw [dropPrefix?_eq_dropPrefix?_toSlice] at h
|
||||
simpa using Slice.eq_append_of_dropPrefix?_bool_eq_some h
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_bool_eq_some_iff {p : Char → Bool} {s : String} {pos res : s.Pos} :
|
||||
pos.skip? p = some res ↔ ∃ h, res = pos.next h ∧ p (pos.get h) := by
|
||||
simp [skip?_eq_skip?_toSlice, ← toSlice_inj, toSlice_next]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_bool_eq_none_iff {p : Char → Bool} {s : String} {pos : s.Pos} :
|
||||
pos.skip? p = none ↔ ∀ h, p (pos.get h) = false := by
|
||||
simp [skip?_eq_skip?_toSlice]
|
||||
|
||||
theorem Pos.apply_skipWhile_bool_eq_false {p : Char → Bool} {s : String} {pos : s.Pos} {h} :
|
||||
p ((pos.skipWhile p).get h) = false := by
|
||||
simp [skipWhile_eq_skipWhile_toSlice, Slice.Pos.apply_skipWhile_bool_eq_false]
|
||||
|
||||
theorem Pos.skipWhile_bool_eq_self_iff_get {p : Char → Bool} {s : String} {pos : s.Pos} :
|
||||
pos.skipWhile p = pos ↔ ∀ h, p (pos.get h) = false := by
|
||||
simp [skipWhile_eq_skipWhile_toSlice, ← toSlice_inj, Slice.Pos.skipWhile_bool_eq_self_iff_get]
|
||||
|
||||
theorem Pos.apply_eq_true_of_lt_skipWhile_bool {p : Char → Bool} {s : String} {pos pos' : s.Pos}
|
||||
(h₁ : pos ≤ pos') (h₂ : pos' < pos.skipWhile p) : p (pos'.get (ne_endPos_of_lt h₂)) = true := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.Pos.apply_eq_true_of_lt_skipWhile_bool (toSlice_le_toSlice_iff.2 h₁)
|
||||
(by simpa [skipWhile_eq_skipWhile_toSlice] using h₂)
|
||||
|
||||
theorem apply_skipPrefixWhile_bool_eq_false {p : Char → Bool} {s : String} {h} :
|
||||
p ((s.skipPrefixWhile p).get h) = false := by
|
||||
simp [skipPrefixWhile_eq_skipPrefixWhile_toSlice, Slice.apply_skipPrefixWhile_bool_eq_false]
|
||||
|
||||
theorem apply_eq_true_of_lt_skipPrefixWhile_bool {p : Char → Bool} {s : String} {pos : s.Pos} (h : pos < s.skipPrefixWhile p) :
|
||||
p (pos.get (Pos.ne_endPos_of_lt h)) = true := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.apply_eq_true_of_lt_skipPrefixWhile_bool (by simpa [skipPrefixWhile_eq_skipPrefixWhile_toSlice] using h)
|
||||
|
||||
@[simp]
|
||||
theorem all_bool_eq {p : Char → Bool} {s : String} : s.all p = s.toList.all p := by
|
||||
simp [← all_toSlice]
|
||||
|
||||
theorem skipPrefix?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} :
|
||||
s.skipPrefix? P = some pos ↔ ∃ h, pos = s.startPos.next h ∧ P (s.startPos.get h) := by
|
||||
simp [skipPrefix?_eq_skipPrefix?_toSlice, Slice.skipPrefix?_prop_eq_some_iff, ← Pos.toSlice_inj,
|
||||
@@ -149,15 +342,15 @@ theorem skipPrefix?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s :
|
||||
|
||||
theorem startsWith_prop_iff_get {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.startsWith P ↔ ∃ h, P (s.startPos.get h) := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_prop_iff_get]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_prop_iff_get]
|
||||
|
||||
theorem startsWith_prop_eq_false_iff_get {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.startsWith P = false ↔ ∀ h, ¬ P (s.startPos.get h) := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_prop_eq_false_iff_get]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_prop_eq_false_iff_get]
|
||||
|
||||
theorem startsWith_prop_eq_head? {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.startsWith P = s.toList.head?.any (decide <| P ·) := by
|
||||
simp [startsWith_eq_startsWith_toSlice, Slice.startsWith_prop_eq_head?]
|
||||
simp [← startsWith_toSlice, Slice.startsWith_prop_eq_head?]
|
||||
|
||||
theorem eq_append_of_dropPrefix?_prop_eq_some {P : Char → Prop} [DecidablePred P] {s : String} {res : Slice}
|
||||
(h : s.dropPrefix? P = some res) : ∃ c, s = singleton c ++ res.copy ∧ P c := by
|
||||
@@ -171,15 +364,15 @@ theorem skipSuffix?_bool_eq_some_iff {p : Char → Bool} {s : String} {pos : s.P
|
||||
|
||||
theorem endsWith_bool_iff_get {p : Char → Bool} {s : String} :
|
||||
s.endsWith p ↔ ∃ h, p ((s.endPos.prev h).get (by simp)) = true := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_bool_iff_get, Pos.prev_toSlice]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_bool_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem endsWith_bool_eq_false_iff_get {p : Char → Bool} {s : String} :
|
||||
s.endsWith p = false ↔ ∀ h, p ((s.endPos.prev h).get (by simp)) = false := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_bool_eq_false_iff_get, Pos.prev_toSlice]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_bool_eq_false_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem endsWith_bool_eq_getLast? {p : Char → Bool} {s : String} :
|
||||
s.endsWith p = s.toList.getLast?.any p := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_bool_eq_getLast?]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_bool_eq_getLast?]
|
||||
|
||||
theorem eq_append_of_dropSuffix?_bool_eq_some {p : Char → Bool} {s : String} {res : Slice} (h : s.dropSuffix? p = some res) :
|
||||
∃ c, s = res.copy ++ singleton c ∧ p c = true := by
|
||||
@@ -193,19 +386,154 @@ theorem skipSuffix?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s :
|
||||
|
||||
theorem endsWith_prop_iff_get {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.endsWith P ↔ ∃ h, P ((s.endPos.prev h).get (by simp)) := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_prop_iff_get, Pos.prev_toSlice]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_prop_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem endsWith_prop_eq_false_iff_get {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.endsWith P = false ↔ ∀ h, ¬ P ((s.endPos.prev h).get (by simp)) := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_prop_eq_false_iff_get, Pos.prev_toSlice]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_prop_eq_false_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem endsWith_prop_eq_getLast? {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.endsWith P = s.toList.getLast?.any (decide <| P ·) := by
|
||||
simp [endsWith_eq_endsWith_toSlice, Slice.endsWith_prop_eq_getLast?]
|
||||
simp [← endsWith_toSlice, Slice.endsWith_prop_eq_getLast?]
|
||||
|
||||
theorem eq_append_of_dropSuffix?_prop_eq_some {P : Char → Prop} [DecidablePred P] {s : String} {res : Slice}
|
||||
(h : s.dropSuffix? P = some res) : ∃ c, s = res.copy ++ singleton c ∧ P c := by
|
||||
rw [dropSuffix?_eq_dropSuffix?_toSlice] at h
|
||||
simpa using Slice.eq_append_of_dropSuffix?_prop_eq_some h
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s : String} {pos res : s.Pos} :
|
||||
pos.skip? P = some res ↔ ∃ h, res = pos.next h ∧ P (pos.get h) := by
|
||||
simp [skip?_eq_skip?_toSlice, ← toSlice_inj, toSlice_next]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.skip?_prop_eq_none_iff {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} :
|
||||
pos.skip? P = none ↔ ∀ h, ¬ P (pos.get h) := by
|
||||
simp [skip?_eq_skip?_toSlice]
|
||||
|
||||
theorem Pos.apply_skipWhile_prop {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} {h} :
|
||||
¬ P ((pos.skipWhile P).get h) := by
|
||||
simp [skipWhile_eq_skipWhile_toSlice, Slice.Pos.apply_skipWhile_prop]
|
||||
|
||||
theorem Pos.skipWhile_prop_eq_self_iff_get {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} :
|
||||
pos.skipWhile P = pos ↔ ∀ h, ¬ P (pos.get h) := by
|
||||
simp [skipWhile_eq_skipWhile_toSlice, ← toSlice_inj, Slice.Pos.skipWhile_prop_eq_self_iff_get]
|
||||
|
||||
theorem Pos.apply_of_lt_skipWhile_prop {P : Char → Prop} [DecidablePred P] {s : String} {pos pos' : s.Pos}
|
||||
(h₁ : pos ≤ pos') (h₂ : pos' < pos.skipWhile P) : P (pos'.get (ne_endPos_of_lt h₂)) := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.Pos.apply_of_lt_skipWhile_prop (toSlice_le_toSlice_iff.2 h₁)
|
||||
(by simpa [skipWhile_eq_skipWhile_toSlice] using h₂)
|
||||
|
||||
theorem apply_skipPrefixWhile_prop {P : Char → Prop} [DecidablePred P] {s : String} {h} :
|
||||
¬ P ((s.skipPrefixWhile P).get h) := by
|
||||
simp [skipPrefixWhile_eq_skipPrefixWhile_toSlice, Slice.apply_skipPrefixWhile_prop]
|
||||
|
||||
theorem apply_of_lt_skipPrefixWhile_prop {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos}
|
||||
(h : pos < s.skipPrefixWhile P) : P (pos.get (Pos.ne_endPos_of_lt h)) := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.apply_of_lt_skipPrefixWhile_prop (by simpa [skipPrefixWhile_eq_skipPrefixWhile_toSlice] using h)
|
||||
|
||||
@[simp]
|
||||
theorem all_prop_eq {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.all P = s.toList.all (decide <| P ·) := by
|
||||
simp [← all_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_bool_eq_some_iff {p : Char → Bool} {s : String} {pos res : s.Pos} :
|
||||
pos.revSkip? p = some res ↔ ∃ h, res = pos.prev h ∧ p ((pos.prev h).get (by simp)) := by
|
||||
simp [revSkip?_eq_revSkip?_toSlice, ← toSlice_inj, toSlice_prev, get_eq_get_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_bool_eq_none_iff {p : Char → Bool} {s : String} {pos : s.Pos} :
|
||||
pos.revSkip? p = none ↔ ∀ h, p ((pos.prev h).get (by simp)) = false := by
|
||||
simp [revSkip?_eq_revSkip?_toSlice, Pos.prev_toSlice]
|
||||
|
||||
theorem Pos.apply_revSkipWhile_bool_eq_false {p : Char → Bool} {s : String} {pos : s.Pos} {h} :
|
||||
p (((pos.revSkipWhile p).prev h).get (by simp)) = false := by
|
||||
have h' : pos.toSlice.revSkipWhile p ≠ s.toSlice.startPos := by
|
||||
simpa [Pos.revSkipWhile_eq_revSkipWhile_toSlice, ← toSlice_inj] using h
|
||||
have := Slice.Pos.apply_revSkipWhile_bool_eq_false (pos := pos.toSlice) (h := h')
|
||||
simpa [Pos.revSkipWhile_eq_revSkipWhile_toSlice, Pos.prev_ofToSlice]
|
||||
|
||||
theorem Pos.revSkipWhile_bool_eq_self_iff_get {p : Char → Bool} {s : String} {pos : s.Pos} :
|
||||
pos.revSkipWhile p = pos ↔ ∀ h, p ((pos.prev h).get (by simp)) = false := by
|
||||
simp [Pos.revSkipWhile_eq_revSkipWhile_toSlice, ← toSlice_inj, Slice.Pos.revSkipWhile_bool_eq_self_iff_get,
|
||||
Pos.prev_toSlice]
|
||||
|
||||
theorem Pos.apply_eq_true_of_revSkipWhile_le_bool {p : Char → Bool} {s : String} {pos pos' : s.Pos}
|
||||
(h₁ : pos' < pos) (h₂ : pos.revSkipWhile p ≤ pos') : p (pos'.get (ne_endPos_of_lt h₁)) = true := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.Pos.apply_eq_true_of_revSkipWhile_le_bool
|
||||
(Pos.toSlice_lt_toSlice_iff.2 h₁)
|
||||
(by simpa [Pos.revSkipWhile_eq_revSkipWhile_toSlice, Pos.ofToSlice_le_iff] using h₂)
|
||||
|
||||
theorem apply_skipSuffixWhile_bool_eq_false {p : Char → Bool} {s : String} {h} :
|
||||
p (((s.skipSuffixWhile p).prev h).get (by simp)) = false := by
|
||||
have h' : s.toSlice.skipSuffixWhile p ≠ s.toSlice.startPos := by
|
||||
simpa [skipSuffixWhile_eq_skipSuffixWhile_toSlice, ← Pos.toSlice_inj] using h
|
||||
have := Slice.apply_skipSuffixWhile_bool_eq_false (s := s.toSlice) (h := h')
|
||||
simpa [skipSuffixWhile_eq_skipSuffixWhile_toSlice, Pos.prev_ofToSlice]
|
||||
|
||||
theorem apply_eq_true_of_skipSuffixWhile_le_bool {p : Char → Bool} {s : String} {pos : s.Pos}
|
||||
(h : s.skipSuffixWhile p ≤ pos) (h' : pos < s.endPos) :
|
||||
p (pos.get (Pos.ne_endPos_of_lt h')) = true := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.apply_eq_true_of_skipSuffixWhile_le_bool
|
||||
(by simpa [skipSuffixWhile_eq_skipSuffixWhile_toSlice, Pos.ofToSlice_le_iff] using h)
|
||||
(by simpa [Pos.toSlice_lt_toSlice_iff] using h')
|
||||
|
||||
@[simp]
|
||||
theorem revAll_bool_eq {p : Char → Bool} {s : String} : s.revAll p = s.toList.all p := by
|
||||
simp [← revAll_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_prop_eq_some_iff {P : Char → Prop} [DecidablePred P] {s : String} {pos res : s.Pos} :
|
||||
pos.revSkip? P = some res ↔ ∃ h, res = pos.prev h ∧ P ((pos.prev h).get (by simp)) := by
|
||||
simp [revSkip?_eq_revSkip?_toSlice, ← toSlice_inj, toSlice_prev, get_eq_get_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem Pos.revSkip?_prop_eq_none_iff {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} :
|
||||
pos.revSkip? P = none ↔ ∀ h, ¬ P ((pos.prev h).get (by simp)) := by
|
||||
simp [revSkip?_eq_revSkip?_toSlice, Pos.prev_toSlice]
|
||||
|
||||
theorem Pos.apply_revSkipWhile_prop {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} {h} :
|
||||
¬ P (((pos.revSkipWhile P).prev h).get (by simp)) := by
|
||||
have h' : pos.toSlice.revSkipWhile P ≠ s.toSlice.startPos := by
|
||||
simpa [Pos.revSkipWhile_eq_revSkipWhile_toSlice, ← toSlice_inj] using h
|
||||
have := Slice.Pos.apply_revSkipWhile_prop (pos := pos.toSlice) (h := h')
|
||||
simpa [Pos.revSkipWhile_eq_revSkipWhile_toSlice, Pos.prev_ofToSlice]
|
||||
|
||||
theorem Pos.revSkipWhile_prop_eq_self_iff_get {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos} :
|
||||
pos.revSkipWhile P = pos ↔ ∀ h, ¬ P ((pos.prev h).get (by simp)) := by
|
||||
simp [Pos.revSkipWhile_eq_revSkipWhile_toSlice, ← toSlice_inj,
|
||||
Slice.Pos.revSkipWhile_prop_eq_self_iff_get, Pos.prev_toSlice]
|
||||
|
||||
theorem Pos.apply_of_revSkipWhile_le_prop {P : Char → Prop} [DecidablePred P] {s : String} {pos pos' : s.Pos}
|
||||
(h₁ : pos' < pos) (h₂ : pos.revSkipWhile P ≤ pos') : P (pos'.get (ne_endPos_of_lt h₁)) := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.Pos.apply_of_revSkipWhile_le_prop
|
||||
(Pos.toSlice_lt_toSlice_iff.2 h₁)
|
||||
(by simpa [Pos.revSkipWhile_eq_revSkipWhile_toSlice, Pos.ofToSlice_le_iff] using h₂)
|
||||
|
||||
theorem apply_skipSuffixWhile_prop {P : Char → Prop} [DecidablePred P] {s : String} {h} :
|
||||
¬ P (((s.skipSuffixWhile P).prev h).get (by simp)) := by
|
||||
have h' : s.toSlice.skipSuffixWhile P ≠ s.toSlice.startPos := by
|
||||
simpa [skipSuffixWhile_eq_skipSuffixWhile_toSlice, ← Pos.toSlice_inj] using h
|
||||
have := Slice.apply_skipSuffixWhile_prop (s := s.toSlice) (h := h')
|
||||
simpa [skipSuffixWhile_eq_skipSuffixWhile_toSlice, Pos.prev_ofToSlice]
|
||||
|
||||
theorem apply_of_skipSuffixWhile_le_prop {P : Char → Prop} [DecidablePred P] {s : String} {pos : s.Pos}
|
||||
(h : s.skipSuffixWhile P ≤ pos) (h' : pos < s.endPos) :
|
||||
P (pos.get (Pos.ne_endPos_of_lt h')) := by
|
||||
rw [Pos.get_eq_get_toSlice]
|
||||
exact Slice.apply_of_skipSuffixWhile_le_prop
|
||||
(by simpa [skipSuffixWhile_eq_skipSuffixWhile_toSlice, Pos.ofToSlice_le_iff] using h)
|
||||
(by simpa [Pos.toSlice_lt_toSlice_iff] using h')
|
||||
|
||||
@[simp]
|
||||
theorem revAll_prop_eq {P : Char → Prop} [DecidablePred P] {s : String} :
|
||||
s.revAll P = s.toList.all (decide <| P ·) := by
|
||||
simp [← revAll_toSlice]
|
||||
|
||||
end String
|
||||
|
||||
@@ -30,11 +30,7 @@ theorem skipPrefix?_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true)
|
||||
@[simp]
|
||||
theorem skipPrefix?_slice_eq_some_iff {pat s : Slice} {pos : s.Pos} :
|
||||
s.skipPrefix? pat = some pos ↔ ∃ t, pos.Splits pat.copy t := by
|
||||
match h : pat.isEmpty with
|
||||
| false =>
|
||||
have := ForwardSliceSearcher.lawfulForwardPatternModel h
|
||||
rw [Pattern.Model.skipPrefix?_eq_some_iff, ForwardSliceSearcher.isLongestMatch_iff_splits h]
|
||||
| true => simp [skipPrefix?_slice_of_isEmpty h, (show pat.copy = "" by simpa), eq_comm]
|
||||
rw [Pattern.Model.skipPrefix?_eq_some_iff, ForwardSliceSearcher.isLongestMatch_iff_splits]
|
||||
|
||||
theorem startsWith_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true) :
|
||||
s.startsWith pat = true := by
|
||||
@@ -43,14 +39,10 @@ theorem startsWith_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true)
|
||||
@[simp]
|
||||
theorem startsWith_slice_iff {pat s : Slice} :
|
||||
s.startsWith pat ↔ pat.copy.toList <+: s.copy.toList := by
|
||||
match h : pat.isEmpty with
|
||||
| false =>
|
||||
have := ForwardSliceSearcher.lawfulForwardPatternModel h
|
||||
simp only [Model.startsWith_iff, ForwardSliceSearcher.matchesAt_iff_splits h,
|
||||
splits_startPos_iff, exists_and_left, exists_eq_left]
|
||||
simp only [← toList_inj, toList_append, List.prefix_iff_exists_append_eq]
|
||||
exact ⟨fun ⟨t, ht⟩ => ⟨t.toList, by simp [ht]⟩, fun ⟨t, ht⟩ => ⟨String.ofList t, by simp [← ht]⟩⟩
|
||||
| true => simp [startsWith_slice_of_isEmpty h, (show pat.copy = "" by simpa)]
|
||||
simp only [Model.startsWith_iff, ForwardSliceSearcher.matchesAt_iff_splits,
|
||||
splits_startPos_iff, exists_and_left, exists_eq_left]
|
||||
simp only [← toList_inj, toList_append, List.prefix_iff_exists_append_eq]
|
||||
exact ⟨fun ⟨t, ht⟩ => ⟨t.toList, by simp [ht]⟩, fun ⟨t, ht⟩ => ⟨String.ofList t, by simp [← ht]⟩⟩
|
||||
|
||||
@[simp]
|
||||
theorem startsWith_slice_eq_false_iff {pat s : Slice} :
|
||||
@@ -63,14 +55,18 @@ theorem dropPrefix?_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true)
|
||||
|
||||
theorem eq_append_of_dropPrefix?_slice_eq_some {pat s res : Slice} (h : s.dropPrefix? pat = some res) :
|
||||
s.copy = pat.copy ++ res.copy := by
|
||||
match hpat : pat.isEmpty with
|
||||
| false =>
|
||||
have := ForwardSliceSearcher.lawfulForwardPatternModel hpat
|
||||
have := Pattern.Model.eq_append_of_dropPrefix?_eq_some h
|
||||
simp only [PatternModel.Matches] at this
|
||||
obtain ⟨_, ⟨-, rfl⟩, h⟩ := this
|
||||
exact h
|
||||
| true => simp [Option.some.inj (h ▸ dropPrefix?_slice_of_isEmpty hpat), (show pat.copy = "" by simpa)]
|
||||
have := Pattern.Model.eq_append_of_dropPrefix?_eq_some h
|
||||
simp only [PatternModel.Matches] at this
|
||||
obtain ⟨_, ⟨-, rfl⟩, h⟩ := this
|
||||
exact h
|
||||
|
||||
@[simp]
|
||||
theorem all_slice_iff {pat s : Slice} : s.all pat ↔ ∃ n, s.copy = String.join (List.replicate n pat.copy) := by
|
||||
simp [Pattern.Model.all_eq_true_iff, ForwardSliceSearcher.isLongestMatchAtChain_startPos_endPos_iff]
|
||||
|
||||
@[simp]
|
||||
theorem revAll_slice_iff {pat s : Slice} : s.revAll pat ↔ ∃ n, s.copy = String.join (List.replicate n pat.copy) := by
|
||||
simp [Pattern.Model.revAll_eq_true_iff, ForwardSliceSearcher.isLongestRevMatchAtChain_startPos_endPos_iff]
|
||||
|
||||
@[simp]
|
||||
theorem skipPrefix?_string_eq_some_iff {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
@@ -104,6 +100,7 @@ theorem eq_append_of_dropPrefix?_string_eq_some {pat : String} {s res : Slice} (
|
||||
rw [dropPrefix?_string_eq_dropPrefix?_toSlice] at h
|
||||
simpa using eq_append_of_dropPrefix?_slice_eq_some h
|
||||
|
||||
|
||||
theorem skipSuffix?_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true) :
|
||||
s.skipSuffix? pat = some s.endPos := by
|
||||
rw [skipSuffix?_eq_backwardPatternSkipSuffix?, BackwardSliceSearcher.skipSuffix?_of_isEmpty hpat]
|
||||
@@ -111,11 +108,7 @@ theorem skipSuffix?_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true)
|
||||
@[simp]
|
||||
theorem skipSuffix?_slice_eq_some_iff {pat s : Slice} {pos : s.Pos} :
|
||||
s.skipSuffix? pat = some pos ↔ ∃ t, pos.Splits t pat.copy := by
|
||||
match h : pat.isEmpty with
|
||||
| false =>
|
||||
have := BackwardSliceSearcher.lawfulBackwardPatternModel h
|
||||
rw [Pattern.Model.skipSuffix?_eq_some_iff, ForwardSliceSearcher.isLongestRevMatch_iff_splits h]
|
||||
| true => simp [skipSuffix?_slice_of_isEmpty h, (show pat.copy = "" by simpa), eq_comm]
|
||||
rw [Pattern.Model.skipSuffix?_eq_some_iff, ForwardSliceSearcher.isLongestRevMatch_iff_splits]
|
||||
|
||||
theorem endsWith_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true) :
|
||||
s.endsWith pat = true := by
|
||||
@@ -124,14 +117,10 @@ theorem endsWith_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true) :
|
||||
@[simp]
|
||||
theorem endsWith_slice_iff {pat s : Slice} :
|
||||
s.endsWith pat ↔ pat.copy.toList <:+ s.copy.toList := by
|
||||
match h : pat.isEmpty with
|
||||
| false =>
|
||||
have := BackwardSliceSearcher.lawfulBackwardPatternModel h
|
||||
simp only [Model.endsWith_iff, ForwardSliceSearcher.revMatchesAt_iff_splits h,
|
||||
splits_endPos_iff, exists_eq_right]
|
||||
simp only [← toList_inj, toList_append, List.suffix_iff_exists_append_eq]
|
||||
exact ⟨fun ⟨t, ht⟩ => ⟨t.toList, by simp [ht]⟩, fun ⟨t, ht⟩ => ⟨String.ofList t, by simp [← ht]⟩⟩
|
||||
| true => simp [endsWith_slice_of_isEmpty h, (show pat.copy = "" by simpa)]
|
||||
simp only [Model.endsWith_iff, ForwardSliceSearcher.revMatchesAt_iff_splits,
|
||||
splits_endPos_iff, exists_eq_right]
|
||||
simp only [← toList_inj, toList_append, List.suffix_iff_exists_append_eq]
|
||||
exact ⟨fun ⟨t, ht⟩ => ⟨t.toList, by simp [ht]⟩, fun ⟨t, ht⟩ => ⟨String.ofList t, by simp [← ht]⟩⟩
|
||||
|
||||
@[simp]
|
||||
theorem endsWith_slice_eq_false_iff {pat s : Slice} :
|
||||
@@ -144,14 +133,10 @@ theorem dropSuffix?_slice_of_isEmpty {pat s : Slice} (hpat : pat.isEmpty = true)
|
||||
|
||||
theorem eq_append_of_dropSuffix?_slice_eq_some {pat s res : Slice} (h : s.dropSuffix? pat = some res) :
|
||||
s.copy = res.copy ++ pat.copy := by
|
||||
match hpat : pat.isEmpty with
|
||||
| false =>
|
||||
have := BackwardSliceSearcher.lawfulBackwardPatternModel hpat
|
||||
have := Pattern.Model.eq_append_of_dropSuffix?_eq_some h
|
||||
simp only [PatternModel.Matches] at this
|
||||
obtain ⟨_, ⟨-, rfl⟩, h⟩ := this
|
||||
exact h
|
||||
| true => simp [Option.some.inj (h ▸ dropSuffix?_slice_of_isEmpty hpat), (show pat.copy = "" by simpa)]
|
||||
have := Pattern.Model.eq_append_of_dropSuffix?_eq_some h
|
||||
simp only [PatternModel.Matches] at this
|
||||
obtain ⟨_, ⟨-, rfl⟩, h⟩ := this
|
||||
exact h
|
||||
|
||||
@[simp]
|
||||
theorem skipSuffix?_string_eq_some_iff' {pat : String} {s : Slice} {pos : s.Pos} :
|
||||
@@ -208,12 +193,12 @@ theorem startsWith_slice_of_isEmpty {pat : Slice} {s : String} (hpat : pat.isEmp
|
||||
@[simp]
|
||||
theorem startsWith_slice_iff {pat : Slice} {s : String} :
|
||||
s.startsWith pat ↔ pat.copy.toList <+: s.toList := by
|
||||
simp [startsWith_eq_startsWith_toSlice]
|
||||
simp [← startsWith_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem startsWith_slice_eq_false_iff {pat : Slice} {s : String} :
|
||||
s.startsWith pat = false ↔ ¬ (pat.copy.toList <+: s.toList) := by
|
||||
simp [startsWith_eq_startsWith_toSlice]
|
||||
simp [← startsWith_toSlice]
|
||||
|
||||
theorem dropPrefix?_slice_of_isEmpty {pat : Slice} {s : String} (hpat : pat.isEmpty = true) :
|
||||
s.dropPrefix? pat = some s.toSlice := by
|
||||
@@ -239,21 +224,21 @@ theorem skipPrefix?_string_eq_some_iff {pat s : String} {pos : s.Pos} :
|
||||
|
||||
@[simp]
|
||||
theorem startsWith_string_empty {s : String} : s.startsWith "" = true := by
|
||||
simp [startsWith_eq_startsWith_toSlice]
|
||||
simp [← startsWith_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem startsWith_string_iff {pat s : String} :
|
||||
s.startsWith pat ↔ pat.toList <+: s.toList := by
|
||||
simp [startsWith_eq_startsWith_toSlice]
|
||||
simp [← startsWith_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem startsWith_string_eq_false_iff {pat s : String} :
|
||||
s.startsWith pat = false ↔ ¬ (pat.toList <+: s.toList) := by
|
||||
simp [startsWith_eq_startsWith_toSlice]
|
||||
simp [← startsWith_toSlice]
|
||||
|
||||
@[simp]
|
||||
theorem dropPrefix?_string_empty {s : String} : s.dropPrefix? "" = some s.toSlice := by
|
||||
simp [dropPrefix?_eq_dropPrefix?_toSlice]
|
||||
simp [← dropPrefix?_toSlice]
|
||||
|
||||
theorem eq_append_of_dropPrefix?_string_eq_some {s pat : String} {res : Slice} (h : s.dropPrefix? pat = some res) :
|
||||
s = pat ++ res.copy := by
|
||||
|
||||
@@ -99,6 +99,11 @@ theorem Pos.splits {s : String} (p : s.Pos) :
|
||||
eq_append := by simp [← toByteArray_inj, Slice.toByteArray_copy, ← size_toByteArray]
|
||||
offset_eq_rawEndPos := by simp
|
||||
|
||||
@[simp]
|
||||
theorem sliceTo_append_sliceFrom {s : String} {pos : s.Pos} :
|
||||
(s.sliceTo pos).copy ++ (s.sliceFrom pos).copy = s :=
|
||||
pos.splits.eq_append.symm
|
||||
|
||||
theorem Slice.Pos.splits {s : Slice} (p : s.Pos) :
|
||||
p.Splits (s.sliceTo p).copy (s.sliceFrom p).copy where
|
||||
eq_append := copy_eq_copy_sliceTo
|
||||
@@ -375,6 +380,10 @@ theorem Slice.copy_sliceTo_eq_iff_exists_splits {s : Slice} {p : s.Pos} {t₁ :
|
||||
· rintro ⟨t₂, h⟩
|
||||
exact p.splits.eq_left h
|
||||
|
||||
theorem Slice.copy_sliceTo_eq_iff_splits {s : Slice} {p : s.Pos} {t₁ : String} :
|
||||
(s.sliceTo p).copy = t₁ ↔ p.Splits t₁ (s.sliceFrom p).copy :=
|
||||
⟨fun h => h ▸ p.splits, p.splits.eq_left⟩
|
||||
|
||||
theorem Slice.copy_sliceFrom_eq_iff_exists_splits {s : Slice} {p : s.Pos} {t₂ : String} :
|
||||
(s.sliceFrom p).copy = t₂ ↔ ∃ t₁, p.Splits t₁ t₂ := by
|
||||
refine ⟨?_, ?_⟩
|
||||
@@ -383,14 +392,26 @@ theorem Slice.copy_sliceFrom_eq_iff_exists_splits {s : Slice} {p : s.Pos} {t₂
|
||||
· rintro ⟨t₂, h⟩
|
||||
exact p.splits.eq_right h
|
||||
|
||||
theorem Slice.copy_sliceFrom_eq_iff_splits {s : Slice} {p : s.Pos} {t₂ : String} :
|
||||
(s.sliceFrom p).copy = t₂ ↔ p.Splits (s.sliceTo p).copy t₂ :=
|
||||
⟨fun h => h ▸ p.splits, p.splits.eq_right⟩
|
||||
|
||||
theorem copy_sliceTo_eq_iff_exists_splits {s : String} {p : s.Pos} {t₁ : String} :
|
||||
(s.sliceTo p).copy = t₁ ↔ ∃ t₂, p.Splits t₁ t₂ := by
|
||||
simp [← Pos.splits_toSlice_iff, ← Slice.copy_sliceTo_eq_iff_exists_splits]
|
||||
|
||||
theorem copy_sliceTo_eq_iff_splits {s : String} {p : s.Pos} {t₁ : String} :
|
||||
(s.sliceTo p).copy = t₁ ↔ p.Splits t₁ (s.sliceFrom p).copy :=
|
||||
⟨fun h => h ▸ p.splits, p.splits.eq_left⟩
|
||||
|
||||
theorem copy_sliceFrom_eq_iff_exists_splits {s : String} {p : s.Pos} {t₂ : String} :
|
||||
(s.sliceFrom p).copy = t₂ ↔ ∃ t₁, p.Splits t₁ t₂ := by
|
||||
simp [← Pos.splits_toSlice_iff, ← Slice.copy_sliceFrom_eq_iff_exists_splits]
|
||||
|
||||
theorem copy_sliceFrom_eq_iff_splits {s : String} {p : s.Pos} {t₂ : String} :
|
||||
(s.sliceFrom p).copy = t₂ ↔ p.Splits (s.sliceTo p).copy t₂ :=
|
||||
⟨fun h => h ▸ p.splits, p.splits.eq_right⟩
|
||||
|
||||
theorem Pos.Splits.offset_eq_decreaseBy {s : String} {p : s.Pos} (h : p.Splits t₁ t₂) :
|
||||
p.offset = s.rawEndPos.decreaseBy t₂.utf8ByteSize := by
|
||||
simp [h.offset_eq_rawEndPos, h.eq_append, Pos.Raw.ext_iff]
|
||||
@@ -641,6 +662,28 @@ theorem Pos.splits_append_rawEndPos {s t : String} :
|
||||
eq_append := rfl
|
||||
offset_eq_rawEndPos := rfl
|
||||
|
||||
/--
|
||||
Given a slice `s` such that `s.copy = t₁ ++ t₂`, obtain the position sitting between `t₁` and `t₂`.
|
||||
-/
|
||||
def Slice.Pos.ofEqAppend {s : Slice} {t₁ t₂ : String} (h : s.copy = t₁ ++ t₂) : s.Pos :=
|
||||
s.pos t₁.rawEndPos
|
||||
(by simpa [← Pos.Raw.isValid_copy_iff, h] using ((Pos.Raw.isValid_rawEndPos).append_right t₂))
|
||||
|
||||
theorem Slice.Pos.splits_ofEqAppend {s : Slice} {t₁ t₂ : String} (h : s.copy = t₁ ++ t₂) :
|
||||
(ofEqAppend h).Splits t₁ t₂ where
|
||||
eq_append := h
|
||||
offset_eq_rawEndPos := by simp [ofEqAppend]
|
||||
|
||||
/--
|
||||
Given a string `s` such that `s = t₁ ++ t₂`, obtain the position sitting between `t₁` and `t₂`.
|
||||
-/
|
||||
def Pos.ofEqAppend {s t₁ t₂ : String} (h : s = t₁ ++ t₂) : s.Pos :=
|
||||
((t₁ ++ t₂).pos t₁.rawEndPos ((Pos.Raw.isValid_rawEndPos).append_right t₂)).cast h.symm
|
||||
|
||||
theorem Pos.splits_ofEqAppend {s t₁ t₂ : String} (h : s = t₁ ++ t₂) : (ofEqAppend h).Splits t₁ t₂ where
|
||||
eq_append := h
|
||||
offset_eq_rawEndPos := by simp [ofEqAppend]
|
||||
|
||||
theorem Pos.Splits.copy_sliceTo_eq {s : String} {p : s.Pos} (h : p.Splits t₁ t₂) :
|
||||
(s.sliceTo p).copy = t₁ :=
|
||||
p.splits.eq_left h
|
||||
@@ -740,4 +783,44 @@ theorem splits_prevn_endPos (s : String) (n : Nat) :
|
||||
(s.endPos.prevn n).Splits (String.ofList (s.toList.take (s.length - n))) (String.ofList (s.toList.drop (s.length - n))) := by
|
||||
simpa using s.splits_endPos.prevn n
|
||||
|
||||
@[simp]
|
||||
theorem Slice.copy_sliceFrom_cast {s t : Slice} (hst : s.copy = t.copy) {pos : s.Pos} :
|
||||
(t.sliceFrom (pos.cast hst)).copy = (s.sliceFrom pos).copy := by
|
||||
simpa [copy_sliceFrom_eq_iff_exists_splits] using ⟨_, pos.splits⟩
|
||||
|
||||
@[simp]
|
||||
theorem Slice.copy_sliceTo_cast {s t : Slice} (hst : s.copy = t.copy) {pos : s.Pos} :
|
||||
(t.sliceTo (pos.cast hst)).copy = (s.sliceTo pos).copy := by
|
||||
simpa [copy_sliceTo_eq_iff_exists_splits] using ⟨_, pos.splits⟩
|
||||
|
||||
@[simp]
|
||||
theorem copy_sliceFrom_cast {s t : String} (hst : s = t) {pos : s.Pos} :
|
||||
(t.sliceFrom (pos.cast hst)).copy = (s.sliceFrom pos).copy := by
|
||||
simpa [copy_sliceFrom_eq_iff_exists_splits] using ⟨_, pos.splits⟩
|
||||
|
||||
@[simp]
|
||||
theorem copy_sliceTo_cast {s t : String} (hst : s = t) {pos : s.Pos} :
|
||||
(t.sliceTo (pos.cast hst)).copy = (s.sliceTo pos).copy := by
|
||||
simpa [copy_sliceTo_eq_iff_exists_splits] using ⟨_, pos.splits⟩
|
||||
|
||||
theorem Slice.Pos.sliceFrom_cast {s t : Slice} {hst : s.copy = t.copy} (p q : s.Pos) {h} :
|
||||
Slice.Pos.sliceFrom (p.cast hst) (q.cast hst) h =
|
||||
(Slice.Pos.sliceFrom p q (by simpa using h)).cast (by simp) := by
|
||||
ext1; simp
|
||||
|
||||
theorem Slice.Pos.sliceTo_cast {s t : Slice} {hst : s.copy = t.copy} (p q : s.Pos) {h} :
|
||||
Slice.Pos.sliceTo (p.cast hst) (q.cast hst) h =
|
||||
(Slice.Pos.sliceTo p q (by simpa using h)).cast (by simp) := by
|
||||
ext1; simp
|
||||
|
||||
theorem Pos.sliceFrom_cast {s t : String} {hst : s = t} (p q : s.Pos) {h} :
|
||||
Pos.sliceFrom (p.cast hst) (q.cast hst) h =
|
||||
(Pos.sliceFrom p q (by simpa using h)).cast (by simp) := by
|
||||
ext1; simp
|
||||
|
||||
theorem Pos.sliceTo_cast {s t : String} {hst : s = t} (p q : s.Pos) {h} :
|
||||
Pos.sliceTo (p.cast hst) (q.cast hst) h =
|
||||
(Pos.sliceTo p q (by simpa using h)).cast (by simp) := by
|
||||
ext1; simp
|
||||
|
||||
end String
|
||||
|
||||
@@ -96,6 +96,44 @@ theorem endPos_ofSliceFrom {s : Slice} {p : s.Pos} {st : SearchStep (s.sliceFrom
|
||||
st.ofSliceFrom.endPos = Slice.Pos.ofSliceFrom st.endPos := by
|
||||
cases st <;> simp [ofSliceFrom]
|
||||
|
||||
/--
|
||||
Converts a {lean}`SearchStep s` into a {lean}`SearchStep t` by applying {name}`Slice.Pos.cast` to the
|
||||
start and end position.
|
||||
-/
|
||||
@[inline]
|
||||
def cast {s t : Slice} (hst : s.copy = t.copy) : SearchStep s → SearchStep t
|
||||
| .rejected startPos endPos => .rejected (startPos.cast hst) (endPos.cast hst)
|
||||
| .matched startPos endPos => .matched (startPos.cast hst) (endPos.cast hst)
|
||||
|
||||
@[simp]
|
||||
theorem cast_rejected {s t : Slice} {hst : s.copy = t.copy} {startPos endPos : s.Pos} :
|
||||
(SearchStep.rejected startPos endPos).cast hst = .rejected (startPos.cast hst) (endPos.cast hst) :=
|
||||
(rfl)
|
||||
|
||||
@[simp]
|
||||
theorem cast_matched {s t : Slice} {hst : s.copy = t.copy} {startPos endPos : s.Pos} :
|
||||
(SearchStep.matched startPos endPos).cast hst = .matched (startPos.cast hst) (endPos.cast hst) :=
|
||||
(rfl)
|
||||
|
||||
@[simp]
|
||||
theorem startPos_cast {s t : Slice} (hst : s.copy = t.copy) {st : SearchStep s} :
|
||||
(st.cast hst).startPos = st.startPos.cast hst := by
|
||||
cases st <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem endPos_cast {s t : Slice} (hst : s.copy = t.copy) {st : SearchStep s} :
|
||||
(st.cast hst).endPos = st.endPos.cast hst := by
|
||||
cases st <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem cast_rfl {s : Slice} {st : SearchStep s} : st.cast rfl = st := by
|
||||
cases st <;> simp
|
||||
|
||||
@[simp]
|
||||
theorem cast_cast {s t u : Slice} {hst : s.copy = t.copy} {htu : t.copy = u.copy} {st : SearchStep s} :
|
||||
(st.cast hst).cast htu = st.cast (hst.trans htu) := by
|
||||
cases st <;> simp
|
||||
|
||||
end SearchStep
|
||||
|
||||
/--
|
||||
|
||||
@@ -311,23 +311,6 @@ def Internal.containsImpl (s : String) (c : Char) : Bool :=
|
||||
def Internal.anyImpl (s : String) (p : Char → Bool) :=
|
||||
String.any s p
|
||||
|
||||
/--
|
||||
Checks whether a slice only consists of matches of the pattern {name}`pat`.
|
||||
|
||||
Short-circuits at the first pattern mis-match.
|
||||
|
||||
This function is generic over all currently supported patterns.
|
||||
|
||||
Examples:
|
||||
* {lean}`"brown".all Char.isLower = true`
|
||||
* {lean}`"brown and orange".all Char.isLower = false`
|
||||
* {lean}`"aaaaaa".all 'a' = true`
|
||||
* {lean}`"aaaaaa".all "aa" = true`
|
||||
* {lean}`"aaaaaaa".all "aa" = false`
|
||||
-/
|
||||
@[inline, suggest_for String.every] def all (s : String) (pat : ρ) [ForwardPattern pat] : Bool :=
|
||||
s.toSlice.all pat
|
||||
|
||||
/--
|
||||
Checks whether the string can be interpreted as the decimal representation of a natural number.
|
||||
|
||||
|
||||
@@ -426,13 +426,13 @@ Advances {name}`pos` as long as {name}`pat` matches.
|
||||
-/
|
||||
@[specialize pat]
|
||||
def Pos.skipWhile {s : Slice} (pos : s.Pos) (pat : ρ) [ForwardPattern pat] : s.Pos :=
|
||||
if let some nextCurr := ForwardPattern.skipPrefix? pat (s.sliceFrom pos) then
|
||||
if pos < Pos.ofSliceFrom nextCurr then
|
||||
skipWhile (Pos.ofSliceFrom nextCurr) pat
|
||||
match pos.skip? pat with
|
||||
| some nextCurr =>
|
||||
if pos < nextCurr then
|
||||
skipWhile nextCurr pat
|
||||
else
|
||||
pos
|
||||
else
|
||||
pos
|
||||
| none => pos
|
||||
termination_by pos
|
||||
|
||||
/--
|
||||
@@ -572,7 +572,7 @@ Examples:
|
||||
-/
|
||||
@[inline]
|
||||
def all (s : Slice) (pat : ρ) [ForwardPattern pat] : Bool :=
|
||||
s.dropWhile pat |>.isEmpty
|
||||
s.skipPrefixWhile pat == s.endPos
|
||||
|
||||
end ForwardPatternUsers
|
||||
|
||||
@@ -707,7 +707,7 @@ This function is generic over all currently supported patterns.
|
||||
-/
|
||||
@[inline]
|
||||
def Pos.revSkip? {s : Slice} (pos : s.Pos) (pat : ρ) [BackwardPattern pat] : Option s.Pos :=
|
||||
((s.sliceFrom pos).skipSuffix? pat).map Pos.ofSliceFrom
|
||||
((s.sliceTo pos).skipSuffix? pat).map Pos.ofSliceTo
|
||||
|
||||
/--
|
||||
If {name}`pat` matches a suffix of {name}`s`, returns the remainder. Returns {name}`none` otherwise.
|
||||
@@ -765,13 +765,13 @@ Rewinds {name}`pos` as long as {name}`pat` matches.
|
||||
-/
|
||||
@[specialize pat]
|
||||
def Pos.revSkipWhile {s : Slice} (pos : s.Pos) (pat : ρ) [BackwardPattern pat] : s.Pos :=
|
||||
if let some nextCurr := BackwardPattern.skipSuffix? pat (s.sliceTo pos) then
|
||||
if Pos.ofSliceTo nextCurr < pos then
|
||||
revSkipWhile (Pos.ofSliceTo nextCurr) pat
|
||||
match pos.revSkip? pat with
|
||||
| some nextCurr =>
|
||||
if nextCurr < pos then
|
||||
revSkipWhile nextCurr pat
|
||||
else
|
||||
pos
|
||||
else
|
||||
pos
|
||||
| none => pos
|
||||
termination_by pos.down
|
||||
|
||||
/--
|
||||
@@ -782,6 +782,36 @@ Returns the position at the start of the longest suffix of {name}`s` for which {
|
||||
def skipSuffixWhile (s : Slice) (pat : ρ) [BackwardPattern pat] : s.Pos :=
|
||||
s.endPos.revSkipWhile pat
|
||||
|
||||
/--
|
||||
Checks whether a slice only consists of matches of the pattern {name}`pat`, starting from the back
|
||||
of the string.
|
||||
|
||||
Short-circuits at the first pattern mis-match.
|
||||
|
||||
This function is generic over all currently supported patterns.
|
||||
|
||||
For many types of patterns, this function can be expected to return the same result as
|
||||
{name}`Slice.all`. If mismatches are expected to occur close to the end of the string, this function
|
||||
might be more efficient.
|
||||
|
||||
For some types of patterns, this function will return a different result than {name}`Slice.all`.
|
||||
Consider, for example, a pattern that matches the longest string at the given position that matches
|
||||
the regular expression {lean}`"a|aa|ab"`. Then, given the input string {lean}`"aab"`, performing
|
||||
{name}`Slice.all` will greedily match the prefix {lean}`"aa"` and then get stuck on the remainder
|
||||
{lean}`"b"`, causing it to return {lean}`false`. On the other hand, {name}`Slice.revAll` will match
|
||||
the suffix {lean}`"ab"` and then match the remainder {lean}`"a"`, so it will return {lean}`true`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"brown".toSlice.revAll Char.isLower = true`
|
||||
* {lean}`"brown and orange".toSlice.revAll Char.isLower = false`
|
||||
* {lean}`"aaaaaa".toSlice.revAll 'a' = true`
|
||||
* {lean}`"aaaaaa".toSlice.revAll "aa" = true`
|
||||
* {lean}`"aaaaaaa".toSlice.revAll "aa" = false`
|
||||
-/
|
||||
@[inline]
|
||||
def revAll (s : Slice) (pat : ρ) [BackwardPattern pat] : Bool :=
|
||||
s.skipSuffixWhile pat == s.startPos
|
||||
|
||||
/--
|
||||
Creates a new slice that contains the longest suffix of {name}`s` for which {name}`pat` matched
|
||||
(potentially repeatedly).
|
||||
|
||||
@@ -224,6 +224,53 @@ Returns the position after the longest prefix of {name}`s` for which {name}`pat`
|
||||
@[inline] def skipPrefixWhile (s : String) (pat : ρ) [ForwardPattern pat] : s.Pos :=
|
||||
Pos.ofToSlice (s.toSlice.skipPrefixWhile pat)
|
||||
|
||||
/--
|
||||
Checks whether a string only consists of matches of the pattern {name}`pat`.
|
||||
|
||||
Short-circuits at the first pattern mis-match.
|
||||
|
||||
This function is generic over all currently supported patterns.
|
||||
|
||||
Examples:
|
||||
* {lean}`"brown".all Char.isLower = true`
|
||||
* {lean}`"brown and orange".all Char.isLower = false`
|
||||
* {lean}`"aaaaaa".all 'a' = true`
|
||||
* {lean}`"aaaaaa".all "aa" = true`
|
||||
* {lean}`"aaaaaaa".all "aa" = false`
|
||||
-/
|
||||
@[inline, suggest_for String.every] def all (s : String) (pat : ρ) [ForwardPattern pat] : Bool :=
|
||||
s.toSlice.all pat
|
||||
|
||||
/--
|
||||
Checks whether a string only consists of matches of the pattern {name}`pat`, starting from the back
|
||||
of the string.
|
||||
|
||||
Short-circuits at the first pattern mis-match.
|
||||
|
||||
This function is generic over all currently supported patterns.
|
||||
|
||||
For many types of patterns, this function can be expected to return the same result as
|
||||
{name}`String.all`. If mismatches are expected to occur close to the end of the string, this function
|
||||
might be more efficient.
|
||||
|
||||
For some types of patterns, this function will return a different result than {name}`String.all`.
|
||||
Consider, for example, a pattern that matches the longest string at the given position that matches
|
||||
the regular expression {lean}`"a|aa|ab"`. Then, given the input string {lean}`"aab"`, performing
|
||||
{name}`String.all` will greedily match the prefix {lean}`"aa"` and then get stuck on the remainder
|
||||
{lean}`"b"`, causing it to return {lean}`false`. On the other hand, {name}`String.revAll` will match
|
||||
the suffix {lean}`"ab"` and then match the remainder {lean}`"a"`, so it will return {lean}`true`.
|
||||
|
||||
Examples:
|
||||
* {lean}`"brown".revAll Char.isLower = true`
|
||||
* {lean}`"brown and orange".revAll Char.isLower = false`
|
||||
* {lean}`"aaaaaa".revAll 'a' = true`
|
||||
* {lean}`"aaaaaa".revAll "aa" = true`
|
||||
* {lean}`"aaaaaaa".revAll "aa" = false`
|
||||
-/
|
||||
@[inline]
|
||||
def revAll (s : String) (pat : ρ) [BackwardPattern pat] : Bool :=
|
||||
s.toSlice.revAll pat
|
||||
|
||||
/--
|
||||
If {name}`pat` matches at {name}`pos`, returns the position after the end of the match.
|
||||
Returns {name}`none` otherwise.
|
||||
|
||||
@@ -1230,7 +1230,14 @@ def instantiateRevRangeArgs (e : Expr) (beginIdx endIdx : Nat) (args : Array (Ar
|
||||
else
|
||||
e.instantiateRevRange beginIdx endIdx (args.map (·.toExpr))
|
||||
|
||||
/-- Lookup function for compiler extensions with sorted persisted state that works in both `lean` and `leanir`. -/
|
||||
/--
|
||||
Lookup function for compiler extensions with sorted persisted state that works in both `lean` and
|
||||
`leanir`.
|
||||
|
||||
`preferImported` defaults to false because in `leanir`, we do not want to mix information from
|
||||
`meta` compilation in `lean` with our own state. But in `lean`, setting `preferImported` can help
|
||||
with avoiding unnecessary task blocks.
|
||||
-/
|
||||
@[inline] def findExtEntry? [Inhabited σ] (env : Environment) (ext : PersistentEnvExtension α β σ) (declName : Name)
|
||||
(findAtSorted? : Array α → Name → Option α')
|
||||
(findInState? : σ → Name → Option α') : Option α' :=
|
||||
|
||||
@@ -78,9 +78,13 @@ def isValidMainType (type : Expr) : Bool :=
|
||||
isValidResultName resultName
|
||||
| _ => false
|
||||
|
||||
/-- A postponed call of `compileDecls`. -/
|
||||
structure PostponedCompileDecls where
|
||||
/-- Declaration names of this mutual group. -/
|
||||
declNames : Array Name
|
||||
deriving BEq, Hashable
|
||||
/-- Options at time of original call, to be restored for tracing etc. -/
|
||||
options : Options
|
||||
deriving BEq
|
||||
|
||||
/--
|
||||
Saves postponed `compileDecls` calls.
|
||||
@@ -101,16 +105,20 @@ builtin_initialize postponedCompileDeclsExt : SimplePersistentEnvExtension Postp
|
||||
{ exported := #[], server := #[], «private» := es.toArray }
|
||||
}
|
||||
|
||||
def resumeCompilation (declName : Name) : CoreM Unit := do
|
||||
def resumeCompilation (declName : Name) (baseOpts : Options) : CoreM Unit := do
|
||||
let some decls := postponedCompileDeclsExt.getState (← getEnv) |>.find? declName | return
|
||||
let opts := baseOpts.mergeBy (fun _ base _ => base) decls.options
|
||||
let opts := compiler.postponeCompile.set opts false
|
||||
modifyEnv (postponedCompileDeclsExt.modifyState · fun s => decls.declNames.foldl (·.erase) s)
|
||||
withOptions (compiler.postponeCompile.set · false) do
|
||||
-- NOTE: we *must* throw away the current options as they could depend on the specific recursion
|
||||
-- we did to get here.
|
||||
withOptions (fun _ => opts) do
|
||||
Core.prependError m!"Failed to compile `{declName}`" do
|
||||
(← compileDeclsRef.get) decls.declNames
|
||||
(← compileDeclsRef.get) decls.declNames baseOpts
|
||||
|
||||
namespace PassManager
|
||||
|
||||
partial def run (declNames : Array Name) : CompilerM Unit := withAtLeastMaxRecDepth 8192 do
|
||||
partial def run (declNames : Array Name) (baseOpts : Options) : CompilerM Unit := withAtLeastMaxRecDepth 8192 do
|
||||
/-
|
||||
Note: we need to increase the recursion depth because we currently do to save phase1
|
||||
declarations in .olean files. Then, we have to recursively compile all dependencies,
|
||||
@@ -141,11 +149,14 @@ partial def run (declNames : Array Name) : CompilerM Unit := withAtLeastMaxRecDe
|
||||
|
||||
-- Now that we have done all input checks, check for postponement
|
||||
if (← getEnv).header.isModule && (← compiler.postponeCompile.getM) then
|
||||
modifyEnv (postponedCompileDeclsExt.addEntry · { declNames := decls.map (·.name) })
|
||||
modifyEnv (postponedCompileDeclsExt.addEntry · { declNames := decls.map (·.name), options := ← getOptions })
|
||||
-- meta defs are compiled locally so they are available for execution/compilation without
|
||||
-- importing `.ir` but still marked for `leanir` compilation so that we do not have to persist
|
||||
-- module-local compilation information between the two processes
|
||||
if !decls.any (isMarkedMeta (← getEnv) ·.name) then
|
||||
if decls.any (isMarkedMeta (← getEnv) ·.name) then
|
||||
-- avoid re-compiling the meta defs in this process; the entry for `leanir` is not affected
|
||||
modifyEnv (postponedCompileDeclsExt.modifyState · fun s => decls.foldl (·.erase ·.name) s)
|
||||
else
|
||||
trace[Compiler] "postponing compilation of {decls.map (·.name)}"
|
||||
return
|
||||
|
||||
@@ -157,7 +168,7 @@ partial def run (declNames : Array Name) : CompilerM Unit := withAtLeastMaxRecDe
|
||||
let .let { value := .const c .., .. } .. := c | return
|
||||
-- Need to do some lookups to get the actual name passed to `compileDecls`
|
||||
let c := Compiler.getImplementedBy? (← getEnv) c |>.getD c
|
||||
resumeCompilation c
|
||||
resumeCompilation c baseOpts
|
||||
|
||||
let decls := markRecDecls decls
|
||||
let manager ← getPassManager
|
||||
@@ -200,9 +211,9 @@ where
|
||||
|
||||
end PassManager
|
||||
|
||||
def main (declNames : Array Name) : CoreM Unit := do
|
||||
def main (declNames : Array Name) (baseOpts : Options) : CoreM Unit := do
|
||||
withTraceNode `Compiler (fun _ => return m!"compiling: {declNames}") do
|
||||
CompilerM.run <| PassManager.run declNames
|
||||
CompilerM.run <| PassManager.run declNames baseOpts
|
||||
|
||||
builtin_initialize
|
||||
compileDeclsRef.set main
|
||||
|
||||
@@ -279,13 +279,13 @@ partial def casesFloatArrayToMono (c : Cases .pure) (_ : c.typeName == ``FloatAr
|
||||
let k ← k.toMono
|
||||
return .let decl k
|
||||
|
||||
/-- Eliminate `cases` for `String. -/
|
||||
/-- Eliminate `cases` for `String`. -/
|
||||
partial def casesStringToMono (c : Cases .pure) (_ : c.typeName == ``String) : ToMonoM (Code .pure) := do
|
||||
assert! c.alts.size == 1
|
||||
let .alt _ ps k := c.alts[0]! | unreachable!
|
||||
eraseParams ps
|
||||
let p := ps[0]!
|
||||
let decl := { fvarId := p.fvarId, binderName := p.binderName, type := anyExpr, value := .const ``String.toList [] #[.fvar c.discr] }
|
||||
let decl := { fvarId := p.fvarId, binderName := p.binderName, type := anyExpr, value := .const ``String.toByteArray [] #[.fvar c.discr] }
|
||||
modifyLCtx fun lctx => lctx.addLetDecl decl
|
||||
let k ← k.toMono
|
||||
return .let decl k
|
||||
|
||||
@@ -19,7 +19,7 @@ that fulfill the requirements of `shouldGenerateCode`.
|
||||
def compile (declNames : Array Name) : CoreM Unit := do profileitM Exception "compiler new" (← getOptions) do
|
||||
withOptions (compiler.postponeCompile.set · false) do
|
||||
withTraceNode `Compiler (fun _ => return m!"compiling: {declNames}") do
|
||||
LCNF.main declNames
|
||||
LCNF.main declNames {}
|
||||
|
||||
builtin_initialize
|
||||
registerTraceClass `Compiler
|
||||
|
||||
@@ -711,11 +711,11 @@ breaks the cycle by making `compileDeclsImpl` a "dynamic" call through the ref t
|
||||
to the linker. In the compiler there is a matching `builtin_initialize` to set this ref to the
|
||||
actual implementation of compileDeclsRef.
|
||||
-/
|
||||
builtin_initialize compileDeclsRef : IO.Ref (Array Name → CoreM Unit) ←
|
||||
IO.mkRef (fun _ => throwError m!"call to compileDecls with uninitialized compileDeclsRef")
|
||||
builtin_initialize compileDeclsRef : IO.Ref (Array Name → Options → CoreM Unit) ←
|
||||
IO.mkRef (fun _ _ => throwError m!"call to compileDecls with uninitialized compileDeclsRef")
|
||||
|
||||
private def compileDeclsImpl (declNames : Array Name) : CoreM Unit := do
|
||||
(← compileDeclsRef.get) declNames
|
||||
(← compileDeclsRef.get) declNames {}
|
||||
|
||||
-- `ref?` is used for error reporting if available
|
||||
def compileDecls (decls : Array Name) (logErrors := true) : CoreM Unit := do
|
||||
|
||||
@@ -82,11 +82,17 @@ def mergeBy (f : Name → DataValue → DataValue → DataValue) (o1 o2 : Option
|
||||
|
||||
end Options
|
||||
|
||||
structure OptionDeprecation where
|
||||
since : String
|
||||
text? : Option String := none
|
||||
deriving Inhabited
|
||||
|
||||
structure OptionDecl where
|
||||
name : Name
|
||||
declName : Name := by exact decl_name%
|
||||
defValue : DataValue
|
||||
descr : String := ""
|
||||
deprecation? : Option OptionDeprecation := none
|
||||
deriving Inhabited
|
||||
|
||||
def OptionDecl.fullDescr (self : OptionDecl) : String := Id.run do
|
||||
@@ -183,6 +189,7 @@ namespace Option
|
||||
protected structure Decl (α : Type) where
|
||||
defValue : α
|
||||
descr : String := ""
|
||||
deprecation? : Option OptionDeprecation := none
|
||||
|
||||
protected def get? [KVMap.Value α] (opts : Options) (opt : Lean.Option α) : Option α :=
|
||||
opts.get? opt.name
|
||||
@@ -214,6 +221,7 @@ protected def register [KVMap.Value α] (name : Name) (decl : Lean.Option.Decl
|
||||
declName := ref
|
||||
defValue := KVMap.Value.toDataValue decl.defValue
|
||||
descr := decl.descr
|
||||
deprecation? := decl.deprecation?
|
||||
}
|
||||
return { name := name, defValue := decl.defValue }
|
||||
|
||||
|
||||
@@ -1832,13 +1832,15 @@ To infer a namespace from the expected type, we do the following operations:
|
||||
- if the type is of the form `c x₁ ... xₙ` with `c` a constant, then try using `c` as the namespace,
|
||||
and if that doesn't work, try unfolding the expression and continuing.
|
||||
-/
|
||||
private partial def resolveDottedIdentFn (idRef : Syntax) (id : Name) (expectedType? : Option Expr) : TermElabM (List (Expr × Syntax × List Syntax)) := do
|
||||
private partial def resolveDottedIdentFn (idRef : Syntax) (id : Name) (explicitUnivs : List Level) (expectedType? : Option Expr) : TermElabM (List (Expr × Syntax × List Syntax)) := do
|
||||
unless id.isAtomic do
|
||||
throwError "Invalid dotted identifier notation: The name `{id}` must be atomic"
|
||||
tryPostponeIfNoneOrMVar expectedType?
|
||||
let some expectedType := expectedType?
|
||||
| throwNoExpectedType
|
||||
addCompletionInfo <| CompletionInfo.dotId idRef id (← getLCtx) expectedType?
|
||||
-- We will check deprecations in `elabAppFnResolutions`.
|
||||
withoutCheckDeprecated do
|
||||
withForallBody expectedType fun resultType => do
|
||||
go resultType expectedType #[]
|
||||
where
|
||||
@@ -1878,8 +1880,10 @@ where
|
||||
|>.filter (fun (_, fieldList) => fieldList.isEmpty)
|
||||
|>.map Prod.fst
|
||||
if !candidates.isEmpty then
|
||||
candidates.mapM fun resolvedName => return (← mkConst resolvedName, ← getRef, [])
|
||||
candidates.mapM fun resolvedName => return (← mkConst resolvedName explicitUnivs, ← getRef, [])
|
||||
else if let some (fvar, []) ← resolveLocalName fullName then
|
||||
unless explicitUnivs.isEmpty do
|
||||
throwInvalidExplicitUniversesForLocal fvar
|
||||
return [(fvar, ← getRef, [])]
|
||||
else
|
||||
throwUnknownIdentifierAt (← getRef) (declHint := fullName) <| m!"Unknown constant `{.ofConstName fullName}`"
|
||||
@@ -1919,6 +1923,10 @@ private partial def elabAppFn (f : Syntax) (lvals : List LVal) (namedArgs : Arra
|
||||
let some idx := idxStx.isFieldIdx?
|
||||
| throwError "Internal error: Unexpected field index syntax `{idxStx}`"
|
||||
elabAppFn e (LVal.fieldIdx idxStx idx :: lvals) namedArgs args expectedType? explicit ellipsis overloaded acc
|
||||
let elabDottedIdent (id : Syntax) (explicitUnivs : List Level) (explicit : Bool) : TermElabM (Array (TermElabResult Expr)) := do
|
||||
let res ← withRef f <| resolveDottedIdentFn id id.getId.eraseMacroScopes explicitUnivs expectedType?
|
||||
-- Use (forceTermInfo := true) because we want to record the result of .ident resolution even in patterns
|
||||
elabAppFnResolutions f res lvals namedArgs args expectedType? explicit ellipsis overloaded acc (forceTermInfo := true)
|
||||
match f with
|
||||
| `($(e).$idx:fieldIdx) => elabFieldIdx e idx explicit
|
||||
| `($e |>.$idx:fieldIdx) => elabFieldIdx e idx explicit
|
||||
@@ -1934,16 +1942,17 @@ private partial def elabAppFn (f : Syntax) (lvals : List LVal) (namedArgs : Arra
|
||||
| `($id:ident.{$us,*}) => do
|
||||
let us ← elabExplicitUnivs us
|
||||
elabAppFnId id us lvals namedArgs args expectedType? explicit ellipsis overloaded acc
|
||||
| `(@$id:ident) =>
|
||||
elabAppFn id lvals namedArgs args expectedType? (explicit := true) ellipsis overloaded acc
|
||||
| `(@$_:ident.{$_us,*}) =>
|
||||
| `(.$id:ident) => elabDottedIdent id [] explicit
|
||||
| `(.$id:ident.{$us,*}) =>
|
||||
let us ← elabExplicitUnivs us
|
||||
elabDottedIdent id us explicit
|
||||
| `(@$_:ident)
|
||||
| `(@$_:ident.{$_us,*})
|
||||
| `(@.$_:ident)
|
||||
| `(@.$_:ident.{$_us,*}) =>
|
||||
elabAppFn (f.getArg 1) lvals namedArgs args expectedType? (explicit := true) ellipsis overloaded acc
|
||||
| `(@$_) => throwUnsupportedSyntax -- invalid occurrence of `@`
|
||||
| `(_) => throwError "A placeholder `_` cannot be used where a function is expected"
|
||||
| `(.$id:ident) =>
|
||||
let res ← withRef f <| resolveDottedIdentFn id id.getId.eraseMacroScopes expectedType?
|
||||
-- Use (forceTermInfo := true) because we want to record the result of .ident resolution even in patterns
|
||||
elabAppFnResolutions f res lvals namedArgs args expectedType? explicit ellipsis overloaded acc (forceTermInfo := true)
|
||||
| _ => do
|
||||
let catchPostpone := !overloaded
|
||||
/- If we are processing a choice node, then we should use `catchPostpone == false` when elaborating terms.
|
||||
@@ -2086,13 +2095,15 @@ private def elabAtom : TermElab := fun stx expectedType? => do
|
||||
|
||||
@[builtin_term_elab explicit] def elabExplicit : TermElab := fun stx expectedType? =>
|
||||
match stx with
|
||||
| `(@$_:ident) => elabAtom stx expectedType? -- Recall that `elabApp` also has support for `@`
|
||||
| `(@$_:ident.{$_us,*}) => elabAtom stx expectedType?
|
||||
| `(@$(_).$_:fieldIdx) => elabAtom stx expectedType?
|
||||
| `(@$(_).$_:ident) => elabAtom stx expectedType?
|
||||
| `(@($t)) => elabTerm t expectedType? (implicitLambda := false) -- `@` is being used just to disable implicit lambdas
|
||||
| `(@$t) => elabTerm t expectedType? (implicitLambda := false) -- `@` is being used just to disable implicit lambdas
|
||||
| _ => throwUnsupportedSyntax
|
||||
| `(@$_:ident) => elabAtom stx expectedType? -- Recall that `elabApp` also has support for `@`
|
||||
| `(@$_:ident.{$_us,*}) => elabAtom stx expectedType?
|
||||
| `(@$(_).$_:fieldIdx) => elabAtom stx expectedType?
|
||||
| `(@$(_).$_:ident) => elabAtom stx expectedType?
|
||||
| `(@.$_:ident) => elabAtom stx expectedType?
|
||||
| `(@.$_:ident.{$_us,*}) => elabAtom stx expectedType?
|
||||
| `(@($t)) => elabTerm t expectedType? (implicitLambda := false) -- `@` is being used just to disable implicit lambdas
|
||||
| `(@$t) => elabTerm t expectedType? (implicitLambda := false) -- `@` is being used just to disable implicit lambdas
|
||||
| _ => throwUnsupportedSyntax
|
||||
|
||||
@[builtin_term_elab choice] def elabChoice : TermElab := elabAtom
|
||||
@[builtin_term_elab proj] def elabProj : TermElab := elabAtom
|
||||
|
||||
@@ -510,7 +510,8 @@ def failIfSucceeds (x : CommandElabM Unit) : CommandElabM Unit := do
|
||||
pure ()
|
||||
|
||||
@[builtin_command_elab «set_option»] def elabSetOption : CommandElab := fun stx => do
|
||||
let options ← Elab.elabSetOption stx[1] stx[3]
|
||||
let (options, decl) ← Elab.elabSetOption stx[1] stx[3]
|
||||
withRef stx[1] <| Elab.checkDeprecatedOption (stx[1].getId.eraseMacroScopes) decl
|
||||
modify fun s => { s with maxRecDepth := maxRecDepth.get options }
|
||||
modifyScope fun scope => { scope with opts := options }
|
||||
|
||||
|
||||
@@ -81,8 +81,15 @@ private def pushTypeIntoReassignment (letOrReassign : LetOrReassign) (decl : TSy
|
||||
else
|
||||
pure decl
|
||||
|
||||
partial def elabDoLetOrReassign (letOrReassign : LetOrReassign) (decl : TSyntax ``letDecl)
|
||||
private def checkLetConfigInDo (config : Term.LetConfig) : DoElabM Unit := do
|
||||
if config.postponeValue then
|
||||
throwError "`+postponeValue` is not supported in `do` blocks"
|
||||
if config.generalize then
|
||||
throwError "`+generalize` is not supported in `do` blocks"
|
||||
|
||||
partial def elabDoLetOrReassign (config : Term.LetConfig) (letOrReassign : LetOrReassign) (decl : TSyntax ``letDecl)
|
||||
(dec : DoElemCont) : DoElabM Expr := do
|
||||
checkLetConfigInDo config
|
||||
let vars ← getLetDeclVars decl
|
||||
letOrReassign.checkMutVars vars
|
||||
-- Some decl preprocessing on the patterns and expected types:
|
||||
@@ -91,7 +98,7 @@ partial def elabDoLetOrReassign (letOrReassign : LetOrReassign) (decl : TSyntax
|
||||
match decl with
|
||||
| `(letDecl| $decl:letEqnsDecl) =>
|
||||
let declNew ← `(letDecl| $(⟨← liftMacroM <| Term.expandLetEqnsDecl decl⟩):letIdDecl)
|
||||
return ← Term.withMacroExpansion decl declNew <| elabDoLetOrReassign letOrReassign declNew dec
|
||||
return ← Term.withMacroExpansion decl declNew <| elabDoLetOrReassign config letOrReassign declNew dec
|
||||
| `(letDecl| $pattern:term $[: $xType?]? := $rhs) =>
|
||||
let rhs ← match xType? with | some xType => `(($rhs : $xType)) | none => pure rhs
|
||||
let contElab : DoElabM Expr := elabWithReassignments letOrReassign vars dec.continueWithUnit
|
||||
@@ -99,15 +106,21 @@ partial def elabDoLetOrReassign (letOrReassign : LetOrReassign) (decl : TSyntax
|
||||
-- The infamous MVar postponement trick below popularized by `if` is necessary in Lake.CLI.Main.
|
||||
-- We need it because we specify a constant motive, otherwise the `match` elaborator would have postponed.
|
||||
let mvar ← Lean.withRef rhs `(?m)
|
||||
let term ← `(let_mvar% ?m := $rhs;
|
||||
wait_if_type_mvar% ?m;
|
||||
match (motive := ∀_, $(← Term.exprToSyntax mγ)) $mvar:term with
|
||||
| $pattern:term => $body)
|
||||
let term ← if let some h := config.eq? then
|
||||
`(let_mvar% ?m := $rhs;
|
||||
wait_if_type_mvar% ?m;
|
||||
match $h:ident : $mvar:term with
|
||||
| $pattern:term => $body)
|
||||
else
|
||||
`(let_mvar% ?m := $rhs;
|
||||
wait_if_type_mvar% ?m;
|
||||
match (motive := ∀_, $(← Term.exprToSyntax mγ)) $mvar:term with
|
||||
| $pattern:term => $body)
|
||||
Term.withMacroExpansion (← getRef) term do Term.elabTermEnsuringType term (some mγ)
|
||||
| `(letDecl| $decl:letIdDecl) =>
|
||||
let { id, binders, type, value } := Term.mkLetIdDeclView decl
|
||||
let id ← if id.isIdent then pure id else Term.mkFreshIdent id (canonical := true)
|
||||
let nondep := letOrReassign matches .have
|
||||
let nondep := config.nondep || letOrReassign matches .have
|
||||
-- Only non-`mut` lets will be elaborated as `let`s; `let mut` and reassigns behave as `have`s.
|
||||
-- See `elabLetDeclAux` for rationale.
|
||||
let (type, val) ← Term.elabBindersEx binders fun xs => do
|
||||
@@ -128,8 +141,25 @@ partial def elabDoLetOrReassign (letOrReassign : LetOrReassign) (decl : TSyntax
|
||||
withLetDecl id.getId (kind := kind) type val (nondep := nondep) fun x => do
|
||||
Term.addLocalVarInfo id x
|
||||
elabWithReassignments letOrReassign vars do
|
||||
let body ← dec.continueWithUnit
|
||||
mkLetFVars #[x] body (usedLetOnly := false) (generalizeNondepLet := false)
|
||||
match config.eq? with
|
||||
| none =>
|
||||
let body ← dec.continueWithUnit
|
||||
if config.zeta then
|
||||
pure <| (← body.abstractM #[x]).instantiate1 val
|
||||
else
|
||||
mkLetFVars #[x] body (usedLetOnly := config.usedOnly) (generalizeNondepLet := false)
|
||||
| some h =>
|
||||
let hTy ← mkEq x val
|
||||
withLetDecl h.getId hTy (← mkEqRefl x) (nondep := true) fun h' => do
|
||||
Term.addLocalVarInfo h h'
|
||||
let body ← dec.continueWithUnit
|
||||
if config.zeta then
|
||||
pure <| (← body.abstractM #[x, h']).instantiateRev #[val, ← mkEqRefl val]
|
||||
else if nondep then
|
||||
let f ← mkLambdaFVars #[x, h'] body
|
||||
return mkApp2 f val (← mkEqRefl val)
|
||||
else
|
||||
mkLetFVars #[x, h'] body (usedLetOnly := config.usedOnly) (generalizeNondepLet := false)
|
||||
| _ => throwUnsupportedSyntax
|
||||
|
||||
def elabDoArrow (letOrReassign : LetOrReassign) (stx : TSyntax [``doIdDecl, ``doPatDecl]) (dec : DoElemCont) : DoElabM Expr := do
|
||||
@@ -168,13 +198,21 @@ def elabDoArrow (letOrReassign : LetOrReassign) (stx : TSyntax [``doIdDecl, ``do
|
||||
elabDoElem (← `(doElem| $pattern:term := $x)) dec
|
||||
| _ => throwUnsupportedSyntax
|
||||
|
||||
private def getLetConfigAndCheckMut (letConfigStx : TSyntax ``Parser.Term.letConfig)
|
||||
(mutTk? : Option Syntax) (initConfig : Term.LetConfig := {}) : DoElabM Term.LetConfig := do
|
||||
if mutTk?.isSome && !letConfigStx.raw[0].getArgs.isEmpty then
|
||||
throwErrorAt letConfigStx "configuration options are not allowed with `let mut`"
|
||||
Term.mkLetConfig letConfigStx initConfig
|
||||
|
||||
@[builtin_doElem_elab Lean.Parser.Term.doLet] def elabDoLet : DoElab := fun stx dec => do
|
||||
let `(doLet| let $[mut%$mutTk?]? $decl:letDecl) := stx | throwUnsupportedSyntax
|
||||
elabDoLetOrReassign (.let mutTk?) decl dec
|
||||
let `(doLet| let $[mut%$mutTk?]? $config:letConfig $decl:letDecl) := stx | throwUnsupportedSyntax
|
||||
let config ← getLetConfigAndCheckMut config mutTk?
|
||||
elabDoLetOrReassign config (.let mutTk?) decl dec
|
||||
|
||||
@[builtin_doElem_elab Lean.Parser.Term.doHave] def elabDoHave : DoElab := fun stx dec => do
|
||||
let `(doHave| have $decl:letDecl) := stx | throwUnsupportedSyntax
|
||||
elabDoLetOrReassign .have decl dec
|
||||
let `(doHave| have $config:letConfig $decl:letDecl) := stx | throwUnsupportedSyntax
|
||||
let config ← Term.mkLetConfig config { nondep := true }
|
||||
elabDoLetOrReassign config .have decl dec
|
||||
|
||||
@[builtin_doElem_elab Lean.Parser.Term.doLetRec] def elabDoLetRec : DoElab := fun stx dec => do
|
||||
let `(doLetRec| let rec $decls:letRecDecls) := stx | throwUnsupportedSyntax
|
||||
@@ -192,14 +230,17 @@ def elabDoArrow (letOrReassign : LetOrReassign) (stx : TSyntax [``doIdDecl, ``do
|
||||
| `(doReassign| $x:ident $[: $xType?]? := $rhs) =>
|
||||
let decl : TSyntax ``letIdDecl ← `(letIdDecl| $x:ident $[: $xType?]? := $rhs)
|
||||
let decl : TSyntax ``letDecl := ⟨mkNode ``letDecl #[decl]⟩
|
||||
elabDoLetOrReassign .reassign decl dec
|
||||
elabDoLetOrReassign {} .reassign decl dec
|
||||
| `(doReassign| $decl:letPatDecl) =>
|
||||
let decl : TSyntax ``letDecl := ⟨mkNode ``letDecl #[decl]⟩
|
||||
elabDoLetOrReassign .reassign decl dec
|
||||
elabDoLetOrReassign {} .reassign decl dec
|
||||
| _ => throwUnsupportedSyntax
|
||||
|
||||
@[builtin_doElem_elab Lean.Parser.Term.doLetElse] def elabDoLetElse : DoElab := fun stx dec => do
|
||||
let `(doLetElse| let $[mut%$mutTk?]? $pattern := $rhs | $otherwise $(body?)?) := stx | throwUnsupportedSyntax
|
||||
let `(doLetElse| let $[mut%$mutTk?]? $cfg:letConfig $pattern := $rhs | $otherwise $(body?)?) := stx
|
||||
| throwUnsupportedSyntax
|
||||
let config ← getLetConfigAndCheckMut cfg mutTk?
|
||||
checkLetConfigInDo config
|
||||
let letOrReassign := LetOrReassign.let mutTk?
|
||||
let vars ← getPatternVarsEx pattern
|
||||
letOrReassign.checkMutVars vars
|
||||
@@ -208,10 +249,17 @@ def elabDoArrow (letOrReassign : LetOrReassign) (stx : TSyntax [``doIdDecl, ``do
|
||||
if mutTk?.isSome then
|
||||
for var in vars do
|
||||
body ← `(doSeqIndent| let mut $var := $var; do $body:doSeqIndent)
|
||||
elabDoElem (← `(doElem| match $rhs:term with | $pattern => $body:doSeqIndent | _ => $otherwise:doSeqIndent)) dec
|
||||
if let some h := config.eq? then
|
||||
elabDoElem (← `(doElem| match $h:ident : $rhs:term with | $pattern => $body:doSeqIndent | _ => $otherwise:doSeqIndent)) dec
|
||||
else
|
||||
elabDoElem (← `(doElem| match $rhs:term with | $pattern => $body:doSeqIndent | _ => $otherwise:doSeqIndent)) dec
|
||||
|
||||
@[builtin_doElem_elab Lean.Parser.Term.doLetArrow] def elabDoLetArrow : DoElab := fun stx dec => do
|
||||
let `(doLetArrow| let $[mut%$mutTk?]? $decl) := stx | throwUnsupportedSyntax
|
||||
let `(doLetArrow| let $[mut%$mutTk?]? $cfg:letConfig $decl) := stx | throwUnsupportedSyntax
|
||||
let config ← getLetConfigAndCheckMut cfg mutTk?
|
||||
checkLetConfigInDo config
|
||||
if config.nondep || config.usedOnly || config.zeta || config.eq?.isSome then
|
||||
throwErrorAt cfg "configuration options are not supported with `←`"
|
||||
elabDoArrow (.let mutTk?) decl dec
|
||||
|
||||
@[builtin_doElem_elab Lean.Parser.Term.doReassignArrow] def elabDoReassignArrow : DoElab := fun stx dec => do
|
||||
|
||||
@@ -371,7 +371,8 @@ private def mkSilentAnnotationIfHole (e : Expr) : TermElabM Expr := do
|
||||
popScope
|
||||
|
||||
@[builtin_term_elab «set_option»] def elabSetOption : TermElab := fun stx expectedType? => do
|
||||
let options ← Elab.elabSetOption stx[1] stx[3]
|
||||
let (options, decl) ← Elab.elabSetOption stx[1] stx[3]
|
||||
withRef stx[1] <| Elab.checkDeprecatedOption (stx[1].getId.eraseMacroScopes) decl
|
||||
withOptions (fun _ => options) do
|
||||
try
|
||||
elabTerm stx[5] expectedType?
|
||||
|
||||
@@ -875,7 +875,7 @@ first evaluates any local `set_option ... in ...` clauses and then invokes `cmd`
|
||||
partial def withSetOptionIn (cmd : CommandElab) : CommandElab := fun stx => do
|
||||
if stx.getKind == ``Lean.Parser.Command.in &&
|
||||
stx[0].getKind == ``Lean.Parser.Command.set_option then
|
||||
let opts ← Elab.elabSetOption stx[0][1] stx[0][3]
|
||||
let (opts, _) ← Elab.elabSetOption stx[0][1] stx[0][3]
|
||||
Command.withScope (fun scope => { scope with opts }) do
|
||||
withSetOptionIn cmd stx[2]
|
||||
else
|
||||
|
||||
@@ -94,12 +94,12 @@ partial def ofElem (stx : TSyntax `doElem) : TermElabM ControlInfo := do
|
||||
| `(doExpr| $_:term) => return { numRegularExits := 1 }
|
||||
| `(doElem| do $doSeq) => ofSeq doSeq
|
||||
-- Let
|
||||
| `(doElem| let $[mut]? $_:letDecl) => return .pure
|
||||
| `(doElem| have $_:letDecl) => return .pure
|
||||
| `(doElem| let $[mut]? $_:letConfig $_:letDecl) => return .pure
|
||||
| `(doElem| have $_:letConfig $_:letDecl) => return .pure
|
||||
| `(doElem| let rec $_:letRecDecl) => return .pure
|
||||
| `(doElem| let $[mut]? $_ := $_ | $otherwise $(body?)?) =>
|
||||
| `(doElem| let $[mut]? $_:letConfig $_ := $_ | $otherwise $(body?)?) =>
|
||||
ofLetOrReassign #[] none otherwise body?
|
||||
| `(doElem| let $[mut]? $decl) =>
|
||||
| `(doElem| let $[mut]? $_:letConfig $decl) =>
|
||||
ofLetOrReassignArrow false decl
|
||||
| `(doElem| $decl:letIdDeclNoBinders) =>
|
||||
ofLetOrReassign (← getLetIdDeclVars ⟨decl⟩) none none none
|
||||
@@ -169,15 +169,16 @@ partial def ofElem (stx : TSyntax `doElem) : TermElabM ControlInfo := do
|
||||
let bodyInfo ← match body? with | none => pure {} | some body => ofSeq ⟨body⟩
|
||||
return otherwiseInfo.alternative bodyInfo
|
||||
| _ =>
|
||||
let handlers := controlInfoElemAttribute.getEntries (← getEnv) stx.raw.getKind
|
||||
let kind := stx.raw.getKind
|
||||
let handlers := controlInfoElemAttribute.getEntries (← getEnv) kind
|
||||
for handler in handlers do
|
||||
let res ← catchInternalId unsupportedSyntaxExceptionId
|
||||
(some <$> handler.value stx)
|
||||
(fun _ => pure none)
|
||||
if let some info := res then return info
|
||||
throwError
|
||||
"No `ControlInfo` inference handler found for `{stx.raw.getKind}` in syntax {indentD stx}\n\
|
||||
Register a handler with `@[doElem_control_info {stx.raw.getKind}]`."
|
||||
"No `ControlInfo` inference handler found for `{kind}` in syntax {indentD stx}\n\
|
||||
Register a handler with `@[doElem_control_info {kind}]`."
|
||||
|
||||
partial def ofLetOrReassignArrow (reassignment : Bool) (decl : TSyntax [``doIdDecl, ``doPatDecl]) : TermElabM ControlInfo := do
|
||||
match decl with
|
||||
|
||||
@@ -36,6 +36,7 @@ private def getDoSeq (doStx : Syntax) : Syntax :=
|
||||
def elabLiftMethod : TermElab := fun stx _ =>
|
||||
throwErrorAt stx "invalid use of `(<- ...)`, must be nested inside a 'do' expression"
|
||||
|
||||
|
||||
/-- Return true if we should not lift `(<- ...)` actions nested in the syntax nodes with the given kind. -/
|
||||
private def liftMethodDelimiter (k : SyntaxNodeKind) : Bool :=
|
||||
k == ``Parser.Term.do ||
|
||||
@@ -76,9 +77,9 @@ private def liftMethodForbiddenBinder (stx : Syntax) : Bool :=
|
||||
else if k == ``Parser.Term.let then
|
||||
letDeclHasBinders stx[1]
|
||||
else if k == ``Parser.Term.doLet then
|
||||
letDeclHasBinders stx[2]
|
||||
letDeclHasBinders stx[3]
|
||||
else if k == ``Parser.Term.doLetArrow then
|
||||
letDeclArgHasBinders stx[2]
|
||||
letDeclArgHasBinders stx[3]
|
||||
else
|
||||
false
|
||||
|
||||
@@ -701,12 +702,12 @@ def getLetDeclVars (letDecl : Syntax) : TermElabM (Array Var) := do
|
||||
throwError "unexpected kind of let declaration"
|
||||
|
||||
def getDoLetVars (doLet : Syntax) : TermElabM (Array Var) :=
|
||||
-- leading_parser "let " >> optional "mut " >> letDecl
|
||||
getLetDeclVars doLet[2]
|
||||
-- leading_parser "let " >> optional "mut " >> letConfig >> letDecl
|
||||
getLetDeclVars doLet[3]
|
||||
|
||||
def getDoHaveVars (doHave : Syntax) : TermElabM (Array Var) :=
|
||||
-- leading_parser "have" >> letDecl
|
||||
getLetDeclVars doHave[1]
|
||||
-- leading_parser "have" >> letConfig >> letDecl
|
||||
getLetDeclVars doHave[2]
|
||||
|
||||
def getDoLetRecVars (doLetRec : Syntax) : TermElabM (Array Var) := do
|
||||
-- letRecDecls is an array of `(group (optional attributes >> letDecl))`
|
||||
@@ -727,9 +728,9 @@ def getDoPatDeclVars (doPatDecl : Syntax) : TermElabM (Array Var) := do
|
||||
let pattern := doPatDecl[0]
|
||||
getPatternVarsEx pattern
|
||||
|
||||
-- leading_parser "let " >> optional "mut " >> (doIdDecl <|> doPatDecl)
|
||||
-- leading_parser "let " >> optional "mut " >> letConfig >> (doIdDecl <|> doPatDecl)
|
||||
def getDoLetArrowVars (doLetArrow : Syntax) : TermElabM (Array Var) := do
|
||||
let decl := doLetArrow[2]
|
||||
let decl := doLetArrow[3]
|
||||
if decl.getKind == ``Parser.Term.doIdDecl then
|
||||
return #[getDoIdDeclVar decl]
|
||||
else if decl.getKind == ``Parser.Term.doPatDecl then
|
||||
@@ -1060,14 +1061,15 @@ def seqToTerm (action : Syntax) (k : Syntax) : M Syntax := withRef action <| wit
|
||||
def declToTerm (decl : Syntax) (k : Syntax) : M Syntax := withRef decl <| withFreshMacroScope do
|
||||
let kind := decl.getKind
|
||||
if kind == ``Parser.Term.doLet then
|
||||
let letDecl := decl[2]
|
||||
`(let $letDecl:letDecl; $k)
|
||||
let letConfig : TSyntax ``Parser.Term.letConfig := ⟨decl[2]⟩
|
||||
let letDecl := decl[3]
|
||||
`(let $letConfig:letConfig $letDecl:letDecl; $k)
|
||||
else if kind == ``Parser.Term.doLetRec then
|
||||
let letRecToken := decl[0]
|
||||
let letRecDecls := decl[1]
|
||||
return mkNode ``Parser.Term.letrec #[letRecToken, letRecDecls, mkNullNode, k]
|
||||
else if kind == ``Parser.Term.doLetArrow then
|
||||
let arg := decl[2]
|
||||
let arg := decl[3]
|
||||
if arg.getKind == ``Parser.Term.doIdDecl then
|
||||
let id := arg[0]
|
||||
let type := expandOptType id arg[1]
|
||||
@@ -1415,7 +1417,7 @@ mutual
|
||||
/-- Generate `CodeBlock` for `doLetArrow; doElems`
|
||||
`doLetArrow` is of the form
|
||||
```
|
||||
"let " >> optional "mut " >> (doIdDecl <|> doPatDecl)
|
||||
"let " >> optional "mut " >> letConfig >> (doIdDecl <|> doPatDecl)
|
||||
```
|
||||
where
|
||||
```
|
||||
@@ -1424,7 +1426,7 @@ mutual
|
||||
```
|
||||
-/
|
||||
partial def doLetArrowToCode (doLetArrow : Syntax) (doElems : List Syntax) : M CodeBlock := do
|
||||
let decl := doLetArrow[2]
|
||||
let decl := doLetArrow[3]
|
||||
if decl.getKind == ``Parser.Term.doIdDecl then
|
||||
let y := decl[0]
|
||||
checkNotShadowingMutable #[y]
|
||||
@@ -1475,11 +1477,11 @@ mutual
|
||||
throwError "unexpected kind of `do` declaration"
|
||||
|
||||
partial def doLetElseToCode (doLetElse : Syntax) (doElems : List Syntax) : M CodeBlock := do
|
||||
-- "let " >> optional "mut " >> termParser >> " := " >> termParser >> (checkColGt >> " | " >> doSeq) >> optional doSeq
|
||||
let pattern := doLetElse[2]
|
||||
let val := doLetElse[4]
|
||||
let elseSeq := doLetElse[6]
|
||||
let bodySeq := doLetElse[7][0]
|
||||
-- "let " >> optional "mut " >> letConfig >> termParser >> " := " >> termParser >> (checkColGt >> " | " >> doSeq) >> optional doSeq
|
||||
let pattern := doLetElse[3]
|
||||
let val := doLetElse[5]
|
||||
let elseSeq := doLetElse[7]
|
||||
let bodySeq := doLetElse[8][0]
|
||||
let contSeq ← if isMutableLet doLetElse then
|
||||
let vars ← (← getPatternVarsEx pattern).mapM fun var => `(doElem| let mut $var := $var)
|
||||
pure (vars ++ (getDoSeqElems bodySeq).toArray)
|
||||
|
||||
@@ -1042,7 +1042,16 @@ def mkRedundantAlternativeMsg (altName? : Option Name) (altMsg? : Option Message
|
||||
|
||||
def reportMatcherResultErrors (altLHSS : List AltLHS) (result : MatcherResult) : TermElabM Unit := do
|
||||
unless result.counterExamples.isEmpty do
|
||||
withHeadRefOnly <| logError m!"Missing cases:\n{Meta.Match.counterExamplesToMessageData result.counterExamples}"
|
||||
let maxCEx := Meta.Match.match.maxCounterExamples.get (← getOptions)
|
||||
let (shown, truncated) :=
|
||||
if result.counterExamples.size > maxCEx then
|
||||
(result.counterExamples.take maxCEx, true)
|
||||
else
|
||||
(result.counterExamples, false)
|
||||
let mut msg := m!"Missing cases:\n{Meta.Match.counterExamplesToMessageData shown}"
|
||||
if truncated then
|
||||
msg := msg ++ m!"\n(further cases omitted, increase `set_option match.maxCounterExamples {maxCEx}` to see more)"
|
||||
withHeadRefOnly <| logError msg
|
||||
return ()
|
||||
unless match.ignoreUnusedAlts.get (← getOptions) || result.unusedAltIdxs.isEmpty do
|
||||
let mut i := 0
|
||||
|
||||
@@ -69,6 +69,8 @@ private def throwCtorExpected {α} (ident : Option Syntax) : M α := do
|
||||
|
||||
if candidates.size = 0 then
|
||||
throwError message
|
||||
-- Sort for deterministic output (iteration order of `env.constants` is not stable)
|
||||
candidates := candidates.qsort Name.lt
|
||||
let oneOfThese := if h : candidates.size = 1 then m!"`{candidates[0]}`" else m!"one of these"
|
||||
let hint ← m!"Using {oneOfThese} would be valid:".hint (ref? := idStx) (candidates.map fun candidate => {
|
||||
suggestion := mkIdent candidate
|
||||
@@ -320,7 +322,7 @@ where
|
||||
if f.getKind == ``Parser.Term.dotIdent then
|
||||
let namedArgsNew ← namedArgs.mapM fun
|
||||
-- We must ensure that `ref[1]` remains original to allow named-argument hints
|
||||
| { ref, name, val := Arg.stx arg } => withRef ref do `(Lean.Parser.Term.namedArgument| ($(ref[1]) := $(← collect arg)))
|
||||
| { ref, name, val := Arg.stx arg, .. } => withRef ref do `(Lean.Parser.Term.namedArgument| ($(ref[1]) := $(← collect arg)))
|
||||
| _ => unreachable!
|
||||
let mut argsNew ← args.mapM fun | Arg.stx arg => collect arg | _ => unreachable!
|
||||
if ellipsis then
|
||||
|
||||
@@ -12,6 +12,11 @@ public import Init.Syntax
|
||||
public section
|
||||
namespace Lean.Elab
|
||||
|
||||
register_builtin_option linter.deprecated.options : Bool := {
|
||||
defValue := true
|
||||
descr := "if true, generate deprecation warnings for deprecated options"
|
||||
}
|
||||
|
||||
variable [Monad m] [MonadOptions m] [MonadError m] [MonadLiftT (EIO Exception) m] [MonadInfoTree m]
|
||||
|
||||
private def throwUnconfigurable {α} (optionName : Name) : m α :=
|
||||
@@ -43,7 +48,7 @@ where
|
||||
{indentExpr defValType}"
|
||||
| _ => throwUnconfigurable optionName
|
||||
|
||||
def elabSetOption (id : Syntax) (val : Syntax) : m Options := do
|
||||
def elabSetOption (id : Syntax) (val : Syntax) : m (Options × OptionDecl) := do
|
||||
let ref ← getRef
|
||||
-- For completion purposes, we discard `val` and any later arguments.
|
||||
-- We include the first argument (the keyword) for position information in case `id` is `missing`.
|
||||
@@ -51,9 +56,9 @@ def elabSetOption (id : Syntax) (val : Syntax) : m Options := do
|
||||
let optionName := id.getId.eraseMacroScopes
|
||||
let decl ← IO.toEIO (fun (ex : IO.Error) => Exception.error ref ex.toString) (getOptionDecl optionName)
|
||||
pushInfoLeaf <| .ofOptionInfo { stx := id, optionName, declName := decl.declName }
|
||||
let rec setOption (val : DataValue) : m Options := do
|
||||
let rec setOption (val : DataValue) : m (Options × OptionDecl) := do
|
||||
validateOptionValue optionName decl val
|
||||
return (← getOptions).set optionName val
|
||||
return ((← getOptions).set optionName val, decl)
|
||||
match val.isStrLit? with
|
||||
| some str => setOption (DataValue.ofString str)
|
||||
| none =>
|
||||
@@ -70,3 +75,17 @@ def elabSetOption (id : Syntax) (val : Syntax) : m Options := do
|
||||
throwUnconfigurable optionName
|
||||
|
||||
end Lean.Elab
|
||||
|
||||
namespace Lean.Elab
|
||||
|
||||
variable {m : Type → Type} [Monad m] [MonadOptions m] [MonadLog m] [AddMessageContext m]
|
||||
|
||||
def checkDeprecatedOption (optionName : Name) (decl : OptionDecl) : m Unit := do
|
||||
unless linter.deprecated.options.get (← getOptions) do return
|
||||
let some dep := decl.deprecation? | return
|
||||
let extraMsg := match dep.text? with
|
||||
| some text => m!": {text}"
|
||||
| none => m!""
|
||||
logWarning m!"`{optionName}` has been deprecated{extraMsg}"
|
||||
|
||||
end Lean.Elab
|
||||
|
||||
@@ -574,8 +574,14 @@ private def addSourceFields (structName : Name) (sources : Array ExplicitSourceV
|
||||
|
||||
private structure StructInstContext where
|
||||
view : StructInstView
|
||||
/-- True if the structure instance has a trailing `..`. -/
|
||||
ellipsis : Bool
|
||||
/-- If true, then try using parent instances for missing fields. -/
|
||||
useParentInstanceFields : Bool
|
||||
/-- If true, then try using default values or autoParams for missing fields.
|
||||
(Considered after `useParentInstanceFields`.) -/
|
||||
useDefaults : Bool
|
||||
/-- If true, then missing fields after default value synthesis remain as metavariables rather than yielding an error.
|
||||
Only applies if `useDefaults` is true. -/
|
||||
unsynthesizedAsMVars : Bool
|
||||
structName : Name
|
||||
structType : Expr
|
||||
/-- Structure universe levels. -/
|
||||
@@ -748,6 +754,8 @@ private structure PendingField where
|
||||
deps : NameSet
|
||||
val? : Option Expr
|
||||
|
||||
private def registerFieldMVarError (e : Expr) (ref : Syntax) (fieldName : Name) : StructInstM Unit :=
|
||||
registerCustomErrorIfMVar e ref m!"Cannot synthesize placeholder for field `{fieldName}`"
|
||||
|
||||
/--
|
||||
Synthesize pending optParams.
|
||||
@@ -778,7 +786,7 @@ private def synthOptParamFields : StructInstM Unit := do
|
||||
-- Process default values for pending optParam fields.
|
||||
let mut pendingFields : Array PendingField ← optParamFields.filterMapM fun (fieldName, fieldType, required) => do
|
||||
if required || (← isFieldNotSolved? fieldName).isSome then
|
||||
let (deps, val?) ← getFieldDefaultValue? fieldName
|
||||
let (deps, val?) ← if (← read).useDefaults then getFieldDefaultValue? fieldName else pure ({}, none)
|
||||
if let some val := val? then
|
||||
trace[Elab.struct] "default value for {fieldName}:{indentExpr val}"
|
||||
else
|
||||
@@ -831,44 +839,46 @@ private def synthOptParamFields : StructInstM Unit := do
|
||||
pending
|
||||
toRemove := toRemove.push selected.fieldName
|
||||
if toRemove.isEmpty then
|
||||
if (← read).ellipsis then
|
||||
for pendingField in pendingFields do
|
||||
if let some mvarId ← isFieldNotSolved? pendingField.fieldName then
|
||||
registerCustomErrorIfMVar (.mvar mvarId) (← read).view.ref m!"\
|
||||
Cannot synthesize placeholder for field `{pendingField.fieldName}`"
|
||||
return
|
||||
let assignErrorsMsg := MessageData.joinSep (assignErrors.map (m!"\n\n" ++ ·)).toList ""
|
||||
let mut requiredErrors : Array MessageData := #[]
|
||||
let mut unsolvedFields : Std.HashSet Name := {}
|
||||
for pendingField in pendingFields do
|
||||
if (← isFieldNotSolved? pendingField.fieldName).isNone then
|
||||
unsolvedFields := unsolvedFields.insert pendingField.fieldName
|
||||
let e := (← get).fieldMap.get! pendingField.fieldName
|
||||
requiredErrors := requiredErrors.push m!"\
|
||||
Field `{pendingField.fieldName}` must be explicitly provided; its synthesized value is{indentExpr e}"
|
||||
let requiredErrorsMsg := MessageData.joinSep (requiredErrors.map (m!"\n\n" ++ ·)).toList ""
|
||||
let missingFields := pendingFields |>.filter (fun pending => pending.val?.isNone)
|
||||
-- TODO(kmill): when fields are all stuck, report better.
|
||||
-- For now, just report all pending fields in case there are no obviously missing ones.
|
||||
let missingFields := if missingFields.isEmpty then pendingFields else missingFields
|
||||
let missing := missingFields |>.map (s!"`{·.fieldName}`") |>.toList
|
||||
let missingFieldsValues ← missingFields.mapM fun field => do
|
||||
if unsolvedFields.contains field.fieldName then
|
||||
pure <| (field.fieldName, some <| (← get).fieldMap.get! field.fieldName)
|
||||
else pure (field.fieldName, none)
|
||||
let missingFieldsHint ← mkMissingFieldsHint missingFieldsValues (← read).view.ref
|
||||
let msg := m!"Fields missing: {", ".intercalate missing}{assignErrorsMsg}{requiredErrorsMsg}{missingFieldsHint}"
|
||||
if (← readThe Term.Context).errToSorry then
|
||||
-- Assign all pending problems using synthetic sorries and log an error.
|
||||
for pendingField in pendingFields do
|
||||
if let some mvarId ← isFieldNotSolved? pendingField.fieldName then
|
||||
mvarId.assign <| ← mkLabeledSorry (← mvarId.getType) (synthetic := true) (unique := true)
|
||||
logError msg
|
||||
return
|
||||
else
|
||||
throwError msg
|
||||
return ← handleStuck pendingFields assignErrors
|
||||
pendingSet := pendingSet.filter (!toRemove.contains ·)
|
||||
pendingFields := pendingFields.filter fun pendingField => pendingField.val?.isNone || !toRemove.contains pendingField.fieldName
|
||||
where
|
||||
handleStuck (pendingFields : Array PendingField) (assignErrors : Array MessageData) : StructInstM Unit := do
|
||||
if (← read).unsynthesizedAsMVars then
|
||||
for pendingField in pendingFields do
|
||||
if let some mvarId ← isFieldNotSolved? pendingField.fieldName then
|
||||
registerFieldMVarError (.mvar mvarId) (← read).view.ref pendingField.fieldName
|
||||
return
|
||||
let assignErrorsMsg := MessageData.joinSep (assignErrors.map (m!"\n\n" ++ ·)).toList ""
|
||||
let mut requiredErrors : Array MessageData := #[]
|
||||
let mut unsolvedFields : Std.HashSet Name := {}
|
||||
for pendingField in pendingFields do
|
||||
if (← isFieldNotSolved? pendingField.fieldName).isNone then
|
||||
unsolvedFields := unsolvedFields.insert pendingField.fieldName
|
||||
let e := (← get).fieldMap.get! pendingField.fieldName
|
||||
requiredErrors := requiredErrors.push m!"\
|
||||
Field `{pendingField.fieldName}` must be explicitly provided; its synthesized value is{indentExpr e}"
|
||||
let requiredErrorsMsg := MessageData.joinSep (requiredErrors.map (m!"\n\n" ++ ·)).toList ""
|
||||
let missingFields := pendingFields |>.filter (fun pending => pending.val?.isNone)
|
||||
-- TODO(kmill): when fields are all stuck, report better.
|
||||
-- For now, just report all pending fields in case there are no obviously missing ones.
|
||||
let missingFields := if missingFields.isEmpty then pendingFields else missingFields
|
||||
let missing := missingFields |>.map (s!"`{·.fieldName}`") |>.toList
|
||||
let missingFieldsValues ← missingFields.mapM fun field => do
|
||||
if unsolvedFields.contains field.fieldName then
|
||||
pure <| (field.fieldName, some <| (← get).fieldMap.get! field.fieldName)
|
||||
else pure (field.fieldName, none)
|
||||
let missingFieldsHint ← mkMissingFieldsHint missingFieldsValues (← read).view.ref
|
||||
let msg := m!"Fields missing: {", ".intercalate missing}{assignErrorsMsg}{requiredErrorsMsg}{missingFieldsHint}"
|
||||
if (← readThe Term.Context).errToSorry then
|
||||
-- Assign all pending problems using synthetic sorries and log an error.
|
||||
for pendingField in pendingFields do
|
||||
if let some mvarId ← isFieldNotSolved? pendingField.fieldName then
|
||||
mvarId.assign <| ← mkLabeledSorry (← mvarId.getType) (synthetic := true) (unique := true)
|
||||
logError msg
|
||||
return
|
||||
else
|
||||
throwError msg
|
||||
|
||||
private def finalize : StructInstM Expr := withViewRef do
|
||||
let val := (← read).val.beta (← get).fields
|
||||
@@ -1049,19 +1059,13 @@ These fields can still be solved for by parent instance synthesis later.
|
||||
-/
|
||||
private def processNoField (loop : StructInstM α) (fieldName : Name) (binfo : BinderInfo) (fieldType : Expr) : StructInstM α := do
|
||||
trace[Elab.struct] "processNoField `{fieldName}` of type {fieldType}"
|
||||
if (← read).ellipsis && (← readThe Term.Context).inPattern then
|
||||
-- See the note in `ElabAppArgs.processExplicitArg`
|
||||
-- In ellipsis & pattern mode, do not use optParams or autoParams.
|
||||
let e ← addStructFieldMVar fieldName fieldType
|
||||
registerCustomErrorIfMVar e (← read).view.ref m!"don't know how to synthesize placeholder for field `{fieldName}`"
|
||||
loop
|
||||
else
|
||||
if (← read).useDefaults then
|
||||
let autoParam? := fieldType.getAutoParamTactic?
|
||||
let fieldType := fieldType.consumeTypeAnnotations
|
||||
if binfo.isInstImplicit then
|
||||
let e ← addStructFieldMVar fieldName fieldType .synthetic
|
||||
modify fun s => { s with instMVars := s.instMVars.push e.mvarId! }
|
||||
loop
|
||||
return ← loop
|
||||
else if let some (.const tacticDecl ..) := autoParam? then
|
||||
match evalSyntaxConstant (← getEnv) (← getOptions) tacticDecl with
|
||||
| .error err => throwError err
|
||||
@@ -1078,12 +1082,11 @@ private def processNoField (loop : StructInstM α) (fieldName : Name) (binfo : B
|
||||
-- (See `processExplicitArg` for a comment about this.)
|
||||
addTermInfo' stx mvar
|
||||
addStructFieldAux fieldName mvar
|
||||
loop
|
||||
else
|
||||
-- Default case: natural metavariable, register it for optParams
|
||||
discard <| addStructFieldMVar fieldName fieldType
|
||||
modify fun s => { s with optParamFields := s.optParamFields.push (fieldName, fieldType, binfo.isExplicit) }
|
||||
loop
|
||||
return ← loop
|
||||
-- Default case: natural metavariable, register it for optParams
|
||||
discard <| addStructFieldMVar fieldName fieldType
|
||||
modify fun s => { s with optParamFields := s.optParamFields.push (fieldName, fieldType, binfo.isExplicit) }
|
||||
loop
|
||||
|
||||
private partial def loop : StructInstM Expr := withViewRef do
|
||||
let type := (← get).type
|
||||
@@ -1178,8 +1181,7 @@ private partial def addParentInstanceFields : StructInstM Unit := do
|
||||
|
||||
private def main : StructInstM Expr := do
|
||||
initializeState
|
||||
unless (← read).ellipsis && (← readThe Term.Context).inPattern do
|
||||
-- Inside a pattern with ellipsis mode, users expect to match just the fields provided.
|
||||
if (← read).useParentInstanceFields then
|
||||
addParentInstanceFields
|
||||
loop
|
||||
|
||||
@@ -1198,7 +1200,17 @@ private def elabStructInstView (s : StructInstView) (structName : Name) (structT
|
||||
trace[Elab.struct] "expanded fields:\n{MessageData.joinSep (fields.toList.map (fun (_, field) => m!"- {MessageData.nestD (toMessageData field)}")) "\n"}"
|
||||
let ellipsis := s.sources.implicit.isSome
|
||||
let (val, _) ← main
|
||||
|>.run { view := s, structName, structType, levels, params, fieldViews := fields, val := ctorFn, ellipsis }
|
||||
|>.run { view := s, structName, structType, levels, params, fieldViews := fields, val := ctorFn
|
||||
-- See the note in `ElabAppArgs.processExplicitArg`
|
||||
-- For structure instances though, there's a sense in which app-style ellipsis mode is always enabled,
|
||||
-- so we do not specifically check for it to disable defaults.
|
||||
-- An effect of this is that if a user forgets `..` they'll be reminded with a "Fields missing" error.
|
||||
useDefaults := !(← readThe Term.Context).inPattern
|
||||
-- Similarly, for patterns we disable using parent instances to fill in fields
|
||||
useParentInstanceFields := !(← readThe Term.Context).inPattern
|
||||
-- In ellipsis mode, unsynthesized missing fields become metavariables, rather than being an error
|
||||
unsynthesizedAsMVars := ellipsis
|
||||
}
|
||||
|>.run { type := ctorFnType }
|
||||
return val
|
||||
|
||||
|
||||
@@ -190,7 +190,8 @@ private def getOptRotation (stx : Syntax) : Nat :=
|
||||
popScope
|
||||
|
||||
@[builtin_tactic Parser.Tactic.set_option] def elabSetOption : Tactic := fun stx => do
|
||||
let options ← Elab.elabSetOption stx[1] stx[3]
|
||||
let (options, decl) ← Elab.elabSetOption stx[1] stx[3]
|
||||
withRef stx[1] <| Elab.checkDeprecatedOption (stx[1].getId.eraseMacroScopes) decl
|
||||
withOptions (fun _ => options) do
|
||||
try
|
||||
evalTactic stx[5]
|
||||
|
||||
@@ -437,7 +437,8 @@ where
|
||||
replaceMainGoal [{ goal with mvarId }]
|
||||
|
||||
@[builtin_grind_tactic setOption] def elabSetOption : GrindTactic := fun stx => do
|
||||
let options ← Elab.elabSetOption stx[1] stx[3]
|
||||
let (options, decl) ← Elab.elabSetOption stx[1] stx[3]
|
||||
withRef stx[1] <| Elab.checkDeprecatedOption (stx[1].getId.eraseMacroScopes) decl
|
||||
withOptions (fun _ => options) do evalGrindTactic stx[5]
|
||||
|
||||
@[builtin_grind_tactic setConfig] def elabSetConfig : GrindTactic := fun stx => do
|
||||
|
||||
@@ -2124,11 +2124,14 @@ private def mkConsts (candidates : List (Name × List String)) (explicitLevels :
|
||||
let const ← withoutCheckDeprecated <| mkConst declName explicitLevels
|
||||
return (const, projs) :: result
|
||||
|
||||
def throwInvalidExplicitUniversesForLocal {α} (e : Expr) : TermElabM α :=
|
||||
throwError "invalid use of explicit universe parameters, `{e}` is a local variable"
|
||||
|
||||
def resolveName (stx : Syntax) (n : Name) (preresolved : List Syntax.Preresolved) (explicitLevels : List Level) (expectedType? : Option Expr := none) : TermElabM (List (Expr × List String)) := do
|
||||
addCompletionInfo <| CompletionInfo.id stx stx.getId (danglingDot := false) (← getLCtx) expectedType?
|
||||
if let some (e, projs) ← resolveLocalName n then
|
||||
unless explicitLevels.isEmpty do
|
||||
throwError "invalid use of explicit universe parameters, `{e}` is a local variable"
|
||||
throwInvalidExplicitUniversesForLocal e
|
||||
return [(e, projs)]
|
||||
let preresolved := preresolved.filterMap fun
|
||||
| .decl n projs => some (n, projs)
|
||||
|
||||
@@ -2255,13 +2255,13 @@ def finalizeImport (s : ImportState) (imports : Array Import) (opts : Options) (
|
||||
return data
|
||||
let numPrivateConsts := moduleData.foldl (init := 0) fun numPrivateConsts data =>
|
||||
numPrivateConsts + data.constants.size
|
||||
let numPrivateConsts := irData.foldl (init := numPrivateConsts) fun numPrivateConsts data =>
|
||||
numPrivateConsts + data.extraConstNames.size
|
||||
let numExtraConsts := irData.foldl (init := 0) fun numExtraConsts data =>
|
||||
numExtraConsts + data.extraConstNames.size
|
||||
let numPublicConsts := modules.foldl (init := 0) fun numPublicConsts mod => Id.run do
|
||||
if !mod.isExported then numPublicConsts else
|
||||
let some data := mod.publicModule? | numPublicConsts
|
||||
numPublicConsts + data.constants.size
|
||||
let mut const2ModIdx : Std.HashMap Name ModuleIdx := Std.HashMap.emptyWithCapacity (capacity := numPrivateConsts + numPublicConsts)
|
||||
let mut const2ModIdx : Std.HashMap Name ModuleIdx := Std.HashMap.emptyWithCapacity (capacity := numPrivateConsts + numExtraConsts)
|
||||
let mut privateConstantMap : Std.HashMap Name ConstantInfo := Std.HashMap.emptyWithCapacity (capacity := numPrivateConsts)
|
||||
let mut publicConstantMap : Std.HashMap Name ConstantInfo := Std.HashMap.emptyWithCapacity (capacity := numPublicConsts)
|
||||
for h : modIdx in *...moduleData.size do
|
||||
|
||||
@@ -314,7 +314,7 @@ def checkRegisterSimpAttr : SimpleHandler := mkSimpleHandler "simp attr"
|
||||
@[builtin_missing_docs_handler «in»]
|
||||
def handleIn : Handler := fun _ stx => do
|
||||
if stx[0].getKind == ``«set_option» then
|
||||
let opts ← Elab.elabSetOption stx[0][1] stx[0][3]
|
||||
let (opts, _) ← Elab.elabSetOption stx[0][1] stx[0][3]
|
||||
withScope (fun scope => { scope with opts }) do
|
||||
missingDocs.run stx[2]
|
||||
else
|
||||
|
||||
@@ -19,6 +19,7 @@ namespace Lean.Meta
|
||||
register_builtin_option backward.eqns.nonrecursive : Bool := {
|
||||
defValue := true
|
||||
descr := "Create fine-grained equational lemmas even for non-recursive definitions."
|
||||
deprecation? := some { since := "2026-03-30" }
|
||||
}
|
||||
|
||||
register_builtin_option backward.eqns.deepRecursiveSplit : Bool := {
|
||||
@@ -28,6 +29,7 @@ register_builtin_option backward.eqns.deepRecursiveSplit : Bool := {
|
||||
that do not contain recursive calls do not cause further splits in the \
|
||||
equational lemmas. This was the behavior before Lean 4.12, and the purpose of \
|
||||
this option is to help migrating old code."
|
||||
deprecation? := some { since := "2026-03-30" }
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -180,12 +180,14 @@ private def inferFVarType (fvarId : FVarId) : MetaM Expr := do
|
||||
|
||||
@[inline] private def checkInferTypeCache (e : Expr) (inferType : MetaM Expr) : MetaM Expr := do
|
||||
if !(← read).cacheInferType || e.hasMVar then
|
||||
Core.checkInterrupted
|
||||
inferType
|
||||
else
|
||||
let key ← mkExprConfigCacheKey e
|
||||
match (← get).cache.inferType.find? key with
|
||||
| some type => return type
|
||||
| none =>
|
||||
Core.checkInterrupted
|
||||
let type ← inferType
|
||||
unless type.hasMVar do
|
||||
modifyInferTypeCache fun c => c.insert key type
|
||||
|
||||
@@ -256,12 +256,12 @@ abbrev CounterExample := List Example
|
||||
def counterExampleToMessageData (cex : CounterExample) : MessageData :=
|
||||
examplesToMessageData cex
|
||||
|
||||
def counterExamplesToMessageData (cexs : List CounterExample) : MessageData :=
|
||||
MessageData.joinSep (cexs.map counterExampleToMessageData) Format.line
|
||||
def counterExamplesToMessageData (cexs : Array CounterExample) : MessageData :=
|
||||
MessageData.joinSep (cexs.toList.map counterExampleToMessageData) Format.line
|
||||
|
||||
structure MatcherResult where
|
||||
matcher : Expr -- The matcher. It is not just `Expr.const matcherName` because the type of the major premises may contain free variables.
|
||||
counterExamples : List CounterExample
|
||||
counterExamples : Array CounterExample
|
||||
unusedAltIdxs : List Nat
|
||||
addMatcher : MetaM Unit
|
||||
|
||||
|
||||
@@ -78,6 +78,14 @@ register_builtin_option backward.match.rowMajor : Bool := {
|
||||
it splits them from left to right, which can lead to unnecessary code bloat."
|
||||
}
|
||||
|
||||
register_builtin_option match.maxCounterExamples : Nat := {
|
||||
defValue := 5
|
||||
descr := "Maximum number of missing-case counter-examples to generate. \
|
||||
When this limit is reached, the match compiler stops exploring further \
|
||||
case splits for counter-example generation. Increase if you need to see \
|
||||
all missing cases."
|
||||
}
|
||||
|
||||
private def mkIncorrectNumberOfPatternsMsg [ToMessageData α]
|
||||
(discrepancyKind : String) (expected actual : Nat) (pats : List α) :=
|
||||
let patternsMsg := MessageData.joinSep (pats.map toMessageData) ", "
|
||||
@@ -202,7 +210,7 @@ structure State where
|
||||
Used during splitter generation to avoid going through all pairs of patterns.
|
||||
-/
|
||||
overlaps : Overlaps := {}
|
||||
counterExamples : List (List Example) := []
|
||||
counterExamples : Array (List Example) := #[]
|
||||
|
||||
/-- Return true if the given (sub-)problem has been solved. -/
|
||||
private def isDone (p : Problem) : Bool :=
|
||||
@@ -269,10 +277,16 @@ def isCurrVarInductive (p : Problem) : MetaM Bool := do
|
||||
let val? ← getInductiveVal? x
|
||||
return val?.isSome
|
||||
|
||||
private def isConstructorTransition (p : Problem) : MetaM Bool := do
|
||||
return (← isCurrVarInductive p)
|
||||
&& (hasCtorPattern p || p.alts.isEmpty)
|
||||
&& p.alts.all fun alt => match alt.patterns with
|
||||
private def isConstructorTransition (p : Problem) : StateRefT State MetaM Bool := do
|
||||
if !(← isCurrVarInductive p) then return false
|
||||
if p.alts.isEmpty then
|
||||
/- When there are no alternatives left and we have already accumulated enough
|
||||
counter-examples, stop exploring further case splits. This prevents
|
||||
combinatorial explosion when generating "missing cases" diagnostics. -/
|
||||
let maxCEx := match.maxCounterExamples.get (← getOptions)
|
||||
return (← get).counterExamples.size < maxCEx
|
||||
else
|
||||
return hasCtorPattern p && p.alts.all fun alt => match alt.patterns with
|
||||
| .ctor .. :: _ => true
|
||||
| .inaccessible _ :: _ => true -- should be a done pattern by now
|
||||
| _ => false
|
||||
@@ -467,7 +481,7 @@ where
|
||||
trace[Meta.Match.match] "contradiction succeeded"
|
||||
else
|
||||
trace[Meta.Match.match] "contradiction failed, missing alternative"
|
||||
modify fun s => { s with counterExamples := p.examples :: s.counterExamples }
|
||||
modify fun s => { s with counterExamples := s.counterExamples.push p.examples }
|
||||
| alt :: overlapped =>
|
||||
solveCnstrs p.mvarId alt
|
||||
for otherAlt in overlapped do
|
||||
|
||||
@@ -67,9 +67,9 @@ def notFollowedByRedefinedTermToken :=
|
||||
"token at 'do' element"
|
||||
|
||||
@[builtin_doElem_parser] def doLet := leading_parser
|
||||
"let " >> optional "mut " >> letDecl
|
||||
"let " >> optional "mut " >> letConfig >> letDecl
|
||||
@[builtin_doElem_parser] def doLetElse := leading_parser withPosition <|
|
||||
"let " >> optional "mut " >> termParser >> " := " >> termParser >>
|
||||
"let " >> optional "mut " >> letConfig >> termParser >> " := " >> termParser >>
|
||||
(checkColGe >> " | " >> doSeqIndent) >> optional (checkColGe >> doSeqIndent)
|
||||
|
||||
@[builtin_doElem_parser] def doLetExpr := leading_parser withPosition <|
|
||||
@@ -89,7 +89,7 @@ def doPatDecl := leading_parser
|
||||
atomic (termParser >> optType >> ppSpace >> leftArrow) >>
|
||||
doElemParser >> optional ((checkColGe >> " | " >> doSeqIndent) >> optional (checkColGe >> doSeqIndent))
|
||||
@[builtin_doElem_parser] def doLetArrow := leading_parser withPosition <|
|
||||
"let " >> optional "mut " >> (doIdDecl <|> doPatDecl)
|
||||
"let " >> optional "mut " >> letConfig >> (doIdDecl <|> doPatDecl)
|
||||
|
||||
/-
|
||||
We use `letIdDeclNoBinders` to define `doReassign`.
|
||||
@@ -114,7 +114,7 @@ def letIdDeclNoBinders := leading_parser
|
||||
@[builtin_doElem_parser] def doReassignArrow := leading_parser
|
||||
notFollowedByRedefinedTermToken >> (doIdDecl <|> doPatDecl)
|
||||
@[builtin_doElem_parser] def doHave := leading_parser
|
||||
"have" >> Term.letDecl
|
||||
"have" >> Term.letConfig >> Term.letDecl
|
||||
/-
|
||||
In `do` blocks, we support `if` without an `else`.
|
||||
Thus, we use indentation to prevent examples such as
|
||||
|
||||
@@ -882,13 +882,19 @@ the available context).
|
||||
-/
|
||||
def identProjKind := `Lean.Parser.Term.identProj
|
||||
|
||||
@[builtin_term_parser] def dotIdent := leading_parser
|
||||
"." >> checkNoWsBefore >> rawIdent
|
||||
|
||||
def isIdent (stx : Syntax) : Bool :=
|
||||
-- antiquotations should also be allowed where an identifier is expected
|
||||
stx.isAntiquot || stx.isIdent
|
||||
|
||||
def isIdentOrDotIdent (stx : Syntax) : Bool :=
|
||||
isIdent stx || stx.isOfKind ``dotIdent
|
||||
|
||||
/-- `x.{u, ...}` explicitly specifies the universes `u, ...` of the constant `x`. -/
|
||||
@[builtin_term_parser] def explicitUniv : TrailingParser := trailing_parser
|
||||
checkStackTop isIdent "expected preceding identifier" >>
|
||||
checkStackTop isIdentOrDotIdent "expected preceding identifier" >>
|
||||
checkNoWsBefore "no space before '.{'" >> ".{" >>
|
||||
sepBy1 levelParser ", " >> "}"
|
||||
/-- `x@e` or `x@h:e` matches the pattern `e` and binds its value to the identifier `x`.
|
||||
@@ -976,9 +982,6 @@ appropriate parameter for the underlying monad's `ST` effects, then passes it to
|
||||
@[builtin_term_parser] def dynamicQuot := withoutPosition <| leading_parser
|
||||
"`(" >> ident >> "| " >> incQuotDepth (parserOfStack 1) >> ")"
|
||||
|
||||
@[builtin_term_parser] def dotIdent := leading_parser
|
||||
"." >> checkNoWsBefore >> rawIdent
|
||||
|
||||
/--
|
||||
Implementation of the `show_term` term elaborator.
|
||||
-/
|
||||
|
||||
@@ -619,8 +619,9 @@ private partial def collectStructFields
|
||||
if s'.induct == parentName then
|
||||
let (fieldValues, fields) ← collectStructFields structName levels params fields fieldValues s'
|
||||
return (i + 1, fieldValues, fields)
|
||||
/- Does this field have a default value? and if so, can we omit the field? -/
|
||||
unless ← getPPOption getPPStructureInstancesDefaults do
|
||||
/- Does this field have a default value? and if so, can we omit the field?
|
||||
We cannot omit fields for patterns, since default values do not apply for them. -/
|
||||
unless ← pure (← read).inPattern <||> getPPOption getPPStructureInstancesDefaults do
|
||||
if let some defFn := getEffectiveDefaultFnForField? (← getEnv) structName fieldName then
|
||||
-- Use `withNewMCtxDepth` to prevent delaborator from solving metavariables.
|
||||
if let some (_, defValue) ← withNewMCtxDepth <| instantiateStructDefaultValueFn? defFn levels params (pure ∘ fieldValues.get?) then
|
||||
|
||||
@@ -641,13 +641,13 @@ def processGenericRequest : RunnerM Unit := do
|
||||
let params := params.setObjVal! "position" (toJson s.pos)
|
||||
logResponse s.method params
|
||||
|
||||
def processDirective (ws directive : String) (directiveTargetLineNo : Nat) : RunnerM Unit := do
|
||||
def processDirective (_ws directive : String) (directiveTargetLineNo : Nat)
|
||||
(directiveTargetColumn : Nat) : RunnerM Unit := do
|
||||
let directive := directive.drop 1
|
||||
let colon := directive.find ':'
|
||||
let method := directive.sliceTo colon |>.trimAscii |>.copy
|
||||
-- TODO: correctly compute in presence of Unicode
|
||||
let directiveTargetColumn := ws.rawEndPos + "--"
|
||||
let pos : Lsp.Position := { line := directiveTargetLineNo, character := directiveTargetColumn.byteIdx }
|
||||
let pos : Lsp.Position := { line := directiveTargetLineNo, character := directiveTargetColumn }
|
||||
let params :=
|
||||
if h : ¬colon.IsAtEnd then
|
||||
directive.sliceFrom (colon.next h) |>.trimAscii.copy
|
||||
@@ -686,10 +686,15 @@ def processLine (line : String) : RunnerM Unit := do
|
||||
match directive.front with
|
||||
| 'v' => pure <| (← get).lineNo + 1 -- TODO: support subsequent 'v'... or not
|
||||
| '^' => pure <| (← get).lastActualLineNo
|
||||
-- `⬑` is like `^` but targets the column of the `--` marker itself (i.e. column 0 when the
|
||||
-- marker is at the start of the line), rather than the column after `--`.
|
||||
| '⬑' => pure <| (← get).lastActualLineNo
|
||||
| _ =>
|
||||
skipLineWithoutDirective
|
||||
return
|
||||
processDirective ws directive directiveTargetLineNo
|
||||
let directiveTargetColumn :=
|
||||
if directive.front == '⬑' then ws.rawEndPos.byteIdx else (ws.rawEndPos + "--").byteIdx
|
||||
processDirective ws directive directiveTargetLineNo (directiveTargetColumn := directiveTargetColumn)
|
||||
skipLineWithDirective
|
||||
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import Lean.Server.Watchdog
|
||||
import Lean.Server.FileWorker
|
||||
import Lean.Compiler.LCNF.EmitC
|
||||
import Init.System.Platform
|
||||
import Lean.Compiler.Options
|
||||
|
||||
/- Lean companion to `shell.cpp` -/
|
||||
|
||||
@@ -340,7 +341,10 @@ def ShellOptions.process (opts : ShellOptions)
|
||||
| 'I' => -- `-I, --stdin`
|
||||
return {opts with useStdin := true}
|
||||
| 'r' => -- `--run`
|
||||
return {opts with run := true}
|
||||
return {opts with
|
||||
run := true
|
||||
-- can't get IR if it's postponed
|
||||
leanOpts := Compiler.compiler.postponeCompile.set opts.leanOpts false }
|
||||
| 'o' => -- `--o, olean=fname`
|
||||
return {opts with oleanFileName? := ← checkOptArg "o" optArg?}
|
||||
| 'i' => -- `--i, ilean=fname`
|
||||
|
||||
@@ -57,15 +57,19 @@ def setConfigOption (opts : Options) (arg : String) : IO Options := do
|
||||
|
||||
public def main (args : List String) : IO UInt32 := do
|
||||
let setupFile::irFile::c::optArgs := args | do
|
||||
IO.println s!"usage: leanir <setup.json> <module> <output.ir> <output.c> <-Dopt=val>..."
|
||||
IO.println s!"usage: leanir <setup.json> <output.ir> <output.c> [--stat] <-Dopt=val>..."
|
||||
return 1
|
||||
|
||||
let setup ← ModuleSetup.load setupFile
|
||||
let modName := setup.name
|
||||
|
||||
let mut printStats := false
|
||||
let mut opts := setup.options.toOptions
|
||||
for optArg in optArgs do
|
||||
opts ← setConfigOption opts optArg
|
||||
if optArg == "--stat" then
|
||||
printStats := true
|
||||
else
|
||||
opts ← setConfigOption opts optArg
|
||||
opts := Compiler.compiler.inLeanIR.set opts true
|
||||
opts := maxHeartbeats.set opts 0
|
||||
|
||||
@@ -127,12 +131,15 @@ public def main (args : List String) : IO UInt32 := do
|
||||
modifyEnv (postponedCompileDeclsExt.setState · (decls.foldl (fun s e => e.declNames.foldl (·.insert · e) s) {}))
|
||||
for decl in decls do
|
||||
for decl in decl.declNames do
|
||||
resumeCompilation decl
|
||||
try
|
||||
resumeCompilation decl (← getOptions)
|
||||
finally
|
||||
addTraceAsMessages
|
||||
for msg in (← Core.getAndEmptyMessageLog).unreported do
|
||||
IO.eprintln (← msg.toString)
|
||||
catch e =>
|
||||
unless e.isInterrupt do
|
||||
logError e.toMessageData
|
||||
finally
|
||||
addTraceAsMessages
|
||||
|
||||
let .ok (_, s) := res? | unreachable!
|
||||
let env := s.env
|
||||
@@ -155,4 +162,6 @@ public def main (args : List String) : IO UInt32 := do
|
||||
out.write data.toUTF8
|
||||
|
||||
displayCumulativeProfilingTimes
|
||||
if printStats then
|
||||
env.displayStats
|
||||
return 0
|
||||
|
||||
@@ -10,6 +10,8 @@ public import Init.Data.Random
|
||||
public import Std.Internal.Async.Basic
|
||||
import Init.Data.ByteArray.Extra
|
||||
import Init.Data.Array.Lemmas
|
||||
public import Std.Sync.Mutex
|
||||
public import Std.Sync.Barrier
|
||||
import Init.Omega
|
||||
|
||||
public section
|
||||
@@ -132,6 +134,8 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
|
||||
let gen := mkStdGen seed
|
||||
let selectables := shuffleIt selectables gen
|
||||
|
||||
let gate ← IO.Promise.new
|
||||
|
||||
for selectable in selectables do
|
||||
if let some val ← selectable.selector.tryFn then
|
||||
let result ← selectable.cont val
|
||||
@@ -141,6 +145,9 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
|
||||
let promise ← IO.Promise.new
|
||||
|
||||
for selectable in selectables do
|
||||
if ← finished.get then
|
||||
break
|
||||
|
||||
let waiterPromise ← IO.Promise.new
|
||||
let waiter := Waiter.mk finished waiterPromise
|
||||
selectable.selector.registerFn waiter
|
||||
@@ -157,18 +164,20 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
|
||||
let async : Async _ :=
|
||||
try
|
||||
let res ← IO.ofExcept res
|
||||
discard <| await gate.result?
|
||||
|
||||
for selectable in selectables do
|
||||
selectable.selector.unregisterFn
|
||||
|
||||
let contRes ← selectable.cont res
|
||||
promise.resolve (.ok contRes)
|
||||
promise.resolve (.ok (← selectable.cont res))
|
||||
catch e =>
|
||||
promise.resolve (.error e)
|
||||
|
||||
async.toBaseIO
|
||||
|
||||
Async.ofPromise (pure promise)
|
||||
gate.resolve ()
|
||||
let result ← Async.ofPromise (pure promise)
|
||||
return result
|
||||
|
||||
/--
|
||||
Performs fair and data-loss free non-blocking multiplexing on the `Selectable`s in `selectables`.
|
||||
@@ -224,6 +233,8 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
|
||||
let derivedWaiter := Waiter.mk waiter.finished waiterPromise
|
||||
selectable.selector.registerFn derivedWaiter
|
||||
|
||||
let barrier ← IO.Promise.new
|
||||
|
||||
discard <| IO.bindTask (t := waiterPromise.result?) fun res? => do
|
||||
match res? with
|
||||
| none => return (Task.pure (.ok ()))
|
||||
@@ -231,6 +242,7 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
|
||||
let async : Async _ := do
|
||||
let mainPromise := waiter.promise
|
||||
|
||||
await barrier
|
||||
for selectable in selectables do
|
||||
selectable.selector.unregisterFn
|
||||
|
||||
|
||||
@@ -6,4 +6,188 @@ Authors: Sofia Rodrigues
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Server
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP Library
|
||||
|
||||
A low-level HTTP/1.1 server implementation for Lean. This library provides a pure,
|
||||
sans-I/O protocol implementation that can be used with the `Async` library or with
|
||||
custom connection handlers.
|
||||
|
||||
## Overview
|
||||
|
||||
This module provides a complete HTTP/1.1 server implementation with support for:
|
||||
|
||||
- Request/response handling with directional streaming bodies
|
||||
- Keep-alive connections
|
||||
- Chunked transfer encoding
|
||||
- Header validation and management
|
||||
- Configurable timeouts and limits
|
||||
|
||||
**Sans I/O Architecture**: The core protocol logic doesn't perform any actual I/O itself -
|
||||
it just defines how data should be processed. This separation allows the protocol implementation
|
||||
to remain pure and testable, while different transports (TCP sockets, mock clients) handle
|
||||
the actual reading and writing of bytes.
|
||||
|
||||
## Quick Start
|
||||
|
||||
The main entry point is `Server.serve`, which starts an HTTP/1.1 server. Implement the
|
||||
`Server.Handler` type class to define how the server handles requests, errors, and
|
||||
`Expect: 100-continue` headers:
|
||||
|
||||
```lean
|
||||
import Std.Internal.Http
|
||||
|
||||
open Std Internal IO Async
|
||||
open Std Http Server
|
||||
|
||||
structure MyHandler
|
||||
|
||||
instance : Handler MyHandler where
|
||||
onRequest _ req := do
|
||||
Response.ok |>.text "Hello, World!"
|
||||
|
||||
def main : IO Unit := Async.block do
|
||||
let addr : Net.SocketAddress := .v4 ⟨.ofParts 127 0 0 1, 8080⟩
|
||||
let server ← Server.serve addr MyHandler.mk
|
||||
server.waitShutdown
|
||||
```
|
||||
|
||||
## Working with Requests
|
||||
|
||||
Incoming requests are represented by `Request Body.Stream`, which bundles the request
|
||||
line, parsed headers, and a lazily-consumed body. Headers are available immediately,
|
||||
while the body can be streamed or collected on demand, allowing handlers to process both
|
||||
small and large payloads efficiently.
|
||||
|
||||
### Reading Headers
|
||||
|
||||
```lean
|
||||
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
|
||||
-- Access request method and URI
|
||||
let method := req.head.method -- Method.get, Method.post, etc.
|
||||
let uri := req.head.uri -- RequestTarget
|
||||
|
||||
-- Read a specific header
|
||||
if let some contentType := req.head.headers.get? (.mk "content-type") then
|
||||
IO.println s!"Content-Type: {contentType}"
|
||||
|
||||
Response.ok |>.text "OK"
|
||||
```
|
||||
|
||||
### URI Query Semantics
|
||||
|
||||
`RequestTarget.query` is parsed using form-style key/value conventions (`k=v&...`), and `+` is decoded as a
|
||||
space in query components. If you need RFC 3986 opaque query handling, use the raw request target string
|
||||
(`toString req.head.uri`) and parse it with custom logic.
|
||||
|
||||
### Reading the Request Body
|
||||
|
||||
The request body is exposed as `Body.Stream`, which can be consumed incrementally or
|
||||
collected into memory. The `readAll` method reads the entire body, with an optional size
|
||||
limit to protect against unbounded payloads.
|
||||
|
||||
```lean
|
||||
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
|
||||
-- Collect entire body as a String
|
||||
let bodyStr : String ← req.body.readAll
|
||||
|
||||
-- Or with a maximum size limit
|
||||
let bodyStr : String ← req.body.readAll (maximumSize := some 1024)
|
||||
|
||||
Response.ok |>.text s!"Received: {bodyStr}"
|
||||
```
|
||||
|
||||
## Building Responses
|
||||
|
||||
Responses are constructed using a builder API that starts from a status code and adds
|
||||
headers and a body. Common helpers exist for text, HTML, JSON, and binary responses, while
|
||||
still allowing full control over status codes and header values.
|
||||
|
||||
Response builders produce `Async (Response Body.Stream)`.
|
||||
|
||||
```lean
|
||||
-- Text response
|
||||
Response.ok |>.text "Hello!"
|
||||
|
||||
-- HTML response
|
||||
Response.ok |>.html "<h1>Hello!</h1>"
|
||||
|
||||
-- JSON response
|
||||
Response.ok |>.json "{\"key\": \"value\"}"
|
||||
|
||||
-- Binary response
|
||||
Response.ok |>.bytes someByteArray
|
||||
|
||||
-- Custom status
|
||||
Response.new |>.status .created |>.text "Resource created"
|
||||
|
||||
-- With custom headers
|
||||
Response.ok
|
||||
|>.header! "X-Custom-Header" "value"
|
||||
|>.header! "Cache-Control" "no-cache"
|
||||
|>.text "Response with headers"
|
||||
```
|
||||
|
||||
### Streaming Responses
|
||||
|
||||
For large responses or server-sent events, use streaming:
|
||||
|
||||
```lean
|
||||
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
|
||||
Response.ok
|
||||
|>.header! "Content-Type" "text/plain"
|
||||
|>.stream fun stream => do
|
||||
for i in [0:10] do
|
||||
stream.send { data := s!"chunk {i}\n".toUTF8 }
|
||||
Async.sleep 1000
|
||||
stream.close
|
||||
```
|
||||
|
||||
## Server Configuration
|
||||
|
||||
Configure server behavior with `Config`:
|
||||
|
||||
```lean
|
||||
def config : Config := {
|
||||
maxRequests := 10000000,
|
||||
lingeringTimeout := 5000,
|
||||
}
|
||||
|
||||
let server ← Server.serve addr MyHandler.mk config
|
||||
```
|
||||
|
||||
## Handler Type Class
|
||||
|
||||
Implement `Server.Handler` to define how the server processes events. The class has three
|
||||
methods, all with default implementations:
|
||||
|
||||
- `onRequest` — called for each incoming request; returns a response inside `ContextAsync`
|
||||
- `onFailure` — called when an error occurs while processing a request
|
||||
- `onContinue` — called when a request includes an `Expect: 100-continue` header; return
|
||||
`true` to accept the body or `false` to reject it
|
||||
|
||||
```lean
|
||||
structure MyHandler where
|
||||
greeting : String
|
||||
|
||||
instance : Handler MyHandler where
|
||||
onRequest self req := do
|
||||
Response.ok |>.text self.greeting
|
||||
|
||||
onFailure self err := do
|
||||
IO.eprintln s!"Error: {err}"
|
||||
```
|
||||
|
||||
The handler methods operate in the following monads:
|
||||
|
||||
- `onRequest` uses `ContextAsync` — an asynchronous monad (`ReaderT CancellationContext Async`) that provides:
|
||||
- Full access to `Async` operations (spawning tasks, sleeping, concurrent I/O)
|
||||
- A `CancellationContext` tied to the client connection — when the client disconnects, the
|
||||
context is cancelled, allowing your handler to detect this and stop work early
|
||||
- `onFailure` uses `Async`
|
||||
- `onContinue` uses `Async`
|
||||
-/
|
||||
|
||||
@@ -7,7 +7,7 @@ module
|
||||
|
||||
prelude
|
||||
public import Init.Dynamic
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Basic
|
||||
public import Std.Data.TreeMap
|
||||
|
||||
open Lean
|
||||
|
||||
@@ -6,9 +6,11 @@ Authors: Sofia Rodrigues
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data.URI
|
||||
public import Std.Internal.Http.Data.Headers.Name
|
||||
public import Std.Internal.Http.Data.Headers.Value
|
||||
public import Std.Internal.Parsec.Basic
|
||||
import Init.Data.String.Search
|
||||
|
||||
public section
|
||||
|
||||
@@ -214,4 +216,97 @@ def serialize (connection : Connection) : Header.Name × Header.Value :=
|
||||
|
||||
instance : Header Connection := ⟨parse, serialize⟩
|
||||
|
||||
end Std.Http.Header.Connection
|
||||
end Connection
|
||||
|
||||
/--
|
||||
The `Host` header.
|
||||
|
||||
Represents the authority component of a URI:
|
||||
host [ ":" port ]
|
||||
|
||||
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-host-and-authority
|
||||
-/
|
||||
structure Host where
|
||||
/--
|
||||
Host name (reg-name, IPv4, or IPv6 literal).
|
||||
-/
|
||||
host : URI.Host
|
||||
|
||||
/--
|
||||
Optional port.
|
||||
-/
|
||||
port : URI.Port
|
||||
deriving Repr, BEq
|
||||
|
||||
namespace Host
|
||||
|
||||
/--
|
||||
Parses a `Host` header value.
|
||||
-/
|
||||
def parse (v : Value) : Option Host :=
|
||||
let parsed := (Std.Http.URI.Parser.parseHostHeader <* Std.Internal.Parsec.eof).run v.value.toUTF8
|
||||
match parsed with
|
||||
| .ok ⟨host, port⟩ => some ⟨host, port⟩
|
||||
| .error _ => none
|
||||
|
||||
/--
|
||||
Serializes a `Host` header back to a name and a value.
|
||||
-/
|
||||
def serialize (host : Host) : Header.Name × Header.Value :=
|
||||
let value := match host.port with
|
||||
| .value port => Header.Value.ofString! s!"{host.host}:{port}"
|
||||
| .empty => Header.Value.ofString! s!"{host.host}:"
|
||||
| .omitted => Header.Value.ofString! <| toString host.host
|
||||
|
||||
(.mk "host", value)
|
||||
|
||||
instance : Header Host := ⟨parse, serialize⟩
|
||||
|
||||
end Host
|
||||
|
||||
/--
|
||||
The `Expect` header.
|
||||
|
||||
Represents an expectation token.
|
||||
The only standardized expectation is `100-continue`.
|
||||
|
||||
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-expect
|
||||
-/
|
||||
structure Expect where
|
||||
|
||||
/--
|
||||
True if the client expects `100-continue`.
|
||||
-/
|
||||
expect : Bool
|
||||
deriving Repr, BEq
|
||||
|
||||
namespace Expect
|
||||
|
||||
/--
|
||||
Parses an `Expect` header.
|
||||
|
||||
Succeeds only if the value is exactly `100-continue`
|
||||
(case-insensitive, trimmed).
|
||||
-/
|
||||
def parse (v : Value) : Option Expect :=
|
||||
let normalized := v.value.trimAscii.toString.toLower
|
||||
|
||||
if normalized == "100-continue" then
|
||||
some ⟨true⟩
|
||||
else
|
||||
none
|
||||
|
||||
/--
|
||||
Serializes an `Expect` header.
|
||||
-/
|
||||
def serialize (e : Expect) : Header.Name × Header.Value :=
|
||||
if e.expect then
|
||||
(Header.Name.expect, Value.ofString! "100-continue")
|
||||
else
|
||||
(Header.Name.expect, Value.ofString! "")
|
||||
|
||||
instance : Header Expect := ⟨parse, serialize⟩
|
||||
|
||||
end Expect
|
||||
|
||||
end Std.Http.Header
|
||||
|
||||
@@ -8,6 +8,8 @@ module
|
||||
prelude
|
||||
public import Init.Data.ToString
|
||||
public import Std.Internal.Http.Internal
|
||||
import Init.Data.String.Search
|
||||
import Init.Data.String.Iter
|
||||
|
||||
public section
|
||||
|
||||
@@ -107,10 +109,10 @@ but since HTTP header names are case-insensitive, this always uses simple capita
|
||||
-/
|
||||
@[inline]
|
||||
def toCanonical (name : Name) : String :=
|
||||
let it := name.value.splitOn "-"
|
||||
|>.map (·.capitalize)
|
||||
let it := name.value.split '-'
|
||||
|>.map (·.copy.capitalize)
|
||||
|
||||
String.intercalate "-" it
|
||||
it.intercalateString "-"
|
||||
|
||||
/--
|
||||
Performs a case-insensitive comparison between a `Name` and a `String`. Returns `true` if they match.
|
||||
|
||||
@@ -10,6 +10,7 @@ import Init.Data.ToString
|
||||
public import Std.Net
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Data.URI.Encoding
|
||||
public import Init.Data.String.Search
|
||||
|
||||
public section
|
||||
|
||||
@@ -164,8 +165,8 @@ Proposition that asserts `s` is a valid dot-separated domain name.
|
||||
Each label must satisfy `IsValidDomainLabel`, and the full name must be at most 255 characters.
|
||||
-/
|
||||
abbrev IsValidDomainName (s : String) : Prop :=
|
||||
let labels := s.splitOn "."
|
||||
¬labels.isEmpty ∧ labels.all isValidDomainLabel ∧ s.length ≤ 255
|
||||
let labels := s.split '.'
|
||||
¬labels.isEmpty ∧ labels.all (fun s => isValidDomainLabel s.copy) ∧ s.length ≤ 255
|
||||
|
||||
/--
|
||||
A domain name represented as a validated, lowercase-normalized string.
|
||||
|
||||
@@ -12,7 +12,7 @@ import Init.Data.SInt.Lemmas
|
||||
import Init.Data.UInt.Lemmas
|
||||
import Init.Data.UInt.Bitwise
|
||||
import Init.Data.Array.Lemmas
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Basic
|
||||
public import Std.Internal.Http.Internal.Char
|
||||
|
||||
public section
|
||||
|
||||
@@ -7,11 +7,12 @@ module
|
||||
|
||||
prelude
|
||||
import Init.While
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Basic
|
||||
public import Std.Internal.Parsec
|
||||
public import Std.Internal.Parsec.ByteArray
|
||||
public import Std.Internal.Http.Data.URI.Basic
|
||||
public import Std.Internal.Http.Data.URI.Config
|
||||
import Init.Data.String.Search
|
||||
|
||||
public section
|
||||
|
||||
@@ -51,13 +52,13 @@ private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
|
||||
if config.maxSchemeLength = 0 then
|
||||
fail "scheme length limit is 0 (no scheme allowed)"
|
||||
|
||||
let first ← takeWhileUpTo1 isAlphaByte 1
|
||||
let rest ← takeWhileUpTo
|
||||
let first : UInt8 ← satisfy (fun b : UInt8 => Internal.Char.isAlphaByte b)
|
||||
let rest ← takeWhileAtMost
|
||||
(fun c =>
|
||||
isAlphaNum c ∨
|
||||
c = '+'.toUInt8 ∨ c = '-'.toUInt8 ∨ c = '.'.toUInt8)
|
||||
(config.maxSchemeLength - 1)
|
||||
let schemeBytes := first.toByteArray ++ rest.toByteArray
|
||||
let schemeBytes := ByteArray.empty.push first ++ rest.toByteArray
|
||||
let str := String.fromUTF8! schemeBytes |>.toLower
|
||||
|
||||
if h : URI.IsValidScheme str then
|
||||
@@ -67,7 +68,7 @@ private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
|
||||
|
||||
-- port = 1*DIGIT
|
||||
private def parsePortNumber : Parser UInt16 := do
|
||||
let portBytes ← takeWhileUpTo1 isDigitByte 5
|
||||
let portBytes ← takeWhileAtMost isDigitByte 5
|
||||
|
||||
let portStr := String.fromUTF8! portBytes.toByteArray
|
||||
|
||||
@@ -81,7 +82,7 @@ private def parsePortNumber : Parser UInt16 := do
|
||||
|
||||
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
|
||||
private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
|
||||
let userBytesName ← takeWhileUpTo
|
||||
let userBytesName ← takeWhileAtMost
|
||||
(fun x =>
|
||||
x ≠ ':'.toUInt8 ∧
|
||||
(isUserInfoChar x ∨ x = '%'.toUInt8))
|
||||
@@ -93,7 +94,7 @@ private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
|
||||
let userPassEncoded ← if ← peekIs (· == ':'.toUInt8) then
|
||||
skip
|
||||
|
||||
let userBytesPass ← takeWhileUpTo
|
||||
let userBytesPass ← takeWhileAtMost
|
||||
(fun x => isUserInfoChar x ∨ x = '%'.toUInt8)
|
||||
config.maxUserInfoLength
|
||||
|
||||
@@ -112,7 +113,7 @@ private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
|
||||
private def parseIPv6 : Parser Net.IPv6Addr := do
|
||||
skipByte '['.toUInt8
|
||||
|
||||
let result ← takeWhileUpTo1
|
||||
let result ← takeWhile1AtMost
|
||||
(fun x => x = ':'.toUInt8 ∨ x = '.'.toUInt8 ∨ isHexDigitByte x)
|
||||
256
|
||||
|
||||
@@ -126,7 +127,7 @@ private def parseIPv6 : Parser Net.IPv6Addr := do
|
||||
|
||||
-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
|
||||
private def parseIPv4 : Parser Net.IPv4Addr := do
|
||||
let result ← takeWhileUpTo1
|
||||
let result ← takeWhile1AtMost
|
||||
(fun x => x = '.'.toUInt8 ∨ isDigitByte x)
|
||||
256
|
||||
|
||||
@@ -147,8 +148,8 @@ private def parseHost (config : URI.Config) : Parser URI.Host := do
|
||||
if let some ipv4 ← tryOpt parseIPv4 then
|
||||
return .ipv4 ipv4
|
||||
|
||||
-- We intentionally parse DNS names here (not full RFC 3986 reg-name).
|
||||
let some str := String.fromUTF8? (← takeWhileUpTo1
|
||||
-- It needs to be a legal DNS label, so it differs from reg-name.
|
||||
let some str := String.fromUTF8? (← takeWhile1AtMost
|
||||
(fun x => isAlphaNum x ∨ x = '-'.toUInt8 ∨ x = '.'.toUInt8)
|
||||
config.maxHostLength).toByteArray
|
||||
| fail s!"invalid host"
|
||||
@@ -186,7 +187,7 @@ private def parseAuthority (config : URI.Config) : Parser URI.Authority := do
|
||||
|
||||
-- segment = *pchar
|
||||
private def parseSegment (config : URI.Config) : Parser ByteSlice := do
|
||||
takeWhileUpTo (fun c => isPChar c ∨ c = '%'.toUInt8) config.maxSegmentLength
|
||||
takeWhileAtMost (fun c => isPChar c ∨ c = '%'.toUInt8) config.maxSegmentLength
|
||||
|
||||
/-
|
||||
path = path-abempty ; begins with "/" or is empty
|
||||
@@ -271,7 +272,7 @@ def parsePath (config : URI.Config) (forceAbsolute : Bool) (allowEmpty : Bool) :
|
||||
-- query = *( pchar / "/" / "?" )
|
||||
private def parseQuery (config : URI.Config) : Parser URI.Query := do
|
||||
let queryBytes ←
|
||||
takeWhileUpTo (fun c => isQueryChar c ∨ c = '%'.toUInt8) config.maxQueryLength
|
||||
takeWhileAtMost (fun c => isQueryChar c ∨ c = '%'.toUInt8) config.maxQueryLength
|
||||
|
||||
let some queryStr := String.fromUTF8? queryBytes.toByteArray
|
||||
| fail "invalid query string"
|
||||
@@ -279,13 +280,13 @@ private def parseQuery (config : URI.Config) : Parser URI.Query := do
|
||||
if queryStr.isEmpty then
|
||||
return URI.Query.empty
|
||||
|
||||
let rawPairs := queryStr.splitOn "&"
|
||||
let rawPairs := queryStr.split '&'
|
||||
|
||||
if rawPairs.length > config.maxQueryParams then
|
||||
fail s!"too many query parameters (limit: {config.maxQueryParams})"
|
||||
|
||||
let pairs : Option URI.Query := rawPairs.foldlM (init := URI.Query.empty) fun acc pair => do
|
||||
match pair.splitOn "=" with
|
||||
let pairs : Option URI.Query := rawPairs.foldM (init := URI.Query.empty) fun acc pair => do
|
||||
match pair.split '=' |>.toStringList with
|
||||
| [key] =>
|
||||
let key ← URI.EncodedQueryParam.fromString? key
|
||||
pure (acc.insertEncoded key none)
|
||||
@@ -303,7 +304,7 @@ private def parseQuery (config : URI.Config) : Parser URI.Query := do
|
||||
-- fragment = *( pchar / "/" / "?" )
|
||||
private def parseFragment (config : URI.Config) : Parser URI.EncodedFragment := do
|
||||
let fragmentBytes ←
|
||||
takeWhileUpTo (fun c => isFragmentChar c ∨ c = '%'.toUInt8) config.maxFragmentLength
|
||||
takeWhileAtMost (fun c => isFragmentChar c ∨ c = '%'.toUInt8) config.maxFragmentLength
|
||||
|
||||
let some fragmentStr := URI.EncodedFragment.ofByteArray? fragmentBytes.toByteArray
|
||||
| fail "invalid percent encoding in fragment"
|
||||
|
||||
@@ -7,7 +7,7 @@ module
|
||||
|
||||
prelude
|
||||
import Init.Data.ToString
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Basic
|
||||
|
||||
public section
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ module
|
||||
|
||||
prelude
|
||||
public import Init.Data.Char
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Basic
|
||||
public import Init.Data.Int
|
||||
public import Init.Grind
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ module
|
||||
prelude
|
||||
import Init.Data.ToString
|
||||
import Init.Data.Array.Lemmas
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Basic
|
||||
public import Init.Data.ByteArray
|
||||
|
||||
public section
|
||||
|
||||
@@ -9,7 +9,8 @@ prelude
|
||||
import Init.Grind
|
||||
import Init.Data.Int.OfNat
|
||||
import Init.Data.UInt.Lemmas
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.Modify
|
||||
import Init.Data.String.Lemmas.Modify
|
||||
|
||||
@[expose]
|
||||
public section
|
||||
|
||||
@@ -7,7 +7,7 @@ module
|
||||
|
||||
prelude
|
||||
import Init.Grind
|
||||
public import Init.Data.String
|
||||
public import Init.Data.String.TakeDrop
|
||||
public import Std.Internal.Http.Internal.Char
|
||||
|
||||
public section
|
||||
|
||||
1625
src/Std/Internal/Http/Protocol/H1.lean
Normal file
1625
src/Std/Internal/Http/Protocol/H1.lean
Normal file
File diff suppressed because it is too large
Load Diff
134
src/Std/Internal/Http/Protocol/H1/Config.lean
Normal file
134
src/Std/Internal/Http/Protocol/H1/Config.lean
Normal file
@@ -0,0 +1,134 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Configuration
|
||||
|
||||
This module defines the configuration options for HTTP/1.1 protocol processing,
|
||||
including connection limits, header constraints, and various size limits.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
open Std Internal Parsec ByteArray
|
||||
open Internal
|
||||
|
||||
/--
|
||||
Connection limits and parser bounds configuration.
|
||||
-/
|
||||
structure Config where
|
||||
/--
|
||||
Maximum number of requests (server) or responses (client) per connection.
|
||||
-/
|
||||
maxMessages : Nat := 100
|
||||
|
||||
/--
|
||||
Maximum number of headers allowed per message.
|
||||
-/
|
||||
maxHeaders : Nat := 100
|
||||
|
||||
/--
|
||||
Maximum aggregate byte size of all header field lines in a single message
|
||||
(name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
|
||||
-/
|
||||
maxHeaderBytes : Nat := 65536
|
||||
|
||||
/--
|
||||
Whether to enable keep-alive connections by default.
|
||||
-/
|
||||
enableKeepAlive : Bool := true
|
||||
|
||||
/--
|
||||
The `Server` header value injected into outgoing responses (receiving mode) or the
|
||||
`User-Agent` header value injected into outgoing requests (sending mode).
|
||||
`none` suppresses the header entirely.
|
||||
-/
|
||||
agentName : Option Header.Value := none
|
||||
|
||||
/--
|
||||
Maximum length of request URI (default: 8192 bytes).
|
||||
-/
|
||||
maxUriLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum number of bytes consumed while parsing request/status start-lines (default: 8192 bytes).
|
||||
-/
|
||||
maxStartLineLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum length of header field name (default: 256 bytes).
|
||||
-/
|
||||
maxHeaderNameLength : Nat := 256
|
||||
|
||||
/--
|
||||
Maximum length of header field value (default: 8192 bytes).
|
||||
-/
|
||||
maxHeaderValueLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum number of spaces in delimiter sequences (default: 16).
|
||||
-/
|
||||
maxSpaceSequence : Nat := 16
|
||||
|
||||
/--
|
||||
Maximum number of leading empty lines (bare CRLF) to skip before a request-line
|
||||
(RFC 9112 §2.2 robustness). Default: 8.
|
||||
-/
|
||||
maxLeadingEmptyLines : Nat := 8
|
||||
|
||||
/--
|
||||
Maximum number of extensions on a single chunk-size line (default: 16).
|
||||
-/
|
||||
maxChunkExtensions : Nat := 16
|
||||
|
||||
/--
|
||||
Maximum length of chunk extension name (default: 256 bytes).
|
||||
-/
|
||||
maxChunkExtNameLength : Nat := 256
|
||||
|
||||
/--
|
||||
Maximum length of chunk extension value (default: 256 bytes).
|
||||
-/
|
||||
maxChunkExtValueLength : Nat := 256
|
||||
|
||||
/--
|
||||
Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
|
||||
-/
|
||||
maxChunkLineLength : Nat := 8192
|
||||
|
||||
/--
|
||||
Maximum allowed chunk payload size in bytes (default: 8 MiB).
|
||||
-/
|
||||
maxChunkSize : Nat := 8 * 1024 * 1024
|
||||
|
||||
/--
|
||||
Maximum allowed total body size per message in bytes (default: 64 MiB).
|
||||
This limit applies across all body framing modes. For chunked transfer encoding,
|
||||
chunk-size lines (including extensions) and the trailer section also count toward
|
||||
this limit, so the total wire bytes consumed by the body cannot exceed this value.
|
||||
-/
|
||||
maxBodySize : Nat := 64 * 1024 * 1024
|
||||
|
||||
/--
|
||||
Maximum length of reason phrase (default: 512 bytes).
|
||||
-/
|
||||
maxReasonPhraseLength : Nat := 512
|
||||
|
||||
/--
|
||||
Maximum number of trailer headers (default: 20).
|
||||
-/
|
||||
maxTrailerHeaders : Nat := 20
|
||||
|
||||
end Std.Http.Protocol.H1
|
||||
110
src/Std/Internal/Http/Protocol/H1/Error.lean
Normal file
110
src/Std/Internal/Http/Protocol/H1/Error.lean
Normal file
@@ -0,0 +1,110 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Errors
|
||||
|
||||
This module defines the error types for HTTP/1.1 protocol processing,
|
||||
including parsing errors, timeout errors, and connection errors.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
|
||||
Specific HTTP processing errors with detailed information.
|
||||
-/
|
||||
inductive Error
|
||||
/--
|
||||
Malformed start line (request-line or status-line).
|
||||
-/
|
||||
| invalidStatusLine
|
||||
|
||||
/--
|
||||
Invalid or malformed header.
|
||||
-/
|
||||
| invalidHeader
|
||||
|
||||
/--
|
||||
Request timeout occurred.
|
||||
-/
|
||||
| timeout
|
||||
|
||||
/--
|
||||
Request entity too large.
|
||||
-/
|
||||
| entityTooLarge
|
||||
|
||||
/--
|
||||
Request URI is too long.
|
||||
-/
|
||||
| uriTooLong
|
||||
|
||||
/--
|
||||
Unsupported HTTP version.
|
||||
-/
|
||||
| unsupportedVersion
|
||||
|
||||
/--
|
||||
Invalid chunk encoding.
|
||||
-/
|
||||
| invalidChunk
|
||||
|
||||
/--
|
||||
Connection closed.
|
||||
-/
|
||||
| connectionClosed
|
||||
|
||||
/--
|
||||
Bad request or response message.
|
||||
-/
|
||||
| badMessage
|
||||
|
||||
/--
|
||||
The number of header fields in the message exceeds the configured limit.
|
||||
Maps to HTTP 431 Request Header Fields Too Large.
|
||||
-/
|
||||
| tooManyHeaders
|
||||
|
||||
/--
|
||||
The aggregate byte size of all header fields exceeds the configured limit.
|
||||
Maps to HTTP 431 Request Header Fields Too Large.
|
||||
-/
|
||||
| headersTooLarge
|
||||
|
||||
/--
|
||||
Generic error with message.
|
||||
-/
|
||||
| other (message : String)
|
||||
deriving Repr, BEq
|
||||
|
||||
instance : ToString Error where
|
||||
toString
|
||||
| .invalidStatusLine => "Invalid status line"
|
||||
| .invalidHeader => "Invalid header"
|
||||
| .timeout => "Timeout"
|
||||
| .entityTooLarge => "Entity too large"
|
||||
| .uriTooLong => "URI too long"
|
||||
| .unsupportedVersion => "Unsupported version"
|
||||
| .invalidChunk => "Invalid chunk"
|
||||
| .connectionClosed => "Connection closed"
|
||||
| .badMessage => "Bad message"
|
||||
| .tooManyHeaders => "Too many headers"
|
||||
| .headersTooLarge => "Headers too large"
|
||||
| .other msg => s!"Other error: {msg}"
|
||||
|
||||
73
src/Std/Internal/Http/Protocol/H1/Event.lean
Normal file
73
src/Std/Internal/Http/Protocol/H1/Event.lean
Normal file
@@ -0,0 +1,73 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
public import Std.Internal.Http.Protocol.H1.Error
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Events
|
||||
|
||||
This module defines the events that can occur during HTTP/1.1 message processing,
|
||||
including header completion and control/error signals.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
Events emitted during HTTP message processing.
-/
inductive Event (dir : Direction)
  /--
  Indicates that all headers have been successfully parsed, carrying the parsed
  head (`Request.Head` or `Response.Head` depending on `dir`).
  -/
  | endHeaders (head : Message.Head dir)

  /--
  Signals that additional input data is required to continue processing.
  `size` is presumably a hint for how many bytes are still needed — confirm with producers.
  -/
  | needMoreData (size : Option Nat)

  /--
  Indicates a failure during parsing or processing.
  -/
  | failed (err : Error)

  /--
  Requests that the connection be closed.
  -/
  | close

  /--
  The body should be closed.
  -/
  | closeBody

  /--
  Indicates that a response is required.
  -/
  | needAnswer

  /--
  Indicates readiness to process the next message.
  -/
  | next

  /--
  Signals that an `Expect: 100-continue` decision is pending.
  -/
  | «continue»
  deriving Inhabited, Repr
|
||||
139
src/Std/Internal/Http/Protocol/H1/Message.lean
Normal file
139
src/Std/Internal/Http/Protocol/H1/Message.lean
Normal file
@@ -0,0 +1,139 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
import Init.Data.Array
|
||||
public import Std.Internal.Http.Data
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Message
|
||||
|
||||
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
|
||||
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
|
||||
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
|
||||
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
|
||||
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
|
||||
while exposing common operations such as headers, version, and `shouldKeepAlive`
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
Direction of message flow from the server's perspective.
-/
inductive Direction
  /--
  Receiving and parsing incoming requests from clients.
  -/
  | receiving

  /--
  Sending outgoing responses to clients (`Message.Head .sending` resolves to
  `Response.Head`).
  -/
  | sending
  deriving BEq
|
||||
|
||||
/--
Inverts the message direction: `receiving` becomes `sending` and vice versa.
-/
@[expose]
abbrev Direction.swap (d : Direction) : Direction :=
  match d with
  | .receiving => .sending
  | .sending => .receiving
|
||||
|
||||
/--
Gets the message head type based on direction: `Request.Head` when receiving
and `Response.Head` when sending.
-/
@[expose]
def Message.Head : Direction → Type
  | .receiving => Request.Head
  | .sending => Response.Head
|
||||
|
||||
/--
Gets the headers of a `Message`.

The match on `dir` is required so that `Message.Head dir` reduces to the
concrete head type in each branch.
-/
def Message.Head.headers (m : Message.Head dir) : Headers :=
  match dir with
  | .receiving => Request.Head.headers m
  | .sending => Response.Head.headers m
|
||||
|
||||
/--
Gets the HTTP version of a `Message`.

The match on `dir` is required so that `Message.Head dir` reduces to the
concrete head type in each branch.
-/
def Message.Head.version (m : Message.Head dir) : Version :=
  match dir with
  | .receiving => Request.Head.version m
  | .sending => Response.Head.version m
|
||||
|
||||
/--
Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag.

Returns `none` whenever framing is ambiguous or malformed (conflicting or
repeated `Content-Length`/`Transfer-Encoding` fields), which callers should
treat as an error rather than guessing.
-/
def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length :=
  let contentLength := message.headers.getAll? .contentLength

  match message.headers.getAll? .transferEncoding with
  | none =>
    match contentLength with
    | some #[cl] => .fixed <$> cl.value.toNat?
    | some _ => none -- To avoid request smuggling with malformed/multiple content-length headers.
    -- NOTE(review): with no framing headers at all, `allowEOFBody` yields a
    -- zero-length body (`.fixed 0`) rather than an EOF-delimited one — confirm
    -- callers expect this.
    | none => if allowEOFBody then some (.fixed 0) else none

  -- Single transfer-encoding header.
  | some #[header] =>
    let te := Header.TransferEncoding.parse header

    match Header.TransferEncoding.isChunked <$> te, contentLength with
    | some true, none =>
      -- HTTP/1.0 does not define chunked transfer encoding (RFC 2068 §19.4.6).
      -- A server MUST NOT use chunked with an HTTP/1.0 peer; likewise, an
      -- HTTP/1.0 request carrying Transfer-Encoding: chunked is malformed.
      if message.version == .v10 then none else some .chunked
    | _, _ => none -- To avoid request smuggling when TE and CL are mixed.

  -- We disallow multiple transfer-encoding headers.
  | some _ => none
|
||||
/--
Checks whether the message indicates that the connection should be kept alive.

Collects all `Connection` header tokens; a malformed `Connection` header makes
this return `false`. For HTTP/1.1 the connection persists unless `close` is
present; for older versions it persists only when `keep-alive` is present.
-/
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
  -- `none` signals that some Connection header failed to parse.
  let collected : Option (Array String) :=
    match message.headers.getAll? .connection with
    | none => some #[]
    | some values =>
      values.foldl
        (fun acc raw =>
          acc.bind fun toks =>
            (Header.Connection.parse raw).map fun parsed => toks ++ parsed.tokens)
        (some #[])

  match collected with
  | none => false
  | some toks =>
    if message.version == .v11 then
      !toks.contains "close"
    else
      toks.contains "keep-alive"
|
||||
|
||||
/-- `Repr` for `Message.Head`, delegating to the underlying head type per direction. -/
instance : Repr (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Repr Request.Head)
  | .sending => inferInstanceAs (Repr Response.Head)

/-- HTTP/1.1 wire encoding for `Message.Head`, delegating per direction. -/
instance : Internal.Encode .v11 (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head)
  | .sending => inferInstanceAs (Internal.Encode .v11 Response.Head)

/--
Default (empty) message head: a `GET` HTTP/1.1 request head when receiving,
and the default response head when sending.
-/
instance : EmptyCollection (Message.Head dir) where
  emptyCollection :=
    match dir with
    | .receiving => { method := .get, version := .v11 }
    | .sending => {}
|
||||
548
src/Std/Internal/Http/Protocol/H1/Parser.lean
Normal file
548
src/Std/Internal/Http/Protocol/H1/Parser.lean
Normal file
@@ -0,0 +1,548 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Parsec
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Parsec.ByteArray
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
|
||||
/-!
|
||||
This module defines parsers for HTTP/1.1 request and response lines, headers, and body framing. The
|
||||
reference used is https://httpwg.org/specs/rfc9112.html.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
open Std Internal Parsec ByteArray Internal Internal.Char
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
Checks if a byte may appear inside a field value.

This parser enforces strict ASCII-only field values and allows only `field-content`
(`HTAB / SP / VCHAR`).
-/
@[inline]
def isFieldVChar (c : UInt8) : Bool :=
  fieldContent (Char.ofUInt8 c)

/--
Checks if a byte may appear unescaped inside a quoted-string value.

Allows `HTAB / SP / %x21 / %x23-5B / %x5D-7E` (strict ASCII-only; no obs-text).
-/
@[inline]
def isQdText (c : UInt8) : Bool :=
  qdtext (Char.ofUInt8 c)

/--
Checks if a byte is optional whitespace (`OWS = SP / HTAB`, RFC 9110 §5.6.3).
-/
@[inline]
def isOwsByte (c : UInt8) : Bool :=
  ows (Char.ofUInt8 c)
|
||||
|
||||
-- Parser blocks
|
||||
|
||||
/--
Repeatedly applies `parser` until it returns `none` or the `maxCount` limit is
exceeded. Returns the collected results as an array.

Each step is wrapped in `attempt` so a failing inner parser backtracks and
terminates the loop instead of leaving the input half-consumed.
-/
partial def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
  let rec go (acc : Array α) : Parser (Array α) := do
    -- `none` here means the step yielded `none` (or failed) and was rolled back.
    let step ← optional <| attempt do
      match ← parser with
      | none => fail "end of items"
      | some x => return x

    match step with
    | none =>
      return acc
    | some x =>
      let acc := acc.push x

      -- Checked after the push so the error message can report the offending size.
      if acc.size > maxCount then
        fail s!"too many items: {acc.size} > {maxCount}"

      go acc
  go #[]
|
||||
|
||||
|
||||
/--
Lifts an `Option` into the parser monad, failing with a generic message if the value is `none`.
-/
def liftOption (x : Option α) : Parser α :=
  match x with
  | some res => pure res
  | none => fail "expected value but got none"
|
||||
|
||||
/--
Parses an HTTP token (RFC 9110 §5.6.2): one or more token characters, up to `limit` bytes.
Fails if the input starts with a non-token character or is empty.
-/
@[inline]
def parseToken (limit : Nat) : Parser ByteSlice :=
  takeWhileUpTo1 (fun c => tchar (Char.ofUInt8 c)) limit

/--
Parses a line terminator (strict `CRLF`; a bare LF is rejected).
-/
@[inline]
def crlf : Parser Unit := do
  skipBytes "\r\n".toUTF8
|
||||
|
||||
/--
Consumes and ignores empty lines (`CRLF`) that appear before a request-line.

https://httpwg.org/specs/rfc9112.html#rfc.section.2.2:

"In the interest of robustness, a server that is expecting to receive and parse a request-line SHOULD
ignore at least one empty line (CRLF) received prior to the request-line."

Bounded by `limits.maxLeadingEmptyLines`. Only lines starting with `\r` are
considered; a bare `\n` is left for the request-line parser to reject.
-/
def skipLeadingRequestEmptyLines (limits : H1.Config) : Parser Unit := do
  let mut count := 0
  while (← peekWhen? (· == '\r'.toUInt8)).isSome do
    if count >= limits.maxLeadingEmptyLines then
      fail "too many leading empty lines"
    crlf
    count := count + 1
|
||||
|
||||
/--
Parses a single space (SP, 0x20).
-/
@[inline]
def sp : Parser Unit :=
  skipByte ' '.toUInt8

/--
Parses optional whitespace (OWS = *(SP / HTAB), RFC 9110 §5.6.3), bounded by
`limits.maxSpaceSequence`. Fails if more whitespace follows the limit, so oversized
padding is rejected rather than silently truncated.
-/
@[inline]
def ows (limits : H1.Config) : Parser Unit := do
  discard <| takeWhileUpTo isOwsByte limits.maxSpaceSequence

  -- A remaining OWS byte means the sequence exceeded the limit.
  if (← peekWhen? isOwsByte) |>.isSome then
    fail "invalid space sequence"
  else
    pure ()
|
||||
|
||||
/--
Parses a single ASCII hex digit and returns its numeric value (`0`–`15`).
-/
def hexDigit : Parser UInt8 := do
  let b ← any
  if isHexDigitByte b then
    -- Map '0'-'9', 'A'-'F', 'a'-'f' onto 0..15.
    if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8
    else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
    else return b - 'a'.toUInt8 + 10
  else fail s!"invalid hex digit {Char.ofUInt8 b |>.quote}"

/--
Parses a hexadecimal integer (one or more hex digits, up to 16 digits).
Used for chunk-size lines in chunked transfer encoding.
-/
partial def hex : Parser Nat := do
  let rec go (acc : Nat) (count : Nat) : Parser Nat := do
    match ← optional (attempt hexDigit) with
    | some d =>
      -- 16 hex digits bound the value to 64 bits.
      if count + 1 > 16 then
        fail "chunk size too large"
      else
        go (acc * 16 + d.toNat) (count + 1)
    | none =>
      if count = 0 then
        -- Preserve EOF as incremental chunk-size parsing can request more data.
        -- For non-EOF invalid bytes, keep the specific parse failure.
        let _ ← peek!
        fail "expected hex digit"
      else
        return acc
  go 0 0
|
||||
|
||||
-- Actual parsers
|
||||
|
||||
/--
Parses `HTTP-version = HTTP-name "/" DIGIT "." DIGIT` and returns the major and
minor version numbers as a pair.
-/
def parseHttpVersionNumber : Parser (Nat × Nat) := do
  skipBytes "HTTP/".toUTF8
  let major ← digit
  skipByte '.'.toUInt8
  let minor ← digit
  -- Convert the ASCII digit bytes to their numeric values.
  return (major.toNat - '0'.toNat, minor.toNat - '0'.toNat)

/--
Parses an HTTP version string and returns the corresponding `Version` value.
Fails if the version is not recognized by `Version.ofNumber?`.
-/
def parseHttpVersion : Parser Version := do
  let pair ← parseHttpVersionNumber
  liftOption (Version.ofNumber? pair.1 pair.2)
|
||||
|
||||
/--
Parses an HTTP method (`method = token`).

Every branch is wrapped in `attempt` so that `<|>` always backtracks on
failure, even after consuming bytes. This is strictly necessary only for
alternatives that share a common prefix (e.g. the P-group: POST / PUT /
PATCH / PRI, or UPDATEREDIRECTREF before UPDATE), but wrapping all
alternatives keeps the parser defensively correct if new methods are added in
the future. An unrecognized token (up to 64 bytes) is consumed and reported
as a failure.
-/
def parseMethod : Parser Method :=
  (attempt <| skipBytes "GET".toUTF8 <&> fun _ => Method.get)
  <|> (attempt <| skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
  <|> (attempt <| skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
  <|> (attempt <| skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
  <|> (attempt <| skipBytes "ACL".toUTF8 <&> fun _ => Method.acl)
  <|> (attempt <| skipBytes "QUERY".toUTF8 <&> fun _ => Method.query)
  <|> (attempt <| skipBytes "SEARCH".toUTF8 <&> fun _ => Method.search)
  <|> (attempt <| skipBytes "BASELINE-CONTROL".toUTF8 <&> fun _ => Method.baselineControl)
  <|> (attempt <| skipBytes "BIND".toUTF8 <&> fun _ => Method.bind)
  <|> (attempt <| skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
  <|> (attempt <| skipBytes "CHECKIN".toUTF8 <&> fun _ => Method.checkin)
  <|> (attempt <| skipBytes "CHECKOUT".toUTF8 <&> fun _ => Method.checkout)
  <|> (attempt <| skipBytes "COPY".toUTF8 <&> fun _ => Method.copy)
  <|> (attempt <| skipBytes "LABEL".toUTF8 <&> fun _ => Method.label)
  <|> (attempt <| skipBytes "LINK".toUTF8 <&> fun _ => Method.link)
  <|> (attempt <| skipBytes "LOCK".toUTF8 <&> fun _ => Method.lock)
  <|> (attempt <| skipBytes "MERGE".toUTF8 <&> fun _ => Method.merge)
  <|> (attempt <| skipBytes "MKACTIVITY".toUTF8 <&> fun _ => Method.mkactivity)
  <|> (attempt <| skipBytes "MKCALENDAR".toUTF8 <&> fun _ => Method.mkcalendar)
  <|> (attempt <| skipBytes "MKCOL".toUTF8 <&> fun _ => Method.mkcol)
  <|> (attempt <| skipBytes "MKREDIRECTREF".toUTF8 <&> fun _ => Method.mkredirectref)
  <|> (attempt <| skipBytes "MKWORKSPACE".toUTF8 <&> fun _ => Method.mkworkspace)
  <|> (attempt <| skipBytes "MOVE".toUTF8 <&> fun _ => Method.move)
  <|> (attempt <| skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
  <|> (attempt <| skipBytes "ORDERPATCH".toUTF8 <&> fun _ => Method.orderpatch)
  <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
  <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
  <|> (attempt <| skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
  <|> (attempt <| skipBytes "PRI".toUTF8 <&> fun _ => Method.pri)
  <|> (attempt <| skipBytes "PROPFIND".toUTF8 <&> fun _ => Method.propfind)
  <|> (attempt <| skipBytes "PROPPATCH".toUTF8 <&> fun _ => Method.proppatch)
  <|> (attempt <| skipBytes "REBIND".toUTF8 <&> fun _ => Method.rebind)
  <|> (attempt <| skipBytes "REPORT".toUTF8 <&> fun _ => Method.report)
  <|> (attempt <| skipBytes "UNBIND".toUTF8 <&> fun _ => Method.unbind)
  <|> (attempt <| skipBytes "UNCHECKOUT".toUTF8 <&> fun _ => Method.uncheckout)
  <|> (attempt <| skipBytes "UNLINK".toUTF8 <&> fun _ => Method.unlink)
  <|> (attempt <| skipBytes "UNLOCK".toUTF8 <&> fun _ => Method.unlock)
  <|> (attempt <| skipBytes "UPDATEREDIRECTREF".toUTF8 <&> fun _ => Method.updateredirectref)
  <|> (attempt <| skipBytes "UPDATE".toUTF8 <&> fun _ => Method.update)
  <|> (attempt <| skipBytes "VERSION-CONTROL".toUTF8 <&> fun _ => Method.versionControl)
  <|> (parseToken 64 *> fail "unrecognized method")
|
||||
|
||||
/--
Parses a request-target URI, up to `limits.maxUriLength` bytes.
Fails with `"uri too long"` if the target exceeds the configured limit.

If exactly `maxUriLength` bytes were taken but the next byte is the delimiting
space, the target is accepted — only a longer target is rejected.
-/
def parseURI (limits : H1.Config) : Parser ByteArray := do
  let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
  if uri.size == limits.maxUriLength then
    if (← peekWhen? (· != ' '.toUInt8)) |>.isSome then
      fail "uri too long"

  return uri.toByteArray
|
||||
|
||||
/--
Shared core for request-line parsing: parses `request-target SP HTTP-version CRLF`
and returns the `RequestTarget` together with the raw major/minor version numbers.

Both `parseRequestLine` and `parseRequestLineRawVersion` call this after consuming
the method token, keeping URI validation and version parsing in one place.
-/
private def parseRequestLineBody (limits : H1.Config) : Parser (RequestTarget × Nat × Nat) := do
  let rawUri ← parseURI limits <* sp
  -- Re-run the structured request-target grammar over the raw bytes; `eof`
  -- ensures the whole target was consumed.
  let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run rawUri with
    | .ok res => pure res
    | .error res => fail res
  let versionPair ← parseHttpVersionNumber <* crlf
  return (uri, versionPair)
|
||||
|
||||
/--
Parses a request line and returns a fully-typed `Request.Head`.
`request-line = method SP request-target SP HTTP-version`

Only HTTP/1.1 and HTTP/1.0 are accepted; any other version fails. For parsing
where the version may be unrecognized, use `parseRequestLineRawVersion`.
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (uri, (major, minor)) ← parseRequestLineBody limits
  -- `&&` is the boolean conjunction; the previous `∧` coerced the `Bool`
  -- results to `Prop` and relied on `Decidable (And ..)` elaboration.
  if major == 1 && minor == 1 then
    return ⟨method, .v11, uri, .empty⟩
  else if major == 1 && minor == 0 then
    return ⟨method, .v10, uri, .empty⟩
  else
    fail "unsupported HTTP version"
|
||||
|
||||
/--
Parses a request line and returns the recognized HTTP method and version when available.
The version is `none` when `Version.ofNumber?` does not recognize the digits.

`request-line = method SP request-target SP HTTP-version`
-/
public def parseRequestLineRawVersion (limits : H1.Config) : Parser (Method × RequestTarget × Option Version) := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (uri, (major, minor)) ← parseRequestLineBody limits
  return (method, uri, Version.ofNumber? major minor)
|
||||
|
||||
/--
Parses a single header field line.

`field-line = field-name ":" OWS field-value OWS`

The value may be empty (the `optional` wrapper), and any trailing ASCII
whitespace left after the bounded `ows` is trimmed from the decoded string.
-/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
  let name ← parseToken limits.maxHeaderNameLength
  let value ← skipByte ':'.toUInt8 *> ows limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* ows limits

  let name ← liftOption <| String.fromUTF8? name.toByteArray
  let value ← liftOption <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty
  let value := value.trimAsciiEnd.toString

  return (name, value)
|
||||
|
||||
/--
Parses a single header field line, or returns `none` when it sees the blank line that
terminates the header section.

```
field-line = field-name ":" OWS field-value OWS CRLF
```

A leading bare `\n` takes the terminator branch but then fails inside `crlf`,
so non-CRLF line endings are rejected rather than tolerated.
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let next ← peek?
  if next == some '\r'.toUInt8 ∨ next == some '\n'.toUInt8 then
    crlf
    pure none
  else
    some <$> (parseFieldLine limits <* crlf)
|
||||
|
||||
/--
Parses a backslash-escaped character inside a quoted-string.

`quoted-pair = "\" ( HTAB / SP / VCHAR )` — strict ASCII-only (no obs-text).
-/
def parseQuotedPair : Parser UInt8 := do
  skipByte '\\'.toUInt8
  let b ← any

  unless quotedPairChar (Char.ofUInt8 b) do
    fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}"

  return b
|
||||
|
||||
/--
Parses a quoted-string value, unescaping quoted-pairs.

`quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE`

`maxLength` bounds the number of *decoded* bytes (escapes count once).
-/
partial def parseQuotedString (maxLength : Nat) : Parser String := do
  skipByte '"'.toUInt8

  -- `buf` accumulates decoded bytes; `length` tracks the decoded size for the bound.
  let rec loop (buf : ByteArray) (length : Nat) : Parser ByteArray := do
    let b ← any

    if b == '"'.toUInt8 then
      -- Closing quote: done.
      return buf
    else if b == '\\'.toUInt8 then
      -- Escaped byte (quoted-pair): accept only HTAB / SP / VCHAR.
      let next ← any
      if quotedPairChar (Char.ofUInt8 next)
      then
        let length := length + 1
        if length > maxLength then
          fail "quoted-string too long"
        else
          loop (buf.push next) length
      else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}"
    else if isQdText b then
      let length := length + 1
      if length > maxLength then
        fail "quoted-string too long"
      else
        loop (buf.push b) length
    else
      fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}"

  liftOption <| String.fromUTF8? (← loop .empty 0)
|
||||
|
||||
/--
Parses one chunk extension (name plus optional value).

`chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )`
-/
def parseChunkExt (limits : H1.Config) : Parser (Chunk.ExtensionName × Option Chunk.ExtensionValue) := do
  ows limits *> skipByte ';'.toUInt8 *> ows limits
  let name ← (liftOption =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtNameLength) <* ows limits

  let some name := Chunk.ExtensionName.ofString? name
    | fail "invalid extension name"

  if (← peekWhen? (· == '='.toUInt8)) |>.isSome then
    -- RFC 9112 §7.1.1: BWS is allowed around "=".
    -- The `<* ows limits` after the name already consumed any trailing whitespace,
    -- so these ows calls are no-ops in practice, but kept for explicit grammar correspondence.
    ows limits *> skipByte '='.toUInt8 *> ows limits
    let value ← ows limits *> (parseQuotedString limits.maxChunkExtValueLength <|> liftOption =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtValueLength))

    let some value := Chunk.ExtensionValue.ofString? value
      | fail "invalid extension value"

    -- Early return out of the `do` block: extension with a value.
    return (name, some value)

  -- No "=" followed the name: extension without a value.
  return (name, none)
|
||||
|
||||
/--
Parses the size and extensions of a chunk (the chunk-size line, including the
trailing CRLF). Extension count is bounded by `limits.maxChunkExtensions`.
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) := do
  let size ← hex
  let ext ← manyItems (optional (attempt (parseChunkExt limits))) limits.maxChunkExtensions
  crlf
  return (size, ext)
|
||||
|
||||
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
  /-- All requested bytes were available. -/
  | complete (data : ByteSlice)
  /-- Only part of the data was available; `remaining` bytes are still needed. -/
  | incomplete (data : ByteSlice) (remaining : Nat)
|
||||
|
||||
/--
Parses a single chunk in chunked transfer encoding.

Returns `none` for the last chunk (size `0`); otherwise returns the declared
size, the parsed extensions, and the chunk payload (without the trailing CRLF).
-/
public def parseChunkPartial (limits : H1.Config) : Parser (Option (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue) × ByteSlice)) := do
  let (size, ext) ← parseChunkSize limits
  if size == 0 then
    return none
  else
    let data ← take size
    return some ⟨size, ext, data⟩
|
||||
|
||||
/--
Parses fixed-size data that can be incomplete.

Operates directly on the iterator: on empty input it preserves EOF (so
incremental callers can request more data); if fewer than `size` bytes remain
it consumes them all and reports how many are still missing; otherwise it
consumes exactly `size` bytes.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
  if it.remainingBytes = 0 then
    .error it .eof
  else if it.remainingBytes < size then
    .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
  else
    .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
|
||||
|
||||
/--
Parses fixed-size chunk data that can be incomplete. A complete chunk must be
followed by its terminating CRLF; an incomplete one is returned as-is.
-/
public def parseChunkSizedData (size : Nat) : Parser TakeResult := do
  let result ← parseFixedSizeData size
  match result with
  | .complete data =>
    crlf
    return .complete data
  | .incomplete data remaining =>
    return .incomplete data remaining
|
||||
|
||||
/--
Returns `true` if `name` (compared case-insensitively) is a field that MUST NOT appear in HTTP/1.1
trailer sections per RFC 9112 §6.5. Forbidden fields are those required for message framing
(`content-length`, `transfer-encoding`), routing (`host`), or connection management (`connection`).
-/
private def isForbiddenTrailerField (name : String) : Bool :=
  #["content-length", "transfer-encoding", "host",
    "connection", "expect", "te",
    "authorization", "max-forwards", "cache-control",
    "content-encoding", "upgrade", "trailer"].contains name.toLower
|
||||
|
||||
/--
Parses a trailer header (used after a chunked body), rejecting forbidden field names per RFC 9112
§6.5. Fields used for message framing (`content-length`, `transfer-encoding`), routing (`host`),
or connection management (`connection`, `te`, `upgrade`) are rejected to prevent trailer injection
attacks where a downstream proxy might re-interpret them.

Returns `none` at the blank line ending the trailer section (via `parseSingleHeader`).
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let result ← parseSingleHeader limits
  if let some (name, _) := result then
    if isForbiddenTrailerField name then
      fail s!"forbidden trailer field: {name}"
  return result
|
||||
|
||||
/--
Parses trailer headers after a chunked body and returns them as an array of name-value pairs.

This is exposed for callers that need the trailer values directly (e.g. clients). The
internal protocol machine uses `parseLastChunkBody` instead, which discards trailer values.
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) :=
  manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders <* crlf
|
||||
|
||||
/--
Returns `true` if `c` is a valid reason-phrase byte (`HTAB / SP / VCHAR`, strict ASCII-only).
-/
@[inline]
def isReasonPhraseByte (c : UInt8) : Bool :=
  fieldContent (Char.ofUInt8 c)

/--
Parses a reason phrase (text after status code), bounded by
`limits.maxReasonPhraseLength`.

Allows only `HTAB / SP / VCHAR` bytes (strict ASCII-only).
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
  let bytes ← takeWhileUpTo isReasonPhraseByte limits.maxReasonPhraseLength
  liftOption <| String.fromUTF8? bytes.toByteArray
|
||||
|
||||
/--
Parses a status-code (3 decimal digits), the following reason phrase, and the
terminating CRLF; returns a typed `Status`.

Fails when the code is not recognized by `Status.ofCode` or the reason phrase
does not satisfy `IsValidReasonPhrase`.
-/
def parseStatusCode (limits : H1.Config) : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- Combine the ASCII digits (48 = '0') into the numeric code.
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  sp
  let phrase ← parseReasonPhrase limits <* crlf

  if h : IsValidReasonPhrase phrase then
    if let some status := Status.ofCode (some ⟨phrase, h⟩) code.toUInt16 then
      return status

  fail "invalid status code"
|
||||
|
||||
/--
Parses a status line and returns a fully-typed `Response.Head`.
`status-line = HTTP-version SP status-code SP [ reason-phrase ]`
Accepts only HTTP/1.1 and HTTP/1.0. For parsing where the version may be
unrecognized and must be mapped to an error event, use `parseStatusLineRawVersion`.
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  -- `&&` is the boolean conjunction; the previous `∧` coerced the `Bool`
  -- results to `Prop` and relied on `Decidable (And ..)` elaboration.
  if major == 1 && minor == 1 then
    return { status, version := .v11, headers := .empty }
  else if major == 1 && minor == 0 then
    return { status, version := .v10, headers := .empty }
  else
    fail "unsupported HTTP version"
|
||||
|
||||
/--
Parses a status line and returns the status code plus recognized HTTP version when available.
Consumes and discards the reason phrase.

`status-line = HTTP-version SP status-code SP [ reason-phrase ] CRLF`
-/
public def parseStatusLineRawVersion (limits : H1.Config) : Parser (Status × Option Version) := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  return (status, Version.ofNumber? major minor)
|
||||
|
||||
/--
Parses the trailer section that follows the last chunk size line (`0\r\n`),
discarding the trailer values and consuming the final CRLF.
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit :=
  manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders *> crlf
|
||||
|
||||
end Std.Http.Protocol.H1
|
||||
319
src/Std/Internal/Http/Protocol/H1/Reader.lean
Normal file
319
src/Std/Internal/Http/Protocol/H1/Reader.lean
Normal file
@@ -0,0 +1,319 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
public import Std.Internal.Http.Protocol.H1.Error
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Reader
|
||||
|
||||
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
|
||||
It tracks the parsing state including start line, headers, and body handling for
|
||||
both fixed-length and chunked transfer encodings.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
The body-framing sub-state of the `Reader` state machine, selecting how the
message body bytes are delimited on the wire.
-/
inductive Reader.BodyState where
  /--
  Parse fixed-length (`Content-Length`) body bytes, tracking the number of bytes remaining.
  -/
  | fixed (remaining : Nat)
  /--
  Parse the next chunk-size line in chunked transfer encoding.
  -/
  | chunkedSize
  /--
  Parse chunk data for the current chunk, carrying the chunk extensions parsed
  from its size line and the number of payload bytes remaining.
  -/
  | chunkedBody (ext : Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) (remaining : Nat)
  /--
  Parse body bytes until EOF (connection close delimits the body).
  -/
  | closeDelimited
  deriving Inhabited, Repr, BEq
|
||||
|
||||
/--
The state of the `Reader` state machine, parameterized by the message
`Direction` being parsed.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for the HTTP start line.
  -/
  | needStartLine : State dir
  /--
  State waiting for HTTP headers, tracking the number of headers parsed so far.
  -/
  | needHeader : Nat → State dir
  /--
  Unified body-reading state; the framing details live in `Reader.BodyState`.
  -/
  | readBody : Reader.BodyState → State dir
  /--
  Paused waiting for a `canContinue` decision, carrying the state to resume with.
  -/
  | continue : State dir → State dir
  /--
  State waiting to be able to read new data.
  -/
  | pending : State dir
  /--
  A single request or response completed; the machine can move on to the next one.
  -/
  | complete
  /--
  Terminal state: the reader cannot process more data.
  -/
  | closed
  /--
  The input is malformed; carries the parse error.
  -/
  | failed (error : Error) : State dir
  deriving Inhabited, Repr, BEq
|
||||
|
||||
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
  /--
  The current state of the machine. A receiving side starts by expecting a
  start line; a sending side starts pending.
  -/
  state : Reader.State dir := match dir with | .receiving => .needStartLine | .sending => .pending
  /--
  Iterator over the input byte buffer; its position marks how far parsing has consumed.
  -/
  input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter
  /--
  The incoming message head being accumulated (start line + headers).
  -/
  messageHead : Message.Head dir := {}
  /--
  Count of messages that this connection has already parsed.
  -/
  messageCount : Nat := 0
  /--
  Number of body bytes read for the current message.
  -/
  bodyBytesRead : Nat := 0
  /--
  Number of header bytes accumulated for the current message.
  Counts name + value bytes plus 4 bytes per line for `: ` and `\r\n`.
  -/
  headerBytesRead : Nat := 0
  /--
  Set when no further input bytes will arrive (the remote end has closed the connection).
  -/
  noMoreInput : Bool := false
|
||||
|
||||
namespace Reader
|
||||
|
||||
/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
  if let .closed := reader.state then true else false

/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
  if let .complete := reader.state then true else false

/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
  if let .failed _ := reader.state then true else false
|
||||
|
||||
/--
Feeds new data into the reader's input buffer.
If the current input is exhausted, replaces it; otherwise compacts the buffer
by discarding already-parsed bytes before appending.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
  let newInput :=
    if reader.input.atEnd then
      -- Nothing unparsed remains: start fresh from the new bytes.
      data.iter
    else
      -- Keep only the unconsumed tail, then append the new bytes.
      let leftover := reader.input.array.extract reader.input.pos reader.input.array.size
      (leftover ++ data).iter
  { reader with input := newInput }
|
||||
|
||||
/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
  { reader with input := input }

/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
  { reader with messageHead := messageHead }
|
||||
|
||||
/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
  -- NOTE(review): both arms are identical; presumably the match on `dir` is needed so
  -- `Message.Head dir` reduces definitionally before the record update — confirm before
  -- simplifying this to a plain update.
  match dir with
  | .sending | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
|
||||
|
||||
/--
Closes the reader, transitioning to the closed state. Also records that no
further input will arrive.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true, state := .closed }

/--
Marks the current message as complete and prepares for the next message by
bumping the per-connection message counter.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
  { reader with messageCount := reader.messageCount + 1, state := .complete }

/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
  { reader with state := .failed error }
|
||||
|
||||
/--
Resets the reader to parse a new message on the same connection: the state
goes back to expecting a start line and the per-message counters and head
are cleared. `messageCount` and the input buffer are preserved.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
  { reader with
      state := .needStartLine
      messageHead := {}
      bodyBytesRead := 0
      headerBytesRead := 0 }
|
||||
|
||||
/--
Checks if more input is needed to continue parsing: the buffer is exhausted,
the peer has not closed, and the machine is in a state that consumes bytes.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
  let wantsBytes :=
    match reader.state with
    | .complete | .closed | .failed _ | .«continue» _ => false
    | _ => true
  reader.input.atEnd && !reader.noMoreInput && wantsBytes

/--
Returns the current parse error if the reader has failed.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
  if let .failed err := reader.state then some err else none
|
||||
|
||||
/--
Gets the number of bytes remaining (unconsumed) in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
  let iter := reader.input
  iter.array.size - iter.pos

/--
Advances the input iterator by `n` bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
  let advanced := reader.input.forward n
  { reader with input := advanced }
|
||||
|
||||
/--
Transitions to the state for reading headers, resetting the per-message byte
counters.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
  { reader with
      state := .needHeader 0
      bodyBytesRead := 0
      headerBytesRead := 0 }

/--
Accounts for `n` more body bytes parsed for the current message.
-/
@[inline]
def addBodyBytes (n : Nat) (reader : Reader dir) : Reader dir :=
  let total := reader.bodyBytesRead + n
  { reader with bodyBytesRead := total }

/--
Accounts for `n` more header bytes accumulated for the current message.
-/
@[inline]
def addHeaderBytes (n : Nat) (reader : Reader dir) : Reader dir :=
  let total := reader.headerBytesRead + n
  { reader with headerBytesRead := total }
|
||||
|
||||
/--
Transitions to the state for reading a fixed-length (`Content-Length`) body of
`size` bytes.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
  let next := Reader.BodyState.fixed size
  { reader with state := .readBody next }

/--
Transitions to the state for reading chunked transfer encoding, starting with
the first chunk-size line.
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
  let next := Reader.BodyState.chunkedSize
  { reader with state := .readBody next }

/--
Marks that no more input will be provided (connection closed by the peer).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true }
|
||||
|
||||
/--
Checks if the connection should be kept alive for the next message.
Delegates to the parsed message head's own keep-alive decision.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
  reader.messageHead.shouldKeepAlive
|
||||
|
||||
end Reader
|
||||
280
src/Std/Internal/Http/Protocol/H1/Writer.lean
Normal file
280
src/Std/Internal/Http/Protocol/H1/Writer.lean
Normal file
@@ -0,0 +1,280 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Http.Internal
|
||||
public import Std.Internal.Http.Protocol.H1.Parser
|
||||
public import Std.Internal.Http.Protocol.H1.Config
|
||||
public import Std.Internal.Http.Protocol.H1.Message
|
||||
public import Std.Internal.Http.Protocol.H1.Error
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP/1.1 Writer
|
||||
|
||||
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
|
||||
It handles encoding headers and body content for both fixed-length and chunked
|
||||
transfer encodings.
|
||||
-/
|
||||
|
||||
namespace Std.Http.Protocol.H1
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
open Internal
|
||||
|
||||
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
  /--
  Initial state before any outgoing message has been prepared.
  -/
  | pending
  /--
  Waiting for the application to provide the outgoing message head via `send`.
  -/
  | waitingHeaders
  /--
  The message head has been provided; waiting for `shouldFlush` to become true before
  serializing headers to output.
  -/
  | waitingForFlush
  /--
  Writing the body output, with the framing mode (fixed-length or chunked) recorded.
  -/
  | writingBody (mode : Body.Length)
  /--
  Completed writing a single message and ready to begin the next one.
  -/
  | complete
  /--
  Closed; no further data can be written.
  -/
  | closed
  deriving Inhabited, Repr, BEq
|
||||
|
||||
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
  /--
  Body chunks supplied by the user, accumulated before being flushed to output.
  -/
  userData : Array Chunk := .empty
  /--
  All the data produced by the writer, ready to be sent to the socket.
  -/
  outputData : ChunkedBuffer := .empty
  /--
  The state of the writer machine. A receiving endpoint's writer starts pending;
  a sending endpoint's writer starts waiting for headers.
  -/
  state : Writer.State := match dir with | .receiving => .pending | .sending => .waitingHeaders
  /--
  When the user specifies the exact body size upfront, `Content-Length` framing is
  used instead of chunked transfer encoding.
  -/
  knownSize : Option Body.Length := none
  /--
  The outgoing message that will be written to the output. Note the direction is
  swapped: a receiving connection writes messages of the opposite direction.
  -/
  messageHead : Message.Head dir.swap := {}
  /--
  Whether the user has called `send` to provide the outgoing message head.
  -/
  sentMessage : Bool := false
  /--
  Set when the user has finished sending body data, allowing fixed-size framing
  to be determined upfront.
  -/
  userClosedBody : Bool := false
  /--
  When `true`, body bytes are intentionally omitted from the wire for this message
  (e.g. HEAD responses), while headers/framing metadata may still describe the
  hypothetical representation.
  -/
  omitBody : Bool := false
  /--
  Running total of bytes across all `userData` chunks, maintained incrementally
  to avoid an O(n) fold on every fixed-length write step.
  -/
  userDataBytes : Nat := 0
|
||||
|
||||
namespace Writer
|
||||
|
||||
/--
Returns `true` when no more user body data will arrive: either the user called
`closeBody`, or the writer has already transitioned to `complete` or `closed`.

Note: this does **not** mean the wire is ready to accept new bytes — a `closed`
writer cannot send anything. Use this to decide whether to flush pending body
data rather than to check writability.
-/
@[inline]
def noMoreUserData {dir} (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed => true
  | .complete => true
  | _ => writer.userClosedBody

/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
  if let .closed := writer.state then true else false

/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
  if let .complete := writer.state then true else false
|
||||
|
||||
/--
Checks if the writer can accept more data from the user: before the body starts
it always can; while writing the body it can until the user closes the body.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
  match writer.state with
  | .waitingHeaders | .waitingForFlush => true
  | .writingBody _ => !writer.userClosedBody
  | _ => false

/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
  { writer with userClosedBody := true }
|
||||
|
||||
/--
Determines the transfer encoding mode: an explicit `knownSize` wins; otherwise a
closed body gets fixed framing from the buffered byte count, and an open body
defaults to chunked.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
  match writer.knownSize with
  | some mode => mode
  | none =>
    if writer.userClosedBody then
      .fixed writer.userDataBytes
    else
      .chunked

/--
Adds user data chunks to the writer's buffer if the writer can accept data;
otherwise the chunks are dropped and the writer is returned unchanged.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
  if !writer.canAcceptData then
    writer
  else
    -- Keep the running byte total in sync with the buffered chunks.
    let extraBytes := data.foldl (fun acc c => acc + c.data.size) 0
    { writer with
        userData := writer.userData ++ data
        userDataBytes := writer.userDataBytes + extraBytes }
|
||||
|
||||
/--
Writes accumulated user data to output using fixed-size encoding.

Copies at most `limitSize` bytes from `userData` into `outputData`; any chunk
bytes beyond the limit stay buffered (a partially consumed chunk is split).
Returns the updated writer together with the number of limit bytes still
unused after this flush.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
  if writer.userData.size = 0 then
    (writer, limitSize)
  else
    -- Fold state: bytes to emit, chunks (or chunk tails) kept for later, bytes taken so far.
    let (chunks, pending, totalSize) := writer.userData.foldl (fun (state : Array ByteArray × Array Chunk × Nat) chunk =>
      let (acc, pending, size) := state
      if size >= limitSize then
        -- Limit already reached: keep the whole chunk for a later flush.
        (acc, pending.push chunk, size)
      else
        let remaining := limitSize - size
        let takeSize := min chunk.data.size remaining
        let dataPart := chunk.data.extract 0 takeSize
        let acc := if takeSize = 0 then acc else acc.push dataPart
        let size := size + takeSize
        if takeSize < chunk.data.size then
          -- Chunk straddles the limit: re-buffer its untaken tail.
          let pendingChunk : Chunk := { chunk with data := chunk.data.extract takeSize chunk.data.size }
          (acc, pending.push pendingChunk, size)
        else
          (acc, pending, size)
      ) (#[], #[], 0)
    let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks)
    let remaining := limitSize - totalSize
    ({ writer with userData := pending, outputData, userDataBytes := writer.userDataBytes - totalSize }, remaining)
|
||||
|
||||
/--
Writes accumulated user data to output using chunked transfer encoding, clearing
the user-data buffer and its byte counter.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
  if writer.userData.isEmpty then
    writer
  else
    let encoded := writer.userData.foldl (Encode.encode .v11) writer.outputData
    { writer with userData := #[], userDataBytes := 0, outputData := encoded }

/--
Flushes any buffered chunks, writes the final chunk terminator (0\r\n\r\n), and
transitions to the complete state.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
  let flushed := writer.writeChunkedBody
  { flushed with
      state := .complete
      outputData := flushed.outputData.write "0\r\n\r\n".toUTF8 }
|
||||
|
||||
/--
Extracts all accumulated output data and returns it with a cleared output buffer.

NOTE(review): this currently always returns `some`; the `Option` in the result
type carries no information here.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
  let drained := { writer with outputData := ChunkedBuffer.empty }
  some (drained, writer.outputData.toByteArray)

/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
  { writer with state := state }
|
||||
|
||||
/--
Writes the message headers to the output buffer by serializing `messageHead`
with the HTTP/1.1 encoder into `outputData`.
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
  { writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead }
|
||||
|
||||
/--
Checks if the connection should be kept alive based on the `Connection` header
of the outgoing message head. Absent header means keep-alive (HTTP/1.1 default).
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
  match writer.messageHead.headers.get? .connection with
  | none => true
  | some v =>
    -- `Connection` is a comma-separated list of options (RFC 9112 §9.6). The previous
    -- exact-match check (`v.value.toLower != "close"`) missed values such as
    -- "keep-alive, close"; split on commas and look for a "close" token instead.
    let tokens := (v.value.toLower.splitOn ",").map String.trim
    !tokens.contains "close"
|
||||
|
||||
/--
Closes the writer, transitioning to the closed state; no further data can be written.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
  writer.setState .closed
|
||||
|
||||
end Writer
|
||||
188
src/Std/Internal/Http/Server.lean
Normal file
188
src/Std/Internal/Http/Server.lean
Normal file
@@ -0,0 +1,188 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Async
|
||||
public import Std.Internal.Async.TCP
|
||||
public import Std.Sync.CancellationToken
|
||||
public import Std.Sync.Semaphore
|
||||
public import Std.Internal.Http.Server.Config
|
||||
public import Std.Internal.Http.Server.Handler
|
||||
public import Std.Internal.Http.Server.Connection
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# HTTP Server
|
||||
|
||||
This module defines a simple, asynchronous HTTP/1.1 server implementation.
|
||||
|
||||
It provides the `Std.Http.Server` structure, which encapsulates all server state, and functions for
|
||||
starting, managing, and gracefully shutting down the server.
|
||||
|
||||
The server runs entirely using `Async` and uses a shared `CancellationContext` to signal shutdowns.
|
||||
Each active client connection is tracked, and the server automatically resolves its shutdown
|
||||
promise once all connections have closed.
|
||||
-/
|
||||
|
||||
namespace Std.Http
|
||||
open Std.Internal.IO.Async TCP
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
The `Server` structure holds all state required to manage the lifecycle of an HTTP server, including
connection tracking and shutdown coordination.
-/
structure Server where
  /--
  The cancellation context used for shutting down all connections and the accept loop.
  -/
  context : Std.CancellationContext
  /--
  Count of currently active HTTP connections, guarded by a mutex.
  -/
  activeConnections : Std.Mutex UInt64
  /--
  Semaphore used to enforce the maximum number of simultaneous active connections.
  `none` means no connection limit.
  -/
  connectionLimit : Option Std.Semaphore
  /--
  Signalled (via a channel send) once the server has successfully shut down,
  i.e. after cancellation when the last active connection finishes.
  -/
  shutdownPromise : Std.Channel Unit
  /--
  Configuration of the server.
  -/
  config : Std.Http.Config
|
||||
|
||||
namespace Server
|
||||
|
||||
/--
Create a new `Server` structure with an optional configuration.
`maxConnections = 0` disables the connection limit (no semaphore is created).
-/
def new (config : Std.Http.Config := {}) : IO Server := do
  let context ← Std.CancellationContext.new
  let activeConnections ← Std.Mutex.new 0
  let connectionLimit ←
    match config.maxConnections with
    | 0 => pure none
    | n => some <$> Std.Semaphore.new n
  let shutdownPromise ← Std.Channel.new
  return { context, activeConnections, connectionLimit, shutdownPromise, config }
|
||||
|
||||
/--
Triggers cancellation of all requests and the accept loop in the server. This function should be used
in conjunction with `waitShutdown` to properly coordinate the shutdown sequence.
-/
@[inline]
def shutdown (s : Server) : Async Unit :=
  s.context.cancel .shutdown

/--
Waits for the server to shut down. Blocks until another task or async operation calls the `shutdown`
function and the last active connection signals `shutdownPromise`.
-/
@[inline]
def waitShutdown (s : Server) : Async Unit := do
  Async.ofAsyncTask ((← s.shutdownPromise.recv).map Except.ok)

/--
Returns a `Selector` that waits for the server to shut down.
-/
@[inline]
def waitShutdownSelector (s : Server) : Selector Unit :=
  s.shutdownPromise.recvSelector
|
||||
|
||||
/--
Triggers cancellation of all requests and the accept loop, then waits for the server to fully shut down.
This is a convenience function combining `shutdown` and then `waitShutdown`.
-/
@[inline]
def shutdownAndWait (s : Server) : Async Unit := do
  -- Delegate to `shutdown` instead of duplicating `s.context.cancel .shutdown`,
  -- so the cancellation reason stays defined in exactly one place.
  s.shutdown
  s.waitShutdown
|
||||
|
||||
/--
Wraps `action` with connection bookkeeping: increments the active-connection
count before running and decrements it afterwards (even when `action` fails).
When `releaseConnectionPermit` is set, the connection-limit semaphore permit is
returned first. If, after decrementing, no connections remain and the server
context has been cancelled, signals `shutdownPromise` so `waitShutdown` unblocks.
-/
@[inline]
private def frameCancellation (s : Server) (releaseConnectionPermit : Bool := false)
    (action : ContextAsync α) : ContextAsync α := do
  s.activeConnections.atomically (modify (· + 1))
  try
    action
  finally
    if releaseConnectionPermit then
      if let some limit := s.connectionLimit then
        limit.release

    -- Decrement and check for the "last connection after shutdown" condition under
    -- the same lock, so the shutdown signal cannot be missed or sent twice racily.
    s.activeConnections.atomically do
      modify (· - 1)

      if (← get) = 0 ∧ (← s.context.isCancelled) then
        discard <| s.shutdownPromise.send ()
|
||||
|
||||
/--
Start a new HTTP/1.1 server on the given socket address. This function uses `Async` to handle tasks
and TCP connections, and returns a `Server` structure that can be used to cancel the server.
-/
def serve {σ : Type} [Handler σ]
    (addr : Net.SocketAddress)
    (handler : σ)
    (config : Config := {}) (backlog : UInt32 := 1024) : Async Server := do

  let httpServer ← Server.new config

  let server ← Socket.Server.mk
  server.bind addr
  server.listen backlog
  server.noDelay

  -- Accept loop; wrapped in `frameCancellation` so the loop itself counts as an
  -- "active connection" and shutdown completes only after it exits.
  let runServer := do
    frameCancellation httpServer (action := do
      while true do
        -- With a connection limit, block for a permit before accepting.
        let permitAcquired ←
          if let some limit := httpServer.connectionLimit then
            let permit ← limit.acquire
            await permit
            pure true
          else
            pure false

        -- Race an incoming connection against context cancellation.
        let result ← Selectable.one #[
          .case (server.acceptSelector) (fun x => pure <| some x),
          .case (← ContextAsync.doneSelector) (fun _ => pure none)
        ]

        match result with
        | some client =>
          -- Attach the peer address as a request extension when it can be read;
          -- a failure here is non-fatal and just leaves the extensions empty.
          let extensions ← do
            match (← EIO.toBaseIO client.getPeerName) with
            | .ok addr => pure <| Extensions.empty.insert (Server.RemoteAddr.mk addr)
            | .error _ => pure Extensions.empty

          -- Handle the connection in the background; `frameCancellation` releases
          -- the semaphore permit (if any) and updates the connection count when done.
          ContextAsync.background
            (frameCancellation httpServer (releaseConnectionPermit := permitAcquired)
              (action := do
                serveConnection client handler config extensions))
        | none =>
          -- Cancelled: give back an unused permit and leave the accept loop.
          if permitAcquired then
            if let some limit := httpServer.connectionLimit then
              limit.release
          break
      )

  background (runServer httpServer.context)

  return httpServer
|
||||
|
||||
end Std.Http.Server
|
||||
196
src/Std/Internal/Http/Server/Config.lean
Normal file
196
src/Std/Internal/Http/Server/Config.lean
Normal file
@@ -0,0 +1,196 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Time
|
||||
public import Std.Internal.Http.Protocol.H1
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Config
|
||||
|
||||
This module exposes the `Config`, a structure that describes timeout, request and headers
|
||||
configuration of an HTTP Server.
|
||||
-/
|
||||
|
||||
namespace Std.Http
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
HTTP server configuration: connection limits, timeouts, and per-message parser limits.
-/
structure Config where
  /--
  Maximum number of simultaneous active connections (default: 1024).
  Setting this to `0` disables the limit entirely: the server will accept any number of
  concurrent connections and no semaphore-based cap is enforced. Use with care — an
  unconstrained server may exhaust file descriptors or memory under adversarial load.
  -/
  maxConnections : Nat := 1024
  /--
  Maximum number of requests per connection.
  -/
  maxRequests : Nat := 100
  /--
  Maximum number of headers allowed per request.
  -/
  maxHeaders : Nat := 50
  /--
  Maximum aggregate byte size of all header field lines in a single message
  (name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
  -/
  maxHeaderBytes : Nat := 65536
  /--
  Timeout (in milliseconds) for receiving additional data while a request is actively being
  processed (e.g. reading the request body). Applies after the request headers have been parsed
  and replaces the keep-alive timeout for the duration of the request.
  -/
  lingeringTimeout : Time.Millisecond.Offset := 10000
  /--
  Timeout for keep-alive connections; must be strictly positive.
  -/
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨12000, by decide⟩
  /--
  Maximum time (in milliseconds) allowed to receive the complete request headers after the first
  byte of a new request arrives. This prevents Slowloris-style attacks where a client sends bytes
  at a slow rate to hold a connection slot open without completing a request. Once a request starts,
  each individual read must complete within this window. Default: 5 seconds.
  -/
  headerTimeout : Time.Millisecond.Offset := 5000
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The maximum size that the connection can receive in a single recv call.
  -/
  maximumRecvSize : Nat := 8192
  /--
  Default buffer size for the connection.
  -/
  defaultPayloadBytes : Nat := 8192
  /--
  Whether to automatically generate the `Date` header in responses.
  -/
  generateDate : Bool := true
  /--
  The `Server` header value injected into outgoing responses.
  `none` suppresses the header entirely.
  -/
  serverName : Option Header.Value := some (.mk "LeanHTTP/1.1")
  /--
  Maximum length of request URI (default: 8192 bytes).
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum number of bytes consumed while parsing request start-lines (default: 8192 bytes).
  -/
  maxStartLineLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes).
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes).
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 16).
  -/
  maxSpaceSequence : Nat := 16
  /--
  Maximum number of leading empty lines (bare CRLF) to skip before a request-line
  (RFC 9112 §2.2 robustness). Default: 8.
  -/
  maxLeadingEmptyLines : Nat := 8
  /--
  Maximum length of chunk extension name (default: 256 bytes).
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes).
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
  -/
  maxChunkLineLength : Nat := 8192
  /--
  Maximum allowed chunk payload size in bytes (default: 8 MiB).
  -/
  maxChunkSize : Nat := 8 * 1024 * 1024
  /--
  Maximum allowed total body size per request in bytes (default: 64 MiB).
  -/
  maxBodySize : Nat := 64 * 1024 * 1024
  /--
  Maximum length of reason phrase (default: 512 bytes).
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 20).
  -/
  maxTrailerHeaders : Nat := 20
  /--
  Maximum number of extensions on a single chunk-size line (default: 16).
  -/
  maxChunkExtensions : Nat := 16
|
||||
|
||||
namespace Config
|
||||
|
||||
/--
Converts the server-level configuration to the HTTP/1.1 protocol configuration,
mapping server limits onto the corresponding parser/writer limits
(`maxRequests` becomes `maxMessages`, `serverName` becomes `agentName`).
-/
def toH1Config (config : Config) : Protocol.H1.Config where
  maxMessages := config.maxRequests
  maxHeaders := config.maxHeaders
  maxHeaderBytes := config.maxHeaderBytes
  enableKeepAlive := config.enableKeepAlive
  agentName := config.serverName
  maxUriLength := config.maxUriLength
  maxStartLineLength := config.maxStartLineLength
  maxHeaderNameLength := config.maxHeaderNameLength
  maxHeaderValueLength := config.maxHeaderValueLength
  maxSpaceSequence := config.maxSpaceSequence
  maxLeadingEmptyLines := config.maxLeadingEmptyLines
  maxChunkExtensions := config.maxChunkExtensions
  maxChunkExtNameLength := config.maxChunkExtNameLength
  maxChunkExtValueLength := config.maxChunkExtValueLength
  maxChunkLineLength := config.maxChunkLineLength
  maxChunkSize := config.maxChunkSize
  maxBodySize := config.maxBodySize
  maxReasonPhraseLength := config.maxReasonPhraseLength
  maxTrailerHeaders := config.maxTrailerHeaders
|
||||
|
||||
end Std.Http.Config
|
||||
530
src/Std/Internal/Http/Server/Connection.lean
Normal file
530
src/Std/Internal/Http/Server/Connection.lean
Normal file
@@ -0,0 +1,530 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Async.TCP
|
||||
public import Std.Internal.Async.ContextAsync
|
||||
public import Std.Internal.Http.Transport
|
||||
public import Std.Internal.Http.Protocol.H1
|
||||
public import Std.Internal.Http.Server.Config
|
||||
public import Std.Internal.Http.Server.Handler
|
||||
|
||||
public section
|
||||
|
||||
namespace Std
|
||||
namespace Http
|
||||
namespace Server
|
||||
|
||||
open Std Internal IO Async TCP Protocol
|
||||
open Time
|
||||
|
||||
/-!
|
||||
# Connection
|
||||
|
||||
This module defines `Server.Connection`, a structure used to handle a single HTTP connection with
|
||||
possibly multiple requests.
|
||||
-/
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
Represents the remote address of a client connection.
-/
public structure RemoteAddr where
  /--
  The socket address (IP address and port) of the remote client.
  -/
  addr : Net.SocketAddress
-- `TypeName` lets this value be stored in the request `Extensions`
-- (presumably how handlers retrieve the peer address — confirm against callers).
deriving TypeName
|
||||
|
||||
/-- Renders a remote address as `ip:port`. -/
instance : ToString RemoteAddr where
  toString remote := s!"{remote.addr.ipAddr}:{remote.addr.port}"
|
||||
|
||||
/--
A single HTTP connection.
-/
public structure Connection (α : Type) where
  /--
  The client connection. `α` is expected to implement `Transport`
  (see `Connection.handle`).
  -/
  socket : α

  /--
  The processing machine for HTTP/1.1.
  -/
  machine : H1.Machine .receiving

  /--
  Extensions to attach to each request (e.g., remote address).
  -/
  extensions : Extensions := .empty
|
||||
|
||||
namespace Connection
|
||||
|
||||
/--
Events produced by the async select loop in `receiveWithTimeout`.
Each variant corresponds to one possible outcome of waiting for I/O.
-/
private inductive Recv (β : Type)
  /-- Bytes arrived from the transport (`none` means the peer ended its input). -/
  | bytes (x : Option ByteArray)
  /-- A chunk of the handler's response body (`none` means the body finished). -/
  | responseBody (x : Option Chunk)
  /-- The request-body stream reported whether a reader wants more data. -/
  | bodyInterest (x : Bool)
  /-- The dispatched handler completed with a response or an error. -/
  | response (x : (Except Error (Response β)))
  /-- The per-request (header/lingering) timeout fired. -/
  | timeout
  /-- The idle keep-alive timeout between requests fired. -/
  | keepAliveTimeout
  /-- The connection context was cancelled (e.g. server shutdown). -/
  | shutdown
  /-- A transport error occurred; the connection must close immediately. -/
  | close
|
||||
|
||||
/--
The set of I/O sources to wait on during a single poll iteration.
Each `Option` field is `none` when that source is not currently active.
-/
private structure PollSources (α β : Type) where
  /-- Transport to read request bytes from, when socket input is wanted. -/
  socket : Option α
  /-- Parser's hint for how many bytes to request from the transport. -/
  expect : Option Nat
  /-- Channel delivering the handler's response, once a handler was dispatched. -/
  response : Option (Std.Channel (Except Error (Response β)))
  /-- Response body currently being streamed to the client, if any. -/
  responseBody : Option β
  /-- Request-body stream whose reader interest is watched, if pullable now. -/
  requestBody : Option Body.Stream
  /-- Fallback timeout for this poll iteration. -/
  timeout : Millisecond.Offset
  /-- Idle keep-alive timeout, active only between requests. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Absolute deadline for the request head, armed after the first byte arrived. -/
  headerTimeout : Option Timestamp
  /-- Cancellation context of the whole connection (shutdown/deadline). -/
  connectionContext : CancellationContext
|
||||
|
||||
/--
Waits for the next I/O event across all active sources described by `sources`.
Computes the socket recv size from `config`, then races all active selectables.
Calls `Handler.onFailure` and returns `.close` on transport errors.
-/
private def pollNextEvent
    {σ β : Type} [Transport α] [Handler σ] [Body β]
    (config : Config) (handler : σ) (sources : PollSources α β)
    : Async (Recv β) := do
  -- Ask the transport for the parser's expected byte count, defaulted and
  -- clamped by the configuration.
  let expectedBytes := sources.expect
    |>.getD config.defaultPayloadBytes
    |>.min config.maximumRecvSize
    |>.toUInt64

  -- Cancellation is always raced: a `.deadline` reason maps to `.timeout`,
  -- any other reason (e.g. shutdown) maps to `.shutdown`.
  let mut selectables : Array (Selectable (Recv β)) := #[
    .case sources.connectionContext.doneSelector (fun _ => do
      let reason ← sources.connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]

  if let some socket := sources.socket then
    selectables := selectables.push (.case (Transport.recvSelector socket expectedBytes) (Recv.bytes · |> pure))

  -- Exactly one timer is armed per iteration: keep-alive takes precedence over
  -- the header deadline, which takes precedence over the generic timeout.
  if let some keepAliveTimeout := sources.keepAliveTimeout then
    selectables := selectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .keepAliveTimeout))
  else if let some timeout := sources.headerTimeout then
    selectables := selectables.push (.case (← Selector.sleep (timeout - (← Timestamp.now)).toMilliseconds) (fun _ => pure .timeout))
  else
    selectables := selectables.push (.case (← Selector.sleep sources.timeout) (fun _ => pure .timeout))

  if let some responseBody := sources.responseBody then
    selectables := selectables.push (.case (Body.recvSelector responseBody) (Recv.responseBody · |> pure))

  if let some requestBody := sources.requestBody then
    selectables := selectables.push (.case (requestBody.interestSelector) (Recv.bodyInterest · |> pure))

  if let some response := sources.response then
    selectables := selectables.push (.case response.recvSelector (Recv.response · |> pure))

  -- A transport failure is reported to the handler and closes the connection.
  try Selectable.one selectables
  catch e =>
    Handler.onFailure handler e
    pure .close
|
||||
|
||||
/--
Handles the `Expect: 100-continue` protocol for a pending request head.
Races between the handler's decision (`Handler.onContinue`), the connection being
cancelled, and a lingering timeout. Returns the updated machine and whether
`pendingHead` should be cleared (i.e. when the request is rejected).
-/
private def handleContinueEvent
    {σ : Type} [Handler σ]
    (handler : σ) (machine : H1.Machine .receiving) (head : Request.Head)
    (config : Config) (connectionContext : CancellationContext)
    : Async (H1.Machine .receiving × Bool) := do

  -- Run the handler's decision as a task and funnel its result through a
  -- channel so it can participate in the race below. A handler exception
  -- counts as a refusal.
  let continueChannel : Std.Channel Bool ← Std.Channel.new
  let continueTask ← Handler.onContinue handler head |>.asTask

  BaseIO.chainTask continueTask fun
    | .ok v => discard <| continueChannel.send v
    | .error _ => discard <| continueChannel.send false

  -- Cancellation and the lingering timeout both reject the body.
  let canContinue ← Selectable.one #[
    .case continueChannel.recvSelector pure,
    .case connectionContext.doneSelector (fun _ => pure false),
    .case (← Selector.sleep config.lingeringTimeout) (fun _ => pure false)
  ]

  -- Accept with `100 Continue` or reject with `417 Expectation Failed`.
  let status := if canContinue then Status.«continue» else Status.expectationFailed
  return (machine.canContinue status, !canContinue)
|
||||
|
||||
/--
Injects a `Date` header into a response head if `Config.generateDate` is set
and the response does not already include one.
-/
private def prepareResponseHead (config : Config) (head : Response.Head) : Async Response.Head := do
  -- Nothing to do when date generation is off or the handler set its own date.
  if ¬config.generateDate ∨ head.headers.contains Header.Name.date then
    return head
  let timestamp ← Std.Time.DateTime.now (tz := .UTC)
  let dateValue := Header.Value.ofString! timestamp.toRFC822String
  return { head with headers := head.headers.insert Header.Name.date dateValue }
|
||||
|
||||
/--
Applies a successful handler response to the machine.
Optionally injects a `Date` header, records the known body size, and sends the
response head. Returns the updated machine and the body stream to drain, or `none`
when the body should be omitted (e.g., for HEAD requests).
-/
private def applyResponse
    {β : Type} [Body β]
    (config : Config) (machine : H1.Machine .receiving) (res : Response β)
    : Async (H1.Machine .receiving × Option β) := do
  -- Record the body size on the machine when it is known up front.
  let sized ←
    match ← Body.getKnownSize res.body with
    | some knownSize => pure (machine.setKnownSize knownSize)
    | none => pure machine

  let head ← prepareResponseHead config res.line
  let sent := sized.send head

  if sent.writer.omitBody then
    -- The body will not be transmitted: release it right away.
    unless (← Body.isClosed res.body) do
      Body.close res.body
    return (sent, none)

  return (sent, some res.body)
|
||||
|
||||
/--
All mutable state carried through the connection processing loop.
Bundled into a struct so it can be passed to and returned from helper functions.
-/
private structure ConnectionState (β : Type) where
  /-- The HTTP/1.1 parsing/serializing state machine. -/
  machine : H1.Machine .receiving
  /-- Stream the current request body is pushed into for the handler. -/
  requestStream : Body.Stream
  /-- Idle timeout between requests; `none` once a request is in flight. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Fallback timeout used for the current poll iteration. -/
  currentTimeout : Millisecond.Offset
  /-- Absolute deadline for the request head, armed after the first byte. -/
  headerTimeout : Option Timestamp
  /-- Channel on which the dispatched handler delivers its response. -/
  response : Std.Channel (Except Error (Response β))
  /-- Response body currently being streamed to the client, if any. -/
  respStream : Option β
  /-- Whether the machine asked for more input from the transport. -/
  requiresData : Bool
  /-- Parser's hint for how many bytes to read next. -/
  expectData : Option Nat
  /-- Whether a handler task is currently running for this request. -/
  handlerDispatched : Bool
  /-- Parsed request head awaiting dispatch to the handler. -/
  pendingHead : Option Request.Head
||||
|
||||
/--
Processes all H1 events from a single machine step, updating the connection state.
Handles keep-alive resets, body-size tracking, `Expect: 100-continue`, and parse errors.
Returns the updated state; stops early on `.failed`.
-/
private def processH1Events
    {σ β : Type} [Handler σ] [Body β]
    (handler : σ) (config : Config) (connectionContext : CancellationContext)
    (events : Array (H1.Event .receiving))
    (state : ConnectionState β)
    : Async (ConnectionState β) := do

  let mut st := state

  for event in events do
    match event with
    | .needMoreData expect =>
      -- Parser needs more input; remember how much it expects.
      st := { st with requiresData := true, expectData := expect }

    | .needAnswer => pure ()

    | .endHeaders head =>

      -- Sets the pending head and removes the KeepAlive or Header timeout.
      st := { st with
        currentTimeout := config.lingeringTimeout
        keepAliveTimeout := none
        headerTimeout := none
        pendingHead := some head
      }

      if let some length := head.getSize true then
        -- Sets the size of the body that is going out of the connection.
        Body.setKnownSize st.requestStream (some length)

    | .«continue» =>
      -- `Expect: 100-continue`: ask the handler whether to accept the body.
      if let some head := st.pendingHead then
        let (newMachine, clearPending) ← handleContinueEvent handler st.machine head config connectionContext
        st := { st with machine := newMachine }
        if clearPending then
          st := { st with pendingHead := none }

    | .next =>
      -- Reset all per-request state for the next pipelined request.
      if ¬(← Body.isClosed st.requestStream) then
        Body.close st.requestStream

      if let some res := st.respStream then
        if ¬(← Body.isClosed res) then
          Body.close res

      let newStream ← Body.mkStream

      st := { st with
        requestStream := newStream
        response := ← Std.Channel.new
        respStream := none
        keepAliveTimeout := some config.keepAliveTimeout.val
        currentTimeout := config.keepAliveTimeout.val
        headerTimeout := none
        handlerDispatched := false
      }

    | .failed err =>
      -- Parse failure: notify the handler, drop the request, and stop
      -- processing further events from this step.
      Handler.onFailure handler (toString err)

      if ¬(← Body.isClosed st.requestStream) then
        Body.close st.requestStream

      st := { st with requiresData := false, pendingHead := none }
      break

    | .closeBody =>
      if ¬(← Body.isClosed st.requestStream) then
        Body.close st.requestStream

    | .close => pure ()

  return st
|
||||
|
||||
/--
Dispatches a pending request head to the handler if one is waiting.
Spawns the handler as an async task and routes its result back through `state.response`.
Returns the updated state with `pendingHead` cleared and `handlerDispatched` set.
-/
private def dispatchPendingRequest
    {σ : Type} [Handler σ]
    (handler : σ) (extensions : Extensions) (connectionContext : CancellationContext)
    (state : ConnectionState (Handler.ResponseBody σ))
    : Async (ConnectionState (Handler.ResponseBody σ)) := do
  let some head := state.pendingHead
    | return state

  -- Run the handler concurrently; its result (or failure) is forwarded to the
  -- response channel, which the poll loop races on.
  let request : Request Body.Stream := { line := head, body := state.requestStream, extensions }
  let task ← (Handler.onRequest handler request connectionContext).asTask
  BaseIO.chainTask task fun outcome => discard <| state.response.send outcome

  return { state with pendingHead := none, handlerDispatched := true }
|
||||
|
||||
/--
Processes a single async I/O event and updates the connection state.
Returns the updated state and `true` if the connection should be closed immediately.
-/
private def handleRecvEvent
    {σ β : Type} [Handler σ] [Body β]
    (handler : σ) (config : Config)
    (event : Recv β) (state : ConnectionState β)
    : Async (ConnectionState β × Bool) := do

  match event with
  | .bytes (some bs) =>

    let mut st := state

    -- After the first byte after idle we switch from keep-alive timeout to per-request header timeout.
    if st.keepAliveTimeout.isSome then
      st := { st with
        keepAliveTimeout := none
        headerTimeout := some <| (← Timestamp.now) + config.headerTimeout
      }

    return ({ st with machine := st.machine.feed bs }, false)

  | .bytes none =>
    -- Peer ended its input; let the machine finish with what it has.
    return ({ state with machine := state.machine.noMoreInput }, false)

  | .responseBody (some chunk) =>
    -- Forward one chunk of the handler's body to the wire.
    return ({ state with machine := state.machine.sendData #[chunk] }, false)

  | .responseBody none =>
    -- Handler finished its body: close the stream and tell the machine.
    if let some res := state.respStream then
      if ¬(← Body.isClosed res) then Body.close res
    return ({ state with machine := state.machine.userClosedBody, respStream := none }, false)

  | .bodyInterest interested =>
    if interested then
      -- A reader wants request-body data: pull what the machine has buffered.
      let (newMachine, pulledChunk) := state.machine.pullBody
      let mut st := { state with machine := newMachine }

      if let some pulled := pulledChunk then
        -- Send failures are reported but do not tear down the connection here.
        try st.requestStream.send pulled.chunk pulled.incomplete
        catch e => Handler.onFailure handler e
        if pulled.final then
          if ¬(← Body.isClosed st.requestStream) then
            Body.close st.requestStream

      return (st, false)
    else
      return (state, false)

  | .close => return (state, true)

  | .timeout =>
    Handler.onFailure handler "request header timeout"
    return ({ state with machine := state.machine.closeWithError .requestTimeout, handlerDispatched := false }, false)

  | .keepAliveTimeout =>
    -- Idle expiry between requests is not reported as a failure.
    return ({ state with machine := state.machine.closeWithError .requestTimeout, handlerDispatched := false }, false)

  | .shutdown =>
    return ({ state with machine := state.machine.closeWithError .serviceUnavailable, handlerDispatched := false }, false)

  | .response (.error err) =>
    Handler.onFailure handler err
    return ({ state with machine := state.machine.closeWithError .internalServerError, handlerDispatched := false }, false)

  | .response (.ok res) =>
    if state.machine.failed then
      -- The machine already failed; discard the late response body.
      if ¬(← Body.isClosed res.body) then Body.close res.body
      return ({ state with handlerDispatched := false }, false)
    else
      let (newMachine, newRespStream) ← applyResponse config state.machine res
      return ({ state with machine := newMachine, handlerDispatched := false, respStream := newRespStream }, false)
|
||||
|
||||
/--
Computes the active `PollSources` for the current connection state.
Determines which IO sources need attention and whether to include the socket.
-/
private def buildPollSources
    {α β : Type} [Transport α]
    (socket : α) (connectionContext : CancellationContext) (state : ConnectionState β)
    : Async (PollSources α β) := do
  -- The request-body stream only matters while the machine can still pull
  -- body bytes and the stream has not been closed.
  let requestBodyOpen ←
    if state.machine.canPullBody then pure !(← Body.isClosed state.requestStream)
    else pure false

  let requestBodyInterested ←
    if state.machine.canPullBody ∧ requestBodyOpen then state.requestStream.hasInterest
    else pure false

  let requestBody ←
    if state.machine.canPullBodyNow ∧ requestBodyOpen then pure (some state.requestStream)
    else pure none

  -- Include the socket only when there is more to do than waiting for the handler alone.
  let pollSocket :=
    state.requiresData ∨ !state.handlerDispatched ∨ state.respStream.isSome ∨
    state.machine.writer.sentMessage ∨ (state.machine.canPullBody ∧ requestBodyInterested)

  return {
    socket := if pollSocket then some socket else none
    expect := state.expectData
    response := if state.handlerDispatched then some state.response else none
    responseBody := state.respStream
    requestBody := requestBody
    timeout := state.currentTimeout
    keepAliveTimeout := state.keepAliveTimeout
    headerTimeout := state.headerTimeout
    connectionContext := connectionContext
  }
|
||||
|
||||
/--
Runs the main request/response processing loop for a single connection.
Drives the HTTP/1.1 state machine through four phases each iteration:
send buffered output, process H1 events, dispatch pending requests, poll for I/O.
-/
private def handle
    {σ : Type} [Transport α] [h : Handler σ]
    (connection : Connection α)
    (config : Config)
    (connectionContext : CancellationContext)
    (handler : σ) : Async Unit := do

  -- Bring the handler's body instance into scope for `ConnectionState β`.
  let _ : Body (Handler.ResponseBody σ) := Handler.responseBodyInstance

  let socket := connection.socket
  let initStream ← Body.mkStream

  -- Fresh per-connection state: idle keep-alive armed, nothing in flight.
  let mut state : ConnectionState (Handler.ResponseBody σ) := {
    machine := connection.machine
    requestStream := initStream
    keepAliveTimeout := some config.keepAliveTimeout.val
    currentTimeout := config.keepAliveTimeout.val
    headerTimeout := none
    response := ← Std.Channel.new
    respStream := none
    requiresData := false
    expectData := none
    handlerDispatched := false
    pendingHead := none
  }

  while ¬state.machine.halted do

    -- Phase 1: advance the state machine and flush any output.
    let (newMachine, step) := state.machine.step
    state := { state with machine := newMachine }

    if step.output.size > 0 then
      try Transport.sendAll socket step.output.data
      catch e =>
        Handler.onFailure handler e
        break

    -- Phase 2: process all events emitted by this step.
    state ← processH1Events handler config connectionContext step.events state

    -- Phase 3: dispatch any newly parsed request to the handler.
    state ← dispatchPendingRequest handler connection.extensions connectionContext state

    -- Phase 4: wait for the next IO event when any source needs attention.
    if state.requiresData ∨ state.handlerDispatched ∨ state.respStream.isSome ∨ state.machine.canPullBody then
      state := { state with requiresData := false }
      let sources ← buildPollSources socket connectionContext state
      let event ← pollNextEvent config handler sources
      let (newState, shouldClose) ← handleRecvEvent handler config event state
      state := newState
      if shouldClose then break

  -- Clean up: close all open streams and the socket.
  if ¬(← Body.isClosed state.requestStream) then
    Body.close state.requestStream

  if let some res := state.respStream then
    if ¬(← Body.isClosed res) then Body.close res

  Transport.close socket
|
||||
|
||||
end Connection
|
||||
|
||||
/--
Handles request/response processing for a single connection using an `Async` handler.
The library-level entry point for running a server is `Server.serve`.
This function can be used with a `TCP.Socket` or any other type that implements
`Transport` to build custom server loops.

# Example

```lean
-- Create a TCP socket server instance
let server ← Socket.Server.mk
server.bind addr
server.listen backlog

-- Enter an infinite loop to handle incoming client connections
while true do
  let client ← server.accept
  background (serveConnection client handler config)
```
-/
def serveConnection
    {σ : Type} [Transport t] [Handler σ]
    (client : t) (handler : σ)
    (config : Config) (extensions : Extensions := .empty) : ContextAsync Unit := do
  let ctx ← ContextAsync.getContext
  let connection : Connection t :=
    { socket := client, machine := { config := config.toH1Config }, extensions }
  connection.handle config ctx handler
|
||||
|
||||
end Std.Http.Server
|
||||
60	src/Std/Internal/Http/Server/Handler.lean	(new file)
@@ -0,0 +1,60 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Async
|
||||
public import Std.Internal.Http.Data
|
||||
public import Std.Internal.Async.ContextAsync
|
||||
|
||||
public section
|
||||
|
||||
namespace Std.Http.Server
|
||||
|
||||
open Std.Internal.IO.Async
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
A type class for handling HTTP server requests. Implement this class to define how the server
responds to incoming requests, failures, and `Expect: 100-continue` headers.
-/
class Handler (σ : Type) where
  /--
  Concrete body type produced by `onRequest`.
  Defaults to `Body.Any`, but handlers may override it with any reader/writer-compatible body.
  -/
  ResponseBody : Type := Body.Any

  /--
  Body instance required by the connection loop for receiving response chunks.
  -/
  [responseBodyInstance : Body ResponseBody]

  /--
  Called for each incoming HTTP request. Runs in `ContextAsync`, so the handler can
  observe cancellation of the surrounding connection context.
  -/
  onRequest (self : σ) (request : Request Body.Stream) : ContextAsync (Response ResponseBody)

  /--
  Called when an I/O or transport error occurs while processing a request (e.g. broken socket,
  handler exception). This is a **notification only**: the connection will close regardless of
  the handler's response. Use this for logging and metrics. The default implementation does nothing.
  -/
  onFailure (self : σ) (error : IO.Error) : Async Unit :=
    pure ()

  /--
  Called when a request includes an `Expect: 100-continue` header. Return `true` to send a
  `100 Continue` response and accept the body. If `false` is returned the server sends
  `417 Expectation Failed`, disables keep-alive, and closes the request body reader.
  This function is guarded by `Config.lingeringTimeout` and may be cancelled on server shutdown.
  The default implementation always returns `true`.
  -/
  onContinue (self : σ) (request : Request.Head) : Async Bool :=
    pure true
|
||||
|
||||
end Std.Http.Server
|
||||
249	src/Std/Internal/Http/Transport.lean	(new file)
@@ -0,0 +1,249 @@
|
||||
/-
|
||||
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Sofia Rodrigues
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Std.Internal.Http.Protocol.H1
|
||||
|
||||
public section
|
||||
|
||||
/-!
|
||||
# Transport
|
||||
|
||||
This module exposes a `Transport` type class that is used to represent different transport mechanisms
|
||||
that can be used with an HTTP connection.
|
||||
-/
|
||||
|
||||
namespace Std.Http
|
||||
open Std Internal IO Async TCP
|
||||
|
||||
set_option linter.all true
|
||||
|
||||
/--
Generic HTTP interface that abstracts over different transport mechanisms.
-/
class Transport (α : Type) where
  /--
  Receive data from the client connection, up to the expected size.
  Returns None if the connection is closed or no data is available.
  The size is a hint — implementations may return more or fewer bytes
  (e.g. the mock transport joins whole chunks; confirm per implementation).
  -/
  recv : α → UInt64 → Async (Option ByteArray)

  /--
  Send all data through the client connection.
  -/
  sendAll : α → Array ByteArray → Async Unit

  /--
  Get a selector for receiving data asynchronously.
  -/
  recvSelector : α → UInt64 → Selector (Option ByteArray)

  /--
  Close the transport connection.
  The default implementation is a no-op; override this for transports that require explicit teardown.
  For `Socket.Client`, the runtime closes the file descriptor when the object is finalized.
  -/
  close : α → IO Unit := fun _ => pure ()
|
||||
|
||||
-- TCP sockets rely on the default no-op `close`: per the `Transport.close`
-- docstring, the runtime closes the file descriptor on finalization.
instance : Transport Socket.Client where
  recv client expect := client.recv? expect
  sendAll client data := client.sendAll data
  recvSelector client expect := client.recvSelector expect
|
||||
|
||||
open Internal.IO.Async in
|
||||
|
||||
/--
Shared state for a bidirectional mock connection.
Both endpoints created by `Mock.new` hold the same pair of channels.
-/
private structure MockLink.SharedState where
  /--
  Client to server direction.
  -/
  clientToServer : Std.CloseableChannel ByteArray

  /--
  Server to client direction.
  -/
  serverToClient : Std.CloseableChannel ByteArray
|
||||
|
||||
/--
Mock client endpoint for testing.
-/
structure Mock.Client where
  -- Shared with the paired `Mock.Server` created by `Mock.new`.
  private shared : MockLink.SharedState

/--
Mock server endpoint for testing.
-/
structure Mock.Server where
  -- Shared with the paired `Mock.Client` created by `Mock.new`.
  private shared : MockLink.SharedState
|
||||
|
||||
namespace Mock
|
||||
|
||||
/--
Creates a mock server and client that are connected to each other and share the
same underlying state, enabling bidirectional communication.
-/
def new : BaseIO (Mock.Client × Mock.Server) := do
  let clientToServer ← Std.CloseableChannel.new
  let serverToClient ← Std.CloseableChannel.new
  let shared : MockLink.SharedState := ⟨clientToServer, serverToClient⟩
  return (⟨shared⟩, ⟨shared⟩)
|
||||
|
||||
/--
Receives data from a channel, joining all available data up to the expected size. First does a
blocking recv, then greedily consumes available data with tryRecv until `expect` bytes are reached.
Note: the result may exceed `expect`, since whole chunks are appended.
-/
def recvJoined (recvChan : Std.CloseableChannel ByteArray) (expect : Option UInt64) : Async (Option ByteArray) := do
  match ← await (← recvChan.recv) with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      -- Stop once at least `expect` bytes were gathered (when a bound is given).
      if let some expect := expect then
        if result.size.toUInt64 ≥ expect then break

      -- Drain whatever is immediately available without blocking.
      match ← recvChan.tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
|
||||
|
||||
/--
Sends a single ByteArray through a channel.
-/
def send (sendChan : Std.CloseableChannel ByteArray) (data : ByteArray) : Async Unit := do
  -- Surface channel errors (e.g. sending on a closed channel) as `IO.userError`.
  Async.ofAsyncTask ((← sendChan.send data) |>.map (Except.mapError (IO.userError ∘ toString)))
|
||||
|
||||
/--
Sends ByteArrays through a channel, one chunk at a time, in order.
-/
def sendAll (sendChan : Std.CloseableChannel ByteArray) (data : Array ByteArray) : Async Unit :=
  data.forM (send sendChan)
|
||||
|
||||
/--
Creates a selector for receiving from a channel.
NOTE(review): presumably yields `none` once the channel is closed and drained —
confirm against `CloseableChannel.recvSelector` semantics.
-/
def recvSelector (recvChan : Std.CloseableChannel ByteArray) : Selector (Option ByteArray) :=
  recvChan.recvSelector
|
||||
|
||||
end Mock
|
||||
|
||||
namespace Mock.Client
|
||||
|
||||
/--
Gets the receive channel for a client (server to client direction).
-/
def getRecvChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.serverToClient

/--
Gets the send channel for a client (client to server direction).
-/
def getSendChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.clientToServer

/--
Sends a single ByteArray.
-/
def send (client : Mock.Client) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan client) data

/--
Receives data, joining all available chunks. Blocks until at least one chunk
arrives; `none` presumably indicates the channel was closed (see `Mock.recvJoined`).
-/
def recv? (client : Mock.Client) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan client) expect
|
||||
|
||||
/--
Tries to receive data without blocking, joining all immediately available chunks.
Returns `none` if no data is available.
-/
def tryRecv? (client : Mock.Client) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  -- NOTE(review): `_expect` is currently ignored; all available chunks are joined.
  match ← (getRecvChan client).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      match ← (getRecvChan client).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
|
||||
|
||||
/--
Closes both directions of the mock link, skipping channels that are already closed.
-/
def close (client : Mock.Client) : IO Unit := do
  unless (← client.shared.clientToServer.isClosed) do
    client.shared.clientToServer.close
  unless (← client.shared.serverToClient.isClosed) do
    client.shared.serverToClient.close
|
||||
|
||||
end Mock.Client
|
||||
|
||||
namespace Mock.Server
|
||||
|
||||
/--
Gets the receive channel for a server (client to server direction).
-/
def getRecvChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.clientToServer

/--
Gets the send channel for a server (server to client direction).
-/
def getSendChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.serverToClient

/--
Sends a single ByteArray.
-/
def send (server : Mock.Server) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan server) data

/--
Receives data, joining all available chunks. Blocks until at least one chunk
arrives; `none` presumably indicates the channel was closed (see `Mock.recvJoined`).
-/
def recv? (server : Mock.Server) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan server) expect
|
||||
|
||||
/--
Tries to receive data without blocking, joining all immediately available chunks. Returns `none` if no
data is available.
-/
def tryRecv? (server : Mock.Server) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  -- NOTE(review): `_expect` is currently ignored; all available chunks are joined.
  match ← (getRecvChan server).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      match ← (getRecvChan server).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
|
||||
|
||||
/--
Closes both directions of the mock link, skipping channels that are already closed.
-/
def close (server : Mock.Server) : IO Unit := do
  unless (← server.shared.clientToServer.isClosed) do
    server.shared.clientToServer.close
  unless (← server.shared.serverToClient.isClosed) do
    server.shared.serverToClient.close
|
||||
|
||||
|
||||
end Mock.Server
|
||||
|
||||
-- Mock endpoints implement `Transport` by delegating to their shared channels.
-- Note: `recvSelector` ignores the expected byte count; chunk joining only
-- happens in `recv` via `Mock.recvJoined`.
instance : Transport Mock.Client where
  recv client expect := Mock.recvJoined (Mock.Client.getRecvChan client) (some expect)
  sendAll client data := Mock.sendAll (Mock.Client.getSendChan client) data
  recvSelector client _ := Mock.recvSelector (Mock.Client.getRecvChan client)
  close client := client.close

instance : Transport Mock.Server where
  recv server expect := Mock.recvJoined (Mock.Server.getRecvChan server) (some expect)
  sendAll server data := Mock.sendAll (Mock.Server.getSendChan server) data
  recvSelector server _ := Mock.recvSelector (Mock.Server.getRecvChan server)
  close server := server.close
|
||||
|
||||
end Std.Http
|
||||
@@ -44,8 +44,15 @@ protected def Parser.run (p : Parser α) (arr : ByteArray) : Except String α :=
|
||||
Parse a single byte equal to `b`, fails if different.
|
||||
-/
|
||||
@[inline]
def pbyte (b : UInt8) : Parser UInt8 := fun it =>
  -- Direct iterator implementation: on failure the original iterator is
  -- returned, so no input is consumed and no `attempt` backtracking is needed.
  if h : it.hasNext then
    let got := it.curr' h
    if got = b then
      .success (it.next' h) got
    else
      .error it (.other s!"expected: '{b}'")
  else
    .error it .eof
|
||||
|
||||
/--
|
||||
Skip a single byte equal to `b`, fails if different.
|
||||
@@ -57,16 +64,29 @@ def skipByte (b : UInt8) : Parser Unit :=
|
||||
/--
|
||||
Skip a sequence of bytes equal to the given `ByteArray`.
|
||||
-/
|
||||
def skipBytes (arr : ByteArray) : Parser Unit := do
|
||||
for b in arr do
|
||||
skipByte b
|
||||
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
|
||||
let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
|
||||
if h : idx < arr.size then
|
||||
if hnext : it.hasNext then
|
||||
let got := it.curr' hnext
|
||||
let want := arr[idx]
|
||||
if got = want then
|
||||
go (idx + 1) (it.next' hnext)
|
||||
else
|
||||
.error it (.other s!"expected byte {want}, got {got}")
|
||||
else
|
||||
.error it .eof
|
||||
else
|
||||
.success it ()
|
||||
go 0 it
|
||||
|
||||
/--
|
||||
Parse a string by matching its UTF-8 bytes, returns the string on success.
|
||||
-/
|
||||
@[inline]
|
||||
def pstring (s : String) : Parser String := do
|
||||
skipBytes s.toUTF8
|
||||
let utf8 := s.toUTF8
|
||||
skipBytes utf8
|
||||
return s
|
||||
|
||||
/--
|
||||
@@ -193,19 +213,47 @@ def take (n : Nat) : Parser ByteSlice := fun it =>
|
||||
else
|
||||
.success (it.forward n) (it.array[it.idx...(it.idx+n)])
|
||||
|
||||
/--
|
||||
Scans while `pred` is satisfied. Returns `(count, iterator, hitEof)`.
|
||||
-/
|
||||
private partial def scanWhile (pred : UInt8 → Bool) (count : Nat) (iter : ByteArray.Iterator) :
|
||||
Nat × ByteArray.Iterator × Bool :=
|
||||
if h : iter.hasNext then
|
||||
if pred (iter.curr' h) then
|
||||
scanWhile pred (count + 1) (iter.next' h)
|
||||
else
|
||||
(count, iter, false)
|
||||
else
|
||||
(count, iter, true)
|
||||
|
||||
/--
|
||||
Scans while `pred` is satisfied, bounded by `limit`.
|
||||
Returns `(count, iterator, hitEof)`.
|
||||
-/
|
||||
private partial def scanWhileUpTo (pred : UInt8 → Bool) (limit : Nat) (count : Nat)
|
||||
(iter : ByteArray.Iterator) : Nat × ByteArray.Iterator × Bool :=
|
||||
if count ≥ limit then
|
||||
(count, iter, false)
|
||||
else if h : iter.hasNext then
|
||||
if pred (iter.curr' h) then
|
||||
scanWhileUpTo pred limit (count + 1) (iter.next' h)
|
||||
else
|
||||
(count, iter, false)
|
||||
else
|
||||
(count, iter, true)
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied.
|
||||
Fails with `.eof` if input ends while the predicate still holds.
|
||||
-/
|
||||
@[inline]
|
||||
partial def takeWhile (pred : UInt8 → Bool) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
|
||||
if ¬iter.hasNext then (count, iter)
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else (count, iter)
|
||||
|
||||
let (length, newIt) := findEnd 0 it
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
let (length, newIt, hitEof) := scanWhile pred 0 it
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Parses until a predicate is satisfied (exclusive).
|
||||
@@ -216,16 +264,16 @@ def takeUntil (pred : UInt8 → Bool) : Parser ByteSlice :=
|
||||
|
||||
/--
|
||||
Skips while a predicate is satisfied.
|
||||
Fails with `.eof` if input ends while the predicate still holds.
|
||||
-/
|
||||
@[inline]
|
||||
partial def skipWhile (pred : UInt8 → Bool) : Parser Unit :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
|
||||
if ¬iter.hasNext then iter
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else iter
|
||||
|
||||
.success (findEnd 0 it) ()
|
||||
let (_, newIt, hitEof) := scanWhile pred 0 it
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt ()
|
||||
|
||||
/--
|
||||
Skips until a predicate is satisfied.
|
||||
@@ -236,34 +284,31 @@ def skipUntil (pred : UInt8 → Bool) : Parser Unit :=
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, up to a given limit.
|
||||
Fails with `.eof` if input ends before stopping or reaching the limit.
|
||||
-/
|
||||
@[inline]
|
||||
partial def takeWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
|
||||
if count ≥ limit then (count, iter)
|
||||
else if ¬iter.hasNext then (count, iter)
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else (count, iter)
|
||||
let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
|
||||
|
||||
let (length, newIt) := findEnd 0 it
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, up to a given limit, requiring at least one byte.
|
||||
Fails with `.eof` if input ends before stopping or reaching the limit.
|
||||
-/
|
||||
@[inline]
|
||||
def takeWhileUpTo1 (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator :=
|
||||
if count ≥ limit then (count, iter)
|
||||
else if ¬iter.hasNext then (count, iter)
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else (count, iter)
|
||||
let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
|
||||
|
||||
let (length, newIt) := findEnd 0 it
|
||||
if length = 0 then
|
||||
.error it (if newIt.atEnd then .eof else .other "expected at least one char")
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else if length = 0 then
|
||||
.error it (.other "expected at least one char")
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
@@ -274,19 +319,42 @@ Parses until a predicate is satisfied (exclusive), up to a given limit.
|
||||
def takeUntilUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
takeWhileUpTo (fun b => ¬pred b) limit
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, consuming at most `limit` bytes.
|
||||
Unlike `takeWhileUpTo`, succeeds even if input ends before the predicate stops holding.
|
||||
-/
|
||||
@[inline]
|
||||
def takeWhileAtMost (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Parses while a predicate is satisfied, consuming at most `limit` bytes, requiring at least one.
|
||||
Unlike `takeWhileUpTo1`, succeeds even if input ends before the predicate stops holding.
|
||||
-/
|
||||
@[inline]
|
||||
def takeWhile1AtMost (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
|
||||
fun it =>
|
||||
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
|
||||
if length = 0 then
|
||||
.error it (.other "expected at least one char")
|
||||
else
|
||||
.success newIt (it.array[it.idx...(it.idx + length)])
|
||||
|
||||
/--
|
||||
Skips while a predicate is satisfied, up to a given limit.
|
||||
Fails with `.eof` if input ends before stopping or reaching the limit.
|
||||
-/
|
||||
@[inline]
|
||||
partial def skipWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser Unit :=
|
||||
fun it =>
|
||||
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
|
||||
if count ≥ limit then iter
|
||||
else if ¬iter.hasNext then iter
|
||||
else if pred iter.curr then findEnd (count + 1) iter.next
|
||||
else iter
|
||||
let (_, newIt, hitEof) := scanWhileUpTo pred limit 0 it
|
||||
|
||||
.success (findEnd 0 it) ()
|
||||
if hitEof then
|
||||
.error newIt .eof
|
||||
else
|
||||
.success newIt ()
|
||||
|
||||
/--
|
||||
Skips until a predicate is satisfied, up to a given limit.
|
||||
|
||||
@@ -11,6 +11,7 @@ public import Std.Sync.Channel
|
||||
public import Std.Sync.Mutex
|
||||
public import Std.Sync.RecursiveMutex
|
||||
public import Std.Sync.Barrier
|
||||
public import Std.Sync.Semaphore
|
||||
public import Std.Sync.SharedMutex
|
||||
public import Std.Sync.Notify
|
||||
public import Std.Sync.Broadcast
|
||||
|
||||
96
src/Std/Sync/Semaphore.lean
Normal file
96
src/Std/Sync/Semaphore.lean
Normal file
@@ -0,0 +1,96 @@
|
||||
/-
|
||||
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Lean FRO Contributors
|
||||
-/
|
||||
module
|
||||
|
||||
prelude
|
||||
public import Init.Data.Queue
|
||||
public import Init.System.Promise
|
||||
public import Std.Sync.Mutex
|
||||
|
||||
public section
|
||||
|
||||
namespace Std
|
||||
|
||||
private structure SemaphoreState where
|
||||
permits : Nat
|
||||
waiters : Std.Queue (IO.Promise Unit) := ∅
|
||||
deriving Nonempty
|
||||
|
||||
/--
|
||||
Counting semaphore.
|
||||
|
||||
`Semaphore.acquire` returns a promise that is resolved once a permit is available.
|
||||
If a permit is currently available, the returned promise is already resolved.
|
||||
`Semaphore.release` either resolves one waiting promise or increments the available permits.
|
||||
-/
|
||||
structure Semaphore where private mk ::
|
||||
private lock : Mutex SemaphoreState
|
||||
|
||||
/--
|
||||
Creates a resolved promise.
|
||||
-/
|
||||
private def mkResolvedPromise [Nonempty α] (a : α) : BaseIO (IO.Promise α) := do
|
||||
let promise ← IO.Promise.new
|
||||
promise.resolve a
|
||||
return promise
|
||||
|
||||
/--
|
||||
Creates a new semaphore with `permits` initially available permits.
|
||||
-/
|
||||
def Semaphore.new (permits : Nat) : BaseIO Semaphore := do
|
||||
return { lock := ← Mutex.new { permits } }
|
||||
|
||||
/--
|
||||
Requests one permit.
|
||||
Returns a promise that resolves once the permit is acquired.
|
||||
-/
|
||||
def Semaphore.acquire (sem : Semaphore) : BaseIO (IO.Promise Unit) := do
|
||||
sem.lock.atomically do
|
||||
let st ← get
|
||||
if st.permits > 0 then
|
||||
set { st with permits := st.permits - 1 }
|
||||
mkResolvedPromise ()
|
||||
else
|
||||
let promise ← IO.Promise.new
|
||||
set { st with waiters := st.waiters.enqueue promise }
|
||||
return promise
|
||||
|
||||
/--
|
||||
Tries to acquire a permit without blocking. Returns `true` on success.
|
||||
-/
|
||||
def Semaphore.tryAcquire (sem : Semaphore) : BaseIO Bool := do
|
||||
sem.lock.atomically do
|
||||
let st ← get
|
||||
if st.permits > 0 then
|
||||
set { st with permits := st.permits - 1 }
|
||||
return true
|
||||
else
|
||||
return false
|
||||
|
||||
/--
|
||||
Releases one permit and resolves one waiting acquirer, if any.
|
||||
-/
|
||||
def Semaphore.release (sem : Semaphore) : BaseIO Unit := do
|
||||
let waiter? ← sem.lock.atomically do
|
||||
let st ← get
|
||||
match st.waiters.dequeue? with
|
||||
| some (waiter, waiters) =>
|
||||
set { st with waiters }
|
||||
return some waiter
|
||||
| none =>
|
||||
set { st with permits := st.permits + 1 }
|
||||
return none
|
||||
if let some waiter := waiter? then
|
||||
waiter.resolve ()
|
||||
|
||||
/--
|
||||
Returns the number of currently available permits.
|
||||
-/
|
||||
def Semaphore.availablePermits (sem : Semaphore) : BaseIO Nat :=
|
||||
sem.lock.atomically do
|
||||
return (← get).permits
|
||||
|
||||
end Std
|
||||
@@ -49,7 +49,7 @@ data_value mk_data_value(data_value_kind k, char const * val) {
|
||||
extern "C" object * lean_register_option(obj_arg name, obj_arg decl);
|
||||
|
||||
void register_option(name const & n, name const & decl_name, data_value_kind k, char const * default_value, char const * description) {
|
||||
object_ref decl = mk_cnstr(0, n, decl_name, mk_data_value(k, default_value), string_ref(description));
|
||||
object_ref decl = mk_cnstr(0, n, decl_name, mk_data_value(k, default_value), string_ref(description), object_ref(lean_box(0)));
|
||||
consume_io_result(lean_register_option(n.to_obj_arg(), decl.to_obj_arg()));
|
||||
}
|
||||
}
|
||||
|
||||
BIN
stage0/src/include/lean/lean.h
generated
BIN
stage0/src/include/lean/lean.h
generated
Binary file not shown.
BIN
stage0/src/runtime/object.cpp
generated
BIN
stage0/src/runtime/object.cpp
generated
Binary file not shown.
BIN
stage0/stdlib/Init/Data/ByteArray/Basic.c
generated
BIN
stage0/stdlib/Init/Data/ByteArray/Basic.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/DSL/Attributes.c
generated
BIN
stage0/stdlib/Lake/DSL/Attributes.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/DSL/AttributesCore.c
generated
BIN
stage0/stdlib/Lake/DSL/AttributesCore.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/DSL/DeclUtil.c
generated
BIN
stage0/stdlib/Lake/DSL/DeclUtil.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/DSL/Meta.c
generated
BIN
stage0/stdlib/Lake/DSL/Meta.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/DSL/Package.c
generated
BIN
stage0/stdlib/Lake/DSL/Package.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/DSL/Targets.c
generated
BIN
stage0/stdlib/Lake/DSL/Targets.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/Toml/Elab/Expression.c
generated
BIN
stage0/stdlib/Lake/Toml/Elab/Expression.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/Toml/Elab/Value.c
generated
BIN
stage0/stdlib/Lake/Toml/Elab/Value.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/Toml/ParserUtil.c
generated
BIN
stage0/stdlib/Lake/Toml/ParserUtil.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lake/Util/OrderedTagAttribute.c
generated
BIN
stage0/stdlib/Lake/Util/OrderedTagAttribute.c
generated
Binary file not shown.
BIN
stage0/stdlib/Lean/AddDecl.c
generated
BIN
stage0/stdlib/Lean/AddDecl.c
generated
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user